feat(api, ui): add bookdrop bulk edit and metadata pattern extraction (#1846)

* feat: add bulk editors for bookdrop

* fix: update pattern behaviour and remove redundant frontend logic

* fix: clean up pattern extractor

* fix: create shared logic to align bulk edit and pattern extract and resolve some minor behaviour issues

* fix: date matching pattern and make pattern matching ignore extra trailing data

* chore: clean up tests and code

* chore: clean up autogenerated testing rules

* fix: update to use the new dialog launcher service

* fix: add boolean null check and data validation on pattern extract api

* feat: add bulk edit batching to avoid issues with extremely large import counts

* fix: add timeout to avoid potential ReDoS issue

* fix: add try blocks to handle potential NumberFormatException

* fix: update ISBN and ASIN regex to better match spec

* fix: improve error handling and logging

* fix: make component names consistent with the project

* fix: missing import for pattern syntax exception

* chore: add additional tests for the bulk edit service

* fix: improve accessibility of new UI elements

* fix: further improvements to the pattern extractor timeout

* fix: improve frontend placeholder validation

* fix: add back changes accidentally removed by merge
CounterClops
2025-12-17 14:27:17 +08:00
committed by GitHub
parent 0a5f12f38c
commit 6df338a0d7
24 changed files with 3139 additions and 10 deletions


@@ -2,16 +2,23 @@ package com.adityachandel.booklore.controller;
import com.adityachandel.booklore.model.dto.BookdropFile;
import com.adityachandel.booklore.model.dto.BookdropFileNotification;
import com.adityachandel.booklore.model.dto.request.BookdropBulkEditRequest;
import com.adityachandel.booklore.model.dto.request.BookdropFinalizeRequest;
import com.adityachandel.booklore.model.dto.request.BookdropPatternExtractRequest;
import com.adityachandel.booklore.model.dto.request.BookdropSelectionRequest;
import com.adityachandel.booklore.model.dto.response.BookdropBulkEditResult;
import com.adityachandel.booklore.model.dto.response.BookdropFinalizeResult;
import com.adityachandel.booklore.model.dto.response.BookdropPatternExtractResult;
import com.adityachandel.booklore.service.bookdrop.BookDropService;
import com.adityachandel.booklore.service.bookdrop.BookdropBulkEditService;
import com.adityachandel.booklore.service.bookdrop.BookdropMonitoringService;
import com.adityachandel.booklore.service.monitoring.MonitoringService;
import com.adityachandel.booklore.service.bookdrop.FilenamePatternExtractor;
import io.swagger.v3.oas.annotations.Operation;
import io.swagger.v3.oas.annotations.Parameter;
import io.swagger.v3.oas.annotations.tags.Tag;
import io.swagger.v3.oas.annotations.responses.ApiResponse;
import jakarta.validation.Valid;
import lombok.AllArgsConstructor;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -26,6 +33,8 @@ public class BookdropFileController {
private final BookDropService bookDropService;
private final BookdropMonitoringService monitoringService;
private final FilenamePatternExtractor filenamePatternExtractor;
private final BookdropBulkEditService bookdropBulkEditService;
@Operation(summary = "Get bookdrop notification summary", description = "Retrieve a summary of bookdrop file notifications.")
@ApiResponse(responseCode = "200", description = "Notification summary returned successfully")
@@ -68,4 +77,22 @@ public class BookdropFileController {
monitoringService.rescanBookdropFolder();
return ResponseEntity.ok().build();
}
@Operation(summary = "Extract metadata from filenames using pattern", description = "Parse filenames of selected files using a pattern to extract metadata fields.")
@ApiResponse(responseCode = "200", description = "Pattern extraction completed")
@PostMapping("/files/extract-pattern")
public ResponseEntity<BookdropPatternExtractResult> extractFromPattern(
@Parameter(description = "Pattern extraction request") @Valid @RequestBody BookdropPatternExtractRequest request) {
BookdropPatternExtractResult result = filenamePatternExtractor.bulkExtract(request);
return ResponseEntity.ok(result);
}
@Operation(summary = "Bulk edit metadata for selected files", description = "Apply metadata changes to multiple selected files at once.")
@ApiResponse(responseCode = "200", description = "Bulk edit completed")
@PostMapping("/files/bulk-edit")
public ResponseEntity<BookdropBulkEditResult> bulkEditMetadata(
@Parameter(description = "Bulk edit request") @Valid @RequestBody BookdropBulkEditRequest request) {
BookdropBulkEditResult result = bookdropBulkEditService.bulkEdit(request);
return ResponseEntity.ok(result);
}
}
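
For orientation, a minimal client-side sketch (not part of this PR) of the JSON body the new /files/extract-pattern endpoint accepts. The field names mirror BookdropPatternExtractRequest below; the pattern string and IDs are illustrative only.

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.List;
import java.util.Map;

public class PatternExtractRequestSketch {
    public static void main(String[] args) throws Exception {
        // Field names follow BookdropPatternExtractRequest; values are made up.
        Map<String, Object> body = Map.of(
                "pattern", "{SeriesName} - Ch {SeriesNumber}",
                "selectAll", false,
                "selectedIds", List.of(1L, 2L, 3L),
                "preview", true
        );
        // Key order may vary; prints JSON such as:
        // {"pattern":"{SeriesName} - Ch {SeriesNumber}","selectAll":false,"selectedIds":[1,2,3],"preview":true}
        System.out.println(new ObjectMapper().writeValueAsString(body));
    }
}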


@@ -0,0 +1,20 @@
package com.adityachandel.booklore.model.dto.request;
import com.adityachandel.booklore.model.dto.BookMetadata;
import lombok.Data;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Set;
@Data
public class BookdropBulkEditRequest {
@NotNull
private BookMetadata fields;
@NotNull
private Set<String> enabledFields;
private boolean mergeArrays;
private boolean selectAll;
private List<Long> excludedIds;
private List<Long> selectedIds;
}


@@ -0,0 +1,16 @@
package com.adityachandel.booklore.model.dto.request;
import lombok.Data;
import jakarta.validation.constraints.NotBlank;
import java.util.List;
@Data
public class BookdropPatternExtractRequest {
@NotBlank
private String pattern;
private Boolean selectAll;
private List<Long> excludedIds;
private List<Long> selectedIds;
private Boolean preview;
}


@@ -0,0 +1,12 @@
package com.adityachandel.booklore.model.dto.response;
import lombok.Builder;
import lombok.Data;
@Data
@Builder
public class BookdropBulkEditResult {
private int totalFiles;
private int successfullyUpdated;
private int failed;
}


@@ -0,0 +1,26 @@
package com.adityachandel.booklore.model.dto.response;
import com.adityachandel.booklore.model.dto.BookMetadata;
import lombok.Builder;
import lombok.Data;
import java.util.List;
@Data
@Builder
public class BookdropPatternExtractResult {
private int totalFiles;
private int successfullyExtracted;
private int failed;
private List<FileExtractionResult> results;
@Data
@Builder
public static class FileExtractionResult {
private Long fileId;
private String fileName;
private boolean success;
private BookMetadata extractedMetadata;
private String errorMessage;
}
}


@@ -0,0 +1,138 @@
package com.adityachandel.booklore.service.bookdrop;
import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.request.BookdropBulkEditRequest;
import com.adityachandel.booklore.model.dto.response.BookdropBulkEditResult;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.util.*;
@Slf4j
@Service
@RequiredArgsConstructor
public class BookdropBulkEditService {
private static final int BATCH_SIZE = 500;
private final BookdropFileRepository bookdropFileRepository;
private final BookdropMetadataHelper metadataHelper;
@Transactional
public BookdropBulkEditResult bulkEdit(BookdropBulkEditRequest request) {
List<Long> fileIds = metadataHelper.resolveFileIds(
request.isSelectAll(),
request.getExcludedIds(),
request.getSelectedIds()
);
return processBulkEditInBatches(fileIds, request);
}
private BookdropBulkEditResult processBulkEditInBatches(List<Long> fileIds, BookdropBulkEditRequest request) {
int totalSuccessCount = 0;
int totalFailedCount = 0;
int totalFiles = fileIds.size();
for (int batchStart = 0; batchStart < fileIds.size(); batchStart += BATCH_SIZE) {
int batchEnd = Math.min(batchStart + BATCH_SIZE, fileIds.size());
BatchEditResult batchResult = processSingleBatch(fileIds, batchStart, batchEnd, request);
totalSuccessCount += batchResult.successCount();
totalFailedCount += batchResult.failureCount();
log.debug("Processed batch {}-{} of {}: {} successful, {} failed",
batchStart, batchEnd, totalFiles, batchResult.successCount(), batchResult.failureCount());
}
return BookdropBulkEditResult.builder()
.totalFiles(totalFiles)
.successfullyUpdated(totalSuccessCount)
.failed(totalFailedCount)
.build();
}
private BatchEditResult processSingleBatch(List<Long> allFileIds, int batchStart, int batchEnd,
BookdropBulkEditRequest request) {
List<Long> batchIds = allFileIds.subList(batchStart, batchEnd);
List<BookdropFileEntity> batchFiles = bookdropFileRepository.findAllById(batchIds);
int successCount = 0;
int failureCount = 0;
Set<Long> failedFileIds = new HashSet<>();
for (BookdropFileEntity file : batchFiles) {
try {
updateFileMetadata(file, request);
successCount++;
} catch (RuntimeException e) {
log.error("Failed to update metadata for file {} ({}): {}",
file.getId(), file.getFileName(), e.getMessage(), e);
failureCount++;
failedFileIds.add(file.getId());
}
}
List<BookdropFileEntity> filesToSave = batchFiles.stream()
.filter(file -> !failedFileIds.contains(file.getId()))
.toList();
if (!filesToSave.isEmpty()) {
bookdropFileRepository.saveAll(filesToSave);
}
return new BatchEditResult(successCount, failureCount);
}
private void updateFileMetadata(BookdropFileEntity file, BookdropBulkEditRequest request) {
BookMetadata currentMetadata = metadataHelper.getCurrentMetadata(file);
BookMetadata updates = request.getFields();
Set<String> enabledFields = request.getEnabledFields();
boolean mergeArrays = request.isMergeArrays();
if (enabledFields.contains("seriesName") && updates.getSeriesName() != null) {
currentMetadata.setSeriesName(updates.getSeriesName());
}
if (enabledFields.contains("seriesTotal") && updates.getSeriesTotal() != null) {
currentMetadata.setSeriesTotal(updates.getSeriesTotal());
}
if (enabledFields.contains("publisher") && updates.getPublisher() != null) {
currentMetadata.setPublisher(updates.getPublisher());
}
if (enabledFields.contains("language") && updates.getLanguage() != null) {
currentMetadata.setLanguage(updates.getLanguage());
}
updateArrayField("authors", enabledFields, currentMetadata.getAuthors(), updates.getAuthors(),
currentMetadata::setAuthors, mergeArrays);
updateArrayField("categories", enabledFields, currentMetadata.getCategories(), updates.getCategories(),
currentMetadata::setCategories, mergeArrays);
updateArrayField("moods", enabledFields, currentMetadata.getMoods(), updates.getMoods(),
currentMetadata::setMoods, mergeArrays);
updateArrayField("tags", enabledFields, currentMetadata.getTags(), updates.getTags(),
currentMetadata::setTags, mergeArrays);
metadataHelper.updateFetchedMetadata(file, currentMetadata);
}
private void updateArrayField(String fieldName, Set<String> enabledFields,
Set<String> currentValue, Set<String> newValue,
java.util.function.Consumer<Set<String>> setter, boolean mergeArrays) {
if (enabledFields.contains(fieldName) && newValue != null) {
if (mergeArrays && currentValue != null) {
Set<String> merged = new LinkedHashSet<>(currentValue);
merged.addAll(newValue);
setter.accept(merged);
} else {
setter.accept(newValue);
}
}
}
private record BatchEditResult(int successCount, int failureCount) {}
}
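
The batching above exists to keep very large "select all" imports from loading every row at once (see the "bulk edit batching" commit message). A minimal standalone sketch of the same windowing idea, with BATCH_SIZE and the ID count chosen only for illustration:

import java.util.ArrayList;
import java.util.List;
import java.util.stream.LongStream;

public class BatchWindowSketch {
    private static final int BATCH_SIZE = 500;

    public static void main(String[] args) {
        List<Long> ids = LongStream.rangeClosed(1, 1_250).boxed().toList();
        List<List<Long>> batches = new ArrayList<>();
        // Walk the ID list in windows of at most BATCH_SIZE, as processBulkEditInBatches does.
        for (int start = 0; start < ids.size(); start += BATCH_SIZE) {
            int end = Math.min(start + BATCH_SIZE, ids.size());
            batches.add(ids.subList(start, end));
        }
        // Prints: 3 batches with sizes [500, 500, 250]
        System.out.println(batches.size() + " batches with sizes "
                + batches.stream().map(List::size).toList());
    }
}

Each repository round-trip in the service then touches at most one such window, so memory use stays bounded regardless of how many files are selected.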


@@ -0,0 +1,70 @@
package com.adityachandel.booklore.service.bookdrop;
import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.stereotype.Component;
import java.util.Collections;
import java.util.List;
@Slf4j
@Component
@RequiredArgsConstructor
public class BookdropMetadataHelper {
private final BookdropFileRepository bookdropFileRepository;
private final ObjectMapper objectMapper;
public List<Long> resolveFileIds(boolean selectAll, List<Long> excludedIds, List<Long> selectedIds) {
if (selectAll) {
List<Long> excluded = excludedIds != null ? excludedIds : Collections.emptyList();
if (excluded.isEmpty()) {
return bookdropFileRepository.findAllIds();
} else {
return bookdropFileRepository.findAllExcludingIdsFlat(excluded);
}
}
return selectedIds != null ? selectedIds : Collections.emptyList();
}
public BookMetadata getCurrentMetadata(BookdropFileEntity file) {
try {
String fetchedMetadataJson = file.getFetchedMetadata();
if (fetchedMetadataJson != null && !fetchedMetadataJson.isBlank()) {
return objectMapper.readValue(fetchedMetadataJson, BookMetadata.class);
}
} catch (Exception e) {
log.error("Error parsing existing metadata for file {}: {}", file.getId(), e.getMessage());
}
return new BookMetadata();
}
public void updateFetchedMetadata(BookdropFileEntity file, BookMetadata metadata) {
try {
String updatedMetadataJson = objectMapper.writeValueAsString(metadata);
file.setFetchedMetadata(updatedMetadataJson);
} catch (Exception e) {
log.error("Error serializing metadata for file {}: {}", file.getId(), e.getMessage());
throw new RuntimeException("Failed to update metadata", e);
}
}
public void mergeMetadata(BookMetadata target, BookMetadata source) {
if (source.getSeriesName() != null) target.setSeriesName(source.getSeriesName());
if (source.getTitle() != null) target.setTitle(source.getTitle());
if (source.getSubtitle() != null) target.setSubtitle(source.getSubtitle());
if (source.getAuthors() != null && !source.getAuthors().isEmpty()) target.setAuthors(source.getAuthors());
if (source.getSeriesNumber() != null) target.setSeriesNumber(source.getSeriesNumber());
if (source.getPublishedDate() != null) target.setPublishedDate(source.getPublishedDate());
if (source.getPublisher() != null) target.setPublisher(source.getPublisher());
if (source.getLanguage() != null) target.setLanguage(source.getLanguage());
if (source.getSeriesTotal() != null) target.setSeriesTotal(source.getSeriesTotal());
if (source.getIsbn10() != null) target.setIsbn10(source.getIsbn10());
if (source.getIsbn13() != null) target.setIsbn13(source.getIsbn13());
if (source.getAsin() != null) target.setAsin(source.getAsin());
}
}
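
BookdropMetadataHelper round-trips the fetchedMetadata JSON column through Jackson. A self-contained sketch of that round-trip, where the nested Meta class is a hypothetical stand-in for BookMetadata (whose definition is not part of this diff):

import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Set;

public class MetadataRoundTripSketch {
    // Hypothetical stand-in for BookMetadata, not the real DTO.
    static class Meta {
        public String seriesName;
        public Set<String> authors;
    }

    public static void main(String[] args) throws Exception {
        ObjectMapper mapper = new ObjectMapper();
        Meta meta = new Meta();
        meta.seriesName = "Chronicles of Earth";
        meta.authors = Set.of("John Smith");
        // Serialize as updateFetchedMetadata does, then parse back as getCurrentMetadata does.
        String stored = mapper.writeValueAsString(meta);
        Meta reloaded = mapper.readValue(stored, Meta.class);
        // Prints: Chronicles of Earth by [John Smith]
        System.out.println(reloaded.seriesName + " by " + reloaded.authors);
    }
}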


@@ -0,0 +1,630 @@
package com.adityachandel.booklore.service.bookdrop;
import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.request.BookdropPatternExtractRequest;
import com.adityachandel.booklore.model.dto.response.BookdropPatternExtractResult;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.io.FilenameUtils;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import jakarta.annotation.PreDestroy;
import java.time.LocalDate;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.util.*;
import java.util.concurrent.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
@Slf4j
@Service
@RequiredArgsConstructor
public class FilenamePatternExtractor {
private final BookdropFileRepository bookdropFileRepository;
private final BookdropMetadataHelper metadataHelper;
private final ExecutorService regexExecutor = Executors.newCachedThreadPool(runnable -> {
Thread thread = new Thread(runnable);
thread.setDaemon(true);
return thread;
});
private static final int PREVIEW_FILE_LIMIT = 5;
private static final long REGEX_TIMEOUT_SECONDS = 5;
private static final int TWO_DIGIT_YEAR_CUTOFF = 50;
private static final int TWO_DIGIT_YEAR_CENTURY_BASE = 1900;
private static final int FOUR_DIGIT_YEAR_LENGTH = 4;
private static final int TWO_DIGIT_YEAR_LENGTH = 2;
private static final int COMPACT_DATE_LENGTH = 8;
private static final Map<String, PlaceholderConfig> PLACEHOLDER_CONFIGS = Map.ofEntries(
Map.entry("SeriesName", new PlaceholderConfig("(.+?)", "seriesName")),
Map.entry("Title", new PlaceholderConfig("(.+?)", "title")),
Map.entry("Subtitle", new PlaceholderConfig("(.+?)", "subtitle")),
Map.entry("Authors", new PlaceholderConfig("(.+?)", "authors")),
Map.entry("SeriesNumber", new PlaceholderConfig("(\\d+(?:\\.\\d+)?)", "seriesNumber")),
Map.entry("Published", new PlaceholderConfig("(.+?)", "publishedDate")),
Map.entry("Publisher", new PlaceholderConfig("(.+?)", "publisher")),
Map.entry("Language", new PlaceholderConfig("([a-zA-Z]+)", "language")),
Map.entry("SeriesTotal", new PlaceholderConfig("(\\d+)", "seriesTotal")),
Map.entry("ISBN10", new PlaceholderConfig("(\\d{9}[0-9Xx])", "isbn10")),
Map.entry("ISBN13", new PlaceholderConfig("([0-9]{13})", "isbn13")),
Map.entry("ASIN", new PlaceholderConfig("(B[A-Za-z0-9]{9}|\\d{9}[0-9Xx])", "asin"))
);
private static final Pattern PLACEHOLDER_PATTERN = Pattern.compile("\\{(\\w+)(?::(.*?))?}|\\*");
private static final Pattern FOUR_DIGIT_YEAR_PATTERN = Pattern.compile("\\d{4}");
private static final Pattern TWO_DIGIT_YEAR_PATTERN = Pattern.compile("\\d{2}");
private static final Pattern COMPACT_DATE_PATTERN = Pattern.compile("\\d{8}");
private static final Pattern FLEXIBLE_DATE_PATTERN = Pattern.compile("(\\d{1,4})([^\\d])(\\d{1,2})\\2(\\d{1,4})");
@Transactional
public BookdropPatternExtractResult bulkExtract(BookdropPatternExtractRequest request) {
List<Long> fileIds = metadataHelper.resolveFileIds(
Boolean.TRUE.equals(request.getSelectAll()),
request.getExcludedIds(),
request.getSelectedIds()
);
boolean isPreview = Boolean.TRUE.equals(request.getPreview());
ParsedPattern cachedPattern = parsePattern(request.getPattern());
if (cachedPattern == null) {
log.error("Failed to parse pattern: '{}'", request.getPattern());
return buildEmptyResult(fileIds.size());
}
return isPreview
? processPreviewExtraction(fileIds, cachedPattern)
: processFullExtractionInBatches(fileIds, cachedPattern);
}
private BookdropPatternExtractResult processPreviewExtraction(List<Long> fileIds, ParsedPattern pattern) {
List<Long> limitedFileIds = fileIds.size() > PREVIEW_FILE_LIMIT
? fileIds.subList(0, PREVIEW_FILE_LIMIT)
: fileIds;
List<BookdropFileEntity> previewFiles = bookdropFileRepository.findAllById(limitedFileIds);
List<BookdropPatternExtractResult.FileExtractionResult> results = new ArrayList<>();
int successCount = 0;
for (BookdropFileEntity file : previewFiles) {
BookdropPatternExtractResult.FileExtractionResult result = extractFromFile(file, pattern);
results.add(result);
if (result.isSuccess()) {
successCount++;
}
}
int failureCount = previewFiles.size() - successCount;
return BookdropPatternExtractResult.builder()
.totalFiles(fileIds.size())
.successfullyExtracted(successCount)
.failed(failureCount)
.results(results)
.build();
}
private BookdropPatternExtractResult processFullExtractionInBatches(List<Long> fileIds, ParsedPattern pattern) {
final int BATCH_SIZE = 500;
List<BookdropPatternExtractResult.FileExtractionResult> allResults = new ArrayList<>();
int totalSuccessCount = 0;
int totalFailureCount = 0;
int totalFiles = fileIds.size();
for (int batchStart = 0; batchStart < fileIds.size(); batchStart += BATCH_SIZE) {
int batchEnd = Math.min(batchStart + BATCH_SIZE, fileIds.size());
BatchExtractionResult batchResult = processSingleExtractionBatch(fileIds, batchStart, batchEnd, pattern);
allResults.addAll(batchResult.results());
totalSuccessCount += batchResult.successCount();
totalFailureCount += batchResult.failureCount();
log.debug("Processed pattern extraction batch {}-{} of {}: {} successful, {} failed",
batchStart, batchEnd, totalFiles, batchResult.successCount(), batchResult.failureCount());
}
return BookdropPatternExtractResult.builder()
.totalFiles(totalFiles)
.successfullyExtracted(totalSuccessCount)
.failed(totalFailureCount)
.results(allResults)
.build();
}
private BatchExtractionResult processSingleExtractionBatch(List<Long> allFileIds, int batchStart,
int batchEnd, ParsedPattern pattern) {
List<Long> batchIds = allFileIds.subList(batchStart, batchEnd);
List<BookdropFileEntity> batchFiles = bookdropFileRepository.findAllById(batchIds);
List<BookdropPatternExtractResult.FileExtractionResult> batchResults = new ArrayList<>();
for (BookdropFileEntity file : batchFiles) {
BookdropPatternExtractResult.FileExtractionResult result = extractFromFile(file, pattern);
batchResults.add(result);
}
persistExtractedMetadata(batchResults, batchFiles);
int successCount = (int) batchResults.stream().filter(BookdropPatternExtractResult.FileExtractionResult::isSuccess).count();
int failureCount = batchFiles.size() - successCount;
return new BatchExtractionResult(batchResults, successCount, failureCount);
}
private BookdropPatternExtractResult buildEmptyResult(int totalFiles) {
return BookdropPatternExtractResult.builder()
.totalFiles(totalFiles)
.successfullyExtracted(0)
.failed(totalFiles)
.results(Collections.emptyList())
.build();
}
public BookMetadata extractFromFilename(String filename, String pattern) {
ParsedPattern parsedPattern = parsePattern(pattern);
if (parsedPattern == null) {
return null;
}
return extractFromFilenameWithParsedPattern(filename, parsedPattern);
}
private BookMetadata extractFromFilenameWithParsedPattern(String filename, ParsedPattern parsedPattern) {
String nameOnly = FilenameUtils.getBaseName(filename);
Optional<Matcher> matcherResult = executeRegexMatchingWithTimeout(parsedPattern.compiledPattern(), nameOnly);
if (matcherResult.isEmpty()) {
return null;
}
Matcher matcher = matcherResult.get();
return buildMetadataFromMatch(matcher, parsedPattern.placeholderOrder());
}
private Optional<Matcher> executeRegexMatchingWithTimeout(Pattern pattern, String input) {
Future<Optional<Matcher>> future = regexExecutor.submit(() -> {
Matcher matcher = pattern.matcher(input);
return matcher.find() ? Optional.of(matcher) : Optional.empty();
});
try {
return future.get(REGEX_TIMEOUT_SECONDS, TimeUnit.SECONDS);
} catch (TimeoutException e) {
future.cancel(true);
log.warn("Pattern matching exceeded {} second timeout for: {}",
REGEX_TIMEOUT_SECONDS, input.substring(0, Math.min(50, input.length())));
return Optional.empty();
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
return Optional.empty();
} catch (ExecutionException e) {
log.error("Pattern matching failed: {}", e.getCause() != null ? e.getCause().getMessage() : "Unknown");
return Optional.empty();
}
}
@PreDestroy
public void shutdownRegexExecutor() {
regexExecutor.shutdown();
try {
if (!regexExecutor.awaitTermination(5, TimeUnit.SECONDS)) {
regexExecutor.shutdownNow();
}
} catch (InterruptedException e) {
regexExecutor.shutdownNow();
Thread.currentThread().interrupt();
}
}
private BookdropPatternExtractResult.FileExtractionResult extractFromFile(
BookdropFileEntity file,
ParsedPattern parsedPattern) {
try {
BookMetadata extracted = extractFromFilenameWithParsedPattern(file.getFileName(), parsedPattern);
if (extracted == null) {
String errorMsg = "Pattern did not match filename structure. Check if the pattern aligns with the filename format.";
log.debug("Pattern mismatch for file '{}'", file.getFileName());
return BookdropPatternExtractResult.FileExtractionResult.builder()
.fileId(file.getId())
.fileName(file.getFileName())
.success(false)
.errorMessage(errorMsg)
.build();
}
return BookdropPatternExtractResult.FileExtractionResult.builder()
.fileId(file.getId())
.fileName(file.getFileName())
.success(true)
.extractedMetadata(extracted)
.build();
} catch (RuntimeException e) {
String errorMsg = "Extraction failed: " + e.getMessage();
log.debug("Pattern extraction failed for file '{}': {}", file.getFileName(), e.getMessage());
return BookdropPatternExtractResult.FileExtractionResult.builder()
.fileId(file.getId())
.fileName(file.getFileName())
.success(false)
.errorMessage(errorMsg)
.build();
}
}
private ParsedPattern parsePattern(String pattern) {
if (pattern == null || pattern.isBlank()) {
return null;
}
List<PlaceholderMatch> placeholderMatches = findAllPlaceholders(pattern);
StringBuilder regexBuilder = new StringBuilder();
List<String> placeholderOrder = new ArrayList<>();
int lastEnd = 0;
for (int i = 0; i < placeholderMatches.size(); i++) {
PlaceholderMatch placeholderMatch = placeholderMatches.get(i);
String literalTextBeforePlaceholder = pattern.substring(lastEnd, placeholderMatch.start);
regexBuilder.append(Pattern.quote(literalTextBeforePlaceholder));
String placeholderName = placeholderMatch.name;
String formatParameter = placeholderMatch.formatParameter;
boolean isLastPlaceholder = (i == placeholderMatches.size() - 1);
boolean hasTextAfterPlaceholder = (placeholderMatch.end < pattern.length());
boolean shouldUseGreedyMatching = isLastPlaceholder && !hasTextAfterPlaceholder;
String regexForPlaceholder;
if ("*".equals(placeholderName)) {
regexForPlaceholder = shouldUseGreedyMatching ? "(.+)" : "(.+?)";
} else if ("Published".equals(placeholderName) && formatParameter != null) {
regexForPlaceholder = buildRegexForDateFormat(formatParameter);
} else {
PlaceholderConfig config = PLACEHOLDER_CONFIGS.get(placeholderName);
regexForPlaceholder = determineRegexForPlaceholder(config, shouldUseGreedyMatching);
}
regexBuilder.append(regexForPlaceholder);
String placeholderWithFormat = formatParameter != null ? placeholderName + ":" + formatParameter : placeholderName;
placeholderOrder.add(placeholderWithFormat);
lastEnd = placeholderMatch.end;
}
String literalTextAfterLastPlaceholder = pattern.substring(lastEnd);
regexBuilder.append(Pattern.quote(literalTextAfterLastPlaceholder));
try {
Pattern compiledPattern = Pattern.compile(regexBuilder.toString());
return new ParsedPattern(compiledPattern, placeholderOrder);
} catch (PatternSyntaxException e) {
log.error("Invalid regex syntax from user input '{}': {}", pattern, e.getMessage());
return null;
}
}
private List<PlaceholderMatch> findAllPlaceholders(String pattern) {
List<PlaceholderMatch> placeholderMatches = new ArrayList<>();
Matcher matcher = PLACEHOLDER_PATTERN.matcher(pattern);
while (matcher.find()) {
String placeholderName;
String formatParameter = null;
if (matcher.group(0).equals("*")) {
placeholderName = "*";
} else {
placeholderName = matcher.group(1);
formatParameter = matcher.group(2);
}
placeholderMatches.add(new PlaceholderMatch(
matcher.start(),
matcher.end(),
placeholderName,
formatParameter
));
}
return placeholderMatches;
}
private String buildRegexForDateFormat(String dateFormat) {
StringBuilder result = new StringBuilder();
int i = 0;
while (i < dateFormat.length()) {
if (dateFormat.startsWith("yyyy", i)) {
result.append("\\d{4}");
i += 4;
} else if (dateFormat.startsWith("yy", i)) {
result.append("\\d{2}");
i += 2;
} else if (dateFormat.startsWith("MM", i)) {
result.append("\\d{2}");
i += 2;
} else if (dateFormat.startsWith("M", i)) {
result.append("\\d{1,2}");
i += 1;
} else if (dateFormat.startsWith("dd", i)) {
result.append("\\d{2}");
i += 2;
} else if (dateFormat.startsWith("d", i)) {
result.append("\\d{1,2}");
i += 1;
} else {
result.append(Pattern.quote(String.valueOf(dateFormat.charAt(i))));
i++;
}
}
return "(" + result.toString() + ")";
}
private String determineRegexForPlaceholder(PlaceholderConfig config, boolean shouldUseGreedyMatching) {
if (config != null) {
String configuredRegex = config.regex();
boolean isNonGreedyTextPattern = configuredRegex.equals("(.+?)");
if (shouldUseGreedyMatching && isNonGreedyTextPattern) {
return "(.+)";
}
return configuredRegex;
}
return shouldUseGreedyMatching ? "(.+)" : "(.+?)";
}
private BookMetadata buildMetadataFromMatch(Matcher matcher, List<String> placeholderOrder) {
BookMetadata metadata = new BookMetadata();
for (int i = 0; i < placeholderOrder.size(); i++) {
String placeholderWithFormat = placeholderOrder.get(i);
String[] parts = placeholderWithFormat.split(":", 2);
String placeholderName = parts[0];
String formatParameter = parts.length > 1 ? parts[1] : null;
if ("*".equals(placeholderName)) {
continue;
}
String value = matcher.group(i + 1).trim();
applyValueToMetadata(metadata, placeholderName, value, formatParameter);
}
return metadata;
}
private void applyValueToMetadata(BookMetadata metadata, String placeholderName, String value, String formatParameter) {
if (value == null || value.isBlank()) {
return;
}
switch (placeholderName) {
case "SeriesName" -> metadata.setSeriesName(value);
case "Title" -> metadata.setTitle(value);
case "Subtitle" -> metadata.setSubtitle(value);
case "Authors" -> metadata.setAuthors(parseAuthors(value));
case "SeriesNumber" -> setSeriesNumber(metadata, value);
case "Published" -> setPublishedDate(metadata, value, formatParameter);
case "Publisher" -> metadata.setPublisher(value);
case "Language" -> metadata.setLanguage(value);
case "SeriesTotal" -> setSeriesTotal(metadata, value);
case "ISBN10" -> metadata.setIsbn10(value);
case "ISBN13" -> metadata.setIsbn13(value);
case "ASIN" -> metadata.setAsin(value);
}
}
private Set<String> parseAuthors(String value) {
String[] parts = value.split("[,;&]");
Set<String> authors = new LinkedHashSet<>();
for (String part : parts) {
String trimmed = part.trim();
if (!trimmed.isEmpty()) {
authors.add(trimmed);
}
}
return authors;
}
private void setSeriesNumber(BookMetadata metadata, String value) {
try {
metadata.setSeriesNumber(Float.parseFloat(value));
} catch (NumberFormatException ignored) {
}
}
private void setPublishedDate(BookMetadata metadata, String value, String dateFormat) {
String detectedFormat = (dateFormat == null || dateFormat.isBlank())
? detectDateFormat(value)
: dateFormat;
if (detectedFormat == null) {
log.warn("Could not detect date format for value: '{}'", value);
return;
}
try {
if ("yyyy".equals(detectedFormat) || "yy".equals(detectedFormat)) {
int year = Integer.parseInt(value);
if ("yy".equals(detectedFormat) && year < 100) {
year += (year < TWO_DIGIT_YEAR_CUTOFF) ? 2000 : TWO_DIGIT_YEAR_CENTURY_BASE;
}
metadata.setPublishedDate(LocalDate.of(year, 1, 1));
return;
}
DateTimeFormatter formatter = DateTimeFormatter.ofPattern(detectedFormat);
LocalDate date = LocalDate.parse(value, formatter);
metadata.setPublishedDate(date);
} catch (NumberFormatException e) {
log.warn("Failed to parse year value '{}': {}", value, e.getMessage());
} catch (DateTimeParseException e) {
log.warn("Failed to parse date '{}' with format '{}': {}", value, detectedFormat, e.getMessage());
} catch (IllegalArgumentException e) {
log.warn("Invalid date format '{}' for value '{}': {}", detectedFormat, value, e.getMessage());
}
}
private String detectDateFormat(String value) {
if (value == null || value.isBlank()) {
return null;
}
String trimmed = value.trim();
int length = trimmed.length();
if (length == FOUR_DIGIT_YEAR_LENGTH && FOUR_DIGIT_YEAR_PATTERN.matcher(trimmed).matches()) {
return "yyyy";
}
if (length == TWO_DIGIT_YEAR_LENGTH && TWO_DIGIT_YEAR_PATTERN.matcher(trimmed).matches()) {
return "yy";
}
if (length == COMPACT_DATE_LENGTH && COMPACT_DATE_PATTERN.matcher(trimmed).matches()) {
return "yyyyMMdd";
}
Matcher flexibleMatcher = FLEXIBLE_DATE_PATTERN.matcher(trimmed);
if (flexibleMatcher.matches()) {
String separator = flexibleMatcher.group(2);
return determineFlexibleDateFormat(flexibleMatcher, separator);
}
return null;
}
private String determineFlexibleDateFormat(Matcher matcher, String separator) {
String part1 = matcher.group(1);
String part2 = matcher.group(3);
String part3 = matcher.group(4);
int val1, val2, val3;
try {
val1 = Integer.parseInt(part1);
val2 = Integer.parseInt(part2);
val3 = Integer.parseInt(part3);
} catch (NumberFormatException e) {
return null;
}
String format1, format2, format3;
if (isYearValue(part1, val1)) {
format1 = buildYearFormat(part1);
if (val2 <= 12 && val3 > 12) {
format2 = buildMonthFormat(part2);
format3 = buildDayFormat(part3);
} else if (val3 <= 12 && val2 > 12) {
format2 = buildDayFormat(part2);
format3 = buildMonthFormat(part3);
} else {
format2 = buildMonthFormat(part2);
format3 = buildDayFormat(part3);
}
} else if (isYearValue(part3, val3)) {
format3 = buildYearFormat(part3);
if (val1 <= 12 && val2 > 12) {
format1 = buildMonthFormat(part1);
format2 = buildDayFormat(part2);
} else if (val2 <= 12 && val1 > 12) {
format1 = buildDayFormat(part1);
format2 = buildMonthFormat(part2);
} else {
format1 = buildDayFormat(part1);
format2 = buildMonthFormat(part2);
}
} else {
format1 = buildDayFormat(part1);
format2 = buildMonthFormat(part2);
format3 = part3.length() == 2 ? "yy" : "y";
}
return format1 + separator + format2 + separator + format3;
}
private boolean isYearValue(String part, int value) {
return part.length() == 4 || value > 31;
}
private String buildYearFormat(String part) {
return part.length() == 4 ? "yyyy" : "yy";
}
private String buildMonthFormat(String part) {
return part.length() == 2 ? "MM" : "M";
}
private String buildDayFormat(String part) {
return part.length() == 2 ? "dd" : "d";
}
private void setSeriesTotal(BookMetadata metadata, String value) {
try {
metadata.setSeriesTotal(Integer.parseInt(value));
} catch (NumberFormatException ignored) {
}
}
private void persistExtractedMetadata(List<BookdropPatternExtractResult.FileExtractionResult> results, List<BookdropFileEntity> files) {
Map<Long, BookdropFileEntity> fileMap = new HashMap<>();
for (BookdropFileEntity file : files) {
fileMap.put(file.getId(), file);
}
Set<Long> failedFileIds = new HashSet<>();
for (BookdropPatternExtractResult.FileExtractionResult result : results) {
if (!result.isSuccess() || result.getExtractedMetadata() == null) {
continue;
}
BookdropFileEntity file = fileMap.get(result.getFileId());
if (file == null) {
continue;
}
try {
BookMetadata currentMetadata = metadataHelper.getCurrentMetadata(file);
BookMetadata extractedMetadata = result.getExtractedMetadata();
metadataHelper.mergeMetadata(currentMetadata, extractedMetadata);
metadataHelper.updateFetchedMetadata(file, currentMetadata);
} catch (RuntimeException e) {
log.error("Error persisting extracted metadata for file {} ({}): {}",
file.getId(), file.getFileName(), e.getMessage(), e);
failedFileIds.add(file.getId());
result.setSuccess(false);
result.setErrorMessage("Failed to save metadata: " + e.getMessage());
}
}
List<BookdropFileEntity> filesToSave = files.stream()
.filter(file -> !failedFileIds.contains(file.getId()))
.toList();
if (!filesToSave.isEmpty()) {
bookdropFileRepository.saveAll(filesToSave);
}
}
private record PlaceholderConfig(String regex, String metadataField) {}
private record ParsedPattern(Pattern compiledPattern, List<String> placeholderOrder) {}
private record PlaceholderMatch(int start, int end, String name, String formatParameter) {}
private record BatchExtractionResult(List<BookdropPatternExtractResult.FileExtractionResult> results,
int successCount, int failureCount) {}
}
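
To make the placeholder-to-regex translation in parsePattern easier to follow, here is a deliberately simplified, self-contained sketch of the same idea: literal text between placeholders is Pattern.quote()d, each placeholder becomes a capturing group, and group order maps back to metadata fields. It hard-codes the groups for one pattern and omits the '*' wildcard, date formats, and the timeout the real class adds to guard against catastrophic backtracking (the ReDoS concern in the commit messages).

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class PlaceholderRegexSketch {
    public static void main(String[] args) {
        // User pattern: {SeriesName} - Ch {SeriesNumber}
        // SeriesName -> (.+?), SeriesNumber -> (\d+(?:\.\d+)?), the literal " - Ch " is quoted.
        Pattern compiled = Pattern.compile("(.+?)" + Pattern.quote(" - Ch ") + "(\\d+(?:\\.\\d+)?)");
        Matcher m = compiled.matcher("Chronicles of Earth - Ch 10.5"); // base name, extension already stripped
        if (m.find()) {
            // Prints: series='Chronicles of Earth', number=10.5
            System.out.printf("series='%s', number=%s%n", m.group(1).trim(), m.group(2));
        }
    }
}

In the real extractor the last text placeholder switches to a greedy (.+) when no literal text follows it, which is why a pattern like {SeriesName} on its own still captures the full base name.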


@@ -0,0 +1,341 @@
package com.adityachandel.booklore.service.bookdrop;
import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.request.BookdropBulkEditRequest;
import com.adityachandel.booklore.model.dto.response.BookdropBulkEditResult;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import com.fasterxml.jackson.core.JsonProcessingException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.*;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
class BookdropBulkEditServiceTest {
@Mock
private BookdropFileRepository bookdropFileRepository;
@Mock
private BookdropMetadataHelper metadataHelper;
@InjectMocks
private BookdropBulkEditService bulkEditService;
@Captor
private ArgumentCaptor<List<BookdropFileEntity>> filesCaptor;
private BookdropFileEntity createFileEntity(Long id, String fileName, BookMetadata metadata) {
BookdropFileEntity entity = new BookdropFileEntity();
entity.setId(id);
entity.setFileName(fileName);
entity.setFilePath("/bookdrop/" + fileName);
return entity;
}
@BeforeEach
void setUp() {
when(metadataHelper.getCurrentMetadata(any())).thenReturn(new BookMetadata());
doNothing().when(metadataHelper).updateFetchedMetadata(any(), any());
}
@Test
void bulkEdit_WithSingleValueFields_ShouldUpdateTextAndNumericFields() {
BookMetadata existingMetadata = new BookMetadata();
existingMetadata.setSeriesName("Old Series");
BookdropFileEntity file1 = createFileEntity(1L, "file1.cbz", existingMetadata);
BookdropFileEntity file2 = createFileEntity(2L, "file2.cbz", existingMetadata);
when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L)))
.thenReturn(List.of(1L, 2L));
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(List.of(file1, file2));
BookMetadata updates = new BookMetadata();
updates.setSeriesName("New Series");
updates.setPublisher("Test Publisher");
updates.setLanguage("en");
updates.setSeriesTotal(100);
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Set.of("seriesName", "publisher", "language", "seriesTotal"));
request.setMergeArrays(false);
request.setSelectAll(false);
request.setSelectedIds(List.of(1L, 2L));
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
assertEquals(2, result.getTotalFiles());
assertEquals(2, result.getSuccessfullyUpdated());
assertEquals(0, result.getFailed());
verify(metadataHelper, times(2)).updateFetchedMetadata(any(), any());
verify(bookdropFileRepository, times(1)).saveAll(anyList());
}
@Test
void bulkEdit_WithArrayFieldsMergeMode_ShouldMergeArrays() {
BookMetadata existingMetadata = new BookMetadata();
existingMetadata.setAuthors(new LinkedHashSet<>(List.of("Author 1")));
existingMetadata.setCategories(new LinkedHashSet<>(List.of("Category 1")));
when(metadataHelper.getCurrentMetadata(any())).thenReturn(existingMetadata);
BookdropFileEntity file = createFileEntity(1L, "file.cbz", existingMetadata);
when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
.thenReturn(List.of(1L));
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(List.of(file));
BookMetadata updates = new BookMetadata();
updates.setAuthors(new LinkedHashSet<>(List.of("Author 2")));
updates.setCategories(new LinkedHashSet<>(List.of("Category 2")));
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Set.of("authors", "categories"));
request.setMergeArrays(true);
request.setSelectAll(false);
request.setSelectedIds(List.of(1L));
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
assertEquals(1, result.getTotalFiles());
assertEquals(1, result.getSuccessfullyUpdated());
assertEquals(0, result.getFailed());
ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());
BookMetadata captured = metadataCaptor.getValue();
assertTrue(captured.getAuthors().contains("Author 1"));
assertTrue(captured.getAuthors().contains("Author 2"));
assertTrue(captured.getCategories().contains("Category 1"));
assertTrue(captured.getCategories().contains("Category 2"));
}
@Test
void bulkEdit_WithArrayFieldsReplaceMode_ShouldReplaceArrays() {
BookMetadata existingMetadata = new BookMetadata();
existingMetadata.setAuthors(new LinkedHashSet<>(List.of("Author 1")));
when(metadataHelper.getCurrentMetadata(any())).thenReturn(existingMetadata);
BookdropFileEntity file = createFileEntity(1L, "file.cbz", existingMetadata);
when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
.thenReturn(List.of(1L));
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(List.of(file));
BookMetadata updates = new BookMetadata();
updates.setAuthors(new LinkedHashSet<>(List.of("Author 2")));
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Set.of("authors"));
request.setMergeArrays(false);
request.setSelectAll(false);
request.setSelectedIds(List.of(1L));
bulkEditService.bulkEdit(request);
ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());
BookMetadata captured = metadataCaptor.getValue();
assertFalse(captured.getAuthors().contains("Author 1"));
assertTrue(captured.getAuthors().contains("Author 2"));
assertEquals(1, captured.getAuthors().size());
}
@Test
void bulkEdit_WithDisabledFields_ShouldNotUpdateThoseFields() {
BookMetadata existingMetadata = new BookMetadata();
existingMetadata.setSeriesName("Original Series");
existingMetadata.setPublisher("Original Publisher");
when(metadataHelper.getCurrentMetadata(any())).thenReturn(existingMetadata);
BookdropFileEntity file = createFileEntity(1L, "file.cbz", existingMetadata);
when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
.thenReturn(List.of(1L));
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(List.of(file));
BookMetadata updates = new BookMetadata();
updates.setSeriesName("New Series");
updates.setPublisher("New Publisher");
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Set.of("seriesName"));
request.setMergeArrays(false);
request.setSelectAll(false);
request.setSelectedIds(List.of(1L));
bulkEditService.bulkEdit(request);
ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());
BookMetadata captured = metadataCaptor.getValue();
assertEquals("New Series", captured.getSeriesName());
assertEquals("Original Publisher", captured.getPublisher());
}
@Test
void bulkEdit_WithSelectAll_ShouldProcessAllFiles() {
BookdropFileEntity file1 = createFileEntity(1L, "file1.cbz", new BookMetadata());
BookdropFileEntity file2 = createFileEntity(2L, "file2.cbz", new BookMetadata());
BookdropFileEntity file3 = createFileEntity(3L, "file3.cbz", new BookMetadata());
when(metadataHelper.resolveFileIds(true, List.of(2L), null))
.thenReturn(List.of(1L, 3L));
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(List.of(file1, file3));
BookMetadata updates = new BookMetadata();
updates.setLanguage("en");
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Set.of("language"));
request.setMergeArrays(false);
request.setSelectAll(true);
request.setExcludedIds(List.of(2L));
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
assertEquals(2, result.getTotalFiles());
assertEquals(2, result.getSuccessfullyUpdated());
verify(metadataHelper, times(2)).updateFetchedMetadata(any(), any());
}
@Test
void bulkEdit_WithOneFileError_ShouldContinueWithOthers() {
BookdropFileEntity file1 = createFileEntity(1L, "file1.cbz", new BookMetadata());
BookdropFileEntity file2 = createFileEntity(2L, "file2.cbz", new BookMetadata());
BookdropFileEntity file3 = createFileEntity(3L, "file3.cbz", new BookMetadata());
when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L, 3L)))
.thenReturn(List.of(1L, 2L, 3L));
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(List.of(file1, file2, file3));
doThrow(new RuntimeException("JSON serialization error"))
.when(metadataHelper).updateFetchedMetadata(eq(file2), any());
BookMetadata updates = new BookMetadata();
updates.setLanguage("en");
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Set.of("language"));
request.setMergeArrays(false);
request.setSelectAll(false);
request.setSelectedIds(List.of(1L, 2L, 3L));
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
assertEquals(3, result.getTotalFiles());
assertEquals(2, result.getSuccessfullyUpdated());
assertEquals(1, result.getFailed());
verify(bookdropFileRepository).saveAll(filesCaptor.capture());
List<BookdropFileEntity> savedFiles = filesCaptor.getValue();
assertEquals(2, savedFiles.size());
assertTrue(savedFiles.stream().anyMatch(f -> f.getId().equals(1L)));
assertTrue(savedFiles.stream().anyMatch(f -> f.getId().equals(3L)));
assertFalse(savedFiles.stream().anyMatch(f -> f.getId().equals(2L)));
}
@Test
void bulkEdit_WithEmptyEnabledFields_ShouldNotUpdateAnything() {
BookdropFileEntity file = createFileEntity(1L, "file.cbz", new BookMetadata());
when(metadataHelper.resolveFileIds(false, null, List.of(1L)))
.thenReturn(List.of(1L));
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(List.of(file));
BookMetadata updates = new BookMetadata();
updates.setSeriesName("New Series");
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Collections.emptySet());
request.setMergeArrays(false);
request.setSelectAll(false);
request.setSelectedIds(List.of(1L));
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
assertEquals(1, result.getSuccessfullyUpdated());
ArgumentCaptor<BookMetadata> metadataCaptor = ArgumentCaptor.forClass(BookMetadata.class);
verify(metadataHelper).updateFetchedMetadata(any(), metadataCaptor.capture());
assertNull(metadataCaptor.getValue().getSeriesName());
}
@Test
void bulkEdit_WithLargeSelection_ShouldProcessInBatches() {
List<BookdropFileEntity> batch1 = new ArrayList<>();
List<BookdropFileEntity> batch2 = new ArrayList<>();
List<BookdropFileEntity> batch3 = new ArrayList<>();
List<Long> manyIds = new ArrayList<>();
for (long i = 1; i <= 1500; i++) {
manyIds.add(i);
BookdropFileEntity file = createFileEntity(i, "file" + i + ".cbz", new BookMetadata());
if (i <= 500) {
batch1.add(file);
} else if (i <= 1000) {
batch2.add(file);
} else {
batch3.add(file);
}
}
when(metadataHelper.resolveFileIds(false, null, manyIds))
.thenReturn(manyIds);
when(bookdropFileRepository.findAllById(anyList()))
.thenReturn(batch1, batch2, batch3);
BookMetadata updates = new BookMetadata();
updates.setLanguage("en");
BookdropBulkEditRequest request = new BookdropBulkEditRequest();
request.setFields(updates);
request.setEnabledFields(Set.of("language"));
request.setMergeArrays(false);
request.setSelectAll(false);
request.setSelectedIds(manyIds);
BookdropBulkEditResult result = bulkEditService.bulkEdit(request);
assertEquals(1500, result.getTotalFiles());
assertEquals(1500, result.getSuccessfullyUpdated());
assertEquals(0, result.getFailed());
verify(bookdropFileRepository, times(3)).findAllById(anyList());
verify(bookdropFileRepository, times(3)).saveAll(anyList());
}
}
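
The merge behaviour exercised in bulkEdit_WithArrayFieldsMergeMode above comes down to a LinkedHashSet union in updateArrayField; a minimal standalone sketch with illustrative values:

import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public class MergeArraysSketch {
    public static void main(String[] args) {
        Set<String> current = new LinkedHashSet<>(List.of("Author 1"));
        Set<String> incoming = new LinkedHashSet<>(List.of("Author 2"));
        // mergeArrays == true: keep existing values and append the new ones, order preserved.
        Set<String> merged = new LinkedHashSet<>(current);
        merged.addAll(incoming);
        System.out.println(merged);   // [Author 1, Author 2]
        // mergeArrays == false: the incoming set simply replaces the current one.
        System.out.println(incoming); // [Author 2]
    }
}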


@@ -0,0 +1,644 @@
package com.adityachandel.booklore.service.bookdrop;
import com.adityachandel.booklore.model.dto.BookMetadata;
import com.adityachandel.booklore.model.dto.request.BookdropPatternExtractRequest;
import com.adityachandel.booklore.model.dto.response.BookdropPatternExtractResult;
import com.adityachandel.booklore.model.entity.BookdropFileEntity;
import com.adityachandel.booklore.repository.BookdropFileRepository;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.List;
import java.util.Set;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyList;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
class FilenamePatternExtractorTest {
@Mock
private BookdropFileRepository bookdropFileRepository;
@Mock
private BookdropMetadataHelper metadataHelper;
@InjectMocks
private FilenamePatternExtractor extractor;
private BookdropFileEntity createFileEntity(Long id, String fileName) {
BookdropFileEntity entity = new BookdropFileEntity();
entity.setId(id);
entity.setFileName(fileName);
entity.setFilePath("/bookdrop/" + fileName);
return entity;
}
@Test
void extractFromFilename_WithSeriesAndChapter_ShouldExtractBoth() {
String filename = "Chronicles of Earth - Ch 25.cbz";
String pattern = "{SeriesName} - Ch {SeriesNumber}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(25.0f, result.getSeriesNumber());
}
@Test
void extractFromFilename_WithVolumeAndIssuePattern_ShouldExtractCorrectly() {
String filename = "Chronicles of Earth Vol.3 (of 150).cbz";
String pattern = "{SeriesName} Vol.{SeriesNumber} (of {SeriesTotal})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(3.0f, result.getSeriesNumber());
assertEquals(150, result.getSeriesTotal());
}
@Test
void extractFromFilename_WithPublishedYearPattern_ShouldExtractYear() {
String filename = "Chronicles of Earth (2016) 001.cbz";
String pattern = "{SeriesName} ({Published:yyyy}) {SeriesNumber}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(2016, result.getPublishedDate().getYear());
assertEquals(1.0f, result.getSeriesNumber());
}
@Test
void extractFromFilename_WithAuthorAndTitle_ShouldExtractBoth() {
String filename = "John Smith - The Lost City.epub";
String pattern = "{Authors} - {Title}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals(Set.of("John Smith"), result.getAuthors());
assertEquals("The Lost City", result.getTitle());
}
@Test
void extractFromFilename_WithMultipleAuthors_ShouldParseAll() {
String filename = "John Smith, Jane Doe - The Lost City.epub";
String pattern = "{Authors} - {Title}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertTrue(result.getAuthors().contains("John Smith"));
assertTrue(result.getAuthors().contains("Jane Doe"));
assertEquals("The Lost City", result.getTitle());
}
@Test
void extractFromFilename_WithDecimalSeriesNumber_ShouldParseCorrectly() {
String filename = "Chronicles of Earth - Ch 10.5.cbz";
String pattern = "{SeriesName} - Ch {SeriesNumber}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(10.5f, result.getSeriesNumber());
}
@Test
void extractFromFilename_WithNonMatchingPattern_ShouldReturnNull() {
String filename = "Random File Name.pdf";
String pattern = "{SeriesName} - Ch {SeriesNumber}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNull(result);
}
@Test
void extractFromFilename_WithNullPattern_ShouldReturnNull() {
String filename = "Test File.pdf";
BookMetadata result = extractor.extractFromFilename(filename, null);
assertNull(result);
}
@Test
void extractFromFilename_WithEmptyPattern_ShouldReturnNull() {
String filename = "Test File.pdf";
BookMetadata result = extractor.extractFromFilename(filename, "");
assertNull(result);
}
@Test
void extractFromFilename_WithPublisherYearAndIssue_ShouldExtractAll() {
String filename = "Epic Press - Chronicles of Earth #001 (2011).cbz";
String pattern = "{Publisher} - {SeriesName} #{SeriesNumber} ({Published:yyyy})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Epic Press", result.getPublisher());
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(1.0f, result.getSeriesNumber());
assertEquals(2011, result.getPublishedDate().getYear());
}
@Test
void extractFromFilename_WithLanguageTag_ShouldExtractLanguage() {
String filename = "Chronicles of Earth - Ch 500 [EN].cbz";
String pattern = "{SeriesName} - Ch {SeriesNumber} [{Language}]";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(500.0f, result.getSeriesNumber());
assertEquals("EN", result.getLanguage());
}
@Test
void bulkExtract_WithPreviewMode_ShouldReturnExtractionResults() {
BookdropFileEntity file1 = createFileEntity(1L, "Chronicles A - Ch 1.cbz");
BookdropFileEntity file2 = createFileEntity(2L, "Chronicles B - Ch 2.cbz");
BookdropFileEntity file3 = createFileEntity(3L, "Random Name.cbz");
BookdropPatternExtractRequest request = new BookdropPatternExtractRequest();
request.setPattern("{SeriesName} - Ch {SeriesNumber}");
request.setSelectAll(false);
request.setSelectedIds(List.of(1L, 2L, 3L));
request.setPreview(true);
when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L, 3L)))
.thenReturn(List.of(1L, 2L, 3L));
when(bookdropFileRepository.findAllById(anyList())).thenReturn(List.of(file1, file2, file3));
BookdropPatternExtractResult result = extractor.bulkExtract(request);
assertNotNull(result);
assertEquals(3, result.getTotalFiles());
assertEquals(2, result.getSuccessfullyExtracted());
assertEquals(1, result.getFailed());
var successResults = result.getResults().stream()
.filter(BookdropPatternExtractResult.FileExtractionResult::isSuccess)
.toList();
assertEquals(2, successResults.size());
}
@Test
void bulkExtract_WithFullExtraction_ShouldProcessAndPersistAll() {
BookdropFileEntity file1 = createFileEntity(1L, "Chronicles A - Ch 1.cbz");
BookdropFileEntity file2 = createFileEntity(2L, "Chronicles B - Ch 2.cbz");
BookdropFileEntity file3 = createFileEntity(3L, "Random Name.cbz");
BookdropPatternExtractRequest request = new BookdropPatternExtractRequest();
request.setPattern("{SeriesName} - Ch {SeriesNumber}");
request.setSelectAll(false);
request.setSelectedIds(List.of(1L, 2L, 3L));
request.setPreview(false);
when(metadataHelper.resolveFileIds(false, null, List.of(1L, 2L, 3L)))
.thenReturn(List.of(1L, 2L, 3L));
when(bookdropFileRepository.findAllById(anyList())).thenReturn(List.of(file1, file2, file3));
when(metadataHelper.getCurrentMetadata(any())).thenReturn(new BookMetadata());
BookdropPatternExtractResult result = extractor.bulkExtract(request);
assertNotNull(result);
assertEquals(3, result.getTotalFiles());
assertEquals(2, result.getSuccessfullyExtracted());
assertEquals(1, result.getFailed());
// Verify metadata was updated for successful extractions (2 files matched pattern)
verify(metadataHelper, times(2)).updateFetchedMetadata(any(), any());
// Verify all files were saved (even the one that failed extraction keeps original metadata)
verify(bookdropFileRepository, times(1)).saveAll(anyList());
}
@Test
void extractFromFilename_WithSpecialCharacters_ShouldHandleCorrectly() {
String filename = "Chronicles (Special Edition) - Ch 5.cbz";
String pattern = "{SeriesName} - Ch {SeriesNumber}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles (Special Edition)", result.getSeriesName());
assertEquals(5.0f, result.getSeriesNumber());
}
// ===== Greedy Matching Tests =====
@Test
void extractFromFilename_SeriesNameOnly_ShouldCaptureFullName() {
String filename = "Chronicles of Earth.cbz";
String pattern = "{SeriesName}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
}
@Test
void extractFromFilename_TitleOnly_ShouldCaptureFullTitle() {
String filename = "The Last Kingdom.epub";
String pattern = "{Title}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Last Kingdom", result.getTitle());
}
// ===== Complex Pattern Tests =====
@Test
void extractFromFilename_SeriesNumberAndTitle_ShouldExtractBoth() {
String filename = "Chronicles of Earth 01 - The Beginning.epub";
String pattern = "{SeriesName} {SeriesNumber} - {Title}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(1.0f, result.getSeriesNumber());
assertEquals("The Beginning", result.getTitle());
}
@Test
void extractFromFilename_AuthorSeriesTitleFormat_ShouldExtractAll() {
String filename = "Chronicles of Earth 07 - The Final Battle - John Smith.epub";
String pattern = "{SeriesName} {SeriesNumber} - {Title} - {Authors}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(7.0f, result.getSeriesNumber());
assertEquals("The Final Battle", result.getTitle());
assertEquals(Set.of("John Smith"), result.getAuthors());
}
@Test
void extractFromFilename_AuthorTitleYear_ShouldExtractAll() {
String filename = "John Smith - The Lost City (1949).epub";
String pattern = "{Authors} - {Title} ({Published:yyyy})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals(Set.of("John Smith"), result.getAuthors());
assertEquals("The Lost City", result.getTitle());
assertEquals(1949, result.getPublishedDate().getYear());
}
@Test
void extractFromFilename_AuthorWithCommas_ShouldParseProperly() {
String filename = "Smith, John R. - The Lost City.epub";
String pattern = "{Authors} - {Title}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals(Set.of("Smith", "John R."), result.getAuthors());
assertEquals("The Lost City", result.getTitle());
}
@Test
void extractFromFilename_PartNumberFormat_ShouldExtractCorrectly() {
String filename = "Chronicles of Earth - Part 2 - Rising Darkness.epub";
String pattern = "{SeriesName} - Part {SeriesNumber} - {Title}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(2.0f, result.getSeriesNumber());
assertEquals("Rising Darkness", result.getTitle());
}
@Test
void extractFromFilename_PublisherBracketFormat_ShouldExtractCorrectly() {
String filename = "[Epic Press] Chronicles of Earth Vol.5 [5 of 20].epub";
String pattern = "[{Publisher}] {SeriesName} Vol.{SeriesNumber} [* of {SeriesTotal}]";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Epic Press", result.getPublisher());
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(5.0f, result.getSeriesNumber());
assertEquals(20, result.getSeriesTotal());
}
@Test
void extractFromFilename_CalibreStyleFormat_ShouldExtractCorrectly() {
String filename = "Chronicles of Earth 01 The Beginning - John Smith.epub";
String pattern = "{SeriesName} {SeriesNumber} {Title} - {Authors}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(1.0f, result.getSeriesNumber());
assertEquals("The Beginning", result.getTitle());
assertEquals(Set.of("John Smith"), result.getAuthors());
}
// ===== New Placeholder Tests =====
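// Covers the newly added placeholders: {Subtitle}, {ISBN13}, {ISBN10} (including a trailing X check digit) and {ASIN}.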
@Test
void extractFromFilename_WithSubtitle_ShouldExtractBoth() {
String filename = "The Lost City - A Tale of Adventure.epub";
String pattern = "{Title} - {Subtitle}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Lost City", result.getTitle());
assertEquals("A Tale of Adventure", result.getSubtitle());
}
@Test
void extractFromFilename_WithISBN13_ShouldExtractISBN13() {
String filename = "The Lost City [1234567890123].epub";
String pattern = "{Title} [{ISBN13}]";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Lost City", result.getTitle());
assertEquals("1234567890123", result.getIsbn13());
}
@Test
void extractFromFilename_WithISBN10_ShouldExtractCorrectly() {
String filename = "Chronicles of Tomorrow - 0553293354.epub";
String pattern = "{Title} - {ISBN10}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Tomorrow", result.getTitle());
assertEquals("0553293354", result.getIsbn10());
}
@Test
void extractFromFilename_WithISBN10EndingInX_ShouldExtractCorrectly() {
String filename = "Test Book - 043942089X.epub";
String pattern = "{Title} - {ISBN10}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Test Book", result.getTitle());
assertEquals("043942089X", result.getIsbn10());
}
@Test
void extractFromFilename_WithASIN_ShouldExtractCorrectly() {
String filename = "Chronicles of Earth - B001234567.epub";
String pattern = "{Title} - {ASIN}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getTitle());
assertEquals("B001234567", result.getAsin());
}
// ===== Published Date Format Tests =====
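// Explicit {Published:<format>} patterns; auto-detection for a bare {Published} placeholder is exercised further below.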
@Test
void extractFromFilename_WithPublishedDateYYYYMMDD_ShouldExtractCorrectly() {
String filename = "The Lost City - 1925-04-10.epub";
String pattern = "{Title} - {Published:yyyy-MM-dd}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Lost City", result.getTitle());
assertEquals(1925, result.getPublishedDate().getYear());
assertEquals(4, result.getPublishedDate().getMonthValue());
assertEquals(10, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_WithPublishedDateCompact_ShouldExtractCorrectly() {
String filename = "Chronicles of Tomorrow_19650801.epub";
String pattern = "{Title}_{Published:yyyyMMdd}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Tomorrow", result.getTitle());
assertEquals(1965, result.getPublishedDate().getYear());
assertEquals(8, result.getPublishedDate().getMonthValue());
assertEquals(1, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_WithPublishedDateDots_ShouldExtractCorrectly() {
String filename = "Chronicles of Tomorrow (1951.05.01).epub";
String pattern = "{Title} ({Published:yyyy.MM.dd})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Tomorrow", result.getTitle());
assertEquals(1951, result.getPublishedDate().getYear());
assertEquals(5, result.getPublishedDate().getMonthValue());
assertEquals(1, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_WithPublishedDateDashes_ShouldExtractCorrectly() {
String filename = "Chronicles of Earth [05-15-2020].epub";
String pattern = "{Title} [{Published:MM-dd-yyyy}]";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getTitle());
assertEquals(2020, result.getPublishedDate().getYear());
assertEquals(5, result.getPublishedDate().getMonthValue());
assertEquals(15, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_WithPublishedDateSingleDigits_ShouldExtractCorrectly() {
String filename = "Chronicles of Earth - 2023-1-5.epub";
String pattern = "{Title} - {Published:yyyy-M-d}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getTitle());
assertEquals(2023, result.getPublishedDate().getYear());
assertEquals(1, result.getPublishedDate().getMonthValue());
assertEquals(5, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_ComplexPatternWithMultiplePlaceholders_ShouldExtractAll() {
String filename = "Chronicles of Earth - The Beginning [1234567890123] - 2020-05-15.epub";
String pattern = "{SeriesName} - {Title} [{ISBN13}] - {Published:yyyy-MM-dd}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals("The Beginning", result.getTitle());
assertEquals("1234567890123", result.getIsbn13());
assertEquals(2020, result.getPublishedDate().getYear());
assertEquals(5, result.getPublishedDate().getMonthValue());
assertEquals(15, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_PublishedWithoutFormat_AutoDetectsISODate() {
String filename = "The Lost City (2023-05-15).epub";
String pattern = "{Title} ({Published})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Lost City", result.getTitle());
assertEquals(2023, result.getPublishedDate().getYear());
assertEquals(5, result.getPublishedDate().getMonthValue());
assertEquals(15, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_PublishedWithoutFormat_AutoDetectsCompactDate() {
String filename = "The Beginning [20231225].epub";
String pattern = "{Title} [{Published}]";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Beginning", result.getTitle());
assertEquals(2023, result.getPublishedDate().getYear());
assertEquals(12, result.getPublishedDate().getMonthValue());
assertEquals(25, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_PublishedWithoutFormat_AutoDetectsYear() {
String filename = "The Lost City (2023).epub";
String pattern = "{Title} ({Published})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Lost City", result.getTitle());
assertEquals(2023, result.getPublishedDate().getYear());
assertEquals(1, result.getPublishedDate().getMonthValue());
assertEquals(1, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_PublishedWithoutFormat_AutoDetectsTwoDigitYear() {
String filename = "Chronicles of Tomorrow (99).epub";
String pattern = "{Title} ({Published})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Tomorrow", result.getTitle());
assertEquals(1999, result.getPublishedDate().getYear());
}
@Test
void extractFromFilename_PublishedWithoutFormat_AutoDetectsFlexibleFormat() {
String filename = "Tomorrow (15|05|2023).epub";
String pattern = "{Title} ({Published})";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Tomorrow", result.getTitle());
assertEquals(2023, result.getPublishedDate().getYear());
assertEquals(5, result.getPublishedDate().getMonthValue());
assertEquals(15, result.getPublishedDate().getDayOfMonth());
}
@Test
void extractFromFilename_WildcardBeforePlaceholder_SkipsUnwantedText() {
String filename = "[Extra] Chronicles of Earth - Ch 42.cbz";
String pattern = "[*] {SeriesName} - Ch {SeriesNumber}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(42.0f, result.getSeriesNumber());
}
@Test
void extractFromFilename_WildcardBetweenPlaceholders_SkipsMiddleText() {
String filename = "The Lost City (extra) John Smith.epub";
String pattern = "{Title} (*) {Authors}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("The Lost City", result.getTitle());
assertEquals(Set.of("John Smith"), result.getAuthors());
}
@Test
void extractFromFilename_WildcardAtEnd_SkipsTrailingText() {
String filename = "Chronicles of Earth v1 - extra.cbz";
String pattern = "{SeriesName} v{SeriesNumber} - *";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(1.0f, result.getSeriesNumber());
}
@Test
void extractFromFilename_WildcardAtEnd_AllowsPartialMatch() {
String filename = "Chronicles of Earth - Chapter 20.cbz";
String pattern = "{SeriesName} - * {SeriesNumber}";
BookMetadata result = extractor.extractFromFilename(filename, pattern);
assertNotNull(result);
assertEquals("Chronicles of Earth", result.getSeriesName());
assertEquals(20.0f, result.getSeriesNumber());
}
@Test
void extractFromFilename_WildcardWithVariousPlacements_HandlesCorrectly() {
String filename1 = "Chronicles of Tomorrow - Chapter 8.1 (2025).cbz";
String pattern1 = "{SeriesName} - * {SeriesNumber}";
BookMetadata result1 = extractor.extractFromFilename(filename1, pattern1);
assertNotNull(result1);
assertEquals("Chronicles of Tomorrow", result1.getSeriesName());
assertEquals(8.1f, result1.getSeriesNumber());
String filename2 = "Junk - Chapter 20.cbz";
String pattern2 = "* - Chapter {SeriesNumber}";
BookMetadata result2 = extractor.extractFromFilename(filename2, pattern2);
assertNotNull(result2);
assertEquals(20.0f, result2.getSeriesNumber());
}
}

View File

@@ -0,0 +1,118 @@
<div class="bulk-edit-container">
<div class="info-banner">
<i class="pi pi-info-circle"></i>
<span>Select which fields to apply to <strong>{{ fileCount }}</strong> selected file(s). Only checked fields will be updated.</span>
</div>
<p-divider></p-divider>
<div class="fields-section">
<h4>Text Fields</h4>
<div class="field-grid">
@for (field of textFields; track field.name) {
<div class="field-row">
<p-checkbox
[binary]="true"
[ngModel]="isFieldEnabled(field.name)"
(ngModelChange)="toggleField(field.name)"
[ariaLabel]="'Enable ' + field.label">
</p-checkbox>
<label class="field-label" [attr.for]="field.controlName">{{ field.label }}</label>
<input
pInputText
[formControl]="$any(bulkEditForm.get(field.controlName))"
[attr.id]="field.controlName"
[attr.aria-label]="field.label"
class="field-input"/>
</div>
}
</div>
</div>
<p-divider></p-divider>
<div class="fields-section">
<h4>Number Fields</h4>
<div class="field-grid">
@for (field of numberFields; track field.name) {
<div class="field-row">
<p-checkbox
[binary]="true"
[ngModel]="isFieldEnabled(field.name)"
(ngModelChange)="toggleField(field.name)"
[ariaLabel]="'Enable ' + field.label">
</p-checkbox>
<label class="field-label" [attr.for]="field.controlName">{{ field.label }}</label>
<input
pInputText
type="number"
[formControl]="$any(bulkEditForm.get(field.controlName))"
[attr.id]="field.controlName"
[attr.aria-label]="field.label"
class="field-input field-input-small"/>
</div>
}
</div>
</div>
<p-divider></p-divider>
<div class="fields-section">
<div class="section-header">
<h4>Array Fields</h4>
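<!-- Merge adds the entered values to each file's existing list; Replace overwrites it. -->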
<div class="merge-toggle">
<span class="merge-label" id="merge-mode-label">Mode:</span>
<p-selectButton
[options]="mergeOptions"
[(ngModel)]="mergeArrays"
optionLabel="label"
optionValue="value"
ariaLabelledBy="merge-mode-label">
</p-selectButton>
</div>
</div>
<p class="helper-text"><i class="pi pi-info-circle"></i> Type and press Enter to add each item.</p>
<div class="field-grid">
@for (field of chipFields; track field.name) {
<div class="field-row">
<p-checkbox
[binary]="true"
[ngModel]="isFieldEnabled(field.name)"
(ngModelChange)="toggleField(field.name)"
[ariaLabel]="'Enable ' + field.label">
</p-checkbox>
<label class="field-label" [attr.for]="field.controlName">{{ field.label }}</label>
<p-autoComplete
[formControl]="$any(bulkEditForm.get(field.controlName))"
[multiple]="true"
[suggestions]="[]"
[forceSelection]="false"
[typeahead]="false"
[dropdown]="false"
[ariaLabel]="field.label"
(onBlur)="onAutoCompleteBlur(field.name, $event)"
class="field-input">
</p-autoComplete>
</div>
}
</div>
</div>
<p-divider></p-divider>
<div class="dialog-footer">
<p-button
label="Cancel"
icon="pi pi-times"
severity="secondary"
(click)="cancel()">
</p-button>
<p-button
label="Apply to Selected"
icon="pi pi-check"
severity="success"
[disabled]="!hasEnabledFields"
(click)="apply()">
</p-button>
</div>
</div>

View File

@@ -0,0 +1,100 @@
.bulk-edit-container {
display: flex;
flex-direction: column;
gap: 1rem;
padding: 1rem;
.helper-text {
font-size: 0.875rem;
color: var(--p-text-secondary-color);
display: flex;
align-items: center;
gap: 0.5rem;
margin: 0;
padding: 0.5rem 0;
i {
font-size: 1rem;
}
}
}
.info-banner {
display: flex;
align-items: center;
gap: 0.75rem;
padding: 0.75rem 1rem;
background: rgba(59, 130, 246, 0.08);
border: 1px solid var(--p-primary-color);
border-radius: 6px;
color: var(--p-text-color);
i {
font-size: 1.25rem;
color: var(--p-primary-color);
}
}
.fields-section {
display: flex;
flex-direction: column;
gap: 0.75rem;
h4 {
margin: 0;
font-size: 0.9rem;
font-weight: 600;
color: var(--p-text-secondary-color);
}
}
.section-header {
display: flex;
justify-content: space-between;
align-items: center;
}
.merge-toggle {
display: flex;
align-items: center;
gap: 0.5rem;
.merge-label {
font-size: 0.85rem;
color: var(--p-text-secondary-color);
}
}
.field-grid {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.field-row {
display: grid;
grid-template-columns: auto 120px 1fr;
gap: 0.75rem;
align-items: center;
}
.field-label {
font-size: 0.9rem;
font-weight: 500;
color: var(--p-text-color);
}
.field-input {
width: 100%;
}
.field-input-small {
max-width: 120px;
}
.dialog-footer {
display: flex;
justify-content: flex-end;
gap: 0.75rem;
padding-top: 0.5rem;
}

View File

@@ -0,0 +1,167 @@
import {Component, inject, OnInit, ChangeDetectorRef} from '@angular/core';
import {FormControl, FormGroup, FormsModule, ReactiveFormsModule} from '@angular/forms';
import {DynamicDialogConfig, DynamicDialogRef} from 'primeng/dynamicdialog';
import {Button} from 'primeng/button';
import {Checkbox} from 'primeng/checkbox';
import {InputText} from 'primeng/inputtext';
import {AutoComplete} from 'primeng/autocomplete';
import {Divider} from 'primeng/divider';
import {SelectButton} from 'primeng/selectbutton';
import {BookMetadata} from '../../../book/model/book.model';
export interface BulkEditResult {
fields: Partial<BookMetadata>;
enabledFields: Set<string>;
mergeArrays: boolean;
}
interface BulkEditField {
name: string;
label: string;
type: 'text' | 'chips' | 'number';
controlName: string;
}
@Component({
selector: 'app-bookdrop-bulk-edit-dialog',
standalone: true,
imports: [
ReactiveFormsModule,
FormsModule,
Button,
Checkbox,
InputText,
AutoComplete,
Divider,
SelectButton,
],
templateUrl: './bookdrop-bulk-edit-dialog.component.html',
styleUrl: './bookdrop-bulk-edit-dialog.component.scss'
})
export class BookdropBulkEditDialogComponent implements OnInit {
private readonly dialogRef = inject(DynamicDialogRef);
private readonly config = inject(DynamicDialogConfig);
private readonly cdr = inject(ChangeDetectorRef);
fileCount: number = 0;
mergeArrays = true;
enabledFields = new Set<string>();
bulkEditForm = new FormGroup({
seriesName: new FormControl(''),
seriesTotal: new FormControl<number | null>(null),
authors: new FormControl<string[]>([]),
publisher: new FormControl(''),
language: new FormControl(''),
categories: new FormControl<string[]>([]),
moods: new FormControl<string[]>([]),
tags: new FormControl<string[]>([]),
});
textFields: BulkEditField[] = [
{name: 'seriesName', label: 'Series Name', type: 'text', controlName: 'seriesName'},
{name: 'publisher', label: 'Publisher', type: 'text', controlName: 'publisher'},
{name: 'language', label: 'Language', type: 'text', controlName: 'language'},
];
numberFields: BulkEditField[] = [
{name: 'seriesTotal', label: 'Series Total', type: 'number', controlName: 'seriesTotal'},
];
chipFields: BulkEditField[] = [
{name: 'authors', label: 'Authors', type: 'chips', controlName: 'authors'},
{name: 'categories', label: 'Genres', type: 'chips', controlName: 'categories'},
{name: 'moods', label: 'Moods', type: 'chips', controlName: 'moods'},
{name: 'tags', label: 'Tags', type: 'chips', controlName: 'tags'},
];
mergeOptions = [
{label: 'Merge', value: true},
{label: 'Replace', value: false},
];
ngOnInit(): void {
this.fileCount = this.config.data?.fileCount ?? 0;
this.setupFormValueChangeListeners();
}
private setupFormValueChangeListeners(): void {
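// Auto-enable a field as soon as the user enters a value for it, so the checkbox state follows the input.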
Object.keys(this.bulkEditForm.controls).forEach(fieldName => {
const control = this.bulkEditForm.get(fieldName);
control?.valueChanges.subscribe(value => {
const hasValue = Array.isArray(value) ? value.length > 0 : (value !== null && value !== '' && value !== undefined);
if (hasValue && !this.enabledFields.has(fieldName)) {
this.enabledFields.add(fieldName);
this.cdr.detectChanges();
}
});
});
}
onAutoCompleteBlur(fieldName: string, event: Event): void {
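// Commit any text still sitting in the chip input when focus leaves it, then make sure the field counts as enabled.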
const target = event.target as HTMLInputElement;
const inputValue = target?.value?.trim();
if (inputValue) {
const control = this.bulkEditForm.get(fieldName);
const currentValue = (control?.value as string[]) || [];
if (!currentValue.includes(inputValue)) {
control?.setValue([...currentValue, inputValue]);
}
if (target) {
target.value = '';
}
}
if (!this.enabledFields.has(fieldName)) {
const control = this.bulkEditForm.get(fieldName);
const value = control?.value;
if (Array.isArray(value) && value.length > 0) {
this.enabledFields.add(fieldName);
this.cdr.detectChanges();
}
}
}
toggleField(fieldName: string): void {
if (this.enabledFields.has(fieldName)) {
this.enabledFields.delete(fieldName);
} else {
this.enabledFields.add(fieldName);
}
}
isFieldEnabled(fieldName: string): boolean {
return this.enabledFields.has(fieldName);
}
get hasEnabledFields(): boolean {
return this.enabledFields.size > 0;
}
cancel(): void {
this.dialogRef.close(null);
}
apply(): void {
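// Collect values only for the fields the user explicitly enabled; everything else is left untouched.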
const formValue = this.bulkEditForm.value;
const fields: Partial<BookMetadata> = {};
this.enabledFields.forEach(fieldName => {
const value = formValue[fieldName as keyof typeof formValue];
if (value !== undefined && value !== null) {
(fields as Record<string, unknown>)[fieldName] = value;
}
});
const result: BulkEditResult = {
fields,
enabledFields: new Set(this.enabledFields),
mergeArrays: this.mergeArrays,
};
this.dialogRef.close(result);
}
}

View File

@@ -153,8 +153,9 @@ export class BookdropFileMetadataPickerComponent {
}
}
onAutoCompleteBlur(fieldName: string, event: any) {
const inputValue = event.target.value?.trim();
onAutoCompleteBlur(fieldName: string, event: Event): void {
const target = event.target as HTMLInputElement;
const inputValue = target?.value?.trim();
if (inputValue) {
const currentValue = this.metadataForm.get(fieldName)?.value || [];
const values = Array.isArray(currentValue) ? currentValue :
@@ -163,7 +164,9 @@ export class BookdropFileMetadataPickerComponent {
values.push(inputValue);
this.metadataForm.get(fieldName)?.setValue(values);
}
event.target.value = '';
if (target) {
target.value = '';
}
}
}

View File

@@ -66,6 +66,28 @@
pTooltip="Replace current metadata with fetched metadata on all files"
tooltipPosition="top">
</p-button>
<p-button
size="small"
outlined
severity="help"
label="Bulk&nbsp;Edit"
icon="pi pi-pencil"
[disabled]="!hasSelectedFiles"
(click)="openBulkEditDialog()"
pTooltip="Edit metadata fields in bulk for selected files"
tooltipPosition="top">
</p-button>
<p-button
size="small"
outlined
severity="warn"
label="Extract&nbsp;Pattern"
icon="pi pi-sliders-h"
[disabled]="!hasSelectedFiles"
(click)="openPatternExtractDialog()"
pTooltip="Extract metadata from filenames using a pattern"
tooltipPosition="top">
</p-button>
<span pTooltip="Include book covers when importing fetched metadata"><p-checkbox
inputId="includecovers"
[binary]="true"
@@ -123,7 +145,6 @@
<div class="file-item">
<div class="file-row">
<p-checkbox
[binary]="true"
[(ngModel)]="file.selected"

View File

@@ -3,7 +3,7 @@ import {takeUntilDestroyed} from '@angular/core/rxjs-interop';
import {filter, startWith, take, tap} from 'rxjs/operators';
import {PageTitleService} from "../../../../shared/service/page-title.service";
import {BookdropFile, BookdropFinalizePayload, BookdropFinalizeResult, BookdropService} from '../../service/bookdrop.service';
import {BookdropFile, BookdropFinalizePayload, BookdropFinalizeResult, BookdropService, FileExtractionResult, BulkEditRequest as BackendBulkEditRequest, BulkEditResult as BackendBulkEditResult} from '../../service/bookdrop.service';
import {LibraryService} from '../../../book/service/library.service';
import {Library} from '../../../book/model/library.model';
@@ -25,6 +25,9 @@ import {NgClass} from '@angular/common';
import {Paginator} from 'primeng/paginator';
import {ActivatedRoute} from '@angular/router';
import {BookdropFileMetadataPickerComponent} from '../bookdrop-file-metadata-picker/bookdrop-file-metadata-picker.component';
import {BookdropFinalizeResultDialogComponent} from '../bookdrop-finalize-result-dialog/bookdrop-finalize-result-dialog.component';
import {BookdropBulkEditDialogComponent, BulkEditResult} from '../bookdrop-bulk-edit-dialog/bookdrop-bulk-edit-dialog.component';
import {BookdropPatternExtractDialogComponent} from '../bookdrop-pattern-extract-dialog/bookdrop-pattern-extract-dialog.component';
import {DialogLauncherService} from '../../../../shared/services/dialog-launcher.service';
export interface BookdropFileUI {
@@ -381,7 +384,7 @@ export class BookdropFileReviewComponent implements OnInit {
icon: 'pi pi-exclamation-triangle',
acceptButtonStyleClass: 'p-button-danger',
accept: () => {
const payload: any = {
const payload: { selectAll: boolean; excludedIds?: number[]; selectedIds?: number[] } = {
selectAll: this.selectAllAcrossPages,
};
@@ -583,4 +586,180 @@ export class BookdropFileReviewComponent implements OnInit {
}
});
}
openBulkEditDialog(): void {
const selectedFiles = this.getSelectedFiles();
const totalCount = this.selectAllAcrossPages
? this.totalRecords - this.excludedFiles.size
: selectedFiles.length;
if (totalCount === 0) {
this.messageService.add({
severity: 'warn',
summary: 'No files selected',
detail: 'Please select files to bulk edit.',
});
return;
}
const dialogRef = this.dialogLauncherService.openDialog(BookdropBulkEditDialogComponent, {
header: `Bulk Edit ${totalCount} Files`,
width: '600px',
modal: true,
closable: true,
data: {fileCount: totalCount},
});
dialogRef?.onClose.subscribe((result: BulkEditResult | null) => {
if (result) {
this.applyBulkMetadataViaBackend(result);
}
});
}
openPatternExtractDialog(): void {
const selectedFiles = this.getSelectedFiles();
const totalCount = this.selectAllAcrossPages
? this.totalRecords - this.excludedFiles.size
: selectedFiles.length;
if (totalCount === 0) {
this.messageService.add({
severity: 'warn',
summary: 'No files selected',
detail: 'Please select files to extract metadata from.',
});
return;
}
const sampleFiles = selectedFiles.slice(0, 5).map(f => f.file.fileName);
const selectedIds = selectedFiles.map(f => f.file.id);
const dialogRef = this.dialogLauncherService.openDialog(BookdropPatternExtractDialogComponent, {
header: 'Extract Metadata from Filenames',
width: '700px',
modal: true,
closable: true,
data: {
sampleFiles,
fileCount: totalCount,
selectAll: this.selectAllAcrossPages,
excludedIds: Array.from(this.excludedFiles),
selectedIds,
},
});
dialogRef?.onClose.subscribe((result: { results: FileExtractionResult[] } | null) => {
if (result?.results) {
this.applyExtractedMetadata(result.results);
}
});
}
private getSelectedFiles(): BookdropFileUI[] {
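// With "select all across pages" active, every cached file except the explicitly excluded ids counts as selected; otherwise only per-row selections do.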
return Object.values(this.fileUiCache).filter(file => {
if (this.selectAllAcrossPages) {
return !this.excludedFiles.has(file.file.id);
}
return file.selected;
});
}
private applyBulkMetadataViaBackend(result: BulkEditResult): void {
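// Update the visible forms immediately, then persist the edit for the full selection on the backend.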
const selectedFiles = this.getSelectedFiles();
const selectedIds = selectedFiles.map(f => f.file.id);
this.applyBulkMetadataToUI(result, selectedFiles);
const enabledFieldsArray = Array.from(result.enabledFields);
const payload: BackendBulkEditRequest = {
fields: result.fields,
enabledFields: enabledFieldsArray,
mergeArrays: result.mergeArrays,
selectAll: this.selectAllAcrossPages,
excludedIds: this.selectAllAcrossPages ? Array.from(this.excludedFiles) : undefined,
selectedIds: !this.selectAllAcrossPages ? selectedIds : undefined,
};
this.bookdropService.bulkEditMetadata(payload).subscribe({
next: (backendResult: BackendBulkEditResult) => {
this.messageService.add({
severity: 'success',
summary: 'Bulk Edit Applied',
detail: `Updated metadata for ${backendResult.successfullyUpdated} of ${backendResult.totalFiles} file(s).`,
});
},
error: (err) => {
console.error('Error applying bulk edit:', err);
this.messageService.add({
severity: 'error',
summary: 'Bulk Edit Failed',
detail: 'An error occurred while applying bulk edits.',
});
},
});
}
private applyBulkMetadataToUI(result: BulkEditResult, selectedFiles: BookdropFileUI[]): void {
selectedFiles.forEach(fileUi => {
result.enabledFields.forEach(fieldName => {
const value = result.fields[fieldName as keyof BookMetadata];
if (value === undefined || value === null) {
return;
}
if (Array.isArray(value) && value.length === 0) {
return;
}
const control = fileUi.metadataForm.get(fieldName);
if (!control) {
return;
}
if (result.mergeArrays && Array.isArray(value)) {
const currentValue = control.value || [];
const merged = [...new Set([...currentValue, ...value])];
control.setValue(merged);
} else {
control.setValue(value);
}
});
});
}
private applyExtractedMetadata(results: FileExtractionResult[]): void {
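// Copy each successful extraction into the matching file's form controls, skipping null values.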
let appliedCount = 0;
results.forEach(result => {
if (!result.success || !result.extractedMetadata) {
return;
}
const fileUi = this.fileUiCache[result.fileId];
if (!fileUi) {
return;
}
Object.entries(result.extractedMetadata).forEach(([key, value]) => {
if (value === null || value === undefined) {
return;
}
const control = fileUi.metadataForm.get(key);
if (control) {
control.setValue(value);
}
});
appliedCount++;
});
this.messageService.add({
severity: 'success',
summary: 'Pattern Extraction Applied',
detail: `Applied extracted metadata to ${appliedCount} file(s).`,
});
}
}

View File

@@ -4,13 +4,13 @@ import {BookdropFinalizeResult} from '../../service/bookdrop.service';
import {DynamicDialogConfig, DynamicDialogRef} from "primeng/dynamicdialog";
@Component({
selector: 'app-bookdrop-finalize-result-dialog-component',
selector: 'app-bookdrop-finalize-result-dialog',
imports: [
NgClass,
DatePipe
],
templateUrl: './bookdrop-finalize-result-dialog-component.html',
styleUrl: './bookdrop-finalize-result-dialog-component.scss'
templateUrl: './bookdrop-finalize-result-dialog.component.html',
styleUrl: './bookdrop-finalize-result-dialog.component.scss'
})
export class BookdropFinalizeResultDialogComponent implements OnDestroy {

View File

@@ -0,0 +1,123 @@
<div class="pattern-extract-container">
<div class="info-banner">
<i class="pi pi-info-circle"></i>
<span>
Enter a pattern to extract metadata from filenames of <strong>{{ fileCount }}</strong> selected file(s).
Use placeholders like <code>{{ '{' }}SeriesName{{ '}' }}</code> to capture values.
</span>
</div>
<p-divider></p-divider>
<div class="pattern-section">
<h4>Pattern</h4>
<div class="pattern-input-row">
<input
#patternInput
pInputText
[formControl]="$any(patternForm.get('pattern'))"
[placeholder]="patternPlaceholderText"
id="pattern-input"
aria-label="Filename pattern for metadata extraction"
class="pattern-input"
(input)="previewPattern()"/>
<p-button
icon="pi pi-eye"
label="Preview"
severity="secondary"
[disabled]="!hasValidPattern"
[ariaLabel]="'Preview pattern extraction on sample files'"
(click)="previewPattern()">
</p-button>
</div>
</div>
<div class="placeholders-section">
<h4>Available Placeholders</h4>
<div class="placeholder-chips">
@for (placeholder of availablePlaceholders; track placeholder.name) {
<p-chip
[label]="getPlaceholderLabel(placeholder.name)"
[pTooltip]="getPlaceholderTooltip(placeholder)"
tooltipPosition="top"
styleClass="placeholder-chip"
(click)="insertPlaceholder(placeholder.name)">
</p-chip>
}
</div>
</div>
<div class="common-patterns-section">
<h4>Common Patterns</h4>
<div class="common-pattern-buttons">
@for (commonPattern of commonPatterns; track commonPattern.pattern) {
<p-button
[label]="commonPattern.label"
severity="secondary"
size="small"
outlined
(click)="applyCommonPattern(commonPattern.pattern)">
</p-button>
}
</div>
</div>
<p-divider></p-divider>
@if (previewResults.length > 0) {
<div class="preview-section">
<h4>Preview (Sample Files)</h4>
<div class="preview-list">
@for (preview of previewResults; track preview.fileName) {
<div class="preview-item" [ngClass]="getPreviewClass(preview)">
<div class="preview-filename">
<i class="pi"
[ngClass]="getPreviewIconClass(preview)"
[pTooltip]="getErrorTooltip(preview)"
tooltipPosition="top"
[tooltipOptions]="{showDelay: 300}"></i>
<span>{{ preview.fileName }}</span>
</div>
@if (preview.success) {
<div class="preview-extracted">
@for (entry of getPreviewEntries(preview); track entry.key) {
<div class="extracted-field">
<span class="field-name">{{ entry.key }}:</span>
<span class="field-value">{{ entry.value }}</span>
</div>
}
</div>
} @else {
<div class="preview-error">{{ getErrorMessage(preview) }}</div>
}
</div>
}
</div>
</div>
<p-divider></p-divider>
}
<div class="dialog-footer">
@if (isExtracting) {
<div class="extracting-indicator">
<p-progressSpinner strokeWidth="4" [style]="spinnerStyle"></p-progressSpinner>
<span>Extracting metadata...</span>
</div>
}
<p-button
label="Cancel"
icon="pi pi-times"
severity="secondary"
[disabled]="isExtracting"
(click)="cancel()">
</p-button>
<p-button
label="Extract and Apply"
icon="pi pi-check"
severity="success"
[disabled]="!hasValidPattern || isExtracting"
(click)="extract()">
</p-button>
</div>
</div>

View File

@@ -0,0 +1,171 @@
.pattern-extract-container {
display: flex;
flex-direction: column;
gap: 1rem;
padding: 0.5rem;
max-height: 70vh;
overflow-y: auto;
}
.info-banner {
display: flex;
align-items: flex-start;
gap: 0.75rem;
padding: 0.75rem 1rem;
background: rgba(59, 130, 246, 0.08);
border: 1px solid var(--p-primary-color);
border-radius: 6px;
color: var(--p-text-color);
i {
font-size: 1.25rem;
color: var(--p-primary-color);
margin-top: 2px;
}
code {
background: rgba(255, 255, 255, 0.1);
padding: 0.1rem 0.3rem;
border-radius: 4px;
font-family: monospace;
}
}
.pattern-section,
.placeholders-section,
.common-patterns-section,
.preview-section {
display: flex;
flex-direction: column;
gap: 0.5rem;
h4 {
margin: 0;
font-size: 0.9rem;
font-weight: 600;
color: var(--p-text-secondary-color);
}
}
.pattern-input-row {
display: flex;
gap: 0.5rem;
align-items: center;
}
.pattern-input {
flex: 1;
font-family: monospace;
}
.placeholder-chips {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
}
:host ::ng-deep .placeholder-chip {
cursor: pointer;
font-family: monospace;
font-size: 0.85rem;
&:hover {
background-color: var(--p-primary-color);
color: var(--p-primary-contrast-color);
}
}
.common-pattern-buttons {
display: flex;
flex-wrap: wrap;
gap: 0.5rem;
}
.preview-list {
display: flex;
flex-direction: column;
gap: 0.5rem;
max-height: 200px;
overflow-y: auto;
}
.preview-item {
padding: 0.75rem;
border-radius: 6px;
border: 1px solid var(--p-surface-300);
&.preview-success {
background-color: rgba(76, 175, 80, 0.1);
border-color: rgba(76, 175, 80, 0.3);
}
&.preview-failure {
background-color: rgba(244, 67, 54, 0.1);
border-color: rgba(244, 67, 54, 0.3);
}
}
.preview-filename {
display: flex;
align-items: center;
gap: 0.5rem;
font-weight: 500;
margin-bottom: 0.5rem;
i {
font-size: 1rem;
}
.pi-check-circle {
color: #4caf50;
}
.pi-times-circle {
color: #f44336;
}
}
.preview-extracted {
display: flex;
flex-wrap: wrap;
gap: 0.5rem 1rem;
padding-left: 1.5rem;
}
.extracted-field {
display: flex;
gap: 0.25rem;
font-size: 0.85rem;
.field-name {
color: var(--p-text-secondary-color);
}
.field-value {
font-weight: 500;
color: var(--p-primary-color);
}
}
.preview-error {
padding-left: 1.5rem;
font-size: 0.85rem;
color: var(--p-text-secondary-color);
font-style: italic;
}
.dialog-footer {
display: flex;
justify-content: flex-end;
align-items: center;
gap: 0.75rem;
padding-top: 0.5rem;
}
.extracting-indicator {
display: flex;
align-items: center;
gap: 0.5rem;
margin-right: auto;
color: var(--p-text-secondary-color);
}

View File

@@ -0,0 +1,277 @@
import {Component, ElementRef, inject, OnInit, ViewChild} from '@angular/core';
import {FormControl, FormGroup, ReactiveFormsModule, Validators} from '@angular/forms';
import {DynamicDialogConfig, DynamicDialogRef} from 'primeng/dynamicdialog';
import {Button} from 'primeng/button';
import {InputText} from 'primeng/inputtext';
import {Divider} from 'primeng/divider';
import {Chip} from 'primeng/chip';
import {ProgressSpinner} from 'primeng/progressspinner';
import {BookdropService, PatternExtractResult} from '../../service/bookdrop.service';
import {MessageService} from 'primeng/api';
import {NgClass} from '@angular/common';
import {Tooltip} from 'primeng/tooltip';
interface PatternPlaceholder {
name: string;
description: string;
example: string;
}
interface PreviewResult {
fileName: string;
success: boolean;
preview: Record<string, string>;
errorMessage?: string;
}
@Component({
selector: 'app-bookdrop-pattern-extract-dialog',
standalone: true,
imports: [
ReactiveFormsModule,
Button,
InputText,
Divider,
Chip,
ProgressSpinner,
NgClass,
Tooltip,
],
templateUrl: './bookdrop-pattern-extract-dialog.component.html',
styleUrl: './bookdrop-pattern-extract-dialog.component.scss'
})
export class BookdropPatternExtractDialogComponent implements OnInit {
private readonly dialogRef = inject(DynamicDialogRef);
private readonly config = inject(DynamicDialogConfig);
private readonly bookdropService = inject(BookdropService);
private readonly messageService = inject(MessageService);
@ViewChild('patternInput', {static: false}) patternInput?: ElementRef<HTMLInputElement>;
fileCount = 0;
selectAll = false;
excludedIds: number[] = [];
selectedIds: number[] = [];
isExtracting = false;
previewResults: PreviewResult[] = [];
patternPlaceholderText = 'e.g., {SeriesName} - Ch {SeriesNumber}';
spinnerStyle = {width: '24px', height: '24px'};
patternForm = new FormGroup({
pattern: new FormControl('', Validators.required),
});
availablePlaceholders: PatternPlaceholder[] = [
{name: '*', description: 'Wildcard - skips any text (not a metadata field)', example: 'anything'},
{name: 'SeriesName', description: 'Series or comic name', example: 'Chronicles of Earth'},
{name: 'Title', description: 'Book title', example: 'The Lost City'},
{name: 'Subtitle', description: 'Book subtitle', example: 'A Tale of Adventure'},
{name: 'Authors', description: 'Author name(s)', example: 'John Smith'},
{name: 'SeriesNumber', description: 'Book number in series', example: '25'},
{name: 'Published', description: 'Full date with format', example: '{Published:yyyy-MM-dd}'},
{name: 'Publisher', description: 'Publisher name', example: 'Epic Press'},
{name: 'Language', description: 'Language code', example: 'en'},
{name: 'SeriesTotal', description: 'Total books in series', example: '50'},
{name: 'ISBN10', description: 'ISBN-10 identifier', example: '1234567890'},
{name: 'ISBN13', description: 'ISBN-13 identifier', example: '1234567890123'},
{name: 'ASIN', description: 'Amazon ASIN', example: 'B012345678'},
];
commonPatterns = [
{label: 'Author - Title', pattern: '{Authors} - {Title}'},
{label: 'Title - Author', pattern: '{Title} - {Authors}'},
{label: 'Title (Year)', pattern: '{Title} ({Published:yyyy})'},
{label: 'Author - Title (Year)', pattern: '{Authors} - {Title} ({Published:yyyy})'},
{label: 'Series #Number', pattern: '{SeriesName} #{SeriesNumber}'},
{label: 'Series - Chapter Number', pattern: '{SeriesName} - Chapter {SeriesNumber}'},
{label: 'Series - Vol Number', pattern: '{SeriesName} - Vol {SeriesNumber}'},
{label: '[Tag] Series - Chapter Number', pattern: '[*] {SeriesName} - Chapter {SeriesNumber}'},
{label: 'Title by Author', pattern: '{Title} by {Authors}'},
{label: 'Series vX (of Total)', pattern: '{SeriesName} v{SeriesNumber} (of {SeriesTotal})'},
];
ngOnInit(): void {
this.fileCount = this.config.data?.fileCount ?? 0;
this.selectAll = this.config.data?.selectAll ?? false;
this.excludedIds = this.config.data?.excludedIds ?? [];
this.selectedIds = this.config.data?.selectedIds ?? [];
}
insertPlaceholder(placeholderName: string): void {
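// Insert the clicked placeholder at the caret; non-wildcard placeholders are first removed so each appears at most once.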
const patternControl = this.patternForm.get('pattern');
const currentPattern = patternControl?.value ?? '';
const inputElement = this.patternInput?.nativeElement;
const textToInsert = placeholderName === '*' ? '*' : `{${placeholderName}}`;
const patternToModify = placeholderName === '*'
? currentPattern
: this.removeExistingPlaceholder(currentPattern, placeholderName);
if (inputElement) {
const cursorPosition = this.calculateCursorPosition(inputElement, currentPattern, patternToModify);
const newPattern = this.insertTextAtCursor(patternToModify, textToInsert, cursorPosition);
patternControl?.setValue(newPattern);
this.focusInputAfterInsertion(inputElement, cursorPosition, textToInsert.length);
} else {
patternControl?.setValue(patternToModify + textToInsert);
}
this.previewPattern();
}
private removeExistingPlaceholder(pattern: string, placeholderName: string): string {
const existingPlaceholderRegex = new RegExp(`\\{${placeholderName}(?::[^}]*)?\\}`, 'g');
return pattern.replace(existingPlaceholderRegex, '');
}
private calculateCursorPosition(inputElement: HTMLInputElement, originalPattern: string, modifiedPattern: string): number {
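// Best-effort caret adjustment: when placeholders were stripped from the pattern, shift the caret left by the length of placeholder text that preceded it.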
let cursorPosition = inputElement.selectionStart ?? modifiedPattern.length;
if (originalPattern !== modifiedPattern) {
const existingPlaceholderRegex = new RegExp(`\\{\\w+(?::[^}]*)?\\}`, 'g');
const matchBefore = originalPattern.substring(0, cursorPosition).match(existingPlaceholderRegex);
if (matchBefore) {
cursorPosition -= matchBefore.reduce((sum, match) => sum + match.length, 0);
}
cursorPosition = Math.max(0, cursorPosition);
}
return cursorPosition;
}
private insertTextAtCursor(pattern: string, text: string, cursorPosition: number): string {
const textBefore = pattern.substring(0, cursorPosition);
const textAfter = pattern.substring(cursorPosition);
return textBefore + text + textAfter;
}
private focusInputAfterInsertion(inputElement: HTMLInputElement, cursorPosition: number, insertedTextLength: number): void {
setTimeout(() => {
const newCursorPosition = cursorPosition + insertedTextLength;
inputElement.setSelectionRange(newCursorPosition, newCursorPosition);
inputElement.focus();
}, 0);
}
applyCommonPattern(pattern: string): void {
this.patternForm.get('pattern')?.setValue(pattern);
this.previewPattern();
}
previewPattern(): void {
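// Request a preview run so sample results can be shown before anything is applied.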
const pattern = this.patternForm.get('pattern')?.value;
if (!pattern) {
this.previewResults = [];
return;
}
const request = {
pattern,
selectAll: this.selectAll,
excludedIds: this.excludedIds,
selectedIds: this.selectedIds,
preview: true
};
this.bookdropService.extractFromPattern(request).subscribe({
next: (result) => {
this.previewResults = result.results.map(r => ({
fileName: r.fileName,
success: r.success,
preview: r.extractedMetadata || {},
errorMessage: r.errorMessage
}));
},
error: () => {
this.previewResults = [];
}
});
}
cancel(): void {
this.dialogRef.close(null);
}
extract(): void {
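// Run the real extraction (preview disabled) and close the dialog with the results on success.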
const pattern = this.patternForm.get('pattern')?.value;
if (!pattern) {
return;
}
this.isExtracting = true;
const payload = {
pattern,
selectAll: this.selectAll,
excludedIds: this.excludedIds,
selectedIds: this.selectedIds,
preview: false,
};
this.bookdropService.extractFromPattern(payload).subscribe({
next: (result: PatternExtractResult) => {
this.isExtracting = false;
this.messageService.add({
severity: 'success',
summary: 'Extraction Complete',
detail: `Successfully extracted metadata from ${result.successfullyExtracted} of ${result.totalFiles} files.`,
});
this.dialogRef.close(result);
},
error: (err) => {
this.isExtracting = false;
console.error('Pattern extraction failed:', err);
this.messageService.add({
severity: 'error',
summary: 'Extraction Failed',
detail: 'An error occurred during pattern extraction.',
});
},
});
}
get hasValidPattern(): boolean {
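// A pattern is usable only if it is non-empty and contains at least one placeholder or a wildcard.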
const pattern: string = this.patternForm.get('pattern')?.value ?? '';
if (!this.patternForm.valid || !pattern) {
return false;
}
const placeholderRegex = /\{[a-zA-Z0-9_]+(?::[^{}]+)?\}|\*/;
return placeholderRegex.test(pattern);
}
getPlaceholderLabel(name: string): string {
return name === '*' ? '*' : `{${name}}`;
}
getPlaceholderTooltip(placeholder: PatternPlaceholder): string {
return `${placeholder.description} (e.g., ${placeholder.example})`;
}
getPreviewClass(preview: PreviewResult): Record<string, boolean> {
return {
'preview-success': preview.success,
'preview-failure': !preview.success
};
}
getPreviewIconClass(preview: PreviewResult): string {
return preview.success ? 'pi-check-circle' : 'pi-times-circle';
}
getPreviewEntries(preview: PreviewResult): Array<{key: string; value: string}> {
return Object.entries(preview.preview).map(([key, value]) => ({key, value}));
}
getErrorMessage(preview: PreviewResult): string {
return preview.errorMessage || 'Pattern did not match';
}
getErrorTooltip(preview: PreviewResult): string {
return preview.success ? '' : (preview.errorMessage || 'Pattern did not match filename structure');
}
}

View File

@@ -56,6 +56,44 @@ export interface Page<T> {
number: number;
}
export interface PatternExtractRequest {
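// preview=true requests a dry run: results are returned without being applied to the files.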
pattern: string;
selectAll?: boolean;
excludedIds?: number[];
selectedIds?: number[];
preview?: boolean;
}
export interface FileExtractionResult {
fileId: number;
fileName: string;
success: boolean;
extractedMetadata?: BookMetadata;
errorMessage?: string;
}
export interface PatternExtractResult {
totalFiles: number;
successfullyExtracted: number;
failed: number;
results: FileExtractionResult[];
}
export interface BulkEditRequest {
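// Either selectedIds or selectAll (with optional excludedIds) identifies the target files.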
fields: Partial<BookMetadata>;
enabledFields: string[];
mergeArrays: boolean;
selectAll?: boolean;
excludedIds?: number[];
selectedIds?: number[];
}
export interface BulkEditResult {
totalFiles: number;
successfullyUpdated: number;
failed: number;
}
@Injectable({providedIn: 'root'})
export class BookdropService {
private readonly url = `${API_CONFIG.BASE_URL}/api/v1/bookdrop`;
@@ -76,4 +114,12 @@ export class BookdropService {
rescan(): Observable<void> {
return this.http.post<void>(`${this.url}/rescan`, {});
}
extractFromPattern(payload: PatternExtractRequest): Observable<PatternExtractResult> {
return this.http.post<PatternExtractResult>(`${this.url}/files/extract-pattern`, payload);
}
bulkEditMetadata(payload: BulkEditRequest): Observable<BulkEditResult> {
return this.http.post<BulkEditResult>(`${this.url}/files/bulk-edit`, payload);
}
}

View File

@@ -11,7 +11,7 @@ import {CreateUserDialogComponent} from '../../features/settings/user-management
import {CreateEmailRecipientDialogComponent} from '../../features/settings/email-v2/create-email-recipient-dialog/create-email-recipient-dialog.component';
import {CreateEmailProviderDialogComponent} from '../../features/settings/email-v2/create-email-provider-dialog/create-email-provider-dialog.component';
import {DirectoryPickerComponent} from '../components/directory-picker/directory-picker.component';
import {BookdropFinalizeResultDialogComponent} from '../../features/bookdrop/component/bookdrop-finalize-result-dialog/bookdrop-finalize-result-dialog-component';
import {BookdropFinalizeResultDialogComponent} from '../../features/bookdrop/component/bookdrop-finalize-result-dialog/bookdrop-finalize-result-dialog.component';
import {BookdropFinalizeResult} from '../../features/bookdrop/service/bookdrop.service';
import {MetadataReviewDialogComponent} from '../../features/metadata/component/metadata-review-dialog/metadata-review-dialog-component';
import {MetadataRefreshType} from '../../features/metadata/model/request/metadata-refresh-type.enum';