Make sure that we can cancel worker pools if something goes wrong and an exception is thrown

Reinhard Pointner 2016-04-08 22:59:41 +00:00
parent e3be1e1bad
commit 1a4c66d977
5 changed files with 99 additions and 74 deletions
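Not part of the diff below, but useful context: every file in this commit applies the same shape. A short-lived work-stealing pool is created inside the method, the per-file or per-group tasks are submitted to it, and the pool is shut down in a finally block so that queued and running tasks are cancelled whenever an exception unwinds the stack. A minimal sketch of that shape, using made-up class and method names rather than FileBot's own:

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.function.Function;

public class CancellableWorkerPoolSketch {

	// run one task per input on a short-lived pool; the finally block guarantees that
	// queued tasks are discarded and running tasks are interrupted if anything throws
	public static <T, R> Map<T, R> mapConcurrently(List<T> input, Function<T, R> task) throws Exception {
		ExecutorService workerThreadPool = Executors.newWorkStealingPool();
		try {
			Map<T, Future<R>> futures = new LinkedHashMap<T, Future<R>>();
			for (T it : input) {
				futures.put(it, workerThreadPool.submit(() -> task.apply(it)));
			}

			Map<T, R> results = new LinkedHashMap<T, R>();
			for (Map.Entry<T, Future<R>> entry : futures.entrySet()) {
				results.put(entry.getKey(), entry.getValue().get()); // may throw ExecutionException
			}
			return results;
		} finally {
			workerThreadPool.shutdownNow(); // cancel whatever is still queued or running
		}
	}
}

The key point is that the pool is scoped to the call: nothing outlives the try/finally, so a failure cannot leave background workers grinding through the remaining tasks.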


@@ -107,7 +107,6 @@ public final class WebServices {
}
public static final ExecutorService requestThreadPool = Executors.newCachedThreadPool();
public static final ExecutorService workerThreadPool = Executors.newWorkStealingPool(getPreferredThreadPoolSize());
public static class TheTVDBClientWithLocalSearch extends TheTVDBClient {


@@ -27,6 +27,8 @@ import java.util.Objects;
import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.regex.Pattern;
import java.util.stream.Stream;
@@ -120,6 +122,8 @@ public class AutoDetection {
Map<Group, Set<File>> groups = new TreeMap<Group, Set<File>>();
// can't use parallel stream because default fork/join pool doesn't play well with the security manager
ExecutorService workerThreadPool = Executors.newWorkStealingPool();
try {
stream(files).collect(toMap(f -> f, f -> workerThreadPool.submit(() -> detectGroup(f)))).forEach((file, group) -> {
try {
groups.computeIfAbsent(group.get(), k -> new TreeSet<File>()).add(file);
@@ -127,6 +131,9 @@ public class AutoDetection {
debug.log(Level.SEVERE, e.getMessage(), e);
}
});
} finally {
workerThreadPool.shutdownNow();
}
return groups;
}
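A note on the shutdownNow() call in the finally block (the snippet below is an illustration, not code from this commit): shutdown() would only stop accepting new tasks and let everything already queued run to completion, whereas shutdownNow() discards the queued tasks and interrupts the running ones, so a slow detectGroup() style task can abort promptly once the caller has failed.

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class ShutdownNowSketch {

	public static void main(String[] args) throws Exception {
		ExecutorService workerThreadPool = Executors.newWorkStealingPool();
		try {
			workerThreadPool.submit(() -> {
				// stand-in for a slow per-file task; interruptible blocking calls like sleep(),
				// I/O or Future.get() throw InterruptedException once the worker is interrupted
				Thread.sleep(TimeUnit.MINUTES.toMillis(10));
				return "never reached";
			});
			throw new IllegalStateException("simulated failure while collecting results");
		} finally {
			// shutdown() would let the task above keep running for the full 10 minutes;
			// shutdownNow() interrupts it, so a long-running application is not left with
			// orphaned worker tasks after the caller has already failed
			workerThreadPool.shutdownNow();
		}
	}
}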


@@ -15,6 +15,8 @@ import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.logging.Level;
import java.util.stream.Stream;
@@ -37,6 +39,8 @@ class AutoDetectMatcher implements AutoCompleteMatcher {
Map<Group, Set<File>> groups = new AutoDetection(files, false, locale).group();
// can't use parallel stream because default fork/join pool doesn't play well with the security manager
ExecutorService workerThreadPool = Executors.newWorkStealingPool();
try {
Map<Group, Future<List<Match<File, ?>>>> matches = groups.entrySet().stream().collect(toMap(Entry::getKey, it -> {
return workerThreadPool.submit(() -> match(it.getKey(), it.getValue(), strict, order, locale, autodetection, parent));
}));
@@ -50,6 +54,9 @@ class AutoDetectMatcher implements AutoCompleteMatcher {
}
return Stream.empty();
}).collect(toList());
} finally {
workerThreadPool.shutdownNow();
}
}
private List<Match<File, ?>> match(Group group, Collection<File> files, boolean strict, SortOrder order, Locale locale, boolean autodetection, Component parent) throws Exception {
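AutoDetectMatcher follows the same structure as AutoDetection.group(): one worker task per group, results merged with flatMap, and a group whose future fails contributes an empty stream instead of aborting the whole merge. A rough, self-contained approximation of that flow; GroupMatcher, match() and the logging are simplified stand-ins, not FileBot code:

import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.stream.Collectors;
import java.util.stream.Stream;

public class PerGroupMatchSketch {

	// hypothetical callback standing in for the real per-group matching logic
	public interface GroupMatcher<G, M> {
		List<M> match(G group, List<String> files) throws Exception;
	}

	public static <G, M> List<M> matchAll(Map<G, List<String>> groups, GroupMatcher<G, M> matcher) {
		ExecutorService workerThreadPool = Executors.newWorkStealingPool();
		try {
			// one worker task per group
			Map<G, Future<List<M>>> matches = groups.entrySet().stream().collect(Collectors.toMap(Map.Entry::getKey, it -> {
				return workerThreadPool.submit(() -> matcher.match(it.getKey(), it.getValue()));
			}));

			// merge results; a group whose future failed logs the error and yields nothing,
			// so one bad group does not abort the whole batch
			return matches.entrySet().stream().flatMap(it -> {
				try {
					return it.getValue().get().stream();
				} catch (Exception e) {
					System.err.println("Failed to match group " + it.getKey() + ": " + e);
					return Stream.empty();
				}
			}).collect(Collectors.toList());
		} finally {
			workerThreadPool.shutdownNow();
		}
	}
}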


@@ -24,6 +24,8 @@ import java.util.Set;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.RunnableFuture;
@@ -51,7 +53,7 @@ class EpisodeListMatcher implements AutoCompleteMatcher {
private boolean anime;
// only allow one fetch session at a time so later requests can make use of cached results
private final Object providerLock = new Object();
private Object providerLock = new Object();
public EpisodeListMatcher(EpisodeListProvider provider, boolean anime) {
this.provider = provider;
@@ -141,7 +143,7 @@ class EpisodeListMatcher implements AutoCompleteMatcher {
return provider.getEpisodeList(selectedSearchResult, sortOrder, locale);
}
}
return new ArrayList<Episode>();
return (List<Episode>) EMPTY_LIST;
});
}).collect(toList());
@@ -169,6 +171,11 @@ class EpisodeListMatcher implements AutoCompleteMatcher {
Map<String, SearchResult> selectionMemory = new TreeMap<String, SearchResult>(CommonSequenceMatcher.getLenientCollator(Locale.ENGLISH));
Map<String, List<String>> inputMemory = new TreeMap<String, List<String>>(CommonSequenceMatcher.getLenientCollator(Locale.ENGLISH));
// merge episode matches
List<Match<File, ?>> matches = new ArrayList<Match<File, ?>>();
ExecutorService workerThreadPool = Executors.newWorkStealingPool();
try {
// detect series names and create episode list fetch tasks
List<Future<List<Match<File, ?>>>> tasks = new ArrayList<Future<List<Match<File, ?>>>>();
@@ -194,14 +201,15 @@ class EpisodeListMatcher implements AutoCompleteMatcher {
});
}
// merge episode matches
List<Match<File, ?>> matches = new ArrayList<Match<File, ?>>();
for (Future<List<Match<File, ?>>> future : tasks) {
// make sure each episode has unique object data
for (Match<File, ?> it : future.get()) {
matches.add(new Match<File, Episode>(it.getValue(), ((Episode) it.getCandidate()).clone()));
}
}
} finally {
workerThreadPool.shutdownNow();
}
// handle derived files
List<Match<File, ?>> derivateMatches = new ArrayList<Match<File, ?>>();
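Besides the pool handling, this hunk also swaps the empty-result branch from new ArrayList<Episode>() to the shared EMPTY_LIST constant (presumably statically imported from java.util.Collections), which needs an unchecked cast. A typed alternative with the same effect is Collections.emptyList(), sketched below; either way the returned list is immutable, which is only safe as long as the caller merely iterates it, as the merge loop above does.

import java.util.Collections;
import java.util.List;

public class EmptyListSketch {

	// same effect as "(List<Episode>) EMPTY_LIST" but without the unchecked cast;
	// Episode is replaced by String here just to keep the sketch self-contained
	static List<String> noEpisodes() {
		return Collections.emptyList(); // shared immutable instance, no allocation per call
	}

	public static void main(String[] args) {
		System.out.println(noEpisodes().size()); // prints 0
	}
}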


@@ -5,7 +5,6 @@ import static java.util.Comparator.*;
import static java.util.stream.Collectors.*;
import static net.filebot.Logging.*;
import static net.filebot.MediaTypes.*;
import static net.filebot.WebServices.*;
import static net.filebot.media.MediaDetection.*;
import static net.filebot.similarity.CommonSequenceMatcher.*;
import static net.filebot.similarity.Normalization.*;
@@ -28,6 +27,8 @@ import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import java.util.concurrent.RunnableFuture;
@@ -145,13 +146,15 @@ class MovieMatcher implements AutoCompleteMatcher {
movieMatchFiles.addAll(filter(orphanedFiles, SUBTITLE_FILES)); // run movie detection only on orphaned subtitle files
// match remaining movies file by file in parallel
ExecutorService workerThreadPool = Executors.newWorkStealingPool();
try {
List<Future<Map<File, List<Movie>>>> tasks = movieMatchFiles.stream().filter(f -> movieByFile.get(f) == null).map(f -> {
return workerThreadPool.submit(() -> {
if (strict) {
// in strict mode, only process movies that follow the name (year) pattern
List<Integer> year = parseMovieYear(getRelativePathTail(f, 3).getPath());
if (year.isEmpty() || isEpisode(f, true)) {
return null;
return (Map<File, List<Movie>>) EMPTY_MAP;
}
// allow only movie matches where the movie year matches the year pattern in the filename
@@ -169,7 +172,6 @@ class MovieMatcher implements AutoCompleteMatcher {
memory.put(MEMORY_SELECTION, new TreeMap<String, String>(getLenientCollator(locale)));
for (Future<Map<File, List<Movie>>> future : tasks) {
if (future.get() != null) {
for (Entry<File, List<Movie>> it : future.get().entrySet()) {
// auto-select movie or ask user
Movie movie = grabMovieName(it.getKey(), it.getValue(), strict, locale, autodetect, memory, parent);
@@ -180,6 +182,8 @@ class MovieMatcher implements AutoCompleteMatcher {
}
}
}
} finally {
workerThreadPool.shutdownNow();
}
// map movies to (possibly multiple) files (in natural order)
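The MovieMatcher hunks combine both ideas: the per-file detection tasks run on a local pool that is torn down in a finally block, and the strict-mode early exit now returns an empty map (presumably Collections.EMPTY_MAP via static import) instead of null, which is presumably what lets the null check on future.get() in the merge loop be dropped. A small illustration of the empty-map-over-null convention, with placeholder names:

import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class EmptyMapInsteadOfNullSketch {

	// returning an empty map instead of null keeps the contract "always a map",
	// so the caller can iterate entrySet() directly without a null guard
	static Map<String, List<String>> candidatesFor(String file) {
		if (file.isEmpty()) {
			return Collections.emptyMap(); // nothing to report, but still a valid map
		}
		Map<String, List<String>> result = new LinkedHashMap<String, List<String>>();
		result.put(file, Collections.singletonList(file + " (candidate)"));
		return result;
	}

	public static void main(String[] args) {
		for (String file : new String[] { "", "Alien.1979.mkv" }) {
			for (Map.Entry<String, List<String>> it : candidatesFor(file).entrySet()) {
				System.out.println(it.getKey() + " -> " + it.getValue());
			}
		}
	}
}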