* separate long-term caches that have different update frequencies

This commit is contained in:
Reinhard Pointner 2013-11-29 04:29:56 +00:00
parent e7668f2c5c
commit 198b8b0e06
5 changed files with 90 additions and 118 deletions

View File

@ -46,12 +46,26 @@
memoryStoreEvictionPolicy="LRU"
/>
<!--
Long-lived (2 months) persistent disk cache for web responses (that can be revalidated via If-Modified-Since or If-None-Match)
-->
<cache name="web-datasource-lv3"
maxElementsInMemory="200"
maxElementsOnDisk="95000"
eternal="false"
timeToIdleSeconds="5256000"
timeToLiveSeconds="5256000"
overflowToDisk="true"
diskPersistent="true"
memoryStoreEvictionPolicy="LRU"
/>
<!--
Very long-lived cache (4 months) for anime/series lists, movie index, etc.
-->
<cache name="web-persistent-datasource"
maxElementsInMemory="200"
maxElementsOnDisk="95000"
maxElementsInMemory="50"
maxElementsOnDisk="5000"
eternal="false"
timeToIdleSeconds="10512000"
timeToLiveSeconds="10512000"

View File

@ -23,7 +23,7 @@ public class CachedXmlResource extends AbstractCachedResource<String, String> {
@Override
protected Cache getCache() {
    // Cached XML resources go into the long-lived (2 months) persistent disk
    // cache; entries can be revalidated server-side instead of re-fetched.
    // NOTE: the diff residue left two consecutive return statements here (the
    // second is unreachable and would not compile); keep only the new cache
    // name introduced by this commit.
    return CacheManager.getInstance().getCache("web-datasource-lv3");
}
public Document getDocument() throws IOException {

View File

@ -45,7 +45,7 @@ public abstract class ETagCachedResource<T extends Serializable> extends CachedR
@Override
protected Cache getCache() {
    // ETag-capable resources use the long-lived (2 months) persistent disk
    // cache so entries can be revalidated via conditional requests.
    // Fixed: removed the stale duplicate return (diff residue) that referenced
    // the old "web-persistent-datasource" cache and made this method invalid.
    return CacheManager.getInstance().getCache("web-datasource-lv3");
}
}

View File

@ -1,7 +1,5 @@
package net.sourceforge.filebot.web;
import static net.sourceforge.filebot.web.WebRequest.*;
import static net.sourceforge.tuned.XPathUtilities.*;
@ -25,37 +23,30 @@ import net.sourceforge.filebot.web.FanartTV.FanartDescriptor.FanartProperty;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
public class FanartTV {
private String apikey;
// fanart.tv API key; embedded into every resource URL (see getResource)
public FanartTV(String apikey) {
this.apikey = apikey;
}
/**
 * Convenience overload: fetch series artwork by TheTVDB id using the
 * defaults (all artwork types, sort 1, limit 2).
 */
public List<FanartDescriptor> getSeriesArtwork(int tvdbid) throws Exception {
    return getSeriesArtwork(Integer.toString(tvdbid), "all", 1, 2);
}
// delegate to the generic artwork lookup with the "series" category
public List<FanartDescriptor> getSeriesArtwork(String id, String type, int sort, int limit) throws Exception {
return getArtwork("series", id, type, sort, limit);
}
/**
 * Convenience overload: fetch movie artwork by TheMovieDB id using the
 * defaults (all artwork types, sort 1, limit 2).
 */
public List<FanartDescriptor> getMovieArtwork(int tmdbid) throws Exception {
    return getMovieArtwork(Integer.toString(tmdbid), "all", 1, 2);
}
// delegate to the generic artwork lookup with the "movie" category
public List<FanartDescriptor> getMovieArtwork(String id, String type, int sort, int limit) throws Exception {
return getArtwork("movie", id, type, sort, limit);
}
public List<FanartDescriptor> getArtwork(String category, String id, String type, int sort, int limit) throws Exception {
String resource = getResource(category, id, "xml", type, sort, limit);
@ -83,23 +74,20 @@ public class FanartTV {
return fanart.toArray(new FanartDescriptor[0]);
}
@Override
protected Cache getCache() {
    // artwork responses go into the lv2 web cache
    // Fixed: removed the stale duplicate return (diff residue) referencing the
    // old "web-datasource" cache, which made the second return unreachable.
    return CacheManager.getInstance().getCache("web-datasource-lv2");
}
};
return Arrays.asList(data.get());
}
/**
 * Build the fanart.tv webservice URL for the given query parameters.
 * e.g. http://fanart.tv/webservice/series/780b986b22c35e6f7a134a2f392c2deb/70327/xml/all/1/2
 */
public String getResource(String category, String id, String format, String type, int sort, int limit) throws MalformedURLException {
    StringBuilder resource = new StringBuilder("http://api.fanart.tv/webservice");
    resource.append('/').append(category).append('/').append(apikey).append('/').append(id);
    resource.append('/').append(format).append('/').append(type);
    resource.append('/').append(sort).append('/').append(limit);
    return resource.toString();
}
public static class FanartDescriptor implements Serializable {
public static enum FanartProperty {
@ -108,32 +96,26 @@ public class FanartTV {
protected Map<FanartProperty, String> fields;
// no-arg constructor required by the serialization mechanism
protected FanartDescriptor() {
// used by serializer
}
// defensive copy of the given fields into an EnumMap keyed by FanartProperty
protected FanartDescriptor(Map<FanartProperty, String> fields) {
this.fields = new EnumMap<FanartProperty, String>(fields);
}
/**
 * Untyped accessor: resolve the property by its string name.
 * Throws IllegalArgumentException if the name is not a valid FanartProperty.
 */
public String get(Object key) {
    FanartProperty property = FanartProperty.valueOf(key.toString());
    return fields.get(property);
}
// typed accessor; returns null if the property is absent
public String get(FanartProperty key) {
return fields.get(key);
}
// value of the 'type' property; null if absent
public String getType() {
return fields.get(FanartProperty.type);
}
public Integer getId() {
try {
return new Integer(fields.get(FanartProperty.id));
@ -142,12 +124,10 @@ public class FanartTV {
}
}
/**
 * File name component of the artwork URL (last path segment).
 */
public String getName() {
    String path = getUrl().getFile();
    return new File(path).getName();
}
public URL getUrl() {
try {
return new URL(fields.get(FanartProperty.url).replaceAll(" ", "%20")); // work around server-side url encoding issues
@ -156,7 +136,6 @@ public class FanartTV {
}
}
public Integer getLikes() {
try {
return new Integer(fields.get(FanartProperty.likes));
@ -165,7 +144,6 @@ public class FanartTV {
}
}
public Locale getLanguage() {
try {
return new Locale(fields.get(FanartProperty.lang));
@ -174,7 +152,6 @@ public class FanartTV {
}
}
public Integer getSeason() {
try {
return new Integer(fields.get(FanartProperty.season));
@ -183,12 +160,10 @@ public class FanartTV {
}
}
// value of the 'disc_type' property (API spells it 'disc'); null if absent
public String getDiskType() {
return fields.get(FanartProperty.disc_type);
}
@Override
public String toString() {
return fields.toString();

View File

@ -1,7 +1,5 @@
package net.sourceforge.filebot.web;
import static net.sourceforge.filebot.web.WebRequest.*;
import static net.sourceforge.tuned.XPathUtilities.*;
@ -38,24 +36,20 @@ import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.SAXException;
public class IMDbClient implements MovieIdentificationService {
private String host = "www.imdb.com";
@Override
public String getName() {
// display name of this datasource
return "IMDb";
}
@Override
public Icon getIcon() {
// search provider icon loaded from the application resource bundle
return ResourceManager.getIcon("search.imdb");
}
protected int getImdbId(String link) {
Matcher matcher = Pattern.compile("tt(\\d{7})").matcher(link);
@ -67,7 +61,6 @@ public class IMDbClient implements MovieIdentificationService {
throw new IllegalArgumentException(String.format("Cannot find imdb id: %s", link));
}
@Override
public List<Movie> searchMovie(String query, Locale locale) throws Exception {
Document dom = parsePage(new URL("http", host, "/find?s=tt&q=" + encode(query, false)));
@ -107,7 +100,6 @@ public class IMDbClient implements MovieIdentificationService {
return results;
}
protected Movie scrapeMovie(Document dom, Locale locale) {
try {
int imdbid = getImdbId(selectString("//LINK[@rel='canonical']/@href", dom));
@ -124,7 +116,6 @@ public class IMDbClient implements MovieIdentificationService {
}
}
@Override
public Movie getMovieDescriptor(int imdbid, Locale locale) throws Exception {
try {
@ -134,7 +125,6 @@ public class IMDbClient implements MovieIdentificationService {
}
}
protected Document parsePage(URL url) throws IOException, SAXException {
CachedPage page = new CachedPage(url) {
@ -155,28 +145,23 @@ public class IMDbClient implements MovieIdentificationService {
return getHtmlDocument(page.get());
}
/**
 * Helper for scraping data in user scripts: resolve the imdb id to its
 * canonical title page and evaluate the given XPath against it.
 */
public String scrape(String imdbid, String xpath) throws IOException, SAXException {
    URL page = getMoviePageLink(getImdbId(imdbid)).toURL();
    return scrape(page, xpath);
}
/**
 * Helper for scraping data in user scripts: fetch and parse the page,
 * then evaluate the given XPath expression against the resulting DOM.
 */
public String scrape(URL url, String xpath) throws IOException, SAXException {
    Document dom = parsePage(url);
    return selectString(xpath, dom);
}
/**
 * Canonical IMDb title page for the given id,
 * e.g. http://www.imdb.com/title/tt0379786/ (id is zero-padded to 7 digits).
 */
public URI getMoviePageLink(int imdbId) {
    String link = String.format("http://www.imdb.com/title/tt%07d/", imdbId);
    return URI.create(link);
}
@Override
public Map<File, Movie> getMovieDescriptors(Collection<File> movieFiles, Locale locale) throws Exception {
// batch lookup by file is not supported by this datasource
throw new UnsupportedOperationException();
}
@SuppressWarnings({ "unchecked", "rawtypes" })
public Map<String, String> getImdbApiData(Integer i, String t, String y, boolean tomatoes) throws IOException {
// e.g. http://www.imdbapi.com/?i=tt0379786&r=xml&tomatoes=true
@ -193,17 +178,15 @@ public class IMDbClient implements MovieIdentificationService {
return attr;
}
@Override
protected Cache getCache() {
    // imdbapi responses go into the lv2 web cache
    // Fixed: removed the stale duplicate return (diff residue) referencing the
    // old "web-datasource" cache, which made the second return unreachable.
    return CacheManager.getInstance().getCache("web-datasource-lv2");
}
};
return data.get();
}
public MovieInfo getImdbApiMovieInfo(Movie movie) throws IOException {
Map<String, String> data = movie.getImdbId() > 0 ? getImdbApiData(movie.getImdbId(), "", "", false) : getImdbApiData(null, movie.getName(), String.valueOf(movie.getYear()), false);