diff --git a/htroot/yacysearchitem.java b/htroot/yacysearchitem.java index d0ba27a06..9e7a2af7e 100644 --- a/htroot/yacysearchitem.java +++ b/htroot/yacysearchitem.java @@ -376,13 +376,13 @@ public class yacysearchitem { * is null */ private static String processFaviconURL(final boolean authenticated, DigestURL faviconURL) { - final String iconUrlExt = MultiProtocolURL.getFileExtension(faviconURL.getFileName()); - /* Image format ouput for ViewFavicon servlet : default is png, except with gif and svg icons */ - final String viewFaviconExt = !iconUrlExt.isEmpty() && ImageViewer.isBrowserRendered(iconUrlExt) ? iconUrlExt : "png"; - /* Only use licence code for non authentified users. For authenticated users licence would never be released and would unnecessarily fill URLLicense.permissions. */ StringBuilder contentFaviconURL = new StringBuilder(); if (faviconURL != null) { + final String iconUrlExt = MultiProtocolURL.getFileExtension(faviconURL.getFileName()); + /* Image format output for ViewFavicon servlet : default is png, except with gif and svg icons */ + final String viewFaviconExt = !iconUrlExt.isEmpty() && ImageViewer.isBrowserRendered(iconUrlExt) ? 
iconUrlExt : "png"; + contentFaviconURL.append("ViewFavicon.").append(viewFaviconExt).append("?maxwidth=16&maxheight=16&isStatic=true&quadratic"); if (authenticated) { contentFaviconURL.append("&url=").append(faviconURL.toNormalform(true)); diff --git a/source/net/yacy/crawler/CrawlStacker.java b/source/net/yacy/crawler/CrawlStacker.java index efc205516..2b15d8c0f 100644 --- a/source/net/yacy/crawler/CrawlStacker.java +++ b/source/net/yacy/crawler/CrawlStacker.java @@ -172,6 +172,7 @@ public final class CrawlStacker { * @param hyperlinks crawl starting points links to stack * @param replace Specify whether old indexed entries should be replaced * @param timezoneOffset local time-zone offset + * @throws IllegalCrawlProfileException when the crawl profile is not active */ public void enqueueEntries( final byte[] initiator, @@ -189,8 +190,9 @@ public final class CrawlStacker { } else { error = "Rejected " + hyperlinks.size() + " crawl entries. Reason : LOST STACKER PROFILE HANDLE '" + profileHandle + "'"; } - CrawlStacker.log.info(error); // this is NOT an error but a normal effect when terminating a crawl queue - return; + CrawlStacker.log.info(error); // this is NOT an error but a normal behavior when terminating a crawl queue + /* Throw an exception to signal caller it can stop stacking URLs using this crawl profile */ + throw new IllegalCrawlProfileException("Profile " + profileHandle + " is no more active"); } if (replace) { // delete old entries, if exists to force a re-load of the url (thats wanted here) diff --git a/source/net/yacy/crawler/FileCrawlStarterTask.java b/source/net/yacy/crawler/FileCrawlStarterTask.java index c3aabc991..f4a636af8 100644 --- a/source/net/yacy/crawler/FileCrawlStarterTask.java +++ b/source/net/yacy/crawler/FileCrawlStarterTask.java @@ -156,9 +156,17 @@ public class FileCrawlStarterTask extends Thread { writer.close(); } catch (IOException e) { log.severe("Error parsing the crawlingFile " + this.crawlingFile.getAbsolutePath(), e); - 
} catch (Throwable t) { - /* Other errors are likely to occur when the crawl is interrupted : still log this at warning level to avoid polluting regular error log level */ - log.warn("Error parsing the crawlingFile " + this.crawlingFile.getAbsolutePath(), t); + } catch (IllegalCrawlProfileException e) { + /* We should get here when the crawl is stopped manually before termination */ + log.info("Parsing crawlingFile " + this.crawlingFile.getAbsolutePath() + " terminated. Crawl profile " + + this.profile.handle() + " is no more active."); + } catch (Exception e) { + /* + * Other errors are likely to occur when the crawl is interrupted : + * still log this at warning level to avoid polluting regular error + * log level + */ + log.warn("Error parsing the crawlingFile " + this.crawlingFile.getAbsolutePath(), e); } finally { if (inStream != null) { try { diff --git a/source/net/yacy/crawler/IllegalCrawlProfileException.java b/source/net/yacy/crawler/IllegalCrawlProfileException.java new file mode 100644 index 000000000..5db4af293 --- /dev/null +++ b/source/net/yacy/crawler/IllegalCrawlProfileException.java @@ -0,0 +1,50 @@ +// IllegalCrawlProfileException.java +// Copyright 2016 by luccioman; https://github.com/luccioman +// +// This is a part of YaCy, a peer-to-peer based web search engine +// +// LICENSE +// +// This program is free software; you can redistribute it and/or modify +// it under the terms of the GNU General Public License as published by +// the Free Software Foundation; either version 2 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU General Public License for more details. 
+// +// You should have received a copy of the GNU General Public License +// along with this program; if not, write to the Free Software +// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +package net.yacy.crawler; + +import net.yacy.crawler.data.CrawlProfile; + +/** + * Exception used to signal that an operation is trying to use an inactive or deleted {@link CrawlProfile}. + * @author luccioman + * + */ +public class IllegalCrawlProfileException extends RuntimeException { + + /** Generated serial ID */ + private static final long serialVersionUID = 8482302347823257958L; + + /** + * Default constructor : use a generic message + */ + public IllegalCrawlProfileException() { + super("Crawl profile can not be used"); + } + + /** + * @param message detail message + */ + public IllegalCrawlProfileException(String message) { + super(message); + } + +}