mirror of https://github.com/yacy/yacy_search_server.git

- refactoring of log to ConcurrentLog:

JDK-based loggers tend to block at
java.util.logging.Logger.log(Logger.java:476) in concurrent
environments, which makes logging a major performance issue. To
overcome this, ConcurrentLog is an add-on to JDK logging that puts log
entries on a concurrent message queue and writes them one by one from a
separate process (a minimal sketch of this pattern is shown below).
- FTPClient now uses the concurrent logger instead of the log4j logger
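
The following is a minimal, self-contained Java sketch of the queue-based logging pattern the commit message describes: call sites only enqueue a message, and a single background thread drains the queue and performs the actual java.util.logging calls. The class name QueuedLog, the Entry holder, and the queue internals are illustrative assumptions; only the public method names (info, warn, fine, severe, logException) mirror the calls visible in the diff below. This is not the actual net.yacy.cora.util.ConcurrentLog implementation.

import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;

// Illustrative sketch only -- not the actual net.yacy.cora.util.ConcurrentLog class.
public final class QueuedLog {

    // one queued log message
    private static final class Entry {
        final Level level; final String appName; final String message; final Throwable thrown;
        Entry(final Level level, final String appName, final String message, final Throwable thrown) {
            this.level = level; this.appName = appName; this.message = message; this.thrown = thrown;
        }
    }

    // unbounded queue: producers never block, they only pay for an offer()
    private static final BlockingQueue<Entry> QUEUE = new LinkedBlockingQueue<Entry>();

    static {
        // single consumer thread: all java.util.logging calls happen here, one by one
        final Thread worker = new Thread("QueuedLog.worker") {
            @Override
            public void run() {
                try {
                    while (true) {
                        final Entry e = QUEUE.take();
                        final Logger logger = Logger.getLogger(e.appName);
                        if (e.thrown == null) logger.log(e.level, e.message);
                        else logger.log(e.level, e.message, e.thrown);
                    }
                } catch (final InterruptedException stop) {
                    Thread.currentThread().interrupt(); // terminate quietly on shutdown
                }
            }
        };
        worker.setDaemon(true);
        worker.start();
    }

    // public API mirroring the call sites in the diff below
    public static void info(final String appName, final String message)   { QUEUE.offer(new Entry(Level.INFO, appName, message, null)); }
    public static void warn(final String appName, final String message)   { QUEUE.offer(new Entry(Level.WARNING, appName, message, null)); }
    public static void fine(final String appName, final String message)   { QUEUE.offer(new Entry(Level.FINE, appName, message, null)); }
    public static void severe(final String appName, final String message) { QUEUE.offer(new Entry(Level.SEVERE, appName, message, null)); }
    public static void severe(final String appName, final String message, final Throwable t) { QUEUE.offer(new Entry(Level.SEVERE, appName, message, t)); }
    public static void logException(final Throwable t) { QUEUE.offer(new Entry(Level.WARNING, "QueuedLog", String.valueOf(t.getMessage()), t)); }

    private QueuedLog() {}
}

The point of the single consumer is that contention on the underlying Logger is confined to one thread, so the call sites changed in this commit only pay for an enqueue instead of blocking inside Logger.log().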
Michael Peter Christen 2013-07-09 14:28:25 +02:00
parent 6d5533c9cd
commit 5878c1d599
280 changed files with 2547 additions and 2584 deletions
htroot
source/net/yacy

@ -45,8 +45,8 @@ import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.ListManager;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.repository.Blacklist;
import net.yacy.repository.Blacklist.BlacklistError;
@ -303,7 +303,7 @@ public class BlacklistCleaner_p {
try {
Switchboard.urlBlacklist.remove(supportedBlacklistType, host, path);
} catch (final RuntimeException e) {
Log.logSevere("BLACKLIST-CLEANER", e.getMessage() + ": " + host + "/" + path);
ConcurrentLog.severe("BLACKLIST-CLEANER", e.getMessage() + ": " + host + "/" + path);
}
}
}
@ -355,7 +355,7 @@ public class BlacklistCleaner_p {
}
pw.close();
} catch (final IOException e) {
Log.logSevere("BLACKLIST-CLEANER", "error on writing altered entries to blacklist", e);
ConcurrentLog.severe("BLACKLIST-CLEANER", "error on writing altered entries to blacklist", e);
}
return newEntry.length;
}

@ -37,10 +37,10 @@ import java.util.List;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.ListManager;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.repository.Blacklist;
import net.yacy.repository.Blacklist.BlacklistType;
@ -181,7 +181,7 @@ public class Blacklist_p {
final File blackListFile = new File(ListManager.listsPath, blacklistToUse);
if(!blackListFile.delete()) {
Log.logWarning("Blacklist", "file "+ blackListFile +" could not be deleted!");
ConcurrentLog.warn("Blacklist", "file "+ blackListFile +" could not be deleted!");
}
for (final BlacklistType supportedBlacklistType : BlacklistType.values()) {
@ -590,7 +590,7 @@ public class Blacklist_p {
// ignore empty entries
if(newEntry == null || newEntry.isEmpty()) {
Log.logWarning("Blacklist", "skipped adding an empty entry");
ConcurrentLog.warn("Blacklist", "skipped adding an empty entry");
return "";
}

@ -39,12 +39,12 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.BlogBoard;
import net.yacy.data.BlogBoardComments;
import net.yacy.data.MessageBoard;
import net.yacy.data.UserDB;
import net.yacy.data.BlogBoard.BlogEntry;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.Network;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -147,7 +147,7 @@ public class BlogComments {
try {
Files.copy(notifierSource, notifierDest);
} catch (final IOException e) {
Log.logSevere("MESSAGE", "NEW MESSAGE ARRIVED! (error: " + e.getMessage() + ")");
ConcurrentLog.severe("MESSAGE", "NEW MESSAGE ARRIVED! (error: " + e.getMessage() + ")");
}
}
@ -320,7 +320,7 @@ public class BlogComments {
email.print(new String(emailTxt));
email.close();
} catch (final Exception e) {
Network.log.logWarning("message: message forwarding via email failed. ",e);
Network.log.warn("message: message forwarding via email failed. ",e);
}
}
}

@ -44,6 +44,7 @@ import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.BookmarkHelper;
import net.yacy.data.BookmarksDB;
import net.yacy.data.ListManager;
@ -54,7 +55,6 @@ import net.yacy.document.Document;
import net.yacy.document.Parser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.NewsPool;
import net.yacy.search.Switchboard;
import net.yacy.search.snippet.TextSnippet;
@ -212,7 +212,7 @@ public class Bookmarks {
prop.putHTML("mode_path","");
prop.put("mode_public", "0");
prop.put("mode_feed", "0"); //TODO: check if it IS a feed
} catch (final IOException e) {Log.logException(e);} catch (final Parser.Failure e) {Log.logException(e);}
} catch (final IOException e) {ConcurrentLog.logException(e);} catch (final Parser.Failure e) {ConcurrentLog.logException(e);}
} else {
// get from the bookmark database
prop.put("mode_edit", "1"); // edit mode
@ -243,14 +243,14 @@ public class Bookmarks {
tags="unsorted";
}
Log.logInfo("BOOKMARKS", "Trying to import bookmarks from HTML-file");
ConcurrentLog.info("BOOKMARKS", "Trying to import bookmarks from HTML-file");
try {
final File file = new File(post.get("htmlfile"));
BookmarkHelper.importFromBookmarks(sb.bookmarksDB, new DigestURI(file), post.get("htmlfile$file"), tags, isPublic);
} catch (final MalformedURLException e) {}
Log.logInfo("BOOKMARKS", "success!!");
ConcurrentLog.info("BOOKMARKS", "success!!");
} else if (post.containsKey("xmlfile")) {

@ -27,10 +27,10 @@ import java.net.MalformedURLException;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.Cache;
import net.yacy.document.ImageParser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
import net.yacy.server.servletProperties;
@ -48,7 +48,7 @@ public class CacheResource_p {
try {
url = new DigestURI(u);
} catch (MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return prop;
}

@ -38,9 +38,9 @@ import net.yacy.cora.order.Base64Order;
import net.yacy.cora.order.Digest;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.data.UserDB.AccessRight;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.server.serverObjects;
@ -224,7 +224,7 @@ public class ConfigAccounts_p {
}
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else {

@ -29,8 +29,8 @@ import com.google.common.io.Files;
import java.io.File;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.storage.Configuration;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import java.io.IOException;
@ -187,7 +187,7 @@ public class ConfigHeuristics_p {
}
prop.put("osdcfg", c);
} catch (IOException e1) {
Log.logException(e1);
ConcurrentLog.logException(e1);
prop.put("osdcfg", 0);
}
prop.putHTML("osderrmsg",osderrmsg);
@ -234,7 +234,7 @@ public class ConfigHeuristics_p {
}
}
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
// re-read config (and create/update work table)

@ -31,9 +31,9 @@ import java.io.FileNotFoundException;
import java.io.IOException;
import java.util.Properties;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.server.serverObjects;
@ -109,17 +109,17 @@ public class ConfigPortal {
fis = new FileInputStream(new File(sb.appPath, "defaults/yacy.init"));
config.load(fis);
} catch (final FileNotFoundException e) {
Log.logSevere(mes, "could not find configuration file.");
ConcurrentLog.severe(mes, "could not find configuration file.");
return prop;
} catch (final IOException e) {
Log.logSevere(mes, "could not read configuration file.");
ConcurrentLog.severe(mes, "could not read configuration file.");
return prop;
} finally {
if (fis != null) {
try {
fis.close();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}

@ -33,8 +33,8 @@ import java.sql.Date;
import java.util.Properties;
import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.server.serverObjects;
@ -99,17 +99,17 @@ public class ConfigSearchPage_p {
fis = new FileInputStream(new File(sb.appPath, "defaults/yacy.init"));
config.load(fis);
} catch (final FileNotFoundException e) {
Log.logSevere(mes, "could not find configuration file.");
ConcurrentLog.severe(mes, "could not find configuration file.");
return prop;
} catch (final IOException e) {
Log.logSevere(mes, "could not read configuration file.");
ConcurrentLog.severe(mes, "could not read configuration file.");
return prop;
} finally {
if (fis != null) {
try {
fis.close();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}

@ -33,8 +33,8 @@ import java.util.Set;
import java.util.TreeSet;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.util.OS;
import net.yacy.peers.operation.yacyBuildProperties;
@ -97,7 +97,7 @@ public class ConfigUpdate_p {
versionToDownload.downloadRelease();
} catch (final IOException e) {
// TODO Auto-generated catch block
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -115,10 +115,10 @@ public class ConfigUpdate_p {
FileUtils.deletedelete(new File(sb.releasePath, release));
FileUtils.deletedelete(new File(sb.releasePath, release + ".sig"));
} else {
sb.getLog().logSevere("AUTO-UPDATE: could not delete " + release + ": file not in release directory.");
sb.getLog().severe("AUTO-UPDATE: could not delete " + release + ": file not in release directory.");
}
} catch (final NullPointerException e) {
sb.getLog().logSevere("AUTO-UPDATE: could not delete release " + release + ": " + e.getMessage());
sb.getLog().severe("AUTO-UPDATE: could not delete release " + release + ": " + e.getMessage());
}
}
}
@ -129,20 +129,20 @@ public class ConfigUpdate_p {
prop.put("candeploy_autoUpdate", "2"); // no more recent release found
} else {
// there is a version that is more recent. Load it and re-start with it
sb.getLog().logInfo("AUTO-UPDATE: downloading more recent release " + updateVersion.getUrl());
sb.getLog().info("AUTO-UPDATE: downloading more recent release " + updateVersion.getUrl());
final File downloaded = updateVersion.downloadRelease();
prop.putHTML("candeploy_autoUpdate_downloadedRelease", updateVersion.getName());
final boolean devenvironment = new File(sb.getAppPath(), ".git").exists();
if (devenvironment) {
sb.getLog().logInfo("AUTO-UPDATE: omitting update because this is a development environment");
sb.getLog().info("AUTO-UPDATE: omitting update because this is a development environment");
prop.put("candeploy_autoUpdate", "3");
} else if ((downloaded == null) || (!downloaded.exists()) || (downloaded.length() == 0)) {
sb.getLog().logInfo("AUTO-UPDATE: omitting update because download failed (file cannot be found, is too small or signature was bad)");
sb.getLog().info("AUTO-UPDATE: omitting update because download failed (file cannot be found, is too small or signature was bad)");
prop.put("candeploy_autoUpdate", "4");
} else {
yacyRelease.deployRelease(downloaded);
sb.terminate(10, "manual release update to " + downloaded.getName());
sb.getLog().logInfo("AUTO-UPDATE: deploy and restart initiated");
sb.getLog().info("AUTO-UPDATE: deploy and restart initiated");
prop.put("candeploy_autoUpdate", "1");
}
}

@ -37,7 +37,7 @@ import java.util.Set;
import net.yacy.cora.protocol.ConnectionInfo;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.workflow.WorkflowThread;
import net.yacy.peers.PeerActions;
import net.yacy.peers.Seed;
@ -111,7 +111,7 @@ public final class Connections_p {
prop.put("list_" + idx + "_serverSessionID",URLEncoder.encode(s.getName(),"UTF8"));
} catch (final UnsupportedEncodingException e) {
// TODO Auto-generated catch block
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.putHTML("list_" + idx + "_sessionName", s.getName());
prop.put("list_" + idx + "_proto", prot);

@ -26,10 +26,10 @@ import java.io.File;
import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.document.content.dao.Dao;
import net.yacy.document.content.dao.ImportDump;
import net.yacy.document.content.dao.PhpBB3Dao;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -87,7 +87,7 @@ public class ContentIntegrationPHPBB3_p {
prop.putHTML("check_last", db.latest().toString());
db.close();
} catch (Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("check", 2);
prop.put("check_error", e.getMessage());
}
@ -111,7 +111,7 @@ public class ContentIntegrationPHPBB3_p {
prop.put("export_files", files);
db.close();
} catch (Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("export", 2);
prop.put("export_error", e.getMessage());
}
@ -136,7 +136,7 @@ public class ContentIntegrationPHPBB3_p {
prop.put("import", 1);
importer.close();
} catch (Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("import", 2);
prop.put("import_error", e.getMessage());
}

@ -27,12 +27,12 @@ import java.util.regex.Pattern;
import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.crawler.retrieval.Request;
import net.yacy.crawler.retrieval.Response;
import net.yacy.crawler.robots.RobotsTxtEntry;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -65,7 +65,7 @@ public class CrawlCheck_p {
DigestURI crawlingStartURL = new DigestURI(crawlingStart);
rootURLs.add(crawlingStartURL);
} catch (MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}

@ -31,10 +31,10 @@ import java.util.Map;
import java.util.TreeMap;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.CrawlSwitchboard;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -107,7 +107,7 @@ public class CrawlProfileEditor_p {
sb.crawler.removeActive(handle.getBytes());
sb.crawlQueues.noticeURL.removeByProfileHandle(handle, 10000);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
if (post.containsKey("delete")) {
// deletion of a terminated crawl profile
@ -159,7 +159,7 @@ public class CrawlProfileEditor_p {
}
}
} catch (final Exception ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
prop.put("error", "1");
prop.putHTML("error_message", ex.getMessage());
}

@ -34,11 +34,11 @@ import java.util.Map;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.ResultURLs;
import net.yacy.crawler.data.ResultURLs.EventOrigin;
import net.yacy.crawler.data.ResultURLs.InitExecEntry;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.Seed;
import net.yacy.search.Switchboard;
import net.yacy.search.schema.CollectionSchema;
@ -184,7 +184,7 @@ public class CrawlResults {
try {
urle = sb.index.fulltext().getMetadata(UTF8.getBytes(entry.getKey()));
if (urle == null) {
Log.logWarning("PLASMA", "CrawlResults: URL not in index with url hash " + entry.getKey());
ConcurrentLog.warn("PLASMA", "CrawlResults: URL not in index with url hash " + entry.getKey());
urlstr = null;
urltxt = null;
continue;
@ -271,7 +271,7 @@ public class CrawlResults {
dark = !dark;
cnt++;
} catch (final Exception e) {
Log.logSevere("PLASMA", "genTableProps", e);
ConcurrentLog.severe("PLASMA", "genTableProps", e);
}
}
prop.put("table_indexed", cnt);

@ -38,9 +38,9 @@ import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.Scanner;
import net.yacy.cora.protocol.Scanner.Access;
import net.yacy.cora.sorting.ReversibleScoreMap;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.query.SearchEventCache;
@ -201,7 +201,7 @@ public class CrawlStartScanner_p
uu = u.url();
pkmap.put(uu.hash(), uu);
} catch ( final MalformedURLException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
// search for crawl start requests in this mapping
@ -266,7 +266,7 @@ public class CrawlStartScanner_p
u.hash());
}
} catch ( final MalformedURLException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
} catch ( final ConcurrentModificationException e ) {

@ -39,6 +39,7 @@ import net.yacy.cora.document.ASCII;
import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.CrawlSwitchboard;
import net.yacy.crawler.data.CrawlProfile;
@ -52,7 +53,6 @@ import net.yacy.document.parser.html.ContentScraper;
import net.yacy.document.parser.html.TransformerWriter;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.peers.NewsPool;
import net.yacy.repository.Blacklist.BlacklistType;
@ -96,7 +96,7 @@ public class Crawler_p {
if (post != null) {
String c = post.toString();
if (c.length() < 1000) Log.logInfo("Crawl Start", c);
if (c.length() < 1000) ConcurrentLog.info("Crawl Start", c);
}
if (post != null && post.containsKey("continue")) {
@ -132,7 +132,7 @@ public class Crawler_p {
sb.crawler.removeActive(handle.getBytes());
sb.crawlQueues.noticeURL.removeByProfileHandle(handle, 10000);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
if (post != null && post.containsKey("crawlingstart")) {
@ -189,7 +189,7 @@ public class Crawler_p {
if (crawlingStartURL != null && (crawlingStartURL.isFile() || crawlingStartURL.isSMB())) storeHTCache = false;
} catch (MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else {
crawlName = crawlingFile.getName();
@ -294,7 +294,7 @@ public class Crawler_p {
newRootURLs.add(u);
}
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
rootURLs = newRootURLs;
@ -319,7 +319,7 @@ public class Crawler_p {
String basepath = u.toNormalform(true);
if (!basepath.endsWith("/")) {int p = basepath.lastIndexOf("/"); if (p > 0) basepath = basepath.substring(0, p + 1);}
int count = sb.index.fulltext().remove(basepath, deleteageDate);
if (count > 0) Log.logInfo("Crawler_p", "deleted " + count + " documents for host " + u.getHost());
if (count > 0) ConcurrentLog.info("Crawler_p", "deleted " + count + " documents for host " + u.getHost());
}
}
}
@ -390,7 +390,7 @@ public class Crawler_p {
for (byte[] hosthash: hosthashes) {
try {
sb.index.fulltext().getDefaultConnector().deleteByQuery(CollectionSchema.host_id_s.getSolrFieldName() + ":\"" + ASCII.String(hosthash) + "\" AND " + CollectionSchema.failreason_s.getSolrFieldName() + ":[* TO *]");
} catch (IOException e) {Log.logException(e);}
} catch (IOException e) {ConcurrentLog.logException(e);}
}
sb.index.fulltext().commit(true);
@ -472,7 +472,7 @@ public class Crawler_p {
prop.put("info", "6");//Error with url
prop.putHTML("info_crawlingStart", sitemapURLStr);
prop.putHTML("info_error", e.getMessage());
Log.logException(e);
ConcurrentLog.logException(e);
}
} else if ("file".equals(crawlingMode)) {
if (post.containsKey("crawlingFile")) {
@ -510,7 +510,7 @@ public class Crawler_p {
prop.put("info", "7"); // Error with file
prop.putHTML("info_crawlingStart", crawlingFileName);
prop.putHTML("info_error", e.getMessage());
Log.logException(e);
ConcurrentLog.logException(e);
}
sb.continueCrawlJob(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL);
}

@ -26,11 +26,11 @@ import net.yacy.cora.geo.GeonamesLocation;
import net.yacy.cora.geo.OpenGeoDBLocation;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.crawler.retrieval.Response;
import net.yacy.document.LibraryProvider;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -75,11 +75,11 @@ public class DictionaryLoader_p {
prop.put("geon0Status", LibraryProvider.Dictionary.GEON0.file().exists() ? 1 : 0);
prop.put("geon0ActionLoaded", 1);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geon0ActionLoaded", 2);
prop.put("geon0ActionLoaded_error", e.getMessage());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geon0ActionLoaded", 2);
prop.put("geon0ActionLoaded_error", e.getMessage());
}
@ -117,11 +117,11 @@ public class DictionaryLoader_p {
prop.put("geon1Status", LibraryProvider.Dictionary.GEON1.file().exists() ? 1 : 0);
prop.put("geon1ActionLoaded", 1);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geon1ActionLoaded", 2);
prop.put("geon1ActionLoaded_error", e.getMessage());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geon1ActionLoaded", 2);
prop.put("geon1ActionLoaded_error", e.getMessage());
}
@ -159,11 +159,11 @@ public class DictionaryLoader_p {
prop.put("geon2Status", LibraryProvider.Dictionary.GEON2.file().exists() ? 1 : 0);
prop.put("geon2ActionLoaded", 1);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geon2ActionLoaded", 2);
prop.put("geon2ActionLoaded_error", e.getMessage());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geon2ActionLoaded", 2);
prop.put("geon2ActionLoaded_error", e.getMessage());
}
@ -202,11 +202,11 @@ public class DictionaryLoader_p {
prop.put("geo1Status", LibraryProvider.Dictionary.GEODB1.file().exists() ? 1 : 0);
prop.put("geo1ActionLoaded", 1);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geo1ActionLoaded", 2);
prop.put("geo1ActionLoaded_error", e.getMessage());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("geo1ActionLoaded", 2);
prop.put("geo1ActionLoaded_error", e.getMessage());
}
@ -244,11 +244,11 @@ public class DictionaryLoader_p {
prop.put("drw0Status", LibraryProvider.Dictionary.DRW0.file().exists() ? 1 : 0);
prop.put("drw0ActionLoaded", 1);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("drw0ActionLoaded", 2);
prop.put("drw0ActionLoaded_error", e.getMessage());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("drw0ActionLoaded", 2);
prop.put("drw0ActionLoaded_error", e.getMessage());
}
@ -287,11 +287,11 @@ public class DictionaryLoader_p {
prop.put("pnd0Status", LibraryProvider.Dictionary.PND0.file().exists() ? 1 : 0);
prop.put("pnd0ActionLoaded", 1);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("pnd0ActionLoaded", 2);
prop.put("pnd0ActionLoaded_error", e.getMessage());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("pnd0ActionLoaded", 2);
prop.put("pnd0ActionLoaded_error", e.getMessage());
}

@ -42,12 +42,12 @@ import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.sorting.ClusteredScoreMap;
import net.yacy.cora.sorting.ReversibleScoreMap;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.HarvestProcess;
import net.yacy.crawler.data.NoticedURL.StackType;
import net.yacy.crawler.retrieval.Request;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.graphics.WebStructureGraph.StructureEntry;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Fulltext;
@ -151,15 +151,15 @@ public class HostBrowser {
try {
fulltext.getDefaultConnector().deleteByQuery("-" + CollectionSchema.httpstatus_i.getSolrFieldName() + ":200 AND "
+ CollectionSchema.httpstatus_i.getSolrFieldName() + ":[* TO *]"); // make sure field exists
Log.logInfo ("HostBrowser:", "delete documents with httpstatus_i <> 200");
ConcurrentLog.info ("HostBrowser:", "delete documents with httpstatus_i <> 200");
fulltext.getDefaultConnector().deleteByQuery(CollectionSchema.failtype_s.getSolrFieldName() + ":\"" + FailType.fail.name() + "\"" );
Log.logInfo ("HostBrowser:", "delete documents with failtype_s = fail");
ConcurrentLog.info ("HostBrowser:", "delete documents with failtype_s = fail");
fulltext.getDefaultConnector().deleteByQuery(CollectionSchema.failtype_s.getSolrFieldName() + ":\"" + FailType.excl.name() + "\"" );
Log.logInfo ("HostBrowser:", "delete documents with failtype_s = excl");
ConcurrentLog.info ("HostBrowser:", "delete documents with failtype_s = excl");
prop.putNum("ucount", fulltext.collectionSize());
return prop;
} catch (IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
}
@ -217,7 +217,7 @@ public class HostBrowser {
prop.put("hosts_list", c);
prop.put("hosts", 1);
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -349,7 +349,7 @@ public class HostBrowser {
for (Map.Entry<String, FailType> e: errorDocs.entrySet()) files.put(e.getKey(), e.getValue() == FailType.fail ? StoreType.FAILED : StoreType.EXCLUDED);
for (String u: inboundLinks) if (!files.containsKey(u)) files.put(u, StoreType.LINK);
for (String u: loadingLinks) if (u.startsWith(path) && !files.containsKey(u)) files.put(u, StoreType.LINK);
Log.logInfo("HostBrowser", "collected " + files.size() + " urls for path " + path);
ConcurrentLog.info("HostBrowser", "collected " + files.size() + " urls for path " + path);
// distinguish files and folders
Map<String, Object> list = new TreeMap<String, Object>(); // a directory list; if object is boolean, its a file; if its a int[], then its a folder
@ -500,7 +500,7 @@ public class HostBrowser {
}
} catch (Throwable e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}

@ -39,6 +39,7 @@ import net.yacy.cora.document.analysis.Classification.ContentDomain;
import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.data.ListManager;
import net.yacy.document.Condenser;
@ -49,7 +50,6 @@ import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.data.word.WordReference;
import net.yacy.kelondro.data.word.WordReferenceRow;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.rwi.Reference;
import net.yacy.kelondro.rwi.ReferenceContainer;
import net.yacy.kelondro.rwi.ReferenceContainerCache;
@ -118,7 +118,7 @@ public class IndexControlRWIs_p {
try {
urlb.put(s.getBytes());
} catch ( final SpaceExceededException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -172,7 +172,7 @@ public class IndexControlRWIs_p {
try {
urlb.put(en.next().urlhash());
} catch ( final SpaceExceededException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
index = null;
@ -191,7 +191,7 @@ public class IndexControlRWIs_p {
post.remove("keyhashdeleteall");
post.put("urllist", "generated");
} catch ( final IOException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -215,7 +215,7 @@ public class IndexControlRWIs_p {
try {
urlHashes.put(b);
} catch ( final SpaceExceededException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
segment.termIndex().remove(keyhash, urlHashes);
@ -224,7 +224,7 @@ public class IndexControlRWIs_p {
post.remove("keyhashdelete");
post.put("urllist", "generated");
} catch ( final IOException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -286,14 +286,14 @@ public class IndexControlRWIs_p {
try {
unknownURLEntries.put(iEntry.urlhash());
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
urlIter.remove();
} else {
try {
knownURLs.put(iEntry.urlhash());
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -307,7 +307,7 @@ public class IndexControlRWIs_p {
try {
icc.add(index);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
// transport to other peer
@ -325,7 +325,7 @@ public class IndexControlRWIs_p {
prop.put("result", "Peer " + host + " not found");
}
} catch ( final IOException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -355,7 +355,7 @@ public class IndexControlRWIs_p {
prop.put("keyhashsimilar_rows", rows + 1);
prop.put("result", "");
} catch ( final IOException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -378,7 +378,7 @@ public class IndexControlRWIs_p {
try {
urlHashes.put(b);
} catch ( final SpaceExceededException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
url = segment.fulltext().getURL(b);
segment.fulltext().remove(b);
@ -412,7 +412,7 @@ public class IndexControlRWIs_p {
try {
urlHashes.put(b);
} catch ( final SpaceExceededException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
url = segment.fulltext().getURL(b);
segment.fulltext().remove(b);
@ -437,7 +437,7 @@ public class IndexControlRWIs_p {
try {
segment.termIndex().remove(keyhash, urlHashes);
} catch ( final IOException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}

@ -38,13 +38,13 @@ import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.lod.JenaTripleStore;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.sorting.ReversibleScoreMap;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.Cache;
import net.yacy.crawler.data.ResultURLs;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Fulltext;
import net.yacy.search.index.Segment;
@ -318,7 +318,7 @@ public class IndexControlURLs_p {
cnt++;
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("statisticslines_domains", cnt);
prop.put("statisticslines", 1);

@ -11,11 +11,11 @@ import java.util.regex.PatternSyntaxException;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.CrawlSwitchboard;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.crawler.data.NoticedURL.StackType;
import net.yacy.crawler.retrieval.Request;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.Seed;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -93,13 +93,13 @@ public class IndexCreateQueues_p {
if (value != null && compiledPattern.matcher(value).matches()) removehashes.add(entry.url().hash());
}
Log.logInfo("IndexCreateQueues_p", "created a remove list with " + removehashes.size() + " entries for pattern '" + deletepattern + "'");
ConcurrentLog.info("IndexCreateQueues_p", "created a remove list with " + removehashes.size() + " entries for pattern '" + deletepattern + "'");
for (final byte[] b: removehashes) {
sb.crawlQueues.noticeURL.removeByURLHash(b);
}
}
} catch (final PatternSyntaxException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}

@ -32,7 +32,7 @@ import net.yacy.cora.federate.solr.connector.SolrConnector;
import net.yacy.cora.federate.solr.instance.RemoteInstance;
import net.yacy.cora.federate.solr.instance.ShardInstance;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.util.OS;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
@ -56,7 +56,7 @@ public class IndexFederated_p {
final int wordCacheMaxCount = (int) sb.getConfigLong(SwitchboardConstants.WORDCACHE_MAX_COUNT, 20000);
final long fileSizeMax = (OS.isWindows) ? sb.getConfigLong("filesize.max.win", Integer.MAX_VALUE) : sb.getConfigLong( "filesize.max.other", Integer.MAX_VALUE);
sb.index.connectRWI(wordCacheMaxCount, fileSizeMax);
} catch (IOException e) { Log.logException(e); } // switch on
} catch (IOException e) { ConcurrentLog.logException(e); } // switch on
boolean post_core_citation = post.getBoolean(SwitchboardConstants.CORE_SERVICE_CITATION);
final boolean previous_core_citation = sb.index.connectedCitation() && env.getConfigBool(SwitchboardConstants.CORE_SERVICE_CITATION, false);
@ -66,7 +66,7 @@ public class IndexFederated_p {
final int wordCacheMaxCount = (int) sb.getConfigLong(SwitchboardConstants.WORDCACHE_MAX_COUNT, 20000);
final long fileSizeMax = (OS.isWindows) ? sb.getConfigLong("filesize.max.win", Integer.MAX_VALUE) : sb.getConfigLong( "filesize.max.other", Integer.MAX_VALUE);
sb.index.connectCitation(wordCacheMaxCount, fileSizeMax);
} catch (IOException e) { Log.logException(e); } // switch on
} catch (IOException e) { ConcurrentLog.logException(e); } // switch on
boolean post_core_fulltext = post.getBoolean(SwitchboardConstants.CORE_SERVICE_FULLTEXT);
final boolean previous_core_fulltext = sb.index.fulltext().connectedLocalSolr() && env.getConfigBool(SwitchboardConstants.CORE_SERVICE_FULLTEXT, false);
@ -80,7 +80,7 @@ public class IndexFederated_p {
if (!previous_core_fulltext && post_core_fulltext) {
// switch on
sb.index.connectUrlDb(sb.useTailCache, sb.exceed134217727);
try { sb.index.fulltext().connectLocalSolr(); } catch (IOException e) { Log.logException(e); }
try { sb.index.fulltext().connectLocalSolr(); } catch (IOException e) { ConcurrentLog.logException(e); }
}
boolean webgraph = post.getBoolean(SwitchboardConstants.CORE_SERVICE_WEBGRAPH);
@ -119,7 +119,7 @@ public class IndexFederated_p {
try {
sb.index.fulltext().disconnectRemoteSolr();
} catch (Throwable e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -135,15 +135,15 @@ public class IndexFederated_p {
sb.index.fulltext().disconnectRemoteSolr();
}
} catch (final Throwable e) {
Log.logException(e);
ConcurrentLog.logException(e);
try {
sb.index.fulltext().disconnectRemoteSolr();
} catch (Throwable ee) {
Log.logException(ee);
ConcurrentLog.logException(ee);
}
}
} catch (SolrException e) {
Log.logSevere("IndexFederated_p", "change of solr connection failed", e);
ConcurrentLog.severe("IndexFederated_p", "change of solr connection failed", e);
}
boolean lazy = post.getBoolean("solr.indexing.lazy");

@ -32,12 +32,12 @@ import java.util.Set;
import java.util.TreeSet;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.WorkTables;
import net.yacy.document.importer.OAIPMHImporter;
import net.yacy.document.importer.OAIPMHLoader;
import net.yacy.document.importer.ResumptionToken;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -79,11 +79,11 @@ public class IndexImportOAIPMH_p {
prop.put("defaulturl", e.getMessage());
}
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("import-one", 2);
prop.put("import-one_error", e.getMessage());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("import-one", 2);
prop.put("import-one_error", e.getMessage());
}
@ -101,7 +101,7 @@ public class IndexImportOAIPMH_p {
prop.put("optiongetlist", 1);
prop.put("iframetype", 1);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("status", 2);
prop.put("status_message", e.getMessage());
}
@ -134,7 +134,7 @@ public class IndexImportOAIPMH_p {
final OAIPMHImporter job = new OAIPMHImporter(sb.loader, url);
job.start();
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}

@ -1,46 +0,0 @@
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" >
<head>
<title>YaCy '#[clientname]#': Field Re-Indexing</title>
#%env/templates/metas.template%#
</head>
<body id="IndexReindexMonitor">
#%env/templates/header.template%#
#%env/templates/submenuIndexControl.template%#
<h2>Field Re-Indexing</h2>
<p>In case that an index schema has changed, all documents with missing field entries can be indexed again with a reindex job.</p>
<form action="IndexReindexMonitor_p.html" method="post" enctype="multipart/form-data" accept-charset="UTF-8">
<fieldset>
<table>
<tr>
<td>Documents in current queue</td>
<td>#[querysize]#</td>
<td>#(showstartbutton)#<input type="submit" value="refresh page"/>::#(/showstartbutton)#</td>
</tr>
<tr>
<td>Documents processed</td>
<td>#[docsprocessed]#</td>
<td></td>
</tr>
<tr>
<td>current select query </td>
<td>#[currentselectquery]#</td>
<td></td>
</tr>
<tr>
<td>&nbsp;</td>
<td></td>
<td></td>
</tr>
</table>
#(showstartbutton)#
<input type="submit" name="stopreindex" value="stop reindexing"/>
::<input type="submit" name="reindexnow" value="start reindex job now"/>
#(/showstartbutton)#
<p class="info">#[infomessage]#</p>
</fieldset>
</form>
#%env/templates/footer.template%#
</body>
</html>

@ -25,7 +25,7 @@ import java.util.Iterator;
import net.yacy.cora.federate.solr.SchemaConfiguration;
import net.yacy.cora.federate.solr.SchemaDeclaration;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.search.Switchboard;
import net.yacy.search.schema.CollectionSchema;
import net.yacy.search.schema.WebgraphConfiguration;
@ -94,7 +94,7 @@ public class IndexSchema_p {
}
cs.commit();
} catch (IOException ex) {
Log.logWarning("IndexSchema", "file " + solrInitFile.getAbsolutePath() + " not found");
ConcurrentLog.warn("IndexSchema", "file " + solrInitFile.getAbsolutePath() + " not found");
}
}

@ -39,6 +39,7 @@ import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.CommonPattern;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.HarvestProcess;
import net.yacy.crawler.data.CrawlQueues;
@ -48,7 +49,6 @@ import net.yacy.data.WorkTables;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.blob.Tables.Row;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.search.schema.CollectionSchema;
@ -78,7 +78,7 @@ public class Load_RSS_p {
if (entry.getValue().startsWith("mark_")) try {
sb.tables.delete("rss", entry.getValue().substring(5).getBytes());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -103,9 +103,9 @@ public class Load_RSS_p {
sb.tables.delete("rss", pk);
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
if (post != null && post.containsKey("removeSelectedFeedsScheduler")) {
@ -118,9 +118,9 @@ public class Load_RSS_p {
rssRow.remove("api_pk");
sb.tables.insert("rss", pk, rssRow);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -149,9 +149,9 @@ public class Load_RSS_p {
sb.tables.insert("rss", pk, rssRow);
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
if (post != null && post.containsKey("addSelectedFeedScheduler")) {
@ -162,17 +162,17 @@ public class Load_RSS_p {
final byte [] pk = entry.getValue().substring(5).getBytes();
row = sb.tables.select("rss", pk);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
continue;
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
continue;
}
DigestURI url = null;
try {
url = new DigestURI(row.get("url", ""));
} catch (final MalformedURLException e) {
Log.logWarning("Load_RSS", "malformed url '" + row.get("url", "") + "': " + e.getMessage());
ConcurrentLog.warn("Load_RSS", "malformed url '" + row.get("url", "") + "': " + e.getMessage());
continue;
}
// load feeds concurrently to get better responsibility in web interface
@ -240,9 +240,9 @@ public class Load_RSS_p {
prop.put("shownewfeeds_num", newc);
prop.put("shownewfeeds", newc > 0 ? 1 : 0);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
return prop;
@ -260,7 +260,7 @@ public class Load_RSS_p {
try {
url = post.containsKey("url") ? new DigestURI(post.get("url", "")) : null;
} catch (final MalformedURLException e) {
Log.logWarning("Load_RSS_p", "url not well-formed: '" + post.get("url", "") + "'");
ConcurrentLog.warn("Load_RSS_p", "url not well-formed: '" + post.get("url", "") + "'");
}
// if we have an url then try to load the rss
@ -271,7 +271,7 @@ public class Load_RSS_p {
final byte[] resource = response == null ? null : response.getContent();
rss = resource == null ? null : RSSReader.parse(RSSFeed.DEFAULT_MAXSIZE, resource);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
// index all selected items: description only
@ -286,7 +286,7 @@ public class Load_RSS_p {
if (RSSLoader.indexTriggered.containsKey(messageurl.hash())) continue loop;
messages.put(ASCII.String(messageurl.hash()), message);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
Map<String, HarvestProcess> existingurls = sb.urlExists(messages.keySet());
@ -298,7 +298,7 @@ public class Load_RSS_p {
list.add(messageurl);
RSSLoader.indexTriggered.insertIfAbsent(messageurl.hash(), new Date());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
sb.addToIndex(list, null, null, collections);
@ -337,7 +337,7 @@ public class Load_RSS_p {
final DigestURI messageurl = new DigestURI(item.getLink());
urls.put(ASCII.String(messageurl.hash()), messageurl);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
continue;
}
}
@ -361,7 +361,7 @@ public class Load_RSS_p {
prop.putHTML("showitems_item_" + i + "_date", (pubDate == null) ? "" : DateFormat.getDateTimeInstance().format(pubDate));
i++;
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
continue;
}
}

@ -27,7 +27,7 @@
import java.util.concurrent.Semaphore;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.peers.graphics.EncodedImage;
import net.yacy.peers.graphics.NetworkGraph;
import net.yacy.search.Switchboard;
@ -52,7 +52,7 @@ public class NetworkPicture
final long timeSeconds = System.currentTimeMillis() / 1000;
if ( buffer != null && !authorized && timeSeconds - lastAccessSeconds < 2 ) {
Log.logInfo("NetworkPicture", "cache hit (1); authorized = "
ConcurrentLog.info("NetworkPicture", "cache hit (1); authorized = "
+ authorized
+ ", timeSeconds - lastAccessSeconds = "
+ (timeSeconds - lastAccessSeconds));
@ -65,7 +65,7 @@ public class NetworkPicture
sync.acquireUninterruptibly();
if ( buffer != null && !authorized && timeSeconds - lastAccessSeconds < 2 ) {
Log.logInfo("NetworkPicture", "cache hit (2); authorized = "
ConcurrentLog.info("NetworkPicture", "cache hit (2); authorized = "
+ authorized
+ ", timeSeconds - lastAccessSeconds = "
+ (timeSeconds - lastAccessSeconds));

@ -31,7 +31,7 @@ import java.util.Map.Entry;
import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.peers.NewsDB;
import net.yacy.peers.NewsPool;
import net.yacy.peers.Seed;
@ -64,7 +64,7 @@ public class News {
id = check.substring(4);
try {
sb.peers.newsPool.moveOff(tableID, id);
} catch (final Exception ee) {Log.logException(ee);}
} catch (final Exception ee) {ConcurrentLog.logException(ee);}
}
}
}
@ -81,7 +81,7 @@ public class News {
sb.peers.newsPool.moveOffAll(tableID);
}
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}

@ -30,8 +30,8 @@
import java.io.File;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.Cache;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.server.serverObjects;
@ -140,7 +140,7 @@ public class ProxyIndexingMonitor_p {
} catch (final Exception e) {
prop.put("info", "2"); //Error: errmsg
prop.putHTML("info_error", e.getMessage());
Log.logSevere("SERVLET", "ProxyIndexingMonitor.case3", e);
ConcurrentLog.severe("SERVLET", "ProxyIndexingMonitor.case3", e);
}
}

@ -27,9 +27,9 @@ import net.yacy.cora.document.ASCII;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.Scanner;
import net.yacy.cora.protocol.Scanner.Access;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -79,7 +79,7 @@ public class ServerScannerList {
prop.put("servertable_list_" + i + "_edit_preselected", host.getValue() == Access.granted && Scanner.inIndex(apiCommentCache, urlString) == null ? 1 : 0);
i++;
} catch (MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
prop.put("servertable_list", i);

@ -32,7 +32,7 @@ import java.io.File;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.peers.operation.yacyRelease;
import net.yacy.search.Switchboard;
@ -52,13 +52,13 @@ public class Steering {
// handle access rights
if (!sb.verifyAuthentication(header)) {
Log.logInfo("STEERING", "log-in attempt for steering from " + requestIP);
ConcurrentLog.info("STEERING", "log-in attempt for steering from " + requestIP);
prop.authenticationRequired();
return prop;
}
if (post.containsKey("shutdown")) {
Log.logInfo("STEERING", "shutdown request from " + requestIP);
ConcurrentLog.info("STEERING", "shutdown request from " + requestIP);
sb.terminate(10, "shutdown request from Steering; ip = " + requestIP);
prop.put("info", "3");
@ -66,7 +66,7 @@ public class Steering {
}
if (post.containsKey("restart")) {
Log.logInfo("STEERING", "restart request from " + requestIP);
ConcurrentLog.info("STEERING", "restart request from " + requestIP);
yacyRelease.restart();
prop.put("info", "4");
@ -74,7 +74,7 @@ public class Steering {
}
if (post.containsKey("update")) {
Log.logInfo("STEERING", "update request from " + requestIP);
ConcurrentLog.info("STEERING", "update request from " + requestIP);
final boolean devenvironment = new File(sb.getAppPath(), ".git").exists();
final String releaseFileName = post.get("releaseinstall", "");
final File releaseFile = new File(sb.releasePath, releaseFileName);

@ -32,10 +32,10 @@ import java.util.regex.Pattern;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.blob.Tables.Row;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.query.QueryParams;
@ -144,7 +144,7 @@ public class Table_API_p {
WorkTables.calculateAPIScheduler(row, false);
sb.tables.update(WorkTables.TABLE_API_NAME, row);
}
} catch (Throwable e) { Log.logException(e); }
} catch (Throwable e) { ConcurrentLog.logException(e); }
}
if (post != null && !post.get("deleterows", "").isEmpty()) {
@ -153,7 +153,7 @@ public class Table_API_p {
try {
sb.tables.delete(WorkTables.TABLE_API_NAME, entry.getValue().substring(5).getBytes());
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -192,7 +192,7 @@ public class Table_API_p {
}
sb.tables.recordAPICall(post, "Table_API_p.html", WorkTables.TABLE_API_TYPE_STEERING, "delete API calls older than " + days + " days");
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -369,7 +369,7 @@ public class Table_API_p {
prop.put("showschedulerhint", 0);
}
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("showtable_list", count);
prop.put("showtable_num", count);

@ -7,12 +7,12 @@ import java.util.regex.Pattern;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.data.ymark.YMarkEntry;
import net.yacy.data.ymark.YMarkTables;
import net.yacy.data.ymark.YMarkUtil;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -105,7 +105,7 @@ public class Table_YMark_p {
if (columns.isEmpty() && table != null) try {
columns = sb.tables.columns(table);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
count = 0;
@ -121,7 +121,7 @@ public class Table_YMark_p {
count++;
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
prop.put("showselection_columns", count);
@ -149,9 +149,9 @@ public class Table_YMark_p {
if (entry.getValue().startsWith("mark_")) try {
sb.tables.bookmarks.deleteBookmark(bmk_user, entry.getValue().substring(5).getBytes());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -166,7 +166,7 @@ public class Table_YMark_p {
try {
sb.tables.bookmarks.addBookmark(bmk_user, bmk, false, false);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -190,18 +190,18 @@ public class Table_YMark_p {
setEdit(sb, prop, table, pk, columns);
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else if (post.containsKey("addrow")) try {
// get a new key
final String pk = UTF8.String(sb.tables.createRow(table));
setEdit(sb, prop, table, pk, columns);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} else {
prop.put("showtable", 1);
prop.put("showtable_table", table);
@ -212,7 +212,7 @@ public class Table_YMark_p {
prop.put("showtable_tagsize", sb.tables.size(YMarkTables.TABLES.TAGS.tablename(bmk_user)));
prop.put("showtable_foldersize", sb.tables.size(YMarkTables.TABLES.FOLDERS.tablename(bmk_user)));
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("showtable_bmksize", 0);
prop.put("showtable_tagsize", 0);
prop.put("showtable_foldersize", 0);
@ -229,7 +229,7 @@ public class Table_YMark_p {
try {
maxcount = Math.min(maxcount, sb.tables.size(table));
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
maxcount = 0;
}
count = 0;
@ -265,7 +265,7 @@ public class Table_YMark_p {
count++;
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("showtable_list", count);
prop.put("showtable_num", count);

@ -27,9 +27,9 @@ import java.util.regex.Pattern;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -78,7 +78,7 @@ public class Tables_p {
if (entry.getValue().startsWith("pk_")) try {
sb.tables.delete(table, entry.getValue().substring(3).getBytes());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -94,7 +94,7 @@ public class Tables_p {
try {
sb.tables.update(table, pk.getBytes(), map);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -109,7 +109,7 @@ public class Tables_p {
try {
columns = sb.tables.columns(table);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
columns = new ArrayList<String>();
}
@ -127,18 +127,18 @@ public class Tables_p {
setEdit(sb, prop, table, pk, columns);
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else if (post.containsKey("addrow")) try {
// get a new key
final String pk = UTF8.String(sb.tables.createRow(table));
setEdit(sb, prop, table, pk, columns);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} else {
prop.put("showtable", 1);
prop.put("showtable_table", table);
@ -154,7 +154,7 @@ public class Tables_p {
try {
maxcount = Math.min(maxcount, sb.tables.size(table));
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
maxcount = 0;
}
count = 0;
@ -180,7 +180,7 @@ public class Tables_p {
count++;
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("showtable_list", count);
prop.put("showtable_num", count);

@ -24,8 +24,8 @@ import java.net.MalformedURLException;
import net.yacy.cora.lod.JenaTripleStore;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
import net.yacy.server.http.HTTPDemon;
@ -78,9 +78,9 @@ public class Triple_p {
}
JenaTripleStore.load(newurl);
} catch (MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}

@ -32,8 +32,8 @@ import net.yacy.cora.order.Digest;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.server.serverObjects;
@ -135,7 +135,7 @@ public class User{
entry.setProperty(UserDB.Entry.MD5ENCODED_USERPWD_STRING, Digest.encodeMD5Hex(entry.getUserName()+":"+post.get("newpass", "")));
prop.put("status_password", "0"); //changes
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}else{
prop.put("status_password", "3"); //empty

@ -38,11 +38,11 @@ import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.storage.ConcurrentARC;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.data.URLLicense;
import net.yacy.document.ImageParser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.util.MemoryControl;
import net.yacy.kelondro.workflow.WorkflowProcessor;
@ -107,7 +107,7 @@ public class ViewImage {
if (url != null) try {
resourceb = sb.loader.loadContent(sb.loader.request(url, false, true), CacheStrategy.IFEXIST, BlacklistType.SEARCH, CrawlQueues.queuedMinLoadDelay, ClientIdentification.DEFAULT_TIMEOUT);
} catch (final IOException e) {
Log.logFine("ViewImage", "cannot load: " + e.getMessage());
ConcurrentLog.fine("ViewImage", "cannot load: " + e.getMessage());
}
byte[] imgb = null;
if (resourceb == null) {

@ -37,8 +37,8 @@ import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.logging.GuiHandler;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.logging.LogalizerHandler;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -96,7 +96,7 @@ public class ViewLog_p {
final Pattern filterPattern = Pattern.compile(filter,Pattern.MULTILINE);
filterMatcher = filterPattern.matcher("");
} catch (final PatternSyntaxException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
int level = 0;

@ -43,7 +43,7 @@ import java.util.Set;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.peers.Network;
import net.yacy.peers.NewsDB;
import net.yacy.peers.NewsPool;
@ -102,7 +102,7 @@ public class ViewProfile {
final NewsDB.Record record = sb.peers.newsPool.getByOriginator(NewsPool.INCOMING_DB, NewsPool.CATEGORY_PROFILE_UPDATE, seed.hash);
if (record != null) sb.peers.newsPool.moveOff(NewsPool.INCOMING_DB, record.id());
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
// try to get the profile from remote peer
@ -113,7 +113,7 @@ public class ViewProfile {
if (profile == null) {
prop.put("success", "2"); // peer known, but disconnected
} else {
Network.log.logInfo("fetched profile:" + profile);
Network.log.info("fetched profile:" + profile);
prop.put("success", "3"); // everything ok
}
prop.putHTML("success_peername", seed.getName());

@ -33,10 +33,10 @@ import net.yacy.cora.lod.vocabulary.Tagging;
import net.yacy.cora.lod.vocabulary.Tagging.SOTuple;
import net.yacy.cora.lod.vocabulary.YaCyMetadata;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.document.LibraryProvider;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Segment;
import net.yacy.server.serverObjects;
@ -167,7 +167,7 @@ public class Vocabulary_p {
}
}
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}

@ -37,8 +37,8 @@ import net.yacy.cora.document.ASCII;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.sorting.ClusteredScoreMap;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.graphics.WebStructureGraph;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -116,7 +116,7 @@ public class WebStructurePicture_p {
for (int i = 0; i < hostlist.length; i++) {
String host = hostlist[i];
String hash = null;
try {hash = ASCII.String((new DigestURI("http://" + host)).hash(), 6, 6);} catch (final MalformedURLException e) {Log.logException(e);}
try {hash = ASCII.String((new DigestURI("http://" + host)).hash(), 6, 6);} catch (final MalformedURLException e) {ConcurrentLog.logException(e);}
Map.Entry<String, String> centernode = new AbstractMap.SimpleEntry<String, String>(hash, host);
double angle = 2.0d * i * Math.PI / hostlist.length;
if (hostlist.length == 3) angle -= Math.PI / 2;

@ -4,6 +4,7 @@ import java.util.Iterator;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkEntry;
@ -11,7 +12,6 @@ import net.yacy.data.ymark.YMarkRDF;
import net.yacy.data.ymark.YMarkTables;
import net.yacy.data.ymark.YMarkTables.TABLES;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -69,7 +69,7 @@ public class YMarks {
try {
size = sb.tables.bookmarks.getSize(bmk_user);
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
size = 0;
}
prop.put("size", size);

@ -36,10 +36,10 @@ import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.crawler.robots.RobotsTxtEntry;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -93,13 +93,13 @@ public class getpageinfo {
try {
u = new DigestURI(url);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
net.yacy.document.Document scraper = null;
if (u != null) try {
scraper = sb.loader.loadDocument(u, CacheStrategy.IFEXIST, BlacklistType.CRAWLER, CrawlQueues.queuedMinLoadDelay, ClientIdentification.DEFAULT_TIMEOUT);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
// bad things are possible, i.e. that the Server responds with "403 Bad Behavior"
// that should not affect the robots.txt validity
}
@ -157,7 +157,7 @@ public class getpageinfo {
final MultiProtocolURI sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
if (actions.indexOf("oai",0) >= 0) {
@ -196,11 +196,11 @@ public class getpageinfo {
final DocumentBuilder builder = factory.newDocumentBuilder();
return parseXML(builder.parse(url));
} catch (final ParserConfigurationException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
} catch (final SAXException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
} catch (final IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
return "";

@ -36,10 +36,10 @@ import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.crawler.robots.RobotsTxtEntry;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -93,13 +93,13 @@ public class getpageinfo_p {
try {
u = new DigestURI(url);
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
net.yacy.document.Document scraper = null;
if (u != null) try {
scraper = sb.loader.loadDocument(u, CacheStrategy.IFEXIST, BlacklistType.CRAWLER, CrawlQueues.queuedMinLoadDelay, ClientIdentification.DEFAULT_TIMEOUT);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
// bad things are possible, i.e. that the Server responds with "403 Bad Behavior"
// that should not affect the robots.txt validity
}
@ -158,7 +158,7 @@ public class getpageinfo_p {
final MultiProtocolURI sitemapURL = robotsEntry == null ? null : robotsEntry.getSitemap();
prop.putXML("sitemap", sitemapURL == null ? "" : sitemapURL.toString());
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
if (actions.indexOf("oai",0) >= 0) {
@ -190,11 +190,11 @@ public class getpageinfo_p {
final DocumentBuilder builder = factory.newDocumentBuilder();
return parseXML(builder.parse(url));
} catch (final ParserConfigurationException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
} catch (final SAXException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
} catch (final IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
return "";

@ -26,9 +26,9 @@ import java.util.regex.Pattern;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -91,7 +91,7 @@ public class table_p {
if (entry.getValue().startsWith("pk_")) try {
sb.tables.delete(table, entry.getValue().substring(3).getBytes());
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -111,9 +111,9 @@ public class table_p {
sb.tables.update(table, pk.getBytes(), map);
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -126,7 +126,7 @@ public class table_p {
try {
columns = sb.tables.columns(table);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
columns = new ArrayList<String>();
}
@ -149,7 +149,7 @@ public class table_p {
try {
maxcount = Math.min(maxcount, sb.tables.size(table));
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
maxcount = 0;
}
int count = 0;
@ -188,7 +188,7 @@ public class table_p {
count++;
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("showtable_list", count);
prop.put("showtable_num", count);

@ -28,8 +28,8 @@ import java.util.Iterator;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.sorting.Rating;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.index.Row;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Segment;
import net.yacy.server.serverObjects;
@ -39,7 +39,7 @@ public class termlist_p {
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, final serverSwitch env) {
final Log log = new Log("TERMLIST");
final ConcurrentLog log = new ConcurrentLog("TERMLIST");
final serverObjects prop = new serverObjects();
final Switchboard sb = (Switchboard) env;
Segment segment = sb.index;
@ -86,7 +86,7 @@ public class termlist_p {
try {
segment.termIndex().delete(t);
} catch (final IOException e1) {
log.logWarning("Error deleting " + ASCII.String(t), e1);
log.warn("Error deleting " + ASCII.String(t), e1);
e1.printStackTrace();
}
}
@ -104,11 +104,11 @@ public class termlist_p {
prop.put("over10000000", over10000000);
prop.put("over100000000", over100000000);
log.logWarning("finished termlist_p -> terms: " + c);
log.logWarning("maxterm: "+ (maxterm == null ? "" : ASCII.String(maxterm)));
log.logWarning("maxcount: " + maxcount);
log.logWarning("termnumber: " + termnumber);
log.logWarning("totalmemory: " + totalmemory);
log.warn("finished termlist_p -> terms: " + c);
log.warn("maxterm: "+ (maxterm == null ? "" : ASCII.String(maxterm)));
log.warn("maxcount: " + maxcount);
log.warn("termnumber: " + termnumber);
log.warn("totalmemory: " + totalmemory);
// return rewrite properties
return prop;
}
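The termlist_p hunk above shows the instance side of this renaming: the per-servlet logger is now constructed as a ConcurrentLog and the logWarning calls drop their "log" prefix. A minimal sketch of that mapping, assuming the net.yacy.cora.util.ConcurrentLog class is on the classpath and using only the constructor and instance methods that appear in the hunks; the class name, tag and messages below are illustrative, not taken from the repository:

import java.io.IOException;
import net.yacy.cora.util.ConcurrentLog;

// Sketch of the instance-logger usage after the rename (illustrative class, tag and messages).
public class TermlistLoggingSketch {
    public void report(final int terms) {
        // before: final Log log = new Log("TERMLIST");
        final ConcurrentLog log = new ConcurrentLog("TERMLIST");
        try {
            // placeholder for the index deletion that may fail in the real servlet
            throw new IOException("sketch only");
        } catch (final IOException e1) {
            // before: log.logWarning("Error deleting ...", e1);
            log.warn("Error deleting <term>", e1);
        }
        // before: log.logWarning("finished termlist_p -> terms: " + c);
        log.warn("finished termlist_p -> terms: " + terms);
    }
}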

@ -30,9 +30,9 @@ import java.util.Iterator;
import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.kelondro.data.word.WordReference;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.rwi.ReferenceContainer;
import net.yacy.kelondro.rwi.TermSearch;
import net.yacy.kelondro.util.ISO639;
@ -88,7 +88,7 @@ public final class timeline {
try {
search = segment.termIndex().query(q, qg.getExcludeHashes(), null, Segment.wordReferenceFactory, maxdist);
} catch (SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
ReferenceContainer<WordReference> index = search.joined();
@ -111,7 +111,7 @@ public final class timeline {
prop.put("event", c);
// log
Network.log.logInfo("EXIT TIMELINE SEARCH: " +
Network.log.info("EXIT TIMELINE SEARCH: " +
QueryParams.anonymizedQueryHashes(q) + " - " + joincount + " links found, " +
prop.get("linkcount", "?") + " links selected, " +
indexabstractContainercount + " index abstracts, " +

@ -34,10 +34,10 @@ import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.kelondro.data.citation.CitationReference;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.rwi.IndexCell;
import net.yacy.kelondro.rwi.ReferenceContainer;
import net.yacy.peers.graphics.WebStructureGraph;
@ -100,7 +100,7 @@ public class webstructure {
if (url != null) try {
scraper = sb.loader.loadDocument(url, CacheStrategy.IFEXIST, null, CrawlQueues.queuedMinLoadDelay, ClientIdentification.DEFAULT_TIMEOUT);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
if (scraper != null) {
prop.put("references_count", 1);

@ -35,10 +35,10 @@ import net.yacy.cora.lod.JenaTripleStore;
import net.yacy.cora.lod.vocabulary.YaCyMetadata;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.RequestHeader.FileType;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.index.Segment;
import net.yacy.server.serverObjects;
@ -93,7 +93,7 @@ public class yacydoc {
final DigestURI url = new DigestURI(urlstring);
urlhash = ASCII.String(url.hash());
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
if (urlhash == null || urlhash.isEmpty()) return prop;
@ -145,7 +145,7 @@ public class yacydoc {
references += r.toString()+",";
}
Log.logInfo("yacydoc", references);
ConcurrentLog.info("yacydoc", references);
prop.put("taglinks", references);

@ -1,13 +1,13 @@
import java.io.IOException;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkEntry;
import net.yacy.data.ymark.YMarkTables;
import net.yacy.data.ymark.YMarkUtil;
import net.yacy.document.Parser.Failure;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -41,10 +41,10 @@ public class add_ymark {
prop.put("status", "1");
} catch (final IOException e) {
// TODO Auto-generated catch block
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final Failure e) {
// TODO Auto-generated catch block
Log.logException(e);
ConcurrentLog.logException(e);
}
} else if(post.containsKey(YMarkEntry.BOOKMARK.URL.key())) {
@ -72,7 +72,7 @@ public class add_ymark {
try {
sb.tables.bookmarks.addBookmark(bmk_user, bmk, false, false);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("status", "1");
} else {

@ -1,12 +1,12 @@
import java.io.IOException;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkEntry;
import net.yacy.data.ymark.YMarkTables;
import net.yacy.data.ymark.YMarkUtil;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -39,9 +39,9 @@ public class delete_ymark {
sb.tables.bookmarks.deleteBookmark(bmk_user, urlHash);
prop.put("result", "1");
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else {
prop.put(serverObjects.ACTION_AUTHENTICATE, YMarkTables.USER_AUTHENTICATE_MSG);

@ -5,6 +5,7 @@ import java.util.Iterator;
import java.util.regex.Pattern;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkAutoTagger;
import net.yacy.data.ymark.YMarkCrawlStart;
@ -15,7 +16,6 @@ import net.yacy.data.ymark.YMarkUtil;
import net.yacy.document.Document;
import net.yacy.document.Parser.Failure;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -73,15 +73,15 @@ public class get_metadata {
} catch (final MalformedURLException e) {
// TODO Auto-generated catch block
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("status", "error");
} catch (final IOException e) {
// TODO Auto-generated catch block
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("status", "error");
} catch (final Failure e) {
// TODO Auto-generated catch block
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("status", "error");
}
} else {

@ -9,6 +9,7 @@ import java.util.regex.Pattern;
import net.yacy.cora.date.ISO8601Formatter;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkAutoTagger;
@ -22,7 +23,6 @@ import net.yacy.document.Document;
import net.yacy.document.Parser.Failure;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -95,7 +95,7 @@ public class get_treeview {
// it = sb.tables.bookmarks.folders.getFolders(bmk_user, root);
it = sb.tables.bookmarks.getFolders(bmk_user, root).iterator();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
int n = Pattern.compile(YMarkUtil.FOLDERS_SEPARATOR).split(root, 0).length;
if (n == 0) n = 1;
@ -205,9 +205,9 @@ public class get_treeview {
prop.put("folders", count);
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else if (isAutoTagger || isMetadata || isURLdb || isCrawlStart) {
try {
@ -230,7 +230,7 @@ public class get_treeview {
} else if(isURLdb) {
count = putMeta(count, meta.getMetadata());
} else if(isCrawlStart) {
Log.logInfo("YMark", "I am looking for CrawlStart: "+post.get(ROOT).substring(2));
ConcurrentLog.info("YMark", "I am looking for CrawlStart: "+post.get(ROOT).substring(2));
final YMarkCrawlStart crawlStart = new YMarkCrawlStart(sb.tables, post.get(ROOT).substring(2));
final Iterator<String> iter = crawlStart.keySet().iterator();
String key;
@ -244,11 +244,11 @@ public class get_treeview {
}
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final Failure e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
} else {

@ -4,6 +4,7 @@ import java.util.Iterator;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkDate;
import net.yacy.data.ymark.YMarkEntry;
@ -12,7 +13,6 @@ import net.yacy.data.ymark.YMarkUtil;
import net.yacy.data.ymark.YMarkXBELImporter;
import net.yacy.document.parser.html.CharacterCoding;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -62,7 +62,7 @@ public class get_xbel {
try {
fit = sb.tables.bookmarks.getFolders(bmk_user, root).iterator();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
while (fit.hasNext()) {

@ -5,6 +5,7 @@ import java.util.regex.Pattern;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkCrawlStart;
import net.yacy.data.ymark.YMarkDate;
@ -14,7 +15,6 @@ import net.yacy.data.ymark.YMarkUtil;
import net.yacy.data.ymark.YMarkTables.TABLES;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.blob.Tables.Row;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -84,7 +84,7 @@ public class get_ymark {
total = result.size();
bookmarks = result.iterator();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("page", page);
prop.put("total", total);

@ -14,6 +14,7 @@ import java.util.zip.GZIPInputStream;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.BookmarksDB;
import net.yacy.data.UserDB;
import net.yacy.data.WorkTables;
@ -30,7 +31,6 @@ import net.yacy.data.ymark.YMarkXBELImporter;
import net.yacy.document.Parser.Failure;
import net.yacy.document.content.SurrogateReader;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.workflow.InstantBusyThread;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -95,7 +95,7 @@ public class import_ymark {
surrogateReader = new SurrogateReader(stream, queueSize);
} catch (final IOException e) {
//TODO: display an error message
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("status", "0");
return prop;
}
@ -110,7 +110,7 @@ public class import_ymark {
reader = new MonitoredReader(new InputStreamReader(stream,"UTF-8"), 1024*16, bytes.length);
} catch (final UnsupportedEncodingException e1) {
//TODO: display an error message
Log.logException(e1);
ConcurrentLog.logException(e1);
prop.put("status", "0");
return prop;
}
@ -126,7 +126,7 @@ public class import_ymark {
xbelImporter = new YMarkXBELImporter(reader, queueSize, root);
} catch (final SAXException e) {
//TODO: display an error message
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("status", "0");
return prop;
}
@ -161,9 +161,9 @@ public class import_ymark {
}
prop.put("status", "1");
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final Failure e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else if(post.containsKey("importer") && post.get("importer").equals("bmks")) {
if(!isAdmin) {
@ -191,9 +191,9 @@ public class import_ymark {
sb.tables.bookmarks.addBookmark(bmk_user, bmk_entry, merge, true);
prop.put("status", "1");
} catch (final MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} catch (IOException e1) {
}
@ -221,7 +221,7 @@ public class import_ymark {
prop.put("status", "1");
} catch (Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
} else {
@ -255,9 +255,9 @@ public class import_ymark {
}
}
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}

@ -3,12 +3,12 @@ import java.util.Iterator;
import java.util.regex.Pattern;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkTables;
import net.yacy.data.ymark.YMarkUtil;
import net.yacy.data.ymark.YMarkTables.TABLES;
import net.yacy.kelondro.blob.Tables.Row;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -74,7 +74,7 @@ public class manage_tags {
sb.tables.bookmarks.replaceTags(row_iter, bmk_user, tags, replace);
prop.put("status", 1);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
} else {
prop.put(serverObjects.ACTION_AUTHENTICATE, YMarkTables.USER_AUTHENTICATE_MSG);

@ -31,7 +31,7 @@ import net.yacy.cora.federate.solr.connector.EmbeddedSolrConnector;
import net.yacy.cora.federate.solr.responsewriter.GSAResponseWriter;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.search.Switchboard;
import net.yacy.search.query.AccessTracker;
import net.yacy.search.query.QueryGoal;
@ -98,7 +98,7 @@ public class searchresult {
// check post
if (post == null) {post = new serverObjects(); post.put("q", ""); post.put("num", "0");}
Log.logInfo("GSA Query", post.toString());
ConcurrentLog.info("GSA Query", post.toString());
sb.intermissionAllThreads(3000); // tell all threads to do nothing for a specific time
// rename post fields according to result style
@ -189,7 +189,7 @@ public class searchresult {
try {response = connector.query(req);} catch (SolrException ee) {e = ee;}
if (response != null) e = response.getException();
if (e != null) {
Log.logException(e);
ConcurrentLog.logException(e);
if (req != null) req.close();
SolrRequestInfo.clearRequestInfo();
return null;

@ -70,7 +70,7 @@ public class rct_p {
final String urlRejectReason = sb.crawlStacker.urlInAcceptedDomain(url);
if (urlRejectReason == null) {
// stack url
if (sb.getLog().isFinest()) sb.getLog().logFinest("crawlOrder: stack: url='" + url + "'");
if (sb.getLog().isFinest()) sb.getLog().finest("crawlOrder: stack: url='" + url + "'");
sb.crawlStacker.enqueueEntry(new Request(
peerhash.getBytes(),
url,
@ -84,7 +84,7 @@ public class rct_p {
item.getSize()
));
} else {
env.getLog().logWarning("crawlOrder: Rejected URL '" + urlToString(url) + "': " + urlRejectReason);
env.getLog().warn("crawlOrder: Rejected URL '" + urlToString(url) + "': " + urlRejectReason);
}
}
}

@ -39,7 +39,7 @@ import net.yacy.cora.federate.solr.responsewriter.YJsonResponseWriter;
import net.yacy.cora.federate.solr.responsewriter.OpensearchResponseWriter;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.query.AccessTracker;
@ -150,7 +150,7 @@ public class select {
// check post
if (post == null) {post = new serverObjects(); post.put(CommonParams.Q, ""); post.put(CommonParams.ROWS, "0");}
if (post.size() > 100) {
Log.logWarning("select", "rejected bad-formed search request with " + post.size() + " properties from " + header.refererHost());
ConcurrentLog.warn("select", "rejected bad-formed search request with " + post.size() + " properties from " + header.refererHost());
return null; // prevent the worst hacks here...
}
sb.intermissionAllThreads(3000); // tell all threads to do nothing for a specific time
@ -221,7 +221,7 @@ public class select {
try {response = connector.query(req);} catch (SolrException ee) {e = ee;}
if (response != null) e = response.getException();
if (e != null) {
Log.logException(e);
ConcurrentLog.logException(e);
if (req != null) req.close();
SolrRequestInfo.clearRequestInfo();
return null;
@ -246,7 +246,7 @@ public class select {
AccessTracker.addToDump(q, Integer.toString(matches));
}
Log.logInfo("SOLR Query", "results: " + matches + ", for query:" + post.toString());
ConcurrentLog.info("SOLR Query", "results: " + matches + ", for query:" + post.toString());
return null;
}
}

@ -21,7 +21,7 @@
import java.io.OutputStream;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.server.serverObjects;
import net.yacy.server.serverSwitch;
@ -36,7 +36,7 @@ public class update {
}
public static serverObjects respond(@SuppressWarnings("unused") final RequestHeader header, final serverObjects post, @SuppressWarnings("unused") final serverSwitch env, @SuppressWarnings("unused") final OutputStream out) {
Log.logInfo("update", "post = " + post == null ? "NULL" : post.toString());
ConcurrentLog.info("update", "post = " + post == null ? "NULL" : post.toString());
return null;
}
}

@ -31,11 +31,11 @@ import java.io.IOException;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.ResultURLs;
import net.yacy.crawler.data.ResultURLs.EventOrigin;
import net.yacy.crawler.data.ZURL.FailCategory;
import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.Protocol;
import net.yacy.peers.Seed;
import net.yacy.repository.Blacklist.BlacklistType;
@ -59,7 +59,7 @@ public final class crawlReceipt {
return prop;
}
final Log log = sb.getLog();
final ConcurrentLog log = sb.getLog();
//int proxyPrefetchDepth = Integer.parseInt(env.getConfig("proxyPrefetchDepth", "0"));
//int crawlingDepth = Integer.parseInt(env.getConfig("crawlingDepth", "0"));
@ -117,13 +117,13 @@ public final class crawlReceipt {
// generating a new loaded URL entry
final URIMetadataRow entry = URIMetadataRow.importEntry(propStr);
if (entry == null) {
if (log.isWarning()) log.logWarning("crawlReceipt: RECEIVED wrong RECEIPT (entry null) from peer " + iam + "\n\tURL properties: "+ propStr);
if (log.isWarn()) log.warn("crawlReceipt: RECEIVED wrong RECEIPT (entry null) from peer " + iam + "\n\tURL properties: "+ propStr);
prop.put("delay", "3600");
return prop;
}
if (entry.url() == null) {
if (log.isWarning()) log.logWarning("crawlReceipt: RECEIVED wrong RECEIPT (url null) for hash " + ASCII.String(entry.hash()) + " from peer " + iam + "\n\tURL properties: "+ propStr);
if (log.isWarn()) log.warn("crawlReceipt: RECEIVED wrong RECEIPT (url null) for hash " + ASCII.String(entry.hash()) + " from peer " + iam + "\n\tURL properties: "+ propStr);
prop.put("delay", "3600");
return prop;
}
@ -131,7 +131,7 @@ public final class crawlReceipt {
// check if the entry is in our network domain
final String urlRejectReason = sb.crawlStacker.urlInAcceptedDomain(entry.url());
if (urlRejectReason != null) {
log.logWarning("crawlReceipt: RECEIVED wrong RECEIPT (" + urlRejectReason + ") for hash " + ASCII.String(entry.hash()) + " from peer " + iam + "\n\tURL properties: "+ propStr);
log.warn("crawlReceipt: RECEIVED wrong RECEIPT (" + urlRejectReason + ") for hash " + ASCII.String(entry.hash()) + " from peer " + iam + "\n\tURL properties: "+ propStr);
prop.put("delay", "9999");
return prop;
}
@ -139,7 +139,7 @@ public final class crawlReceipt {
// Check URL against DHT blacklist
if (Switchboard.urlBlacklist.isListed(BlacklistType.DHT, entry)) {
// URL is blacklisted
log.logWarning("crawlReceipt: RECEIVED wrong RECEIPT (URL is blacklisted) for URL " + ASCII.String(entry.hash()) + ":" + entry.url().toNormalform(false) + " from peer " + iam);
log.warn("crawlReceipt: RECEIVED wrong RECEIPT (URL is blacklisted) for URL " + ASCII.String(entry.hash()) + ":" + entry.url().toNormalform(false) + " from peer " + iam);
prop.put("delay", "9999");
return prop;
}
@ -149,13 +149,13 @@ public final class crawlReceipt {
sb.index.fulltext().putMetadata(entry);
ResultURLs.stack(ASCII.String(entry.url().hash()), entry.url().getHost(), youare.getBytes(), iam.getBytes(), EventOrigin.REMOTE_RECEIPTS);
sb.crawlQueues.delegatedURL.remove(entry.hash()); // the delegated work has been done
if (log.isInfo()) log.logInfo("crawlReceipt: RECEIVED RECEIPT from " + otherPeerName + " for URL " + ASCII.String(entry.hash()) + ":" + entry.url().toNormalform(false));
if (log.isInfo()) log.info("crawlReceipt: RECEIVED RECEIPT from " + otherPeerName + " for URL " + ASCII.String(entry.hash()) + ":" + entry.url().toNormalform(false));
// ready for more
prop.put("delay", "10");
return prop;
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
prop.put("delay", "3600");
return prop;
}

@ -36,7 +36,7 @@ import java.util.Map;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.peers.Network;
import net.yacy.peers.DHTSelection;
import net.yacy.peers.Protocol;
@ -80,12 +80,12 @@ public final class hello {
final InetAddress ias = Domains.dnsResolve(clientip);
final long time_dnsResolve = System.currentTimeMillis() - time;
if (ias == null) {
Network.log.logInfo("hello/server: failed contacting seed; clientip not resolvable (clientip=" + clientip + ", time_dnsResolve=" + time_dnsResolve + ")");
Network.log.info("hello/server: failed contacting seed; clientip not resolvable (clientip=" + clientip + ", time_dnsResolve=" + time_dnsResolve + ")");
prop.put("message", "cannot resolve your IP from your reported location " + clientip);
return prop;
}
if (seed.length() > Seed.maxsize) {
Network.log.logInfo("hello/server: rejected contacting seed; too large (" + seed.length() + " > " + Seed.maxsize + ", time_dnsResolve=" + time_dnsResolve + ")");
Network.log.info("hello/server: rejected contacting seed; too large (" + seed.length() + " > " + Seed.maxsize + ", time_dnsResolve=" + time_dnsResolve + ")");
prop.put("message", "your seed is too long (" + seed.length() + ")");
return prop;
}
@ -93,13 +93,13 @@ public final class hello {
try {
remoteSeed = Seed.genRemoteSeed(seed, true, ias.getHostAddress());
} catch (final IOException e) {
Network.log.logInfo("hello/server: bad seed: " + e.getMessage() + ", time_dnsResolve=" + time_dnsResolve);
Network.log.info("hello/server: bad seed: " + e.getMessage() + ", time_dnsResolve=" + time_dnsResolve);
prop.put("message", "bad seed: " + e.getMessage());
return prop;
}
if (remoteSeed == null || remoteSeed.hash == null) {
Network.log.logInfo("hello/server: bad seed: null, time_dnsResolve=" + time_dnsResolve);
Network.log.info("hello/server: bad seed: null, time_dnsResolve=" + time_dnsResolve);
prop.put("message", "cannot parse your seed");
return prop;
}
@ -186,12 +186,12 @@ public final class hello {
remoteSeed.put(Seed.PEERTYPE, Seed.PEERTYPE_SENIOR);
}
// connect the seed
Network.log.logInfo("hello/server: responded remote senior peer '" + remoteSeed.getName() + "' from " + reportedip + ", time_dnsResolve=" + time_dnsResolve + ", time_backping=" + time_backping + ", method=" + backping_method + ", urls=" + callback[0]);
Network.log.info("hello/server: responded remote senior peer '" + remoteSeed.getName() + "' from " + reportedip + ", time_dnsResolve=" + time_dnsResolve + ", time_backping=" + time_backping + ", method=" + backping_method + ", urls=" + callback[0]);
sb.peers.peerActions.peerArrival(remoteSeed, true);
} else {
prop.put(Seed.YOURTYPE, Seed.PEERTYPE_JUNIOR);
remoteSeed.put(Seed.PEERTYPE, Seed.PEERTYPE_JUNIOR);
Network.log.logInfo("hello/server: responded remote junior peer '" + remoteSeed.getName() + "' from " + reportedip + ", time_dnsResolve=" + time_dnsResolve + ", time_backping=" + time_backping + ", method=" + backping_method + ", urls=" + callback[0]);
Network.log.info("hello/server: responded remote junior peer '" + remoteSeed.getName() + "' from " + reportedip + ", time_dnsResolve=" + time_dnsResolve + ", time_backping=" + time_backping + ", method=" + backping_method + ", urls=" + callback[0]);
// no connection here, instead store junior in connection cache
if ((remoteSeed.hash != null) && (remoteSeed.isProper(false) == null)) {
sb.peers.peerActions.peerPing(remoteSeed);
@ -202,7 +202,7 @@ public final class hello {
// update event tracker
EventTracker.update(EventTracker.EClass.PEERPING, new ProfilingGraph.EventPing(remoteSeed.getName(), sb.peers.myName(), false, connectedAfter - connectedBefore), false);
if (!(prop.get(Seed.YOURTYPE)).equals(reportedPeerType)) {
Network.log.logInfo("hello/server: changing remote peer '" + remoteSeed.getName() +
Network.log.info("hello/server: changing remote peer '" + remoteSeed.getName() +
"' [" + reportedip +
"] peerType from '" + reportedPeerType +
"' to '" + prop.get(Seed.YOURTYPE) + "'.");
@ -237,7 +237,7 @@ public final class hello {
count++;
}
} catch (final ConcurrentModificationException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -249,7 +249,7 @@ public final class hello {
prop.put("seedlist", seeds.toString());
// return rewrite properties
prop.put("message", "ok " + seed.length());
Network.log.logInfo("hello/server: responded remote peer '" + remoteSeed.getName() + "' [" + reportedip + "] in " + (System.currentTimeMillis() - start) + " milliseconds");
Network.log.info("hello/server: responded remote peer '" + remoteSeed.getName() + "' [" + reportedip + "] in " + (System.currentTimeMillis() - start) + " milliseconds");
return prop;
}

@ -39,8 +39,8 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.MessageBoard;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.Network;
import net.yacy.peers.Protocol;
import net.yacy.peers.Seed;
@ -153,7 +153,7 @@ public final class message {
try {
Files.copy(notifierSource, notifierDest);
} catch (final IOException e) {
Log.logSevere("MESSAGE", "NEW MESSAGE ARRIVED! (error: " + e.getMessage() + ")");
ConcurrentLog.severe("MESSAGE", "NEW MESSAGE ARRIVED! (error: " + e.getMessage() + ")");
}
}
@ -212,7 +212,7 @@ public final class message {
email.print(emailTxt.toString());
email.close();
} catch (final Exception e) {
Network.log.logWarning("message: message forwarding via email failed. ",e);
Network.log.warn("message: message forwarding via email failed. ",e);
}
}

@ -32,7 +32,7 @@ import java.io.IOException;
import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.peers.Network;
import net.yacy.peers.Protocol;
import net.yacy.search.Switchboard;
@ -98,7 +98,7 @@ public final class query {
prop.put("response", sb.index.termIndex() == null ? 0 : sb.index.termIndex().get(env.getBytes(), null).size());
return prop;
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
if (obj.equals("rwicount")) {

@ -196,11 +196,11 @@ public final class search {
try {
remoteSeed = Seed.genRemoteSeed(oseed, false, client);
} catch (final IOException e) {
Network.log.logInfo("yacy.search: access with bad seed: " + e.getMessage());
Network.log.info("yacy.search: access with bad seed: " + e.getMessage());
remoteSeed = null;
}
if (sb.peers == null) {
Network.log.logSevere("yacy.search: seed cache not initialized");
Network.log.severe("yacy.search: seed cache not initialized");
} else {
sb.peers.peerActions.peerArrival(remoteSeed, true);
}
@ -253,7 +253,7 @@ public final class search {
0.0d,
0.0d
);
Network.log.logInfo("INIT HASH SEARCH (abstracts only): " + QueryParams.anonymizedQueryHashes(theQuery.getQueryGoal().getIncludeHashes()) + " - " + theQuery.itemsPerPage() + " links");
Network.log.info("INIT HASH SEARCH (abstracts only): " + QueryParams.anonymizedQueryHashes(theQuery.getQueryGoal().getIncludeHashes()) + " - " + theQuery.itemsPerPage() + " links");
final long timer = System.currentTimeMillis();
//final Map<byte[], ReferenceContainer<WordReference>>[] containers = sb.indexSegment.index().searchTerm(theQuery.queryHashes, theQuery.excludeHashes, plasmaSearchQuery.hashes2StringSet(urls));
@ -317,7 +317,7 @@ public final class search {
0.0d,
0.0d
);
Network.log.logInfo("INIT HASH SEARCH (query-" + abstracts + "): " + QueryParams.anonymizedQueryHashes(theQuery.getQueryGoal().getIncludeHashes()) + " - " + theQuery.itemsPerPage() + " links");
Network.log.info("INIT HASH SEARCH (query-" + abstracts + "): " + QueryParams.anonymizedQueryHashes(theQuery.getQueryGoal().getIncludeHashes()) + " - " + theQuery.itemsPerPage() + " links");
EventChannel.channels(EventChannel.REMOTESEARCH).addMessage(new RSSMessage("Remote Search Request from " + ((remoteSeed == null) ? "unknown" : remoteSeed.getName()), QueryParams.anonymizedQueryHashes(theQuery.getQueryGoal().getIncludeHashes()), ""));
// make event
@ -435,7 +435,7 @@ public final class search {
if (MemoryControl.shortStatus()) sb.remoteSearchTracker.clear();
// log
Network.log.logInfo("EXIT HASH SEARCH: " +
Network.log.info("EXIT HASH SEARCH: " +
QueryParams.anonymizedQueryHashes(theQuery.getQueryGoal().getIncludeHashes()) + " - " + resultCount + " links found, " +
prop.get("linkcount", "?") + " links selected, " +
indexabstractContainercount + " index abstracts, " +

@ -39,11 +39,11 @@ import net.yacy.cora.federate.yacy.Distribution;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.data.word.WordReferenceRow;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.peers.EventChannel;
import net.yacy.peers.Network;
@ -106,39 +106,39 @@ public final class transferRWI {
final StringBuilder unknownURLs = new StringBuilder(6000);
if ((youare == null) || (!youare.equals(sb.peers.mySeed().hash))) {
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.peers.mySeed().hash);
sb.getLog().info("Rejecting RWIs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.peers.mySeed().hash);
result = "wrong_target";
pause = 0;
} else if (otherPeer == null) {
// we dont want to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Not granted. Other Peer is unknown");
sb.getLog().info("Rejecting RWIs from peer " + otherPeerName + ". Not granted. Other Peer is unknown");
result = "not_granted";
pause = 60000;
} else if (!granted) {
// we dont want to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Granted is false");
sb.getLog().info("Rejecting RWIs from peer " + otherPeerName + ". Granted is false");
result = "not_granted";
pause = 60000;
} else if (sb.isRobinsonMode()) {
// we dont want to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Not granted. This peer is in robinson mode");
sb.getLog().info("Rejecting RWIs from peer " + otherPeerName + ". Not granted. This peer is in robinson mode");
result = "not_granted";
pause = 60000;
} else if (sb.index.RWIBufferCount() > cachelimit) {
// we are too busy to receive indexes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". We are too busy (buffersize=" + sb.index.RWIBufferCount() + ").");
sb.getLog().info("Rejecting RWIs from peer " + otherPeerName + ". We are too busy (buffersize=" + sb.index.RWIBufferCount() + ").");
granted = false; // don't accept more words if there are too many words to flush
result = "busy";
pause = 60000;
} else if (otherPeer.getVersion() < 0.75005845 && otherPeer.getVersion() >= 0.75005821) {
// version that sends [B@... hashes
sb.getLog().logInfo("Rejecting RWIs from peer " + otherPeerName + ". Bad version.");
sb.getLog().info("Rejecting RWIs from peer " + otherPeerName + ". Bad version.");
result = "not_granted";
pause = 1800000;
} else {
// we want and can receive indexes
// log value status (currently added to find outOfMemory error
if (sb.getLog().isFine()) sb.getLog().logFine("Processing " + indexes.length + " bytes / " + wordc + " words / " + entryc + " entries from " + otherPeerName);
if (sb.getLog().isFine()) sb.getLog().fine("Processing " + indexes.length + " bytes / " + wordc + " words / " + entryc + " entries from " + otherPeerName);
final long startProcess = System.currentTimeMillis();
// decode request
@ -180,7 +180,7 @@ public final class transferRWI {
// block blacklisted entries
if ((blockBlacklist) && (Switchboard.urlBlacklist.hashInBlacklistedCache(BlacklistType.DHT, urlHash))) {
Network.log.logFine("transferRWI: blocked blacklisted URLHash '" + ASCII.String(urlHash) + "' from peer " + otherPeerName);
Network.log.fine("transferRWI: blocked blacklisted URLHash '" + ASCII.String(urlHash) + "' from peer " + otherPeerName);
blocked++;
continue;
}
@ -188,7 +188,7 @@ public final class transferRWI {
// check if the entry is in our network domain
final String urlRejectReason = sb.crawlStacker.urlInAcceptedDomainHash(urlHash);
if (urlRejectReason != null) {
Network.log.logWarning("transferRWI: blocked URL hash '" + ASCII.String(urlHash) + "' (" + urlRejectReason + ") from peer " + otherPeerName + "; peer is suspected to be a spam-peer (or something is wrong)");
Network.log.warn("transferRWI: blocked URL hash '" + ASCII.String(urlHash) + "' (" + urlRejectReason + ") from peer " + otherPeerName + "; peer is suspected to be a spam-peer (or something is wrong)");
//if (yacyCore.log.isFine()) yacyCore.log.logFine("transferRWI: blocked URL hash '" + urlHash + "' (" + urlRejectReason + ") from peer " + otherPeerName);
blocked++;
continue;
@ -198,7 +198,7 @@ public final class transferRWI {
try {
sb.index.storeRWI(ASCII.getBytes(wordHash), iEntry);
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
serverCore.checkInterruption();
@ -215,7 +215,7 @@ public final class transferRWI {
unknownURL.put(ASCII.getBytes(id));
}
} catch (SpaceExceededException e) {
sb.getLog().logWarning("transferRWI: DB-Error while trying to determine if URL with hash '" + id + "' is known.", e);
sb.getLog().warn("transferRWI: DB-Error while trying to determine if URL with hash '" + id + "' is known.", e);
}
}
sb.peers.mySeed().incRI(received);
@ -228,12 +228,12 @@ public final class transferRWI {
}
if (unknownURLs.length() > 0) { unknownURLs.setLength(unknownURLs.length() - 1); }
if (wordhashes.isEmpty() || received == 0) {
sb.getLog().logInfo("Received 0 RWIs from " + otherPeerName + ", processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, requesting " + unknownURL.size() + " URLs, blocked " + blocked + " RWIs");
sb.getLog().info("Received 0 RWIs from " + otherPeerName + ", processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, requesting " + unknownURL.size() + " URLs, blocked " + blocked + " RWIs");
} else {
final String firstHash = wordhashes.get(0);
final String lastHash = wordhashes.get(wordhashes.size() - 1);
final long avdist = (Distribution.horizontalDHTDistance(firstHash.getBytes(), ASCII.getBytes(sb.peers.mySeed().hash)) + Distribution.horizontalDHTDistance(lastHash.getBytes(), ASCII.getBytes(sb.peers.mySeed().hash))) / 2;
sb.getLog().logInfo("Received " + received + " RWIs, " + wordc + " Words [" + firstHash + " .. " + lastHash + "], processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, " + avdist + ", blocked " + blocked + ", requesting " + unknownURL.size() + "/" + received+ " URLs from " + otherPeerName);
sb.getLog().info("Received " + received + " RWIs, " + wordc + " Words [" + firstHash + " .. " + lastHash + "], processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, " + avdist + ", blocked " + blocked + ", requesting " + unknownURL.size() + "/" + received+ " URLs from " + otherPeerName);
EventChannel.channels(EventChannel.DHTRECEIVE).addMessage(new RSSMessage("Received " + received + " RWIs, " + wordc + " Words [" + firstHash + " .. " + lastHash + "], processed in " + (System.currentTimeMillis() - startProcess) + " milliseconds, " + avdist + ", blocked " + blocked + ", requesting " + unknownURL.size() + "/" + received + " URLs from " + otherPeerName, "", otherPeer.hash));
}
result = "ok";
@ -254,6 +254,6 @@ public final class transferRWI {
* @param msg
*/
private static void logWarning(final String requestIdentifier, final String msg) {
Log.logWarning("transferRWI", requestIdentifier +" "+ msg);
ConcurrentLog.warn("transferRWI", requestIdentifier +" "+ msg);
}
}
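The protocol servlets above (crawlReceipt, hello, transferRWI) use the static facade of the same class, together with the renamed level guards (isWarning becomes isWarn, logFine becomes fine, and so on). A compact summary of those call pairs, again assuming the ConcurrentLog class on the classpath and using only methods that appear in the hunks; the tag and messages are placeholders:

import net.yacy.cora.util.ConcurrentLog;

// Summary of the static-call and guard renames visible in the hunks above (illustrative tag and messages).
final class LogMigrationSketch {
    static void examples(final ConcurrentLog log, final Exception e) {
        ConcurrentLog.logException(e);          // was: Log.logException(e)
        ConcurrentLog.info("TAG", "message");   // was: Log.logInfo("TAG", ...)
        ConcurrentLog.warn("TAG", "message");   // was: Log.logWarning("TAG", ...)
        ConcurrentLog.severe("TAG", "message"); // was: Log.logSevere("TAG", ...)
        ConcurrentLog.fine("TAG", "message");   // was: Log.logFine("TAG", ...)

        if (log.isWarn()) log.warn("guarded warning"); // was: isWarning() / logWarning(...)
        if (log.isFine()) log.fine("guarded fine");    // was: isFine() / logFine(...)
        if (log.isInfo()) log.info("guarded info");    // was: isInfo() / logInfo(...)
    }
}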

@ -36,10 +36,10 @@ import net.yacy.cora.date.GenericFormatter;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.document.RSSMessage;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.ResultURLs;
import net.yacy.crawler.data.ResultURLs.EventOrigin;
import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.logging.Log;
import net.yacy.peers.EventChannel;
import net.yacy.peers.Network;
import net.yacy.peers.Protocol;
@ -79,10 +79,10 @@ public final class transferURL {
final String otherPeerName = iam + ":" + ((otherPeer == null) ? "NULL" : (otherPeer.getName() + "/" + otherPeer.getVersion()));
if ((youare == null) || (!youare.equals(sb.peers.mySeed().hash))) {
Network.log.logInfo("Rejecting URLs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.peers.mySeed().hash);
Network.log.info("Rejecting URLs from peer " + otherPeerName + ". Wrong target. Wanted peer=" + youare + ", iam=" + sb.peers.mySeed().hash);
result = "wrong_target";
} else if ((!granted) || (sb.isRobinsonMode())) {
Network.log.logInfo("Rejecting URLs from peer " + otherPeerName + ". Not granted.");
Network.log.info("Rejecting URLs from peer " + otherPeerName + ". Not granted.");
result = "error_not_granted";
} else {
int received = 0;
@ -98,7 +98,7 @@ public final class transferURL {
// read new lurl-entry
urls = post.get("url" + i);
if (urls == null) {
if (Network.log.isFine()) Network.log.logFine("transferURL: got null URL-string from peer " + otherPeerName);
if (Network.log.isFine()) Network.log.fine("transferURL: got null URL-string from peer " + otherPeerName);
blocked++;
continue;
}
@ -106,28 +106,28 @@ public final class transferURL {
// parse new lurl-entry
lEntry = URIMetadataRow.importEntry(urls);
if (lEntry == null) {
if (Network.log.isWarning()) Network.log.logWarning("transferURL: received invalid URL (entry null) from peer " + otherPeerName + "\n\tURL Property: " + urls);
if (Network.log.isWarn()) Network.log.warn("transferURL: received invalid URL (entry null) from peer " + otherPeerName + "\n\tURL Property: " + urls);
blocked++;
continue;
}
// check if entry is well-formed
if (lEntry.url() == null) {
if (Network.log.isWarning()) Network.log.logWarning("transferURL: received invalid URL from peer " + otherPeerName + "\n\tURL Property: " + urls);
if (Network.log.isWarn()) Network.log.warn("transferURL: received invalid URL from peer " + otherPeerName + "\n\tURL Property: " + urls);
blocked++;
continue;
}
// check whether entry is too old
if (lEntry.freshdate().getTime() <= freshdate) {
if (Network.log.isFine()) Network.log.logFine("transerURL: received too old URL from peer " + otherPeerName + ": " + lEntry.freshdate());
if (Network.log.isFine()) Network.log.fine("transerURL: received too old URL from peer " + otherPeerName + ": " + lEntry.freshdate());
blocked++;
continue;
}
// check if the entry is blacklisted
if ((blockBlacklist) && (Switchboard.urlBlacklist.isListed(BlacklistType.DHT, lEntry))) {
if (Network.log.isFine()) Network.log.logFine("transferURL: blocked blacklisted URL '" + lEntry.url().toNormalform(false) + "' from peer " + otherPeerName);
if (Network.log.isFine()) Network.log.fine("transferURL: blocked blacklisted URL '" + lEntry.url().toNormalform(false) + "' from peer " + otherPeerName);
lEntry = null;
blocked++;
continue;
@ -136,7 +136,7 @@ public final class transferURL {
// check if the entry is in our network domain
final String urlRejectReason = sb.crawlStacker.urlInAcceptedDomain(lEntry.url());
if (urlRejectReason != null) {
if (Network.log.isFine()) Network.log.logFine("transferURL: blocked URL '" + lEntry.url() + "' (" + urlRejectReason + ") from peer " + otherPeerName);
if (Network.log.isFine()) Network.log.fine("transferURL: blocked URL '" + lEntry.url() + "' (" + urlRejectReason + ") from peer " + otherPeerName);
lEntry = null;
blocked++;
continue;
@ -152,14 +152,14 @@ public final class transferURL {
lEntry = lEm.get(id);
// write entry to database
if (Network.log.isFine()) Network.log.logFine("Accepting URL from peer " + otherPeerName + ": " + lEntry.url().toNormalform(true));
if (Network.log.isFine()) Network.log.fine("Accepting URL from peer " + otherPeerName + ": " + lEntry.url().toNormalform(true));
try {
sb.index.fulltext().putMetadata(lEntry);
ResultURLs.stack(ASCII.String(lEntry.url().hash()), lEntry.url().getHost(), iam.getBytes(), iam.getBytes(), EventOrigin.DHT_TRANSFER);
if (Network.log.isFine()) Network.log.logFine("transferURL: received URL '" + lEntry.url().toNormalform(false) + "' from peer " + otherPeerName);
if (Network.log.isFine()) Network.log.fine("transferURL: received URL '" + lEntry.url().toNormalform(false) + "' from peer " + otherPeerName);
received++;
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -167,10 +167,10 @@ public final class transferURL {
sb.peers.mySeed().incRU(received);
// return rewrite properties
Network.log.logInfo("Received " + received + " URLs from peer " + otherPeerName + " in " + (System.currentTimeMillis() - start) + " ms, blocked " + blocked + " URLs");
Network.log.info("Received " + received + " URLs from peer " + otherPeerName + " in " + (System.currentTimeMillis() - start) + " ms, blocked " + blocked + " URLs");
EventChannel.channels(EventChannel.DHTRECEIVE).addMessage(new RSSMessage("Received " + received + ", blocked " + blocked + " URLs from peer " + otherPeerName, "", otherPeer.hash));
if (doublecheck > 0) {
Network.log.logWarning("Received " + doublecheck + "/" + urlc + " double URLs from peer " + otherPeerName); // double should not happen because we demanded only documents which we do not have yet
Network.log.warn("Received " + doublecheck + "/" + urlc + " double URLs from peer " + otherPeerName); // double should not happen because we demanded only documents which we do not have yet
doublevalues = Integer.toString(doublecheck);
}
result = "ok";

@ -54,6 +54,7 @@ import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.data.DidYouMean;
import net.yacy.data.UserDB;
import net.yacy.data.ymark.YMarkTables;
@ -63,7 +64,6 @@ import net.yacy.document.LibraryProvider;
import net.yacy.document.Parser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.Bitfield;
import net.yacy.kelondro.util.Formatter;
import net.yacy.kelondro.util.ISO639;
@ -164,7 +164,7 @@ public class yacysearch {
prop.put("topmenu_resource-select", stealthmode ? 2 : global ? 1 : 0);
if ( post == null || indexSegment == null || env == null || !searchAllowed ) {
if (indexSegment == null) Log.logInfo("yacysearch", "indexSegment == null");
if (indexSegment == null) ConcurrentLog.info("yacysearch", "indexSegment == null");
// we create empty entries for template strings
prop.put("searchagain", "0");
prop.put("former", "");
@ -289,11 +289,11 @@ public class yacysearch {
snippetFetchStrategy = null;
}
block = true;
Log.logWarning("LOCAL_SEARCH", "ACCESS CONTROL: BLACKLISTED CLIENT FROM "
ConcurrentLog.warn("LOCAL_SEARCH", "ACCESS CONTROL: BLACKLISTED CLIENT FROM "
+ client
+ " gets no permission to search");
} else if ( Domains.matchesList(client, sb.networkWhitelist) ) {
Log.logInfo("LOCAL_SEARCH", "ACCESS CONTROL: WHITELISTED CLIENT FROM "
ConcurrentLog.info("LOCAL_SEARCH", "ACCESS CONTROL: WHITELISTED CLIENT FROM "
+ client
+ " gets no search restrictions");
} else if ( !authenticated && !localhostAccess && !intranetMode ) {
@ -309,7 +309,7 @@ public class yacysearch {
if ( global ) {
if ( accInTenMinutes >= 60 || accInOneMinute >= 6 || accInThreeSeconds >= 1 ) {
global = false;
Log.logWarning("LOCAL_SEARCH", "ACCESS CONTROL: CLIENT FROM "
ConcurrentLog.warn("LOCAL_SEARCH", "ACCESS CONTROL: CLIENT FROM "
+ client
+ ": "
+ accInThreeSeconds
@ -325,7 +325,7 @@ public class yacysearch {
if ( snippetFetchStrategy != null && snippetFetchStrategy.isAllowedToFetchOnline() ) {
if ( accInTenMinutes >= 20 || accInOneMinute >= 4 || accInThreeSeconds >= 1 ) {
snippetFetchStrategy = CacheStrategy.CACHEONLY;
Log.logWarning("LOCAL_SEARCH", "ACCESS CONTROL: CLIENT FROM "
ConcurrentLog.warn("LOCAL_SEARCH", "ACCESS CONTROL: CLIENT FROM "
+ client
+ ": "
+ accInThreeSeconds
@ -340,7 +340,7 @@ public class yacysearch {
// general load protection
if ( accInTenMinutes >= 3000 || accInOneMinute >= 600 || accInThreeSeconds >= 60 ) {
block = true;
Log.logWarning("LOCAL_SEARCH", "ACCESS CONTROL: CLIENT FROM "
ConcurrentLog.warn("LOCAL_SEARCH", "ACCESS CONTROL: CLIENT FROM "
+ client
+ ": "
+ accInThreeSeconds
@ -560,7 +560,7 @@ public class yacysearch {
// delete the search history since this still shows the entry
SearchEventCache.delete(delHash);
} catch ( final IOException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -626,7 +626,7 @@ public class yacysearch {
try {
Pattern.compile(urlmask);
} catch ( final PatternSyntaxException ex ) {
SearchEvent.log.logWarning("Illegal URL mask, not a valid regex: " + urlmask);
SearchEvent.log.warn("Illegal URL mask, not a valid regex: " + urlmask);
prop.put("urlmaskerror", 1);
prop.putHTML("urlmaskerror_urlmask", urlmask);
urlmask = ".*";
@ -635,7 +635,7 @@ public class yacysearch {
try {
Pattern.compile(prefermask);
} catch ( final PatternSyntaxException ex ) {
SearchEvent.log.logWarning("Illegal prefer mask, not a valid regex: " + prefermask);
SearchEvent.log.warn("Illegal prefer mask, not a valid regex: " + prefermask);
prop.put("prefermaskerror", 1);
prop.putHTML("prefermaskerror_prefermask", prefermask);
prefermask = "";
@ -685,7 +685,7 @@ public class yacysearch {
theQuery.getQueryGoal().filterOut(Switchboard.blueList);
// log
Log.logInfo(
ConcurrentLog.info(
"LOCAL_SEARCH",
"INIT WORD SEARCH: "
+ theQuery.getQueryGoal().getOriginalQueryString(false)
@ -738,7 +738,7 @@ public class yacysearch {
}
// log
Log.logInfo("LOCAL_SEARCH", "EXIT WORD SEARCH: "
ConcurrentLog.info("LOCAL_SEARCH", "EXIT WORD SEARCH: "
+ theQuery.getQueryGoal().getOriginalQueryString(false)
+ " - "
+ "local_rwi_available(" + theSearch.local_rwi_available.get() + "), "
@ -830,7 +830,7 @@ public class yacysearch {
sb.localSearchTracker.clear();
}
} catch ( final Exception e ) {
Log.logException(e);
ConcurrentLog.logException(e);
}
prop.put("num-results_offset", startRecord == 0 ? 0 : startRecord + 1);

@ -38,10 +38,10 @@ import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.RequestHeader.FileType;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.Cache;
import net.yacy.data.URLLicense;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.Formatter;
import net.yacy.peers.NewsPool;
import net.yacy.peers.Seed;
@ -124,7 +124,7 @@ public class yacysearchitem {
if ((fileType == FileType.HTML || fileType == FileType.JSON) && !sb.isIntranetMode()) try {
faviconURL = new DigestURI(resultURL.getProtocol() + "://" + resultURL.getHost() + ((port != -1) ? (":" + port) : "") + "/favicon.ico");
} catch (final MalformedURLException e1) {
Log.logException(e1);
ConcurrentLog.logException(e1);
faviconURL = null;
}
final String resource = theSearch.query.domType.toString();

@ -6,7 +6,7 @@ import java.util.HashMap;
import java.util.Map.Entry;
import java.util.concurrent.ArrayBlockingQueue;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import org.json.simple.parser.ContentHandler;
import org.json.simple.parser.JSONParser;
@ -88,7 +88,7 @@ public class SMWListImporter implements Runnable, ContentHandler{
this.listEntries.put(this.row);
//this.count++;
} catch (InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
this.obj.clear();
this.row = new SMWListRow();
@ -134,20 +134,20 @@ public class SMWListImporter implements Runnable, ContentHandler{
@Override
public void run() {
try {
Log.logInfo("SMWLISTSYNC", "Importer run()");
ConcurrentLog.info("SMWLISTSYNC", "Importer run()");
this.parser.parse(this.importFile, this, true);
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (ParseException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} finally {
try {
Log.logInfo("SMWLISTSYNC", "Importer inserted poison pill in queue");
ConcurrentLog.info("SMWLISTSYNC", "Importer inserted poison pill in queue");
this.listEntries.put(SMWListRow.POISON);
} catch (InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -156,7 +156,7 @@ public class SMWListImporter implements Runnable, ContentHandler{
try {
return this.listEntries.take();
} catch (InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
}
}

@ -5,7 +5,7 @@ import java.io.Reader;
import java.util.Iterator;
import java.util.concurrent.ArrayBlockingQueue;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
@ -29,7 +29,7 @@ public class SMWListImporterFormatObsolete implements Runnable{
@Override
public void run() {
try {
Log.logInfo("SMWLISTSYNC", "Importer run()");
ConcurrentLog.info("SMWLISTSYNC", "Importer run()");
Object obj = this.parser.parse(this.importFile);
JSONObject jsonObject = (JSONObject) obj;
@ -43,16 +43,16 @@ public class SMWListImporterFormatObsolete implements Runnable{
}
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (ParseException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} finally {
try {
Log.logInfo("SMWLISTSYNC", "Importer inserted poison pill in queue");
ConcurrentLog.info("SMWLISTSYNC", "Importer inserted poison pill in queue");
this.listEntries.put(SMWListRow.POISON);
} catch (InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -82,7 +82,7 @@ public class SMWListImporterFormatObsolete implements Runnable{
this.listEntries.put(row);
} catch (Exception e) {
Log.logInfo("SMWLISTSYNC", "import of entry failed");
ConcurrentLog.info("SMWLISTSYNC", "import of entry failed");
}
}
@ -110,7 +110,7 @@ public class SMWListImporterFormatObsolete implements Runnable{
try {
return this.listEntries.take();
} catch (InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
}
}

@ -8,7 +8,7 @@ import java.net.URL;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.http.HTTPClient;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.search.Switchboard;
public class SMWListSyncThread {
@ -85,7 +85,7 @@ public class SMWListSyncThread {
this.currentmax = Integer.parseInt(overallcount);
if (this.currentmax > 0) {
Log.logInfo("SMWLISTSYNC",
ConcurrentLog.info("SMWLISTSYNC",
"import job counts "
+ this.currentmax
+ " new elements between "
@ -99,7 +99,7 @@ public class SMWListSyncThread {
this.offset = 0;
}
} else {
Log.logWarning("SMWLISTSYNC",
ConcurrentLog.warn("SMWLISTSYNC",
"No SMWimport URL defined");
}
} catch (MalformedURLException e) {
@ -114,7 +114,7 @@ public class SMWListSyncThread {
} else {
// there are new elements to be imported
Log.logInfo("SMWLISTSYNC",
ConcurrentLog.info("SMWLISTSYNC",
"importing max. " + this.limit
+ " elements at " + this.offset + " of "
+ this.currentmax + ", since "
@ -146,7 +146,7 @@ public class SMWListSyncThread {
reader = new InputStreamReader(
urlImport.openStream(), "UTF-8");
} catch (Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
this.runningjob = false;
}
@ -157,7 +157,7 @@ public class SMWListSyncThread {
reader, 200);
} catch (final Exception e) {
// TODO: display an error message
Log.logException(e);
ConcurrentLog.logException(e);
this.runningjob = false;
}
Thread t;

@ -29,10 +29,10 @@ import java.util.Set;
import net.yacy.cora.federate.solr.connector.SolrConnector;
import net.yacy.cora.storage.Configuration;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.document.parser.xml.opensearchdescriptionReader;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
import net.yacy.search.query.SearchEvent;
@ -79,13 +79,13 @@ public class OpenSearchConnector {
try {
sb.tables.insert("opensearchsys", row);
} catch (SpaceExceededException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
}
}
size = sb.tables.size("opensearchsys");
} catch (IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
}
}
@ -112,7 +112,7 @@ public class OpenSearchConnector {
sb.heuristicRSS(parseSearchTemplate(osurl, "$", 0, theSearch.query.itemsPerPage), theSearch, "opensearch:" + name);
}
} catch (IOException ex) {
Log.logWarning("OpenSearchConnector.query", "failed reading table opensearchsys");
ConcurrentLog.warn("OpenSearchConnector.query", "failed reading table opensearchsys");
}
}
}
@ -152,12 +152,12 @@ public class OpenSearchConnector {
try {
conf.commit();
} catch (IOException ex) {
Log.logWarning("OpenSearchConnector.add", "config file write error");
ConcurrentLog.warn("OpenSearchConnector.add", "config file write error");
}
return true;
}
} catch (IOException e1) {
Log.logException(e1);
ConcurrentLog.logException(e1);
return false;
}
return false;
@ -184,14 +184,14 @@ public class OpenSearchConnector {
final SolrConnector connector = sb.index.fulltext().getWebgraphConnector();
// check if needed Solr fields are available (selected)
if (connector == null) {
Log.logSevere("OpenSearchConnector.Discover", "Error on connecting to embedded Solr webgraph index");
ConcurrentLog.severe("OpenSearchConnector.Discover", "Error on connecting to embedded Solr webgraph index");
return false;
}
final boolean metafieldavailable = sb.index.fulltext().getWebgraphConfiguration().contains(WebgraphSchema.target_rel_s.name())
&& ( sb.index.fulltext().getWebgraphConfiguration().contains(WebgraphSchema.target_protocol_s.name()) && sb.index.fulltext().getWebgraphConfiguration().contains(WebgraphSchema.target_urlstub_s.name()) )
&& sb.getConfigBool(SwitchboardConstants.CORE_SERVICE_WEBGRAPH, false);
if (!metafieldavailable) {
Log.logWarning("OpenSearchConnector.Discover", "webgraph option and webgraph Schema fields target_rel_s, target_protocol_s and target_urlstub_s must be switched on");
ConcurrentLog.warn("OpenSearchConnector.Discover", "webgraph option and webgraph Schema fields target_rel_s, target_protocol_s and target_urlstub_s must be switched on");
return false;
}
// the solr query
@ -204,12 +204,12 @@ public class OpenSearchConnector {
SolrDocumentList docList = connector.getDocumentListByQuery(webgraphquerystr, 0, 1, webgraphqueryfields);
numfound = docList.getNumFound();
if (numfound == 0) {
Log.logInfo("OpenSearchConnector.Discover", "no results found, abort discover job");
ConcurrentLog.info("OpenSearchConnector.Discover", "no results found, abort discover job");
return true;
}
Log.logInfo("OpenSearchConnector.Discover", "start checking " + Long.toString(numfound) + " found index results");
ConcurrentLog.info("OpenSearchConnector.Discover", "start checking " + Long.toString(numfound) + " found index results");
} catch (IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
return false;
}
@ -225,12 +225,12 @@ public class OpenSearchConnector {
int loopnr = 0;
Set<String> dblmem = new HashSet<String>(); // temp memory for already checked url
while (doloop) {
Log.logInfo("OpenSearchConnector.Discover", "start Solr query loop at " + Integer.toString(loopnr * 20) + " of " + Long.toString(numfound));
ConcurrentLog.info("OpenSearchConnector.Discover", "start Solr query loop at " + Integer.toString(loopnr * 20) + " of " + Long.toString(numfound));
SolrDocumentList docList = connector.getDocumentListByQuery(webgraphquerystr, loopnr * 20, 20,webgraphqueryfields); // check chunk of 20 result documents
loopnr++;
if (stoptime < System.currentTimeMillis()) {// stop after max 1h
doloop = false;
Log.logInfo("OpenSearchConnector.Discover", "long running discover task aborted");
ConcurrentLog.info("OpenSearchConnector.Discover", "long running discover task aborted");
}
if (docList != null && docList.size() > 0) {
Iterator<SolrDocument> docidx = docList.iterator();
@ -246,9 +246,9 @@ public class OpenSearchConnector {
if (os.getRSSorAtomUrl() != null) {
// add found system to config file
add(os.getShortName(), os.getRSSorAtomUrl(), false, os.getItem("LongName"));
Log.logInfo("OpenSearchConnector.Discover", "added " + os.getShortName() + " " + hrefurltxt);
ConcurrentLog.info("OpenSearchConnector.Discover", "added " + os.getShortName() + " " + hrefurltxt);
} else {
Log.logInfo("OpenSearchConnector.Discover", "osd.xml check failed (no RSS or Atom support) for " + hrefurltxt);
ConcurrentLog.info("OpenSearchConnector.Discover", "osd.xml check failed (no RSS or Atom support) for " + hrefurltxt);
}
}
} catch (MalformedURLException ex) {
@ -258,9 +258,9 @@ public class OpenSearchConnector {
doloop = false;
}
}
Log.logInfo("OpenSearchConnector.Discover", "finisched Solr query (checked " + Integer.toString(dblmem.size()) + " unique opensearchdescription links found in " + Long.toString(numfound) + " results)");
ConcurrentLog.info("OpenSearchConnector.Discover", "finisched Solr query (checked " + Integer.toString(dblmem.size()) + " unique opensearchdescription links found in " + Long.toString(numfound) + " results)");
} catch (IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
}
};

@ -33,10 +33,10 @@ import java.util.concurrent.LinkedBlockingQueue;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.sorting.ReversibleScoreMap;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.MemoryControl;
import net.yacy.search.schema.CollectionSchema;
@ -72,11 +72,11 @@ public class ConcurrentUpdateSolrConnector implements SolrConnector {
ConcurrentUpdateSolrConnector.this.connector.deleteById(id);
ConcurrentUpdateSolrConnector.this.idCache.remove(ASCII.getBytes(id));
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
} catch (InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -91,11 +91,11 @@ public class ConcurrentUpdateSolrConnector implements SolrConnector {
updateIdCache((String) doc.getFieldValue(CollectionSchema.id.getSolrFieldName()));
ConcurrentUpdateSolrConnector.this.connector.add(doc);
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
} catch (InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -301,7 +301,7 @@ public class ConcurrentUpdateSolrConnector implements SolrConnector {
ConcurrentUpdateSolrConnector.this.connector.deleteByQuery(querystring);
ConcurrentUpdateSolrConnector.this.idCache.clear();
} catch (IOException e) {
Log.logSevere("ConcurrentUpdateSolrConnector", e.getMessage(), e);
ConcurrentLog.severe("ConcurrentUpdateSolrConnector", e.getMessage(), e);
}
ConcurrentUpdateSolrConnector.this.connector.commit(true);
}

@ -26,7 +26,7 @@ import java.util.List;
import net.yacy.cora.federate.solr.instance.EmbeddedInstance;
import net.yacy.cora.federate.solr.instance.SolrInstance;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
@ -142,9 +142,9 @@ public class EmbeddedSolrConnector extends SolrServerConnector implements SolrCo
@Override
public synchronized void close() {
try {this.commit(false);} catch (Throwable e) {Log.logException(e);}
try {super.close();} catch (Throwable e) {Log.logException(e);}
try {this.core.close();} catch (Throwable e) {Log.logException(e);}
try {this.commit(false);} catch (Throwable e) {ConcurrentLog.logException(e);}
try {super.close();} catch (Throwable e) {ConcurrentLog.logException(e);}
try {this.core.close();} catch (Throwable e) {ConcurrentLog.logException(e);}
}
public SolrQueryRequest request(final SolrParams params) {

@ -28,7 +28,7 @@ import java.security.NoSuchAlgorithmException;
import java.util.concurrent.atomic.AtomicLong;
import net.yacy.cora.document.ASCII;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.search.schema.CollectionSchema;
import org.apache.solr.common.SolrInputDocument;
@ -66,7 +66,7 @@ public class ShardSelection {
try {
return selectURL(url);
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return 0;
}
}

@ -26,7 +26,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.search.schema.CollectionSchema;
import org.apache.log4j.Logger;
@ -85,7 +85,7 @@ public abstract class SolrServerConnector extends AbstractSolrConnector implemen
try {
this.server.optimize(true, true, maxSegments);
} catch (Throwable e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -97,7 +97,7 @@ public abstract class SolrServerConnector extends AbstractSolrConnector implemen
if (this.server instanceof EmbeddedSolrServer) synchronized (this.server) {this.server.commit(true, true, false);}
this.server = null;
} catch (Throwable e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -199,27 +199,27 @@ public abstract class SolrServerConnector extends AbstractSolrConnector implemen
if (solrdoc.containsKey("_version_")) solrdoc.setField("_version_",0L); // prevent Solr "version conflict"
this.server.add(solrdoc, -1);
} catch (Throwable e) {
Log.logException(e);
ConcurrentLog.logException(e);
// catches "version conflict for": try this again and delete the document in advance
try {
this.server.deleteById((String) solrdoc.getFieldValue(CollectionSchema.id.getSolrFieldName()));
} catch (SolrServerException e1) {
Log.logException(e1);
ConcurrentLog.logException(e1);
}
try {
this.server.add(solrdoc, -1);
} catch (Throwable ee) {
Log.logException(ee);
ConcurrentLog.logException(ee);
try {
this.server.commit();
} catch (Throwable eee) {
Log.logException(eee);
ConcurrentLog.logException(eee);
// a time-out may occur here
}
try {
this.server.add(solrdoc, -1);
} catch (Throwable eee) {
Log.logException(eee);
ConcurrentLog.logException(eee);
throw new IOException(eee);
}
}
@ -237,25 +237,25 @@ public abstract class SolrServerConnector extends AbstractSolrConnector implemen
}
this.server.add(solrdocs, -1);
} catch (Throwable e) {
Log.logException(e);
ConcurrentLog.logException(e);
// catches "version conflict for": try this again and delete the document in advance
List<String> ids = new ArrayList<String>();
for (SolrInputDocument solrdoc : solrdocs) ids.add((String) solrdoc.getFieldValue(CollectionSchema.id.getSolrFieldName()));
try {
this.server.deleteById(ids);
} catch (SolrServerException e1) {
Log.logException(e1);
ConcurrentLog.logException(e1);
}
try {
this.server.commit();
} catch (Throwable eee) {
Log.logException(eee);
ConcurrentLog.logException(eee);
// a time-out may occur here
}
try {
this.server.add(solrdocs, -1);
} catch (Throwable ee) {
Log.logException(ee);
ConcurrentLog.logException(ee);
log.warn(e.getMessage() + " IDs=" + ids.toString());
throw new IOException(ee);
}

@ -26,7 +26,7 @@ import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.kelondro.util.MemoryControl;
import org.apache.solr.client.solrj.SolrServer;
@ -85,7 +85,7 @@ public class EmbeddedInstance implements SolrInstance {
// get the default core from the coreContainer
this.defaultCoreName = this.coreContainer.getDefaultCoreName();
assert(this.defaultCoreName.equals(givenDefaultCoreName));
Log.logInfo("SolrEmbeddedInstance", "detected default solr core: " + this.defaultCoreName);
ConcurrentLog.info("SolrEmbeddedInstance", "detected default solr core: " + this.defaultCoreName);
this.defaultCore = this.coreContainer.getCore(this.defaultCoreName);
assert givenDefaultCoreName.equals(this.defaultCore.getName()) : "givenDefaultCoreName = " + givenDefaultCoreName + ", this.defaultCore.getName() = " + this.defaultCore.getName();
if (this.defaultCore == null) {
@ -154,7 +154,7 @@ public class EmbeddedInstance implements SolrInstance {
if (source.exists()) {
try {
Files.copy(source, target);
Log.logFine("initializeCoreConf", "overwrite " + target.getAbsolutePath() + " with " + source.getAbsolutePath());
ConcurrentLog.fine("initializeCoreConf", "overwrite " + target.getAbsolutePath() + " with " + source.getAbsolutePath());
} catch (IOException ex) {
ex.printStackTrace();
}
@ -204,7 +204,7 @@ public class EmbeddedInstance implements SolrInstance {
@Override
public synchronized void close() {
try {this.coreContainer.shutdown();} catch (Throwable e) {Log.logException(e);}
try {this.coreContainer.shutdown();} catch (Throwable e) {ConcurrentLog.logException(e);}
}
}

@ -30,7 +30,7 @@ import java.util.Map;
import net.yacy.cora.document.MultiProtocolURI;
import net.yacy.cora.protocol.Domains;
import net.yacy.kelondro.logging.Log;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.search.schema.CollectionSchema;
import net.yacy.search.schema.WebgraphSchema;
@ -231,10 +231,10 @@ public class RemoteInstance implements SolrInstance {
int port = u.getPort();
String solrpath = u.getPath();
String p = "http://" + host + ":" + port + solrpath;
Log.logInfo("RemoteSolrConnector", "connecting Solr authenticated with url:" + p);
ConcurrentLog.info("RemoteSolrConnector", "connecting Solr authenticated with url:" + p);
s = new HttpSolrServer(p, client);
} else {
Log.logInfo("RemoteSolrConnector", "connecting Solr with url:" + this.solrurl + name);
ConcurrentLog.info("RemoteSolrConnector", "connecting Solr with url:" + this.solrurl + name);
s = new HttpSolrServer(this.solrurl + name);
}
s.setAllowCompression(true);

@ -40,8 +40,8 @@ import java.util.TreeSet;
import java.util.zip.GZIPInputStream;
import net.yacy.cora.document.WordCache;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.StringBuilderComparator;
import net.yacy.kelondro.logging.Log;
/**
* this class loads and parses database dumps from the OpenGeoDB project files can be loaded from
@ -163,7 +163,7 @@ public class OpenGeoDBLocation implements Locations
}
reader.close();
} catch ( final IOException e ) {
Log.logException(e);
ConcurrentLog.logException(e);
} finally {
if ( reader != null ) {
try {

@ -7,10 +7,6 @@
* added html generation for directories: 5.9.2006
* migrated to the cora package and re-licensed under lgpl: 23.08.2010
*
* $LastChangedDate$
* $LastChangedRevision$
* $LastChangedBy$
*
* This file is part of YaCy Content Integration
*
* This library is free software; you can redistribute it and/or
@ -71,13 +67,11 @@ import java.util.regex.Pattern;
import net.yacy.cora.document.UTF8;
import net.yacy.cora.protocol.Domains;
import net.yacy.kelondro.logging.Log;
import org.apache.log4j.Logger;
import net.yacy.cora.util.ConcurrentLog;
public class FTPClient {
private static Logger log = Logger.getLogger(FTPClient.class);
private static ConcurrentLog log = new ConcurrentLog("FTPClient");
private static final String vDATE = "20100823";
@ -174,14 +168,14 @@ public class FTPClient {
if (notConnected()) {
// the error was probably caused because there is no
// connection
log.error("not connected. no effect.", e);
log.warn("not connected. no effect.", e);
} else {
log.error("ftp internal exception: target exception " + e);
log.warn("ftp internal exception: target exception " + e);
}
return ret;
}
} catch (final IllegalAccessException e) {
log.error("ftp internal exception: wrong access " + e);
log.warn("ftp internal exception: wrong access " + e);
return ret;
} catch (final NoSuchMethodException e) {
// consider first that the user attempted to execute a java
@ -192,7 +186,7 @@ public class FTPClient {
try {
javaexec(this.cmd);
} catch (final Exception ee) {
log.error("Command '" + this.cmd[0] + "' not supported. Try 'HELP'.");
log.warn("Command '" + this.cmd[0] + "' not supported. Try 'HELP'.");
}
} else {
// try a remote exec
@ -292,7 +286,7 @@ public class FTPClient {
// pr.put("java.class.path", "" + pr.get("user.dir") +
// pr.get("path.separator") + origPath);
// log.error("System Properties: " + pr.toString());
// log.warning("System Properties: " + pr.toString());
System.setProperties(pr);
@ -319,31 +313,31 @@ public class FTPClient {
this.currentLocalPath = new File((String) pr.get("user.dir"));
} catch (final ClassNotFoundException e) {
// log.error("cannot find class file " + obj +
// log.warning("cannot find class file " + obj +
// ".class");
// class file does not exist, go silently over it to not show
// everybody that the
// system attempted to load a class file
log.error("Command '" + obj + "' not supported. Try 'HELP'.");
log.warn("Command '" + obj + "' not supported. Try 'HELP'.");
} catch (final NoSuchMethodException e) {
log.error("no \"public static main(String args[])\" in " + obj);
log.warn("no \"public static main(String args[])\" in " + obj);
} catch (final InvocationTargetException e) {
final Throwable orig = e.getTargetException();
if (orig.getMessage() != null) {
log.error("Exception from " + obj + ": " + orig.getMessage(), orig);
log.warn("Exception from " + obj + ": " + orig.getMessage(), orig);
}
} catch (final IllegalAccessException e) {
log.error("Illegal access for " + obj + ": class is probably not declared as public", e);
log.warn("Illegal access for " + obj + ": class is probably not declared as public", e);
} catch (final NullPointerException e) {
log.error("main(String args[]) is not defined as static for " + obj);
log.warn("main(String args[]) is not defined as static for " + obj);
/*
* } catch (IOException e) { // class file does not exist, go
* silently over it to not show everybody that the // system
* attempted to load a class file log.error("Command '" + obj + "'
* attempted to load a class file log.warning("Command '" + obj + "'
* not supported. Try 'HELP'.");
*/
} catch (final Exception e) {
log.error("Exception caught: ", e);
log.warn("Exception caught: ", e);
}
// set the classpath to its original definition
@ -355,26 +349,26 @@ public class FTPClient {
public boolean ASCII() {
if (this.cmd.length != 1) {
log.error("Syntax: ASCII (no parameter)");
log.warn("Syntax: ASCII (no parameter)");
return true;
}
try {
literal("TYPE A");
} catch (final IOException e) {
log.error("Error: ASCII transfer type not supported by server.");
log.warn("Error: ASCII transfer type not supported by server.");
}
return true;
}
public boolean BINARY() {
if (this.cmd.length != 1) {
log.error("Syntax: BINARY (no parameter)");
log.warn("Syntax: BINARY (no parameter)");
return true;
}
try {
literal("TYPE I");
} catch (final IOException e) {
log.error("Error: BINARY transfer type not supported by server.");
log.warn("Error: BINARY transfer type not supported by server.");
}
return true;
}
@ -385,7 +379,7 @@ public class FTPClient {
public boolean CD() {
if (this.cmd.length != 2) {
log.error("Syntax: CD <path>");
log.warn("Syntax: CD <path>");
return true;
}
if (notConnected()) {
@ -400,7 +394,7 @@ public class FTPClient {
throw new IOException(reply);
}
} catch (final IOException e) {
log.error("Error: change of working directory to path " + this.cmd[1] + " failed.");
log.warn("Error: change of working directory to path " + this.cmd[1] + " failed.");
}
return true;
}
@ -445,7 +439,7 @@ public class FTPClient {
public boolean DEL() {
if (this.cmd.length != 2) {
log.error("Syntax: DEL <file>");
log.warn("Syntax: DEL <file>");
return true;
}
if (notConnected()) {
@ -454,7 +448,7 @@ public class FTPClient {
try {
rmForced(this.cmd[1]);
} catch (final IOException e) {
log.error("Error: deletion of file " + this.cmd[1] + " failed.");
log.warn("Error: deletion of file " + this.cmd[1] + " failed.");
}
return true;
}
@ -465,7 +459,7 @@ public class FTPClient {
public boolean DIR() {
if (this.cmd.length > 2) {
log.error("Syntax: DIR [<path>|<file>]");
log.warn("Syntax: DIR [<path>|<file>]");
return true;
}
if (notConnected()) {
@ -480,7 +474,7 @@ public class FTPClient {
}
printElements(l);
} catch (final IOException e) {
log.error("Error: remote list not available (1): " + e.getMessage());
log.warn("Error: remote list not available (1): " + e.getMessage());
}
return true;
}
@ -528,7 +522,7 @@ public class FTPClient {
public boolean GET() {
if ((this.cmd.length < 2) || (this.cmd.length > 3)) {
log.error("Syntax: GET <remote-file> [<local-file>]");
log.warn("Syntax: GET <remote-file> [<local-file>]");
return true;
}
final String remote = this.cmd[1]; // (new File(cmd[1])).getName();
@ -538,7 +532,7 @@ public class FTPClient {
final File local = absoluteLocalFile(localFilename);
if (local.exists()) {
log.error("Error: local file " + local.toString() + " already exists.\n" + " File " + remote
log.warn("Error: local file " + local.toString() + " already exists.\n" + " File " + remote
+ " not retrieved. Local file unchanged.");
} else {
if (withoutLocalFile) {
@ -547,7 +541,7 @@ public class FTPClient {
try {
get(local.getAbsolutePath(), remote);
} catch (final IOException e) {
log.error("Error: retrieving file " + remote + " failed. (" + e.getMessage() + ")");
log.warn("Error: retrieving file " + remote + " failed. (" + e.getMessage() + ")");
}
}
}
@ -578,7 +572,7 @@ public class FTPClient {
rmForced(remote);
}
} catch (final IOException eee) {
log.error("Warning: remote file or path " + remote + " cannot be removed.");
log.warn("Warning: remote file or path " + remote + " cannot be removed.");
}
} catch (final IOException e) {
if (e.getMessage().startsWith("550")) {
@ -601,13 +595,13 @@ public class FTPClient {
rmForced(remote);
}
} catch (final IOException eee) {
log.error("Warning: remote file or path " + remote + " cannot be removed.");
log.warn("Warning: remote file or path " + remote + " cannot be removed.");
}
} else {
log.error("Error: remote file or path " + remote + " does not exist.");
log.warn("Error: remote file or path " + remote + " does not exist.");
}
} else {
log.error("Error: retrieving file " + remote + " failed. (" + e.getMessage() + ")");
log.warn("Error: retrieving file " + remote + " failed. (" + e.getMessage() + ")");
}
}
}
@ -652,7 +646,7 @@ public class FTPClient {
public boolean GLOB() {
if (this.cmd.length != 1) {
log.error("Syntax: GLOB (no parameter)");
log.warn("Syntax: GLOB (no parameter)");
return true;
}
this.glob = !this.glob;
@ -661,7 +655,7 @@ public class FTPClient {
}
public boolean HASH() {
log.error("no games implemented");
log.warn("no games implemented");
return true;
}
@ -676,7 +670,7 @@ public class FTPClient {
public boolean JJENCODE() {
if (this.cmd.length != 2) {
log.error("Syntax: JJENCODE <path>");
log.warn("Syntax: JJENCODE <path>");
return true;
}
final String path = this.cmd[1];
@ -701,17 +695,17 @@ public class FTPClient {
exec("cd ..;jar -cfM \"" + path + ".jj\" \"" + path + ".jar\"", true);
exec("rm \"" + path + ".jar\"", true);
} else {
log.error("Error: local path " + newPath.toString() + " denotes not to a directory.");
log.warn("Error: local path " + newPath.toString() + " denotes not to a directory.");
}
} else {
log.error("Error: local path " + newPath.toString() + " does not exist.");
log.warn("Error: local path " + newPath.toString() + " does not exist.");
}
return true;
}
public boolean JJDECODE() {
if (this.cmd.length != 2) {
log.error("Syntax: JJENCODE <path>");
log.warn("Syntax: JJENCODE <path>");
return true;
}
final String path = this.cmd[1];
@ -730,13 +724,13 @@ public class FTPClient {
exec("mkdir \"" + path + ".dir\"", true);
} else {
log.error("Error: target dir " + newFolder.toString() + " cannot be created");
log.warn("Error: target dir " + newFolder.toString() + " cannot be created");
}
} else {
log.error("Error: local path " + newPath.toString() + " must denote to jar/jar file");
log.warn("Error: local path " + newPath.toString() + " must denote to jar/jar file");
}
} else {
log.error("Error: local path " + newPath.toString() + " does not exist.");
log.warn("Error: local path " + newPath.toString() + " does not exist.");
}
return true;
}
@ -751,7 +745,7 @@ public class FTPClient {
final String dest_name = args[1];
final File dest_file = new File(dest_name);
if (dest_file.exists()) {
log.error("join: destination file " + dest_name + " already exists");
log.warn("join: destination file " + dest_name + " already exists");
return true;
}
@ -817,10 +811,10 @@ public class FTPClient {
for (pc = 0; pc < args.length; pc++) {
try {
if (!(new File(args[pc])).delete()) {
log.error("join: unable to delete file " + args[pc]);
log.warn("join: unable to delete file " + args[pc]);
}
} catch (final SecurityException e) {
log.error("join: no permission to delete file " + args[pc]);
log.warn("join: no permission to delete file " + args[pc]);
}
}
} catch (final FileNotFoundException e) {
@ -844,7 +838,7 @@ public class FTPClient {
}
// print appropriate message
log.error("join created output from " + args.length + " source files");
log.warn("join created output from " + args.length + " source files");
}
return true;
}
@ -852,7 +846,7 @@ public class FTPClient {
public boolean COPY(final String[] args) {
final File dest_file = new File(args[2]);
if (dest_file.exists()) {
log.error("copy: destination file " + args[2] + " already exists");
log.warn("copy: destination file " + args[2] + " already exists");
return true;
}
int bytes_read = 0;
@ -913,7 +907,7 @@ public class FTPClient {
public boolean LCD() {
if (this.cmd.length != 2) {
log.error("Syntax: LCD <path>");
log.warn("Syntax: LCD <path>");
return true;
}
final String path = this.cmd[1];
@ -928,10 +922,10 @@ public class FTPClient {
this.currentLocalPath = newPath;
log.info("---- New local path: " + this.currentLocalPath.toString());
} else {
log.error("Error: local path " + newPath.toString() + " denotes not a directory.");
log.warn("Error: local path " + newPath.toString() + " denotes not a directory.");
}
} else {
log.error("Error: local path " + newPath.toString() + " does not exist.");
log.warn("Error: local path " + newPath.toString() + " does not exist.");
}
return true;
}
@ -942,7 +936,7 @@ public class FTPClient {
public boolean LDIR() {
if (this.cmd.length != 1) {
log.error("Syntax: LDIR (no parameter)");
log.warn("Syntax: LDIR (no parameter)");
return true;
}
final String[] name = this.currentLocalPath.list();
@ -1207,7 +1201,7 @@ public class FTPClient {
public boolean LITERAL() {
if (this.cmd.length == 1) {
log.error("Syntax: LITERAL <ftp-command> [<command-argument>] (see RFC959)");
log.warn("Syntax: LITERAL <ftp-command> [<command-argument>] (see RFC959)");
return true;
}
String s = "";
@ -1217,7 +1211,7 @@ public class FTPClient {
try {
literal(s.substring(1));
} catch (final IOException e) {
log.error("Error: Syntax of FTP-command wrong. See RFC959 for details.");
log.warn("Error: Syntax of FTP-command wrong. See RFC959 for details.");
}
return true;
}
@ -1232,15 +1226,15 @@ public class FTPClient {
public boolean LMKDIR() {
if (this.cmd.length != 2) {
log.error("Syntax: LMKDIR <folder-name>");
log.warn("Syntax: LMKDIR <folder-name>");
return true;
}
final File f = new File(this.currentLocalPath, this.cmd[1]);
if (f.exists()) {
log.error("Error: local file/folder " + this.cmd[1] + " already exists");
log.warn("Error: local file/folder " + this.cmd[1] + " already exists");
} else {
if (!f.mkdir()) {
log.error("Error: creation of local folder " + this.cmd[1] + " failed");
log.warn("Error: creation of local folder " + this.cmd[1] + " failed");
}
}
return true;
@ -1248,7 +1242,7 @@ public class FTPClient {
public boolean LMV() {
if (this.cmd.length != 3) {
log.error("Syntax: LMV <from> <to>");
log.warn("Syntax: LMV <from> <to>");
return true;
}
final File from = new File(this.cmd[1]);
@ -1257,17 +1251,17 @@ public class FTPClient {
if (from.renameTo(to)) {
log.info("---- \"" + from.toString() + "\" renamed to \"" + to.toString() + "\"");
} else {
log.error("rename failed");
log.warn("rename failed");
}
} else {
log.error("\"" + to.toString() + "\" already exists");
log.warn("\"" + to.toString() + "\" already exists");
}
return true;
}
public boolean LPWD() {
if (this.cmd.length != 1) {
log.error("Syntax: LPWD (no parameter)");
log.warn("Syntax: LPWD (no parameter)");
return true;
}
log.info("---- Local path: " + this.currentLocalPath.toString());
@ -1280,15 +1274,15 @@ public class FTPClient {
public boolean LRMDIR() {
if (this.cmd.length != 2) {
log.error("Syntax: LRMDIR <folder-name>");
log.warn("Syntax: LRMDIR <folder-name>");
return true;
}
final File f = new File(this.currentLocalPath, this.cmd[1]);
if (!f.exists()) {
log.error("Error: local folder " + this.cmd[1] + " does not exist");
log.warn("Error: local folder " + this.cmd[1] + " does not exist");
} else {
if (!f.delete()) {
log.error("Error: deletion of local folder " + this.cmd[1] + " failed");
log.warn("Error: deletion of local folder " + this.cmd[1] + " failed");
}
}
return true;
@ -1296,15 +1290,15 @@ public class FTPClient {
public boolean LRM() {
if (this.cmd.length != 2) {
log.error("Syntax: LRM <file-name>");
log.warn("Syntax: LRM <file-name>");
return true;
}
final File f = new File(this.currentLocalPath, this.cmd[1]);
if (!f.exists()) {
log.error("Error: local file " + this.cmd[1] + " does not exist");
log.warn("Error: local file " + this.cmd[1] + " does not exist");
} else {
if (!f.delete()) {
log.error("Error: deletion of file " + this.cmd[1] + " failed");
log.warn("Error: deletion of file " + this.cmd[1] + " failed");
}
}
return true;
@ -1312,7 +1306,7 @@ public class FTPClient {
public boolean LS() {
if (this.cmd.length > 2) {
log.error("Syntax: LS [<path>|<file>]");
log.warn("Syntax: LS [<path>|<file>]");
return true;
}
if (notConnected()) {
@ -1327,7 +1321,7 @@ public class FTPClient {
}
printElements(l);
} catch (final IOException e) {
log.error("Error: remote list not available (2): " + e.getMessage());
log.warn("Error: remote list not available (2): " + e.getMessage());
}
return true;
}
@ -1409,7 +1403,7 @@ public class FTPClient {
public boolean MKDIR() {
if (this.cmd.length != 2) {
log.error("Syntax: MKDIR <folder-name>");
log.warn("Syntax: MKDIR <folder-name>");
return true;
}
if (notConnected()) {
@ -1424,20 +1418,20 @@ public class FTPClient {
throw new IOException(reply);
}
} catch (final IOException e) {
log.error("Error: creation of folder " + this.cmd[1] + " failed");
log.warn("Error: creation of folder " + this.cmd[1] + " failed");
}
return true;
}
public boolean MGET() {
if (this.cmd.length != 2) {
log.error("Syntax: MGET <file-pattern>");
log.warn("Syntax: MGET <file-pattern>");
return true;
}
try {
mget(this.cmd[1], false);
} catch (final IOException e) {
log.error("Error: mget failed (" + e.getMessage() + ")");
log.warn("Error: mget failed (" + e.getMessage() + ")");
}
return true;
}
@ -1449,9 +1443,9 @@ public class FTPClient {
if (matches(remote, pattern)) {
local = new File(this.currentLocalPath, remote);
if (local.exists()) {
log.error("Warning: local file " + local.toString() + " overwritten.");
log.warn("Warning: local file " + local.toString() + " overwritten.");
if(!local.delete())
log.error("Warning: local file " + local.toString() + " could not be deleted.");
log.warn("Warning: local file " + local.toString() + " could not be deleted.");
}
retrieveFilesRecursively(remote, remove);
}
@ -1460,13 +1454,13 @@ public class FTPClient {
public boolean MOVEDOWN() {
if (this.cmd.length != 2) {
log.error("Syntax: MOVEDOWN <file-pattern>");
log.warn("Syntax: MOVEDOWN <file-pattern>");
return true;
}
try {
mget(this.cmd[1], true);
} catch (final IOException e) {
log.error("Error: movedown failed (" + e.getMessage() + ")");
log.warn("Error: movedown failed (" + e.getMessage() + ")");
}
return true;
}
@ -1478,7 +1472,7 @@ public class FTPClient {
*/
public boolean MV() {
if (this.cmd.length != 3) {
log.error("Syntax: MV <from> <to>");
log.warn("Syntax: MV <from> <to>");
return true;
}
if (notConnected()) {
@ -1499,27 +1493,27 @@ public class FTPClient {
throw new IOException(reply);
}
} catch (final IOException e) {
log.error("Error: rename of " + this.cmd[1] + " to " + this.cmd[2] + " failed.");
log.warn("Error: rename of " + this.cmd[1] + " to " + this.cmd[2] + " failed.");
}
return true;
}
public boolean NOOP() {
if (this.cmd.length != 1) {
log.error("Syntax: NOOP (no parameter)");
log.warn("Syntax: NOOP (no parameter)");
return true;
}
try {
literal("NOOP");
} catch (final IOException e) {
log.error("Error: server does not know how to do nothing");
log.warn("Error: server does not know how to do nothing");
}
return true;
}
public boolean OPEN() {
if ((this.cmd.length < 2) || (this.cmd.length > 3)) {
log.error("Syntax: OPEN <host> [<port>]");
log.warn("Syntax: OPEN <host> [<port>]");
return true;
}
int port = 21;
@ -1540,7 +1534,7 @@ public class FTPClient {
log.info("---- Connection to " + this.cmd[1] + " established.");
this.prompt = "ftp [" + this.cmd[1] + "]>";
} catch (final IOException e) {
log.error("Error: connecting " + this.cmd[1] + " on port " + port + " failed: " + e.getMessage());
log.warn("Error: connecting " + this.cmd[1] + " on port " + port + " failed: " + e.getMessage());
}
return true;
}
@ -1598,25 +1592,25 @@ public class FTPClient {
}
public boolean PROMPT() {
log.error("prompt is always off");
log.warn("prompt is always off");
return true;
}
public boolean PUT() {
if ((this.cmd.length < 2) || (this.cmd.length > 3)) {
log.error("Syntax: PUT <local-file> [<remote-file>]");
log.warn("Syntax: PUT <local-file> [<remote-file>]");
return true;
}
final File local = new File(this.currentLocalPath, this.cmd[1]);
final String remote = (this.cmd.length == 2) ? local.getName() : this.cmd[2];
if (!local.exists()) {
log.error("Error: local file " + local.toString() + " does not exist.");
log.error(" Remote file " + remote + " not overwritten.");
log.warn("Error: local file " + local.toString() + " does not exist.");
log.warn(" Remote file " + remote + " not overwritten.");
} else {
try {
put(local.getAbsolutePath(), remote);
} catch (final IOException e) {
log.error("Error: transmitting file " + local.toString() + " failed.");
log.warn("Error: transmitting file " + local.toString() + " failed.");
}
}
return true;
@ -1624,7 +1618,7 @@ public class FTPClient {
public boolean PWD() {
if (this.cmd.length > 1) {
log.error("Syntax: PWD (no parameter)");
log.warn("Syntax: PWD (no parameter)");
return true;
}
if (notConnected()) {
@ -1633,7 +1627,7 @@ public class FTPClient {
try {
log.info("---- Current remote path is: " + pwd());
} catch (final IOException e) {
log.error("Error: remote path not available");
log.warn("Error: remote path not available");
}
return true;
}
@ -1654,20 +1648,20 @@ public class FTPClient {
public boolean REMOTEHELP() {
if (this.cmd.length != 1) {
log.error("Syntax: REMOTEHELP (no parameter)");
log.warn("Syntax: REMOTEHELP (no parameter)");
return true;
}
try {
literal("HELP");
} catch (final IOException e) {
log.error("Error: remote help not supported by server.");
log.warn("Error: remote help not supported by server.");
}
return true;
}
public boolean RMDIR() {
if (this.cmd.length != 2) {
log.error("Syntax: RMDIR <folder-name>");
log.warn("Syntax: RMDIR <folder-name>");
return true;
}
if (notConnected()) {
@ -1676,7 +1670,7 @@ public class FTPClient {
try {
rmForced(this.cmd[1]);
} catch (final IOException e) {
log.error("Error: deletion of folder " + this.cmd[1] + " failed.");
log.warn("Error: deletion of folder " + this.cmd[1] + " failed.");
}
return true;
}
@ -1741,20 +1735,20 @@ public class FTPClient {
public boolean USER() {
if (this.cmd.length != 3) {
log.error("Syntax: USER <user-name> <password>");
log.warn("Syntax: USER <user-name> <password>");
return true;
}
try {
login(this.cmd[1], this.cmd[2]);
log.info("---- Granted access for user " + this.cmd[1] + ".");
} catch (final IOException e) {
log.error("Error: authorization of user " + this.cmd[1] + " failed: " + e.getMessage());
log.warn("Error: authorization of user " + this.cmd[1] + " failed: " + e.getMessage());
}
return true;
}
public boolean APPEND() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
@ -1869,52 +1863,52 @@ public class FTPClient {
}
public boolean QUOTE() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean BELL() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean MDELETE() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean SEND() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean DEBUG() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean MLS() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean TRACE() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean MPUT() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean TYPE() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
public boolean CREATE() {
log.error("not yet supported");
log.warn("not yet supported");
return true;
}
@ -2272,7 +2266,7 @@ public class FTPClient {
+ (((stop - start) < 2000) ? (stop - start) + " milliseconds"
: (((int) ((stop - start) / 100)) / 10) + " seconds"));
if (start == stop) {
log.error("start == stop");
log.warn("start == stop");
} else {
log.info(" (" + (length * 1000 / 1024 / (stop - start)) + " kbytes/second)");
}
@ -2498,7 +2492,7 @@ public class FTPClient {
try {
applyDataSocketTimeout();
} catch (final SocketException e) {
log.error("setDataSocketTimeout: " + e.getMessage());
log.warn("setDataSocketTimeout: " + e.getMessage());
}
}
@ -2567,7 +2561,7 @@ public class FTPClient {
try {
list = ftpClient.list(path, true);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return;
}
if (!path.endsWith("/")) path += "/";
@ -2777,7 +2771,7 @@ public class FTPClient {
Thread.currentThread().setName("FTP.pt(" + this.host + ")");
put(this.host, this.localFile, this.remotePath, this.remoteName, this.account, this.password);
} catch (final IOException e) {
log.error(e, e);
log.warn(e.getMessage(), e);
}
}
}
@ -2839,9 +2833,9 @@ public class FTPClient {
fos.write(UTF8.getBytes(page.toString()));
fos.close();
} catch (final FileNotFoundException e) {
log.error(e);
log.warn(e);
} catch (final IOException e) {
log.error(e);
log.warn(e);
}
} else if (args[0].equals("-sitelist")) {
try {
@ -2851,11 +2845,11 @@ public class FTPClient {
System.out.println(entry.toString());
}
} catch (final FileNotFoundException e) {
log.error(e);
log.warn(e);
} catch (final IOException e) {
log.error(e);
log.warn(e);
} catch (final InterruptedException e) {
log.error(e);
log.warn(e);
}
} else {
printHelp();
@ -2880,7 +2874,7 @@ public class FTPClient {
put(args[1], new File(args[2]), args[3], "", args[4], args[5]);
} catch (final IOException e) {
// TODO Auto-generated catch block
log.error(e, e);
log.warn(e.getMessage(), e);
}
} else {
printHelp();

@ -1,28 +1,24 @@
// Log.java
// -------------------------------------
// (C) by Michael Peter Christen; mc@yacy.net
// first published on http://www.anomic.de
// Frankfurt, Germany, 2004
//
// $LastChangedDate$
// $LastChangedRevision$
// $LastChangedBy$
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
/**
* ConcurrentLog
* Copyright 2013 by Michael Peter Christen; mc@yacy.net, Frankfurt a. M., Germany
* First published 2004, redesigned 9.7.2013 on http://yacy.net
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program in the file lgpl21.txt
* If not, see <http://www.gnu.org/licenses/>.
*/
package net.yacy.kelondro.logging;
package net.yacy.cora.util;
import java.io.ByteArrayOutputStream;
import java.io.File;
@ -41,31 +37,27 @@ import java.util.logging.LogManager;
import java.util.logging.Logger;
public final class Log {
/**
 * JDK-based loggers tend to block at java.util.logging.Logger.log(Logger.java:476)
 * in concurrent environments, which makes logging a major performance issue.
 * To overcome this problem, this class is an add-on to JDK logging that puts log entries
 * on a concurrent message queue and logs the messages one by one using a
 * separate worker thread.
*/
public final class ConcurrentLog {
// log-level categories
public static final int LOGLEVEL_ZERO = Level.OFF.intValue(); // no output at all
public static final int LOGLEVEL_SEVERE = Level.SEVERE.intValue(); // system-level error, internal cause, critical and not fixable (i.e. inconsistency)
public static final int LOGLEVEL_WARNING = Level.WARNING.intValue(); // uncritical service failure, may require user activity (i.e. input required, wrong authorization)
public static final int LOGLEVEL_CONFIG = Level.CONFIG.intValue(); // regular system status information (i.e. start-up messages)
public static final int LOGLEVEL_INFO = Level.INFO.intValue(); // regular action information (i.e. any httpd request URL)
public static final int LOGLEVEL_FINE = Level.FINE.intValue(); // in-function status debug output
public static final int LOGLEVEL_FINER = Level.FINER.intValue(); // in-function status debug output
public static final int LOGLEVEL_FINEST = Level.FINEST.intValue(); // in-function status debug output
// these categories are also present as character tokens
public static final char LOGTOKEN_ZERO = 'Z';
public static final char LOGTOKEN_SEVERE = 'E';
public static final char LOGTOKEN_WARNING = 'W';
public static final char LOGTOKEN_CONFIG = 'S';
public static final char LOGTOKEN_INFO = 'I';
public static final char LOGTOKEN_FINE = 'D';
public static final char LOGTOKEN_FINER = 'D';
public static final char LOGTOKEN_FINEST = 'D';
private final static Message POISON_MESSAGE = new Message();
private final static BlockingQueue<Message> logQueue = new ArrayBlockingQueue<Message>(300);
private final static Worker logRunnerThread = new Worker();
static {
logRunnerThread.start();
}
private final Logger theLogger;
public Log(final String appName) {
public ConcurrentLog(final String appName) {
this.theLogger = Logger.getLogger(appName);
//this.theLogger.setLevel(Level.FINEST); // set a default level
}
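
For readers skimming this hunk: the new class decouples callers from the blocking JDK logger by pushing every log call onto a bounded queue that a single background worker drains. The following is a minimal, self-contained Java sketch of that pattern, assuming only what the javadoc and the fields above show (a bounded ArrayBlockingQueue, a poison message, one worker thread); the class name QueueLoggerSketch and its method bodies are illustrative and are not YaCy's actual ConcurrentLog code.

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.logging.Level;
import java.util.logging.Logger;

// Minimal sketch of a queue-decoupled logger: callers only enqueue, a single
// background thread performs the (potentially blocking) JDK logging calls.
public final class QueueLoggerSketch {

    // a queued log request; the POISON instance signals the worker to stop
    private static final class Message {
        final Logger logger; final Level level; final String text;
        Message(final Logger logger, final Level level, final String text) {
            this.logger = logger; this.level = level; this.text = text;
        }
    }
    private static final Message POISON = new Message(null, null, null);

    // bounded queue so a logging storm cannot exhaust memory
    private static final BlockingQueue<Message> QUEUE = new ArrayBlockingQueue<Message>(300);

    // the single worker thread that drains the queue
    private static final Thread WORKER = new Thread("QueueLoggerSketch.Worker") {
        @Override public void run() {
            try {
                Message m;
                while ((m = QUEUE.take()) != POISON) {
                    m.logger.log(m.level, m.text); // only this thread touches the JDK logger
                }
            } catch (final InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }
    };
    static { WORKER.setDaemon(true); WORKER.start(); }

    private final Logger theLogger;

    public QueueLoggerSketch(final String appName) {
        this.theLogger = Logger.getLogger(appName);
    }

    public void info(final String message)   { enqueue(Level.INFO, message); }
    public void warn(final String message)   { enqueue(Level.WARNING, message); }
    public void severe(final String message) { enqueue(Level.SEVERE, message); }

    private void enqueue(final Level level, final String message) {
        if (!this.theLogger.isLoggable(level)) return;   // cheap check in the caller thread
        try {
            QUEUE.put(new Message(this.theLogger, level, message)); // blocks only if the queue is full
        } catch (final InterruptedException e) {
            Thread.currentThread().interrupt();
        }
    }

    // optional shutdown: let the worker drain the queue, then stop it
    public static void shutdown() throws InterruptedException {
        QUEUE.put(POISON);
        WORKER.join();
    }

    public static void main(final String[] args) throws InterruptedException {
        final QueueLoggerSketch log = new QueueLoggerSketch("SKETCH");
        log.info("hello from the caller thread");
        shutdown();
    }
}

Call sites then use it the same way the rewritten call sites in this commit use ConcurrentLog (e.g. log.warn("...")); the real class additionally offers Throwable overloads and the static severe/warn/info/fine helpers shown in the hunks below.
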
@ -74,11 +66,11 @@ public final class Log {
this.theLogger.setLevel(newLevel);
}
public final void logSevere(final String message) {
public final void severe(final String message) {
enQueueLog(this.theLogger, Level.SEVERE, message);
}
public final void logSevere(final String message, final Throwable thrown) {
public final void severe(final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.SEVERE, message, thrown);
}
@ -87,24 +79,29 @@ public final class Log {
return this.theLogger.isLoggable(Level.SEVERE);
}
public final void logWarning(final String message) {
public final void warn(final String message) {
enQueueLog(this.theLogger, Level.WARNING, message);
}
public final void logWarning(final String message, final Throwable thrown) {
public final void warn(final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.WARNING, thrown.getMessage(), thrown);
}
public final void warn(final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.WARNING, message, thrown);
}
public final boolean isWarning() {
public final boolean isWarn() {
return this.theLogger.isLoggable(Level.WARNING);
}
public final void logConfig(final String message) {
public final void config(final String message) {
enQueueLog(this.theLogger, Level.CONFIG, message);
}
public final void logConfig(final String message, final Throwable thrown) {
public final void config(final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.CONFIG, message, thrown);
}
@ -113,11 +110,11 @@ public final class Log {
return this.theLogger.isLoggable(Level.CONFIG);
}
public final void logInfo(final String message) {
public final void info(final String message) {
enQueueLog(this.theLogger, Level.INFO, message);
}
public final void logInfo(final String message, final Throwable thrown) {
public final void info(final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.INFO, message, thrown);
}
@ -126,11 +123,11 @@ public final class Log {
return this.theLogger.isLoggable(Level.INFO);
}
public final void logFine(final String message) {
public final void fine(final String message) {
enQueueLog(this.theLogger, Level.FINE, message);
}
public final void logFine(final String message, final Throwable thrown) {
public final void fine(final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.FINE, message, thrown);
}
@ -139,11 +136,11 @@ public final class Log {
return this.theLogger.isLoggable(Level.FINE);
}
public final void logFiner(final String message) {
public final void finer(final String message) {
enQueueLog(this.theLogger, Level.FINER, message);
}
public final void logFiner(final String message, final Throwable thrown) {
public final void finer(final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.FINER, message, thrown);
}
@ -152,11 +149,11 @@ public final class Log {
return this.theLogger.isLoggable(Level.FINER);
}
public final void logFinest(final String message) {
public final void finest(final String message) {
enQueueLog(this.theLogger, Level.FINEST, message);
}
public final void logFinest(final String message, final Throwable thrown) {
public final void finest(final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(this.theLogger, Level.FINEST, message, thrown);
}
@ -171,46 +168,46 @@ public final class Log {
// static log messages
public final static void logSevere(final String appName, final String message) {
enQueueLog(appName, Level.SEVERE, message);
}
public final static void logSevere(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.SEVERE, message, thrown);
}
public final static void logWarning(final String appName, final String message) {
enQueueLog(appName, Level.WARNING, message);
}
public final static void logException(final Throwable thrown) {
if (thrown == null) return;
enQueueLog("StackTrace", Level.WARNING, thrown.getMessage(), thrown);
}
public final static void logWarning(final String appName, final String message, final Throwable thrown) {
public final static void severe(final String appName, final String message) {
enQueueLog(appName, Level.SEVERE, message);
}
public final static void severe(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.SEVERE, message, thrown);
}
public final static void warn(final String appName, final String message) {
enQueueLog(appName, Level.WARNING, message);
}
public final static void warn(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.WARNING, message, thrown);
}
public final static void logConfig(final String appName, final String message) {
public final static void config(final String appName, final String message) {
enQueueLog(appName, Level.CONFIG, message);
}
public final static void logConfig(final String appName, final String message, final Throwable thrown) {
public final static void config(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.CONFIG, message, thrown);
}
public final static void logInfo(final String appName, final String message) {
public final static void info(final String appName, final String message) {
enQueueLog(appName, Level.INFO, message);
}
public final static void logInfo(final String appName, final String message, final Throwable thrown) {
public final static void info(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.INFO, message, thrown);
}
public final static void logFine(final String appName, final String message) {
public final static void fine(final String appName, final String message) {
enQueueLog(appName, Level.FINE, message);
}
public final static void logFine(final String appName, final String message, final Throwable thrown) {
public final static void fine(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.FINE, message, thrown);
}
@ -218,18 +215,18 @@ public final class Log {
return Logger.getLogger(appName).isLoggable(Level.FINE);
}
public final static void logFiner(final String appName, final String message) {
public final static void finer(final String appName, final String message) {
enQueueLog(appName, Level.FINER, message);
}
public final static void logFiner(final String appName, final String message, final Throwable thrown) {
public final static void finer(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.FINER, message, thrown);
}
public final static void logFinest(final String appName, final String message) {
public final static void finest(final String appName, final String message) {
enQueueLog(appName, Level.FINEST, message);
}
public final static void logFinest(final String appName, final String message, final Throwable thrown) {
public final static void finest(final String appName, final String message, final Throwable thrown) {
if (thrown == null) return;
enQueueLog(appName, Level.FINEST, message, thrown);
}
@ -243,7 +240,7 @@ public final class Log {
if (thrown == null) logger.log(level, message); else logger.log(level, message, thrown);
} else {
try {
if (thrown == null) logQueue.put(new logEntry(logger, level, message)); else logQueue.put(new logEntry(logger, level, message, thrown));
if (thrown == null) logQueue.put(new Message(logger, level, message)); else logQueue.put(new Message(logger, level, message, thrown));
} catch (final InterruptedException e) {
if (thrown == null) logger.log(level, message); else logger.log(level, message, thrown);
}
@ -256,7 +253,7 @@ public final class Log {
logger.log(level, message);
} else {
try {
logQueue.put(new logEntry(logger, level, message));
logQueue.put(new Message(logger, level, message));
} catch (final InterruptedException e) {
logger.log(level, message);
}
@ -268,7 +265,7 @@ public final class Log {
if (thrown == null) Logger.getLogger(loggername).log(level, message); else Logger.getLogger(loggername).log(level, message, thrown);
} else {
try {
if (thrown == null) logQueue.put(new logEntry(loggername, level, message)); else logQueue.put(new logEntry(loggername, level, message, thrown));
if (thrown == null) logQueue.put(new Message(loggername, level, message)); else logQueue.put(new Message(loggername, level, message, thrown));
} catch (final InterruptedException e) {
if (thrown == null) Logger.getLogger(loggername).log(level, message); else Logger.getLogger(loggername).log(level, message, thrown);
}
@ -280,48 +277,48 @@ public final class Log {
Logger.getLogger(loggername).log(level, message);
} else {
try {
logQueue.put(new logEntry(loggername, level, message));
logQueue.put(new Message(loggername, level, message));
} catch (final InterruptedException e) {
Logger.getLogger(loggername).log(level, message);
}
}
}
protected final static class logEntry {
protected final static class Message {
private final Level level;
private final String message;
private Logger logger;
private String loggername;
private Throwable thrown;
private logEntry(final Level level, final String message) {
private Message(final Level level, final String message) {
this.level = level;
this.message = message == null || message.length() <= 1024 ? message : message.substring(0, 1024);
}
public logEntry(final Logger logger, final Level level, final String message, final Throwable thrown) {
public Message(final Logger logger, final Level level, final String message, final Throwable thrown) {
this(level, message);
this.logger = logger;
this.loggername = null;
this.thrown = thrown;
}
public logEntry(final Logger logger, final Level level, final String message) {
public Message(final Logger logger, final Level level, final String message) {
this(level, message);
this.logger = logger;
this.loggername = null;
this.thrown = null;
}
public logEntry(final String loggername, final Level level, final String message, final Throwable thrown) {
public Message(final String loggername, final Level level, final String message, final Throwable thrown) {
this(level, message);
this.logger = null;
this.loggername = loggername;
this.thrown = thrown;
}
public logEntry(final String loggername, final Level level, final String message) {
public Message(final String loggername, final Level level, final String message) {
this(level, message);
this.logger = null;
this.loggername = loggername;
this.thrown = null;
}
public logEntry() {
public Message() {
this.logger = null;
this.loggername = null;
this.level = null;
@ -330,26 +327,18 @@ public final class Log {
}
}
private final static logEntry poison = new logEntry();
private final static BlockingQueue<logEntry> logQueue = new ArrayBlockingQueue<logEntry>(300);
private final static logRunner logRunnerThread = new logRunner();
static {
logRunnerThread.start();
}
protected final static class logRunner extends Thread {
public logRunner() {
super("Log Runner");
protected final static class Worker extends Thread {
public Worker() {
super("Log Worker");
}
@Override
public void run() {
logEntry entry;
Message entry;
Map<String, Logger> loggerCache = new HashMap<String, Logger>();
//Map<String, AtomicInteger> loggerCounter = new HashMap<String, AtomicInteger>();
try {
while ((entry = logQueue.take()) != poison) {
while ((entry = logQueue.take()) != POISON_MESSAGE) {
if (entry.logger == null) {
assert entry.loggername != null;
//AtomicInteger i = loggerCounter.get(entry.loggername);
@ -372,7 +361,7 @@ public final class Log {
}
}
} catch (final InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
//Logger.getLogger("Log").log(Level.INFO, "closing logRunner with cached loggers: " + loggerCounter.entrySet().toString());
}
@ -408,7 +397,7 @@ public final class Log {
}
// redirect uncaught exceptions to logging
final Log exceptionLog = new Log("UNCAUGHT-EXCEPTION");
final ConcurrentLog exceptionLog = new ConcurrentLog("UNCAUGHT-EXCEPTION");
Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler(){
@Override
public void uncaughtException(final Thread t, final Throwable e) {
@ -418,10 +407,10 @@ public final class Log {
final PrintStream ps = new PrintStream(baos);
e.printStackTrace(ps);
ps.close();
exceptionLog.logSevere(msg + "\n" + baos.toString(), e);
Log.logException(e);
Log.logException(e.getCause());
if (e instanceof InvocationTargetException) Log.logException(((InvocationTargetException) e).getTargetException());
exceptionLog.severe(msg + "\n" + baos.toString(), e);
ConcurrentLog.logException(e);
ConcurrentLog.logException(e.getCause());
if (e instanceof InvocationTargetException) ConcurrentLog.logException(((InvocationTargetException) e).getTargetException());
}
});
} finally {
@ -432,7 +421,7 @@ public final class Log {
public final static void shutdown() {
if (logRunnerThread == null || !logRunnerThread.isAlive()) return;
try {
logQueue.put(poison);
logQueue.put(POISON_MESSAGE);
logRunnerThread.join(1000);
} catch (final InterruptedException e) {
}
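
The hunks above are the heart of this commit: callers no longer invoke java.util.logging directly but enqueue a Message onto a bounded ArrayBlockingQueue, a single Worker thread drains the queue and forwards entries to the jdk logger, and shutdown() injects a poison object so the worker terminates after the pending entries are written. The following stand-alone sketch illustrates that producer/consumer pattern; the class and method names (QueueLogger, Entry, enqueue) are illustrative placeholders and not taken from the YaCy sources.

    import java.util.concurrent.ArrayBlockingQueue;
    import java.util.concurrent.BlockingQueue;
    import java.util.logging.Level;
    import java.util.logging.Logger;

    // Minimal sketch of an asynchronous logger: producers only enqueue,
    // a single background thread does the actual jdk logging calls.
    public final class QueueLogger {

        private static final class Entry {
            final String name; final Level level; final String message;
            Entry(String name, Level level, String message) {
                this.name = name; this.level = level; this.message = message;
            }
        }

        private static final Entry POISON = new Entry(null, null, null);
        private static final BlockingQueue<Entry> QUEUE = new ArrayBlockingQueue<>(300);

        private static final Thread WORKER = new Thread("Log Worker") {
            @Override public void run() {
                try {
                    Entry e;
                    while ((e = QUEUE.take()) != POISON) {
                        // the only place where java.util.logging is called
                        Logger.getLogger(e.name).log(e.level, e.message);
                    }
                } catch (InterruptedException ignored) {
                    // terminate silently; remaining entries are dropped
                }
            }
        };
        static { WORKER.start(); }

        public static void enqueue(String name, Level level, String message) {
            if (!WORKER.isAlive()) { // fallback: log synchronously if the worker is gone
                Logger.getLogger(name).log(level, message);
                return;
            }
            try {
                QUEUE.put(new Entry(name, level, message));
            } catch (InterruptedException e) {
                Logger.getLogger(name).log(level, message); // fallback on interruption
            }
        }

        public static void shutdown() {
            if (!WORKER.isAlive()) return;
            try {
                QUEUE.put(POISON);   // poison pill: worker exits after draining earlier entries
                WORKER.join(1000);
            } catch (InterruptedException ignored) {}
        }

        public static void main(String[] args) {
            enqueue("demo", Level.INFO, "hello from the queue");
            shutdown();
        }
    }

The bounded queue (300 entries, as in the diff) keeps memory use predictable; when it fills up, put() blocks the producer, which is still cheaper than having every caller contend on Logger.log() as described in the commit message.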

@ -46,6 +46,7 @@ import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.sorting.OrderedScoreMap;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.data.Cache;
import net.yacy.crawler.data.CrawlProfile;
@ -57,7 +58,6 @@ import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.index.BufferedObjectIndex;
import net.yacy.kelondro.index.Row;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.table.Table;
import net.yacy.kelondro.util.MemoryControl;
import net.yacy.repository.Blacklist.BlacklistType;
@ -122,11 +122,11 @@ public class Balancer {
try {
this.urlFileIndex = new BufferedObjectIndex(new Table(f, Request.rowdef, 0, 0, false, exceed134217727, true), objectIndexBufferSize);
} catch (final SpaceExceededException e1) {
Log.logException(e1);
ConcurrentLog.logException(e1);
}
}
this.lastDomainStackFill = 0;
Log.logInfo("Balancer", "opened balancer file with " + this.urlFileIndex.size() + " entries from " + f.toString());
ConcurrentLog.info("Balancer", "opened balancer file with " + this.urlFileIndex.size() + " entries from " + f.toString());
}
public int getMinimumLocalDelta() {
@ -150,11 +150,11 @@ public class Balancer {
}
public void clear() {
Log.logInfo("Balancer", "cleaning balancer with " + this.urlFileIndex.size() + " entries from " + this.urlFileIndex.filename());
ConcurrentLog.info("Balancer", "cleaning balancer with " + this.urlFileIndex.size() + " entries from " + this.urlFileIndex.filename());
try {
this.urlFileIndex.clear();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
this.domainStacks.clear();
this.double_push_check.clear();
@ -435,7 +435,7 @@ public class Balancer {
// check blacklist (again) because the user may have created blacklist entries after the queue has been filled
if (Switchboard.urlBlacklist.isListed(BlacklistType.CRAWLER, crawlEntry.url())) {
Log.logFine("CRAWLER", "URL '" + crawlEntry.url() + "' is in blacklist.");
ConcurrentLog.fine("CRAWLER", "URL '" + crawlEntry.url() + "' is in blacklist.");
continue;
}
@ -443,7 +443,7 @@ public class Balancer {
// if not: return null. A calling method must handle the null value and try again
profileEntry = cs.getActive(UTF8.getBytes(crawlEntry.profileHandle()));
if (profileEntry == null) {
Log.logWarning("Balancer", "no profile entry for handle " + crawlEntry.profileHandle());
ConcurrentLog.warn("Balancer", "no profile entry for handle " + crawlEntry.profileHandle());
continue;
}
// depending on the caching policy we need sleep time to avoid DoS-like situations
@ -465,7 +465,7 @@ public class Balancer {
// in the best case, this should never happen if the balancer works properly
// this is only a protection against the worst case, where the crawler could
// behave in a DoS manner
Log.logInfo("BALANCER", "forcing crawl-delay of " + sleeptime + " milliseconds for " + crawlEntry.url().getHost() + ": " + Latency.waitingRemainingExplain(crawlEntry.url(), robots, this.myAgentIDs, this.minimumLocalDelta, this.minimumGlobalDelta) + ", domainStacks.size() = " + this.domainStacks.size() + ", domainStacksInitSize = " + this.domStackInitSize);
ConcurrentLog.info("BALANCER", "forcing crawl-delay of " + sleeptime + " milliseconds for " + crawlEntry.url().getHost() + ": " + Latency.waitingRemainingExplain(crawlEntry.url(), robots, this.myAgentIDs, this.minimumLocalDelta, this.minimumGlobalDelta) + ", domainStacks.size() = " + this.domainStacks.size() + ", domainStacksInitSize = " + this.domStackInitSize);
long loops = sleeptime / 1000;
long rest = sleeptime % 1000;
if (loops < 3) {
@ -477,7 +477,7 @@ public class Balancer {
// must be synchronized here to avoid 'takeover' moves from other threads which then idle the same time which would not be enough
if (rest > 0) {try {this.wait(rest);} catch (final InterruptedException e) {}}
for (int i = 0; i < loops; i++) {
Log.logInfo("BALANCER", "waiting for " + crawlEntry.url().getHost() + ": " + (loops - i) + " seconds remaining...");
ConcurrentLog.info("BALANCER", "waiting for " + crawlEntry.url().getHost() + ": " + (loops - i) + " seconds remaining...");
try {this.wait(1000); } catch (final InterruptedException e) {}
}
}
@ -499,7 +499,7 @@ public class Balancer {
try {
fillDomainStacks();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
// iterate over the domain stacks
@ -596,7 +596,7 @@ public class Balancer {
host = z.getKey(); if (host == null) continue;
hash = z.getValue(); if (hash == null) continue;
removeHashFromDomainStacks(host, hash);
Log.logInfo("Balancer", "// getbest: picked a random from the zero-waiting stack: " + host + ", zeroWaitingCandidates.size = " + this.zeroWaitingCandidates.size());
ConcurrentLog.info("Balancer", "// getbest: picked a random from the zero-waiting stack: " + host + ", zeroWaitingCandidates.size = " + this.zeroWaitingCandidates.size());
return hash;
}
@ -620,7 +620,7 @@ public class Balancer {
// check blacklist (again) because the user may have created blacklist entries after the queue has been filled
if (Switchboard.urlBlacklist.isListed(BlacklistType.CRAWLER, request.url())) {
Log.logFine("CRAWLER", "URL '" + request.url() + "' is in blacklist.");
ConcurrentLog.fine("CRAWLER", "URL '" + request.url() + "' is in blacklist.");
try {blackhandles.put(entry.getPrimaryKeyBytes());} catch (SpaceExceededException e) {}
continue;
}
@ -638,7 +638,7 @@ public class Balancer {
// if we collected blacklist entries then delete them now
for (byte[] blackhandle: blackhandles) this.urlFileIndex.remove(blackhandle);
Log.logInfo("BALANCER", "re-fill of domain stacks; fileIndex.size() = " + this.urlFileIndex.size() + ", domainStacks.size = " + this.domainStacks.size() + ", blackhandles = " + blackhandles.size() + ", collection time = " + (System.currentTimeMillis() - this.lastDomainStackFill) + " ms");
ConcurrentLog.info("BALANCER", "re-fill of domain stacks; fileIndex.size() = " + this.urlFileIndex.size() + ", domainStacks.size = " + this.domainStacks.size() + ", blackhandles = " + blackhandles.size() + ", collection time = " + (System.currentTimeMillis() - this.lastDomainStackFill) + " ms");
this.domStackInitSize = this.domainStacks.size();
}
@ -665,7 +665,7 @@ public class Balancer {
try {
return (entry == null) ? null : new Request(entry);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
this.rowIterator = null;
return null;
}
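
The forced crawl-delay logged above is waited out in one-second steps so that the remaining time can be reported each second. Below is a reduced sketch of that waiting scheme; the sleeptime value and host name are illustrative inputs, whereas the real Balancer derives them from robots.txt latency data.

    // Sketch: wait out a crawl delay in 1-second steps, logging the remaining time.
    public final class CrawlDelayDemo {
        private final Object lock = new Object();

        public void waitOut(long sleeptime, String host) throws InterruptedException {
            long loops = sleeptime / 1000;
            long rest = sleeptime % 1000;
            if (loops < 3) {              // short delays: wait in a single step
                rest = rest + 1000 * loops;
                loops = 0;
            }
            synchronized (lock) {         // hold one monitor so no other thread 'takes over' the pause
                if (rest > 0) lock.wait(rest);
                for (long i = 0; i < loops; i++) {
                    System.out.println("waiting for " + host + ": " + (loops - i) + " seconds remaining...");
                    lock.wait(1000);
                }
            }
        }

        public static void main(String[] args) throws InterruptedException {
            new CrawlDelayDemo().waitOut(3500, "example.org");
        }
    }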

@ -42,6 +42,7 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.protocol.ftp.FTPClient;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.crawler.data.NoticedURL;
@ -56,7 +57,6 @@ import net.yacy.crawler.retrieval.SMBLoader;
import net.yacy.crawler.robots.RobotsTxt;
import net.yacy.kelondro.data.citation.CitationReference;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.rwi.IndexCell;
import net.yacy.kelondro.workflow.WorkflowProcessor;
import net.yacy.peers.SeedDB;
@ -71,7 +71,7 @@ public final class CrawlStacker {
public static String ERROR_MATCH_WITH_MUST_NOT_MATCH_FILTER = "url matches must-not-match filter ";
private final Log log = new Log("STACKCRAWL");
private final ConcurrentLog log = new ConcurrentLog("STACKCRAWL");
private final RobotsTxt robots;
private final WorkflowProcessor<Request> requestQueue;
private final CrawlQueues nextQueue;
@ -101,7 +101,7 @@ public final class CrawlStacker {
this.acceptGlobalURLs = acceptGlobalURLs;
this.domainList = domainList;
this.requestQueue = new WorkflowProcessor<Request>("CrawlStacker", "This process checks new urls before they are enqueued into the balancer (proper, double-check, correct domain, filter)", new String[]{"Balancer"}, this, "job", 10000, null, WorkflowProcessor.availableCPU);
this.log.logInfo("STACKCRAWL thread initialized.");
this.log.info("STACKCRAWL thread initialized.");
}
@ -118,15 +118,15 @@ public final class CrawlStacker {
}
public void announceClose() {
this.log.logInfo("Flushing remaining " + size() + " crawl stacker job entries.");
this.log.info("Flushing remaining " + size() + " crawl stacker job entries.");
this.requestQueue.shutdown();
}
public synchronized void close() {
this.log.logInfo("Shutdown. waiting for remaining " + size() + " crawl stacker job entries. please wait.");
this.log.info("Shutdown. waiting for remaining " + size() + " crawl stacker job entries. please wait.");
this.requestQueue.shutdown();
this.log.logInfo("Shutdown. Closing stackCrawl queue.");
this.log.info("Shutdown. Closing stackCrawl queue.");
clear();
}
@ -141,7 +141,7 @@ public final class CrawlStacker {
if (urlCitationIndex != null && entry.referrerhash() != null) try {
urlCitationIndex.add(anchorhash, new CitationReference(entry.referrerhash(), entry.appdate().getTime()));
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
try {
@ -152,7 +152,7 @@ public final class CrawlStacker {
this.nextQueue.errorURL.push(entry, ASCII.getBytes(this.peers.mySeed().hash), new Date(), 1, FailCategory.FINAL_LOAD_CONTEXT, rejectReason, -1);
}
} catch (final Exception e) {
CrawlStacker.this.log.logWarning("Error while processing stackCrawl entry.\n" + "Entry: " + entry.toString() + "Error: " + e.toString(), e);
CrawlStacker.this.log.warn("Error while processing stackCrawl entry.\n" + "Entry: " + entry.toString() + "Error: " + e.toString(), e);
return null;
}
return null;
@ -161,7 +161,7 @@ public final class CrawlStacker {
public void enqueueEntry(final Request entry) {
// DEBUG
if (this.log.isFinest()) this.log.logFinest("ENQUEUE " + entry.url() + ", referer=" + entry.referrerhash() + ", initiator=" + ((entry.initiator() == null) ? "" : ASCII.String(entry.initiator())) + ", name=" + entry.name() + ", appdate=" + entry.appdate() + ", depth=" + entry.depth());
if (this.log.isFinest()) this.log.finest("ENQUEUE " + entry.url() + ", referer=" + entry.referrerhash() + ", initiator=" + ((entry.initiator() == null) ? "" : ASCII.String(entry.initiator())) + ", name=" + entry.name() + ", appdate=" + entry.appdate() + ", depth=" + entry.depth());
this.requestQueue.enQueue(entry);
}
public void enqueueEntriesAsynchronous(final byte[] initiator, final String profileHandle, final Map<DigestURI, Properties> hyperlinks) {
@ -299,7 +299,7 @@ public final class CrawlStacker {
String error;
if (profile == null) {
error = "LOST STACKER PROFILE HANDLE '" + entry.profileHandle() + "' for URL " + entry.url();
this.log.logWarning(error);
this.log.warn(error);
return error;
}
@ -321,7 +321,7 @@ public final class CrawlStacker {
if (!local && !global && !remote && !proxy) {
error = "URL '" + entry.url().toString() + "' cannot be crawled. initiator = " + ((entry.initiator() == null) ? "" : ASCII.String(entry.initiator())) + ", profile.handle = " + profile.handle();
this.log.logSevere(error);
this.log.severe(error);
return error;
}
@ -353,20 +353,20 @@ public final class CrawlStacker {
if (global) {
// it may be possible that global == true and local == true, so do not check an error case against it
if (proxy) this.log.logWarning("URL '" + entry.url().toString() + "' has conflicting initiator properties: global = true, proxy = true, initiator = proxy" + ", profile.handle = " + profile.handle());
if (remote) this.log.logWarning("URL '" + entry.url().toString() + "' has conflicting initiator properties: global = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
if (proxy) this.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: global = true, proxy = true, initiator = proxy" + ", profile.handle = " + profile.handle());
if (remote) this.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: global = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.GLOBAL, entry, this.robots);
} else if (local) {
if (proxy) this.log.logWarning("URL '" + entry.url().toString() + "' has conflicting initiator properties: local = true, proxy = true, initiator = proxy" + ", profile.handle = " + profile.handle());
if (remote) this.log.logWarning("URL '" + entry.url().toString() + "' has conflicting initiator properties: local = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
if (proxy) this.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: local = true, proxy = true, initiator = proxy" + ", profile.handle = " + profile.handle());
if (remote) this.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: local = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.LOCAL, entry, this.robots);
} else if (proxy) {
if (remote) this.log.logWarning("URL '" + entry.url().toString() + "' has conflicting initiator properties: proxy = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
if (remote) this.log.warn("URL '" + entry.url().toString() + "' has conflicting initiator properties: proxy = true, remote = true, initiator = " + ASCII.String(entry.initiator()) + ", profile.handle = " + profile.handle());
warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.LOCAL, entry, this.robots);
} else if (remote) {
warning = this.nextQueue.noticeURL.push(NoticedURL.StackType.REMOTE, entry, this.robots);
}
if (warning != null && this.log.isFine()) this.log.logFine("CrawlStacker.stackCrawl of URL " + entry.url().toNormalform(true) + " - not pushed: " + warning);
if (warning != null && this.log.isFine()) this.log.fine("CrawlStacker.stackCrawl of URL " + entry.url().toNormalform(true) + " - not pushed: " + warning);
return null;
}
@ -377,44 +377,44 @@ public final class CrawlStacker {
final String urlProtocol = url.getProtocol();
final String urlstring = url.toString();
if (!Switchboard.getSwitchboard().loader.isSupportedProtocol(urlProtocol)) {
this.log.logSevere("Unsupported protocol in URL '" + urlstring + "'.");
this.log.severe("Unsupported protocol in URL '" + urlstring + "'.");
return "unsupported protocol";
}
// check if ip is local ip address
final String urlRejectReason = urlInAcceptedDomain(url);
if (urlRejectReason != null) {
if (this.log.isFine()) this.log.logFine("denied_(" + urlRejectReason + ")");
if (this.log.isFine()) this.log.fine("denied_(" + urlRejectReason + ")");
return "denied_(" + urlRejectReason + ")";
}
// check blacklist
if (Switchboard.urlBlacklist.isListed(BlacklistType.CRAWLER, url)) {
this.log.logFine("URL '" + urlstring + "' is in blacklist.");
this.log.fine("URL '" + urlstring + "' is in blacklist.");
return "url in blacklist";
}
// filter with must-match for URLs
if ((depth > 0) && !profile.urlMustMatchPattern().matcher(urlstring).matches()) {
if (this.log.isFine()) this.log.logFine("URL '" + urlstring + "' does not match must-match crawling filter '" + profile.urlMustMatchPattern().toString() + "'.");
if (this.log.isFine()) this.log.fine("URL '" + urlstring + "' does not match must-match crawling filter '" + profile.urlMustMatchPattern().toString() + "'.");
return ERROR_NO_MATCH_MUST_MATCH_FILTER + profile.urlMustMatchPattern().toString();
}
// filter with must-not-match for URLs
if ((depth > 0) && profile.urlMustNotMatchPattern().matcher(urlstring).matches()) {
if (this.log.isFine()) this.log.logFine("URL '" + urlstring + "' matches must-not-match crawling filter '" + profile.urlMustNotMatchPattern().toString() + "'.");
if (this.log.isFine()) this.log.fine("URL '" + urlstring + "' matches must-not-match crawling filter '" + profile.urlMustNotMatchPattern().toString() + "'.");
return ERROR_MATCH_WITH_MUST_NOT_MATCH_FILTER + profile.urlMustNotMatchPattern().toString();
}
// deny cgi
if (url.isIndividual() && !profile.crawlingQ()) { // TODO: make special property for crawlingIndividual
if (this.log.isFine()) this.log.logFine("URL '" + urlstring + "' is CGI URL.");
if (this.log.isFine()) this.log.fine("URL '" + urlstring + "' is CGI URL.");
return "individual url (sessionid etc) not wanted";
}
// deny post properties
if (url.isPOST() && !profile.crawlingQ()) {
if (this.log.isFine()) this.log.logFine("URL '" + urlstring + "' is post URL.");
if (this.log.isFine()) this.log.fine("URL '" + urlstring + "' is post URL.");
return "post url not allowed";
}
@ -434,13 +434,13 @@ public final class CrawlStacker {
final boolean recrawl = profile.recrawlIfOlder() > oldDate.getTime();
if (recrawl) {
if (this.log.isInfo())
this.log.logInfo("RE-CRAWL of URL '" + urlstring + "': this url was crawled " +
this.log.info("RE-CRAWL of URL '" + urlstring + "': this url was crawled " +
((System.currentTimeMillis() - oldDate.getTime()) / 60000 / 60 / 24) + " days ago.");
} else {
if (dbocc == null) {
return "double in: LURL-DB, oldDate = " + oldDate.toString();
}
if (this.log.isInfo()) this.log.logInfo("URL '" + urlstring + "' is double registered in '" + dbocc.toString() + "'. " + "Stack processing time:");
if (this.log.isInfo()) this.log.info("URL '" + urlstring + "' is double registered in '" + dbocc.toString() + "'. " + "Stack processing time:");
if (dbocc == HarvestProcess.ERRORS) {
final ZURL.Entry errorEntry = this.nextQueue.errorURL.get(url.hash());
return "double in: errors (" + errorEntry.anycause() + "), oldDate = " + oldDate.toString();
@ -454,12 +454,12 @@ public final class CrawlStacker {
if (maxAllowedPagesPerDomain < Integer.MAX_VALUE && maxAllowedPagesPerDomain > 0) {
final AtomicInteger dp = profile.getCount(url.getHost());
if (dp != null && dp.get() >= maxAllowedPagesPerDomain) {
if (this.log.isFine()) this.log.logFine("URL '" + urlstring + "' appeared too often in crawl stack, a maximum of " + maxAllowedPagesPerDomain + " is allowed.");
if (this.log.isFine()) this.log.fine("URL '" + urlstring + "' appeared too often in crawl stack, a maximum of " + maxAllowedPagesPerDomain + " is allowed.");
return "crawl stack domain counter exceeded";
}
if (ResultURLs.domainCount(EventOrigin.LOCAL_CRAWLING, url.getHost()) >= maxAllowedPagesPerDomain) {
if (this.log.isFine()) this.log.logFine("URL '" + urlstring + "' appeared too often in result stack, a maximum of " + maxAllowedPagesPerDomain + " is allowed.");
if (this.log.isFine()) this.log.fine("URL '" + urlstring + "' appeared too often in result stack, a maximum of " + maxAllowedPagesPerDomain + " is allowed.");
return "result stack domain counter exceeded";
}
}
@ -469,13 +469,13 @@ public final class CrawlStacker {
// filter with must-match for IPs
if ((depth > 0) && profile.ipMustMatchPattern() != CrawlProfile.MATCH_ALL_PATTERN && url.getHost() != null && !profile.ipMustMatchPattern().matcher(url.getInetAddress().getHostAddress()).matches()) {
if (this.log.isFine()) this.log.logFine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' does not match must-match crawling filter '" + profile.ipMustMatchPattern().toString() + "'.");
if (this.log.isFine()) this.log.fine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' does not match must-match crawling filter '" + profile.ipMustMatchPattern().toString() + "'.");
return "ip " + url.getInetAddress().getHostAddress() + " of url does not match must-match filter";
}
// filter with must-not-match for IPs
if ((depth > 0) && profile.ipMustNotMatchPattern() != CrawlProfile.MATCH_NEVER_PATTERN && url.getHost() != null && profile.ipMustNotMatchPattern().matcher(url.getInetAddress().getHostAddress()).matches()) {
if (this.log.isFine()) this.log.logFine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' matches must-not-match crawling filter '" + profile.ipMustNotMatchPattern().toString() + "'.");
if (this.log.isFine()) this.log.fine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' matches must-not-match crawling filter '" + profile.ipMustNotMatchPattern().toString() + "'.");
return "ip " + url.getInetAddress().getHostAddress() + " of url matches must-not-match filter";
}
@ -493,7 +493,7 @@ public final class CrawlStacker {
}
}
if (!granted) {
if (this.log.isFine()) this.log.logFine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' does not match must-match crawling filter '" + profile.ipMustMatchPattern().toString() + "'.");
if (this.log.isFine()) this.log.fine("IP " + url.getInetAddress().getHostAddress() + " of URL '" + urlstring + "' does not match must-match crawling filter '" + profile.ipMustMatchPattern().toString() + "'.");
return "country " + c0 + " of url does not match must-match filter for countries";
}
}
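
The CrawlStacker hunks above all follow one convention: every check returns a human-readable rejection reason as a String, or null if the URL passes, and the reason is both logged and handed back to the caller. A minimal sketch of that check style follows; the patterns and the helper name checkUrl are illustrative only and do not reproduce the actual YaCy filters.

    import java.util.regex.Pattern;

    // Sketch of the CrawlStacker check style: null means accepted,
    // any non-null String is the rejection reason.
    public final class UrlFilterDemo {
        private static final Pattern MUST_MATCH = Pattern.compile(".*");
        private static final Pattern MUST_NOT_MATCH = Pattern.compile(".*\\.(jpg|png)$");

        public static String checkUrl(String urlstring, int depth) {
            if (!urlstring.startsWith("http://") && !urlstring.startsWith("https://")) {
                return "unsupported protocol";
            }
            if (depth > 0 && !MUST_MATCH.matcher(urlstring).matches()) {
                return "url does not match must-match filter " + MUST_MATCH;
            }
            if (depth > 0 && MUST_NOT_MATCH.matcher(urlstring).matches()) {
                return "url matches must-not-match filter " + MUST_NOT_MATCH;
            }
            return null; // accepted
        }

        public static void main(String[] args) {
            System.out.println(checkUrl("https://example.org/page.html", 1)); // null -> accepted
            System.out.println(checkUrl("https://example.org/logo.png", 1));  // rejected
        }
    }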

@ -41,6 +41,7 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.order.NaturalOrder;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.crawler.data.CrawlQueues;
@ -50,7 +51,6 @@ import net.yacy.kelondro.blob.MapHeap;
import net.yacy.kelondro.data.meta.URIMetadataRow;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.util.kelondroException;
import net.yacy.search.Switchboard;
@ -90,7 +90,7 @@ public final class CrawlSwitchboard {
public static final long CRAWL_PROFILE_SNIPPET_GLOBAL_MEDIA_RECRAWL_CYCLE = 60L * 24L * 30L;
public static final long CRAWL_PROFILE_SURROGATE_RECRAWL_CYCLE = 60L * 24L * 30L;
private final Log log;
private final ConcurrentLog log;
private MapHeap profilesActiveCrawls;
private final MapHeap profilesPassiveCrawls;
private final Map<byte[], CrawlProfile> profilesActiveCrawlsCache; //TreeMap<byte[], DigestURI>(Base64Order.enhancedCoder);
@ -103,12 +103,12 @@ public final class CrawlSwitchboard {
public CrawlProfile defaultSurrogateProfile;
private final File queuesRoot;
public CrawlSwitchboard(final String networkName, final Log log, final File queuesRoot) {
public CrawlSwitchboard(final String networkName, final ConcurrentLog log, final File queuesRoot) {
log.logInfo("Initializing Word Index for the network '" + networkName + "'.");
log.info("Initializing Word Index for the network '" + networkName + "'.");
if ( networkName == null || networkName.isEmpty() ) {
log.logSevere("no network name given - shutting down");
log.severe("no network name given - shutting down");
System.exit(0);
}
this.log = log;
@ -118,7 +118,7 @@ public final class CrawlSwitchboard {
// make crawl profiles database and default profiles
this.queuesRoot = queuesRoot;
this.queuesRoot.mkdirs();
this.log.logConfig("Initializing Crawl Profiles");
this.log.config("Initializing Crawl Profiles");
final File profilesActiveFile = new File(queuesRoot, DBFILE_ACTIVE_CRAWL_PROFILES);
this.profilesActiveCrawls = loadFromDB(profilesActiveFile);
@ -136,7 +136,7 @@ public final class CrawlSwitchboard {
}
}
initActiveCrawlProfiles();
log.logInfo("Loaded active crawl profiles from file "
log.info("Loaded active crawl profiles from file "
+ profilesActiveFile.getName()
+ ", "
+ this.profilesActiveCrawls.size()
@ -148,14 +148,14 @@ public final class CrawlSwitchboard {
CrawlProfile p;
try {
p = new CrawlProfile(this.profilesPassiveCrawls.get(handle));
Log.logInfo("CrawlProfiles", "loaded Profile " + p.handle() + ": " + p.collectionName());
ConcurrentLog.info("CrawlProfiles", "loaded Profile " + p.handle() + ": " + p.collectionName());
} catch ( final IOException e ) {
continue;
} catch ( final SpaceExceededException e ) {
continue;
}
}
log.logInfo("Loaded passive crawl profiles from file "
log.info("Loaded passive crawl profiles from file "
+ profilesPassiveFile.getName()
+ ", "
+ this.profilesPassiveCrawls.size()
@ -486,7 +486,7 @@ public final class CrawlSwitchboard {
this.profilesActiveCrawls =
new MapHeap(pdb, Word.commonHashLength, NaturalOrder.naturalOrder, 1024 * 64, 500, ' ');
} catch ( final IOException e1 ) {
Log.logException(e1);
ConcurrentLog.logException(e1);
this.profilesActiveCrawls = null;
}
initActiveCrawlProfiles();
@ -593,14 +593,14 @@ public final class CrawlSwitchboard {
try {
ret = new MapHeap(file, Word.commonHashLength, NaturalOrder.naturalOrder, 1024 * 64, 500, ' ');
} catch ( final IOException e ) {
Log.logException(e);
Log.logException(e);
ConcurrentLog.logException(e);
ConcurrentLog.logException(e);
FileUtils.deletedelete(file);
try {
ret =
new MapHeap(file, Word.commonHashLength, NaturalOrder.naturalOrder, 1024 * 64, 500, ' ');
} catch ( final IOException e1 ) {
Log.logException(e1);
ConcurrentLog.logException(e1);
ret = null;
}
}
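
The loadFromDB change above keeps the existing recovery strategy: if the profile heap cannot be opened, the exception is logged, the damaged file is deleted, and the open is retried exactly once. A generic sketch of that open-with-one-retry pattern follows; the Store class is a placeholder for net.yacy.kelondro.blob.MapHeap and is assumed to throw IOException on a damaged file.

    import java.io.File;
    import java.io.IOException;

    // Sketch: open a store file, and if it is corrupt, delete it and retry once.
    public final class OpenWithRetryDemo {

        static final class Store {
            Store(File f) throws IOException {
                if (!f.exists() && !f.createNewFile()) throw new IOException("cannot create " + f);
            }
        }

        public static Store open(File file) {
            try {
                return new Store(file);
            } catch (IOException first) {
                first.printStackTrace();          // the diff logs this via ConcurrentLog.logException
                if (!file.delete()) return null;  // drop the damaged file
                try {
                    return new Store(file);       // second and last attempt on a fresh file
                } catch (IOException second) {
                    second.printStackTrace();
                    return null;                  // caller must cope with a null store
                }
            }
        }

        public static void main(String[] args) {
            Store s = open(new File("profiles.heap"));
            System.out.println(s != null ? "store opened" : "store unavailable");
        }
    }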

@ -45,6 +45,7 @@ import net.yacy.cora.document.ASCII;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.kelondro.blob.ArrayStack;
import net.yacy.kelondro.blob.Compressor;
@ -52,7 +53,6 @@ import net.yacy.kelondro.blob.MapHeap;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
public final class Cache {
@ -67,7 +67,7 @@ public final class Cache {
private static long maxCacheSize = Long.MAX_VALUE;
private static File cachePath = null;
private static String prefix;
public static final Log log = new Log("HTCACHE");
public static final ConcurrentLog log = new ConcurrentLog("HTCACHE");
public static void init(final File htCachePath, final String peerSalt, final long CacheSizeMax) {
@ -85,7 +85,7 @@ public final class Cache {
try {
responseHeaderDB = new MapHeap(dbfile, Word.commonHashLength, Base64Order.enhancedCoder, 1024 * 1024, 100, ' ');
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
// open the cache file
try {
@ -93,14 +93,14 @@ public final class Cache {
fileDBunbuffered.setMaxSize(maxCacheSize);
fileDB = new Compressor(fileDBunbuffered, 6 * 1024 * 1024);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
Log.logInfo("Cache", "initialized cache database responseHeaderDB.size() = " + responseHeaderDB.size() + ", fileDB.size() = " + fileDB.size());
ConcurrentLog.info("Cache", "initialized cache database responseHeaderDB.size() = " + responseHeaderDB.size() + ", fileDB.size() = " + fileDB.size());
// clean up the responseHeaderDB which cannot be cleaned the same way as the cache files.
// We do this as a concurrent job only once after start-up silently
if (responseHeaderDB.size() != fileDB.size()) {
Log.logWarning("Cache", "file and metadata size is not equal, starting a cleanup thread...");
ConcurrentLog.warn("Cache", "file and metadata size is not equal, starting a cleanup thread...");
Thread startupCleanup = new Thread() {
@Override
public void run() {
@ -108,7 +108,7 @@ public final class Cache {
// enumerate the responseHeaderDB and find out all entries that are not inside the fileDBunbuffered
BlockingQueue<byte[]> q = responseHeaderDB.keyQueue(1000);
final HandleSet delkeys = new RowHandleSet(Word.commonHashLength, Base64Order.enhancedCoder, 1);
Log.logInfo("Cache", "started cleanup thread to remove unused cache metadata");
ConcurrentLog.info("Cache", "started cleanup thread to remove unused cache metadata");
try {
byte[] k;
while (((k = q.take()) != MapHeap.POISON_QUEUE_ENTRY)) {
@ -117,7 +117,7 @@ public final class Cache {
} catch (InterruptedException e) {
} finally {
// delete the collected keys from the metadata
Log.logInfo("Cache", "cleanup thread collected " + delkeys.size() + " unused metadata entries; now deleting them from the file...");
ConcurrentLog.info("Cache", "cleanup thread collected " + delkeys.size() + " unused metadata entries; now deleting them from the file...");
for (byte[] k: delkeys) {
try {
responseHeaderDB.delete(k);
@ -126,19 +126,19 @@ public final class Cache {
}
}
Log.logInfo("Cache", "running check to remove unused file cache data");
ConcurrentLog.info("Cache", "running check to remove unused file cache data");
delkeys.clear();
for (byte[] k: fileDB) {
if (!responseHeaderDB.containsKey(k)) try { delkeys.put(k); } catch (SpaceExceededException e) { break; }
}
Log.logInfo("Cache", "cleanup thread collected " + delkeys.size() + " unused cache entries; now deleting them from the file...");
ConcurrentLog.info("Cache", "cleanup thread collected " + delkeys.size() + " unused cache entries; now deleting them from the file...");
for (byte[] k: delkeys) {
try {
fileDB.delete(k);
} catch (IOException e) {
}
}
Log.logInfo("Cache", "terminated cleanup thread; responseHeaderDB.size() = " + responseHeaderDB.size() + ", fileDB.size() = " + fileDB.size());
ConcurrentLog.info("Cache", "terminated cleanup thread; responseHeaderDB.size() = " + responseHeaderDB.size() + ", fileDB.size() = " + fileDB.size());
}
};
startupCleanup.start();
@ -157,12 +157,12 @@ public final class Cache {
try {
fileDB.clear();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
try {
fileDBunbuffered.clear();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -195,7 +195,7 @@ public final class Cache {
if (maxCacheSize == 0) return;
if (responseHeader == null) throw new IOException("Cache.store of url " + url.toString() + " not possible: responseHeader == null");
if (file == null) throw new IOException("Cache.store of url " + url.toString() + " not possible: file == null");
log.logInfo("storing content of url " + url.toString() + ", " + file.length + " bytes");
log.info("storing content of url " + url.toString() + ", " + file.length + " bytes");
// store the file
try {
@ -216,7 +216,7 @@ public final class Cache {
fileDB.delete(url.hash());
throw new IOException("Cache.store: cannot write to headerDB: " + e.getMessage());
}
if (log.isFine()) log.logFine("stored in cache: " + url.toNormalform(true));
if (log.isFine()) log.fine("stored in cache: " + url.toNormalform(true));
}
/**
@ -235,7 +235,7 @@ public final class Cache {
if (!headerExists && !fileExists) return false;
// if not both are there then we do a clean-up
if (headerExists) try {
log.logWarning("header but not content of urlhash " + ASCII.String(urlhash) + " in cache; cleaned up");
log.warn("header but not content of urlhash " + ASCII.String(urlhash) + " in cache; cleaned up");
responseHeaderDB.delete(urlhash);
} catch (final IOException e) {}
if (fileExists) try {
@ -285,16 +285,16 @@ public final class Cache {
if (b == null) return null;
return b;
} catch (final UnsupportedEncodingException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
} catch (final OutOfMemoryError e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
}
}
@ -304,7 +304,7 @@ public final class Cache {
try {
return fileDB.containsKey(hash);
} catch (final OutOfMemoryError e) {
Log.logException(e);
ConcurrentLog.logException(e);
return false;
}
}
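
The Cache hunks above include a start-up consistency check: if the metadata index and the content store disagree in size, a background thread collects the orphaned keys on each side and deletes them. The sketch below shows that two-pass cleanup with plain HashMaps standing in for responseHeaderDB (a MapHeap) and fileDB (a Compressor); the class name and map types are assumptions for illustration.

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    // Sketch: reconcile a metadata index and a content store in a background thread.
    public final class CacheCleanupDemo {

        public static void cleanup(Map<String, String> metadata, Map<String, byte[]> content) {
            if (metadata.size() == content.size()) return; // nothing to repair

            Thread startupCleanup = new Thread("Cache cleanup") {
                @Override public void run() {
                    // pass 1: metadata entries without a content entry
                    Set<String> orphanedMeta = new HashSet<>();
                    for (String key : metadata.keySet()) {
                        if (!content.containsKey(key)) orphanedMeta.add(key);
                    }
                    orphanedMeta.forEach(metadata::remove);

                    // pass 2: content entries without a metadata entry
                    Set<String> orphanedContent = new HashSet<>();
                    for (String key : content.keySet()) {
                        if (!metadata.containsKey(key)) orphanedContent.add(key);
                    }
                    orphanedContent.forEach(content::remove);

                    System.out.println("cleanup done: metadata=" + metadata.size()
                            + ", content=" + content.size());
                }
            };
            startupCleanup.start(); // runs concurrently; normal cache operation continues
        }

        public static void main(String[] args) {
            Map<String, String> meta = new HashMap<>();
            Map<String, byte[]> data = new HashMap<>();
            meta.put("a", "header");     // orphaned metadata: no content for "a"
            data.put("b", new byte[0]);  // orphaned content: no metadata for "b"
            cleanup(meta, data);
        }
    }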

@ -39,9 +39,9 @@ import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.order.Digest;
import net.yacy.cora.util.CommonPattern;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.CrawlSwitchboard;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.logging.Log;
import net.yacy.server.serverObjects;
public class CrawlProfile extends ConcurrentHashMap<String, String> implements Map<String, String> {
@ -430,7 +430,7 @@ public class CrawlProfile extends ConcurrentHashMap<String, String> implements M
try {
return Integer.parseInt(r);
} catch (final NumberFormatException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return 0;
}
}
@ -447,7 +447,7 @@ public class CrawlProfile extends ConcurrentHashMap<String, String> implements M
try {
return CacheStrategy.decode(Integer.parseInt(r));
} catch (final NumberFormatException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return CacheStrategy.IFEXIST;
}
}
@ -469,7 +469,7 @@ public class CrawlProfile extends ConcurrentHashMap<String, String> implements M
final long l = Long.parseLong(r);
return (l < 0) ? 0L : l;
} catch (final NumberFormatException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return 0L;
}
}
@ -484,7 +484,7 @@ public class CrawlProfile extends ConcurrentHashMap<String, String> implements M
if (i < 0) return Integer.MAX_VALUE;
return i;
} catch (final NumberFormatException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return Integer.MAX_VALUE;
}
}

@ -43,6 +43,7 @@ import net.yacy.cora.federate.yacy.CacheStrategy;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.protocol.ConnectionInfo;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.HarvestProcess;
import net.yacy.crawler.data.NoticedURL.StackType;
import net.yacy.crawler.data.ZURL.FailCategory;
@ -50,7 +51,6 @@ import net.yacy.crawler.retrieval.Request;
import net.yacy.crawler.retrieval.Response;
import net.yacy.crawler.robots.RobotsTxtEntry;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.kelondro.workflow.WorkflowJob;
import net.yacy.peers.DHTSelection;
@ -68,7 +68,7 @@ public class CrawlQueues {
private static final String DELEGATED_DB_FILENAME = "urlDelegated4.db";
private Switchboard sb;
private Log log;
private ConcurrentLog log;
private Map<Integer, Loader> workers; // mapping from url hash to Worker thread object
private final ArrayList<String> remoteCrawlProviderHashes;
@ -77,12 +77,12 @@ public class CrawlQueues {
public CrawlQueues(final Switchboard sb, final File queuePath) {
this.sb = sb;
this.log = new Log("CRAWLER");
this.log = new ConcurrentLog("CRAWLER");
this.workers = new ConcurrentHashMap<Integer, Loader>();
this.remoteCrawlProviderHashes = new ArrayList<String>();
// start crawling management
this.log.logConfig("Starting Crawling Management");
this.log.config("Starting Crawling Management");
this.noticeURL = new NoticedURL(queuePath, sb.peers.myBotIDs(), sb.useTailCache, sb.exceed134217727);
FileUtils.deletedelete(new File(queuePath, ERROR_DB_FILENAME));
this.errorURL = new ZURL(sb.index.fulltext(), queuePath, ERROR_DB_FILENAME, false, sb.useTailCache, sb.exceed134217727);
@ -110,7 +110,7 @@ public class CrawlQueues {
try {
w.join();
} catch (final InterruptedException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
this.noticeURL.close();
@ -128,12 +128,12 @@ public class CrawlQueues {
try {
this.errorURL.clear();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
try {
this.delegatedURL.clear();
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -229,7 +229,7 @@ public class CrawlQueues {
for (int i = 0; i < toshift; i++) {
this.noticeURL.shift(NoticedURL.StackType.GLOBAL, NoticedURL.StackType.LOCAL, this.sb.crawler, this.sb.robots);
}
this.log.logInfo("shifted " + toshift + " jobs from global crawl to local crawl (coreCrawlJobSize()=" + coreCrawlJobSize() +
this.log.info("shifted " + toshift + " jobs from global crawl to local crawl (coreCrawlJobSize()=" + coreCrawlJobSize() +
", limitCrawlJobSize()=" + limitCrawlJobSize() + ", cluster.mode=" + this.sb.getConfig(SwitchboardConstants.CLUSTER_MODE, "") +
", robinsonMode=" + ((this.sb.isRobinsonMode()) ? "on" : "off"));
}
@ -238,14 +238,14 @@ public class CrawlQueues {
final String queueCheckNoload = loadIsPossible(NoticedURL.StackType.NOLOAD);
if (queueCheckCore != null && queueCheckNoload != null) {
if (this.log.isFine()) {
this.log.logFine("omitting de-queue/local: " + queueCheckCore + ":" + queueCheckNoload);
this.log.fine("omitting de-queue/local: " + queueCheckCore + ":" + queueCheckNoload);
}
return false;
}
if (isPaused(SwitchboardConstants.CRAWLJOB_LOCAL_CRAWL)) {
if (this.log.isFine()) {
this.log.logFine("omitting de-queue/local: paused");
this.log.fine("omitting de-queue/local: paused");
}
return false;
}
@ -267,16 +267,16 @@ public class CrawlQueues {
}
final String profileHandle = urlEntry.profileHandle();
if (profileHandle == null) {
this.log.logSevere(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
this.log.severe(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
return true;
}
final CrawlProfile profile = this.sb.crawler.getActive(ASCII.getBytes(profileHandle));
if (profile == null) {
this.log.logSevere(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
this.log.severe(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
return true;
}
this.sb.indexingDocumentProcessor.enQueue(new IndexingQueueEntry(new Response(urlEntry, profile), null, null));
Log.logInfo("CrawlQueues", "placed NOLOAD URL on indexing queue: " + urlEntry.url().toNormalform(true));
ConcurrentLog.info("CrawlQueues", "placed NOLOAD URL on indexing queue: " + urlEntry.url().toNormalform(true));
return true;
}
@ -288,13 +288,13 @@ public class CrawlQueues {
// System.out.println("DEBUG plasmaSwitchboard.processCrawling:
// profileHandle = " + profileHandle + ", urlEntry.url = " + urlEntry.url());
if (profileHandle == null) {
this.log.logSevere(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
this.log.severe(stats + ": NULL PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
return true;
}
load(urlEntry, stats, profileHandle);
return true;
} catch (final IOException e) {
this.log.logSevere(stats + ": CANNOT FETCH ENTRY: " + e.getMessage(), e);
this.log.severe(stats + ": CANNOT FETCH ENTRY: " + e.getMessage(), e);
if (e.getMessage().indexOf("hash is null",0) > 0) {
this.noticeURL.clear(NoticedURL.StackType.LOCAL);
}
@ -320,7 +320,7 @@ public class CrawlQueues {
final String urlProtocol = url.getProtocol();
if (this.sb.loader.isSupportedProtocol(urlProtocol)) {
if (this.log.isFine()) {
this.log.logFine(stats + ": URL=" + urlEntry.url()
this.log.fine(stats + ": URL=" + urlEntry.url()
+ ", initiator=" + ((urlEntry.initiator() == null) ? "" : ASCII.String(urlEntry.initiator()))
+ ", crawlOrder=" + ((profile.remoteIndexing()) ? "true" : "false")
+ ", depth=" + urlEntry.depth()
@ -332,7 +332,7 @@ public class CrawlQueues {
// work off one Crawl stack entry
if (urlEntry == null || urlEntry.url() == null) {
this.log.logInfo(stats + ": urlEntry = null");
this.log.info(stats + ": urlEntry = null");
} else {
if (!this.workers.containsKey(Integer.valueOf(urlEntry.hashCode()))) {
Loader loader = new Loader(urlEntry);
@ -340,17 +340,17 @@ public class CrawlQueues {
try {
loader.start();
} catch (final OutOfMemoryError e) {
Log.logWarning("CrawlQueues", "crawlWorker sequential fail-over: " + e.getMessage());
ConcurrentLog.warn("CrawlQueues", "crawlWorker sequential fail-over: " + e.getMessage());
loader.run();
}
}
}
} else {
this.log.logSevere("Unsupported protocol in URL '" + url.toString());
this.log.severe("Unsupported protocol in URL '" + url.toString());
}
} else {
if (this.log.isFine()) this.log.logFine(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
if (this.log.isFine()) this.log.fine(stats + ": LOST PROFILE HANDLE '" + urlEntry.profileHandle() + "' for URL " + urlEntry.url());
}
}
@ -424,7 +424,7 @@ public class CrawlQueues {
// check again
if (this.workers.size() >= this.sb.getConfigLong(SwitchboardConstants.CRAWLER_THREADS_ACTIVE_MAX, 20)) {
if (this.log.isFine()) {
this.log.logFine("remoteCrawlLoaderJob: too many processes in loader queue, dismissed (" + "cacheLoader=" + this.workers.size() + "), httpClients = " + ConnectionInfo.getCount());
this.log.fine("remoteCrawlLoaderJob: too many processes in loader queue, dismissed (" + "cacheLoader=" + this.workers.size() + "), httpClients = " + ConnectionInfo.getCount());
}
return false;
}
@ -432,21 +432,21 @@ public class CrawlQueues {
final String cautionCause = this.sb.onlineCaution();
if (cautionCause != null) {
if (this.log.isFine()) {
this.log.logFine("remoteCrawlLoaderJob: online caution for " + cautionCause + ", omitting processing");
this.log.fine("remoteCrawlLoaderJob: online caution for " + cautionCause + ", omitting processing");
}
return false;
}
if (remoteTriggeredCrawlJobSize() > 200) {
if (this.log.isFine()) {
this.log.logFine("remoteCrawlLoaderJob: the remote-triggered crawl job queue is filled, omitting processing");
this.log.fine("remoteCrawlLoaderJob: the remote-triggered crawl job queue is filled, omitting processing");
}
return false;
}
if (coreCrawlJobSize() > 0 /*&& sb.indexingStorageProcessor.queueSize() > 0*/) {
if (this.log.isFine()) {
this.log.logFine("remoteCrawlLoaderJob: a local crawl is running, omitting processing");
this.log.fine("remoteCrawlLoaderJob: a local crawl is running, omitting processing");
}
return false;
}
@ -522,7 +522,7 @@ public class CrawlQueues {
if (urlRejectReason == null) {
// stack url
if (this.sb.getLog().isFinest()) {
this.sb.getLog().logFinest("crawlOrder: stack: url='" + url + "'");
this.sb.getLog().finest("crawlOrder: stack: url='" + url + "'");
}
this.sb.crawlStacker.enqueueEntry(new Request(
ASCII.getBytes(hash),
@ -537,7 +537,7 @@ public class CrawlQueues {
item.getSize()
));
} else {
this.log.logWarning("crawlOrder: Rejected URL '" + urlToString(url) + "': " + urlRejectReason);
this.log.warn("crawlOrder: Rejected URL '" + urlToString(url) + "': " + urlRejectReason);
}
}
return true;
@ -571,14 +571,14 @@ public class CrawlQueues {
final String queueCheck = loadIsPossible(NoticedURL.StackType.REMOTE);
if (queueCheck != null) {
if (this.log.isFinest()) {
this.log.logFinest("omitting de-queue/remote: " + queueCheck);
this.log.finest("omitting de-queue/remote: " + queueCheck);
}
return false;
}
if (isPaused(SwitchboardConstants.CRAWLJOB_REMOTE_TRIGGERED_CRAWL)) {
if (this.log.isFinest()) {
this.log.logFinest("omitting de-queue/remote: paused");
this.log.finest("omitting de-queue/remote: paused");
}
return false;
}
@ -595,7 +595,7 @@ public class CrawlQueues {
load(urlEntry, stats, profileHandle);
return true;
} catch (final IOException e) {
this.log.logSevere(stats + ": CANNOT FETCH ENTRY: " + e.getMessage(), e);
this.log.severe(stats + ": CANNOT FETCH ENTRY: " + e.getMessage(), e);
if (e.getMessage().indexOf("hash is null",0) > 0) {
this.noticeURL.clear(NoticedURL.StackType.REMOTE);
}
@ -657,7 +657,7 @@ public class CrawlQueues {
if (response == null) {
this.request.setStatus("error", WorkflowJob.STATUS_FINISHED);
if (CrawlQueues.this.log.isFine()) {
CrawlQueues.this.log.logFine("problem loading " + this.request.url().toString() + ": no content (possibly caused by cache policy)");
CrawlQueues.this.log.fine("problem loading " + this.request.url().toString() + ": no content (possibly caused by cache policy)");
}
result = "no content (possibly caused by cache policy)";
} else {
@ -669,7 +669,7 @@ public class CrawlQueues {
} catch (final IOException e) {
this.request.setStatus("error", WorkflowJob.STATUS_FINISHED);
if (CrawlQueues.this.log.isFine()) {
CrawlQueues.this.log.logFine("problem loading " + this.request.url().toString() + ": " + e.getMessage());
CrawlQueues.this.log.fine("problem loading " + this.request.url().toString() + ": " + e.getMessage());
}
result = "load error - " + e.getMessage();
}
@ -695,7 +695,7 @@ public class CrawlQueues {
1,
FailCategory.TEMPORARY_NETWORK_FAILURE,
e.getMessage() + " - in worker", -1);
Log.logException(e);
ConcurrentLog.logException(e);
this.request.setStatus("worker-exception", WorkflowJob.STATUS_FINISHED);
} finally {
CrawlQueues.this.workers.remove(this.code);

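One detail worth noting in the CrawlQueues hunks above: when a new Loader thread cannot be started because of an OutOfMemoryError, the crawler falls back to running the job sequentially in the calling thread instead of dropping it. A stripped-down sketch of that fail-over follows; the Job class and submit method are illustrative, not the actual Loader API.

    // Sketch of the crawlWorker fail-over: prefer a new thread, but if thread
    // creation fails with OutOfMemoryError, run the job inline.
    public final class SequentialFailoverDemo {

        static final class Job extends Thread {
            private final String url;
            Job(String url) { this.url = url; }
            @Override public void run() {
                System.out.println("loading " + url + " on " + Thread.currentThread().getName());
            }
        }

        public static void submit(String url) {
            Job loader = new Job(url);
            try {
                loader.start();             // normal case: load concurrently
            } catch (OutOfMemoryError e) {
                // no memory for a new thread stack: degrade gracefully and run
                // the same job in the current thread instead of dropping it
                loader.run();
            }
        }

        public static void main(String[] args) {
            submit("https://example.org/");
        }
    }
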
@ -37,13 +37,13 @@ import java.util.Set;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.storage.HandleSet;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.Balancer;
import net.yacy.crawler.CrawlSwitchboard;
import net.yacy.crawler.retrieval.Request;
import net.yacy.crawler.robots.RobotsTxt;
import net.yacy.kelondro.index.RowHandleSet;
import net.yacy.kelondro.logging.Log;
public class NoticedURL {
@ -64,7 +64,7 @@ public class NoticedURL {
final Set<String> myAgentIDs,
final boolean useTailCache,
final boolean exceed134217727) {
Log.logInfo("NoticedURL", "CREATING STACKS at " + cachePath.toString());
ConcurrentLog.info("NoticedURL", "CREATING STACKS at " + cachePath.toString());
this.coreStack = new Balancer(cachePath, "urlNoticeCoreStack", minimumLocalDeltaInit, minimumGlobalDeltaInit, myAgentIDs, useTailCache, exceed134217727);
this.limitStack = new Balancer(cachePath, "urlNoticeLimitStack", minimumLocalDeltaInit, minimumGlobalDeltaInit, myAgentIDs, useTailCache, exceed134217727);
//overhangStack = new plasmaCrawlBalancer(overhangStackFile);
@ -88,7 +88,7 @@ public class NoticedURL {
}
public void clear() {
Log.logInfo("NoticedURL", "CLEARING ALL STACKS");
ConcurrentLog.info("NoticedURL", "CLEARING ALL STACKS");
if (this.coreStack != null) this.coreStack.clear();
if (this.limitStack != null) this.limitStack.clear();
if (this.remoteStack != null) this.remoteStack.clear();
@ -96,7 +96,7 @@ public class NoticedURL {
}
protected void close() {
Log.logInfo("NoticedURL", "CLOSING ALL STACKS");
ConcurrentLog.info("NoticedURL", "CLOSING ALL STACKS");
if (this.coreStack != null) {
this.coreStack.close();
this.coreStack = null;
@ -119,7 +119,7 @@ public class NoticedURL {
@Override
protected void finalize() throws Throwable {
if ((this.coreStack != null) || (this.limitStack != null) || (this.remoteStack != null)) {
Log.logWarning("plasmaCrawlNURL", "NURL stack closed by finalizer");
ConcurrentLog.warn("plasmaCrawlNURL", "NURL stack closed by finalizer");
close();
}
super.finalize();
@ -182,7 +182,7 @@ public class NoticedURL {
default: return "stack type unknown";
}
} catch (final Exception er) {
Log.logException(er);
ConcurrentLog.logException(er);
return "error pushing onto the crawl stack: " + er.getMessage();
}
}
@ -213,7 +213,7 @@ public class NoticedURL {
try {ret |= this.remoteStack.remove(urlHashes) > 0;} catch (final IOException e) {}
return ret;
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return false;
}
}
@ -273,7 +273,7 @@ public class NoticedURL {
if (entry != null) {
final String warning = push(toStack, entry, robots);
if (warning != null) {
Log.logWarning("NoticedURL", "shift from " + fromStack + " to " + toStack + ": " + warning);
ConcurrentLog.warn("NoticedURL", "shift from " + fromStack + " to " + toStack + ": " + warning);
}
}
} catch (final IOException e) {
@ -282,7 +282,7 @@ public class NoticedURL {
}
public void clear(final StackType stackType) {
Log.logInfo("NoticedURL", "CLEARING STACK " + stackType);
ConcurrentLog.info("NoticedURL", "CLEARING STACK " + stackType);
switch (stackType) {
case LOCAL: this.coreStack.clear(); break;
case GLOBAL: this.limitStack.clear(); break;
@ -305,7 +305,7 @@ public class NoticedURL {
if (errors < 100) continue;
final int aftersize = balancer.size();
balancer.clear(); // the balancer is broken and cannot shrink
Log.logWarning("BALANCER", "entry is null, balancer cannot shrink (bevore pop = " + s + ", after pop = " + aftersize + "); reset of balancer");
ConcurrentLog.warn("BALANCER", "entry is null, balancer cannot shrink (bevore pop = " + s + ", after pop = " + aftersize + "); reset of balancer");
}
return entry;
}
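Places that formerly called the static helpers on Log now call the same-named, tagged helpers on ConcurrentLog, as in the NoticedURL hunks above. A small sketch of that static style follows; the surrounding class and method are illustrative only.

import net.yacy.cora.util.ConcurrentLog;

public class StackMaintenanceSketch {
    public void clearAll(final Runnable clearAction) {
        // static helpers take the component tag as first argument
        ConcurrentLog.info("NoticedURL", "CLEARING ALL STACKS");
        try {
            clearAction.run();
        } catch (final Exception e) {
            // logException keeps its name and signature from the old Log class
            ConcurrentLog.logException(e);
        }
    }
}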
@ -41,13 +41,13 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.federate.solr.FailType;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.order.NaturalOrder;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.retrieval.Request;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.word.Word;
import net.yacy.kelondro.index.Index;
import net.yacy.kelondro.index.Row;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.table.SplitTable;
import net.yacy.kelondro.table.Table;
import net.yacy.kelondro.util.FileUtils;
@ -55,7 +55,7 @@ import net.yacy.search.index.Fulltext;
public class ZURL implements Iterable<ZURL.Entry> {
private static Log log = new Log("REJECTED");
private static ConcurrentLog log = new ConcurrentLog("REJECTED");
private static final int EcoFSBufferSize = 2000;
private static final int maxStackSize = 1000;
@ -115,7 +115,7 @@ public class ZURL implements Iterable<ZURL.Entry> {
try {
this.urlIndex = new Table(f, rowdef, 0, 0, false, exceed134217727, true);
} catch (final SpaceExceededException e1) {
Log.logException(e1);
ConcurrentLog.logException(e1);
}
}
//urlIndex = new kelondroFlexTable(cachePath, tablename, -1, rowdef, 0, true);
@ -186,14 +186,14 @@ public class ZURL implements Iterable<ZURL.Entry> {
final Entry entry = new Entry(bentry, executor, workdate, workcount, reason);
put(entry);
this.stack.add(entry.hash());
if (!reason.startsWith("double")) log.logInfo(bentry.url().toNormalform(true) + " - " + reason);
if (!reason.startsWith("double")) log.info(bentry.url().toNormalform(true) + " - " + reason);
if (this.fulltext.getDefaultConnector() != null && failCategory.store) {
// send the error to solr
try {
SolrInputDocument errorDoc = this.fulltext.getDefaultConfiguration().err(bentry.url(), failCategory.name() + " " + reason, failCategory.failType, httpcode);
this.fulltext.getDefaultConnector().add(errorDoc);
} catch (final IOException e) {
Log.logWarning("SOLR", "failed to send error " + bentry.url().toNormalform(true) + " to solr: " + e.getMessage());
ConcurrentLog.warn("SOLR", "failed to send error " + bentry.url().toNormalform(true) + " to solr: " + e.getMessage());
}
}
while (this.stack.size() > maxStackSize) this.stack.poll();
@ -247,7 +247,7 @@ public class ZURL implements Iterable<ZURL.Entry> {
if (entry == null) return null;
return new Entry(entry);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
}
}
@ -271,7 +271,7 @@ public class ZURL implements Iterable<ZURL.Entry> {
if (this.urlIndex != null) this.urlIndex.put(newrow);
entry.stored = true;
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
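ZURL above keeps its logger as a class field; only the field type and constructor change. A short sketch of that field-based style, with an illustrative class and method:

import net.yacy.cora.util.ConcurrentLog;

public class RejectedUrlSketch {
    // the named logger is created once and shared by all instances, as in ZURL
    private static final ConcurrentLog log = new ConcurrentLog("REJECTED");

    public void report(final String url, final String reason) {
        // same filter as above: "double" entries are not worth a log line
        if (!reason.startsWith("double")) log.info(url + " - " + reason);
    }
}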
@ -39,12 +39,12 @@ import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.protocol.ftp.FTPClient;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.crawler.data.Latency;
import net.yacy.crawler.data.ZURL.FailCategory;
import net.yacy.document.TextParser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
public class FTPLoader {
@ -52,10 +52,10 @@ public class FTPLoader {
public static final long DEFAULT_MAXFILESIZE = 1024 * 1024 * 10;
private final Switchboard sb;
private final Log log;
private final ConcurrentLog log;
private final long maxFileSize;
public FTPLoader(final Switchboard sb, final Log log) {
public FTPLoader(final Switchboard sb, final ConcurrentLog log) {
this.sb = sb;
this.log = log;
this.maxFileSize = sb.getConfigLong("crawler.ftp.maxFileSize", -1l);
@ -145,7 +145,7 @@ public class FTPLoader {
response = getFile(ftpClient, request, acceptOnlyParseable);
} catch (final Exception e) {
// add message to errorLog
Log.logException(e);
ConcurrentLog.logException(e);
(new PrintStream(berr)).print(e.getMessage());
}
}
@ -242,9 +242,9 @@ public class FTPLoader {
// only the metadata is returned
if (parserError != null) {
this.log.logInfo("No parser available in FTP crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
this.log.info("No parser available in FTP crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
} else {
this.log.logInfo("Too big file in FTP crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
this.log.info("Too big file in FTP crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
}
// create response with metadata only
@ -38,20 +38,20 @@ import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.protocol.ftp.FTPClient;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.document.TextParser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.search.Switchboard;
public class FileLoader {
private final Switchboard sb;
private final Log log;
private final ConcurrentLog log;
private final int maxFileSize;
public FileLoader(final Switchboard sb, final Log log) {
public FileLoader(final Switchboard sb, final ConcurrentLog log) {
this.sb = sb;
this.log = log;
this.maxFileSize = (int) sb.getConfigLong("crawler.file.maxFileSize", -1l);
@ -115,9 +115,9 @@ public class FileLoader {
// only the metadata is returned
if (parserError != null) {
this.log.logInfo("No parser available in File crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
this.log.info("No parser available in File crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
} else {
this.log.logInfo("Too big file in File crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
this.log.info("Too big file in File crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
}
// create response with metadata only
@ -33,13 +33,13 @@ import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.protocol.http.HTTPClient;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.Cache;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.crawler.data.Latency;
import net.yacy.crawler.data.ZURL.FailCategory;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.io.ByteCount;
import net.yacy.kelondro.logging.Log;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.search.SwitchboardConstants;
@ -60,9 +60,9 @@ public final class HTTPLoader {
*/
private final int socketTimeout;
private final Switchboard sb;
private final Log log;
private final ConcurrentLog log;
public HTTPLoader(final Switchboard sb, final Log theLog) {
public HTTPLoader(final Switchboard sb, final ConcurrentLog theLog) {
this.sb = sb;
this.log = theLog;
@ -154,8 +154,8 @@ public final class HTTPLoader {
final DigestURI redirectionUrl = DigestURI.newURL(request.url(), redirectionUrlString);
// restart crawling with new url
this.log.logInfo("CRAWLER Redirection detected ('" + client.getHttpResponse().getStatusLine() + "') for URL " + requestURLString);
this.log.logInfo("CRAWLER ..Redirecting request to: " + redirectionUrl);
this.log.info("CRAWLER Redirection detected ('" + client.getHttpResponse().getStatusLine() + "') for URL " + requestURLString);
this.log.info("CRAWLER ..Redirecting request to: " + redirectionUrl);
this.sb.webStructure.generateCitationReference(url, redirectionUrl);
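The loader classes above (FTPLoader, FileLoader, HTTPLoader) receive their logger through the constructor, so only the parameter and field types change from Log to ConcurrentLog. A sketch of that injection style; the class name and method below are illustrative:

import net.yacy.cora.util.ConcurrentLog;

public class LoaderSketch {
    private final ConcurrentLog log;

    public LoaderSketch(final ConcurrentLog log) {
        // callers hand in an existing logger instead of the loader creating its own
        this.log = log;
    }

    public void reportRedirect(final String requestURLString, final String redirectionUrl) {
        this.log.info("CRAWLER Redirection detected for URL " + requestURLString);
        this.log.info("CRAWLER ..Redirecting request to: " + redirectionUrl);
    }
}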
@ -42,13 +42,13 @@ import net.yacy.cora.order.Base64Order;
import net.yacy.cora.protocol.ClientIdentification;
import net.yacy.cora.storage.ARC;
import net.yacy.cora.storage.ComparableARC;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.crawler.HarvestProcess;
import net.yacy.crawler.data.CrawlQueues;
import net.yacy.data.WorkTables;
import net.yacy.kelondro.blob.Tables;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.repository.Blacklist.BlacklistType;
import net.yacy.search.Switchboard;
import net.yacy.server.serverObjects;
@ -75,14 +75,14 @@ public class RSSLoader extends Thread {
final byte[] resource = response == null ? null : response.getContent();
rss = resource == null ? null : RSSReader.parse(RSSFeed.DEFAULT_MAXSIZE, resource);
} catch (final MalformedURLException e) {
Log.logWarning("Load_RSS", "rss loading for url '" + getName().substring(9) + "' failed: " + e.getMessage());
ConcurrentLog.warn("Load_RSS", "rss loading for url '" + getName().substring(9) + "' failed: " + e.getMessage());
return;
} catch (final IOException e) {
Log.logWarning("Load_RSS", "rss loading for url '" + this.urlf.toNormalform(true) + "' failed: " + e.getMessage());
ConcurrentLog.warn("Load_RSS", "rss loading for url '" + this.urlf.toNormalform(true) + "' failed: " + e.getMessage());
return;
}
if (rss == null) {
Log.logWarning("Load_RSS", "no rss for url " + this.urlf.toNormalform(true));
ConcurrentLog.warn("Load_RSS", "no rss for url " + this.urlf.toNormalform(true));
return;
}
final RSSFeed feed = rss.getFeed();
@ -102,7 +102,7 @@ public class RSSLoader extends Thread {
if (indexTriggered.containsKey(messageurl.hash())) continue;
urlmap.put(ASCII.String(messageurl.hash()), messageurl);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
Map<String, HarvestProcess> existingids = sb.urlExists(urlmap.keySet());
@ -132,9 +132,9 @@ public class RSSLoader extends Thread {
rssRow.put("avg_upd_per_day", nextAvg);
sb.tables.update("rss", url.hash(), rssRow);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
@ -162,7 +162,7 @@ public class RSSLoader extends Thread {
try {
sb.tables.update("rss", url.hash(), rssRow);
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -46,10 +46,10 @@ import net.yacy.cora.protocol.HeaderFramework;
import net.yacy.cora.protocol.RequestHeader;
import net.yacy.cora.protocol.ResponseHeader;
import net.yacy.cora.protocol.ftp.FTPClient;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.document.TextParser;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.FileUtils;
import net.yacy.search.Switchboard;
@ -58,10 +58,10 @@ public class SMBLoader {
public static final long DEFAULT_MAXFILESIZE = 1024 * 1024 * 10;
private final Switchboard sb;
private final Log log;
private final ConcurrentLog log;
private final long maxFileSize;
public SMBLoader(final Switchboard sb, final Log log) {
public SMBLoader(final Switchboard sb, final ConcurrentLog log) {
this.sb = sb;
this.log = log;
this.maxFileSize = sb.getConfigLong("crawler.smb.maxFileSize", -1l);
@ -133,9 +133,9 @@ public class SMBLoader {
// only the metadata is returned
if (parserError != null) {
this.log.logInfo("No parser available in SMB crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
this.log.info("No parser available in SMB crawler: '" + parserError + "' for URL " + request.url().toString() + ": parsing only metadata");
} else {
this.log.logInfo("Too big file in SMB crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
this.log.info("Too big file in SMB crawler with size = " + size + " Bytes for URL " + request.url().toString() + ": parsing only metadata");
}
// create response with metadata only
@ -186,13 +186,13 @@ public class SMBLoader {
}
}
} catch (SmbException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (MalformedURLException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (UnknownHostException e) {
Log.logException(e);
ConcurrentLog.logException(e);
} catch (IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
}
}
}
@ -29,19 +29,19 @@ import java.net.MalformedURLException;
import java.util.Date;
import net.yacy.cora.document.ASCII;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.crawler.HarvestProcess;
import net.yacy.crawler.data.CrawlProfile;
import net.yacy.document.parser.sitemapParser;
import net.yacy.document.parser.sitemapParser.URLEntry;
import net.yacy.kelondro.data.meta.DigestURI;
import net.yacy.kelondro.data.meta.URIMetadataNode;
import net.yacy.kelondro.logging.Log;
import net.yacy.search.Switchboard;
public class SitemapImporter extends Thread {
private CrawlProfile crawlingProfile = null;
private static final Log logger = new Log("SITEMAP");
private static final ConcurrentLog logger = new ConcurrentLog("SITEMAP");
private DigestURI siteMapURL = null;
private final Switchboard sb;
@ -56,7 +56,7 @@ public class SitemapImporter extends Thread {
@Override
public void run() {
try {
logger.logInfo("Start parsing sitemap file " + this.siteMapURL);
logger.info("Start parsing sitemap file " + this.siteMapURL);
sitemapParser.SitemapReader parser = sitemapParser.parse(this.siteMapURL);
parser.start();
URLEntry item;
@ -64,7 +64,7 @@ public class SitemapImporter extends Thread {
process(item);
}
} catch (final Exception e) {
logger.logWarning("Unable to parse sitemap file " + this.siteMapURL, e);
logger.warn("Unable to parse sitemap file " + this.siteMapURL, e);
}
}
@ -108,6 +108,6 @@ public class SitemapImporter extends Thread {
0,
0
));
logger.logInfo("New URL '" + entry.url() + "' added for loading.");
logger.info("New URL '" + entry.url() + "' added for loading.");
}
}
@ -52,10 +52,10 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.order.NaturalOrder;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.data.wiki.WikiBoard;
import net.yacy.kelondro.blob.MapHeap;
import net.yacy.kelondro.logging.Log;
import net.yacy.kelondro.util.kelondroException;
import org.w3c.dom.Document;
@ -130,9 +130,9 @@ public class BlogBoard {
this.database.insert(UTF8.getBytes(page.key), page.record);
ret = page.key;
} catch (IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
} catch (SpaceExceededException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
return ret;
}
@ -147,10 +147,10 @@ public class BlogBoard {
try {
record = base.get(UTF8.getBytes(normalized.substring(0, Math.min(normalized.length(), KEY_LENGTH))));
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
record = null;
} catch (SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
record = null;
}
return (record == null) ?
@ -164,11 +164,11 @@ public class BlogBoard {
final DocumentBuilder builder = factory.newDocumentBuilder();
return parseXMLimport(builder.parse(new ByteArrayInputStream(UTF8.getBytes(input))));
} catch (final ParserConfigurationException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
} catch (final SAXException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
} catch (final IOException ex) {
Log.logException(ex);
ConcurrentLog.logException(ex);
}
return false;
@ -399,8 +399,8 @@ public class BlogBoard {
try {
final String date = this.record.get("date");
if (date == null) {
if (Log.isFinest("Blog")) {
Log.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (ConcurrentLog.isFinest("Blog")) {
ConcurrentLog.finest("Blog", "ERROR: date field missing in blogBoard");
}
return new Date();
}
@ -421,8 +421,8 @@ public class BlogBoard {
public String getTimestamp() {
final String timestamp = this.record.get("date");
if (timestamp == null) {
if (Log.isFinest("Blog")) {
Log.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (ConcurrentLog.isFinest("Blog")) {
ConcurrentLog.finest("Blog", "ERROR: date field missing in blogBoard");
}
return GenericFormatter.SHORT_SECOND_FORMATTER.format();
}
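BlogBoard above uses the static level check before the static finest call, mirroring the instance-level guards seen earlier; BlogBoardComments below follows the same pattern. A compact sketch of that guard, with illustrative surrounding code:

import java.util.Date;

import net.yacy.cora.util.ConcurrentLog;

public class BoardDateSketch {
    public Date parseDate(final String date) {
        if (date == null) {
            // static isFinest(component) guards the static finest(component, message) call
            if (ConcurrentLog.isFinest("Blog")) {
                ConcurrentLog.finest("Blog", "ERROR: date field missing in blogBoard");
            }
            return new Date();
        }
        // ... parse the stored timestamp here ...
        return new Date();
    }
}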
@ -49,10 +49,10 @@ import net.yacy.cora.document.UTF8;
import net.yacy.cora.order.Base64Order;
import net.yacy.cora.order.NaturalOrder;
import net.yacy.cora.protocol.Domains;
import net.yacy.cora.util.ConcurrentLog;
import net.yacy.cora.util.SpaceExceededException;
import net.yacy.data.wiki.WikiBoard;
import net.yacy.kelondro.blob.MapHeap;
import net.yacy.kelondro.logging.Log;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
@ -120,7 +120,7 @@ public class BlogBoardComments {
this.database.insert(UTF8.getBytes(page.key), page.record);
return page.key;
} catch (final Exception e) {
Log.logException(e);
ConcurrentLog.logException(e);
return null;
}
}
@ -136,10 +136,10 @@ public class BlogBoardComments {
try {
record = base.get(UTF8.getBytes(copyOfKey));
} catch (final IOException e) {
Log.logException(e);
ConcurrentLog.logException(e);
record = null;
} catch (final SpaceExceededException e) {
Log.logException(e);
ConcurrentLog.logException(e);
record = null;
}
return (record == null) ?
@ -285,7 +285,7 @@ public class BlogBoardComments {
try {
final String date = this.record.get("date");
if (date == null) {
if (Log.isFinest("Blog")) Log.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (ConcurrentLog.isFinest("Blog")) ConcurrentLog.finest("Blog", "ERROR: date field missing in blogBoard");
return new Date();
}
synchronized (SIMPLE_DATE_FORMATTER) {
@ -299,7 +299,7 @@ public class BlogBoardComments {
public String getTimestamp() {
final String timestamp = this.record.get("date");
if (timestamp == null) {
if (Log.isFinest("Blog")) Log.logFinest("Blog", "ERROR: date field missing in blogBoard");
if (ConcurrentLog.isFinest("Blog")) ConcurrentLog.finest("Blog", "ERROR: date field missing in blogBoard");
return dateString(new Date());
}
return timestamp;
Some files were not shown because too many files have changed in this diff.