Made crawlinfo update interval configurable

This commit is contained in:
Ivan Skytte Jørgensen
2017-03-23 15:46:26 +01:00
parent fd1e307670
commit e908e34c92
3 changed files with 14 additions and 2 deletions

2
Conf.h

@@ -185,6 +185,8 @@ class Conf {
int32_t m_maxTotalSpiders;
int32_t m_crawlInfoUpdateInterval;
// indexdb has a max cached age for getting IndexLists (10 mins deflt)
int32_t m_indexdbMaxIndexListAge;

@@ -6253,7 +6253,17 @@ void Parms::init ( ) {
m->m_group = true;
m->m_flags = 0;
m->m_page = PAGE_MASTER;
m->m_group = false;
m++;
m->m_title = "Crawlinfo update interval";
m->m_desc = "How often to get updated crawling info from all spider hosts. This is used for doling out new work.";
m->m_cgi = "crawlinfoupdateinterval";
simple_m_set(Conf,m_crawlInfoUpdateInterval);
m->m_def = "20000";
m->m_units = "milliseconds";
m->m_group = true;
m->m_flags = 0;
m->m_page = PAGE_MASTER;
m++;
m->m_title = "weights.cpp slider parm (tmp)";

@@ -203,7 +203,7 @@ void SpiderLoop::startLoop ( ) {
// let's move back down to 1 second
// . make it 20 seconds because handlerequestc1 is always on
// profiler when we have thousands of collections
if ( !g_loop.registerSleepCallback(20000, this, updateAllCrawlInfosSleepWrapper)) {
if ( !g_loop.registerSleepCallback(g_conf.m_crawlInfoUpdateInterval, this, updateAllCrawlInfosSleepWrapper)) {
log(LOG_ERROR, "build: failed to register updatecrawlinfowrapper");
}