More char -> bool fixes in various places. Yep, bored today..

Brian Rasmusson
2016-10-19 16:23:11 +02:00
parent 08dab2bc3a
commit 3ddd7029ac
12 changed files with 129 additions and 129 deletions

@ -1210,7 +1210,7 @@ CollectionRec::CollectionRec() {
m_maxOtherDocLen = 0;
m_summaryMaxWidth = 0;
m_maxRobotsCacheAge = 0;
m_queryExpansion = 0;
m_queryExpansion = false;
m_rcache = false;
m_hideAllClustered = false;
m_END_COPY = 0;
@ -1464,14 +1464,14 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
m_maxSpidersPerRule[n] = 99;
m_spiderIpWaits[n] = 1000;
m_spiderIpMaxSpiders[n] = 7;
m_harvestLinks[n] = 1;
m_harvestLinks[n] = true;
*/
// max spiders per ip
int32_t ipms = 7;
m_regExs[n].set("isreindex");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 0; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1481,7 +1481,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// if not in the site list then nuke it
m_regExs[n].set("!ismanualadd && !insitelist");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 0; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1491,7 +1491,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("errorcount>=3 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 1; // 30 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1501,7 +1501,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("errorcount>=1 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 1; // 30 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1513,7 +1513,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// a non temporary error, like a 404? retry once per 5 days
m_regExs[n].set("errorcount>=1");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 5; // 5 day retry
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1523,7 +1523,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("isaddurl");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1535,7 +1535,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// 20+ unique c block parent request urls means it is important!
m_regExs[n].set("numinlinks>7 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1547,7 +1547,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// 20+ unique c block parent request urls means it is important!
m_regExs[n].set("numinlinks>7");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1560,7 +1560,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
m_regExs[n].set("hopcount==0 && iswww && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1571,7 +1571,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("hopcount==0 && iswww");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0; // days b4 respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1582,7 +1582,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("hopcount==0 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1593,7 +1593,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("hopcount==0");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 10.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1604,7 +1604,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("hopcount==1 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1615,7 +1615,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("hopcount==1");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1626,7 +1626,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
n++;
m_regExs[n].set("hopcount==2 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1635,12 +1635,12 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// do not harvest links if we are spiderings NEWS
if ( ! strcmp(s,"news") ) {
m_spiderFreqs [n] = 5.0;
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
}
n++;
m_regExs[n].set("hopcount==2");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1649,12 +1649,12 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// do not harvest links if we are spiderings NEWS
if ( ! strcmp(s,"news") ) {
m_spiderFreqs [n] = 5.0;
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
}
n++;
m_regExs[n].set("hopcount>=3 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1663,14 +1663,14 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// turn off spidering if hopcount is too big and we are spiderings NEWS
if ( ! strcmp(s,"news") ) {
m_maxSpidersPerRule [n] = 0;
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
}
else {
n++;
}
m_regExs[n].set("hopcount>=3");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1679,14 +1679,14 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
// turn off spidering if hopcount is too big and we are spiderings NEWS
if ( ! strcmp(s,"news") ) {
m_maxSpidersPerRule [n] = 0;
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
}
else {
n++;
}
m_regExs[n].set("default");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1694,7 +1694,7 @@ bool CollectionRec::rebuildUrlFilters2 ( ) {
m_spiderPriorities [n] = 1;
if ( ! strcmp(s,"news") ) {
m_maxSpidersPerRule [n] = 0;
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
}
n++;
@ -1722,7 +1722,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
int32_t n = 0;
m_regExs[n].set("isreindex");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 0; // 0 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1732,7 +1732,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
m_regExs[n].reset();
m_regExs[n].safePrintf("lang!=%s", langWhitelistStr);
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
m_spiderFreqs [n] = 0; // 0 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1743,7 +1743,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
m_regExs[n].reset();
m_regExs[n].safePrintf("tld==%s", getPrivacoreBlacklistedTLD());
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
m_spiderFreqs [n] = 0; // 0 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1754,7 +1754,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
// 3 or more non-temporary errors - delete it
m_regExs[n].set("errorcount>=3 && !hastmperror");
m_harvestLinks [n] = 0;
m_harvestLinks [n] = false;
m_spiderFreqs [n] = 0; // 1 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1765,7 +1765,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
// 3 or more temporary errors - slow down retries a bit
m_regExs[n].set("errorcount>=3 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 3; // 1 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1776,7 +1776,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
// 1 or more temporary errors - retry in a day
m_regExs[n].set("errorcount>=1 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 1; // 1 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1786,7 +1786,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("isaddurl");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 7 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1796,7 +1796,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==0 && iswww && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 7 days default
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1806,7 +1806,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==0 && iswww");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0; // 7 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1816,7 +1816,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==0 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0; // 7 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1826,7 +1826,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==0");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 10.0; // 10 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1836,7 +1836,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==1 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0; // 20 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1846,7 +1846,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==1");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0; // 20 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1856,7 +1856,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==2 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40; // 40 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1866,7 +1866,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount==2");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40; // 40 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1876,7 +1876,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount>=3 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60; // 60 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1886,7 +1886,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("hopcount>=3");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60; // 60 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1896,7 +1896,7 @@ bool CollectionRec::rebuildPrivacoreRules () {
n++;
m_regExs[n].set("default");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60; // 60 days before respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1929,7 +1929,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
int32_t n = 0;
m_regExs[n].set("isreindex");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 0; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1939,7 +1939,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
// if not in the site list then nuke it
m_regExs[n].set("!ismanualadd && !insitelist");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 0; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1949,7 +1949,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("errorcount>=3 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 1; // 30 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1959,7 +1959,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("errorcount>=1 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 1; // 30 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -1968,7 +1968,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("isaddurl");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1979,7 +1979,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==0 && iswww && isnew && tld==%s",
tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -1991,7 +1991,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].safePrintf("hopcount==0 && iswww && isnew && "
"lang==%s,xx"
,langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2000,7 +2000,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
// m_regExs[n].set("hopcount==0 && iswww && isnew");
// m_harvestLinks [n] = 1;
// m_harvestLinks [n] = true;
// m_spiderFreqs [n] = 7; // 30 days default
// m_maxSpidersPerRule [n] = 9; // max spiders
// m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2012,7 +2012,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==0 && iswww && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0; // days b4 respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2023,7 +2023,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==0 && iswww && lang==%s,xx",
langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0; // days b4 respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2032,7 +2032,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount==0 && iswww");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0; // days b4 respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2046,7 +2046,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==0 && isnew && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2057,7 +2057,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==0 && isnew && lang==%s,xx",
langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2066,7 +2066,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount==0 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2078,7 +2078,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==0 && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 10.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2088,7 +2088,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==0 && lang==%s,xx",langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 10.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2097,7 +2097,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount==0");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 10.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2110,7 +2110,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==1 && isnew && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2121,7 +2121,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==1 && isnew && lang==%s,xx",
tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2130,7 +2130,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount==1 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2142,7 +2142,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==1 && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2152,7 +2152,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==1 && lang==%s,xx",langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2161,7 +2161,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount==1");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2173,7 +2173,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==2 && isnew && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2184,7 +2184,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==2 && isnew && lang==%s,xx",
langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2193,7 +2193,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount==2 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2206,7 +2206,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==2 && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2216,7 +2216,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount==2 && lang==%s,xx",langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2225,7 +2225,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount==2");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2238,7 +2238,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount>=3 && isnew && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2249,7 +2249,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount>=3 && isnew && lang==%s,xx",
langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2258,7 +2258,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount>=3 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2271,7 +2271,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount>=3 && tld==%s",tldStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2281,7 +2281,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].reset();
m_regExs[n].safePrintf("hopcount>=3 && lang==%s,xx",langStr);
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2290,7 +2290,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
n++;
m_regExs[n].set("hopcount>=3");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2301,7 +2301,7 @@ bool CollectionRec::rebuildLangRules ( const char *langStr , const char *tldStr
m_regExs[n].set("default");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2330,7 +2330,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
int32_t n = 0;
m_regExs[n].set("isreindex");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 0; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -2340,7 +2340,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
// if not in the site list then nuke it
m_regExs[n].set("!ismanualadd && !insitelist");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 0; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -2350,7 +2350,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
n++;
m_regExs[n].set("errorcount>=3 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 1; // 30 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -2360,7 +2360,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
n++;
m_regExs[n].set("errorcount>=1 && hastmperror");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 1; // 30 days default
m_maxSpidersPerRule [n] = 1; // max spiders
m_spiderIpMaxSpiders [n] = 1; // max spiders per ip
@ -2369,7 +2369,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
n++;
m_regExs[n].set("isaddurl");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 99; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2384,7 +2384,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
// stop if hopcount>=2 for things tagged shallow in sitelist
//
m_regExs[n].set("tag:shallow && hopcount>=2");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 0; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2395,7 +2395,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
// if # of pages in this site indexed is >= 10 then stop as well...
m_regExs[n].set("tag:shallow && sitepages>=10");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 0; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2407,7 +2407,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("hopcount==0 && iswww && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7; // 30 days default
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2416,7 +2416,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
n++;
m_regExs[n].set("hopcount==0 && iswww");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0; // days b4 respider
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2428,7 +2428,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("hopcount==0 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 7.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2440,7 +2440,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("hopcount==0");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 10.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2453,7 +2453,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("hopcount==1 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2463,7 +2463,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("hopcount==1");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 20.0;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2475,7 +2475,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("hopcount==2 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2484,7 +2484,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
n++;
m_regExs[n].set("hopcount==2");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 40;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2496,7 +2496,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("hopcount>=3 && isnew");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2505,7 +2505,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
n++;
m_regExs[n].set("hopcount>=3");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip
@ -2516,7 +2516,7 @@ bool CollectionRec::rebuildShallowRules ( ) {
m_regExs[n].set("default");
m_harvestLinks [n] = 1;
m_harvestLinks [n] = true;
m_spiderFreqs [n] = 60;
m_maxSpidersPerRule [n] = 9; // max spiders
m_spiderIpMaxSpiders [n] = ipms; // max spiders per ip

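The four rebuild*() functions above repeat the same rule-slot pattern dozens of times; the only behavioral change in this commit is that m_harvestLinks now takes bool literals instead of 0/1. Below is a minimal compilable sketch of that pattern with simplified stand-in types (the real class keeps fixed-size parallel arrays of length MAX_FILTERS and regex slots with set()/safePrintf(); the MAX_FILTERS value and the float element type for m_spiderFreqs are assumptions for illustration only):

// sketch_url_filter_rule.cpp -- illustration only, not repository code
#include <cstdint>
#include <cstdio>
#include <string>

static const int32_t MAX_FILTERS = 96;        // assumed capacity for this sketch

struct UrlFilterRulesSketch {
	std::string m_regExs             [MAX_FILTERS];
	bool        m_harvestLinks       [MAX_FILTERS];   // was: char, assigned 0/1
	float       m_spiderFreqs        [MAX_FILTERS];   // days before respider
	int32_t     m_maxSpidersPerRule  [MAX_FILTERS];
	int32_t     m_spiderIpMaxSpiders [MAX_FILTERS];

	// fill one rule slot and return the next free index, mirroring the
	// "set expression, assign the parallel arrays, n++" pattern in the diff
	int32_t addRule ( int32_t n, const char *expr, bool harvest,
	                  float freqDays, int32_t maxSpiders, int32_t maxPerIp ) {
		m_regExs[n]             = expr;
		m_harvestLinks[n]       = harvest;            // bool literal, not 0/1
		m_spiderFreqs[n]        = freqDays;
		m_maxSpidersPerRule[n]  = maxSpiders;
		m_spiderIpMaxSpiders[n] = maxPerIp;
		return n + 1;
	}
};

int main ( ) {
	UrlFilterRulesSketch rules;
	int32_t n = 0;
	// same values as the "errorcount>=3 && hastmperror" rule above
	n = rules.addRule ( n, "errorcount>=3 && hastmperror", true, 1, 1, 1 );
	printf ( "rules=%d harvest=%s\n", (int)n,
	         rules.m_harvestLinks[0] ? "true" : "false" );
	return 0;
}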
@ -449,7 +449,7 @@ class CollectionRec {
int32_t m_spiderIpMaxSpiders [ MAX_FILTERS ];
int32_t m_numHarvestLinks;
char m_harvestLinks[ MAX_FILTERS ];
bool m_harvestLinks[ MAX_FILTERS ];
int32_t m_numForceDelete;
char m_forceDelete[ MAX_FILTERS ];
@ -504,7 +504,7 @@ class CollectionRec {
int32_t m_maxRobotsCacheAge;
// use query expansion for this collection?
char m_queryExpansion;
bool m_queryExpansion;
// read from cache
bool m_rcache;

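The header hunks above switch the per-filter char array and char m_queryExpansion to bool. On common ABIs sizeof(bool) is 1, so the in-memory footprint of CollectionRec should not change, but that size is implementation-defined; if any code copies the POD region of the record byte-for-byte (the m_END_COPY marker in the constructor hunk suggests such a pattern), a compile-time check makes the assumption explicit. A minimal sketch, not part of this commit:

// illustration only: document the layout assumption behind char -> bool
static_assert ( sizeof(bool) == 1,
                "bool members must stay 1 byte to preserve CollectionRec layout" );

int main ( ) { return 0; }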
@ -40,7 +40,7 @@ class GigablastRequest {
///////////
int64_t m_docId;
int32_t m_strip;
char m_includeHeader;
bool m_includeHeader;
///////////
//
@ -48,8 +48,8 @@ class GigablastRequest {
//
///////////
char *m_urlsBuf;
char m_stripBox;
char m_harvestLinks;
bool m_stripBox;
bool m_harvestLinks;
SafeBuf m_listBuf;
Msg4 m_msg4;

@ -11,7 +11,7 @@ static void handleRequest22 ( UdpSlot *slot , int32_t netnice ) ;
Msg22Request::Msg22Request() {
//use memset() to clear out the padding bytes in the structure
memset(this, 0, sizeof(*this));
m_inUse = 0;
m_inUse = false;
}
bool Msg22::registerHandler ( ) {
@ -126,8 +126,8 @@ bool Msg22::getTitleRec ( Msg22Request *r ,
// set request
r->m_docId = docId;
r->m_niceness = niceness;
r->m_justCheckTfndb = (bool)justCheckTfndb;
r->m_getAvailDocIdOnly = (bool)getAvailDocIdOnly;
r->m_justCheckTfndb = justCheckTfndb;
r->m_getAvailDocIdOnly = getAvailDocIdOnly;
r->m_collnum = g_collectiondb.getCollnum ( coll );
r->m_addToCache = false;
r->m_maxCacheAge = 0;
@ -167,7 +167,7 @@ bool Msg22::getTitleRec ( Msg22Request *r ,
int32_t firstHostId = firstHost->m_hostId;
m_outstanding = true;
r->m_inUse = 1;
r->m_inUse = true;
// . send this request to the least-loaded host that can handle it
// . returns false and sets g_errno on error
@ -198,7 +198,7 @@ void Msg22::gotReply ( ) {
Msg22Request *r = m_r;
// back
m_outstanding = false;
r->m_inUse = 0;
r->m_inUse = false;
// bail on error, multicast will free the reply buffer if it should
if ( g_errno ) {

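With the Msg22Request flag members becoming bool (see the header hunk below), the explicit (bool) casts removed from getTitleRec() above are redundant: assigning any scalar to a bool converts implicitly, nonzero mapping to true and zero to false. A standalone illustration, not repository code:

// bool_conversion_sketch.cpp -- illustration only
#include <cstdio>

int main ( ) {
	int  justCheckTfndbFlag = 7;                    // hypothetical incoming value
	bool justCheckTfndb     = justCheckTfndbFlag;   // implicit conversion -> true
	bool getAvailDocIdOnly  = 0;                    // -> false
	printf ( "%d %d\n", (int)justCheckTfndb, (int)getAvailDocIdOnly );  // prints "1 0"
	return 0;
}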
@ -15,10 +15,10 @@ public:
int32_t m_niceness;
int32_t m_maxCacheAge;
collnum_t m_collnum;
unsigned char m_justCheckTfndb :1;
unsigned char m_getAvailDocIdOnly:1;
unsigned char m_addToCache :1;
unsigned char m_inUse :1;
bool m_justCheckTfndb :1;
bool m_getAvailDocIdOnly:1;
bool m_addToCache :1;
bool m_inUse :1;
char m_url[MAX_URL_LEN+1];
Msg22Request();

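The Msg22.h hunk converts the 1-bit flags from unsigned char bitfields to bool bitfields. A bool bitfield of width 1 holds exactly true or false, so the true/false assignments elsewhere in this diff work without casts, and the memset() in the constructor still zeroes the flags along with the padding. A trimmed, compilable stand-in for Msg22Request (the real struct carries many more members):

// msg22_bitfield_sketch.cpp -- illustration only
#include <cstdio>
#include <cstring>

struct Msg22RequestSketch {
	bool m_justCheckTfndb    :1;
	bool m_getAvailDocIdOnly :1;
	bool m_addToCache        :1;
	bool m_inUse             :1;

	Msg22RequestSketch ( ) {
		// same idiom as the real constructor: zero the whole struct,
		// including the padding bits around the bitfields
		memset ( this, 0, sizeof(*this) );
	}
};

int main ( ) {
	Msg22RequestSketch r;
	r.m_inUse = true;                               // no cast needed
	printf ( "inUse=%s\n", r.m_inUse ? "true" : "false" );
	return 0;
}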
@ -559,7 +559,7 @@ bool Msg39::getLists () {
qt->m_rightPhraseTerm->m_isWikiHalfStopBigram )
rightwikibigram = 1;
int32_t isSynonym = 0;
bool isSynonym = 0;
const QueryTerm *synterm = qt->m_synonymOf;
if ( synterm )
isSynonym = true;
@ -582,7 +582,7 @@ bool Msg39::getLists () {
"rightwikibigram=%" PRId32" "
"hc=%" PRId32" "
"otermLen=%" PRId32" "
"isSynonym=%" PRId32" "
"isSynonym=%s"
"querylangid=%" PRId32" " ,
(PTRTYPE)this ,
i ,
@ -604,7 +604,7 @@ bool Msg39::getLists () {
(int32_t)rightwikibigram,
(int32_t)m_query.m_qterms[i].m_hardCount ,
(int32_t)m_query.getTermLen(i) ,
isSynonym,
(isSynonym ? "true" : "false"),
(int32_t)m_query.m_langId );
if ( synterm ) {
int32_t stnum = synterm - m_query.m_qterms;

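The Msg39 hunks change isSynonym from int32_t to bool and adjust the log line accordingly: printf-style formatting has no conversion for bool, so the value is now rendered through a ternary as "true"/"false" instead of being widened and printed with PRId32. A small standalone illustration:

// bool_logging_sketch.cpp -- illustration only
#include <cstdio>

int main ( ) {
	bool isSynonym = true;
	// old style: promote to an integer type and print the number
	printf ( "isSynonym=%d\n", (int)isSynonym );
	// new style in this diff: print a readable true/false token
	printf ( "isSynonym=%s\n", isSynonym ? "true" : "false" );
	return 0;
}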
@ -40,18 +40,18 @@ class Msg39Request {
uint8_t m_language;
// flags
char m_queryExpansion;
bool m_queryExpansion;
bool m_debug;
bool m_doSiteClustering;
bool m_hideAllClustered;
//char m_doIpClustering;
bool m_doDupContentRemoval;
char m_addToCache;
bool m_addToCache;
bool m_familyFilter;
bool m_getDocIdScoringInfo;
char m_realMaxTop;
char m_stripe;
char m_useQueryStopWords;
bool m_useQueryStopWords;
bool m_allowHighFrequencyTermCache;
bool m_doMaxScoreAlgo;

@ -70,7 +70,7 @@ public:
Url m_urls [ MAX_OUTSTANDING_MSGE0 ];
int32_t m_ns [ MAX_OUTSTANDING_MSGE0 ];
char m_used [ MAX_OUTSTANDING_MSGE0 ];
bool m_used [ MAX_OUTSTANDING_MSGE0 ];
Msg8a m_msg8as [ MAX_OUTSTANDING_MSGE0 ]; //for getting tag bufs
//TagRec m_tagRecs [ MAX_OUTSTANDING_MSGE0 ];

@ -75,7 +75,7 @@ public:
char *m_nextPtr;
int32_t m_ns [ MAX_OUTSTANDING_MSGE1 ];
char m_used [ MAX_OUTSTANDING_MSGE1 ];
bool m_used [ MAX_OUTSTANDING_MSGE1 ];
MsgC m_msgCs [ MAX_OUTSTANDING_MSGE1 ]; // ips
// vector of TagRec ptrs

@ -105,7 +105,7 @@ bool sendPageGet ( TcpSocket *s , HttpRequest *r ) {
// . we need to match summary here so we need to know this
//bool seq = r->getLong ( "seq" , false );
// restrict to root file?
bool rtq = r->getLong ( "rtq" , false );
bool rtq = r->getLong ( "rtq" , 0) ? true : false;
// . get the titleRec
// . TODO: redirect client to a better http server to save bandwidth
@ -124,11 +124,11 @@ bool sendPageGet ( TcpSocket *s , HttpRequest *r ) {
st->m_docId = docId;
st->m_printed = false;
// include header ... "this page cached by Gigablast on..."
st->m_includeHeader = r->getLong ("ih" , true );
st->m_includeBaseHref = r->getLong ("ibh" , false );
st->m_queryHighlighting = r->getLong ("qh" , true );
st->m_strip = r->getLong ("strip" , 0 );
st->m_cnsPage = r->getLong ("cnsp" , true );
st->m_includeHeader = r->getLong ("ih" , 1) ? true : false;
st->m_includeBaseHref = r->getLong ("ibh" , 0) ? true : false;
st->m_queryHighlighting = r->getLong ("qh" , 1) ? true : false;
st->m_strip = r->getLong ("strip" , 0);
st->m_cnsPage = r->getLong ("cnsp" , 1) ? true : false;
const char *langAbbr = r->getString("qlang",NULL);
st->m_langId = langUnknown;
if ( langAbbr ) {
@ -137,7 +137,7 @@ bool sendPageGet ( TcpSocket *s , HttpRequest *r ) {
}
strncpy ( st->m_coll , coll , MAX_COLL_LEN+1 );
// store query for query highlighting
st->m_netTestResults = r->getLong ("rnettest", false );
st->m_netTestResults = r->getLong ("rnettest", 0) ? true : false;
st->m_qsb.setBuf ( st->m_qtmpBuf,128,0,false );
st->m_qsb.setLabel ( "qsbpg" );

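The PageGet hunks convert HttpRequest::getLong() results to bool with a repeated "? true : false" idiom. A hypothetical helper (not part of this commit) could centralize that conversion; HttpRequestSketch below is a trimmed stand-in, and only the getLong(name, default) signature is taken from the surrounding diff:

// getbool_sketch.cpp -- illustration only
#include <cstdint>
#include <cstdio>

struct HttpRequestSketch {
	// stand-in: pretend every parameter is present with value 1
	int32_t getLong ( const char * /*name*/, int32_t /*defaultValue*/ ) { return 1; }
};

// hypothetical wrapper centralizing the long -> bool conversion
static bool getBool ( HttpRequestSketch *r, const char *name, bool def ) {
	return r->getLong ( name, def ? 1 : 0 ) != 0;
}

int main ( ) {
	HttpRequestSketch req;
	bool includeHeader = getBool ( &req, "ih", true );   // mirrors st->m_includeHeader
	printf ( "includeHeader=%s\n", includeHeader ? "true" : "false" );
	return 0;
}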
@ -79,7 +79,7 @@ SearchInput::SearchInput() {
m_displayOutlinks = 0;
m_docIdsOnly = 0;
m_formatStr = NULL;
m_queryExpansion = 0;
m_queryExpansion = false;
m_END_HASH = 0;
m_END_TEST = 0;
}

@ -200,7 +200,7 @@ public:
char *m_formatStr;
// this should be part of the key because it will affect the results!
char m_queryExpansion;
bool m_queryExpansion;
////////
//