Fix another core dump in crawlbot

This commit is contained in:
Matt Wells
2015-08-21 14:30:13 -07:00
parent 74ec812959
commit 035d232673

@ -928,13 +928,17 @@ bool Msg3::doneScanning ( ) {
// . this returns false and sets g_errno on error
// . like if data is corrupt
BigFile *ff = base->getFile(m_fileNums[i]);
	// if we did a merge really quickly and deleted one of the
	// files we were reading, I've seen 'ff' be NULL
char *filename = "lostfilename";
if ( ff ) filename = ff->getFilename();
if ( ! m_lists[i].constrain ( m_startKey ,
m_constrainKey , // m_endKey
mrs , // m_minRecSizes
m_hintOffsets[i] ,
//m_hintKeys [i] ,
&m_hintKeys [i*m_ks] ,
ff->getFilename() ,
filename,//ff->getFilename() ,
m_niceness ) ) {
log("net: Had error while constraining list read from "
"%s: %s/%s. vfd=%"INT32" parts=%"INT32". "