Emergency fix so people can download a large number of objects in JSON

This commit is contained in:
Matt Wells 2014-12-08 07:13:00 -08:00
parent 2c5f6daca2
commit c692b54bfd
2 changed files with 10 additions and 6 deletions

View File

@ -72,9 +72,10 @@ bool TopTree::setNumNodes ( int32_t docsWanted , bool doSiteClustering ) {
//m_lastKickedOutDocId = -1LL;
// how many nodes do we need to accommodate "docsWanted" docids?
m_ridiculousMax = docsWanted * 2;
// we boost it up here for domain/host counting for site clustering.
m_ridiculousMax = (int64_t)docsWanted * 2;
if ( m_ridiculousMax < 50 ) m_ridiculousMax = 50;
int32_t numNodes = m_ridiculousMax * 256;
int64_t numNodes = m_ridiculousMax * 256;
// i would say limit it to 100,000 nodes regardless
if ( numNodes > MAXDOCIDSTOCOMPUTE ) numNodes = MAXDOCIDSTOCOMPUTE;
// craziness overflow?
@ -82,6 +83,9 @@ bool TopTree::setNumNodes ( int32_t docsWanted , bool doSiteClustering ) {
// amp it up last minute, after we set numNodes, if we need to
if ( ! m_doSiteClustering ) m_ridiculousMax = 0x7fffffff;
// if not doing siteclustering... don't use 5gb of ram!
if ( ! m_doSiteClustering ) numNodes = m_docsWanted;
// how many docids do we have, not FULLY counting docids from
// "dominating" domains? aka the "variety count"
m_vcount = 0.0;
@ -112,8 +116,8 @@ bool TopTree::setNumNodes ( int32_t docsWanted , bool doSiteClustering ) {
//if ( useSampleVectors ) vecSize = SAMPLE_VECTOR_SIZE ;
char *nn ;
int32_t oldsize = (m_numNodes+1) * ( sizeof(TopNode) );
int32_t newsize = ( numNodes+1) * ( sizeof(TopNode) );
int64_t oldsize = (m_numNodes+1) * ( sizeof(TopNode) );
int64_t newsize = ( numNodes+1) * ( sizeof(TopNode) );
// if they ask for too many, this can go negative
if ( newsize < 0 ) {
g_errno = ENOMEM;
@ -129,7 +133,7 @@ bool TopTree::setNumNodes ( int32_t docsWanted , bool doSiteClustering ) {
nn=(char *)mrealloc(m_nodes,oldsize,newsize,"TopTree");
updated = true;
}
if ( ! nn ) return log("query: Can not allocate %"INT32" bytes for "
if ( ! nn ) return log("query: Can not allocate %"INT64" bytes for "
"holding resulting docids.", newsize);
// save this for freeing
m_allocSize = newsize;

View File

@ -130,7 +130,7 @@ class TopTree {
bool m_doSiteClustering;
bool m_useIntScores;
int32_t m_docsWanted;
int32_t m_ridiculousMax;
int64_t m_ridiculousMax;
char m_kickedOutDocIds;
//int64_t m_lastKickedOutDocId;
int32_t m_domCount[256];