📄 indexed.cpp

📁 A rare find: eMule (eDonkey) V0.45b source code, well worth studying
💻 CPP
📖 Page 1 of 3

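		// ~CIndexed (continued): free every cached note entry, its owning Source,
		// and each SrcHash bucket held in m_Notes_map.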
		CCKey key;
		CCKey key2;
		POSITION pos = m_Notes_map.GetStartPosition();
		while( pos != NULL )
		{
			SrcHash* currNoteHash;
			m_Notes_map.GetNextAssoc( pos, key, currNoteHash );
			CKadSourcePtrList& KeyHashNoteMap = currNoteHash->m_Source_map;
			POSITION pos2 = KeyHashNoteMap.GetHeadPosition();
			while( pos2 != NULL )
			{
				Source* currNote = KeyHashNoteMap.GetNext(pos2);
				CKadEntryPtrList& NoteEntryList = currNote->entryList;
				for(POSITION pos5 = NoteEntryList.GetHeadPosition(); pos5 != NULL; )
				{
					Kademlia::CEntry* currName = NoteEntryList.GetNext(pos5);
					delete currName;
				}
				delete currNote;
			}
			delete currNoteHash;
		} 
	}
	catch ( CIOException *ioe )
	{
		AddDebugLogLine( false, _T("Exception in CIndexed::~CIndexed (IO error(%i))"), ioe->m_cause);
		ioe->Delete();
	}
	catch (...) 
	{
		AddDebugLogLine(false, _T("Exception in CIndexed::~CIndexed"));
	}
}

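// clean(): at most once every 30 minutes, walk the keyword and source maps,
// delete every CEntry whose lifetime has expired, drop any Source or hash bucket
// that becomes empty as a result, and refresh the m_totalIndexKeyword /
// m_totalIndexSource counters.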
void CIndexed::clean(void)
{
	try
	{
		if( m_lastClean > time(NULL) )
		{
			return;
		}

		uint32 k_Removed = 0;
		uint32 s_Removed = 0;
		uint32 s_Total = 0;
		uint32 k_Total = 0;
		time_t tNow = time(NULL);

		POSITION pos = m_Keyword_map.GetStartPosition();
		while( pos != NULL )
		{
			KeyHash* currKeyHash;
			CCKey key;
			m_Keyword_map.GetNextAssoc( pos, key, currKeyHash );
			POSITION pos2 = currKeyHash->m_Source_map.GetStartPosition();
			while( pos2 != NULL )
			{
				Source* currSource;
				CCKey key2;
				currKeyHash->m_Source_map.GetNextAssoc( pos2, key2, currSource );
				for(POSITION pos5 = currSource->entryList.GetHeadPosition(); pos5 != NULL; )
				{
					POSITION pos6 = pos5;
					Kademlia::CEntry* currName = currSource->entryList.GetNext(pos5);
					k_Total++;
					if( !currName->source && currName->lifetime < tNow)
					{
						k_Removed++;
						currSource->entryList.RemoveAt(pos6);
						delete currName;
					}
				}
				if( currSource->entryList.IsEmpty())
				{
					currKeyHash->m_Source_map.RemoveKey(key2);
					delete currSource;
				}
			}
			if( currKeyHash->m_Source_map.IsEmpty())
			{
				m_Keyword_map.RemoveKey(key);
				delete currKeyHash;
			}
		}

		pos = m_Sources_map.GetStartPosition();
		while( pos != NULL )
		{
			SrcHash* currSrcHash;
			CCKey key;
			m_Sources_map.GetNextAssoc( pos, key, currSrcHash );
			for(POSITION pos2 = currSrcHash->m_Source_map.GetHeadPosition(); pos2 != NULL; )
			{
				POSITION pos3 = pos2;
				Source* currSource = currSrcHash->m_Source_map.GetNext(pos2);
				for(POSITION pos5 = currSource->entryList.GetHeadPosition(); pos5 != NULL; )
				{
					POSITION pos6 = pos5;
					Kademlia::CEntry* currName = currSource->entryList.GetNext(pos5);
					s_Total++;
					if( currName->lifetime < tNow)
					{
						s_Removed++;
						currSource->entryList.RemoveAt(pos6);
						delete currName;
					}
				}
				if( currSource->entryList.IsEmpty())
				{
					currSrcHash->m_Source_map.RemoveAt(pos3);
					delete currSource;
				}
			}
			if( currSrcHash->m_Source_map.IsEmpty())
			{
				m_Sources_map.RemoveKey(key);
				delete currSrcHash;
			}
		}

		m_totalIndexSource = s_Total;
		m_totalIndexKeyword = k_Total;
		AddDebugLogLine( false, _T("Removed %u keyword out of %u and %u source out of %u"), k_Removed, k_Total, s_Removed, s_Total);
		m_lastClean = time(NULL) + MIN2S(30);
	} 
	catch(...)
	{
		AddDebugLogLine(false, _T("Exception in CIndexed::clean"));
		ASSERT(0);
	}
}

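// AddKeyword() indexes a published keyword entry under keyID, keeping at most one
// entry per publishing sourceID and reporting load as a percentage of
// KADEMLIAMAXINDEX. Empty or already-expired entries are rejected, as are new
// publishes once the global (KADEMLIAMAXENTRIES) or per-keyword (KADEMLIAMAXINDEX)
// limits are reached.
//
// Hypothetical caller sketch (names not from this file): on a false return the
// entry was not stored, so the caller remains responsible for freeing it.
//	uint8 load = 0;
//	if (!theIndex.AddKeyword(keyID, sourceID, entry, load))
//		delete entry;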
bool CIndexed::AddKeyword(const CUInt128& keyID, const CUInt128& sourceID, Kademlia::CEntry* entry, uint8& load)
{
	try
	{
		if( !entry )
			return false;

		if( m_totalIndexKeyword > KADEMLIAMAXENTRIES )
		{
			load = 100;
			return false;
		}

		if( entry->size == 0 || entry->fileName.IsEmpty() || entry->taglist.size() == 0 || entry->lifetime < time(NULL))
			return false;

		KeyHash* currKeyHash;
		if(!m_Keyword_map.Lookup(CCKey(keyID.getData()), currKeyHash))
		{
			Source* currSource = new Source;
			currSource->sourceID.setValue(sourceID);
			currSource->entryList.AddHead(entry);
			currKeyHash = new KeyHash;
			currKeyHash->keyID.setValue(keyID);
			currKeyHash->m_Source_map.SetAt(CCKey(currSource->sourceID.getData()), currSource);
			m_Keyword_map.SetAt(CCKey(currKeyHash->keyID.getData()), currKeyHash);
			load = 1;
			m_totalIndexKeyword++;
			return true;
		}
		else
		{
			uint32 indexTotal = currKeyHash->m_Source_map.GetCount();
			if ( indexTotal > KADEMLIAMAXINDEX )
			{
				load = 100;
				//Too many entries for this Keyword..
				return false;
			}
			Source* currSource;
			if(currKeyHash->m_Source_map.Lookup(CCKey(sourceID.getData()), currSource))
			{
				if (currSource->entryList.GetCount() > 0)
				{
					if( indexTotal > KADEMLIAMAXINDEX - 5000 )
					{
						load = 100;
						//We are on a hot node. If we kept accepting updates for every publish
						//while this index is full, popular files would be the only thing indexed.
						return false;
					}
					delete currSource->entryList.GetHead();
					currSource->entryList.RemoveHead();
				}
				else
					m_totalIndexKeyword++;
				load = (indexTotal*100)/KADEMLIAMAXINDEX;
				currSource->entryList.AddHead(entry);
				return true;
			}
			else
			{
				currSource = new Source;
				currSource->sourceID.setValue(sourceID);
				currSource->entryList.AddHead(entry);
				currKeyHash->m_Source_map.SetAt(CCKey(currSource->sourceID.getData()), currSource);
				m_totalIndexKeyword++;
				load = (indexTotal*100)/KADEMLIAMAXINDEX;
				return true;
			}
		}
	}
	catch(...)
	{
		AddDebugLogLine(false, _T("Exception in CIndexed::writeFile"));
		ASSERT(0);
	}
	return false;
}

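// AddSources() records a source (ip, tcpport, udpport) that has published file
// keyID. A publisher matching an existing ip/port pair replaces its old entry;
// otherwise a new Source is added at the head, and once the list exceeds
// KADEMLIAMAXSOUCEPERFILE the oldest Source slot is recycled. load is reported
// as a percentage of that limit.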
bool CIndexed::AddSources(const CUInt128& keyID, const CUInt128& sourceID, Kademlia::CEntry* entry, uint8& load)
{
	if( !entry )
		return false;
	if( entry->ip == 0 || entry->tcpport == 0 || entry->udpport == 0 || entry->taglist.size() == 0 || entry->lifetime < time(NULL))
		return false;
	try
	{
		SrcHash* currSrcHash;
		if(!m_Sources_map.Lookup(CCKey(keyID.getData()), currSrcHash))
		{
			Source* currSource = new Source;
			currSource->sourceID.setValue(sourceID);
			currSource->entryList.AddHead(entry);
			currSrcHash = new SrcHash;
			currSrcHash->keyID.setValue(keyID);
			currSrcHash->m_Source_map.AddHead(currSource);
			m_Sources_map.SetAt(CCKey(currSrcHash->keyID.getData()), currSrcHash);
			m_totalIndexSource++;
			load = 1;
			return true;
		}
		else
		{
			uint32 size = currSrcHash->m_Source_map.GetSize();
			for(POSITION pos2 = currSrcHash->m_Source_map.GetHeadPosition(); pos2 != NULL; )
			{
				Source* currSource = currSrcHash->m_Source_map.GetNext(pos2);
				if( currSource->entryList.GetSize() )
				{
					CEntry* currEntry = currSource->entryList.GetHead();
					ASSERT(currEntry!=NULL);
					if( currEntry->ip == entry->ip && ( currEntry->tcpport == entry->tcpport || currEntry->udpport == entry->udpport ))
					{
						CEntry* currName = currSource->entryList.RemoveHead();
						delete currName;
						currSource->entryList.AddHead(entry);
						load = (size*100)/KADEMLIAMAXSOUCEPERFILE;
						return true;
					}
				}
				else
				{
					//This should never happen!
					currSource->entryList.AddHead(entry);
					ASSERT(0);
					load = (size*100)/KADEMLIAMAXSOUCEPERFILE;
					return true;
				}
			}
			if( size > KADEMLIAMAXSOUCEPERFILE )
			{
				Source* currSource = currSrcHash->m_Source_map.RemoveTail();
				ASSERT(currSource!=NULL);
				Kademlia::CEntry* currName = currSource->entryList.RemoveTail();
				ASSERT(currName!=NULL);
				delete currName;
				currSource->sourceID.setValue(sourceID);
				currSource->entryList.AddHead(entry);
				currSrcHash->m_Source_map.AddHead(currSource);
				load = 100;
				return true;
			}
			else
			{
				Source* currSource = new Source;
				currSource->sourceID.setValue(sourceID);
				currSource->entryList.AddHead(entry);
				currSrcHash->m_Source_map.AddHead(currSource);
				m_totalIndexSource++;
				load = (size*100)/KADEMLIAMAXSOUCEPERFILE;
				return true;
			}
		}
	}
	catch(...)
	{
		AddDebugLogLine(false, _T("Exception in CIndexed::AddSource"));
	}
	return false;
}

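// AddNotes() stores a file comment/rating the same way AddSources() stores
// sources: one note per publisher (matched by ip or sourceID), with the oldest
// note recycled once KADEMLIAMAXNOTESPERFILE is exceeded.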
bool CIndexed::AddNotes(const CUInt128& keyID, const CUInt128& sourceID, Kademlia::CEntry* entry, uint8& load)
{
	if( !entry )
		return false;
	if( entry->ip == 0 || entry->taglist.size() == 0 )
		return false;
	try
	{
		SrcHash* currNoteHash;
		if(!m_Notes_map.Lookup(CCKey(keyID.getData()), currNoteHash))
		{
			Source* currNote = new Source;
			currNote->sourceID.setValue(sourceID);
			currNote->entryList.AddHead(entry);
			currNoteHash = new SrcHash;
			currNoteHash->keyID.setValue(keyID);
			currNoteHash->m_Source_map.AddHead(currNote);
			m_Notes_map.SetAt(CCKey(currNoteHash->keyID.getData()), currNoteHash);
			load = 1;
			return true;
		}
		else
		{
			uint32 size = currNoteHash->m_Source_map.GetSize();
			for(POSITION pos2 = currNoteHash->m_Source_map.GetHeadPosition(); pos2 != NULL; )
			{
				Source* currNote = currNoteHash->m_Source_map.GetNext(pos2);
				if( currNote->entryList.GetSize() )
				{
					CEntry* currEntry = currNote->entryList.GetHead();
					ASSERT(currEntry!=NULL);
					if(currEntry->ip == entry->ip || !currEntry->sourceID.compareTo(entry->sourceID))
					{
						CEntry* currName = currNote->entryList.RemoveHead();
						delete currName;
						currNote->entryList.AddHead(entry);
						load = (size*100)/KADEMLIAMAXNOTESPERFILE;
						return true;
					}
				}
				else
				{
					//This should never happen!
					currNote->entryList.AddHead(entry);
					ASSERT(0);
					load = (size*100)/KADEMLIAMAXNOTESPERFILE;
					return true;
				}
			}
			if( size > KADEMLIAMAXNOTESPERFILE )
			{
				Source* currNote = currNoteHash->m_Source_map.RemoveTail();
				ASSERT(currNote!=NULL);
				CEntry* currName = currNote->entryList.RemoveTail();
				ASSERT(currName!=NULL);
				delete currName;
				currNote->sourceID.setValue(sourceID);
				currNote->entryList.AddHead(entry);
				currNoteHash->m_Source_map.AddHead(currNote);
				load = 100;
				return true;
			}
			else
			{
				Source* currNote = new Source;
				currNote->sourceID.setValue(sourceID);
				currNote->entryList.AddHead(entry);
				currNoteHash->m_Source_map.AddHead(currNote);
				load = (size*100)/KADEMLIAMAXNOTESPERFILE;
				return true;
			}
		}
	}
	catch(...)
	{
		AddDebugLogLine(false, _T("Exception in CIndexed::AddNotes"));
	}
	return false;
}

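// AddLoad() stores a per-key Load record (keyID, time) in m_Load_map; a key that
// is already present is rejected, and the supplied time is expected to lie in
// the future (see the ASSERTs below).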
bool CIndexed::AddLoad(const CUInt128& keyID, uint32 timet)
{
	Load* load;
	if(m_Load_map.Lookup(CCKey(keyID.getData()), load))
	{
		ASSERT(0);
		return false;
	}
	ASSERT((uint32)time(NULL)<timet);
	load = new Load();
	load->keyID.setValue(keyID);
	load->time = timet;
	m_Load_map.SetAt(CCKey(load->keyID.getData()), load);
	return true;
}

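// SearchTermsMatch() recursively evaluates a parsed search expression tree
// against a single indexed CEntry: AND/OR/NAND nodes combine their sub-trees,
// while string terms are matched against the entry's file name.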
bool SearchTermsMatch(const SSearchTerm* pSearchTerm, const Kademlia::CEntry* item/*, CStringArray& astrFileNameTokens*/)
{
	// boolean operators
	if (pSearchTerm->type == SSearchTerm::AND)
		return SearchTermsMatch(pSearchTerm->left, item/*, astrFileNameTokens*/) && SearchTermsMatch(pSearchTerm->right, item/*, astrFileNameTokens*/);
	
	if (pSearchTerm->type == SSearchTerm::OR)
		return SearchTermsMatch(pSearchTerm->left, item/*, astrFileNameTokens*/) || SearchTermsMatch(pSearchTerm->right, item/*, astrFileNameTokens*/);
	
	if (pSearchTerm->type == SSearchTerm::NAND)
		return SearchTermsMatch(pSearchTerm->left, item/*, astrFileNameTokens*/) && !SearchTermsMatch(pSearchTerm->right, item/*, astrFileNameTokens*/);

	// word which is to be searched in the file name (and in additional meta data as done by some ed2k servers???)
	if (pSearchTerm->type == SSearchTerm::String)
	{
		int iStrSearchTerms = pSearchTerm->astr->GetCount();
		if (iStrSearchTerms == 0)
			return false;
#if 0
		//TODO: Use a pre-tokenized list for better performance.
		// tokenize the filename (very expensive) only once per search expression and only if really needed
		if (astrFileNameTokens.GetCount() == 0)
		{
			int iPosTok = 0;
			CString strTok(item->fileName.Tokenize(_aszInvKadKeywordChars, iPosTok));
			while (!strTok.IsEmpty())
			{
				astrFileNameTokens.Add(strTok);
				strTok = item->fileName.Tokenize(_aszInvKadKeywordChars, iPosTok);
			}
		}
		if (astrFileNameTokens.GetCount() == 0)
			return false;

		// if there are more than one search strings specified (e.g. "aaa bbb ccc") the entire string is handled
