⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 lexicalanalyse.cpp

📁 词法分析器
💻 CPP
字号:
// LexicalAnalyse.cpp : 定义 DLL 的初始化例程。
//

#include "stdafx.h"
#include "LexicalAnalyse.h"

/////////////////////////////// 我的 include  /////////////////////////////////

#include <string>

#include "Lex_Define.h"

using namespace std;

///////////////////////////////////////////////////////////////////////////////
#ifdef _DEBUG
#define new DEBUG_NEW
#endif

// CLexicalAnalyseApp

// Empty MFC message map: this DLL exposes plain C entry points below and
// handles no window messages of its own.
BEGIN_MESSAGE_MAP(CLexicalAnalyseApp, CWinApp)
END_MESSAGE_MAP()

/////////////////////////// Variable definitions ///////////////////////////////////
const int TOKEN_ERROR	= -1;	// next_token(): no DFA prefix was accepted
const int TOKEN_END		= -2;	// next_token(): the whole input has been consumed

// Analyser input state: the text currently being tokenised.
struct struAnalyseInfo
{
	string strText;	// ANSI/multibyte text set by SetAnalyseTextA/W
}SAI;	// single global instance shared by every exported entry point

// One token produced by next_token().
struct struToken
{
	string TokenText;	// the matched (or, on error, the rejected) characters
	int TokenType;		// accepting state's TokenTypeID, or TOKEN_ERROR / TOKEN_END
	int TokenStartPos;	// offset of the first character within SAI.strText
};

int nFirstPos, nLastPos, nAcceptPos;

////////////////////////  正式代码开始  ////////////////////////////////
////////////////////////////////////////////////////////////////////////

// CLexicalAnalyseApp construction

CLexicalAnalyseApp::CLexicalAnalyseApp()
{
	// Nothing to initialise here; MFC start-up happens in InitInstance().
}


// The one and only CLexicalAnalyseApp object

CLexicalAnalyseApp theApp;


// CLexicalAnalyseApp initialisation

// Standard MFC DLL start-up: delegate to CWinApp and report success.
BOOL CLexicalAnalyseApp::InitInstance()
{
	CWinApp::InitInstance();

	return TRUE;
}

// Scan the next token from SAI.strText starting at nFirstPos, driving the
// table-based DFA declared in Lex_Define.h (DFA_MAP = transition table,
// DSI = per-state accept info, START_STATE / ERR = sentinels).
//
// Maximal munch with backtracking: keep consuming characters, remembering in
// nAcceptPos the position of the final character of the longest accepted
// prefix.  When the DFA rejects (ERR) or the input ends, return that longest
// prefix and resume the next scan just past it.  If no prefix was ever
// accepted, return the rejected span as a TOKEN_ERROR token instead.
//
// Returns a struToken whose TokenType is TOKEN_END when the input is
// exhausted (other fields unspecified), TOKEN_ERROR when nothing matched,
// or the accepting state's TokenTypeID otherwise.
struToken next_token()
{
	struToken ret;
	int now_state = START_STATE;
	BOOL is_err = FALSE;
	nAcceptPos = -1;

	char nc;

	if(nFirstPos >= (int)SAI.strText.length())
	{
		ret.TokenType = TOKEN_END;
		return ret;
	}
	nLastPos = nFirstPos;
	while(nLastPos < (int)SAI.strText.length())
	{
		nc = SAI.strText[nLastPos];

		// Index the transition table with the *unsigned* byte value.  The
		// text comes from an ACP multibyte conversion, so bytes >= 0x80 are
		// common; the old int(nc) cast turned those into negative indices
		// (undefined behaviour).
		// NOTE(review): assumes DFA_MAP rows have 256 columns - confirm
		// against Lex_Define.h.
		int col = (unsigned char)nc;

		if(DFA_MAP[now_state][col] == ERR)
		{
			is_err = TRUE;
			break;
		}else
		{
			// State transition.
			now_state = DFA_MAP[now_state][col];
			if(DSI[now_state].IsAccept)
			{
				// Remember the longest accepted prefix seen so far.
				nAcceptPos = nLastPos;
				ret.TokenType = DSI[now_state].TokenTypeID;
			}
		}
		nLastPos++;
	}
	if(nAcceptPos>=0)
	{
		// Longest accepted prefix wins; backtrack the cursor to just past it.
		ret.TokenStartPos = nFirstPos;
		ret.TokenText = SAI.strText.substr(nFirstPos,nAcceptPos-nFirstPos+1);
		nFirstPos = nAcceptPos + 1;
	}else
	{
		// No prefix accepted: emit the rejected span as an error token.
		if(is_err)
			nLastPos++;	// include the offending character
		ret.TokenStartPos = nFirstPos;	// was left uninitialised before (read by NextTokenForVB)
		ret.TokenText = SAI.strText.substr(nFirstPos,nLastPos-nFirstPos);
		ret.TokenType = TOKEN_ERROR;
		nFirstPos = nLastPos;
	}
	return ret;
}

// Wide-char entry point: convert lpstrText to the ANSI code page, store it
// as the text to analyse, and reset the scan cursors.
// Returns FALSE if the conversion fails, TRUE otherwise.
extern "C" BOOL PASCAL EXPORT SetAnalyseTextW(LPCTSTR lpstrText)
{
	CString cstText = lpstrText;

	// Ask WideCharToMultiByte for the required size first (in bytes,
	// including the terminating NUL).  The old code guessed GetLength()*2
	// into a raw new[] buffer, which leaves no room for the NUL when every
	// character converts to two bytes (heap overflow), and read the buffer
	// uninitialised when the conversion failed.
	int needed = WideCharToMultiByte(CP_ACP, 0, cstText, -1, NULL, 0, NULL, NULL);
	if(needed <= 0)
		return FALSE;

	string buf(needed, '\0');
	WideCharToMultiByte(CP_ACP, 0, cstText, -1, &buf[0], needed, NULL, NULL);
	buf.resize(needed - 1);	// drop the terminating NUL

	SAI.strText = buf;
	nFirstPos = nLastPos = 0;

	return TRUE;
}

// ANSI entry point: store lpstrText as the text to analyse and reset the
// scan cursors.
// Returns FALSE (leaving previous state untouched) if lpstrText is NULL,
// TRUE otherwise.
extern "C" BOOL PASCAL EXPORT SetAnalyseTextA(const char * lpstrText)
{
	// Guard: assigning a NULL pointer to std::string is undefined behaviour.
	if(lpstrText == NULL)
		return FALSE;

	SAI.strText = lpstrText;
	nFirstPos = nLastPos = 0;

	return TRUE;
}

// Wide-char variant: fetch the next token and hand back a pointer to an
// internal wide-char copy of its text.
// Out-params: TokenText - points into a static scratch buffer, valid only
// until the next call (NOT thread-safe); TokenType - type id, TOKEN_ERROR,
// or TOKEN_END; TokenLength - length of the token in bytes of the
// underlying ANSI text.
// Returns FALSE when the input is exhausted, TRUE otherwise.
extern "C" BOOL PASCAL EXPORT NextTokenW(LPCTSTR & TokenText, int & TokenType, int & TokenLength)
{
	static TCHAR token_buff[10000];
	const int buff_cap = (int)(sizeof(token_buff) / sizeof(token_buff[0]));
	int wlen;
	struToken t_ret = next_token();
	if(t_ret.TokenType == TOKEN_END)
	{
		TokenType = TOKEN_END;
		TokenLength = 0;
		return FALSE;
	}

	// Required size in wide chars, including the terminating NUL.
	wlen = MultiByteToWideChar(CP_ACP, 0, t_ret.TokenText.c_str(), -1, NULL, 0);

	if(wlen <= 0 || wlen > buff_cap)
	{
		// Conversion failed, or the token would overflow the scratch buffer.
		// The old code passed wlen through unchecked, overrunning token_buff
		// for tokens longer than its capacity; report empty text instead.
		token_buff[0] = 0;
	}
	else
	{
		MultiByteToWideChar(CP_ACP, 0, t_ret.TokenText.c_str(), -1, token_buff, wlen);
	}

	TokenText = LPCTSTR(token_buff);
	TokenType = t_ret.TokenType;
	TokenLength = t_ret.TokenText.length();
	return TRUE;
}

// ANSI variant of NextTokenW: copies the next token's text straight into the
// caller-supplied buffer (no wide-char conversion).
// Out-params: TokenText - receives the NUL-terminated token text (empty
// string at end of input); TokenType - type id, TOKEN_ERROR, or TOKEN_END;
// TokenLength - length of the token text in bytes.
// Returns FALSE when the input is exhausted, TRUE otherwise.
// WARNING(review): strcpy assumes TokenText is large enough for the whole
// token - this interface gives the callee no way to check.  Callers must
// size the buffer to at least the analysed text's length + 1.
extern "C" BOOL PASCAL EXPORT NextTokenA(char * TokenText, int & TokenType, int & TokenLength)
{
	struToken t_ret = next_token();
	if(t_ret.TokenType == TOKEN_END)
	{
		*TokenText = '\0';
		TokenType = TOKEN_END;
		TokenLength = 0;
		return FALSE;
	}
	
	// Unbounded copy - see WARNING above.
	strcpy(TokenText,t_ret.TokenText.c_str());
	TokenType = t_ret.TokenType;
	TokenLength = t_ret.TokenText.length();
	return TRUE;
}


// Fetch the next token for a Visual Basic caller.  The token text is widened
// into TokenText as byte pairs (ANSI byte followed by a zero byte), the
// in-memory layout VB expects for its strings.
// Out-params (untouched at end of input): TokenText - widened token text
// (NOT NUL-terminated); TokenType - type id or TOKEN_ERROR; TokenStart -
// token offset within the analysed text; TokenLength - token length in
// characters (TokenText holds twice that many bytes).
// Returns FALSE when the input is exhausted, TRUE otherwise.
extern "C" BOOL PASCAL EXPORT NextTokenForVB(char * TokenText, int * TokenType, int * TokenStart, int * TokenLength)
{
	const struToken tok = next_token();
	if(tok.TokenType == TOKEN_END)
		return FALSE;

	// Widen each character: low byte = the character, high byte = 0.
	size_t out = 0;
	for(size_t pos = 0; pos < tok.TokenText.length(); ++pos)
	{
		TokenText[out++] = tok.TokenText[pos];
		TokenText[out++] = '\0';
	}

	*TokenStart = tok.TokenStartPos;
	*TokenType = tok.TokenType;
	*TokenLength = (int)tok.TokenText.length();
	return TRUE;
}

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -