/*
* Parser.cs
*
* Copyright (c) 2001, The HSQL Development Group
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* Neither the name of the HSQL Development Group nor the names of its
* contributors may be used to endorse or promote products derived from this
* software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This package is based on HypersonicSQL, originally developed by Thomas Mueller.
*
* C# port by Mark Tutt
*
*/
namespace SharpHSQL
{
using System;
using System.Collections;
/**
* SQL statement parser: translates tokenized SQL statements into operations
* on a Database on behalf of a Channel (session).
*
* @version 1.0.0.1
*/
class Parser
{
private Database dDatabase;
private Tokenizer tTokenizer;
private Channel cChannel;
private string sTable;
private string sToken;
private object oData;
private int iType;
private int iToken;
/**
* Creates a parser for one SQL statement.
*
* @param db      the database the statement operates on
* @param t       the tokenizer holding the statement text
* @param channel the channel (session) issuing the statement
*/
public Parser(Database db, Tokenizer t, Channel channel)
{
dDatabase = db;
tTokenizer = t;
cChannel = channel;
}
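// The processXxx methods below expect the tokenizer to be positioned just
// past the statement's leading keyword (SELECT, CALL, UPDATE, DELETE,
// INSERT, ALTER); each method consumes the remainder of the statement.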
/**
* Processes a SELECT statement, including the SELECT ... INTO form, which
* materialises the result into a new table.
*
* @return the query result, or an update count for SELECT ... INTO
*
* @throws Exception
*/
public Result processSelect()
{
Select select = parseSelect();
if (select.sIntoTable == null)
{
// fredt@users.sourceforge.net begin changes from 1.50
// return select.getResult(cChannel.getMaxRows());
return select.getResult( select.limitStart, select.limitCount );
// fredt@users.sourceforge.net end changes from 1.50
}
else
{
Result r = select.getResult(0);
Table t = new Table(dDatabase, true, select.sIntoTable, false);
t.addColumns(r);
t.createPrimaryKey();
// SELECT .. INTO can't fail because of violation of primary key
t.insert(r, cChannel);
dDatabase.linkTable(t);
int i = r.getSize();
r = new Result();
r.iUpdateCount = i;
return r;
}
}
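// Illustrative statement shapes handled above (table and column names are
// made up for the example):
//
//     SELECT id, name FROM customers WHERE id > 10
//     SELECT id, name INTO customers_backup FROM customers
//
// The INTO form creates the target table, copies the result rows into it,
// and reports the row count as an update count instead of returning rows.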
/**
* Processes a CALL statement: the expression is evaluated once and its
* value is returned as a one-row, one-column result.
*
* @return the result holding the expression value
*
* @throws Exception
*/
public Result processCall()
{
Expression e = parseExpression();
e.resolve(null);
int type = e.getDataType();
object o = e.getValue();
Result r = new Result(1);
r.sTable[0] = "";
r.iType[0] = type;
r.sLabel[0] = "";
r.sName[0] = "";
object[] row = new object[1];
row[0] = o;
r.add(row);
return r;
}
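// Illustrative statement shape handled above: a single expression is
// evaluated and returned as a one-row, one-column result, e.g.
//
//     CALL 2 * 3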
/**
* Processes an UPDATE statement.
*
* @return the number of rows updated
*
* @throws Exception
*/
public Result processUpdate()
{
string token = tTokenizer.getstring();
cChannel.checkReadWrite();
cChannel.check(token, Access.UPDATE);
Table table = dDatabase.getTable(token, cChannel);
TableFilter filter = new TableFilter(table, null, false);
tTokenizer.getThis("SET");
ArrayList vColumn = new ArrayList();
ArrayList eColumn = new ArrayList();
int len = 0;
token = null;
do
{
len++;
int i = table.getColumnNr(tTokenizer.getstring());
vColumn.Add(i);
tTokenizer.getThis("=");
Expression e = parseExpression();
e.resolve(filter);
eColumn.Add(e);
token = tTokenizer.getstring();
} while (token.Equals(","));
Expression eCondition = null;
if (token.Equals("WHERE"))
{
eCondition = parseExpression();
eCondition.resolve(filter);
filter.setCondition(eCondition);
}
else
{
tTokenizer.back();
}
// do the update
Expression[] exp = new Expression[len];
eColumn.CopyTo(exp);
int[] col = new int[len];
int[] type = new int[len];
for (int i = 0; i < len; i++)
{
col[i] = ((int) vColumn[i]);
type[i] = table.getType(col[i]);
}
int count = 0;
if (filter.findFirst())
{
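// stage the old row images (del) and the new row images (ins) first, then
// apply the change as delete-all / insert-all inside a nested transaction,
// so a constraint violation can roll back the whole statement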
Result del = new Result(); // don't need column count and so on
Result ins = new Result();
int size = table.getColumnCount();
do
{
if (eCondition == null || eCondition.test())
{
object[] nd = filter.oCurrentData;
del.add(nd);
object[] ni = table.getNewRow();
for (int i = 0; i < size; i++)
{
ni[i] = nd[i];
}
for (int i = 0; i < len; i++)
{
ni[col[i]] = exp[i].getValue(type[i]);
}
ins.add(ni);
}
} while (filter.next());
cChannel.beginNestedTransaction();
try
{
Record nd = del.rRoot;
while (nd != null)
{
table.deleteNoCheck(nd.data, cChannel);
nd = nd.next;
}
Record ni = ins.rRoot;
while (ni != null)
{
table.insertNoCheck(ni.data, cChannel);
ni = ni.next;
count++;
}
table.checkUpdate(col, del, ins);
ni = ins.rRoot;
while (ni != null)
{
ni = ni.next;
}
cChannel.endNestedTransaction(false);
}
catch (Exception e)
{
// update failed (violation of primary key / referential integrity)
cChannel.endNestedTransaction(true);
throw e;
}
}
Result r = new Result();
r.iUpdateCount = count;
return r;
}
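// Illustrative statement shape handled above (table and column names are
// made up for the example):
//
//     UPDATE customers SET name = 'Joe', city = 'Berne' WHERE id = 3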
/**
* Processes a DELETE statement.
*
* @return the number of rows deleted
*
* @throws Exception
*/
public Result processDelete()
{
tTokenizer.getThis("FROM");
string token = tTokenizer.getstring();
cChannel.checkReadWrite();
cChannel.check(token, Access.DELETE);
Table table = dDatabase.getTable(token, cChannel);
TableFilter filter = new TableFilter(table, null, false);
token = tTokenizer.getstring();
Expression eCondition = null;
if (token.Equals("WHERE"))
{
eCondition = parseExpression();
eCondition.resolve(filter);
filter.setCondition(eCondition);
}
else
{
tTokenizer.back();
}
int count = 0;
if (filter.findFirst())
{
Result del = new Result(); // don't need column count and so on
do
{
if (eCondition == null || eCondition.test())
{
del.add(filter.oCurrentData);
}
} while (filter.next());
Record n = del.rRoot;
while (n != null)
{
table.delete(n.data, cChannel);
count++;
n = n.next;
}
}
Result r = new Result();
r.iUpdateCount = count;
return r;
}
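// Illustrative statement shape handled above (table and column names are
// made up for the example):
//
//     DELETE FROM customers WHERE id = 3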
/**
* Processes an INSERT statement, in either the INSERT ... VALUES (...) or
* the INSERT ... SELECT form.
*
* @return the number of rows inserted
*
* @throws Exception
*/
public Result processInsert()
{
tTokenizer.getThis("INTO");
string token = tTokenizer.getstring();
cChannel.checkReadWrite();
cChannel.check(token, Access.INSERT);
Table t = dDatabase.getTable(token, cChannel);
token = tTokenizer.getstring();
ArrayList vcolumns = null;
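// an optional explicit column list may follow the table name, e.g.
// INSERT INTO t (col1, col2, ...); when present, the names are collected
// into vcolumns and mapped to column numbers below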
if (token.Equals("("))
{
vcolumns = new ArrayList();
int i = 0;
while (true)
{
vcolumns.Add(tTokenizer.getstring());
i++;
token = tTokenizer.getstring();
if (token.Equals(")"))
{
break;
}
if (!token.Equals(","))
{
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
}
token = tTokenizer.getstring();
}
int count = 0;
int len;
if (vcolumns == null)
{
len = t.getColumnCount();
}
else
{
len = vcolumns.Count;
}
if (token.Equals("VALUES"))
{
tTokenizer.getThis("(");
object[] row = t.getNewRow();
int i = 0;
while (true)
{
int column;
if (vcolumns == null)
{
column = i;
if (i >= len)
{
throw Trace.error(Trace.COLUMN_COUNT_DOES_NOT_MATCH);
}
}
else
{
column = t.getColumnNr((string) vcolumns[i]);
}
row[column] = getValue(t.getType(column));
i++;
token = tTokenizer.getstring();
if (token.Equals(")"))
{
break;
}
if (!token.Equals(","))
{
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
}
t.insert(row, cChannel);
count = 1;
}
else if (token.Equals("SELECT"))
{
Result result = processSelect();
Record r = result.rRoot;
Trace.check(len == result.getColumnCount(),
Trace.COLUMN_COUNT_DOES_NOT_MATCH);
int[] col = new int[len];
int[] type = new int[len];
for (int i = 0; i < len; i++)
{
int j;
if (vcolumns == null)
{
j = i;
}
else
{
j = t.getColumnNr((string) vcolumns[i]);
}
col[i] = j;
type[i] = t.getType(j);
}
cChannel.beginNestedTransaction();
try
{
while (r != null)
{
object[] row = t.getNewRow();
for (int i = 0; i < len; i++)
{
row[col[i]] = Column.convertobject(r.data[i],
type[i]);
}
t.insert(row, cChannel);
count++;
r = r.next;
}
cChannel.endNestedTransaction(false);
}
catch (Exception e)
{
// insert failed (violation of primary key)
cChannel.endNestedTransaction(true);
throw e;
}
}
else
{
throw Trace.error(Trace.UNEXPECTED_TOKEN, token);
}
Result rs = new Result();
rs.iUpdateCount = count;
return rs;
}
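// Illustrative statement shapes handled above (table and column names are
// made up for the example):
//
//     INSERT INTO customers (id, name) VALUES (1, 'Joe')
//     INSERT INTO customers_backup SELECT * FROM customers
//
// The VALUES form inserts a single row; the SELECT form copies every result
// row inside a nested transaction.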
/**
* Processes an ALTER TABLE statement:
*
* ALTER TABLE tableName ADD COLUMN columnName columnType;
* ALTER TABLE tableName DELETE COLUMN columnName;
*
* <B>Note: </B>The only change I've made to Sergio's original code was
* switching the inserts to insertNoCheck, to bypass the trigger mechanism
* that is part of hsqldb 1.60 and beyond. - Mark Tutt
*
* @return the update count
*
* @throws Exception
*/
public Result processAlter()
{
tTokenizer.getThis("TABLE");
string token = tTokenizer.getstring();
cChannel.checkReadWrite();
// cChannel.check(token,Access.ALTER); --> access rights are not checked here yet...
string tName = token;
string swap = tName + "SWAP";
// drop any leftover swap table...
dDatabase.execute("DROP TABLE " + swap, cChannel);
Table initialTable = dDatabase.getTable(token, cChannel);
int count = 0;
token = tTokenizer.getstring();
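// ADD COLUMN is implemented by building a shadow table named <tableName>SWAP:
// the original columns plus the new column, the primary key and the secondary
// indexes are recreated on it, and the existing rows are then copied across
// with insertNoCheck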
if (token.Equals("ADD"))
{
token = tTokenizer.getstring();
if (token.Equals("COLUMN"))
{
Table swapTable = new Table(dDatabase, true, swap,
initialTable.isCached());
// copy the columns (without data) from the original table into the swap table
for (int i = 0; i < initialTable.getColumnCount(); i++)
{
Column aColumn = initialTable.getColumn(i);
swapTable.addColumn(aColumn);
}
// end of column copying...
// find out whether the table has a primary key and create it...
string cName = tTokenizer.getstring();
string cType = tTokenizer.getstring();
int iType = Column.getTypeNr(cType);
string sToken = cType;
// int primarykeycolumn = -1;
bool identity = false;
int column = initialTable.getColumnCount() + 1;
// !--
// stolen from CREATE TABLE...
string sColumn = cName;
if (iType == Column.VARCHAR && dDatabase.isIgnoreCase())
{
iType = Column.VARCHAR_IGNORECASE;
}
sToken = tTokenizer.getstring();
if (iType == Column.DOUBLE && sToken.Equals("PRECISION"))
{
sToken = tTokenizer.getstring();
}
if (sToken.Equals("("))
{
// skip over the length specification
do
{
sToken = tTokenizer.getstring();
} while (!sToken.Equals(")"));
sToken = tTokenizer.getstring();
}
// !--
bool nullable = true;
if (sToken.Equals("NULL"))
{
sToken = tTokenizer.getstring();
}
else if (sToken.Equals("NOT"))
{
tTokenizer.getThis("NULL");
nullable = false;
sToken = tTokenizer.getstring();
}
/*
* if(sToken.Equals("IDENTITY")) {
* identity=true;
* Trace.check(primarykeycolumn==-1,Trace.SECOND_PRIMARY_KEY,sColumn);
* sToken=tTokenizer.getstring();
* primarykeycolumn=column;
* }
*
* if(sToken.Equals("PRIMARY")) {
* tTokenizer.getThis("KEY");
* Trace.check(identity || primarykeycolumn==-1,
* Trace.SECOND_PRIMARY_KEY,sColumn);
* primarykeycolumn=column;
* //sToken=tTokenizer.getstring();
* }
* //end of STOLEN...
*/
swapTable.addColumn(cName, iType, nullable,
identity); // under construction...
if (initialTable.getColumnCount()
< initialTable.getInternalColumnCount())
{
swapTable.createPrimaryKey();
}
else
{
swapTable.createPrimaryKey(initialTable.getPrimaryIndex().getColumns()[0]);
}
// end of primary key handling...
// now deal with the indexes... ;-((
Index idx = null;
while (true)
{
idx = initialTable.getNextIndex(idx);
if (idx == null)
{
break;
}
if (idx == initialTable.getPrimaryIndex())
{
continue;
}
swapTable.createIndex(idx);
}
// end of Index...
cChannel.commit();
dDatabase.linkTable(swapTable);
Tokenizer tmpTokenizer = new Tokenizer("SELECT * FROM "
+ tName);
Parser pp = new Parser(dDatabase, tmpTokenizer, cChannel);
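// read and discard the leading SELECT token; processSelect expects the
// keyword to have been consumed already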
string ff = tmpTokenizer.getstring();
if (!initialTable.isEmpty())
{
Record n = pp.processSelect().rRoot;
do
{
object[] row = swapTable.getNewRow();
object[] row1 = n.data;
for (int i = 0; i < initialTable.getColumnCount();
i++)
{
row[i] = row1[i];
}
swapTable.insertNoCheck(row, cChannel);
n = n.next;
} while (n != null);
}