📄 export.java
字号:
package cn.myapps.core.dynaform.dts.exp;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.Statement;
import java.sql.Types;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import cn.myapps.base.action.ParamsTable;
import cn.myapps.base.dao.DataPackage;
import cn.myapps.core.dynaform.document.ejb.Document;
import cn.myapps.core.dynaform.document.ejb.DocumentProcess;
import cn.myapps.core.dynaform.document.ejb.Item;
import cn.myapps.core.dynaform.dts.datasource.ejb.DataSource;
import cn.myapps.core.dynaform.dts.exp.columnmapping.ejb.ColumnMapping;
import cn.myapps.core.dynaform.dts.exp.mappingconfig.ejb.MappingConfig;
import cn.myapps.core.dynaform.dts.exp.mappingconfig.ejb.MappingConfigProcess;
import cn.myapps.core.macro.runner.JavaScriptRunner;
import cn.myapps.util.DateUtil;
import cn.myapps.util.ProcessFactory;
import cn.myapps.util.StringUtil;
import cn.myapps.util.file.FileOperate;
import cn.myapps.util.property.DefaultProperty;
public class Export {
private static SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd");
public static final String _ERROR_TYPE_01 = "Do not hold out owing a data base a type";
public static final String _ERROR_TYPE_02 = " MappingConfig has a mistake";
public static final String _ERROR_TYPE_03 = " Without eligible data";
public static final String _ERROR_TYPE_04 = " Without ColumnMapping";
public static final String _ERROR_TYPE_05 = "Can not derive a data";
private static final String defaultPath = "C:/DoucemntExportLog/";
// No-op constructor. All functionality in this class is exposed via static
// methods and the instance carries no state.
// NOTE(review): consider making this constructor private — this is a
// static-utility class and should not be instantiated.
public Export() {
}
/**
 * Convenience overload: loads the {@code MappingConfig} identified by
 * {@code mappcfgId} and delegates the actual export to
 * {@code exprotDocument(MappingConfig, boolean, String)}.
 * <p>
 * (The "exprot" spelling is kept as-is because it is part of the public API.)
 *
 * @param mappcfgId         id of the mapping configuration to look up
 * @param IsIncrementExprot {@code true} for an incremental export based on the
 *                          configuration's last run; {@code false} otherwise
 * @param application       application identifier passed through to the export
 * @return the status/error message returned by the delegated export
 * @throws Exception propagated from the configuration lookup or the export
 */
public static String exprotDocument(String mappcfgId,
		boolean IsIncrementExprot, String application) throws Exception {
	MappingConfigProcess configProcess = (MappingConfigProcess) ProcessFactory
			.createProcess(MappingConfigProcess.class);
	MappingConfig config = (MappingConfig) configProcess.doView(mappcfgId);
	return exprotDocument(config, IsIncrementExprot, application);
}
public static String exprotDocument(MappingConfig mappingconfig,
boolean IsIncrementExprot, String application) throws Exception {
StringBuffer errorInfo = new StringBuffer();// 记录出错信息
Connection conn = null;
Statement stmt = null;
PreparedStatement pstmt = null;
try {
String path = DefaultProperty.getProperty("DOCUMENTEXPORTLOG_PATH");
if (path == null || path.trim().length() < 1)
path = defaultPath;
String fileFullName = path + mappingconfig.getName()
+ DateUtil.getDateStrCompact(new Date())
+ System.currentTimeMillis() + ".txt";// 输出出错信息的路径和文件名
if (mappingconfig == null) {
errorInfo.append(_ERROR_TYPE_02 + "\r\n");
FileOperate.writeFile(fileFullName, errorInfo.toString(), true);
return _ERROR_TYPE_02;
}
DataSource dts = mappingconfig.getDatasource();
Class.forName(dts.getDriverClass()).newInstance();
conn = DriverManager.getConnection(dts.getUrl(), dts.getUsername(),
dts.getPassword());
DatabaseMetaData dma = conn.getMetaData();
String DatabaseVersion = dma.getDatabaseProductVersion();
Collection temp_columnmappings = mappingconfig.getColumnMappings();
stmt = conn.createStatement();
if (temp_columnmappings == null || temp_columnmappings.size() == 0) {
errorInfo.append(_ERROR_TYPE_04 + "\r\n");
FileOperate.writeFile(fileFullName, errorInfo.toString(), true);
return _ERROR_TYPE_04;
}
Collection columnmappings = new HashSet();
columnmappings.addAll(temp_columnmappings);
ColumnMapping expdate = new ColumnMapping();
expdate.setFromName("EXPORTDATE"); // 每条记录的导也时间
expdate.setToName("EXPORTDATE");
expdate.setToType("TIMESTAMP");
expdate.setLength("6");
columnmappings.add(expdate);
ColumnMapping docid = new ColumnMapping();
docid.setFromName("DOCID"); // 主键DocID
docid.setToName("DOCID");
docid.setToType(ColumnMapping.DATA_TYPE_VARCHAR);
docid.setLength("255");
columnmappings.add(docid);
ColumnMapping parentid = new ColumnMapping();
parentid.setFromName("PARENTID"); // Parentid
parentid.setToName("PARENTID");
parentid.setToType(ColumnMapping.DATA_TYPE_VARCHAR);
parentid.setLength("255");
columnmappings.add(parentid);
ColumnMapping flowState = new ColumnMapping();
flowState.setFromName("FLOWSTATE"); // FLOWSTATE
flowState.setToName("FLOWSTATE");
flowState.setToType(ColumnMapping.DATA_TYPE_VARCHAR);
flowState.setLength("255");
columnmappings.add(flowState);
ColumnMapping flowStateName = new ColumnMapping();
flowStateName.setFromName("FLOWSTATENAME"); // FLOWSTATE
flowStateName.setToName("FLOWSTATENAME");
flowStateName.setToType(ColumnMapping.DATA_TYPE_VARCHAR);
flowStateName.setLength("255");
columnmappings.add(flowStateName);
String tablename = mappingconfig.getTablename();
Map DateFiledInfo = null; // 数据库中存在的字段
Collection needAddColumns = null; // 需要新增的段
ResultSet rs = dma.getTables(null, null, tablename.toUpperCase(),
null);
if (!rs.next()) {// 当不存在该表时,生成创建表语句
String sqlCreatTable = creatTableSQL(mappingconfig,
columnmappings, DatabaseVersion);
System.out.print(sqlCreatTable);
stmt.executeUpdate(sqlCreatTable);
} else {
rs = dma.getColumns(null, null, tablename.toUpperCase(), null);
DateFiledInfo = new HashMap();
while (rs.next()) {
DateFiledInfo.put(rs.getString("COLUMN_NAME"), rs
.getString("TYPE_NAME"));
}
needAddColumns = new ArrayList();
for (Iterator iter = columnmappings.iterator(); iter.hasNext();) {
ColumnMapping cm = (ColumnMapping) iter.next();
if (!DateFiledInfo
.containsKey(cm.getToName().toUpperCase()))
needAddColumns.add(cm);
}
if (needAddColumns != null && needAddColumns.size() > 0) // 修改表,添加字段
{
for (Iterator iter = needAddColumns.iterator(); iter
.hasNext();) {
ColumnMapping cm = (ColumnMapping) iter.next();
String sqlAlterTable = creatAlterTableSQL(
mappingconfig, cm, DatabaseVersion);
stmt.executeUpdate(sqlAlterTable.toString());
}
}
}
// ////////
Collection compareColums = null; // MappconfigConfig.columns
// 和数据库比较后的结果
if (DateFiledInfo != null && DateFiledInfo.size() > 0) { // MappconfigConfig.columns与数据库存在的字段类型不同时,以数据库字段为准
compareColums = compareDateType(DateFiledInfo, columnmappings);
compareColums.addAll(needAddColumns);
}
// /////////
// 生成插入语句
String sqlInsert = null;
if (compareColums != null)
sqlInsert = creatInsertSQL(mappingconfig, compareColums); // 表已存在情况
else
sqlInsert = creatInsertSQL(mappingconfig, columnmappings); // 第一次创建表
// 准备PSTMT
pstmt = conn.prepareStatement(sqlInsert);
// 获取数据
String dql = mappingconfig.getValuescript();
DocumentProcess process = (DocumentProcess) ProcessFactory
.createProcess(DocumentProcess.class);
int totalLine = 0;
if (!IsIncrementExprot || mappingconfig.getLastRun() == null)
totalLine = process.getNeedExportDocumentTotal(dql, null,
application);
else
totalLine = process.getNeedExportDocumentTotal(dql,
mappingconfig.getLastRun(), application);
if (totalLine == 0) {
errorInfo.append(_ERROR_TYPE_03 + "\r\n");
FileOperate.writeFile(fileFullName, errorInfo.toString(), true);
return _ERROR_TYPE_03;
}
System.out.println("一共需要导出" + totalLine + "条数据");
if (!IsIncrementExprot) { // 如果是全部导出的话,先将以前的数据删掉
stmt.executeUpdate("delete from "
+ mappingconfig.getTablename());
}
int page = totalLine / 400;
if (totalLine % 200 != 0)
page++;
for (int i = 1; i <= page; i++) {
DataPackage documents = null;
if (IsIncrementExprot && mappingconfig.getLastRun() != null)
documents = process.queryByDQLAndDocumentLastModifyDate(
dql, mappingconfig.getLastRun(), i, 400,
application); // 每次取200条数据
else
documents = process
.queryByDQLPage(dql, i, 400, application); // 每次取200条数据
Collection docs = documents.datas;
System.out.println("第" + i + "次取" + documents.datas.size()
+ "条数据");
// 插入数据
JavaScriptRunner runner = JavaScriptRunner.getInstance();
if (docs != null) {
Collection tempcols = null;
if (compareColums != null && compareColums.size() > 0)
tempcols = compareColums; // 表已在情况
else
tempcols = columnmappings; // 第一次创建表时
for (Iterator iter = docs.iterator(); iter.hasNext();) {
Document dm = (Document) iter.next();
runner.initBSFManager(dm, new ParamsTable(), null,
new ArrayList(), application);
int columnNum = 1;
if (tempcols != null) {
for (Iterator it = tempcols.iterator(); it
.hasNext();) {
ColumnMapping cm = (ColumnMapping) it.next();
if (cm.getToName().equals("DOCID")) { // 设置主键
pstmt.setString(columnNum, dm.getId());
} else if (cm.getToName().equals("PARENTID")) { //
if (dm.getParent() != null)
pstmt.setString(columnNum, dm
.getParent().getId());
else
pstmt.setNull(columnNum, Types.NULL);
} else if (cm.getToName().equals("EXPORTDATE")) { // 设置导出时间
pstmt.setDate(columnNum, new java.sql.Date(
new Date().getTime()));
} else if (cm.getToName().equals("FLOWSTATE")) {
if (dm.getState() != null)
pstmt.setString(columnNum, String
.valueOf(dm.getState()
.getState()));
else
pstmt.setNull(columnNum, Types.NULL);
} else if (cm.getToName().equals(
"FLOWSTATENAME")) {
if (dm.getState() != null)
pstmt.setString(columnNum, dm
.getStateStr());
else
pstmt.setNull(columnNum, Types.NULL);
} else {
if (cm.getType().equals(
ColumnMapping.COLUMN_TYPE_FIELD)) { // COLUMNMAPPING_TYPE_FIELD
Item item = dm.findItem(cm
.getFromName());
if (item != null
&& item.getValue() != null) {
if (cm
.getToType()
.equals(
ColumnMapping.DATA_TYPE_NUMBER)) {
try {
pstmt
.setDouble(
columnNum,
item
.getNumbervalue()
.doubleValue());
} catch (Exception e) {
errorInfo
.append("Document "
+ dm
.getId()
+ " "
+ item
.getName()
+ " Can not convert Number "
+ "\r\n");
pstmt.setNull(columnNum,
Types.NULL);
}
} else if (cm
.getToType()
.equals(
ColumnMapping.DATA_TYPE_VARCHAR)) {
try {
pstmt
.setString(
columnNum,
item
.getVarcharvalue());
} catch (Exception e) {
errorInfo
.append("Document "
+ dm
.getId()
+ " "
+ item
.getName()
+ " Can not convert Varchar "
+ "\r\n");
pstmt.setNull(columnNum,
Types.NULL);
}
} else if (cm
.getToType()
.equals(
ColumnMapping.DATA_TYPE_DATE)) {
try {
pstmt
.setDate(
columnNum,
new java.sql.Date(
item
.getDatevalue()
.getTime()));
} catch (Exception e) {
errorInfo
.append("Document "
+ dm
.getId()
+ " "
+ item
.getName()
+ " Can not convert Date "
+ "\r\n");
pstmt.setNull(columnNum,
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -