Browse Source

Build package from repo (first steps) #1

etl_0.4_Release
Daniel Quathamer 2 years ago
parent
commit
eccea3dbd4
  1. 92
      build.xml
  2. 51
      src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java
  3. 779
      src/de/superx/etl/TableUploader.java

92
build.xml

@@ -7,8 +7,42 @@ ant -DMODULE_PATH=$ETL_PFAD -DBASE_DIR=. -DWEBAPP=$WEBAPP -DMODULE=etl all
So erzeugen Sie dann ein SuperX-Paket: So erzeugen Sie dann ein SuperX-Paket:
ant -DMODULE_PATH=$ETL_PFAD -DWEBAPP_DIR=$WEBAPP -DMODULE=etl dist ant -DMODULE_PATH=$ETL_PFAD -DWEBAPP_DIR=$WEBAPP -DMODULE=etl dist
ETL-JAR erzeugen:
ant -DWEBAPP=$WEBAPP distEtl
ant -DWEBAPP=$WEBAPP cleanBuildPath
--> -->
<dirname file="${ant.file.superx}" property="superxBuildBaseDir" />
<property name="distDir" location="${superxBuildBaseDir}/../dist" />
<property environment="env" />
<!-- set global properties for this build -->
<property name="BASE_DIR" value="." />
<property name="SUPERX_DIR" value="${WEBAPP}"/>
<property name="BUILD_PATH" value="${WEBAPP}/WEB-INF/classes"/>
<property name="SRC_DIR" value="${BASE_DIR}/src" />
<property name="SRC_DIR_TEST" value="${BASE_DIR}/test-src"/>
<property name="LIB_SUPERX_DIR" value="${WEBAPP}/WEB-INF/lib"/>
<property name="LIB_WEBAPP_DIR" value="${WEBAPP}"/>
<property name="LIB_EXT_SUPERX_DIR" value="${WEBAPP}/WEB-INF/lib_ext"/>
<property name="superx-classes" value="${WEBAPP}/WEB-INF/classes" />
<property name="build_cobertura" value="${superxBuildBaseDir}/cobertura_build_classes" />
<property name="test-target" value="${superxBuildBaseDir}/cobertura_build_tests"/>
<property name="superx-lib" value="${WEBAPP}/WEB-INF/lib" />
<property name="build-results" value="${superxBuildBaseDir}/results" />
<property name="build_cobertura_report" value="${superxBuildBaseDir}/coveragereport" />
<property name="src-java" value="${SRC_DIR}"/>
<property name="test-resource" location="resource"/>
<property name="version" value="0.1" />
<property name="docm_version" value="1.0" />
<property name="etl_version" value="0.2" />
<dirname file="${ant.file}" property="moduleCreateBaseDir" /> <dirname file="${ant.file}" property="moduleCreateBaseDir" />
<property name="BASE_DIR" value="${moduleCreateBaseDir}/../.." /> <property name="BASE_DIR" value="${moduleCreateBaseDir}/../.." />
<property name="WEBAPP" value="${MODULE_PATH}/../../../webserver/tomcat/webapps/superx" /> <property name="WEBAPP" value="${MODULE_PATH}/../../../webserver/tomcat/webapps/superx" />
@@ -18,6 +52,10 @@ ant -DMODULE_PATH=$ETL_PFAD -DWEBAPP_DIR=$WEBAPP -DMODULE=etl dist
<include name="lib/**/*.jar" /> <include name="lib/**/*.jar" />
<include name="classes" /> <include name="classes" />
</fileset> </fileset>
<fileset dir="${LIB_EXT_SUPERX_DIR}">
<include name="**/*.jar" />
</fileset>
</path> </path>
<condition property="isWindoze"> <condition property="isWindoze">
<os family="windows" /> <os family="windows" />
@@ -26,6 +64,14 @@ ant -DMODULE_PATH=$ETL_PFAD -DWEBAPP_DIR=$WEBAPP -DMODULE=etl dist
<condition property="isUnix"> <condition property="isUnix">
<os family="unix" /> <os family="unix" />
</condition> </condition>
<taskdef name="groovy"
classname="org.codehaus.groovy.ant.Groovy"
classpathref="classpath"/>
<target name="initTimestamp">
<tstamp>
<format property="SX_TIMESTAMP" pattern="dd.MM.yyyy HH:mm" />
</tstamp>
</target>
<target name="all" description="Gruppe: Kopiere Modul Sourcen in den WEB-INF-Verzeichnisbaum"> <target name="all" description="Gruppe: Kopiere Modul Sourcen in den WEB-INF-Verzeichnisbaum">
<antcall target="module_copy_source" /> <antcall target="module_copy_source" />
<antcall target="call_module_scripts_create_ant" /> <antcall target="call_module_scripts_create_ant" />
@@ -67,4 +113,50 @@ ant -DMODULE_PATH=$ETL_PFAD -DWEBAPP_DIR=$WEBAPP -DMODULE=etl dist
<fileset dir="${MODULE_PATH}/../../conf" includes="build.xml" /> <fileset dir="${MODULE_PATH}/../../conf" includes="build.xml" />
</subant> </subant>
</target> </target>
<target name="distEtl" depends="compileSuperx" description="Erzeugt die superx-etl${VERSION}.jar, d.h. das Servlet. ">
<delete>
<fileset dir="${LIB_WEBAPP_DIR}" includes="superx-etl*.jar" />
</delete>
<jar destfile="${LIB_WEBAPP_DIR}/superx-etl${etl_version}.jar" basedir="${BUILD_PATH}" includes="de/superx/etl/** de/superx/sxrest/**" excludes="freemarker/**/* com/**/* javax/**/* **/*.java">
<manifest>
<!-- Who is building this jar? -->
<attribute name="Built-By" value="superx"/>
<!-- Information about the program itself -->
<attribute name="Implementation-Date" value="${SX_TIMESTAMP}"/>
<attribute name="Implementation-Title" value="SuperX"/>
<attribute name="Implementation-Version" value="${version}"/>
</manifest>
</jar>
</target>
<target name="compileSuperx" depends="cleanBuildPath" description="Compile all classes for superx.">
<filter token="sxtimestamp" value="${SX_TIMESTAMP}" />
<filter token="version" value="${etl_version}" />
<!-- MB 13.8.2009 auch mitkompilieren <echo message="de/memtext/db/Servlet* weggelassen"/>-->
<copy todir="${BUILD_PATH}" filtering="true">
<fileset dir="${SRC_DIR}">
<include name="**/*.java" />
<include name="**/*.properties" />
<include name="**/*.gif" />
<include name="**/*.xml" />
<include name="edustore/*" />
<include name="saiku-datasources/*" />
<include name="saiku-repository/*" />
<exclude name="de/superx/docmanagement/*"/>
</fileset>
</copy>
<!--<echo message="cp:${FULL_CLASSPATH}"/>-->
<javac source="1.8" target="1.8"
srcdir="${BUILD_PATH}" excludes="**/*Test*.java,test/**/*"
destdir="${BUILD_PATH}"
listfiles="false" encoding="UTF-8" debug="yes">
<classpath refid="classpath" />
</javac>
</target>
<target name="cleanBuildPath" depends="initTimestamp" description="Löscht alle *.class-Dateien in WEB-INF/classes">
<delete failonerror="false" includeemptydirs="true">
<fileset dir="${BUILD_PATH}" includes="**/*.class,**/*.java" excludes="*.properties" />
</delete>
</target>
</project> </project>

51
src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java

@@ -1,51 +0,0 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
import de.superx.etl.MaskExecutor;
import de.superx.etl.QueryResultSerializer;
/**
 * Action handler that executes a SuperX mask (report) as a single ETL step.
 *
 * The mask id and user are read from {@code handlerSpecificProperties}
 * ("maskeninfo_id", "username"); the result is handed back to the base
 * handler via {@code setNumberOfRows}/{@code setReturnCode}/
 * {@code setOutputStringWriter}.
 */
public class EtlActionHandlerExecuteMask extends EtlActionHandler implements EtlActionHandlerI {

	public EtlActionHandlerExecuteMask() {
	}

	/**
	 * Executes the configured mask and stores row count, return code and
	 * output on the base handler.
	 *
	 * @param sw             writer receiving the mask output
	 * @param mandantid      tenant id (currently ignored, see note below)
	 * @param stepUniquename unique name of the ETL step (unused here)
	 * @param outFormat      requested output format (unused here)
	 * @return always 0; the real result is exposed via {@code getReturnCode()}
	 *         on the base handler
	 * @throws SQLException on database errors
	 * @throws Exception    on any other execution failure
	 */
	@Override
	public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat)
			throws SQLException, Exception {
		int returnCode = 0;
		this.getConnection();
		try {
			// Integer.parseInt instead of the deprecated new Integer(String):
			// no needless boxing, same NumberFormatException on bad input.
			int maskeninfo_id = Integer.parseInt(handlerSpecificProperties.getProperty("maskeninfo_id"));
			String username = handlerSpecificProperties.getProperty("username");
			// NOTE(review): the mandantid parameter is ignored and "default" is
			// hard-coded — confirm whether multi-tenant steps should pass it through.
			String mandantenId = "default";
			Properties params = null; // TODO read from assertion
			MaskExecutor myMaskExecutor = new MaskExecutor(mandantenId, stepSxConnection, maskeninfo_id,
					username, params, sw);
			myMaskExecutor.setLogger(logger);
			super.setNumberOfRows(myMaskExecutor.executeMask(mandantenId,
					maskeninfo_id,
					username,
					params));
			super.setReturnCode(myMaskExecutor.getReturnCode());
			super.setOutputStringWriter(myMaskExecutor.getOutputString());
		} finally {
			// Close the step connection even when mask execution throws
			// (it was leaked on the error path before).
			stepSxConnection.close();
		}
		return returnCode;
	}
}

779
src/de/superx/etl/TableUploader.java

@@ -1,779 +0,0 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.text.ParseException;
import java.util.Iterator;
import java.util.Properties;
import de.superx.util.FileUtils;
import de.superx.util.SqlStringUtils;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.*;
import org.postgresql.PGConnection;
import org.postgresql.copy.CopyManager;
import de.superx.bin.SxConnection;
import de.superx.bin.SxJdbcClient;
import de.superx.bin.UploadRecords;
import de.memtext.util.DateUtils;
import de.memtext.util.GetOpts;
import de.memtext.util.StringUtils;
import de.memtext.util.XMLUtils;
/**
 * Uploads the contents of a CSV or XML file into a single database table.
 *
 * The target table's column schema is read via JDBC metadata; rows are
 * inserted with a prepared statement (optionally batched), or — on
 * PostgreSQL and unless an explicit insert mode is requested — streamed
 * with the native COPY API. Error handling is controlled by {@code mode}:
 * "stop", "exclude-field", "transaction", or "exclude-row".
 */
public class TableUploader {

	// --- configuration ----------------------------------------------------
	private String logfile;
	private String dbpropfile;
	// error-handling mode; anything but stop/exclude-field/transaction maps to exclude-row
	private String mode = "stop";
	private String inFormat;                 // "xml" selects XML upload, anything else CSV
	private String targetTable;              // table rows are inserted into
	private String srcFile;                  // path of the source data file
	private boolean header = false;          // true if the CSV file has a header row
	private String delim = "^";              // CSV field delimiter
	private String encoding = SqlStringUtils.getEncoding();
	private String inserts = "";             // "batch", "simple" or "" (Postgres COPY)
	private boolean truncateTargetTable = true;
	private boolean continueAfterError;
	private boolean removeTrailingDelim = true;
	private boolean isPostgres;              // set by getConnection() from the driver class
	private boolean useBatch = true;

	// --- target-table schema, filled by initializeColumnSchema() ----------
	private static int maxCols = 1000;
	private String[] insert_cols = new String[maxCols];
	private int[] insert_types = new int[maxCols];   // java.sql.Types codes per column
	private int numberOfColumns;

	public long numberOfRows;                // rows uploaded by the last run
	private int returnCode;                  // 0 = ok, 1 = error
	private String xml_search_path;          // optional XPath selecting the row nodes
	private Connection uploadConnection;
	private DatabaseMetaData dbmd;
	private PreparedStatement pst;

	public TableUploader() {
	}

	public Connection getUploadConnection() {
		return uploadConnection;
	}

	public void setUploadConnection(Connection uploadConnection) {
		this.uploadConnection = uploadConnection;
	}

	public boolean isRemoveTrailingDelim() {
		return removeTrailingDelim;
	}

	public void setRemoveTrailingDelim(boolean removeTrailingDelim) {
		this.removeTrailingDelim = removeTrailingDelim;
	}

	public void setHeader(boolean header) {
		this.header = header;
	}

	public String getDbpropfile() {
		return dbpropfile;
	}

	public void setDbpropfile(String dbpropfile) {
		this.dbpropfile = dbpropfile;
	}

	public String getMode() {
		return mode;
	}

	/** Sets the error-handling mode; unknown values fall back to "exclude-row". */
	public void setMode(String mode) {
		if (!mode.equals("stop") && !mode.equals("exclude-field") && !mode.equals("transaction"))
			mode = "exclude-row";
		this.mode = mode;
	}

	public String getInFormat() {
		return inFormat;
	}

	public void setInFormat(String inFormat) {
		this.inFormat = inFormat;
	}

	public String getTargetTable() {
		return targetTable;
	}

	public void setTargetTable(String targetTable) {
		this.targetTable = targetTable;
	}

	public String getSrcFile() {
		return srcFile;
	}

	public void setSrcFile(String srcFile) {
		this.srcFile = srcFile;
	}

	public String getDelim() {
		return delim;
	}

	/** Sets the field delimiter; "tab" means a tab character, "" the default "^". */
	public void setDelim(String delim) {
		if (delim.equals("tab"))
			delim = "\t"; //Tab
		if (delim.equals(""))
			delim = "^"; //default Delimiter
		this.delim = delim;
	}

	public String getEncoding() {
		return encoding;
	}

	/** Sets the file encoding; null or "" defaults to UTF-8. */
	public void setEncoding(String encoding) {
		if (encoding == null || encoding.equals(""))
			encoding = "UTF-8";
		this.encoding = encoding;
	}

	public String getInserts() {
		return inserts;
	}

	/** "batch" enables and "simple" disables JDBC batching; other values leave it unchanged. */
	public void setInserts(String inserts) {
		if (inserts.equalsIgnoreCase("batch"))
			useBatch = true;
		if (inserts.equalsIgnoreCase("simple"))
			useBatch = false;
		this.inserts = inserts;
	}

	public boolean isTruncateTargetTable() {
		return truncateTargetTable;
	}

	public void setTruncateTargetTable(boolean truncateTargetTable) {
		this.truncateTargetTable = truncateTargetTable;
	}

	/** String variant: only (case-insensitive) "true" enables truncation; null disables it. */
	public void setTruncateTargetTable(String truncateTargetTable) {
		this.truncateTargetTable = truncateTargetTable != null
				&& truncateTargetTable.equalsIgnoreCase("true");
	}

	public int getReturnCode() {
		return returnCode;
	}

	public void setReturnCode(int returnCode) {
		this.returnCode = returnCode;
	}

	public boolean isContinueAfterError() {
		return continueAfterError;
	}

	public void setContinueAfterError(boolean continueAfterError) {
		this.continueAfterError = continueAfterError;
	}

	public String getXml_search_path() {
		return xml_search_path;
	}

	public void setXml_search_path(String xml_search_path) {
		this.xml_search_path = xml_search_path;
	}

	/**
	 * Uploads the configured source file into the target table.
	 * Optionally empties the table first, then dispatches to the XML or CSV path.
	 *
	 * @return number of uploaded rows
	 * @throws Exception on any failure; {@code returnCode} is set to 1 first
	 */
	public long uploadFile() throws Exception {
		long rows = 0;
		returnCode = 0;
		try {
			if (truncateTargetTable) {
				// DELETE rather than TRUNCATE; statement is closed even on error now
				try (Statement stm = uploadConnection.createStatement()) {
					stm.execute("delete from " + this.targetTable + ";");
				}
			}
			if (inFormat.equalsIgnoreCase("xml"))
				rows = uploadXML();
			else
				rows = uploadCSV();
		} catch (Exception e) {
			returnCode = 1;
			throw new Exception(e); // cause preserved
		}
		return rows;
	}

	/**
	 * Uploads an XML file: row nodes are selected either as {@code <row>}
	 * elements or via {@code xml_search_path} (evaluated with Saxon XPath);
	 * each child node whose "name" attribute matches a table column becomes
	 * a prepared-statement parameter.
	 */
	private long uploadXML() throws Exception {
		String errmsg = "";
		numberOfRows = 0;
		Document mydomres = de.superx.etl.EtlUtils.buildDocumentFromXmlFile(srcFile);
		NodeList rowlist;
		if (xml_search_path == null) {
			rowlist = mydomres.getElementsByTagName("row");
		} else {
			// Saxon's XPathFactory is instantiated explicitly (not via JAXP lookup)
			XPathFactory factory = new net.sf.saxon.xpath.XPathFactoryImpl();
			XPath xPath = factory.newXPath();
			rowlist = (NodeList) xPath.compile(xml_search_path).evaluate(
					mydomres, XPathConstants.NODESET);
		}
		initializeColumnSchema();
		String insertHead = createPreparedStatementHead();
		pst = uploadConnection.prepareStatement(insertHead);
		if (useBatch)
			pst.clearBatch();
		int anz_rows = rowlist.getLength();
		for (int zeilennr = 0; zeilennr < anz_rows; zeilennr++) {
			// loop over each row of the XML stream
			Node rownode = rowlist.item(zeilennr);
			for (int col = 0; col < numberOfColumns; col++) {
				for (Iterator it = XMLUtils.getChildNodeIterator(rownode); it.hasNext();) {
					Node fldNode = (Node) it.next();
					if (XMLUtils.getAttribValue(fldNode, "name").equalsIgnoreCase(insert_cols[col])) {
						String feld_wert = "";
						try {
							feld_wert = XMLUtils.getTheValue(fldNode).trim();
							// undo the CDATA masking applied by the exporter
							feld_wert = StringUtils.replace(feld_wert,
									"CDATASTART", "<![CDATA[");
							feld_wert = StringUtils.replace(feld_wert,
									"CDATAEND", "]]>");
						} catch (IllegalArgumentException ignored) {
							// node is NULL, no warning necessary
						}
						errmsg = feld_wert_to_pst(zeilennr, col, errmsg, feld_wert);
					}
				}
			} // end of column loop
			if (!errmsg.equals("") && mode.equals("stop")) {
				break;
			}
			if (useBatch)
				pst.addBatch();
			else
				pst.executeUpdate();
			numberOfRows++;
		} // end of row loop
		if (useBatch)
			pst.executeBatch();
		return numberOfRows;
	}

	/**
	 * Uploads a CSV file, preferring PostgreSQL COPY unless an explicit
	 * insert mode ("simple"/"batch") was requested.
	 */
	private long uploadCSV() throws Exception {
		long rows;
		if (isPostgres && !inserts.equalsIgnoreCase("simple") && !inserts.equalsIgnoreCase("batch")) {
			if (removeTrailingDelim)
				srcFile = removeTrailingDelim(srcFile);
			rows = uploadCSVinPostgres(srcFile, removeTrailingDelim);
		} else {
			rows = uploadCSVwithAnsiSQL(srcFile);
		}
		return rows;
	}

	/**
	 * Writes a copy of {@code srcFile} with any trailing field delimiter
	 * stripped from each line (COPY rejects a dangling delimiter).
	 *
	 * @return path of the temporary copy ({@code srcFile + ".tmp"})
	 */
	private String removeTrailingDelim(String srcFile)
			throws UnsupportedEncodingException, FileNotFoundException, IOException {
		String returnSrcFile = srcFile + ".tmp";
		// try-with-resources: reader and writer were leaked before
		try (BufferedReader in2 = new BufferedReader(
						new InputStreamReader(new FileInputStream(srcFile), encoding));
				PrintStream out2 = new PrintStream(
						new FileOutputStream(new File(returnSrcFile), false), true, encoding)) {
			String line;
			while ((line = in2.readLine()) != null) {
				if (line.endsWith(delim))
					line = line.substring(0, line.length() - delim.length());
				out2.println(line);
			}
		}
		return returnSrcFile;
	}

	/**
	 * Streams the CSV file into the table via PostgreSQL's COPY API.
	 * If the database ctype looks like an ISO/Windows locale, the file is
	 * first converted to ISO-8859-9. Failures do not throw; {@code returnCode}
	 * is set to 1 instead (best-effort, as before — but no longer silent).
	 *
	 * @param deleteSrcFile delete the (temporary) source file afterwards
	 * @return rows reported by COPY, 0 on failure
	 */
	private long uploadCSVinPostgres(String srcFile, boolean deleteSrcFile) {
		long numOfRows = 0;
		String copySql = "COPY " + targetTable + " FROM STDIN WITH DELIMITER '" + delim
				+ "' NULL '' ENCODING '" + encoding + "'";
		copySql += header ? " HEADER" : "";
		String srcFileContent = de.superx.etl.EtlUtils.getFileContentsWithEncoding(srcFile, encoding);
		try {
			String dbname = uploadConnection.getCatalog();
			int isIso = 0;
			// parenthesized: without them the datname filter only applied to the
			// first ilike term (AND binds tighter than OR)
			try (Statement stm = uploadConnection.createStatement();
					ResultSet rs = stm.executeQuery(
							"SELECT distinct 1 FROM pg_catalog.pg_database where datname='" + dbname
							+ "' and (datctype ilike '%euro%' or datctype ilike '%1252%' or datctype ilike '%8859%');")) {
				while (rs.next()) {
					if (rs.getObject(1) != null)
						isIso = Integer.parseInt(rs.getObject(1).toString());
				}
			}
			Reader in4;
			if (isIso == 1) {
				// convert the upload file to ISO before streaming it to COPY
				String srcFileIso = srcFile + "_iso.tmp";
				String srcFileContentValidIso = FileUtils.convertToIso(srcFileContent, "postgres");
				de.superx.etl.EtlUtils.saveFileContentsWithEncoding(srcFileIso, srcFileContentValidIso, "iso-8859-9");
				in4 = new BufferedReader(new InputStreamReader(
						new FileInputStream(srcFileIso), "iso-8859-9"));
			} else {
				in4 = new BufferedReader(new FileReader(srcFile));
			}
			final CopyManager cpm = ((PGConnection) uploadConnection).getCopyAPI();
			try {
				numOfRows = cpm.copyIn(copySql, in4);
			} finally {
				in4.close(); // reader was leaked before
			}
			numberOfRows = numOfRows;
			if (deleteSrcFile) {
				new File(srcFile).delete();
			}
		} catch (Exception e) {
			// previously swallowed into an unused local; now visible to callers
			returnCode = 1;
			System.err.println("Upload to " + targetTable + " failed: " + e);
		}
		return numOfRows;
	}

	/**
	 * Uploads a CSV file with portable prepared-statement inserts.
	 * Lines ending in a backslash continue the logical row on the next
	 * physical line. In "stop" mode the first row error aborts the upload.
	 */
	private long uploadCSVwithAnsiSQL(String srcFile)
			throws SQLException, FileNotFoundException, IOException {
		numberOfRows = 0;
		String msg = "";
		int zeilennr = 1;
		initializeColumnSchema();
		String insertHead = createPreparedStatementHead();
		pst = uploadConnection.prepareStatement(insertHead);
		if (useBatch)
			pst.clearBatch();
		// try-with-resources: the reader was never closed before
		try (BufferedReader in = new BufferedReader(
				new InputStreamReader(new FileInputStream(srcFile), encoding))) {
			String text;
			while ((text = in.readLine()) != null) {
				if (text.endsWith("\\")) {
					// stitch continuation lines together (the trailing "\" is removed)
					text = text.substring(0, text.length() - 1);
					String text2 = in.readLine();
					if (text2 != null) {
						text += "\n" + text2;
						// null guard added: a file ending in "\" no longer NPEs
						while (text2 != null && text2.endsWith("\\")) {
							text = text.substring(0, text.length() - 1);
							text2 = in.readLine();
							if (text2 != null)
								text += "\n" + text2;
						}
					}
				}
				String prepare = createPreparedInsertStatement(zeilennr, insertHead, text);
				if (!prepare.equals("") && mode.equals("stop")) {
					msg = prepare;
					break;
				}
				if (useBatch)
					pst.addBatch();
				else
					pst.executeUpdate();
				numberOfRows++;
				zeilennr++; // was never incremented: every error reported line 1
			}
		}
		if (useBatch)
			pst.executeBatch();
		// TODO: msg is collected but not yet reported to the caller
		return numberOfRows;
	}

	/**
	 * Splits one CSV row at (unescaped) delimiters and binds each field to
	 * the prepared statement.
	 *
	 * @return accumulated error message, "" if the row bound cleanly
	 */
	private String createPreparedInsertStatement(
			int line,
			String insertHead,
			String text)
			throws SQLException {
		int p;
		int i = 0;
		int k = 0;
		String errmsg = "";
		do {
			// ensure the row ends with a delimiter so the last field is parsed too
			if (!text.endsWith(delim))
				text += delim;
			p = text.indexOf(delim, i);
			// skip delimiters escaped with a backslash
			if (p > 0 && text.substring(p - 1, p).equals("\\"))
				p = text.indexOf(delim, p + 1);
			if (p > -1) {
				// (dead "p == -1" branch removed — unreachable inside this if)
				String feld_wert = text.substring(i, p);
				// literal "\\n" in a value becomes "\n"
				if (feld_wert != null && feld_wert.indexOf("\\\\n") > 0) {
					feld_wert = de.memtext.util.StringUtils.replace(feld_wert, "\\\\n", "\\n");
				}
				// "\<delim>" loses its backslash
				if (feld_wert != null && feld_wert.indexOf("\\" + delim) > 0) {
					feld_wert = de.memtext.util.StringUtils.replace(feld_wert, "\\", "");
				}
				// "\\" collapses to a single "\"
				if (feld_wert != null && feld_wert.indexOf("\\\\") > 0) {
					feld_wert = de.memtext.util.StringUtils.replace(feld_wert, "\\\\", "\\");
				}
				errmsg = feld_wert_to_pst(line, k, errmsg, feld_wert);
				k++;
				i = p + 1;
			}
		} while (p > -1);
		return errmsg;
	}

	/**
	 * Binds one field value to parameter {@code col + 1} of the prepared
	 * statement, converting according to the column's SQL type. Empty
	 * strings become SQL NULL. Conversion failures are appended to
	 * {@code errmsg}; in "exclude-field" mode the parameter is set to NULL.
	 */
	private String feld_wert_to_pst(int line, int col, String errmsg, String feld_wert)
			throws SQLException {
		if (col >= numberOfColumns)
			// NOTE(review): message reports numberOfColumns-1 as the expected
			// column count — looks off by one, kept for compatibility; verify.
			errmsg += "Anzahl Spalten in Datei ist " + col + ", aber es sollten nur "
					+ (numberOfColumns - 1) + " Spalten sein. Bitte prüfen Sie das Trennzeichen";
		else {
			if (feld_wert.equals(""))
				try {
					pst.setNull(col + 1, insert_types[col]);
				} catch (SQLException e1) {
					errmsg += e1.toString();
				}
			else {
				switch (insert_types[col]) {
					case Types.BIGINT :
					case Types.TINYINT :
					case Types.SMALLINT :
					case Types.INTEGER :
						try {
							int myInt = Integer.parseInt(feld_wert.trim());
							pst.setInt(col + 1, myInt);
						} catch (NumberFormatException e1) {
							// now reported with line/column context like the other types
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.FLOAT :
						try {
							float myFloat = Float.parseFloat(feld_wert.trim());
							pst.setFloat(col + 1, myFloat);
						} catch (NumberFormatException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.REAL :
					case Types.DOUBLE :
					case Types.NUMERIC :
					case Types.DECIMAL :
						try {
							double myDouble = Double.parseDouble(feld_wert.trim());
							pst.setDouble(col + 1, myDouble);
						} catch (NumberFormatException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.CHAR :
					case Types.VARCHAR :
					default :
						if (feld_wert.equals(" "))
							feld_wert = ""; // a single blank in the UNL file means empty string
						try {
							pst.setString(col + 1, feld_wert);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.LONGVARCHAR :
						ByteArrayInputStream by =
								new ByteArrayInputStream(feld_wert.getBytes());
						pst.setAsciiStream(
								col + 1,
								by,
								feld_wert.length());
						break;
					case Types.DATE :
						try {
							java.util.Date datum = DateUtils.parse(feld_wert.trim());
							// reformat to US style because java.sql.Date.valueOf needs it
							feld_wert = DateUtils.formatUS(datum);
							pst.setDate(
									col + 1,
									java.sql.Date.valueOf(feld_wert));
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (ParseException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (IllegalArgumentException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.TIME :
						try {
							pst.setTime(col + 1, java.sql.Time.valueOf(
									feld_wert.trim()));
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (IllegalArgumentException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.TIMESTAMP :
						try {
							java.util.Date datum = DateUtils.dateTimeParse(feld_wert.trim());
							// reformat to US style because java.sql.Timestamp.valueOf needs it
							feld_wert = DateUtils.dateTimeFormatUS(datum);
							pst.setTimestamp(
									col + 1,
									java.sql.Timestamp.valueOf(
											feld_wert + ".0"));
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (ParseException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (IllegalArgumentException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.BIT :
						try {
							// FIX: Boolean.getBoolean(s) reads the *system property*
							// named s — parseBoolean actually parses the field value.
							boolean wf = Boolean.parseBoolean(feld_wert.trim());
							pst.setBoolean(col + 1, wf);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						// Boolean is reported as OTHER (1111) by the Informix driver;
						// since the default is '' this still works there.
						break;
				}
			}
		}
		return errmsg;
	}

	/** In "exclude-field" mode, sets the offending parameter to SQL NULL. */
	private void setFieldToNull(
			int k,
			int[] insert_types,
			PreparedStatement pst) {
		if (mode.equals("exclude-field"))
			try {
				pst.setNull(k + 1, insert_types[k]);
			} catch (SQLException e3) {
				System.err.println("Invalid Field " + (k + 1) + " could not be set to null");
			}
	}

	/** Formats a conversion error with line, column, column name and value. */
	private String conversionException(int line, int col, String field_value, String error) {
		return "Error in line " + line + " in Column " + (col + 1) + " " + insert_cols[col]
				+ " value " + field_value + ": " + error + "; ";
	}

	/**
	 * Reads the target table's column names and SQL types from JDBC
	 * metadata into {@code insert_cols}/{@code insert_types}, and disables
	 * batching if the driver does not support it.
	 */
	private void initializeColumnSchema() throws SQLException {
		int i = 0;
		String tabelle = targetTable;
		// databases that store upper-case identifiers need the upper-cased name
		if (!dbmd.storesLowerCaseIdentifiers())
			tabelle = tabelle.toUpperCase();
		ResultSet rs = dbmd.getColumns(uploadConnection.getCatalog(), null, tabelle, null);
		try {
			while (rs.next()) {
				insert_cols[i] = rs.getObject("COLUMN_NAME").toString();
				insert_types[i] = rs.getInt("DATA_TYPE");
				i++;
			}
		} finally {
			rs.close(); // result set was leaked before
		}
		numberOfColumns = i;
		if (!dbmd.supportsBatchUpdates())
			useBatch = false;
	}

	/**
	 * Builds the parameterized INSERT statement for the target table,
	 * e.g. {@code insert into t(a, b) values( ?, ?);}.
	 *
	 * @throws SQLException if no columns were found (previously a
	 *                      StringIndexOutOfBoundsException)
	 */
	private String createPreparedStatementHead() throws SQLException {
		if (numberOfColumns == 0)
			throw new SQLException("No columns found for table " + targetTable
					+ " - run initializeColumnSchema() first and check the table name");
		StringBuilder cols = new StringBuilder();
		StringBuilder vals = new StringBuilder();
		for (int i = 0; i < numberOfColumns; i++) {
			if (i > 0) {
				cols.append(", ");
				vals.append(", ");
			}
			cols.append(insert_cols[i]);
			vals.append("?");
		}
		return "insert into " + targetTable + "(" + cols + ") values( " + vals + ");";
	}

	/**
	 * Returns the given connection, or opens one from the property file if
	 * {@code myConnection} is null; also detects PostgreSQL and caches the
	 * database metadata.
	 *
	 * NOTE(review): the opened connection is returned but NOT stored in
	 * {@code uploadConnection} — callers must call setUploadConnection()
	 * themselves (kept for compatibility; verify intent).
	 */
	public Connection getConnection(Connection myConnection, String propfile) throws Exception {
		if (myConnection == null) {
			SxConnection mySxConnection = new SxConnection();
			mySxConnection.setPropfile(propfile);
			myConnection = mySxConnection.getConnection();
			// constant-first equals avoids an NPE if the driver class is unset
			if ("org.postgresql.Driver".equals(mySxConnection.m_DriverClass))
				isPostgres = true;
		}
		dbmd = myConnection.getMetaData();
		return myConnection;
	}
}
Loading…
Cancel
Save