Browse Source

Build package from repo (first steps) #1

etl_0.4_Release
Daniel Quathamer 2 years ago
parent
commit
fdff2097f5
  1. 1
      .gitignore
  2. 70
      build.xml
  3. 16
      src-modules/module/etl/bin/SQL_ENV_etl.sam
  4. 115
      src-modules/module/etl/bin/etl_dosql.x
  5. 11
      src-modules/module/etl/bin/etl_sx_execute_actions.x
  6. 69
      src-modules/module/etl/bin/etl_sx_insert_mask.x
  7. 46
      src-modules/module/etl/bin/etl_sx_select_mask.x
  8. 46
      src-modules/module/etl/bin/etl_sx_unload_records.x
  9. 46
      src-modules/module/etl/bin/etl_sx_upload_records.x
  10. 40
      src-modules/module/etl/conf/build.xml
  11. 433
      src-modules/module/etl/conf/etl.xml
  12. 1327
      src-modules/module/etl/conf/etl_step_fuellen.xsl
  13. 10
      src-modules/module/etl/conf/excludes.txt
  14. 4
      src-modules/module/etl/conf/includes.txt
  15. 6
      src-modules/module/etl/masken/42000_felderinfo.unl
  16. 1
      src-modules/module/etl/masken/42000_maske_system_bez.unl
  17. 6
      src-modules/module/etl/masken/42000_masken_felder_bez.unl
  18. 92
      src-modules/module/etl/masken/42000_maskeninfo.unl
  19. 1
      src-modules/module/etl/masken/42000_sachgeb_maske_bez.unl
  20. 12
      src-modules/module/etl/rohdaten/etl_unload.xml
  21. 592
      src-modules/module/etl/schluesseltabellen/etl_step_fuellen.sql
  22. 5
      src-modules/module/etl/schluesseltabellen/etl_step_type.unl
  23. 440
      src-modules/module/etl/schluesseltabellen/fm_templates.unl
  24. 25
      src-modules/module/etl/schluesseltabellen/fm_templates_fuellen.sql
  25. 25
      src-modules/module/etl/schluesseltabellen/fm_templates_unload.x
  26. 16
      src-modules/module/etl/schluesseltabellen/fm_templates_update.x
  27. 14
      src-modules/module/etl/schluesseltabellen/sachgebiete_fuellen.sql
  28. 3
      src-modules/module/etl/schluesseltabellen/sachgebiete_loeschen.sql
  29. 49
      src-modules/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql
  30. 22
      src-modules/module/etl/schluesseltabellen/themenbaum_fuellen.sql
  31. 131
      src/de/superx/etl/ActionHandler/EtlActionHandler.java
  32. 48
      src/de/superx/etl/ActionHandler/EtlActionHandlerDoquery.java
  33. 68
      src/de/superx/etl/ActionHandler/EtlActionHandlerDosql.java
  34. 51
      src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java
  35. 34
      src/de/superx/etl/ActionHandler/EtlActionHandlerI.java
  36. 30
      src/de/superx/etl/ActionHandler/EtlActionHandlerMsg.java
  37. 43
      src/de/superx/etl/ActionHandler/EtlActionHandlerUnloadRecords.java
  38. 58
      src/de/superx/etl/ActionHandler/EtlActionHandlerUploadRecords.java
  39. 200
      src/de/superx/etl/EtlAction.java
  40. 235
      src/de/superx/etl/EtlActionJob.java
  41. 239
      src/de/superx/etl/EtlStep.java
  42. 336
      src/de/superx/etl/EtlUtils.java
  43. 437
      src/de/superx/etl/QueryResultSerializer.java
  44. 340
      src/de/superx/etl/SqlExecutor.java
  45. 779
      src/de/superx/etl/TableUploader.java
  46. 212
      src/de/superx/etl/bin/Doquery.java
  47. 192
      src/de/superx/etl/bin/Dosql.java
  48. 70
      src/de/superx/etl/bin/EtlJobExecutor.java
  49. 908
      src/de/superx/etl/bin/SxTransformer.java
  50. 214
      src/de/superx/etl/bin/UnloadRecords.java
  51. 94
      src/de/superx/etl/bin/UploadRecords.java
  52. 155
      src/de/superx/etl/util/GetOpts.java
  53. 47
      src/de/superx/sxrest/JobExecutor.java
  54. 81
      src/de/superx/sxrest/LaunchUpgrade.java
  55. 183
      src/de/superx/sxrest/MaskXml.java
  56. 319
      superx/edit/etl/etl_manager.jsp
  57. 163
      superx/edit/etl/jobexecutor.jsp
  58. 143
      superx/edit/etl/saveMask.jsp
  59. 45
      superx/edit/etl/systeminfo_list.inc

1
.gitignore vendored

@ -0,0 +1 @@
/classes/

70
build.xml

@ -0,0 +1,70 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="ModuleCreation" default="all" basedir=".">
<!--Invocation:
ant -DMODULE_PATH=$ETL_PFAD -DBASE_DIR=. -DWEBAPP=$WEBAPP -DMODULE=etl all
To build a SuperX download package:
ant -DMODULE_PATH=$ETL_PFAD -DWEBAPP_DIR=$WEBAPP -DMODULE=etl dist
NOTE(review): the dist example passes WEBAPP_DIR, but no target below reads
a WEBAPP_DIR property; the dist target derives WEBAPP from MODULE_PATH
itself. Confirm the intended property name.
-->
<dirname file="${ant.file}" property="moduleCreateBaseDir" />
<!-- Defaults; both can be overridden on the command line with -D... -->
<property name="BASE_DIR" value="${moduleCreateBaseDir}/../.." />
<property name="WEBAPP" value="${MODULE_PATH}/../../../webserver/tomcat/webapps/superx" />
<!-- Classpath assembled from the webapp's WEB-INF (jars and classes). -->
<path id="classpath">
<!--WEB-INF/lib-->
<fileset dir="${WEBAPP}/WEB-INF">
<include name="lib/**/*.jar" />
<include name="classes" />
</fileset>
</path>
<!-- OS flags; not referenced by any target in this file. -->
<condition property="isWindoze">
<os family="windows" />
</condition>
<condition property="isUnix">
<os family="unix" />
</condition>
<!-- Default target: copy sources, then run the module creation script. -->
<target name="all" description="Gruppe: Kopiere Modul Sourcen in den WEB-INF-Verzeichnisbaum">
<antcall target="module_copy_source" />
<antcall target="call_module_scripts_create_ant" />
</target>
<!-- Copies the module sources and the edit JSPs into the webapp tree,
overwriting existing files. -->
<target name="module_copy_source" description="Gruppe: Kopiere Modul Sourcen in den WEB-INF-Verzeichnisbaum">
<echo message="Start module_copy_sources für Modul ${MODULE} in Pfad ${MODULE_PATH}" />
<copy todir="${MODULE_PATH}" overwrite="true">
<fileset dir="${BASE_DIR}/src-modules/module/${MODULE}" />
</copy>
<copy todir="${WEBAPP}/edit/${MODULE}" overwrite="true">
<fileset dir="${BASE_DIR}/superx/edit/${MODULE}" />
</copy>
</target>
<!-- Delegates to conf/module_scripts_create_ant.xml, then makes the
raw-data shell scripts executable. -->
<target name="call_module_scripts_create_ant" description="Call to module_scripts_create_ant">
<subant target="all">
<!--<property name="basedir" value="${MODULE_PATH}/../../conf/" />-->
<property name="MODULE" value="${MODULE}" />
<property name="DATABASE" value="POSTGRES" />
<property name="WEBAPP_EXT_MODULE" value="${WEBAPP}" />
<property name="WEBAPP" value="${WEBAPP}" />
<property name="SUPERX_BASE" value="${WEBAPP}/WEB-INF" />
<fileset dir="${MODULE_PATH}/../../conf" includes="module_scripts_create_ant.xml" />
</subant>
<chmod dir="${MODULE_PATH}/rohdaten" perm="ugo+x" includes="**/*.x" />
</target>
<!-- Builds the downloadable module package via conf/build.xml. -->
<target name="dist" description="Create Download package">
<subant target="erzeuge_paket">
<!--<property name="basedir" value="${MODULE_PATH}/../../conf/" />-->
<property name="MODULE" value="${MODULE}" />
<property name="PFAD" value="/home/superx/fertige_module" />
<property name="DATABASE" value="POSTGRES" />
<property name="ENCODING" value="utf8" />
<property name="PLATFORM" value="superx" />
<property name="WEBAPP_EXT_MODULE" value="${MODULE_PATH}/../../../webserver/tomcat/webapps/superx" />
<property name="WEBAPP" value="${MODULE_PATH}/../../../webserver/tomcat/webapps/superx" />
<property name="SUPERX_BASE" value="${WEBAPP}/WEB-INF" />
<fileset dir="${MODULE_PATH}/../../conf" includes="build.xml" />
</subant>
</target>
</project>

16
src-modules/module/etl/bin/SQL_ENV_etl.sam

@ -0,0 +1,16 @@
#Example environment for the ETL module (sample file; copy and adapt).
#Prerequisites, normally provided by the surrounding SQL_ENV:
#SUPERX_MODULE=$SUPERX_DIR/db/module
#SUPERX_ROHDATEN=rohdaten
# Root directory of the ETL module:
ETL_PFAD=$SUPERX_MODULE/etl; export ETL_PFAD
# Error log file for the ETL update:
ETL_ERRORDAT=$ETL_PFAD/etl_update.err; export ETL_ERRORDAT
#Raw data lives here (in the "unl" subdirectory)
ETL_LOAD_PFAD=$ETL_PFAD/$SUPERX_ROHDATEN; export ETL_LOAD_PFAD
# Error/log mail recipients, taken from the global environment:
ETL_ERRORMAIL=$ERRORMAIL; export ETL_ERRORMAIL
ETL_LOGMAIL=$LOGMAIL; export ETL_LOGMAIL
# Put the module's scripts on the PATH:
PATH=$ETL_PFAD/bin:$PATH
export PATH

115
src-modules/module/etl/bin/etl_dosql.x

@ -0,0 +1,115 @@
#!/bin/bash
#-----------------------------
# Shell command "etl_dosql"
#-----------------------------
#-------------------------------------------------------------------
# Executes the SQL statement(s) in a file against the SuperX database.
# Files marked as Freemarker templates are parsed first (FMParser),
# files containing <xupdate> are always run via the JDBC client; all
# other files go through the client selected by $SX_CLIENT
# (jdbc | psql | dbaccess | hsql).
#
# Arguments: <file> [<header true|false>] [<outfile>] [<params>]
#            (params are only supported with SX_CLIENT=jdbc)
#-------------------------------------------------------------------
#13.12.07 DQ Freemarker parsing made tenant-aware
#9.12.06 MB extension for Freemarker parsing
if [ "$1" = "" ]
then echo "Aufruf: etl_dosql <Dateiname> <header (true|false)>(optional) <Ausgabedatei> (optional) <Parameter> (optional, nur bei SX_CLIENT=jdbc)"
echo " "
echo "Aktion: etl_dosql führt die Kommandos in der Datei <Dateiname> aus."
echo "Das Ergebnis kann mit Feldnamen (<header=true>) in eine Datei <Ausgabedatei> ausgegeben werden"
echo " "
exit 0
fi
#. $SUPERX_DIR/db/bin/SQL_ENV
filename="$1"
header=$2
outfile=$3
params=$4
# Probe for a Freemarker template marker. fgrep -i is already
# case-insensitive, so one probe replaces the former pair of
# lower-/upper-case greps.
fgrep -i -s "freemarker template" "$filename" > /dev/null
FLAG1=$?
orgfilename="$filename"
# Per-tenant temp copy of the parsed SQL.
# Fixed: the suffix previously mixed $MANDANTID and $MANDANTENID; the
# tenant id variable is now used consistently (empty in single-tenant
# installations, yielding <file>.tmp.sql as before).
tmpfilename="$filename"$MANDANTID.tmp$MANDANTID.sql
if [ $FLAG1 -eq 0 ]
then
rm -f "$tmpfilename"
#Create a temporary parsed copy <file>$MANDANTID.tmp$MANDANTID.sql
java -cp $JDBC_CLASSPATH $JAVA_OPTS de.superx.bin.FMParser -dbproperties:$DB_PROPERTIES -in:$filename -out:$tmpfilename
if [ ! -f "$tmpfilename" ]
then
echo "Error : etl_dosql abgebrochen"
exit 1
fi
filename="$tmpfilename"
fi
# <xupdate> documents must always be executed by the JDBC client.
fgrep -i -s "<xupdate>" "$filename" > /dev/null
FLAG1=$?
if [ $FLAG1 -eq 0 ]
then
CP=".:$JDBC_CLASSPATH:$XML_CLASSPATH"
if [ "$LANG" != "" ]
then LOCALE="-Duser.language=$LANG"
fi
java $LOCALE -cp $CP $JAVA_OPTS de.superx.etl.bin.Dosql -logger:$LOGGER_PROPERTIES -dbproperties:$DB_PROPERTIES -sqlfile:"$filename" -outFormat:"txt" -delim:$DBDELIMITER -header:$header -outfile:$outfile -params:$params
else
case $SX_CLIENT in
jdbc)
CP=".:$JDBC_CLASSPATH:$XML_CLASSPATH"
if [ "$LANG" != "" ]
then LOCALE="-Duser.language=$LANG"
fi
java $LOCALE -cp $CP $JAVA_OPTS de.superx.etl.bin.Dosql -logger:$LOGGER_PROPERTIES -dbproperties:$DB_PROPERTIES -sqlfile:"$filename" -outFormat:"txt" -delim:$DBDELIMITER -header:$header -outfile:$outfile -params:$params
;;
psql)
# psql: suppress the header via -t; delimited export to a file is
# not implemented correctly yet (warning below).
if [ "$header" != "true" ]
then
tuples_only='-t'
fi
if [ "$outfile" = "" ]
then
psql --dbname $DBNAME -f $filename $tuples_only
else
echo "Ergebnis mit Fieldsep $DBDELIMITER klappt mit psql noch nicht"
echo "Bitte benutzen Sie SX_CLIENT=jdbc"
psql --dbname $DBNAME -P fieldsep="$DBDELIMITER" -f $filename $tuples_only -o $outfile
fi
;;
dbaccess)
if [ "$header" = "true" ]
then
echo "Mit dbaccess ist kein Export der Feldnamen möglich"
exit 0
fi
if [ "$outfile" = "" ]
then
$INF_BIN/dbaccess $DBACCESS_PARAMS $DBNAME $filename
else
$INF_BIN/dbaccess $DBACCESS_PARAMS $DBNAME $filename >$outfile
fi
;;
hsql)
# NOTE(review): hardcoded placeholder credentials for the hsql client.
java -Xmx300M -cp .:$JDBC_CLASSPATH org.hsqldb.util.ScriptTool -database joolap -user admin -password ???? -script $filename
esac
fi
# Remove the temp copy; set FM_DEBUG to keep it for debugging.
if [ -f "$tmpfilename" -a "$FM_DEBUG" = "" ]
then
echo
rm -f "$tmpfilename"
fi

11
src-modules/module/etl/bin/etl_sx_execute_actions.x

@ -0,0 +1,11 @@
#!/bin/bash
# Wrapper for de.superx.bin.ActionExecutor: runs the ETL job named by
# the fourth argument for one tenant; -DSX_LOG_TO_TMP=true is passed to
# the JVM.
# Arguments: MandantenID WEB_INF_PFAD MODULE_PFAD Job_uniquename outfile params
# NOTE(review): the usage text says "sx_execute_actions.x" although the
# script file is named etl_sx_execute_actions.x - confirm intended name.
if [ "$1" = "" ]
then echo "Aufruf: sx_execute_actions.x MandantenID WEB_INF_PFAD MODULE_PFAD (z.B. /home/superx/webserver/tomcat/webapps/superx/WEB-INF/conf/edustore/db/module oder /home/superx/db/module) Job_uniquename outfile params (optional)"
exit 0
fi
java $LOCALE -cp $JDBC_CLASSPATH:$XML_CLASSPATH $JAVA_OPTS -DSX_LOG_TO_TMP=true de.superx.bin.ActionExecutor -mandantenID:$1 -WEB_INF_PFAD:$2 -MODULE_PFAD:$3 -job_uniquename:$4 -outfile:$5 -params:$6

69
src-modules/module/etl/bin/etl_sx_insert_mask.x

@ -0,0 +1,69 @@
#!/bin/bash
#. $SUPERX_DIR/bin/SQL_ENV
#---------------------------------------------------------------------
# Shell command "etl_sx_insert_mask.x"
#
# Loads XML mask metadata (as unloaded by etl_sx_select_mask.x) into
# the database via the EtlJobExecutor job "sx_insert_mask".
#
# Arguments: <TID> [<input file, default TID.xml>] [<J = skip prompt>]
#---------------------------------------------------------------------
##Reset WEITER defensively (it may leak in from the environment).
WEITER=""
if [ "$1" = "" ]
then echo "Aufruf: etl_sx_insert_mask.x <TID> <Dateiname (optional)> <Sicherheitsabfrage ausschalten=J (optional)>"
echo " "
echo "Aktion: etl_sx_insert_mask.x laedt mittels etl_sx_select_mask entladene XML-Metadaten"
echo " in die Datenbank."
echo " "
exit 0
fi
TID="$1"
# Input file defaults to <TID>.xml.
if [ "$2" = "" ]
then infile=$TID.xml
else
infile=$2
fi
##Skip the confirmation prompt when the third argument is j/J.
if [ "$3" = "j" -o "$3" = "J" ]
then WEITER=j
fi
#-------------------------------------------------------------------
#-- Confirmation prompt
#-------------------------------------------------------------------
echo "Maske Nr. $TID hochladen"
echo "VORSICHT: Evtl. vorhandene Daten werden überschrieben."
if [ "$WEITER" != "j" -a "$WEITER" != "J" ]
then
echo "Weiter (J/N) ? "
read WEITER
fi
# Abort unless confirmed with j or J (nested ifs merged into one test).
# Fixed: the abort message previously read "etl_etl_sx_insert_mask.x".
if [ "$WEITER" != "J" -a "$WEITER" != "j" ]
then
echo "etl_sx_insert_mask.x aborted"
exit 0
fi
# The mask import is only implemented for the JDBC client.
SX_CLIENT=jdbc
export SX_CLIENT
CP=".:$JDBC_CLASSPATH"
java $JAVA_OPTS -cp $CP de.superx.etl.bin.EtlJobExecutor -dbproperties:$DB_PROPERTIES -job:sx_insert_mask -params:TID=$TID,PATH_TO_INPUTFILE=$infile

46
src-modules/module/etl/bin/etl_sx_select_mask.x

@ -0,0 +1,46 @@
#!/bin/bash
#---------------------------------------------------------------------
# Shell command "etl_sx_select_mask.x"
#
# Unloads all metadata of the mask <TID> to an XML file; the
# counterpart etl_sx_insert_mask.x re-imports that file.
#---------------------------------------------------------------------
if [ "$1" = "" ]
then echo "Aufruf: etl_sx_select_mask.x <TID> <Ausgabedatei (optional)>"
echo " "
echo "Aktion: etl_sx_select_mask.x entlaedt alle Metadaten zur Maske mit der TID <TID>."
echo " Mit Hilfe von sx_insert_mask koennen diese Daten importiert werden."
echo " "
exit 0
fi
#-------------------------------------------------------------------
#-- unload metadata
#-------------------------------------------------------------------
TID=$1
# Output file defaults to <TID>.xml when no second argument is given.
outfile=${2:-$TID.xml}
echo "Maske Nummer $TID"
# Show the mask's name before unloading.
DOQUERY "select name from maskeninfo where tid = $TID" false
echo "entladen"
# The XML export is only implemented for the JDBC client.
SX_CLIENT=jdbc
export SX_CLIENT
CP=".:$JDBC_CLASSPATH"
java $JAVA_OPTS -cp $CP de.superx.etl.bin.EtlJobExecutor -dbproperties:$DB_PROPERTIES -job:sx_select_mask -outfile:$outfile -params:TID=$TID

46
src-modules/module/etl/bin/etl_sx_unload_records.x

@ -0,0 +1,46 @@
#!/bin/bash
#---------------------------------------------------------------------
# Shell command "etl_sx_unload_records.x"
#
# Unloads the contents of a table via de.superx.etl.bin.Doquery.
# Created by D. Quathamer, 2020-01-06
#
# Arguments: <table> [<outfile>] [<format txt|html|xml>] [<header true|false>]
#---------------------------------------------------------------------
if [ "$1" = "" ]
then echo "Aufruf: etl_sx_unload_records.x <tabelle> <exportdatei>(optional) <format (txt|html|xml)>(optional) <header (true|false)>(optional) "
echo " "
echo "Aktion: etl_sx_unload_records.x entlädt Inhalte einer Tabelle "
echo "Die Ausgabeformate sind txt (default), html und xml "
echo "html und xml sind nur möglich,wenn java installiert ist und JAVA_HOME gesetzt ist."
echo "Der Dateiname ist optional"
echo " "
exit 1
fi
table=$1
# Defaults: unload to ./<table>.unl, plain text, no header line.
# Fixed: removed the unused UNLLOG variable; `pwd` modernized to $(pwd).
filename=${2:-$(pwd)/$table.unl}
outformat=${3:-txt}
header=${4:-false}
if [ "$LANG" != "" ]
then LOCALE="-Duser.language=$LANG"
fi
CP=".:$JDBC_CLASSPATH"
echo "Unload $table to $filename"
java $LOCALE -cp "$CP" de.superx.etl.bin.Doquery $LOGGER_PROPERTIES $DB_PROPERTIES 'select * from '$table $outformat $DBDELIMITER $header $filename

46
src-modules/module/etl/bin/etl_sx_upload_records.x

@ -0,0 +1,46 @@
#!/bin/bash
#---------------------------------------------------------------------
# Shell command "etl_sx_upload_records.x"
#
# Loads records from a source file into a database table via
# de.superx.etl.bin.UploadRecords.
# Created by D. Quathamer, 2020-01-06
#---------------------------------------------------------------------
if [ "$1" = "" ]
then echo "Aufruf: etl_sx_upload_records.x <tabellenname> <Quelldatei>(optional,Default ist Tabellenname+.unl) <header (true|false)>(optional) <informat (txt|xml>(optional) <encoding (utf8,ISO-8859-1)>(optional)"
echo " "
echo "Aktion: etl_sx_upload_records.x lädt von einer Quelldatei in die Tabelle"
echo " "
exit 0
fi
tabelle=$1
# Source file defaults to <table>.unl in the current directory.
quelldatei=${2:-$tabelle.unl}
header=$3
informat=$4
encoding=$5
# Derive the file encoding from the shell locale when not given explicitly.
if [ "$encoding" = "" ]
then
case "$LANG" in
de_DE@euro) encoding=ISO-8859-1 ;;
de_DE.utf8) encoding=UTF-8 ;;
esac
fi
echo "$tabelle wird aus der Datei $quelldatei gefüllt"
CP=".:$JDBC_CLASSPATH"
java $JAVA_OPTS -cp $CP de.superx.etl.bin.UploadRecords -logger:$LOGGER_PROPERTIES -dbproperties:$DB_PROPERTIES -table:$tabelle -unl:$quelldatei -delim:$DBDELIMITER -header:$header -informat:$informat -encoding:$encoding

40
src-modules/module/etl/conf/build.xml

@ -0,0 +1,40 @@
<?xml version="1.0" encoding="UTF-8"?>
<project name="Edustore" default="help" basedir=".">
<!--Example invocation: ant -DDATABASE=POSTGRES -DMODULE=etl -DMODULE_PATH=/home/superx/git/community/superx/WEB-INF/conf/edustore/db/module/etl all
Applies etl_step_fuellen.xsl to the module descriptor conf/${MODULE}.xml to
generate the SQL scripts that fill the etl_step tables (upgrade and update
variants). -->
<target name="all" depends="init" description="Ausführbar: Modulscripte komplett für ${DATABASE}">
<antcall target="etl_upgrade" />
<antcall target="etl_update" />
</target>
<target name="help">
<echo>
File = ${ant.file}
Script Parameters:
all Modulscripte komplett erzeugen
</echo>
</target>
<!-- Sets the timestamp properties used in the log messages below. -->
<target name="init" depends="">
<tstamp>
<format property="SX_TIMESTAMP" pattern="dd.MM.yyyy HH:mm:ss" />
<format property="SX_DATESTAMP" pattern="dd.MM.yyyy" />
</tstamp>
</target>
<!-- XSLT with jobtype=upgrade writes the upgrade variant of the script. -->
<target name="etl_upgrade" depends="init" description="Target: Erzeugt das Script zum Erzeugen der ETL-FM Variablen fürs Upgrade">
<xslt in="${MODULE_PATH}/conf/${MODULE}.xml" style="etl_step_fuellen.xsl" out="${MODULE_PATH}/upgrade/${MODULE}_upgrade_etl_steps_fuellen.sql" force="true">
<outputproperty name="method" value="text" />
<param name="jobtype" expression="upgrade" />
</xslt>
<echo message="Datei erzeugt: ${MODULE_PATH}/upgrade/${MODULE}_upgrade_etl_steps_fuellen.sql (${SX_TIMESTAMP})" />
</target>
<!-- XSLT with jobtype=update writes the variant used by the regular update. -->
<target name="etl_update" depends="init" description="Target: Erzeugt das Script zum Erzeugen der ETL-FM Variablen für die HLR">
<xslt in="${MODULE_PATH}/conf/${MODULE}.xml" style="etl_step_fuellen.xsl" out="${MODULE_PATH}/schluesseltabellen/${MODULE}_update_etl_steps_fuellen.sql" force="true">
<outputproperty name="method" value="text" />
<param name="jobtype" expression="update" />
</xslt>
<echo message="Datei erzeugt: ${MODULE_PATH}/schluesseltabellen/${MODULE}_update_etl_steps_fuellen.sql (${SX_TIMESTAMP})" />
</target>
</project>

433
src-modules/module/etl/conf/etl.xml

@ -0,0 +1,433 @@
<?xml version="1.0" encoding="UTF-8"?>
<!--<!DOCTYPE module SYSTEM "../../../conf/superx-module.dtd"> -->
<!-- Module descriptor for the SuperX ETL module: declares tables, masks,
forms and install/upgrade actions. Also transformed by conf/build.xml
via etl_step_fuellen.xsl into the etl_step fill scripts. -->
<module name="etl" version="0.2" sachgebiet_id="270"
sachgebiet="Laderoutinen" systeminfo_id="270"
system="Laderoutinen" thema="Laderoutinen" thema_parent="Abfragen">
<database name="superx" system="superx">
<sachgebiete>
<sachgebiet id="271" name="Laderoutinen Administr."
rightname="CS_BIA_STANDARDREPORTS_ADMIN[ETL]" />
</sachgebiete>
<!-- Key table for individual ETL steps. -->
<table name="etl_step" thema="Administration" typ="Schlüsseltabelle"
releaseUnload="empty">
<description>Schlüsseltabelle für Ladeschritte</description>
<columns>
<column name="tid" type="SERIAL" size="" default="" notnull="true" description="tid" isKey="true"/>
<column name="uniquename" type="VARCHAR" size="255" default="" notnull="true" description="Unique Name" />
<column name="caption" type="VARCHAR" size="255" default="" notnull="" description="Bezeichnung" />
<column name="systeminfo_id" type="INTEGER" size="" default="" notnull="true" description="Komponente" />
<column name="step_type_id" type="INTEGER" size="2" default="" notnull="true" description="Typ des Schrittes" ><comment>1=Gruppierung, 2=Loadtable,3=nativeaction,4=loadmask,5=select</comment></column>
<column name="logfile" type="VARCHAR" size="255" default="" notnull="false" description="Logdatei" />
<column name="custom_step" type="SMALLINT" size="255" default="1" notnull="false" description="Ladeschritt custom"><comment>1=ja, d.h. von der Hochschule angepaßt, 0=nein, d.h. Auslieferung der Software</comment></column>
</columns>
<primaryKeys><rs>
<row>
<fld name='table_cat'>superx</fld>
<fld name='table_schem'>superx</fld>
<fld name='table_name'>etl_step</fld>
<fld name='column_name'>tid</fld>
<fld name='key_seq'>1</fld>
<fld name='pk_name'>etl_step_pk</fld>
</row>
</rs>
</primaryKeys>
<indexes>
<index name="ix_etl_step1" type="unique">
<index-column name="uniquename" />
<index-column name="systeminfo_id" />
</index>
</indexes>
</table>
<!-- Key table for step types (loadtable, nativeaction, unload, ...). -->
<table name="etl_step_type" thema="Administration" typ="Schlüsseltabelle"
releaseUnload="empty">
<description>Schlüsseltabelle für Arten von Ladeschritten, z.B. loadtable, nativeaction,unload</description>
<columns>
<column name="tid" type="SERIAL" size="" default="" notnull="true" description="tid" isKey="true"/>
<column name="uniquename" type="VARCHAR" size="255" default="" notnull="true" description="Unique Name" />
<column name="caption" type="VARCHAR" size="255" default="" notnull="" description="Bezeichnung" />
<column name="handler" type="VARCHAR" size="255" default="" notnull="false" description="Handler-Anwendung" />
</columns>
<primaryKeys><rs>
<row>
<fld name='table_cat'>superx</fld>
<fld name='table_schem'>superx</fld>
<fld name='table_name'>etl_step_type</fld>
<fld name='column_name'>tid</fld>
<fld name='key_seq'>1</fld>
<fld name='pk_name'>etl_step_type_pk</fld>
</row>
</rs>
</primaryKeys>
<indexes>
</indexes>
</table>
<!-- Key table for the parameters a step type accepts. -->
<table name="etl_step_type_param" thema="Administration" typ="Schlüsseltabelle"
releaseUnload="empty">
<description>Schlüsseltabelle für Parameter für Arten von Ladeschritten</description>
<columns>
<column name="tid" type="SERIAL" size="" default="" notnull="true" description="tid" isKey="true"/>
<column name="etl_step_type_id" type="INTEGER" size="" default="" notnull="true" description="Arten des Ladeschritts" />
<column name="uniquename" type="VARCHAR" size="255" default="" notnull="true" description="Unique Name" />
<column name="name" type="VARCHAR" size="255" default="" notnull="" description="Bezeichnung" />
<column name="param_default" type="VARCHAR" size="255" default="" notnull="false" description="Defaultwert" />
<!-- possibly typed in the future: <column name="param_type" type="VARCHAR" size="255" default="" notnull="false" description="Defaultwert" />
-->
</columns>
<primaryKeys><rs>
<row>
<fld name='table_cat'>superx</fld>
<fld name='table_schem'>superx</fld>
<fld name='table_name'>etl_step_type_param</fld>
<fld name='column_name'>tid</fld>
<fld name='key_seq'>1</fld>
<fld name='pk_name'>etl_step_type_param_pk</fld>
</row>
</rs>
</primaryKeys>
<indexes>
</indexes>
</table>
<!-- Key table for name/value properties of ETL steps. -->
<table name="etl_step_property" thema="Administration" typ="Schlüsseltabelle"
releaseUnload="empty">
<description>Schlüsseltabelle für Eigenschaften von Ladeschritten</description>
<columns>
<column name="tid" type="SERIAL" size="" default="" notnull="true" description="tid" isKey="true"/>
<column name="etl_step_id" type="INTEGER" size="" default="" notnull="true" description="Arten des Ladeschritts" />
<column name="prop_name" type="VARCHAR" size="255" default="" notnull="" description="Bezeichnung" />
<column name="prop_value" type="TEXT" size="255" default="" notnull="false" description="Wert" />
</columns>
<primaryKeys><rs>
<row>
<fld name='table_cat'>superx</fld>
<fld name='table_schem'>superx</fld>
<fld name='table_name'>etl_step_property</fld>
<fld name='column_name'>tid</fld>
<fld name='key_seq'>1</fld>
<fld name='pk_name'>etl_step_property_pk</fld>
</row>
</rs>
</primaryKeys>
<indexes>
</indexes>
</table>
<!-- Key table linking steps to jobs (step hierarchy, ordering and
execution control flags). -->
<table name="etl_step_relation" thema="Administration" typ="Schlüsseltabelle"
releaseUnload="empty">
<description>Schlüsseltabelle für Beziehungen zwischen Ladeschritten, und Ausführungssteuerung</description>
<columns>
<column name="tid" type="SERIAL" size="" default="" notnull="true" description="tid" isKey="true"/>
<column name="step_id" type="INTEGER" size="2" default="" notnull="true" description="Ladeschritt" ></column>
<column name="parent_step_id" type="INTEGER" size="2" default="" notnull="false" description="Übergeordneter Ladeschritt" ></column>
<column name="job_id" type="INTEGER" size="2" default="" notnull="true" description="Ladejob" ></column>
<column name="force_continue" type="SMALLINT" size="2" default="1" notnull="false" description="Ladejob bei Fehler weiterführen" ></column>
<column name="step_active" type="SMALLINT" size="2" default="1" notnull="false" description="Ladeschritt aktiv" ></column>
<column name="sortnr" type="INTEGER" size="2" default="1" notnull="false" description="Ladeschritt Sortiernr." ></column>
<column name="custom_step" type="SMALLINT" size="2" default="1" notnull="false" description="Ladeschritt custom" ><comment>1=ja, d.h. von der Hochschule angepaßt, 0=nein, d.h. Auslieferung der Software</comment></column>
</columns>
<primaryKeys><rs>
<row>
<fld name='table_cat'>superx</fld>
<fld name='table_schem'>superx</fld>
<fld name='table_name'>etl_step_relation</fld>
<fld name='column_name'>tid</fld>
<fld name='key_seq'>1</fld>
<fld name='pk_name'>etl_step_relation_pk</fld>
</row>
</rs>
</primaryKeys>
<indexes>
</indexes>
</table>
<!-- Key table for ETL jobs. -->
<table name="etl_job" thema="Administration" typ="Schlüsseltabelle"
releaseUnload="empty">
<description>Schlüsseltabelle für Ladejobs</description>
<columns>
<column name="tid" type="SERIAL" size="" default="" notnull="true" description="tid" isKey="true"/>
<column name="uniquename" type="VARCHAR" size="255" default="" notnull="true" description="Schlüssel" />
<column name="caption" type="VARCHAR" size="255" default="" notnull="" description="Bezeichnung" />
<column name="systeminfo_id" type="INTEGER" size="" default="" notnull="true" description="Komponente" />
<column name="logfile" type="VARCHAR" size="255" default="" notnull="false" description="Logdatei" />
<column name="custom_job" type="SMALLINT" size="255" default="1" notnull="false" description="Job custom" ><comment>1=ja, d.h. von der Hochschule angepaßt, 0=nein, d.h. Auslieferung der Software</comment></column>
</columns>
<primaryKeys><rs>
<row>
<fld name='table_cat'>superx</fld>
<fld name='table_schem'>superx</fld>
<fld name='table_name'>etl_job</fld>
<fld name='column_name'>tid</fld>
<fld name='key_seq'>1</fld>
<fld name='pk_name'>etl_job_pk</fld>
</row>
</rs>
</primaryKeys>
<indexes>
<index name="ix_etl_job1" type="unique">
<index-column name="uniquename" />
</index>
</indexes>
</table>
<!-- Key table for the parameters of an ETL job. -->
<table name="etl_job_param" thema="Administration" typ="Schlüsseltabelle"
releaseUnload="empty">
<description>Schlüsseltabelle für Parameter für Ladejobs</description>
<columns>
<column name="tid" type="SERIAL" size="" default="" notnull="true" description="tid" isKey="true"/>
<column name="etl_job_id" type="INTEGER" size="" default="" notnull="true" description="Ladejob" />
<column name="uniquename" type="VARCHAR" size="255" default="" notnull="true" description="Unique Name" />
<column name="name" type="VARCHAR" size="255" default="" notnull="" description="Bezeichnung" />
<column name="param_default" type="VARCHAR" size="255" default="" notnull="false" description="Defaultwert" />
<!-- possibly typed in the future: <column name="param_type" type="VARCHAR" size="255" default="" notnull="false" description="Defaultwert" />
-->
</columns>
<primaryKeys><rs>
<row>
<fld name='table_cat'>superx</fld>
<fld name='table_schem'>superx</fld>
<fld name='table_name'>etl_job_param</fld>
<fld name='column_name'>tid</fld>
<fld name='key_seq'>1</fld>
<fld name='pk_name'>etl_job_param_pk</fld>
</row>
</rs>
</primaryKeys>
<indexes>
</indexes>
</table>
<views>
</views>
<functions>
</functions>
<themen>
<thema name="Administration Laderoutinen"
parent="Laderoutinen">Abfragen zur Administration</thema>
</themen>
<masken>
<!-- Mask 42000; its unl sources are located in $ETL_PFAD/masken. -->
<maske tid="42000" name="Laderoutinen suchen" thema="Administration Laderoutinen">
<description>Laderoutinen verwalten</description>
<src>
<path>$ETL_PFAD/masken</path>
</src>
</maske>
</masken>
<!-- Referential relations between the module tables, used by the
generic edit forms (select boxes and delete protection). -->
<data-integrity>
<!--etl_job-->
<relation from="systeminfo" to="etl_job" delete="FALSE"
displayType="select" visibleFields="name" format="%s">
<relation-column from="tid" to="systeminfo_id" />
</relation>
<!--etl_step-->
<relation from="systeminfo" to="etl_step" delete="FALSE"
displayType="select" visibleFields="name" format="%s">
<relation-column from="tid" to="systeminfo_id" />
</relation>
<relation from="etl_step_type" to="etl_step" delete="FALSE"
displayType="select" visibleFields="caption" format="%s">
<relation-column from="tid" to="step_type_id" />
</relation>
<!--etl_step_property-->
<relation from="etl_step" to="etl_step_property" delete="FALSE"
displayType="select" visibleFields="caption" format="%s">
<relation-column from="tid" to="etl_step_id" />
</relation>
<!--etl_step_relation-->
<relation from="etl_job" to="etl_step_relation" delete="FALSE"
displayType="select" visibleFields="caption" format="%s">
<relation-column from="tid" to="job_id" />
</relation>
<relation from="etl_step" to="etl_step_relation" delete="FALSE"
displayType="select" visibleFields="caption" format="%s">
<relation-column from="tid" to="step_id" />
</relation>
<relation from="etl_step" to="etl_step_relation" delete="FALSE"
displayType="select" visibleFields="caption" format="%s">
<relation-column from="tid" to="parent_step_id" />
</relation>
</data-integrity>
</database>
<!-- ********************* List of ETL processes ************************** -->
<etl>
<etl-step name="Transformation" type="trans">
<action>
<nativeaction sql="update systeminfo set datum=today() where tid in (270)" scriptfile="" database=""/>
</action>
</etl-step>
</etl>
<!-- Actions executed on first installation of the module. -->
<install>
<install-step name="Füllen der Modul-Tabellen">
<action error="stop">
<loadtable refresh="true" delimiter="^" header="false" tabname="etl_step_type"><file path="$ETL_PFAD/schluesseltabellen/etl_step_type.unl"/></loadtable>
<nativeaction sql="" scriptfile="$ETL_PFAD/schluesseltabellen/sachgebiete_fuellen.sql" database=""/>
<nativeaction sql="" scriptfile="$ETL_PFAD/schluesseltabellen/etl_step_fuellen.sql" database=""/>
<!--<nativeaction sql="" scriptfile="$ETL_PFAD/schluesseltabellen/sx_jobs_fuellen.sql" database=""/>-->
</action>
</install-step>
</install>
<!-- Actions executed on module upgrade: stage the Freemarker templates
in tmp_templates, then refill the key tables. -->
<upgrade>
<upgrade-step>
<action error="stop">
<nativeaction sql="create table tmp_templates(tid integer,
id char(200) not null,
content text not null,
description char(200) ,
comment char(200) ,
version integer
)
;" database=""/>
</action>
<action error="stop">
<loadtable refresh="true" delimiter="^" header="false" tabname="tmp_templates"><file path="$ETL_PFAD/schluesseltabellen/fm_templates.unl"/></loadtable>
</action>
<action>
<loadtable refresh="true" delimiter="^" header="false" tabname="etl_step_type"><file path="$ETL_PFAD/schluesseltabellen/etl_step_type.unl"/></loadtable>
<nativeaction sql="" scriptfile="$ETL_PFAD/schluesseltabellen/etl_step_fuellen.sql" database=""/>
<nativeaction sql="" scriptfile="$ETL_PFAD/schluesseltabellen/sachgebiete_fuellen.sql" database=""/>
</action>
</upgrade-step>
</upgrade>
<!-- NOTE(review): this uninstall-step has no surrounding <uninstall>
wrapper element, unlike the <install>/<upgrade> sections above;
confirm against superx-module.dtd. -->
<uninstall-step name="Deinstalliere Schlüssel">
<action error="stop">
<nativeaction sql="" scriptfile="$ETL_PFAD/schluesseltabellen/sachgebiete_loeschen.sql" database=""/>
</action>
</uninstall-step>
<!-- ********************* DB forms for editing jobs and steps ************************** -->
<dbforms>
<!-- Single-record edit form for one ETL job, with subforms for its
parameters and its step relations. -->
<form name="etl_job_edit"
table="etl_job"
path="/edit/etl/etl_job_edit.jsp"
followUp=""
caption="Laderoutinen verwalten"
orderBy="caption"
gotoHt=""
helpfile=""
maxRows="1"
mode="update_insert_delete_copy">
<description>In diesem Formular können Sie Laderoutinen verwalten.</description>
<filters>
<filter mandatory="true" type="equals">tid</filter>
</filters>
<field-selection complete="false" />
<customfield name="tid" />
<customfield name="caption" visibleSize="50" nullFieldValue="" />
<customfield name="uniquename" visibleSize="50" nullFieldValue="" />
<customfield name="custom_job" visibleSize="50" nullFieldValue="" />
<customfield name="systeminfo_id" visibleSize="50" nullFieldValue="" />
<customfield name="Parameter" type="subform" multipart="false" autoUpdate="false"
maxRows="*" table="etl_job_param" parentField="tid" childField="etl_job_id"
orderBy="sortnr" allowNew="true"
mode="full">
<field-selection complete="false" />
<customfield name="tid" nullFieldValue="" />
<customfield name="job_id" type="hidden" overrideValue="tid" />
<customfield name="name" nullFieldValue="" visibleSize="30" />
<customfield name="uniquename" visibleSize="50" nullFieldValue="" />
<customfield name="param_default" nullFieldValue="" visibleSize="30" />
</customfield>
<customfield name="Ladeschritte" type="subform" multipart="false" autoUpdate="false"
maxRows="*" table="etl_step_relation" parentField="tid" childField="job_id"
orderBy="sortnr" allowNew="true"
mode="full">
<field-selection complete="false" />
<customfield name="tid" nullFieldValue="" />
<customfield name="job_id" type="hidden" overrideValue="tid" />
<customfield name="step_id" nullFieldValue="" visibleSize="30" />
<customfield name="parent_step_id" nullFieldValue="" visibleSize="30" />
<customfield name="force_continue" nullFieldValue="" pattern="" visibleSize="10" />
<customfield name="step_active" nullFieldValue="" pattern="" visibleSize="20" />
<customfield name="sortnr" nullFieldValue="" pattern="" visibleSize="20" />
<customfield name="custom_step" nullFieldValue="" pattern="" visibleSize="20" />
<customfield type="link" name="Details" path="/superx/edit/etl/etl_step_edit.jsp" linkVar="tid" linkid="step_id"/>
</customfield>
</form>
<!-- Single-record edit form for one ETL step, with a subform for its
properties. -->
<form name="etl_step_edit"
table="etl_step"
path="/edit/etl/etl_step_edit.jsp"
followUp=""
caption="Ladeschritt verwalten"
orderBy=""
gotoHt=""
helpfile=""
maxRows="1"
mode="full">
<description>In diesem Formular können Sie Ladeschritte verwalten.</description>
<filters>
<filter mandatory="true" type="equals">tid</filter>
</filters>
<field-selection complete="false" />
<customfield name="tid" nullFieldValue="" />
<customfield name="caption" nullFieldValue="" visibleSize="30" />
<customfield name="uniquename" nullFieldValue="" visibleSize="30" />
<customfield name="systeminfo_id" nullFieldValue="" pattern="" visibleSize="10" />
<customfield name="step_type_id" nullFieldValue="" pattern="" visibleSize="20" />
<customfield name="custom_step" nullFieldValue="" pattern="" visibleSize="30" />
<customfield name="logfile" nullFieldValue="" pattern="" visibleSize="20" />
<customfield name="Eigenschaften" type="subform" multipart="false" autoUpdate="false"
maxRows="*" table="etl_step_property" parentField="tid" childField="etl_step_id"
orderBy="prop_name" allowNew="true"
mode="full">
<field-selection complete="false" />
<customfield name="tid" nullFieldValue="" />
<customfield name="etl_step_id" type="hidden" overrideValue="tid" />
<customfield name="prop_name" nullFieldValue="" visibleSize="30" />
<customfield name="prop_value" visibleSize="50" nullFieldValue="" />
</customfield>
</form>
<!-- List form showing all step types. -->
<form name="etl_step_type_list"
table="etl_step_type"
path="/edit/etl/etl_step_type_list.jsp"
followUp=""
caption="Arten von Ladeschritten verwalten"
orderBy="caption"
gotoHt=""
helpfile=""
maxRows="*"
mode="full">
<description>In diesem Formular können Sie Arten von Ladeschritten verwalten.</description>
<filters>
</filters>
<field-selection complete="true" />
</form>
</dbforms>
</module>

1327
src-modules/module/etl/conf/etl_step_fuellen.xsl

File diff suppressed because it is too large Load Diff

10
src-modules/module/etl/conf/excludes.txt

@@ -0,0 +1,10 @@
rohdaten/ETL_ENV
rohdaten/*.properties
preparation.sql
finalize.sql
rohdaten/*.err
*.log
*.err
rohdaten/unl/*
conf/customize.sql
conf/*.log

4
src-modules/module/etl/conf/includes.txt

@@ -0,0 +1,4 @@
doku/etl_modul/etl.html
WEB-INF/conf/edustore/db/bin/SQL_ENV_etl.sam
WEB-INF/lib/superx-etl0.2.jar

6
src-modules/module/etl/masken/42000_felderinfo.unl

@@ -0,0 +1,6 @@
42000^Jobstatus^50^0^0^150^200^1^integer^200^0^1^<<SQL>> select 0,'Release' from xdummy union select 1,'Eigene' from xdummy^^ ^
42001^Laderoutine^0^0^0^150^80^1^integer^200^0^1^<<SQL>> select tid,caption from etl_job order by 2;^^^
42002^Arten von Ladeschritten^100^0^0^150^300^1^char^30^0^18^^^<<SQL>>select '../edit/etl/etl_step_type_list.jsp' from xdummy;^
42003^Name^20^0^0^150^150^1^sql^50^0^0^^^^
42004^Komponente^30^0^0^150^200^1^integer^200^0^1^<<SQL>> select tid,name from systeminfo order by 2;^^^
42005^ETL-Manager^1000^0^0^150^300^1^char^30^0^18^^^<<SQL>>select '../edit/etl/etl_manager.jsp' from xdummy;^

1
src-modules/module/etl/masken/42000_maske_system_bez.unl

@@ -0,0 +1 @@
42000^270^

6
src-modules/module/etl/masken/42000_masken_felder_bez.unl

@@ -0,0 +1,6 @@
42000^42000^
42000^42001^
42000^42002^
42000^42003^
42000^42004^
42000^42005^

92
src-modules/module/etl/masken/42000_maskeninfo.unl

@@ -0,0 +1,92 @@
42000^Laderoutinen verwalten^--Autor: D. Quathamer\
--Datum: 2.8.2019\
--freemarker template\
create temp table tmp_ergebnis (\
ord smallint,\
tid integer, \
uniquename varchar(255) ,\
caption varchar(255),\
systeminfo_id integer ,\
systeminfo_str varchar(255),\
logfile varchar(255),\
custom_job smallint ,\
letzter_lauf date,\
nextedit varchar(255)\
);\
insert into tmp_ergebnis ( tid,\
uniquename,\
caption,\
systeminfo_id,\
logfile,\
custom_job,\
nextedit) \
select tid,\
uniquename,\
caption,\
systeminfo_id,\
logfile,\
custom_job,\
('../edit/etl/etl_job_edit.jsp|tid=' || J.tid)::varchar(255)\
FROM etl_job J\
where 1=1\
/* and J.tid=<<Laderoutine>> */\
/* and J.custom_job=<<Jobstatus>> */\
/* and J.caption like '%<<Name>>%' */\
/* and E.systeminfo_id=<<Komponente>> */\
;\
\
update tmp_ergebnis set systeminfo_str=(select name from systeminfo where tid=tmp_ergebnis.systeminfo_id);\
\
\
<@selectintotmp \
select=" tid,\
uniquename,\
caption,\
systeminfo_str,\
logfile,\
letzter_lauf,\
custom_job,\
nextedit"\
source="tmp_ergebnis"\
target="tmp_ergebnis2">\
order by systeminfo_str,\
caption\
</@selectintotmp>\
<@informixnolog/>;\
\
drop table tmp_ergebnis;\
\
select systeminfo_str,\
caption,\
uniquename,\
logfile,\
custom_job,\
letzter_lauf,\
nextedit \
from tmp_ergebnis2\
;^XIL List\
drop_and_delete movable_columns sizable_columns horizontal_scrolling\
white_space_color=COLOR_WHITE fixed_columns=2\
min_heading_height=35\
Column CID=0 heading_text="Komponente" center_heading\
row_selectable col_selectable heading_platform readonly\
width=50 text_size=100\
Column CID=0 heading_text="Name" center_heading\
row_selectable col_selectable heading_platform readonly\
width=50 text_size=100\
Column CID=1 heading_text="Schlüssel" center_heading\
row_selectable col_selectable heading_platform readonly\
width=150 text_size=200\
Column CID=1 heading_text="Logdatei" center_heading\
row_selectable col_selectable heading_platform readonly\
width=30 text_size=200\
Column CID=1 heading_text="Eigene Laderoutine" center_heading\
row_selectable col_selectable heading_platform readonly\
width=5 text_size=200\
Column CID=1 heading_text="Letzter Lauf" center_heading\
row_selectable col_selectable heading_platform readonly\
width=5 text_size=200\
Column CID=1 heading_text="Bearbeiten" center_heading\
row_selectable col_selectable heading_platform readonly\
width=5 text_size=200\
@@@^^^Suchen und Bearbeiten von Laderoutinen^drop table tmp_ergebnis2;^^1^440^360^0^1^^

1
src-modules/module/etl/masken/42000_sachgeb_maske_bez.unl

@@ -0,0 +1 @@
270^42000^

12
src-modules/module/etl/rohdaten/etl_unload.xml

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Unload descriptor for the ETL module (systeminfo_id 310).
     Parameter and source-system lists are intentionally empty.
     NOTE(review): module version says 0.3b while the release tag suggests
     0.4 and includes.txt ships superx-etl0.2.jar - confirm which is current. -->
<superx-unload version="4.0">
<module id="etl" version="0.3b" systeminfo_id="310">ETL-Modul</module>
<parameters>
</parameters>
<sourcesystems>
</sourcesystems>
</superx-unload>

592
src-modules/module/etl/schluesseltabellen/etl_step_fuellen.sql

@@ -0,0 +1,592 @@
--Freemarker Template
--Purpose: seed the ETL metadata tables (etl_job, etl_job_param, etl_step,
--etl_step_property, etl_step_relation) from the literal lists below.
--Existing rows are updated in place so their tids stay stable; the script
--ends with count-based self-tests.
<#include "SQL_lingua_franca"/>
<#include "SuperX_general"/>
<#-- Jobs to install: uniquename, caption, owning component (systeminfo_id), logfile -->
<#assign etl_jobs = [
{"uniquename":"sx_select_mask", "name":"Maske entladen", "systeminfo_id":9 ,"logfile":""},
{"uniquename":"sx_insert_mask", "name":"Maske hochladen", "systeminfo_id":9 ,"logfile":""},
{"uniquename":"kern_konstanten_update", "name":"Konstanten aktualisieren", "systeminfo_id":9 ,"logfile":""}
] />
<#-- Per-job parameters; param_default may reference shell-style variables
     ($TID, $SUPERX_DIR, $WEBAPP) that are resolved at job run time -->
<#assign etl_job_params = [
{"etl_job":"sx_select_mask","param_name":"TID", "name":"Maskennr.", "param_default":""},
{"etl_job":"sx_select_mask","param_name":"PATH_TO_OUTPUTFILE", "name":"Ausgabedatei", "param_default":"$SUPERX_DIR/db/masken/$TID.xml"},
{"etl_job":"sx_select_mask","param_name":"FORMAT", "name":"Ausgabeformat", "param_default":"XML"},
{"etl_job":"sx_insert_mask","param_name":"TID", "name":"Maskennr.", "param_default":""},
{"etl_job":"sx_insert_mask","param_name":"PATH_TO_INPUTFILE", "name":"Eingabepfad", "param_default":"$SUPERX_DIR/db/masken/$TID.xml"},
{"etl_job":"sx_insert_mask","param_name":"FORMAT", "name":"Format", "param_default":"XML"},
{"etl_job":"sx_insert_mask","param_name":"SUPERX_DIR", "name":"Superx-Pfad zu WEB-INF/conf/edustore", "param_default":"$WEBAPP/WEB-INF/conf/edustore"},
{"etl_job":"kern_konstanten_update","param_name":"SUPERX_DIR", "name":"Superx-Pfad zu WEB-INF/conf/edustore", "param_default":"$WEBAPP/WEB-INF/conf/edustore"},
{"etl_job":"kern_konstanten_update","param_name":"PATH_TO_INPUTFILE", "name":"Eingabepfad", "param_default":"$SUPERX_DIR/db/install/schluesseltabellen/kern_feste_konstanten_fuellen.sql"}
] />
<#-- Steps per job; "type" must match etl_step_type.uniquename
     (LOAD/DOSQL/DOQUERY/UNLOAD/MSG); optional "parent" nests steps -->
<#assign etl_steps = [
{"etl_job":"sx_select_mask", "uniquename":"unload_masken_stammdaten", "name":"Masken-Daten entladen", "type":"MSG"},
{"etl_job":"sx_select_mask", "uniquename":"unload_maskeninfo", "name":"Maskeninfo entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"},
{"etl_job":"sx_select_mask", "uniquename":"unload_felderinfo", "name":"felderinfo entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"},
{"etl_job":"sx_select_mask", "uniquename":"unload_masken_felder_bez", "name":"masken_felder_bez entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"},
{"etl_job":"sx_select_mask", "uniquename":"unload_sachgeb_maske_bez", "name":"sachgeb_maske_bez entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"},
{"etl_job":"sx_select_mask", "uniquename":"unload_maske_system_bez", "name":"maske_system_bez entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"},
{"etl_job":"sx_select_mask", "uniquename":"unload_themenbaum", "name":"themenbaum entladen", "type":"UNLOAD"},
{"etl_job":"sx_select_mask", "uniquename":"unload_sx_mask_style", "name":"sx_mask_style entladen", "type":"UNLOAD"},
{"etl_job":"sx_select_mask", "uniquename":"unload_sx_stylesheets", "name":"sx_stylesheets entladen", "type":"UNLOAD"},
{"etl_job":"sx_select_mask", "uniquename":"unload_stylesheet_field", "name":"stylesheet_field entladen", "type":"UNLOAD"},
{"etl_job":"sx_insert_mask", "uniquename":"delete_maskeninfo", "name":"Maskeninfo löschen", "type":"DOQUERY" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_maskeninfo", "name":"Maskeninfo hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"delete_felderinfo", "name":"felderinfo löschen", "type":"DOQUERY" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_felderinfo", "name":"felderinfo hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"delete_masken_felder_bez", "name":"masken_felder_bez löschen", "type":"DOQUERY" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_masken_felder_bez", "name":"masken_felder_bez hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"delete_sachgeb_maske_bez", "name":"sachgeb_maske_bez löschen", "type":"DOQUERY" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_sachgeb_maske_bez", "name":"sachgeb_maske_bez hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"delete_maske_system_bez", "name":"maske_system_bez löschen", "type":"DOQUERY" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_maske_system_bez", "name":"maske_system_bez hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"create_tmp_etl_themenbaum", "name":"Tabelle tmp_etl_themenbaum erzeugen", "type":"DOSQL" },
{"etl_job":"sx_insert_mask", "uniquename":"create_tmp_etl_stylesheets", "name":"Tabelle tmp_etl_stylesheets erzeugen", "type":"DOSQL" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_themenbaum", "name":"tmp_etl_themenbaum hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_stylesheets", "name":"tmp_etl_stylesheets hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_mask_style", "name":"tmp_etl_mask_style hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_stylesheet_field", "name":"tmp_etl_stylesheet_field hochladen", "type":"LOAD" },
{"etl_job":"sx_insert_mask", "uniquename":"themenbaum_fuellen", "name":"themenbaum_fuellen", "type":"DOSQL" },
{"etl_job":"sx_insert_mask", "uniquename":"sx_stylesheets_fuellen", "name":"sx_stylesheets_fuellen", "type":"DOSQL" },
{"etl_job":"kern_konstanten_update", "uniquename":"kern_konstanten_update", "name":"Kern Konstanten aktualisieren", "type":"DOSQL" }
] />
<#-- Step properties; prop_value is written at the very end of the script
     (TEXT column, handled per dialect).
     NOTE(review): the DOSQL steps above use upper-case PATH_TO_INPUTFILE,
     but the kern_konstanten_update entry uses lower-case path_to_inputfile -
     confirm the step handler resolves property names case-insensitively. -->
<#assign etl_step_properties = [
{"etl_step":"unload_masken_stammdaten","prop_name":"msg", "prop_value":"Entlade Stammdaten Maske $TID" },
{"etl_step":"unload_maskeninfo","prop_name":"select_stmt", "prop_value":"select tid,name,select_stmt,xil_proplist,chart_xtitel,chart_ytitel,erlaeuterung,cleanup_stmt,default_file,frontend,breite,hoehe,ampel,hilfe,hinweis from maskeninfo where tid=$TID" },
{"etl_step":"unload_felderinfo","prop_name":"select_stmt", "prop_value":"select tid ,name,nummer,x,y,buttonbreite,feldbreite,zeilenanzahl,typ,laenge,obligatorisch,art,relation,attribut,defaultwert from felderinfo where tid in (select felderinfo_id from masken_felder_bez where maskeninfo_id = $TID) order by tid" },
{"etl_step":"unload_masken_felder_bez","prop_name":"select_stmt", "prop_value":"select maskeninfo_id,felderinfo_id from masken_felder_bez where maskeninfo_id=$TID order by 1,2" },
{"etl_step":"unload_sachgeb_maske_bez","prop_name":"select_stmt", "prop_value":"select sachgebiete_id,maskeninfo_id from sachgeb_maske_bez where maskeninfo_id=$TID order by 1,2" },
{"etl_step":"unload_maske_system_bez","prop_name":"select_stmt", "prop_value":"select maskeninfo_id,systeminfo_id from maske_system_bez where maskeninfo_id=$TID order by 1,2" },
{"etl_step":"unload_themenbaum","prop_name":"select_stmt", "prop_value":"select T.tid,name,maskeninfo_id,parent,(select name from themenbaum where tid=T.parent) as parent_name,gueltig_seit,gueltig_bis,erlaeuterung,sort,css_class from themenbaum T where maskeninfo_id=$TID order by 2,1" },
{"etl_step":"unload_sx_mask_style","prop_name":"select_stmt", "prop_value":"select S.tid,S.maskeninfo_id,S.stylesheet_id,S.ord,(select filename from sx_stylesheets where tid=S.stylesheet_id) as stylesheet_filename from sx_mask_style S where maskeninfo_id=$TID order by 1,2,3" },
{"etl_step":"unload_sx_stylesheets","prop_name":"select_stmt", "prop_value":"select S.tid,S.filename,S.caption,S.description,S.relation,S.useragent,S.contenttype from sx_stylesheets S, sx_mask_style M where S.tid=M.stylesheet_id and M.maskeninfo_id=$TID order by 1,2,3" },
{"etl_step":"unload_stylesheet_field","prop_name":"select_stmt", "prop_value":"select F.tid,F.stylesheet_id,S.filename as stylesheet_filename,F.tablename,F.fieldname FROM stylesheet_field F, sx_stylesheets S, sx_mask_style M where F.stylesheet_id=S.tid and S.tid=M.stylesheet_id and M.maskeninfo_id=$TID order by 1,2,3,4,5" },
{"etl_step":"delete_maskeninfo","prop_name":"select_stmt", "prop_value":"delete from maskeninfo where tid=$TID" },
{"etl_step":"upload_maskeninfo","prop_name":"target_table", "prop_value":"maskeninfo" },
{"etl_step":"upload_maskeninfo","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_maskeninfo","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_maskeninfo\"]/rs/row" },
{"etl_step":"upload_maskeninfo","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"delete_felderinfo","prop_name":"select_stmt", "prop_value":"delete from felderinfo where tid in (select felderinfo_id from masken_felder_bez where maskeninfo_id =$TID)" },
{"etl_step":"upload_felderinfo","prop_name":"target_table", "prop_value":"felderinfo" },
{"etl_step":"upload_felderinfo","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_felderinfo","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_felderinfo\"]/rs/row" },
{"etl_step":"upload_felderinfo","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"delete_masken_felder_bez","prop_name":"select_stmt", "prop_value":"delete from masken_felder_bez where maskeninfo_id =$TID" },
{"etl_step":"upload_masken_felder_bez","prop_name":"target_table", "prop_value":"masken_felder_bez" },
{"etl_step":"upload_masken_felder_bez","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_masken_felder_bez","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_masken_felder_bez\"]/rs/row" },
{"etl_step":"upload_masken_felder_bez","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"delete_sachgeb_maske_bez","prop_name":"select_stmt", "prop_value":"delete from sachgeb_maske_bez where maskeninfo_id =$TID" },
{"etl_step":"upload_sachgeb_maske_bez","prop_name":"target_table", "prop_value":"sachgeb_maske_bez" },
{"etl_step":"upload_sachgeb_maske_bez","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_sachgeb_maske_bez","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_sachgeb_maske_bez\"]/rs/row" },
{"etl_step":"upload_sachgeb_maske_bez","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"delete_maske_system_bez","prop_name":"select_stmt", "prop_value":"delete from maske_system_bez where maskeninfo_id =$TID" },
{"etl_step":"upload_maske_system_bez","prop_name":"target_table", "prop_value":"maske_system_bez" },
{"etl_step":"upload_maske_system_bez","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_maske_system_bez","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_maske_system_bez\"]/rs/row" },
{"etl_step":"upload_maske_system_bez","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"create_tmp_etl_themenbaum","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/create_tmp_etl_themenbaum.sql" },
{"etl_step":"create_tmp_etl_stylesheets","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/create_tmp_etl_stylesheets.sql" },
{"etl_step":"upload_tmp_etl_themenbaum","prop_name":"target_table", "prop_value":"tmp_etl_themenbaum" },
{"etl_step":"upload_tmp_etl_themenbaum","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_tmp_etl_themenbaum","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_themenbaum\"]/rs/row" },
{"etl_step":"upload_tmp_etl_themenbaum","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"upload_tmp_etl_stylesheets","prop_name":"target_table", "prop_value":"tmp_etl_stylesheets" },
{"etl_step":"upload_tmp_etl_stylesheets","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_tmp_etl_stylesheets","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_sx_stylesheets\"]/rs/row" },
{"etl_step":"upload_tmp_etl_stylesheets","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"upload_tmp_etl_mask_style","prop_name":"target_table", "prop_value":"tmp_etl_mask_style" },
{"etl_step":"upload_tmp_etl_mask_style","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_tmp_etl_mask_style","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_sx_mask_style\"]/rs/row" },
{"etl_step":"upload_tmp_etl_mask_style","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"target_table", "prop_value":"tmp_etl_stylesheet_field" },
{"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"format", "prop_value":"xml" },
{"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_stylesheet_field\"]/rs/row" },
{"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" },
{"etl_step":"themenbaum_fuellen","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/themenbaum_fuellen.sql" },
{"etl_step":"sx_stylesheets_fuellen","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql" },
{"etl_step":"kern_konstanten_update","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }
] />
<#-- Self-tests run at the end: each "sql" must return exactly "assertion" -->
<#assign testfaelle = [
{"testcase":"test_sx_select_mask","assertion":1, "sql":"select count(*) from etl_job where uniquename='sx_select_mask'" },
{"testcase":"test_sx_insert_mask","assertion":1, "sql":"select count(*) from etl_job where uniquename='sx_insert_mask'" },
{"testcase":"test_sx_insert_mask_params","assertion":4, "sql":"select count(*) from etl_job_param P, etl_job J where J.tid=P.etl_job_id and J.uniquename='sx_insert_mask'" },
{"testcase":"test_sx_insert_mask_steps","assertion":18, "sql":"select count(*) from etl_step S, etl_step_relation R, etl_job J where J.tid=R.job_id and S.tid=R.step_id and J.uniquename='sx_insert_mask'" },
{"testcase":"test_sx_select_mask_steps","assertion":10, "sql":"select count(*) from etl_step S, etl_step_relation R, etl_job J where J.tid=R.job_id and S.tid=R.step_id and J.uniquename='sx_select_mask'" }
] />
--Staging tables, truncated and refilled once per job in the loop below.
--tmp_etl_step: staging for etl_step; *_uniquename columns are resolved to
--tids later; already_exists flags rows found in the real table.
create temp table tmp_etl_step(
tid INTEGER,
uniquename VARCHAR(255) ,
caption VARCHAR(255) ,
systeminfo_id INTEGER not null,
step_type INTEGER,
step_type_uniquename VARCHAR(255),
sortnr SMALLINT not null,
force_continue SMALLINT,
etl_job_id INTEGER ,
parent_step_id INTEGER ,
parent_step_uniquename varchar(255),
parent_job_uniquename varchar(255),
logfile varchar(255),
custom_step smallint,
already_exists smallint
)
;
--tmp_etl_job: staging for etl_job (one row per loop iteration)
create temp table tmp_etl_job(
tid INTEGER,
uniquename VARCHAR(255) ,
caption VARCHAR(255) ,
systeminfo_id INTEGER not null,
logfile varchar(255),
already_exists smallint,
custom_job smallint
)
;
--tmp_etl_job_param: staging for etl_job_param
create temp table tmp_etl_job_param(
tid SERIAL not null,
etl_job_id INTEGER ,
uniquename VARCHAR(255) not null,
name VARCHAR(255) ,
param_default VARCHAR(255)
)
;
--tmp_etl_step_property: staging for etl_step_property;
--prop_value (TEXT) is filled at the very end of the script
create temp table tmp_etl_step_property(
tid SERIAL not null,
etl_step_id INTEGER not null,
prop_name VARCHAR(255) ,
prop_value text
)
;
--tmp_etl_step_relation: staging for the step/job/parent linkage
create temp table tmp_etl_step_relation(
tid SERIAL not null,
step_id INTEGER not null,
parent_step_id INTEGER ,
job_id INTEGER not null,
force_continue SMALLINT default 1 ,
step_active SMALLINT default 1,
sortnr SMALLINT default 1,
custom_step SMALLINT default 1
)
;
--Main loop: one pass per job defined in etl_jobs
<#foreach etl_job in etl_jobs>
truncate table tmp_etl_job;
truncate table tmp_etl_step;
truncate table tmp_etl_job_param;
truncate table tmp_etl_step_property;
truncate table tmp_etl_step_relation;
--job tids must not change, therefore:
--
-- 1. update existing jobs
-- 2. insert new jobs
-- 3. delete old jobs
insert into tmp_etl_job(uniquename,caption,systeminfo_id,logfile,already_exists,custom_job)
values ('${etl_job.uniquename}',
'${etl_job.name}',
${etl_job.systeminfo_id},
'${etl_job.logfile}',0,0);
<#if SQLdialect='Postgres'>
--Postgres dialect: UPDATE ... FROM join against the real table;
--a match means the job already exists, so keep its tid
update tmp_etl_job set tid=J.tid,
already_exists=1,
caption=J.caption,
logfile=J.logfile,
custom_job=J.custom_job
from etl_job J where J.uniquename=tmp_etl_job.uniquename
and J.systeminfo_id=tmp_etl_job.systeminfo_id
;
<#else>
--Informix dialect: same upsert-detection via correlated subselect
update tmp_etl_job set (tid,
already_exists,
caption,
logfile,
custom_job)
= ((select
tid,
1 as already_exists,
caption,
logfile,
custom_job
from etl_job J where J.uniquename=tmp_etl_job.uniquename
and J.systeminfo_id=tmp_etl_job.systeminfo_id))
where 0 <(select count(*)
from etl_job J where J.uniquename=tmp_etl_job.uniquename
and J.systeminfo_id=tmp_etl_job.systeminfo_id)
;
</#if>
--TODO Informix
--new jobs:
--NOTE(review): logfile is not part of this insert, so new jobs are created
--without a logfile - confirm whether that is intended.
insert into etl_job(uniquename,caption,systeminfo_id,custom_job)
select uniquename,caption,systeminfo_id,custom_job
from tmp_etl_job
where already_exists=0;
--determine the tid of newly inserted jobs:
update tmp_etl_job set tid=(select J.tid
from etl_job J where J.uniquename=tmp_etl_job.uniquename
and J.systeminfo_id=tmp_etl_job.systeminfo_id)
where already_exists=0
;
--TODO
--obsolete jobs: should be removed when the module is uninstalled
--Parameters: stage all params belonging to the current job
<#foreach etl_job_param in etl_job_params>
<#if etl_job_param.etl_job==etl_job.uniquename>
insert into tmp_etl_job_param(
etl_job_id ,
uniquename,
name ,
param_default)
select J.tid,
'${etl_job_param.param_name}',
'${etl_job_param.name}',
'${etl_job_param.param_default}'
from tmp_etl_job J
;
</#if>
</#foreach>
--ETL steps: stage this job's steps; sortnr counts up per job, spaced by 10
<#assign sortnr=0 />
<#foreach etl_step in etl_steps>
<#if etl_step.etl_job==etl_job.uniquename>
<#assign sortnr=sortnr+1 />
insert into tmp_etl_step(
uniquename ,
caption ,
systeminfo_id ,
step_type_uniquename,
sortnr,
force_continue,
etl_job_id ,
parent_step_uniquename,
parent_job_uniquename,
logfile,
custom_step,
already_exists
)
select '${etl_step.uniquename}',
'${etl_step.name}',
${etl_job.systeminfo_id},
'${etl_step.type}' as step_type_uniquename,
${sortnr}*10 as sortnr,
0 as force_continue,
J.tid as etl_job_id,
<#if etl_step.parent?exists && etl_step.parent !="" >
'${etl_step.parent}',
<#else>
'' as parent_step_uniquename,
</#if>
J.uniquename,
'${etl_job.logfile}' as logfile,
0,
0
from etl_job J
where J.uniquename='${etl_job.uniquename}'
and J.systeminfo_id=${etl_job.systeminfo_id};
</#if> --end of one job's steps
</#foreach>
--first insert job params (replace strategy: delete staged jobs' params, re-insert):
delete from etl_job_param
where etl_job_id in (
SELECT distinct
etl_job_id
FROM tmp_etl_job_param )
;
insert into etl_job_param
(
etl_job_id,
uniquename,
name,
param_default
)
SELECT
etl_job_id,
uniquename,
name,
param_default
FROM tmp_etl_job_param
;
--now insert steps:
--resolve step type uniquename -> etl_step_type.tid
update tmp_etl_step set step_type=(select T.tid from etl_step_type T
where T.uniquename=tmp_etl_step.step_type_uniquename);
-- select * from tmp_etl_step
-- where step_type is null;--_uniquename from tmp_etl_step;
--detect existing steps (keep their tid, mirror current caption/logfile/custom_step):
<#if SQLdialect='Postgres'>
--Postgres dialect:
update tmp_etl_step set tid=S.tid,
already_exists=1,
caption=S.caption,
logfile=S.logfile,
custom_step=S.custom_step
from etl_step S where S.uniquename=tmp_etl_step.uniquename
and S.systeminfo_id=tmp_etl_step.systeminfo_id
;
<#else>
--Informix dialect:
update tmp_etl_step set (tid,
already_exists,
caption,
logfile,
custom_step)
= ((select
tid,
1 as already_exists,
caption,
logfile,
custom_step
from etl_step S where S.uniquename=tmp_etl_step.uniquename
and S.systeminfo_id=tmp_etl_step.systeminfo_id))
where 0 <(select count(*)
from etl_step S where S.uniquename=tmp_etl_step.uniquename
and S.systeminfo_id=tmp_etl_step.systeminfo_id)
;
</#if>
--insert new steps:
insert into etl_step(
uniquename,
caption,
systeminfo_id,
step_type_id,
logfile,
custom_step)
select
uniquename,
caption,
systeminfo_id,
step_type,
logfile,
custom_step
FROM tmp_etl_step
where already_exists=0
;
--determine the tid of newly inserted steps:
update tmp_etl_step set tid=(select S.tid
from etl_step S where S.uniquename=tmp_etl_step.uniquename
and S.systeminfo_id=tmp_etl_step.systeminfo_id)
where already_exists=0
;
--resolve parent step uniquename -> tid:
update tmp_etl_step set parent_step_id=(select S.tid
from etl_step S where S.uniquename=tmp_etl_step.parent_step_uniquename
and S.systeminfo_id=tmp_etl_step.systeminfo_id)
;
--clear old properties of the staged steps before re-inserting them below
delete from etl_step_property
where etl_step_id in (select T.tid
FROM tmp_etl_step T )
;
--clear old relations of the staged jobs; custom_step=0 keeps user-defined steps
delete from etl_step_relation
where job_id in (select J.tid
FROM tmp_etl_job J)
and custom_step=0
;
--now step properties: stage prop_name only; prop_value (TEXT) is written
--at the end of the script (dialect-specific, see below).
--Properties for steps of other jobs match no row in tmp_etl_step and are skipped.
<#foreach etl_step_property in etl_step_properties>
insert into tmp_etl_step_property(
etl_step_id,
prop_name)
select
T.tid as etl_step_id,
'${etl_step_property.prop_name}'
FROM tmp_etl_step T
where uniquename ='${etl_step_property.etl_step}'
;
</#foreach>
--insert into the real table:
insert into etl_step_property(
etl_step_id,
prop_name)
select
T.etl_step_id,
T.prop_name
FROM tmp_etl_step_property T
;
--now step relations (step -> parent step -> job linkage):
insert into tmp_etl_step_relation(
step_id,
parent_step_id,
job_id,
force_continue,
step_active,
sortnr,
custom_step
)
select
tid,
parent_step_id,
etl_job_id,
0 as force_continue,
1 as step_active,
sortnr,
0
FROM tmp_etl_step
;
insert into etl_step_relation(
step_id,
parent_step_id,
job_id,
force_continue,
step_active,
sortnr,
custom_step
)
select
step_id,
parent_step_id,
job_id,
force_continue,
step_active,
sortnr,
custom_step
FROM tmp_etl_step_relation
;
</#foreach> --end of job loop
--drop all staging tables
drop table tmp_etl_step;
drop table tmp_etl_job;
drop table tmp_etl_job_param;
drop table tmp_etl_step_property;
drop table tmp_etl_step_relation;
--self-tests: emit ERFOLG when the count matches the assertion, FEHLER otherwise
<#foreach testfall in testfaelle>
select 'testfall ${testfall.testcase}: ERFOLG'
from xdummy
where ${testfall.assertion}=(${testfall.sql})
;
select 'testfall ${testfall.testcase}: FEHLER bei ${testfall.testcase}'
from xdummy
where ${testfall.assertion}!=(${testfall.sql})
;
</#foreach> --end of test cases
<#if SQLdialect='Informix'>
--now xupdates:
--Informix cannot update TEXT columns in plain SQL, so update via jdbc (xupdate):
--to keep DOSQL from triggering the xupdater when this script starts,
--the xupdate markup is written to a temporary file first.
--(the "<xup""date>" split prevents DOSQL from seeing the literal tag)
! echo "<xup""date>" > "./tmp"$MANDANTID".sql"
<#foreach etl_step_property in etl_step_properties>
! echo '<text table="etl_step_property" field="prop_value" where="prop_name='\''${etl_step_property.prop_name}'\'' and etl_step_id=(select S.tid from etl_step S where S.uniquename='\''${etl_step_property.etl_step}'\'')">${etl_step_property.prop_value}</text>' >>"./tmp"$MANDANTID".sql"
</#foreach>
! echo "</xup""date>" >> "./tmp"$MANDANTID".sql"
! DOSQL "./tmp"$MANDANTID".sql"
<#else>
--Postgres: write prop_value directly.
--NOTE(review): prop_value is interpolated into a quoted SQL literal; values
--containing single quotes would break this statement - currently none do.
<#foreach etl_step_property in etl_step_properties>
update etl_step_property set prop_value='${etl_step_property.prop_value}' where prop_name='${etl_step_property.prop_name}' and etl_step_id=(select S.tid from etl_step S where S.uniquename='${etl_step_property.etl_step}');
</#foreach>
</#if>

5
src-modules/module/etl/schluesseltabellen/etl_step_type.unl

@@ -0,0 +1,5 @@
1^LOAD^Tabelle hochladen^de.superx.etl.ActionHandler.EtlActionHandlerUploadRecords^
2^DOSQL^SQL-Script ausführen^de.superx.etl.ActionHandler.EtlActionHandlerDosql^
3^DOQUERY^SQL-Query ausführen^de.superx.etl.ActionHandler.EtlActionHandlerDoquery^
4^UNLOAD^SQL-Ergebnis entladen^de.superx.etl.ActionHandler.EtlActionHandlerUnloadRecords^
5^MSG^Logausgabe^de.superx.etl.ActionHandler.EtlActionHandlerMsg^

440
src-modules/module/etl/schluesseltabellen/fm_templates.unl

@@ -0,0 +1,440 @@
216^ETL_MAKROS^<#macro ETL_STEPS_FUELLEN>\
\
\
create temp table tmp_etl_step(\
tid INTEGER, \
uniquename VARCHAR(255) , \
caption VARCHAR(255) , \
systeminfo_id INTEGER not null, \
step_type INTEGER, \
step_type_uniquename VARCHAR(255), \
sortnr SMALLINT not null, \
force_continue SMALLINT,\
etl_job_id INTEGER , \
parent_step_id INTEGER , \
parent_step_uniquename varchar(255),\
parent_job_uniquename varchar(255),\
logfile varchar(255),\
custom_step smallint,\
already_exists smallint\
) \
;\
\
create temp table tmp_etl_job(\
tid INTEGER, \
uniquename VARCHAR(255) , \
caption VARCHAR(255) , \
systeminfo_id INTEGER not null,\
logfile varchar(255),\
already_exists smallint,\
custom_job smallint\
) \
;\
\
create temp table tmp_etl_job_param(\
tid SERIAL not null, \
etl_job_id INTEGER , \
uniquename VARCHAR(255) not null, \
name VARCHAR(255) , \
param_default VARCHAR(255) \
) \
;\
\
\
create temp table tmp_etl_step_property(\
tid SERIAL not null, \
etl_step_id INTEGER not null, \
prop_name VARCHAR(255) , \
prop_value text\
\
) \
;\
\
create temp table tmp_etl_step_relation(\
tid SERIAL not null, \
step_id INTEGER not null, \
parent_step_id INTEGER , \
job_id INTEGER not null, \
force_continue SMALLINT default 1 , \
step_active SMALLINT default 1, \
sortnr SMALLINT default 1,\
custom_step SMALLINT default 1\
\
) \
;\
\
<#foreach etl_job in etl_jobs>\
\
truncate table tmp_etl_job;\
truncate table tmp_etl_step;\
truncate table tmp_etl_job_param;\
truncate table tmp_etl_step_property;\
truncate table tmp_etl_step_relation;\
\
--tids der jobs dürfen sich nicht ändern, daher \
--\
-- 1. vorh. Jobs updaten\
-- 2. neue Jobs einfügen\
-- 3. alte Jobs löschen\
\
insert into tmp_etl_job(uniquename,caption,systeminfo_id,logfile,already_exists,custom_job)\
values ('${etl_job.uniquename}',\
'${etl_job.name}',\
${etl_job.systeminfo_id},\
'${etl_job.logfile}',0,0);\
\
<#if SQLdialect='Postgres'>\
--Postgres Dialekt:\
\
update tmp_etl_job set tid=J.tid,\
already_exists=1,\
caption=J.caption,\
logfile=J.logfile,\
custom_job=J.custom_job\
from etl_job J where J.uniquename=tmp_etl_job.uniquename\
and J.systeminfo_id=tmp_etl_job.systeminfo_id\
;\
\
<#else>\
\
--Informix Dialekt:\
update tmp_etl_job set (tid,\
already_exists,\
caption,\
logfile,\
custom_job) \
= ((select \
tid,\
1 as already_exists,\
caption,\
logfile,\
custom_job\
from etl_job J where J.uniquename=tmp_etl_job.uniquename\
and J.systeminfo_id=tmp_etl_job.systeminfo_id))\
where 0 <(select count(*)\
from etl_job J where J.uniquename=tmp_etl_job.uniquename\
and J.systeminfo_id=tmp_etl_job.systeminfo_id)\
\
;\
\
</#if>\
\
\
\
--TODO Informix\
\
--neue jobs:\
insert into etl_job(uniquename,caption,systeminfo_id,custom_job)\
select uniquename,caption,systeminfo_id,custom_job\
from tmp_etl_job\
where already_exists=0;\
--tid von neuen Jobs ermitteln:\
update tmp_etl_job set tid=(select J.tid\
from etl_job J where J.uniquename=tmp_etl_job.uniquename\
and J.systeminfo_id=tmp_etl_job.systeminfo_id)\
where already_exists=0\
;\
\
--TODO\
--obsolete Jobs: sollen bei Deinstallation des Moduls entfernt werden\
\
\
--Parameter:\
<#foreach etl_job_param in etl_job_params>\
<#if etl_job_param.etl_job==etl_job.uniquename>\
\
insert into tmp_etl_job_param(\
etl_job_id , \
uniquename, \
name , \
param_default)\
select J.tid,\
'${etl_job_param.param_name}',\
'${etl_job_param.name}',\
'${etl_job_param.param_default}'\
from tmp_etl_job J\
;\
</#if>\
</#foreach>\
\
\
\
--ETL-Schritte \
<#assign sortnr=0 />\
<#foreach etl_step in etl_steps>\
<#if etl_step.etl_job==etl_job.uniquename>\
\
<#assign sortnr=sortnr+1 />\
\
insert into tmp_etl_step(\
uniquename , \
caption , \
systeminfo_id , \
step_type_uniquename,\
sortnr, \
force_continue,\
etl_job_id , \
parent_step_uniquename,\
parent_job_uniquename,\
logfile,\
custom_step,\
already_exists\
)\
select '${etl_step.uniquename}',\
'${etl_step.name}',\
${etl_job.systeminfo_id},\
'${etl_step.type}' as step_type_uniquename,\
${sortnr}*10 as sortnr,\
0 as force_continue,\
J.tid as etl_job_id,\
<#if etl_step.parent?exists && etl_step.parent !="" >\
'${etl_step.parent}',\
<#else>\
'' as parent_step_uniquename,\
</#if>\
J.uniquename,\
'${etl_job.logfile}' as logfile,\
0,\
0\
from etl_job J\
where J.uniquename='${etl_job.uniquename}'\
and J.systeminfo_id=${etl_job.systeminfo_id};\
\
\
\
\
</#if> --Ende steps eines job\
</#foreach>\
\
--erst job-params einfügen:\
\
delete from etl_job_param\
where etl_job_id in (\
SELECT distinct \
etl_job_id\
FROM tmp_etl_job_param )\
;\
\
insert into etl_job_param\
(\
etl_job_id,\
uniquename,\
name,\
param_default\
)\
SELECT \
etl_job_id,\
uniquename,\
name,\
param_default\
FROM tmp_etl_job_param \
;\
\
--nun steps einfügen:\
\
\
update tmp_etl_step set step_type=(select T.tid from etl_step_type T\
where T.uniquename=tmp_etl_step.step_type_uniquename);\
\
select * from tmp_etl_step\
where step_type is null;--_uniquename from tmp_etl_step;\
\
\
--vorhandene Steps erkennen:\
<#if SQLdialect='Postgres'>\
--Postgres Dialekt:\
\
update tmp_etl_step set tid=S.tid,\
already_exists=1,\
caption=S.caption,\
logfile=S.logfile,\
custom_step=S.custom_step\
from etl_step S where S.uniquename=tmp_etl_step.uniquename\
and S.systeminfo_id=tmp_etl_step.systeminfo_id\
;\
<#else>\
\
--Informix Dialekt:\
update tmp_etl_step set (tid,\
already_exists,\
caption,\
logfile,\
custom_step) \
= ((select \
tid,\
1 as already_exists,\
caption,\
logfile,\
custom_step\
from etl_step S where S.uniquename=tmp_etl_step.uniquename\
and S.systeminfo_id=tmp_etl_step.systeminfo_id))\
where 0 <(select count(*)\
from etl_step S where S.uniquename=tmp_etl_step.uniquename\
and S.systeminfo_id=tmp_etl_step.systeminfo_id)\
;\
\
\
\
</#if>\
\
\
--neue Steps einfügen:\
insert into etl_step(\
uniquename,\
caption,\
systeminfo_id,\
step_type_id,\
logfile,\
custom_step)\
select \
uniquename,\
caption,\
systeminfo_id,\
step_type,\
logfile,\
custom_step\
FROM tmp_etl_step \
where already_exists=0\
;\
\
--tid von neuen steps ermitteln:\
update tmp_etl_step set tid=(select S.tid\
from etl_step S where S.uniquename=tmp_etl_step.uniquename\
and S.systeminfo_id=tmp_etl_step.systeminfo_id)\
where already_exists=0\
;\
\
--parent ermitteln:\
update tmp_etl_step set parent_step_id=(select S.tid\
from etl_step S where S.uniquename=tmp_etl_step.parent_step_uniquename\
and S.systeminfo_id=tmp_etl_step.systeminfo_id)\
;\
\
\
delete from etl_step_property\
where etl_step_id in (select T.tid\
FROM tmp_etl_step T )\
;\
delete from etl_step_relation\
where job_id in (select J.tid\
FROM tmp_etl_job J)\
and custom_step=0\
;\
\
\
--jetzt step-params:\
\
<#foreach etl_step_property in etl_step_properties>\
insert into tmp_etl_step_property(\
etl_step_id,\
prop_name)\
select \
T.tid as etl_step_id,\
'${etl_step_property.prop_name}'\
\
FROM tmp_etl_step T \
where uniquename ='${etl_step_property.etl_step}'\
;\
\
</#foreach>\
\
--einfügen in echte Tabelle:\
insert into etl_step_property(\
etl_step_id,\
prop_name)\
select \
T.etl_step_id,\
T.prop_name\
FROM tmp_etl_step_property T \
;\
\
--jetzt step-relation:\
insert into tmp_etl_step_relation(\
step_id,\
parent_step_id,\
job_id,\
force_continue,\
step_active,\
sortnr,\
custom_step\
)\
select \
tid,\
parent_step_id,\
etl_job_id,\
0 as force_continue,\
1 as step_active,\
sortnr,\
0\
FROM tmp_etl_step \
;\
\
insert into etl_step_relation(\
step_id,\
parent_step_id,\
job_id,\
force_continue,\
step_active,\
sortnr,\
custom_step\
)\
select \
step_id,\
parent_step_id,\
job_id,\
force_continue,\
step_active,\
sortnr,\
custom_step\
FROM tmp_etl_step_relation \
;\
</#foreach> --Ende job\
\
\
drop table tmp_etl_step;\
drop table tmp_etl_job;\
drop table tmp_etl_job_param;\
drop table tmp_etl_step_property;\
drop table tmp_etl_step_relation;\
\
<#if testfaelle?exists>\
\
<#foreach testfall in testfaelle>\
select 'testfall ${testfall.testcase}: ERFOLG'\
from xdummy\
where ${testfall.assertion}=(${testfall.sql})\
;\
select 'testfall ${testfall.testcase}: FEHLER bei ${testfall.testcase}'\
from xdummy\
where ${testfall.assertion}!=(${testfall.sql})\
;\
\
\
</#foreach> --Ende Testfälle\
</#if>\
\
<#if SQLdialect='Informix'> \
--nun xupdates:\
--bei informix kann man TEXT Spalten nicht updaten, daher per jdbc updaten:\
--damit DOSQL nicht den xupdater beim Start dieses Scriptes anwirft,\
--wird das in eine temp. Datei ausgelagert.\
! echo "<xup""date>" > "./tmp"$MANDANTID".sql"\
<#foreach etl_step_property in etl_step_properties>\
\
! echo '<text table="etl_step_property" field="prop_value" where="prop_name='\\''${etl_step_property.prop_name}'\\'' and etl_step_id=(select S.tid from etl_step S where S.uniquename='\\''${etl_step_property.etl_step}'\\'')">${etl_step_property.prop_value}</text>' >>"./tmp"$MANDANTID".sql"\
\
\
</#foreach>\
! echo "</xup""date>" >> "./tmp"$MANDANTID".sql"\
! DOSQL "./tmp"$MANDANTID".sql"\
\
<#else>\
--Postgres:\
<#foreach etl_step_property in etl_step_properties>\
\
update etl_step_property set prop_value='${etl_step_property.prop_value?replace("'", "''")}' where prop_name='${etl_step_property.prop_name}' and etl_step_id=(select S.tid from etl_step S where S.uniquename='${etl_step_property.etl_step?replace("'", "''")}');\
\
</#foreach>\
</#if>\
\
</#macro>^Datenbankunabhängigkeit^^1^

25
src-modules/module/etl/schluesseltabellen/fm_templates_fuellen.sql

@ -0,0 +1,25 @@
--freemarker template
--Merges staged templates (tmp_templates, filled by the upload script) into fm_templates.
--Templates are always replaced: delete the rows that are about to be re-imported.
delete from fm_templates where id in (select id from tmp_templates);
<#if SQLdialect='Postgres'>
--Postgres only: resync the sequence before inserting.
select sp_update_sequence('fm_templates');
</#if>
insert into fm_templates(
id,
content,
description,
comment,
version)
SELECT
id,
content,
description,
comment,
version
FROM tmp_templates;
--clean up the staging table
drop table tmp_templates;

25
src-modules/module/etl/schluesseltabellen/fm_templates_unload.x

@ -0,0 +1,25 @@
#!/bin/bash
#Deletes all non-ETL templates and unloads the table, sorted, into the raw data directory (for git diff).
#Create a staging table matching fm_templates' layout.
DOQUERY "create table tmp_templates(tid integer,
id char(200) not null,
content text not null,
description char(200) ,
comment char(200) ,
version integer default 1
)
;"
#Load the module's current unload file so we know which template ids belong to this module.
sx_auto_upload_table.x tmp_templates ./fm_templates.unl
#Switch to the JDBC client for the unload.
#NOTE(review): presumably required for the file/format arguments of DOQUERY below -- confirm.
SX_CLIENT=jdbc
export SX_CLIENT
#man_catalogue
#Unload only templates whose id appears in the staging table, ordered by tid
#so the resulting file is stable for diffing.
DOQUERY "select * from fm_templates where id in (select T.id from tmp_templates T) order by tid" false $DBDELIMITER ./fm_templates.unl txt
#Switch back to psql and drop the staging table.
SX_CLIENT=psql
export SX_CLIENT
DOQUERY "drop table tmp_templates;"

16
src-modules/module/etl/schluesseltabellen/fm_templates_update.x

@ -0,0 +1,16 @@
#!/bin/bash
#Uploads fm_templates.unl into a staging table and merges it into fm_templates
#via fm_templates_fuellen.sql (which also drops the staging table).
DOQUERY "create table tmp_templates(tid integer,
id char(200) not null,
content text not null,
description char(200) ,
comment char(200) ,
version integer
)
;"
#Load the unload file into the staging table.
sx_auto_upload_table.x tmp_templates fm_templates.unl
#Merge staged rows into fm_templates.
DOSQL fm_templates_fuellen.sql

14
src-modules/module/etl/schluesseltabellen/sachgebiete_fuellen.sql

@ -0,0 +1,14 @@
--Installs subject area ("Sachgebiet") 271: Laderoutinen Administr.
delete from sachgebiete where tid in (271);
insert into sachgebiete(tid,name) values (271,'Laderoutinen Administr.');
--Re-grant areas 270 and 271 to the admin groups; delete existing grants first
--so the inserts below stay idempotent.
--NOTE(review): area 270 is granted but not (re)created here -- presumably
--installed elsewhere; confirm.
delete from group_sachgeb_bez where sachgebiete_id in (270,271)
and groupinfo_id in (select G.tid from groupinfo G where G.name='Administratoren' or G.name='superx') ;
insert into group_sachgeb_bez
select G.tid,270 from groupinfo G where G.name='Administratoren' or G.name='superx';
insert into group_sachgeb_bez
select G.tid,271 from groupinfo G where G.name='Administratoren' or G.name='superx';

3
src-modules/module/etl/schluesseltabellen/sachgebiete_loeschen.sql

@ -0,0 +1,3 @@
--Removes subject areas 270 and 271 and all group/user assignments to them.
delete from sachgebiete where tid in (270,271);
delete from group_sachgeb_bez where sachgebiete_id in (270,271);
delete from user_sachgeb_bez where sachgebiete_id in (270,271);

49
src-modules/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql

@ -0,0 +1,49 @@
--used in etl-job sx_insert_mask
--Merges staged stylesheets (tmp_etl_stylesheets) into sx_stylesheets and
--rebuilds the mask/field assignments from the staging tables.
--check stylesheets: match staged rows to existing ones by filename
update tmp_etl_stylesheets set tid=(select S.tid
from sx_stylesheets S
where S.filename=tmp_etl_stylesheets.filename);
--new Stylesheet? insert rows that found no match above
insert into sx_stylesheets(filename,caption,description,relation,contenttype)
select filename,caption,description,relation,contenttype
from tmp_etl_stylesheets
where tid is null;
--lookup tid: after the insert every staged row resolves to a sx_stylesheets tid
update tmp_etl_stylesheets set tid=(select S.tid
from sx_stylesheets S
where S.filename=tmp_etl_stylesheets.filename);
--drop old assignments that are about to be rebuilt
delete from sx_mask_style where maskeninfo_id in (select maskeninfo_id
from tmp_etl_mask_style);
delete from stylesheet_field where stylesheet_id in (select tid
from tmp_etl_stylesheets);
--now insert:
insert into sx_mask_style ( maskeninfo_id, stylesheet_id, ord)
SELECT M.maskeninfo_id, S.tid, M.ord
FROM tmp_etl_mask_style M, sx_stylesheets S
where M.stylesheet_filename=S.filename
;
insert into stylesheet_field (stylesheet_id, tablename, fieldname)
SELECT S.tid,F.tablename, F.fieldname
FROM tmp_etl_stylesheet_field F, sx_stylesheets S
where F.stylesheet_filename=S.filename
;
--clean up staging tables
drop table tmp_etl_stylesheets;
drop table tmp_etl_mask_style;
drop table tmp_etl_stylesheet_field;

22
src-modules/module/etl/schluesseltabellen/themenbaum_fuellen.sql

@ -0,0 +1,22 @@
--Freemarker Template
--Inserts staged menu-tree entries (tmp_etl_themenbaum) into themenbaum.
<#if SQLdialect='Postgres'>
--Postgres only: resync the sequence before inserting.
select sp_update_sequence('themenbaum');
</#if>
--Resolve the parent node by name; min(tid) makes the choice deterministic
--when several nodes share the same name.
update tmp_etl_themenbaum set parent=(select min(tid) from themenbaum where name=tmp_etl_themenbaum.parent_name);
--already exists? drop staged rows whose mask is already attached to the same parent
delete from tmp_etl_themenbaum
where exists(select T.tid
from themenbaum T where T.maskeninfo_id=tmp_etl_themenbaum.maskeninfo_id
and T.parent=tmp_etl_themenbaum.parent);
--then insert the remaining (new) entries:
insert into themenbaum (name,maskeninfo_id,parent,gueltig_seit,gueltig_bis)
select name,maskeninfo_id,parent,gueltig_seit,gueltig_bis from tmp_etl_themenbaum;
--clean up the staging table
drop table tmp_etl_themenbaum;

131
src/de/superx/etl/ActionHandler/EtlActionHandler.java

@ -0,0 +1,131 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
import de.superx.etl.EtlStep;
public class EtlActionHandler {

    /** Handler-specific configuration (e.g. select_stmt, PATH_TO_INPUTFILE). */
    public Properties handlerSpecificProperties;
    /** Parameters supplied at job run time. */
    public Properties runTimeParams;
    /** Logger for progress and error reporting. */
    public Logger logger;
    private String handlerType;
    protected String logOutput = "";
    protected long numberOfRows = 0;
    protected int returnCode;
    /** Connection wrapper opened by getConnection(); closed by the concrete handler. */
    protected SxConnection stepSxConnection;
    protected Connection stepConnection;
    /** Path to the db.properties file used to open the step connection. */
    public String propFile;
    public StringWriter outputStringWriter;

    public EtlActionHandler(String handlerType, Properties hsp, Logger logger) {
        this.handlerType = handlerType;
        this.handlerSpecificProperties = hsp;
        this.logger = logger;
    }

    public EtlActionHandler() {
        // reflection API needs an empty constructor
    }

    public void setReturnCode(int returnCode) {
        this.returnCode = returnCode;
    }

    public int getReturnCode() {
        return returnCode;
    }

    public void setNumberOfRows(long numberOfRows) {
        this.numberOfRows = numberOfRows;
    }

    public long getNumberOfRows() {
        return numberOfRows;
    }

    public void setConnection(Connection con) {
        // TODO Auto-generated method stub
    }

    public void setLogOutput(String log) {
        logOutput = log;
    }

    public String getLogOutput() {
        return logOutput;
    }

    public String getPropFile() {
        return propFile;
    }

    public void setPropFile(String propFile) {
        this.propFile = propFile;
    }

    public StringWriter getOutputStringWriter() {
        return outputStringWriter;
    }

    public void setOutputStringWriter(StringWriter sw) {
        this.outputStringWriter = sw;
    }

    public Properties getHandlerSpecificProperties() {
        return handlerSpecificProperties;
    }

    public void setHandlerSpecificProperties(Properties hsp) {
        this.handlerSpecificProperties = hsp;
    }

    public Properties getRunTimeParams() {
        return runTimeParams;
    }

    public void setRunTimeParams(Properties runTimeParams) {
        this.runTimeParams = runTimeParams;
    }

    public Logger getLogger() {
        return logger;
    }

    public void setLogger(Logger logger) {
        this.logger = logger;
    }

    /**
     * Opens the step's database connection from propFile and stores it in
     * stepSxConnection/stepConnection.
     *
     * Fix: the original created a Statement that was never used or closed
     * (a resource leak) and an unused DatabaseMetaData local. The metadata
     * call is kept as a cheap probe that the connection is actually usable.
     *
     * @throws SQLException when the connection cannot be established
     */
    protected void getConnection() throws SQLException {
        stepSxConnection = new SxConnection();
        stepSxConnection.setPropfile(propFile);
        logger.config("Starting Connection...");
        try {
            stepConnection = stepSxConnection.getConnection();
            // Probe the connection once; result intentionally discarded.
            stepConnection.getMetaData();
        } catch (Exception e) {
            e.printStackTrace();
            logger.severe("Keine DB-Verbindung: " + e.toString());
            throw new SQLException("Keine DB-Verbindung: " + e.toString());
        }
    }
}

48
src/de/superx/etl/ActionHandler/EtlActionHandlerDoquery.java

@ -0,0 +1,48 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
import de.superx.etl.QueryResultSerializer;
import de.superx.etl.SqlExecutor;
public class EtlActionHandlerDoquery extends EtlActionHandler implements EtlActionHandlerI {

    public EtlActionHandlerDoquery() {
    }

    /**
     * Executes the SQL from handler property "select_stmt" with the current
     * runtime parameters and records the return code and row count.
     *
     * Fix: the original called getConnection() twice (leaking the first
     * SxConnection) and closed the connection twice; the close now also
     * happens in a finally block so a throwing executeQueries() no longer
     * leaks the connection.
     *
     * @return 0 on success, non-zero executor return code otherwise
     */
    @Override
    public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat)
            throws SQLException, Exception {
        int returnCode = 0;
        this.getConnection();
        String query = handlerSpecificProperties.getProperty("select_stmt");
        try {
            SqlExecutor mySqlExecutor = new SqlExecutor("default", stepSxConnection, query, runTimeParams);
            returnCode = mySqlExecutor.executeQueries();
            super.setNumberOfRows(mySqlExecutor.getNumberOfRows());
        } finally {
            // Close exactly once, even when executeQueries throws.
            stepSxConnection.close();
        }
        super.setReturnCode(returnCode);
        return returnCode;
    }
}

68
src/de/superx/etl/ActionHandler/EtlActionHandlerDosql.java

@ -0,0 +1,68 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.File;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
import de.superx.etl.QueryResultSerializer;
import de.superx.etl.SqlExecutor;
public class EtlActionHandlerDosql extends EtlActionHandler implements EtlActionHandlerI {

    public EtlActionHandlerDosql() {
    }

    /**
     * Executes the SQL script file named by handler property
     * "PATH_TO_INPUTFILE" with the current runtime parameters.
     *
     * Fixes: the original wrapped "new File(filename)" in a try/catch that
     * could never fire (the File constructor does not throw for a plain
     * path), so a missing file was only detected deep inside the executor;
     * it also closed the connection twice and computed an unused "output"
     * local. The file is now checked explicitly and the connection is
     * closed exactly once.
     *
     * @return 0 on success
     * @throws Exception when opening or executing the script fails
     */
    @Override
    public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat)
            throws SQLException, Exception {
        int returnCode = 0;
        String log = "";
        long numberOfRows = 0;
        this.getConnection();
        String filename = handlerSpecificProperties.getProperty("PATH_TO_INPUTFILE");
        File sqlScriptFile = (filename == null) ? null : new File(filename);
        if (sqlScriptFile == null || !sqlScriptFile.isFile()) {
            returnCode = 1;
            log += " beim Öffnen der Datei " + filename + ": Datei nicht gefunden";
            super.setLogOutput(log);
        }
        if (returnCode == 0) {
            try {
                SqlExecutor mySqlExecutor = new SqlExecutor("default", stepSxConnection, sqlScriptFile, runTimeParams);
                returnCode = mySqlExecutor.executeQueries();
                numberOfRows = mySqlExecutor.getNumberOfRows();
            } catch (Exception e) {
                returnCode = 1;
                log += " beim Ausführen der Datei " + filename + ": " + e.toString();
                super.setLogOutput(log);
            }
            super.setNumberOfRows(numberOfRows);
        }
        // Close exactly once (the original closed twice).
        stepSxConnection.close();
        super.setReturnCode(returnCode);
        if (returnCode == 1)
            throw new Exception("Fehler " + log);
        return returnCode;
    }
}

51
src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java

@ -0,0 +1,51 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
import de.superx.etl.MaskExecutor;
import de.superx.etl.QueryResultSerializer;
public class EtlActionHandlerExecuteMask extends EtlActionHandler implements EtlActionHandlerI {

    public EtlActionHandlerExecuteMask() {
    }

    /**
     * Executes the mask identified by handler property "maskeninfo_id" as the
     * user from property "username" and captures the mask's output.
     *
     * Fixes: replaces the deprecated Integer(String) constructor with
     * Integer.parseInt, and propagates the MaskExecutor's return code to the
     * caller (the original always returned 0, masking failures).
     *
     * @return the MaskExecutor's return code (0 = success)
     */
    @Override
    public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat)
            throws SQLException, Exception {
        this.getConnection();
        int maskeninfo_id = Integer.parseInt(handlerSpecificProperties.getProperty("maskeninfo_id"));
        String username = handlerSpecificProperties.getProperty("username");
        String mandantenId = "default";
        Properties params = null; // TODO: read from assertion
        MaskExecutor myMaskExecutor = new MaskExecutor(mandantenId, stepSxConnection, maskeninfo_id, username,
                params, sw);
        myMaskExecutor.setLogger(logger);
        super.setNumberOfRows(myMaskExecutor.executeMask(mandantenId,
                maskeninfo_id,
                username,
                params));
        int returnCode = myMaskExecutor.getReturnCode();
        super.setReturnCode(returnCode);
        super.setOutputStringWriter(myMaskExecutor.getOutputString());
        stepSxConnection.close();
        return returnCode;
    }
}

34
src/de/superx/etl/ActionHandler/EtlActionHandlerI.java

@ -0,0 +1,34 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
// Common contract for ETL step action handlers. Implementations are
// instantiated via the reflection API, hence their no-arg constructors.
public interface EtlActionHandlerI {
// Return code of the last execute() run (0 = success).
int getReturnCode();
// Number of rows processed by the last execute() run.
long getNumberOfRows();
// Output document collected during execute().
StringWriter getOutputStringWriter();
// Plain-text log produced during execute().
String getLogOutput();
void setConnection(Connection con);
void setLogger(Logger logger);
// Handler-specific configuration (e.g. select_stmt, PATH_TO_INPUTFILE).
void setHandlerSpecificProperties(Properties hsp);
// Parameters supplied at job run time.
void setRunTimeParams(Properties runTimeParams);
void setOutputStringWriter(StringWriter sw);
// Runs the action; returns 0 on success.
int execute(StringWriter sw, String mandantid, String stepUniquename,String outFormat) throws SQLException,Exception;
// Path to the db.properties file used to open the step's database connection.
void setPropFile(String propfile);
}

30
src/de/superx/etl/ActionHandler/EtlActionHandlerMsg.java

@ -0,0 +1,30 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
public class EtlActionHandlerMsg extends EtlActionHandler implements EtlActionHandlerI {

    public EtlActionHandlerMsg() {
        // reflection API needs an empty constructor
    }

    /**
     * Writes the configured message (handler property "msg") to the handler
     * log, prefixed with the step's unique name. No database work is done.
     *
     * @return always 0
     */
    @Override
    public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat)
            throws SQLException, Exception {
        final String message = handlerSpecificProperties.getProperty("msg");
        super.setLogOutput(stepUniquename + ":" + message);
        return 0;
    }
}

43
src/de/superx/etl/ActionHandler/EtlActionHandlerUnloadRecords.java

@ -0,0 +1,43 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
import de.superx.etl.QueryResultSerializer;
public class EtlActionHandlerUnloadRecords extends EtlActionHandler implements EtlActionHandlerI {

    public EtlActionHandlerUnloadRecords() {
    }

    /**
     * Unloads the result of the SQL in handler property "select_stmt" in the
     * given output format, writing into the supplied StringWriter.
     *
     * Fixes: the original always returned 0 while storing the serializer's
     * real return code only via setReturnCode (callers checking the return
     * value never saw failures); the connection is now closed in a finally
     * block so a throwing unload no longer leaks it.
     *
     * @return the serializer's return code (0 = success)
     */
    @Override
    public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat)
            throws SQLException, Exception {
        int returnCode = 0;
        this.getConnection();
        String query = handlerSpecificProperties.getProperty("select_stmt");
        try {
            QueryResultSerializer myQueryResultSerializer =
                    new QueryResultSerializer("default", stepSxConnection, query, sw);
            myQueryResultSerializer.setLogger(logger);
            super.setNumberOfRows(myQueryResultSerializer.unloadQueryResults(stepUniquename, outFormat, "", true));
            returnCode = myQueryResultSerializer.getReturnCode();
            super.setReturnCode(returnCode);
            super.setOutputStringWriter(myQueryResultSerializer.getOutputString());
        } finally {
            stepSxConnection.close();
        }
        return returnCode;
    }
}

58
src/de/superx/etl/ActionHandler/EtlActionHandlerUploadRecords.java

@ -0,0 +1,58 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.ActionHandler;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import java.util.logging.Logger;
import de.superx.bin.SxConnection;
import de.superx.etl.QueryResultSerializer;
import de.superx.etl.TableUploader;
public class EtlActionHandlerUploadRecords extends EtlActionHandler implements EtlActionHandlerI {

    public EtlActionHandlerUploadRecords() {
    }

    /**
     * Uploads the file from handler property "path_to_inputfile" into the
     * table from "target_table", honoring "format", "search_path" and
     * "truncateTargetTable".
     *
     * Fixes: the original requested stepSxConnection.getConnection() twice;
     * a single Connection is now reused for both uploader calls. The row
     * count is additionally recorded via setNumberOfRows (consistent with
     * the other handlers) and the connection is closed in a finally block.
     *
     * @return always 0; errors propagate as exceptions from uploadFile()
     */
    @Override
    public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat)
            throws SQLException, Exception {
        int returnCode = 0;
        this.getConnection();
        String target_table = handlerSpecificProperties.getProperty("target_table");
        String inputfile = handlerSpecificProperties.getProperty("path_to_inputfile");
        String xml_search_path = handlerSpecificProperties.getProperty("search_path");
        String format = handlerSpecificProperties.getProperty("format");
        String truncateTargetTable = handlerSpecificProperties.getProperty("truncateTargetTable");
        long numberOfRows = 0;
        try {
            TableUploader myUploader = new TableUploader();
            myUploader.setInFormat(format);
            myUploader.setTargetTable(target_table);
            myUploader.setSrcFile(inputfile);
            myUploader.setXml_search_path(xml_search_path);
            // Reuse one JDBC connection instead of requesting it twice.
            Connection uploadConnection = stepSxConnection.getConnection();
            myUploader.getConnection(uploadConnection, null);
            myUploader.setUploadConnection(uploadConnection);
            myUploader.setTruncateTargetTable(truncateTargetTable);
            numberOfRows = myUploader.uploadFile();
            super.setNumberOfRows(numberOfRows);
            super.setLogOutput("Number of rows loaded: " + numberOfRows);
        } finally {
            stepSxConnection.close();
        }
        return returnCode;
    }
}

200
src/de/superx/etl/EtlAction.java

@ -0,0 +1,200 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Properties;
import de.memtext.util.PropUtils;
import javax.sql.DataSource;
import de.superx.bin.SxConnection;
import de.superx.common.Sichten;
public class EtlAction {

    /** Job/step parameters (declared defaults, later merged with runtime params). */
    protected Properties params;
    /** When true, subclasses may continue after a failing sub-action. */
    private boolean contOnError;
    private String targetTable;
    /** Plain-text log set via setLogoutput()/appendLog(); null until first written. */
    private String logoutput;
    /** Collects the action's result output (typically XML). */
    protected StringWriter actionOutput = new StringWriter();
    /** Collects the timestamped action log written by addActionLog(). */
    protected StringWriter actionLog = new StringWriter();
    private String loglevel = "INFO";
    /** Creation time in ms; can be overridden via setStarttime(). */
    private long starttime = new java.util.Date().getTime();
    protected long endtime;
    private int returnCode;
    private Connection dbconnection;
    private Properties env;
    /** Unique name of this action (e.g. the etl_job uniquename). */
    private String uniquename;
    protected String logfile;
    /** Path to the db.properties file used when no connection is supplied. */
    protected String propfile;
    private String encoding = "UTF-8"; // not read here; kept for subclasses
    protected DataSource dataSource;

    /**
     * Creates an action with explicit logfile and db-properties locations.
     */
    public EtlAction(String uniquename, String logfile, String propfile) {
        this.uniquename = uniquename;
        this.logfile = logfile;
        this.propfile = propfile;
        this.addActionLog("Action initialized" + EtlUtils.NEWLINE);
    }

    /**
     * Creates an action; logfile/propfile can be set later or default.
     */
    public EtlAction(String uniquename) {
        this.uniquename = uniquename;
        this.addActionLog("Action initialized" + EtlUtils.NEWLINE);
    }

    public DataSource getDataSource() {
        return dataSource;
    }

    public void setDataSource(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public Connection getDbconnection() {
        return dbconnection;
    }

    public void setDbconnection(Connection dbconnection) {
        this.dbconnection = dbconnection;
    }

    public String getUniquename() {
        return uniquename;
    }

    public void setUniquename(String uniquename) {
        this.uniquename = uniquename;
    }

    public String getLogfile() {
        return logfile;
    }

    public void setLogfile(String logfile) {
        this.logfile = logfile;
    }

    public Properties getEnv() {
        return env;
    }

    public void setEnv(Properties env) {
        this.env = env;
    }

    public Properties getParams() {
        return params;
    }

    public void setParams(Properties params) {
        this.params = params;
    }

    public boolean isContOnError() {
        return contOnError;
    }

    public void setContOnError(boolean contOnError) {
        this.contOnError = contOnError;
    }

    public String getTargetTable() {
        return targetTable;
    }

    public void setTargetTable(String targetTable) {
        this.targetTable = targetTable;
    }

    public String getLogoutput() {
        return logoutput;
    }

    protected void setLogoutput(String log) {
        this.logoutput = log;
    }

    /**
     * Appends to the plain-text log.
     *
     * Fix: the original "logoutput += log" on the initially-null field
     * prepended the literal string "null" on first use.
     */
    protected void appendLog(String log) {
        if (this.logoutput == null)
            this.logoutput = log;
        else
            this.logoutput += log;
    }

    public String getLoglevel() {
        return loglevel;
    }

    public void setLoglevel(String loglevel) {
        this.loglevel = loglevel;
    }

    public long getStarttime() {
        return starttime;
    }

    public void setStarttime(long starttime) {
        this.starttime = starttime;
    }

    public long getEndtime() {
        return endtime;
    }

    public void setEndtime(long endtime) {
        this.endtime = endtime;
    }

    /** Duration in whole seconds between start and end time. */
    public long getDuration() {
        return (this.endtime - this.starttime) / 1000;
    }

    public String getPropfile() {
        return propfile;
    }

    public void setPropfile(String propfile) {
        this.propfile = propfile;
    }

    public int getReturnCode() {
        return returnCode;
    }

    public StringWriter getActionOutput() {
        return actionOutput;
    }

    public void setActionOutput(StringWriter actionOutput) {
        this.actionOutput = actionOutput;
    }

    public StringWriter getActionLog() {
        return actionLog;
    }

    public void setActionLog(StringWriter actionLog) {
        this.actionLog = actionLog;
    }

    /**
     * Appends a line to the action log, prefixed with the current date/time.
     */
    public void addActionLog(String actionLog) {
        String log = de.memtext.util.DateUtils.getTodayString() + " " + de.memtext.util.DateUtils.getNowString();
        this.actionLog.append(log + "- " + actionLog + EtlUtils.NEWLINE);
    }

    protected void setReturnCode(int returnCode) {
        this.returnCode = returnCode;
    }

    /**
     * Base implementation: records the end time and reports success.
     * Subclasses override this with the real work.
     *
     * @return 0
     */
    public int execute() throws SQLException, Exception {
        endtime = new java.util.Date().getTime();
        return 0;
    }

    /**
     * Returns the given connection, or opens a new one from propfile
     * (falling back to WEB-INF/db.properties) when null is passed.
     */
    public Connection getConnection(Connection myConnection) throws Exception {
        if (myConnection == null) {
            if (propfile == null)
                propfile = de.superx.etl.EtlUtils.WEBINFDIR + de.superx.etl.EtlUtils.PATHSEP + "db.properties";
            SxConnection mySxConnection = new SxConnection();
            mySxConnection.setPropfile(propfile);
            myConnection = mySxConnection.getConnection();
        }
        return myConnection;
    }
}

235
src/de/superx/etl/EtlActionJob.java

@ -0,0 +1,235 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.IOException;
import java.io.PrintStream;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;
import java.util.Properties;
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowCallbackHandler;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import de.memtext.rights.RightsKeyEntry;
import de.superx.common.SuperX_el;
import de.superx.common.SxResultRow;
import de.superx.common.SxResultSet;
import de.superx.common.SxSqlHelper;
import de.superx.servlet.SxPools;
import de.superx.util.SqlStringUtils;
import freemarker.template.TemplateBooleanModel;
// Runs a complete ETL job: initJob() loads the job row and its declared
// parameters from etl_job / etl_job_param, execute() runs the job's steps
// (etl_step via etl_step_relation) in sortnr order and collects their XML
// output.
public class EtlActionJob extends EtlAction {
private String mandantenID;
private String jobTid;
private String jobUniquename;
private String jobLogfile;
private String jobCaption;
// Parameters passed in at run time; merged over the declared defaults in initJob().
private Properties runTimeParams;
// Collects the XML result document produced by the steps.
private StringWriter sw;
public EtlActionJob(String uniquename, String logfile, String propfile) {
super(uniquename, logfile, propfile);
sw=new StringWriter();
}
public EtlActionJob(String uniquename) {
super(uniquename);
sw=new StringWriter();
}
public StringWriter getSw() {
return sw;
}
public void setSw(StringWriter sw) {
this.sw = sw;
}
// Parses a newline-separated "key=value" string into runTimeParams (no-op when null).
public void setRunTimeParams(String runTimeParamsArg) throws IOException {
if(runTimeParamsArg!=null)
this.runTimeParams = EtlUtils.convertStringToProperty(runTimeParamsArg);
}
// Loads job metadata and parameter defaults for the given job uniquename,
// then merges the caller-supplied runtime parameters over the defaults.
// Throws when the job name is invalid/unknown or the lookup SQL fails.
public void initJob(String job,String runtimeParamsArg) throws Exception
{
long jetzt = new java.util.Date().getTime() ;
EtlUtils.initJobEnvironment();
String paramName=null;
String paramCaption;
String paramDefault="";
String paramProperties="";
if(runtimeParamsArg==null)
runtimeParamsArg="";
// SUPERX_DIR is always provided as an implicit runtime parameter.
runtimeParamsArg="SUPERX_DIR="+EtlUtils.SUPERX_DIR+EtlUtils.NEWLINE+runtimeParamsArg;
//super.addActionLog("runtimeParamsArg:"+runtimeParamsArg);
this.setStarttime(jetzt);
Connection myConnection=this.getDbconnection();
if(myConnection==null)
myConnection=this.getConnection(myConnection);
this.setDbconnection(myConnection);
// Guards against SQL injection via the job name before the string concatenation below.
if(!SqlStringUtils.checkValidKeyEntry(job))
throw new SQLException("Invalid job "+job);
String sql = "select J.tid,J.uniquename,J.caption,J.logfile, P.uniquename as param_name,P.name as param_caption, P.param_default from etl_job J left outer join etl_job_param P on (J.tid=P.etl_job_id) where J.uniquename='"+job+"';";
SuperX_el el = new SuperX_el();
SxSqlHelper sh=new SxSqlHelper();
sh.execute(sql, myConnection, el);
if (el.getError_String() != null
&& !el.getError_String().trim().equals(""))
throw new SQLException("\nProblem bei Job DETAILS:" + "\n\n Meldung:"
+ el.getError_String() + "\n sql:" + sql);
SxResultSet result= el.getResultSet();
int rownr=0;
// One row per job parameter (left outer join: a job without params yields
// a single row with null parameter columns).
for (Iterator it = result.iterator(); it.hasNext();) {
rownr++;
SxResultRow row = (SxResultRow) it.next();
jobTid=row.get(0).toString().trim();
jobUniquename=row.get(1).toString().trim();
jobCaption=row.get(2).toString().trim();
jobLogfile=row.get(3).toString().trim();
if(row.get(4)!=null)
paramName=row.get(4).toString().trim();
if(row.get(5)!=null)
paramCaption=row.get(5).toString().trim();
// NOTE(review): guards on column 4 (param_name) but reads column 6
// (param_default) -- NPE if param_default is NULL while a name is set;
// confirm whether this should test row.get(6) instead.
if(row.get(4)!=null)
paramDefault=row.get(6).toString().trim();
if(paramName!=null)
paramProperties+=(paramName+"="+paramDefault+EtlUtils.NEWLINE);
}
if(rownr==0)
throw new Exception("Job "+job+" unbekannt");
//this.setLogfile(jobLogfile);
//this.appendLog("Job gefunden: "+jobCaption+EtlUtils.NEWLINE);
super.addActionLog("Job gefunden: "+jobCaption+EtlUtils.NEWLINE);
if(!paramProperties.equals(""))
this.setParams(EtlUtils.convertStringToProperty(paramProperties));
if(runtimeParamsArg!=null)
{
this.setRunTimeParams(runtimeParamsArg);
super.addActionLog("Runtime Params: "+runtimeParamsArg);
}
// Runtime parameters override the declared defaults.
if(runTimeParams!=null)
this.setParams(de.superx.etl.EtlUtils.mergeParamProperties(this.getParams(), runTimeParams));
super.addActionLog("Job "+jobCaption+ " initialized");
}
// Executes all steps of the initialized job in R.sortnr order and writes the
// combined XML output to outfile (or stdout when outfile is null/empty).
// Returns 0 on success, 1 when a step fails without force_continue or the
// output file cannot be written.
public int execute(String outfile) throws SQLException, Exception
{
Integer stepTid;
String stepUniquename;
String stepCaption;
String stepLogfile;
Integer stepSortnr;
Integer stepForceContinue;
Integer stepParentStepId;
String stepTypeUniquename;
String stepTypeCaption;
String stepTypeHandler;
int jobReturnCode=0;
// NOTE(review): the ::integer cast is Postgres-specific syntax -- this query
// will not run on Informix; confirm jobs execute on Postgres only.
String sql = "select S.tid,"+
"S.uniquename,"+
"S.caption,"+
"S.systeminfo_id,"+
"R.sortnr,"+
"R.force_continue::integer as force_continue,"+
"R.parent_step_id,"+
"S.logfile,"+
"T.uniquename,"+
"T.caption,"+
"T.handler";
sql +=" FROM etl_step S, etl_step_type T, etl_step_relation R where T.tid=S.step_type_id and R.step_id=S.tid and R.job_id="+jobTid+" order by R.sortnr,R.tid;";
SuperX_el el = new SuperX_el();
SxSqlHelper sh=new SxSqlHelper();
try {
// NOTE(review): el.getError_String() may still be null in the catch below
// (the helper threw before filling it) -- possible NPE; confirm.
sh.execute(sql, this.getConnection(null), el);
} catch (Exception e1) {
super.addActionLog("Fehler beim SQL: "+sql+EtlUtils.NEWLINE+el.getError_String().trim());
return 1;
}
if (el.getError_String() != null
&& !el.getError_String().trim().equals(""))
throw new SQLException("\nProblem bei Steps DETAILS:" + "\n\n Meldung:"
+ el.getError_String() + "\n sql:" + sql);
SxResultSet result= el.getResultSet();
int rownr=0;
super.addActionLog("Steps found: "+ result.size());
// for now without a timestamp: executionTime=\""+de.memtext.util.DateUtils.getTodayString()+" " + de.memtext.util.DateUtils.getNowString()+"\"
sw.write("<?xml version=\"1.0\" encoding=\""+SqlStringUtils.getEncoding()+"\" ?><etlAction name=\""+this.jobUniquename+"\">\n");
for (Iterator it = result.iterator(); it.hasNext();) {
int stepReturnCode=0;
rownr++;
SxResultRow row = (SxResultRow) it.next();
stepTid=(Integer) row.get(0);
stepUniquename=row.get(1).toString().trim();
stepCaption=row.get(2).toString().trim();
stepSortnr=(Integer) row.get(4);
stepForceContinue=(Integer) row.get(5);
stepParentStepId=(Integer) row.get(6);
stepLogfile=row.get(7).toString().trim();
stepTypeUniquename=row.get(8).toString().trim();;
stepTypeCaption=row.get(9).toString().trim();;
stepTypeHandler=row.get(10).toString().trim();;
// Each step reuses the job's logfile/propfile and shares the output writer.
EtlStep myStep=new EtlStep(stepUniquename,jobLogfile,propfile);
myStep.setOutputStringWriter(sw);
myStep.initStep(jobUniquename, this.getParams(), stepTid,
stepUniquename,
stepLogfile,
stepCaption,
stepSortnr,
stepForceContinue,
stepParentStepId,
stepTypeUniquename,
stepTypeCaption,
stepTypeHandler);
stepReturnCode=myStep.execute();
super.addActionLog(myStep.getActionLog().toString());
sw=myStep.getOutputStringWriter();
// NOTE(review): the loop breaks on any failing step, even when
// force_continue=1 -- only the job return code differs; confirm intended.
if(stepReturnCode!=0)
{
if(stepForceContinue.intValue()==0)
jobReturnCode=1;
break;
}
}
if(rownr==0)
throw new Exception("Steps unbekannt");
sw.write("\n</etlAction>\n");
if(outfile==null || outfile.equals(""))
{
System.out.println(sw.toString());
System.out.println(super.getActionLog());
}
else
{
try {
de.superx.etl.EtlUtils.saveFileContentsWithEncoding(outfile, sw.toString(), null);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
jobReturnCode=1;
}
}
long jetzt = new java.util.Date().getTime() ;
this.setEndtime(jetzt);
return jobReturnCode;
}
}

239
src/de/superx/etl/EtlStep.java

@ -0,0 +1,239 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.InputStream;
import java.io.PrintStream;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.Properties;
import java.util.logging.Logger;
import javax.sql.DataSource;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowCallbackHandler;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import de.superx.etl.ActionHandler.*;
import de.memtext.rights.RightsKeyEntry;
import de.memtext.util.GetOpts;
import de.superx.bin.Doquery;
import de.superx.bin.SxConnection;
import de.superx.common.SuperX_el;
import de.superx.common.SxResultRow;
import de.superx.common.SxResultSet;
import de.superx.common.SxSqlHelper;
import de.superx.servlet.SxPools;
import de.superx.util.SqlStringUtils;
import freemarker.template.TemplateBooleanModel;
/**
 * One ETL step of a job. A step is loaded from the etl_step tables, its
 * properties are read from etl_step_property (with $param substitution from
 * the job's runtime parameters), and execution is delegated via reflection
 * to the handler class stored in the step type (stepTypeHandler).
 */
public class EtlStep extends EtlAction {
	// NOTE(review): mandantenID, stepSxConnection and stepConnection are
	// currently unused in this class; kept for compatibility with older code.
	private String mandantenID;
	private Integer stepTid;
	private String stepUniquename;
	private String stepLogfile;
	private String stepCaption;
	private Integer stepSortnr;
	private Integer stepForceContinue;
	private Integer stepParentStepId;
	private String stepTypeUniquename;
	private String stepTypeCaption;
	private String stepTypeHandler;
	// runtime parameters inherited from the enclosing job
	private Properties jobRunTimeParams;
	// step properties read from etl_step_property in initStep()
	private Properties stepProperties;
	private SxConnection stepSxConnection;
	private Connection stepConnection;
	// shared output writer; the job threads one StringWriter through all steps
	private StringWriter sw;
	public EtlStep(String uniquename, String logfile, String propfile) {
		super(uniquename, logfile, propfile);
		super.addActionLog("Step "+uniquename +" loaded");
	}
	public StringWriter getOutputStringWriter() {
		return sw;
	}
	public void setOutputStringWriter(StringWriter sw) {
		this.sw = sw;
	}
	/**
	 * Initializes this step: caches the metadata passed in from the job's
	 * step query, ensures a database connection, and loads the step's
	 * properties from etl_step_property (values run through parseParams()).
	 *
	 * @throws SQLException when the job name is not a valid key entry or
	 *         reading the step properties reports an error
	 */
	public void initStep(String job,Properties runTimeParams, Integer tid,
			String uniquename,
			String logfile,
			String caption,
			Integer sortnr,
			Integer forceContinue,
			Integer parentStepId,
			String typeUniquename,
			String typeCaption,
			String typeHandler
			) throws Exception
	{
		jobRunTimeParams=runTimeParams;
		stepTypeHandler=typeHandler;
		stepTid=tid;
		stepUniquename=uniquename;
		stepLogfile=logfile;
		stepCaption=caption;
		stepSortnr=sortnr;
		stepForceContinue=forceContinue;
		stepParentStepId=parentStepId;
		stepTypeUniquename=typeUniquename;
		stepTypeCaption=typeCaption;
		this.setLogfile(logfile);
		super.addActionLog("Initializing Step: "+caption);
		super.addActionLog("Handler: "+typeHandler);
		// reuse the existing connection if the job already opened one
		Connection myConnection=this.getDbconnection();
		if(myConnection==null)
			myConnection=this.getConnection(myConnection);
		this.setDbconnection(myConnection);
		// guards against SQL injection via the job name
		if(!SqlStringUtils.checkValidKeyEntry(job))
			throw new SQLException("Invalid job");
		String sql = "select tid,prop_name,prop_value from etl_step_property where etl_step_id="+tid.toString()+";";
		SuperX_el el = new SuperX_el();
		SxSqlHelper sh=new SxSqlHelper();
		sh.execute(sql, myConnection, el);
		if (el.getError_String() != null
				&& !el.getError_String().trim().equals(""))
			throw new SQLException("\nProblem bei Step DETAILS:" + "\n\n Meldung:"
					+ el.getError_String() + "\n sql:" + sql);
		SxResultSet result= el.getResultSet();
		stepProperties=new Properties();
		for (Iterator it = result.iterator(); it.hasNext();) {
			SxResultRow row = (SxResultRow) it.next();
			String paramName=row.get(1).toString().trim();
			String paramValue=row.get(2).toString().trim();
			String paramValueParsed=parseParams(paramValue);
			stepProperties.setProperty(paramName, paramValueParsed);
			super.addActionLog("Parameter "+paramName+":"+paramValueParsed);
		}
	}
	/**
	 * Replaces every occurrence of "$&lt;runtime param name&gt;" in the given
	 * property value with the corresponding runtime parameter value.
	 */
	private String parseParams(String paramValue)
	{
		String parsedParam=paramValue;
		Enumeration runTimeParamNames = jobRunTimeParams.propertyNames();
		while (runTimeParamNames.hasMoreElements()) {
			String runTimeParamName = (String)runTimeParamNames.nextElement();
			String runTimeParamValue = jobRunTimeParams.getProperty(runTimeParamName);
			parsedParam=de.memtext.util.StringUtils.replace(parsedParam,"$"+runTimeParamName, runTimeParamValue);
		}
		return parsedParam;
	}
	/**
	 * Executes this step by instantiating the configured handler class
	 * (stepTypeHandler) via reflection and delegating to its execute method.
	 * Handler exceptions are caught and logged, not rethrown.
	 *
	 * @return 0 on success, 1 when the handler threw an exception
	 * @throws Exception when the handler class cannot be loaded/instantiated
	 */
	public int execute()
			throws Exception
	{
		int returnCode=0;
		Logger logger = Logger.getLogger(EtlStep.class.toString());
		super.addActionLog("Executing Step "+this.stepUniquename);
		Class handlerClass = Class.forName(stepTypeHandler);
		EtlActionHandlerI handler = (EtlActionHandlerI) handlerClass.newInstance();
		handler.setLogger(logger);
		handler.setHandlerSpecificProperties(stepProperties);
		handler.setRunTimeParams(jobRunTimeParams);
		handler.setOutputStringWriter(this.getOutputStringWriter());
		handler.setPropFile(this.getPropfile());
		try {
			returnCode=handler.execute(this.getOutputStringWriter(),"default",this.stepUniquename,"xml");
		} catch (Exception e) {
			returnCode=1;
			super.addActionLog("Fehler beim Step "+ this.stepUniquename+ ": "+e.toString());
			//TODO evaluate forceContinue
		}
		// the handler may have replaced the writer; take over its state and log
		this.setOutputStringWriter(handler.getOutputStringWriter());
		super.addActionLog(handler.getLogOutput());
		return returnCode;
	}
}

336
src/de/superx/etl/EtlUtils.java

@ -0,0 +1,336 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.UnsupportedEncodingException;
import java.net.URISyntaxException;
import java.util.Enumeration;
import java.util.NoSuchElementException;
import java.util.Properties;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import de.superx.servlet.SuperXManager;
/**
*/
/**
 * Static helper routines for the ETL package: property handling, path
 * discovery (WEB-INF / SUPERX_DIR), file I/O with explicit encoding, and
 * small XML/XPath utilities.
 */
public class EtlUtils {
	public static final String NEWLINE=System.getProperty("line.separator");
	public static final String PATHSEP=File.separator;
	public static String WEBINFDIR=SuperXManager.getWEB_INFPfad();
	public static String SUPERX_DIR=(System.getProperties().containsKey("SUPERX_DIR")? System.getProperty("SUPERX_DIR"):"");
	/**
	 * Ad-hoc developer smoke test with a hard-coded file path; not used by
	 * the ETL runtime itself.
	 */
	public static void main(String args[]) {
		try {
			String tidInXmlFile="";
			Document mydomres =de.superx.etl.EtlUtils.buildDocumentFromXmlFile("/home/superx/devel_module/community/tomcat/temp/myTempFile6460222908896375059.xml");
			XPathFactory factory = new net.sf.saxon.xpath.XPathFactoryImpl();
			XPath xPath = factory.newXPath();
			//String searchPath="/etlAction[@name=\"sx_select_mask\"]/unload/row/fld[@name=\"tid\"]";
			String searchPath="/etlAction[@name=\"sx_select_mask\"]/unload[@name=\"unload_maskeninfo\"]/rs/row/fld[@name=\"tid\"]";
			Node tidNode=(Node) xPath.compile(searchPath).evaluate(
					mydomres, XPathConstants.NODE);
			if(tidNode!=null)
				tidInXmlFile=de.memtext.util.XMLUtils.getTheValue(tidNode);
			else
				System.out.println("node not found");
			System.out.println(tidInXmlFile);
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (XPathExpressionException e) {
			e.printStackTrace();
		} catch (ParserConfigurationException e) {
			e.printStackTrace();
		} catch (SAXException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
	}
	/**
	 * Parses a java.util.Properties-format string ("key=value" lines) into
	 * a Properties object.
	 */
	public static Properties convertStringToProperty(String inp) throws IOException
	{
		Properties myProps = new Properties();
		myProps.load(new StringReader(inp));
		return myProps;
	}
	/**
	 * Merges runtime parameters into the given params: a runtime parameter
	 * with the same name overwrites the value, and "$name" placeholders
	 * inside values are substituted. Modifies and returns params.
	 */
	public static Properties mergeParamProperties(Properties params, Properties runTimeParams)
	{
		Enumeration runTimeParamNames = runTimeParams.propertyNames();
		while (runTimeParamNames.hasMoreElements()) {
			String runTimeParamName = (String)runTimeParamNames.nextElement();
			String runTimeParamValue = runTimeParams.getProperty(runTimeParamName);
			Enumeration paramNames = params.propertyNames();
			while (paramNames.hasMoreElements()) {
				String paramName=(String)paramNames.nextElement();
				String paramValue=params.getProperty(paramName);
				if(paramName.equals(runTimeParamName) )
				{
					paramValue=runTimeParamValue;
					params.setProperty(paramName, paramValue);
				}
				if(paramValue.indexOf("$"+runTimeParamName)>-1)
				{
					paramValue=de.memtext.util.StringUtils.replace(paramValue,"$"+runTimeParamName, runTimeParamValue);
					params.setProperty(paramName, paramValue);
				}
			}
		}
		return params;
	}
	/**
	 * Initializes WEBINFDIR and SUPERX_DIR when they are not provided as
	 * system properties, deriving them from the servlet environment or the
	 * location of this class/jar.
	 */
	public static void initJobEnvironment()
	{
		String initVar="";
		if(System.getProperty("WEBINFDIR") ==null)
		{
			try {
				initVar=de.superx.servlet.SuperXManager.getWEB_INFPfad();
				if(initVar==null || initVar.equals("."))
				{
					//determine WEB-INF dir from the code location instead
					initVar=getWebinfDirectory();
				}
			} catch (Exception e) {
				// do nothing, try another
			}
			WEBINFDIR=initVar;
		}
		if(System.getProperty("SUPERX_DIR") ==null)
		{
			SUPERX_DIR=WEBINFDIR+PATHSEP+"conf"+PATHSEP+"edustore";
		}
	}
	/** Returns the file name of the code source this class was loaded from. */
	private static String getJarName()
	{
		return new File(EtlUtils.class.getProtectionDomain()
				.getCodeSource()
				.getLocation()
				.getPath())
				.getName();
	}
	/** True when this class was loaded from a jar file (vs. a classes dir). */
	private static boolean runningFromJar()
	{
		String jarName = getJarName();
		return jarName.contains(".jar");
	}
	/**
	 * Returns the WEB-INF directory, derived from the location of this
	 * class (classes dir or jar file).
	 */
	public static String getWebinfDirectory() throws URISyntaxException
	{
		if (runningFromJar())
		{
			return getWebinfDirectoryFromJar();
		} else
		{
			return getWebinfDirectoryFromClass();
		}
	}
	private static String getWebinfDirectoryFromClass()
	{
		File f= new File(EtlUtils.class.getProtectionDomain()
				.getCodeSource()
				.getLocation()
				.getPath()+PATHSEP+"..");
		String class_path=f.getAbsolutePath();
		return class_path;
	}
	private static String getWebinfDirectoryFromJar() throws URISyntaxException
	{
		String pathOfJarFile=new File(EtlUtils.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath()).getParent();
		String webinfDir=new File(pathOfJarFile+PATHSEP+"..").getAbsolutePath();
		return webinfDir;
	}
	/* The following 2 methods
	 * getFileContentsWithEncoding
	 * saveFileContentsWithEncoding
	 * exist in kern5.0 in de.superx.util.FileUtils; copied here for
	 * backward compatibility, should eventually be removed again.
	 */
	/**
	 * Reads a text file with the given encoding and returns its contents,
	 * with line separators normalized to the platform separator.
	 * Returns null when the file does not exist; I/O errors are printed
	 * and may yield a partial result.
	 *
	 * @param filePath path of the file to read
	 * @param encoding charset name; null/empty falls back to file.encoding
	 */
	public static String getFileContentsWithEncoding(String filePath, String encoding) {
		File f = new File(filePath);
		if (!f.exists()) {
			System.out.println("Fehler: Datei " + filePath + " existiert nicht.");
			return null;
		}
		String fileContents = "";
		if (encoding == null || encoding.trim().equals("")) {
			encoding = System.getProperty("file.encoding");
		}
		try {
			//BufferedReader is faster for large files
			BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(f), encoding));
			try {
				StringWriter out = new StringWriter();
				String text = in.readLine();
				while (text != null) { // file not empty
					out.write(text);
					out.write(System.getProperty("line.separator"));
					text = in.readLine();
				}
				fileContents = out.toString();
			} finally {
				// BUGFIX: the reader was never closed before (descriptor leak)
				in.close();
			}
		} catch (FileNotFoundException e) {
			e.printStackTrace();
		} catch (UnsupportedEncodingException e) {
			e.printStackTrace();
		} catch (IOException e) {
			e.printStackTrace();
		}
		return fileContents;
	}
	/**
	 * Writes the given string to a file using the given encoding
	 * (null falls back to the platform default file.encoding).
	 */
	public static void saveFileContentsWithEncoding(String filename, String contents, String encoding) throws
	FileNotFoundException,
	IOException
	{
		File f = new File(filename);
		if (encoding == null) encoding = System.getProperty("file.encoding");
		BufferedWriter out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), encoding));
		try {
			out.write(contents);
		} finally {
			// BUGFIX: close in finally so the writer is not leaked when write() throws
			out.close();
		}
	}
	/** Builds a DOM Document from an XML file. */
	public static Document buildDocumentFromXmlFile(String srcFile)
			throws ParserConfigurationException, FileNotFoundException, SAXException, IOException {
		Document mydomres;
		DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance();
		DocumentBuilder builder = dfactory.newDocumentBuilder();
		FileInputStream in = new FileInputStream(srcFile);
		try {
			org.xml.sax.InputSource is=new org.xml.sax.InputSource(in);
			mydomres = builder.parse(is);
		} finally {
			// BUGFIX: the input stream was never closed before
			in.close();
		}
		return mydomres;
	}
	/***
	 * TODO: after merging into master delete this method and refer to de.memtext.util.XMLUtils.parseXml(String)
	 * @author Witt This function parses XML-containing string into documents while
	 * preserving the namespaces and is primarily meant to be used withing
	 * (jUnit) test cases
	 * @param xmlString
	 * @return
	 * @throws ParserConfigurationException
	 * @throws SAXException
	 * @throws IOException
	 */
	public static Document parseXml(String xmlString) throws ParserConfigurationException, SAXException, IOException {
		DocumentBuilderFactory myFactory = DocumentBuilderFactory.newInstance();
		myFactory.setNamespaceAware(true);
		DocumentBuilder myBuilder;
		myBuilder = myFactory.newDocumentBuilder();
		Document myDocument = myBuilder.parse(new InputSource(new StringReader(xmlString)));
		return myDocument;
	}
	/**
	 * Checks whether the node selected by the XPath expression in the given
	 * XML string has exactly the expected (trimmed) value.
	 *
	 * @param src XML document as string
	 * @param searchPath XPath expression selecting a single node
	 * @param expectedValue value to compare against
	 * @return true when the node exists and its value matches
	 */
	public static boolean isNodeValueInXml(String src, String searchPath, String expectedValue)
			throws ParserConfigurationException, SAXException, IOException, XPathExpressionException {
		boolean b=false;
		Document mydomres =de.superx.etl.EtlUtils.parseXml(src);
		XPathFactory factory = new net.sf.saxon.xpath.XPathFactoryImpl();
		XPath xPath = factory.newXPath();
		Node myNode=(Node) xPath.compile(searchPath).evaluate(
				mydomres, XPathConstants.NODE);
		if(myNode!=null)
		{
			String foundValue=de.memtext.util.XMLUtils.getTheValue(myNode);
			if(!(foundValue==null) && foundValue.trim().equals(expectedValue))
				b=true;
		}
		return b;
	}
	/** Maps a numeric return code to a German status label (0 = "Erfolg"). */
	public static String translateReturnCode(int returnCode)
	{
		String returnString="Fehlerhaft";
		if(returnCode==0)
			returnString="Erfolg";
		return returnString;
	}
}

437
src/de/superx/etl/QueryResultSerializer.java

@ -0,0 +1,437 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.BufferedOutputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.JDBCType;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import java.util.logging.Level;
import de.memtext.util.StringUtils;
import de.superx.bin.SxConnection;
import de.superx.bin.SxDBUtils;
import de.superx.bin.SxJdbcClient;
import java.util.Iterator;
import java.util.StringTokenizer;
import de.superx.etl.bin.SxTransformer;
import de.superx.common.SxResultSet;
import de.superx.servlet.ServletUtils;
import de.superx.servlet.SxSQL_Server;
import de.superx.util.SqlStringUtils;
/**
* @author Daniel Quathamer Projektgruppe SuperX
* doquery.java
* @
* Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.<br>
* Gebrauch:<br> java doquery <Pfad zu logger-properties> <pfad zu db.properties> <sql-Ausdruck> <Ausgabeformat (txt | html | xml)>(optional) <delimiter> <mit Spaltenüberschriften (true | false)>(optional) <Ausgabedatei>(optional)
*
*/
/*
* SQL-Abfragen Ergebnis serialisieren nach CSV / XML
*/
/**
 * Serializes the result of an SQL query (or an already-open ResultSet) to
 * CSV ("txt") or XML, writing either to a StringWriter or a FileWriter
 * depending on which constructor was used. The XML form is
 * &lt;unload&gt;&lt;query&gt;...&lt;/query&gt;&lt;rs&gt;&lt;rsmd&gt;...&lt;/rsmd&gt;&lt;row&gt;...&lt;/row&gt;...&lt;/rs&gt;&lt;/unload&gt;.
 */
public class QueryResultSerializer {
// Constructor: run the given query later, write XML/CSV into a StringWriter.
public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, String query,StringWriter sw) {
super();
this.mySxConnection = mySxConnection;
this.query = prepareQuery(query);
this.mandantenId = mandantenId;
this.logger = Logger.getLogger("superx_" + mandantenId);
this.outputString=sw;
}
// Constructor: run the given query later, write into a FileWriter.
public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, String query,FileWriter fw) {
super();
this.mySxConnection = mySxConnection;
this.query = prepareQuery(query);
this.mandantenId = mandantenId;
this.logger = Logger.getLogger("superx_" + mandantenId);
this.outputFile=fw;
}
// Constructor: serialize an already-open ResultSet into a FileWriter.
public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, ResultSet rs,FileWriter fw) {
super();
this.mySxConnection = mySxConnection;
this.myrs=rs;
this.mandantenId = mandantenId;
this.logger = Logger.getLogger("superx_" + mandantenId);
this.outputFile=fw;
}
// Constructor: serialize an already-open ResultSet into a StringWriter.
public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, ResultSet rs,StringWriter sw) {
super();
this.mySxConnection = mySxConnection;
this.myrs=rs;
this.mandantenId = mandantenId;
this.logger = Logger.getLogger("superx_" + mandantenId);
this.outputString=sw;
}
public final String NEWLINE=System.getProperty("line.separator");
// NOTE(review): several of the following fields (outfile, CSVdelim,
// printColNames, outFormat, st, myConnection, outString, outputHeader,
// outputFooter, outputStream) are not read by the code in this class.
private String outfile;
private SxConnection mySxConnection;
private String query;
private String CSVdelim;
private Boolean printColNames;
private String outFormat;
private Logger logger;
private String mandantenId="default";
private Statement st;
private Connection myConnection;
private String[] colnames;
private int[] coltypes;
private int columnCount;
private StringBuffer outString=new StringBuffer("");
//private PrintStream stringOutWriter=new PrintStream(outString);
//private PrintWriter output=new PrintWriter(stringOutWriter);
private String outputHeader="";
private String outputFooter="";
private BufferedOutputStream outputStream;
public StringWriter outputString;
public FileWriter outputFile;
private int returnCode;
private ResultSet myrs;
public String getOutfile() {
return outfile;
}
public void setOutfile(String outfile) {
this.outfile = outfile;
}
public SxConnection getMySxConnection() {
return mySxConnection;
}
public void setMyConnection(SxConnection mySxConnection) {
this.mySxConnection = mySxConnection;
}
public String getQuery() {
return query;
}
public void setQuery(String query) {
this.query = query;
}
public String getOutFormat() {
return outFormat;
}
public void setOutFormat(String outFormat) {
this.outFormat = outFormat;
}
public Logger getLogger() {
return logger;
}
public void setLogger(Logger logger) {
this.logger = logger;
}
public String getMandantenId() {
return mandantenId;
}
public void setMandantenId(String mandantenId) {
this.mandantenId = mandantenId;
}
public String[] getColnames() {
return colnames;
}
public void setColnames(String[] colnames) {
this.colnames = colnames;
}
public int[] getColtypes() {
return coltypes;
}
public void setColtypes(int[] coltypes) {
this.coltypes = coltypes;
}
public StringWriter getOutputString() {
return outputString;
}
public void setOutputString(StringWriter output) {
this.outputString = output;
}
public FileWriter getOutputFile() {
return outputFile;
}
public void setOutputFile(FileWriter outputFile) {
this.outputFile = outputFile;
}
public int getColumnCount() {
return columnCount;
}
public void setColumnCount(int columnCount) {
this.columnCount = columnCount;
}
// 0 = success; set to 1 when printResult threw during serialization
public int getReturnCode() {
return returnCode;
}
public void setReturnCode(int returnCode) {
this.returnCode = returnCode;
}
protected ResultSet getMyrs() {
return myrs;
}
protected void setMyrs(ResultSet myrs) {
this.myrs = myrs;
}
/**
 * Executes the query (unless a ResultSet was already supplied), then
 * serializes the result. Errors during serialization set returnCode to 1
 * (check getReturnCode()); the row count is returned either way.
 *
 * @param uniquename name written into the XML unload element
 * @param outformat "xml" or CSV otherwise
 * @param delim CSV field delimiter (ignored for xml)
 * @param printColname whether to emit column headers / rsmd metadata
 * @return number of serialized rows
 */
public long unloadQueryResults(String uniquename, String outformat,String delim,boolean printColname) throws Exception
{
returnCode=0;
long numberOfRows=0;
String zs="";
if(myrs==null)
{
SxResultSet rs = null;
SxJdbcClient myClient = new SxJdbcClient(logger, mySxConnection.getPropfile(), query);
//logger.info("Executing sql: " + query);
myClient.Rs_executeALL();
logger.info("Getting resultset") ;
myrs = myClient.getRs();
//logger.info("Starting output for: " + query);
}
if(myrs != null )
{
ResultSetMetaData rsmd = myrs.getMetaData();
setColnamesAndTypes(rsmd);
if(outformat.equalsIgnoreCase("xml"))
{
// XML header: unload element with the query embedded as CDATA
zs+="<unload name=\""+uniquename+"\"><query><![CDATA["+query+"]]></query><rs>";
if(printColname)
zs+=this.getColnamesXML();
}
else
{
//CSV:
if(printColname)
zs+=this.getColnamesCSV(delim)+"\n";
}
//this.outputStream.w
//StringWriter sw = new StringWriter();
//sw.write(zs);
//this.setOutputString(sw);
try {
numberOfRows=printResult(outformat,zs,myrs,delim);
} catch (Exception e) {
e.printStackTrace();
returnCode=1;
}
}
return numberOfRows;
}
/**
 * Streams all rows of the ResultSet to the configured writer (file writer
 * when outputFile is set, otherwise the string writer), framed by the
 * given header and a format-dependent footer. File output is flushed
 * every 10000 rows. Closes the ResultSet and the writer when done.
 *
 * @return number of rows written
 */
private long printResult(String outformat,String header,ResultSet outrs,String delim) throws SQLException, IOException
{
StringBuffer line = new StringBuffer();
int types[]=this.getColtypes();
String lbl[]=this.getColnames();
String f_wert=null;
Object o = null;
int rowCounterBeforeFlush = 0;
long rowCounter = 0;
FileWriter fw = null;
StringWriter sw=null;
String rowDelim1=(outformat.equalsIgnoreCase("xml"))?"<row>":"";
String rowDelim2=(outformat.equalsIgnoreCase("xml"))?"</row>"+NEWLINE:"\n"; //for CSV the row separator is always \n, never the DOS variant
String footer=(outformat.equalsIgnoreCase("xml"))?"</rs></unload>":"";
boolean writeFileOutput=(this.outputFile==null)?false:true;
int colNumber=this.getColumnCount();
if(writeFileOutput)
{
fw=this.getOutputFile();
fw.write(header);
}
else
{
sw=this.getOutputString();
sw.write(header);
}
while (outrs.next())
{
line.setLength(0);
line.append(rowDelim1);
for (int i = 1; i <= colNumber; i++) {
if (types[i-1] == Types.LONGVARCHAR) {
//the Informix driver requires getString
// instead of getObject here!
o = outrs.getString(i);
} else {
o = outrs.getObject(i);
}
//cf. de.superx.common.Maske.getCSV
f_wert = SxDBUtils.field_value(o);
f_wert = prepareFieldValue(outformat,lbl[i-1],f_wert,delim);
if (i < colNumber)
line.append( f_wert );
else {
line.append(f_wert );
line.append(rowDelim2);
}
}
if(writeFileOutput)
{
//fw=this.getOutputFile();
fw.write(line.toString());
rowCounterBeforeFlush++;
if (rowCounterBeforeFlush > 10000) {
de.memtext.util.MemoryUtils.printfree();
fw.flush();
rowCounterBeforeFlush = 0;
}
}
else
{
//sw=this.getOutputString();
sw.write(line.toString());
}
rowCounter++;
} ; //end of while loop
outrs.close();
if(writeFileOutput)
{
if(outformat.equalsIgnoreCase("xml"))
fw.write(footer);
fw.flush();
fw.close();
}
else
{
if(outformat.equalsIgnoreCase("xml"))
sw.write(footer);
sw.close();
this.setOutputString(sw);
}
return rowCounter;
}
/**
 * Caches column labels, JDBC types and the column count from the
 * ResultSet metadata into this object's fields.
 */
private void setColnamesAndTypes(ResultSetMetaData rsmd) throws SQLException
{
int cols = rsmd.getColumnCount();
int[] types = new int[cols];
String[] lbl = new String[cols];
// Print the result column names?
for (int i = 0; i < cols; i++) {
lbl[i] = rsmd.getColumnLabel(i+1);
types[i] = rsmd.getColumnType(i+1);
}
this.setColnames(lbl);
this.setColtypes(types);
this.setColumnCount(cols);
}
// Column header line for CSV output; every name is followed by the delimiter.
private String getColnamesCSV(String delim) {
String headZs = "";
int colNumber = this.getColnames().length;
for (int i = 0; i < colNumber; i++) {
headZs += ( this.getColnames()[i] +delim);
}
return headZs;
}
// Column metadata block (<rsmd>) for XML output: one fld element per column.
private String getColnamesXML() {
String headZs = "<rsmd>";
int colNumber = this.getColnames().length;
for (int i = 0; i < colNumber; i++) {
headZs += "<fld name=\"" + this.getColnames()[i] + "\" type=\""+SxDBUtils.getSqlTypeName(this.getColtypes()[i])+"\" />"+NEWLINE;
}
headZs+="</rsmd>";
return headZs;
}
// Trims the query and strips one pair of surrounding double quotes.
private String prepareQuery(String query)
{
query = query.trim();
//DOS produces unnecessary ".." around the stmt
if (query.length() > 0) {
if (query.startsWith("\""))
query = query.substring(1, query.length());
if (query.endsWith("\""))
query = query.substring(0, query.length() - 1);
}
return query;
}
/**
 * Formats one field value for the chosen output format: for xml wraps it
 * in a fld element (CDATA when it contains markup characters), for txt
 * escapes embedded delimiters and appends the delimiter.
 * NOTE(review): returns "" for any outformat other than "xml"/"txt".
 */
private String prepareFieldValue(String outformat,String fieldName, String f_wert,String delim)
{
String fieldValue="";
if (outformat.equals("xml"))
{
fieldValue+="<fld name=\"" + fieldName + "\">";
f_wert=encodeCDATA(f_wert);
if(f_wert.indexOf("&") > -1 || f_wert.indexOf("<") > -1 || f_wert.indexOf(">") > -1)
fieldValue += "<![CDATA[" + f_wert + "]]>";
else
fieldValue += f_wert;
fieldValue+="</fld>"+NEWLINE;
}
//if the field value happens to contain the delimiter, it is escaped with "\"
if (outformat.equals("txt")) {
fieldValue = SxDBUtils.prepareInformixCsv(f_wert);
if (fieldValue != null && (fieldValue.indexOf(delim) > -1)) fieldValue = de.memtext.util.StringUtils.replace(fieldValue, delim, "\\" + delim);
fieldValue+=delim;
}
return fieldValue;
}
/**
 * For XML export the field content must not contain CDATA markers, as
 * that would break the later import. They are therefore replaced with
 * the placeholders CDATASTART / CDATAEND.
 *
 * @param x field content (null is treated as empty string)
 * @return field content with CDATA markers replaced
 */
private String encodeCDATA(String x)
{
if (x==null) x="";
x = StringUtils.replace(x,"<![CDATA[",
"CDATASTART" );
x = StringUtils.replace(x, "]]>",
"CDATAEND");
return x;
}
}

340
src/de/superx/etl/SqlExecutor.java

@ -0,0 +1,340 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.BufferedOutputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.JDBCType;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import java.util.logging.Level;
import de.memtext.util.DateUtils;
import de.memtext.util.StringUtils;
import de.superx.bin.FMParser;
import de.superx.bin.SxConnection;
import de.superx.bin.SxDBUtils;
import de.superx.bin.SxJdbcClient;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Properties;
import java.util.StringTokenizer;
import de.superx.etl.bin.SxTransformer;
import de.superx.bin.XUpdater;
import de.superx.common.FieldContainer;
import de.superx.servlet.ServletUtils;
import de.superx.servlet.SuperXManager;
import de.superx.servlet.SxPools;
import de.superx.servlet.SxSQL_Server;
import de.superx.util.SqlStringUtils;
/**
* @author Daniel Quathamer Projektgruppe SuperX
* doquery.java
* @
* Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.<br>
* Gebrauch:<br> java doquery <Pfad zu logger-properties> <pfad zu db.properties> <sql-Ausdruck> <Ausgabeformat (txt | html | xml)>(optional) <delimiter> <mit Spaltenüberschriften (true | false)>(optional) <Ausgabedatei>(optional)
*
*/
/*
* SQL-Abfragen Ergebnis serialisieren nach CSV / XML
*/
public class SqlExecutor {

	/** Platform line separator; used when joining script lines read from a file. */
	public final String NEWLINE = System.getProperty("line.separator");

	private SxConnection mySxConnection;
	private String sqlScript;
	private File sqlScriptFile;
	private Logger logger;
	private String mandantenId = "default";
	private Statement st;            // NOTE(review): unused here, kept for compatibility
	private Connection myConnection; // NOTE(review): unused here, kept for compatibility
	private StringBuffer outString = new StringBuffer("");
	private int returnCode;
	private String outfile = "";
	private String CSVdelim = "^";
	private boolean printColNames = false;
	private String outFormat = "txt";
	private long numberOfRows;
	private Properties params;

	/**
	 * Creates an executor for an SQL script passed as a string.
	 *
	 * @param mandantenId tenant id, used to select the logger ("superx_" + id)
	 * @param mySxConnection connection descriptor (provides the db property file)
	 * @param sqlScript the SQL script to execute
	 * @param params optional CLI parameters, substituted as $name in the script (may be null)
	 */
	public SqlExecutor(String mandantenId, SxConnection mySxConnection, String sqlScript, Properties params) {
		super();
		this.mySxConnection = mySxConnection;
		this.sqlScript = sqlScript;
		this.mandantenId = mandantenId;
		this.logger = Logger.getLogger("superx_" + mandantenId);
		this.params = params;
	}

	/**
	 * Creates an executor reading the SQL script from a file.
	 *
	 * @param mandantenId tenant id, used to select the logger ("superx_" + id)
	 * @param mySxConnection connection descriptor (provides the db property file)
	 * @param sqlScriptFile file containing the SQL script
	 * @param params optional CLI parameters, substituted as $name in the script (may be null)
	 * @throws IOException if the file does not exist or cannot be read
	 */
	public SqlExecutor(String mandantenId, SxConnection mySxConnection, File sqlScriptFile, Properties params) throws IOException {
		super();
		this.mySxConnection = mySxConnection;
		this.mandantenId = mandantenId;
		this.sqlScriptFile = sqlScriptFile;
		this.logger = Logger.getLogger("superx_" + mandantenId);
		this.params = params;
		if (!sqlScriptFile.exists()) {
			throw new IOException("Datei nicht gefunden: " + sqlScriptFile.getAbsolutePath());
		}
		// Read the whole script. StringBuilder avoids the O(n^2) cost of "sql += line";
		// the reader is closed even if readLine() throws (was a leak before).
		StringBuilder sql = new StringBuilder();
		BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(sqlScriptFile)));
		try {
			String text;
			while ((text = in.readLine()) != null) {
				sql.append(NEWLINE).append(text.trim());
			}
		} finally {
			in.close();
		}
		this.sqlScript = sql.toString();
	}

	public SxConnection getMySxConnection() {
		return mySxConnection;
	}
	public void setMyConnection(SxConnection mySxConnection) {
		this.mySxConnection = mySxConnection;
	}
	public String getSqlScript() {
		return sqlScript;
	}
	public void setSqlScript(String sqlScript) {
		this.sqlScript = sqlScript;
	}
	public Logger getLogger() {
		return logger;
	}
	public void setLogger(Logger logger) {
		this.logger = logger;
	}
	public String getMandantenId() {
		return mandantenId;
	}
	public void setMandantenId(String mandantenId) {
		this.mandantenId = mandantenId;
	}
	public String getOutfile() {
		return outfile;
	}
	public void setOutfile(String outfile) {
		this.outfile = outfile;
	}
	public String getCSVdelim() {
		return CSVdelim;
	}
	public void setCSVdelim(String cSVdelim) {
		CSVdelim = cSVdelim;
	}
	public boolean getPrintColNames() {
		return printColNames;
	}
	public void setPrintColNames(boolean printColNames) {
		this.printColNames = printColNames;
	}
	public String getOutFormat() {
		return outFormat;
	}
	public void setOutFormat(String outFormat) {
		this.outFormat = outFormat;
	}
	public int getReturnCode() {
		return returnCode;
	}
	public void setReturnCode(int returnCode) {
		this.returnCode = returnCode;
	}
	public long getNumberOfRows() {
		return numberOfRows;
	}
	public void setNumberOfRows(long numberOfRows) {
		this.numberOfRows = numberOfRows;
	}
	public Properties getParams() {
		return params;
	}
	public void setParams(Properties params) {
		this.params = params;
	}
	public StringBuffer getOutString() {
		return outString;
	}
	public void setOutString(StringBuffer outString) {
		this.outString = outString;
	}

	/**
	 * Executes the SQL script.
	 * <p>
	 * Scripts containing &lt;xupdate&gt; are routed to {@link XUpdater}; scripts marked
	 * with "--freemarker template" are preprocessed by the Freemarker engine first.
	 * If the last statement returns a ResultSet it is serialized either to
	 * {@link #outfile} or into {@link #outString}, in {@link #outFormat}.
	 *
	 * @return 0 on success, non-zero on failure (also available via {@link #getReturnCode()})
	 * @throws Exception on database or template errors
	 */
	public int executeQueries() throws Exception {
		returnCode = 0;
		QueryResultSerializer myQueryResultSerializer = null;
		if (sqlScript.toLowerCase().indexOf("<xupdate>") > -1) {
			// XUpdate document: execute it on a fresh connection.
			// (local renamed from "myConnection" to avoid shadowing the field)
			SxConnection myXupdateConnection = new SxConnection();
			myXupdateConnection.setPropfile(mySxConnection.getPropfile());
			Connection con = myXupdateConnection.getConnection();
			new XUpdater().execute(con, myXupdateConnection.getDatabaseAbbr(), sqlScript, logger);
		} else {
			if (sqlScript.toLowerCase().indexOf("--freemarker template") > -1) {
				// Freemarker preprocessing. map and FieldContainer are only filled in
				// Masken context; here they stay empty.
				HashMap map = new HashMap();
				FieldContainer individualFields = new FieldContainer();
				if (SxPools.hasMandanten()) {
					// servlet mode: full template processor (incl. SQLVARs)
					sqlScript = SxPools.get(mandantenId).getTemplateProcessor().process(map, -1,
							"SQL-Script", sqlScript, individualFields, SxPools.get(mandantenId).getRepository(),
							SxPools.get(mandantenId).getSqlDialect());
				} else {
					// command line: simple parser, TODO no FM scripts with SQLVAR possible yet
					sqlScript = FMParser.simpleParser(mySxConnection.getPropfile(), sqlScript);
				}
				String keepGenerated = System.getProperty("FreemarkerKeepGenerated");
				if (keepGenerated != null && keepGenerated.equalsIgnoreCase("true")) {
					// debugging aid: dump the generated SQL next to the script file
					// (or to a temp file when the script came in as a string)
					FileWriter sqlScriptTmp;
					if (sqlScriptFile == null) {
						File tempFile = File.createTempFile("fm_output", ".sql");
						sqlScriptTmp = new FileWriter(tempFile);
						logger.info("FM generated SQL: " + tempFile.getAbsolutePath());
					} else
						sqlScriptTmp = new FileWriter(sqlScriptFile.getAbsolutePath() + ".tmp.sql");
					try {
						sqlScriptTmp.write(sqlScript);
					} finally {
						sqlScriptTmp.close();
					}
				}
			}
			sqlScript = sqlScript.trim();
			sqlScript = SqlStringUtils.removeComment(sqlScript, "{", "}");
			// strip surrounding double quotes (shell quoting artifacts)
			if (sqlScript.startsWith("\""))
				sqlScript = sqlScript.substring(1, sqlScript.length());
			if (sqlScript.endsWith("\""))
				sqlScript = sqlScript.substring(0, sqlScript.length() - 1);
			// substitute CLI parameters of the form $name
			if (params != null) {
				Enumeration paramEnum = params.propertyNames();
				while (paramEnum.hasMoreElements()) {
					String paramName = (String) paramEnum.nextElement();
					String paramValue = params.getProperty(paramName);
					sqlScript = de.memtext.util.StringUtils.replace(sqlScript, "$" + paramName, paramValue);
				}
			}
			logger.config("Inhalt der sql-Datei: " + NEWLINE + "--" + sqlScript + "--");
			SxJdbcClient myClient = new SxJdbcClient(logger, mySxConnection.getPropfile(), sqlScript);
			myClient.Rs_executeALL();
			ResultSet myrs = myClient.getRs();
			if (myrs != null) {
				// the last statement returned results: serialize them
				if (!outfile.equals("")) {
					FileWriter fw = null;
					try {
						fw = new FileWriter(outfile);
						if (outFormat.equalsIgnoreCase("xml"))
							fw.write("<?xml version=\"1.0\" encoding=\"" + SqlStringUtils.getEncoding() + "\" ?>");
						myQueryResultSerializer = new QueryResultSerializer("default", mySxConnection, myrs, fw);
						numberOfRows = myQueryResultSerializer.unloadQueryResults("DOQUERY", outFormat, this.CSVdelim, printColNames);
						returnCode = myQueryResultSerializer.getReturnCode();
					} catch (IOException e) {
						logger.severe("Fehler beim Erstellen der Datei " + outfile);
						returnCode = 1;
					} finally {
						// BUGFIX: the writer was never closed, so buffered output could be lost
						if (fw != null) {
							try {
								fw.close();
							} catch (IOException e) {
								logger.severe("Fehler beim Erstellen der Datei " + outfile);
								returnCode = 1;
							}
						}
					}
				} else {
					StringWriter sw = new StringWriter();
					if (outFormat.equalsIgnoreCase("xml"))
						sw.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
					myQueryResultSerializer = new QueryResultSerializer("default", mySxConnection, myrs, sw);
					numberOfRows = myQueryResultSerializer.unloadQueryResults("DOQUERY", outFormat, this.CSVdelim, printColNames);
					returnCode = myQueryResultSerializer.getReturnCode();
					outString = myQueryResultSerializer.getOutputString().getBuffer();
				}
			} else
				logger.info("Keine Rückgabewerte aus SQL-Script");
		}
		return returnCode;
	}
}

779
src/de/superx/etl/TableUploader.java

@ -0,0 +1,779 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.Reader;
import java.io.UnsupportedEncodingException;
import java.nio.charset.CodingErrorAction;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Types;
import java.text.ParseException;
import java.util.Iterator;
import java.util.Properties;
import de.superx.util.FileUtils;
import de.superx.util.SqlStringUtils;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPathFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.*;
import org.postgresql.PGConnection;
import org.postgresql.copy.CopyManager;
import de.superx.bin.SxConnection;
import de.superx.bin.SxJdbcClient;
import de.superx.bin.UploadRecords;
import de.memtext.util.DateUtils;
import de.memtext.util.GetOpts;
import de.memtext.util.StringUtils;
import de.memtext.util.XMLUtils;
public class TableUploader {

	private String logfile;                  // NOTE(review): never read in this class, kept for compatibility
	private String dbpropfile;
	private String mode = "stop";            // error handling: stop | exclude-field | transaction | exclude-row
	private String inFormat;                 // "xml" => DOM upload, anything else => CSV
	private String targetTable;
	private String srcFile;
	private boolean header = false;          // CSV source contains a header line
	private String delim = "^";
	private String encoding = SqlStringUtils.getEncoding();
	private String inserts = "";             // "batch" | "simple" | "" (=> Postgres COPY when possible)
	private boolean truncateTargetTable = true;
	private boolean continueAfterError;
	private boolean removeTrailingDelim = true;
	private boolean isPostgres;
	private boolean useBatch = true;
	private static int maxCols = 1000;       // upper bound for target table columns
	private String[] insert_cols = new String[maxCols];
	private int[] insert_types = new int[maxCols];
	private int numberOfColumns;
	public long numberOfRows;
	private int returnCode;
	private String xml_search_path;          // optional XPath selecting the row nodes
	private Connection uploadConnection;
	private DatabaseMetaData dbmd;
	private PreparedStatement pst;

	public TableUploader() {
	}

	public Connection getUploadConnection() {
		return uploadConnection;
	}
	public void setUploadConnection(Connection uploadConnection) {
		this.uploadConnection = uploadConnection;
	}
	public boolean isRemoveTrailingDelim() {
		return removeTrailingDelim;
	}
	public void setRemoveTrailingDelim(boolean removeTrailingDelim) {
		this.removeTrailingDelim = removeTrailingDelim;
	}
	public void setHeader(boolean header) {
		this.header = header;
	}
	public String getDbpropfile() {
		return dbpropfile;
	}
	public void setDbpropfile(String dbpropfile) {
		this.dbpropfile = dbpropfile;
	}
	public String getMode() {
		return mode;
	}
	/** Any value other than stop/exclude-field/transaction is normalized to "exclude-row". */
	public void setMode(String mode) {
		if (!mode.equals("stop") && !mode.equals("exclude-field") && !mode.equals("transaction"))
			mode = "exclude-row";
		this.mode = mode;
	}
	public String getInFormat() {
		return inFormat;
	}
	public void setInFormat(String inFormat) {
		this.inFormat = inFormat;
	}
	public String getTargetTable() {
		return targetTable;
	}
	public void setTargetTable(String targetTable) {
		this.targetTable = targetTable;
	}
	public String getSrcFile() {
		return srcFile;
	}
	public void setSrcFile(String srcFile) {
		this.srcFile = srcFile;
	}
	public String getDelim() {
		return delim;
	}
	/** "tab" becomes a tab character; empty string falls back to the default "^". */
	public void setDelim(String delim) {
		if (delim.equals("tab"))
			delim = "\t";
		if (delim.equals(""))
			delim = "^";
		this.delim = delim;
	}
	public String getEncoding() {
		return encoding;
	}
	public void setEncoding(String encoding) {
		if (encoding == null || encoding.equals(""))
			encoding = "UTF-8";
		this.encoding = encoding;
	}
	public String getInserts() {
		return inserts;
	}
	/** "batch" / "simple" also toggles the batch-insert flag. */
	public void setInserts(String inserts) {
		if (inserts.equalsIgnoreCase("batch"))
			useBatch = true;
		if (inserts.equalsIgnoreCase("simple"))
			useBatch = false;
		this.inserts = inserts;
	}
	public boolean isTruncateTargetTable() {
		return truncateTargetTable;
	}
	public void setTruncateTargetTable(boolean truncateTargetTable) {
		this.truncateTargetTable = truncateTargetTable;
	}
	/** String variant: only "true" (case-insensitive) enables truncation; null disables it. */
	public void setTruncateTargetTable(String truncateTargetTable) {
		this.truncateTargetTable = truncateTargetTable != null
				&& truncateTargetTable.equalsIgnoreCase("true");
	}
	public int getReturnCode() {
		return returnCode;
	}
	public void setReturnCode(int returnCode) {
		this.returnCode = returnCode;
	}
	public boolean isContinueAfterError() {
		return continueAfterError;
	}
	public void setContinueAfterError(boolean continueAfterError) {
		this.continueAfterError = continueAfterError;
	}
	public String getXml_search_path() {
		return xml_search_path;
	}
	public void setXml_search_path(String xml_search_path) {
		this.xml_search_path = xml_search_path;
	}

	/**
	 * Uploads {@link #srcFile} into {@link #targetTable}, optionally deleting the
	 * target rows first. Dispatches to XML or CSV upload depending on {@link #inFormat}.
	 *
	 * @return number of uploaded rows
	 * @throws Exception on any upload error (returnCode is set to 1)
	 */
	public long uploadFile() throws Exception {
		long rows = 0;
		returnCode = 0;
		try {
			if (truncateTargetTable) {
				Statement stm = uploadConnection.createStatement();
				try {
					stm.execute("delete from " + this.targetTable + ";");
				} finally {
					stm.close();
				}
			}
			if (inFormat.equalsIgnoreCase("xml"))
				rows = uploadXML();
			else
				rows = uploadCSV();
		} catch (Exception e) {
			returnCode = 1;
			throw new Exception(e);
		}
		return rows;
	}

	/**
	 * Uploads rows from an XML document. Row nodes are either all &lt;row&gt; elements
	 * or the nodes selected by {@link #xml_search_path}; fields are matched to target
	 * columns via their "name" attribute.
	 *
	 * @return number of uploaded rows
	 */
	private long uploadXML() throws Exception {
		String feld_wert;
		String errmsg = "";
		numberOfRows = 0;
		NodeList rowlist;
		Document mydomres = de.superx.etl.EtlUtils.buildDocumentFromXmlFile(srcFile);
		if (xml_search_path == null) {
			rowlist = mydomres.getElementsByTagName("row");
		} else {
			// Saxon XPath implementation is used explicitly (JDK default lacks features)
			XPathFactory factory = new net.sf.saxon.xpath.XPathFactoryImpl();
			XPath xPath = factory.newXPath();
			rowlist = (NodeList) xPath.compile(xml_search_path).evaluate(
					mydomres, XPathConstants.NODESET);
		}
		Node rownode;
		initializeColumnSchema();
		String insertHead = createPreparedStatementHead();
		pst = uploadConnection.prepareStatement(insertHead);
		if (useBatch)
			pst.clearBatch();
		int anz_rows = rowlist.getLength();
		for (int zeilennr = 0; zeilennr < anz_rows; zeilennr++) {
			// loop over each row of the XML stream
			rownode = rowlist.item(zeilennr);
			for (int col = 0; col < numberOfColumns; col++) {
				for (Iterator it = XMLUtils.getChildNodeIterator(rownode); it.hasNext();) {
					Node fldNode = (Node) it.next();
					if (XMLUtils.getAttribValue(fldNode, "name").equalsIgnoreCase(insert_cols[col])) {
						feld_wert = "";
						try {
							feld_wert = XMLUtils.getTheValue(fldNode).trim();
							// CDATA markers were masked on unload; restore them
							feld_wert = StringUtils.replace(feld_wert, "CDATASTART", "<![CDATA[");
							feld_wert = StringUtils.replace(feld_wert, "CDATAEND", "]]>");
						} catch (IllegalArgumentException e) {
							// node is NULL, no warning necessary
						}
						errmsg = feld_wert_to_pst(zeilennr, col, errmsg, feld_wert);
					}
				}
			}
			if (!errmsg.equals("") && mode.equals("stop")) {
				break;
			}
			if (useBatch)
				pst.addBatch();
			else
				pst.executeUpdate();
			numberOfRows++;
		}
		if (useBatch)
			pst.executeBatch();
		return numberOfRows;
	}

	/**
	 * Uploads a CSV file: via Postgres COPY when possible, otherwise via
	 * ANSI prepared-statement inserts.
	 *
	 * @return number of uploaded rows
	 */
	private long uploadCSV() throws Exception {
		long rows;
		if (isPostgres && !inserts.equalsIgnoreCase("simple") && !inserts.equalsIgnoreCase("batch")) {
			if (removeTrailingDelim)
				srcFile = removeTrailingDelim(srcFile);
			rows = uploadCSVinPostgres(srcFile, removeTrailingDelim);
		} else
			rows = uploadCSVwithAnsiSQL(srcFile);
		return rows;
	}

	/**
	 * Writes a copy of srcFile with the trailing field delimiter stripped from each line.
	 *
	 * @return path of the temporary copy (srcFile + ".tmp")
	 */
	private String removeTrailingDelim(String srcFile) throws UnsupportedEncodingException, FileNotFoundException, IOException {
		String returnSrcFile = srcFile + ".tmp";
		BufferedReader in2 = new BufferedReader(new InputStreamReader(new FileInputStream(srcFile), encoding));
		try {
			// BUGFIX: both streams were leaked before (never closed)
			PrintStream out2 = new PrintStream(new FileOutputStream(new File(returnSrcFile), false), true, encoding);
			try {
				String line;
				while ((line = in2.readLine()) != null) {
					if (line.endsWith(delim))
						line = line.substring(0, line.length() - delim.length());
					out2.println(line);
				}
			} finally {
				out2.close();
			}
		} finally {
			in2.close();
		}
		return returnSrcFile;
	}

	/**
	 * Uploads the CSV file via the Postgres COPY API. For databases with an
	 * ISO/Windows locale the file is transcoded to iso-8859-9 first.
	 *
	 * @param deleteSrcFile delete srcFile afterwards (it is the tmp copy in that case)
	 * @return number of copied rows (0 on error; returnCode is set to 1)
	 */
	private long uploadCSVinPostgres(String srcFile, boolean deleteSrcFile) {
		long numOfRows = 0;
		String copySql = "COPY " + targetTable + " FROM STDIN WITH DELIMITER '" + delim + "' NULL '' ENCODING '" + encoding + "'";
		copySql += header ? " HEADER" : "";
		String srcFileContent = de.superx.etl.EtlUtils.getFileContentsWithEncoding(srcFile, encoding);
		try {
			String dbname = uploadConnection.getCatalog();
			Statement stm = uploadConnection.createStatement();
			int isIso = 0;
			// BUGFIX: AND/OR precedence — without parentheses the datname filter
			// applied only to the first ilike, matching foreign databases too.
			ResultSet rs = stm.executeQuery("SELECT distinct 1 FROM pg_catalog.pg_database where datname='" + dbname
					+ "' and (datctype ilike '%euro%' or datctype ilike '%1252%' or datctype ilike '%8859%');");
			while (rs.next()) {
				if (rs.getObject(1) != null)
					isIso = Integer.parseInt(rs.getObject(1).toString());
			}
			rs.close();
			stm.close();
			Reader in4;
			final CopyManager cpm = ((PGConnection) uploadConnection).getCopyAPI();
			if (isIso == 1) {
				String srcFileIso = srcFile + "_iso.tmp";
				String srcFileContentValidIso = FileUtils.convertToIso(srcFileContent, "postgres");
				de.superx.etl.EtlUtils.saveFileContentsWithEncoding(srcFileIso, srcFileContentValidIso, "iso-8859-9");
				in4 = new BufferedReader(new InputStreamReader(new FileInputStream(srcFileIso), "iso-8859-9"));
			} else {
				in4 = new BufferedReader(new FileReader(srcFile));
			}
			try {
				numOfRows = cpm.copyIn(copySql, in4);
			} finally {
				in4.close(); // BUGFIX: reader was leaked before
			}
			numberOfRows = numOfRows;
			if (deleteSrcFile) {
				new File(srcFile).delete();
			}
		} catch (Exception e) {
			// BUGFIX: the exception used to be swallowed silently; now the caller
			// can detect the failure via returnCode.
			returnCode = 1;
			System.err.println("COPY upload of " + srcFile + " failed: " + e.toString());
		}
		return numOfRows;
	}

	/**
	 * Uploads the CSV file row by row via a prepared INSERT statement
	 * (batch mode if the driver supports it). Lines ending with '\' are
	 * continued on the next physical line.
	 *
	 * @return number of uploaded rows
	 */
	private long uploadCSVwithAnsiSQL(String srcFile) throws SQLException, FileNotFoundException, IOException {
		numberOfRows = 0;
		String msg = "";
		int zeilennr = 1;
		BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(srcFile), encoding));
		try {
			initializeColumnSchema();
			String insertHead = createPreparedStatementHead();
			pst = uploadConnection.prepareStatement(insertHead);
			if (useBatch)
				pst.clearBatch();
			String text;
			while ((text = in.readLine()) != null) {
				// Re-join logical rows split by a trailing '\'.
				// BUGFIX: the old loop never terminated when EOF followed a trailing '\'.
				while (text.endsWith("\\")) {
					text = text.substring(0, text.length() - 1);
					String cont = in.readLine();
					if (cont == null)
						break;
					text += "\n" + cont;
				}
				String prepare = createPreparedInsertStatement(zeilennr, insertHead, text);
				if (!prepare.equals("") && mode.equals("stop")) {
					msg = prepare;
					break;
				}
				if (useBatch)
					pst.addBatch();
				else
					pst.executeUpdate();
				numberOfRows++;
				zeilennr++; // BUGFIX: was never incremented, error messages always said line 1
			}
			if (useBatch)
				pst.executeBatch();
		} finally {
			in.close(); // BUGFIX: reader was leaked before
		}
		//TODO: msg
		return numberOfRows;
	}

	/**
	 * Splits one CSV row at the (unescaped) delimiter and binds each field
	 * to the prepared statement.
	 *
	 * @return accumulated error messages, "" if the row was bound cleanly
	 */
	private String createPreparedInsertStatement(
			int line,
			String insertHead,
			String text)
			throws SQLException {
		int p;
		int i = 0;
		int k = 0;
		String errmsg = "";
		String feld_wert;
		// ensure the row ends with a delimiter so the last field is parsed too
		if (!text.endsWith(delim))
			text += delim;
		do {
			p = text.indexOf(delim, i);
			// skip escaped delimiters ("\" + delim)
			if (p > 0 && text.substring(p - 1, p).equals("\\"))
				p = text.indexOf(delim, p + 1);
			if (p > -1) {
				// (dead "p == -1" branch removed — unreachable inside this guard)
				feld_wert = text.substring(i, p);
				// a literal "\\n" in the field becomes "\n"
				if (feld_wert != null && (feld_wert.indexOf("\\\\n") > 0)) {
					feld_wert = de.memtext.util.StringUtils.replace(feld_wert, "\\\\n", "\\n");
				}
				// an escaped delimiter loses its backslash
				if (feld_wert != null && (feld_wert.indexOf("\\" + delim) > 0)) {
					feld_wert = de.memtext.util.StringUtils.replace(feld_wert, "\\", "");
				}
				// a double backslash collapses to one
				if (feld_wert != null && (feld_wert.indexOf("\\\\") > 0)) {
					feld_wert = de.memtext.util.StringUtils.replace(feld_wert, "\\\\", "\\");
				}
				errmsg = feld_wert_to_pst(line, k, errmsg, feld_wert);
				k++;
				i = p + 1;
			}
		} while (p > -1);
		return errmsg;
	}

	/**
	 * Binds one field value to parameter (col+1) of the prepared statement,
	 * converting it according to the column's JDBC type. Empty strings become NULL.
	 *
	 * @return errmsg with any conversion errors appended
	 */
	private String feld_wert_to_pst(int line, int col, String errmsg, String feld_wert) throws SQLException {
		if (col >= numberOfColumns)
			// BUGFIX: message used to report numberOfColumns-1 as the expected count
			errmsg += "Anzahl Spalten in Datei ist " + col + ", aber es sollten nur " + numberOfColumns + " Spalten sein. Bitte prüfen Sie das Trennzeichen";
		else {
			if (feld_wert.equals(""))
				try {
					pst.setNull(col + 1, insert_types[col]);
				} catch (SQLException e1) {
					errmsg += e1.toString();
				}
			else {
				switch (insert_types[col]) {
					case Types.BIGINT :
					case Types.TINYINT :
					case Types.SMALLINT :
					case Types.INTEGER :
						try {
							int myInt = Integer.parseInt(feld_wert.trim());
							pst.setInt(col + 1, myInt);
						} catch (NumberFormatException e1) {
							errmsg += e1.toString();
							setFieldToNull(col, insert_types, pst);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.FLOAT :
						try {
							float myFloat = Float.parseFloat(feld_wert.trim());
							pst.setFloat(col + 1, myFloat);
						} catch (NumberFormatException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.REAL :
					case Types.DOUBLE :
					case Types.NUMERIC :
					case Types.DECIMAL :
						try {
							double myDouble = Double.parseDouble(feld_wert.trim());
							pst.setDouble(col + 1, myDouble);
						} catch (NumberFormatException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.CHAR :
					case Types.VARCHAR :
					default :
						if (feld_wert.equals(" "))
							feld_wert = ""; // a single blank in the UNL file means empty string
						try {
							pst.setString(col + 1, feld_wert);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.LONGVARCHAR :
						ByteArrayInputStream by =
								new ByteArrayInputStream(feld_wert.getBytes());
						pst.setAsciiStream(
								col + 1,
								by,
								feld_wert.length());
						break;
					case Types.DATE :
						try {
							java.util.Date datum = DateUtils.parse(feld_wert.trim());
							feld_wert = DateUtils.formatUS(datum);
							// unfortunately this step is necessary because of java.sql.Date
							pst.setDate(
									col + 1,
									java.sql.Date.valueOf(feld_wert));
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (ParseException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (IllegalArgumentException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.TIME :
						try {
							pst.setTime(col + 1, java.sql.Time.valueOf(feld_wert.trim()));
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (IllegalArgumentException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.TIMESTAMP :
						try {
							java.util.Date datum = DateUtils.dateTimeParse(feld_wert.trim());
							feld_wert = DateUtils.dateTimeFormatUS(datum);
							// unfortunately this step is necessary because of java.sql.Date
							pst.setTimestamp(
									col + 1,
									java.sql.Timestamp.valueOf(feld_wert + ".0"));
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (ParseException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						} catch (IllegalArgumentException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						break;
					case Types.BIT :
						// Types.BOOLEAN does not exist in jdk 1.3
						try {
							// BUGFIX: Boolean.getBoolean() looks up a *system property* named
							// like the field value — booleans always uploaded as false.
							boolean wf = Boolean.parseBoolean(feld_wert.trim());
							pst.setBoolean(col + 1, wf);
						} catch (SQLException e1) {
							errmsg += conversionException(line, col, feld_wert, e1.toString());
							setFieldToNull(col, insert_types, pst);
						}
						// Informix driver reports boolean as OTHER (1111);
						// since the default is '' it works anyway
						break;
				}
			}
		}
		return errmsg;
	}

	/** In "exclude-field" mode, NULLs out an invalid field instead of failing the row. */
	private void setFieldToNull(
			int k,
			int[] insert_types,
			PreparedStatement pst) {
		if (mode.equals("exclude-field"))
			try {
				pst.setNull(k + 1, insert_types[k]);
			} catch (SQLException e3) {
				System.err.println("Invalid Field " + (k + 1) + " could not be set to null");
			}
	}

	/** Formats a field conversion error including line, column, column name and value. */
	private String conversionException(int line, int col, String field_value, String error) {
		return "Error in line " + line + " in Column " + (col + 1) + " " + insert_cols[col]
				+ " value " + field_value + ": " + error.toString() + "; ";
	}

	/**
	 * Reads the target table's column names and JDBC types from the database
	 * metadata into insert_cols / insert_types; disables batch mode if the
	 * driver does not support it.
	 */
	private void initializeColumnSchema() throws SQLException {
		int i = 0;
		String tabelle = targetTable;
		if (!dbmd.storesLowerCaseIdentifiers())
			tabelle = tabelle.toUpperCase();
		ResultSet rs = dbmd.getColumns(uploadConnection.getCatalog(), null, tabelle, null);
		while (rs.next()) {
			insert_cols[i] = rs.getObject("COLUMN_NAME").toString();
			insert_types[i] = rs.getInt("DATA_TYPE");
			i++;
		}
		rs.close(); // BUGFIX: result set was leaked before
		numberOfColumns = i;
		if (!dbmd.supportsBatchUpdates())
			useBatch = false;
	}

	/** Builds "insert into T(c1, c2, ...) values( ?, ?, ...);" for the target table. */
	private String createPreparedStatementHead() throws SQLException {
		String insert_head = "insert into " + targetTable + "(";
		String insert_val = "";
		for (int i = 0; i < numberOfColumns; i++) {
			insert_head += insert_cols[i] + ", ";
			insert_val += "?, ";
		}
		insert_head = insert_head.substring(0, insert_head.length() - 2);
		insert_val = insert_val.substring(0, insert_val.length() - 2);
		insert_head += ") values( ";
		return insert_head + insert_val + ");";
	}

	/**
	 * Returns the given connection or opens a new one from the property file,
	 * detecting Postgres via the driver class; also caches the metadata.
	 */
	public Connection getConnection(Connection myConnection, String propfile) throws Exception {
		if (myConnection == null) {
			SxConnection mySxConnection = new SxConnection();
			mySxConnection.setPropfile(propfile);
			myConnection = mySxConnection.getConnection();
			String db_driver = mySxConnection.m_DriverClass;
			if (db_driver.equals("org.postgresql.Driver"))
				isPostgres = true;
		}
		dbmd = myConnection.getMetaData();
		return myConnection;
	}
}

212
src/de/superx/etl/bin/Doquery.java

@ -0,0 +1,212 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.bin;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import java.util.StringTokenizer;
import de.superx.util.SqlStringUtils;
import de.superx.bin.SxConnection;
import de.superx.common.SuperX_el;
import de.superx.etl.QueryResultSerializer;
import de.superx.servlet.SxPools;
import de.superx.servlet.SxSQL_Server;
/**
* @author Daniel Quathamer Projektgruppe SuperX
* doquery.java
* @
* Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.<br>
* Gebrauch:<br> java doquery <Pfad zu logger-properties> <pfad zu db.properties> <sql-Ausdruck> <Ausgabeformat (txt | html | xml)>(optional) <delimiter> <mit Spaltenüberschriften (true | false)>(optional) <Ausgabedatei>(optional)
*
*/
public class Doquery {
private static Connection myDb;
private static Statement st; // Our statement to run queries with
private static DatabaseMetaData dbmd;
// This defines the structure of the database
private static boolean done = false; // Added by CWJ to permit \q command
private static String delim = "^";
private static String header = "false";
private static String outFormat = "txt";
private static String logfile = "../conf/logging.properties";
private static String tabelle = "";
private static String dbpropfile = "../conf/db.properties";
private static SxConnection myConnection = null;
private static String db_driver;
private static String mandantenID="default";
private static String outfile = "";
private static Logger logger =
(Logger) Logger.getLogger(Doquery.class.toString());
private static String usage =
"-------------------------------------\nGebrauch: java de.superx.bin.Doquery $LOGGER_PROPERTIES $DB_PROPERTIES $sql (optional: )$outformat $DBDELIMITER $header $filename \n---------------------------------------------------";
public static int go(String args[])
throws
Exception {
if (args.length > 0) {
logfile = args[0].trim();
} else {
throw new IllegalArgumentException("Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich");
}
File f = new File(logfile);
if (!f.exists()) {
throw new IOException("Datei nicht gefunden: " + logfile);
}
FileInputStream ins = new FileInputStream(logfile);
LogManager MyLogManager = java.util.logging.LogManager.getLogManager();
MyLogManager.readConfiguration(ins);
logfile = MyLogManager.getProperty(".level");
logger.info("Using Loggging-Level " + logfile);
String query = "";
long numberOfRows=0;
int returnCode=0;
if (args.length > 1) {
dbpropfile = args[1].trim();
} else {
logger.severe(
"Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich");
System.exit(1);
}
if (args.length <= 2) {
logger.severe(
"Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich");
System.exit(1);
}
query=args[2].trim();
if (args.length > 3) {
outFormat = args[3].trim();
}
if (args.length > 4) {
delim = args[4].trim();
}
if (args.length > 5) {
header = args[5].trim();
}
if (args.length > 6) {
outfile = args[6].trim();
}
if (delim.equals(""))
delim = "^"; //default Delimiter
long jetzt = new java.util.Date().getTime() ;
getConnection(logger, dbpropfile);
QueryResultSerializer myQueryResultSerializer = null;
FileWriter fw;
StringWriter sw;
boolean printColname=(header.equalsIgnoreCase("true")?true:false);
if(!outfile.equals(""))
{
try {
fw = new FileWriter(outfile);
if(outFormat.equalsIgnoreCase("xml"))
fw.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
myQueryResultSerializer=new QueryResultSerializer("default", myConnection, query,fw);
} catch (IOException e) {
logger.severe ("Fehler beim Erstellen der Datei "+outfile);
returnCode=1;
}
}
else
{
sw=new StringWriter();
if(outFormat.equalsIgnoreCase("xml"))
sw.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
myQueryResultSerializer=new QueryResultSerializer("default", myConnection, query,sw);
}
numberOfRows=myQueryResultSerializer.unloadQueryResults("DOQUERY",outFormat,delim,printColname);
returnCode=myQueryResultSerializer.getReturnCode();
if(outfile.equals(""))
{
sw=myQueryResultSerializer.getOutputString();
System.out.println(sw.toString());
}
long erstrecht = new java.util.Date().getTime() ;
System.out.println( numberOfRows+" rows unloaded in "+(erstrecht-jetzt)/1000 +" Sec.");
System.out.println( "returnCode= "+returnCode);
myQueryResultSerializer = null;
return returnCode;
}
/*
* Display some instructions on how to run the example
*/
/**
 * Prints the usage instructions for the Doquery tool to stdout and
 * terminates the JVM with exit code 1.
 */
public static void instructions() {
	final String[] usageLines = {
			"SuperX @version@\n",
			"\nDieses Java-Programm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.\n",
			"Gebrauch:\n java doquery <Pfad zu logger-properties> <pfad zu db.properties> <sql-Ausdruck> <Ausgabeformat (txt | html | xml)>(optional) <delimiter> <mit Spaltenüberschriften (true | false)>(optional) <Ausgabedatei>(optional)\n" };
	for (String line : usageLines) {
		System.out.println(line);
	}
	System.exit(1);
}
/**
 * Command line entry point: delegates to go(args) and maps its result to
 * the process exit code; any exception is reported and exits with code 1.
 */
public static void main(String args[]) {
	int exitCode = 0;
	try {
		exitCode = go(args);
	} catch (Exception ex) {
		// failure path: report and leave immediately with exit code 1
		System.err.println("Doquery Aufruf fehlgeschlagen.\n" + ex);
		ex.printStackTrace();
		System.exit(1);
	}
	System.out.println("Doquery Aufruf ausgeführt.\n" );
	System.exit(exitCode);
}
/**
 * Opens the database connection described by the given property file and
 * initialises the shared static fields (connection, statement, metadata,
 * driver class).
 *
 * @param logger   logger used for progress and error output
 * @param propFile path to the db.properties file handed to SxConnection
 * @throws SQLException if no connection could be established (the original
 *                      cause is logged and wrapped)
 */
public static void getConnection(Logger logger,String propFile) throws SQLException {
	myConnection = new SxConnection();
	myConnection.setPropfile(propFile);
	logger.config("Starting Connection...");
	try {
		myDb = myConnection.getConnection();
		st = myDb.createStatement();
		// alternative statement setups kept for reference:
		//st = myDb.createStatement(java.sql.ResultSet.TYPE_FORWARD_ONLY,
		//	java.sql.ResultSet.CONCUR_READ_ONLY);
		//st.setFetchSize(100);
		dbmd = myDb.getMetaData();
		//st = myDb.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE,
		//ResultSet.CONCUR_UPDATABLE);
	} catch (Exception e) {
		e.printStackTrace();
		logger.severe("Keine DB-Verbindung: " + e.toString());
		throw new SQLException("Keine DB-Verbindung: " + e.toString());
	}
	// expose the JDBC driver class actually used by this connection
	db_driver = myConnection.m_DriverClass;
}
}

192
src/de/superx/etl/bin/Dosql.java

@ -0,0 +1,192 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.bin;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStreamReader;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import de.memtext.util.ExceptionHandler;
import de.superx.etl.util.GetOpts;
import de.memtext.util.PropUtils;
import de.superx.bin.SxConnection;
import de.superx.etl.EtlUtils;
import de.superx.etl.SqlExecutor;
/**
* @author Daniel Quathamer Projektgruppe SuperX
* Dosql.java
* @
 * Dieses Javaprogramm führt einen oder mehrere SQL-Ausdrücke in einer Datei aus und gibt das Ergebnis aus.<br>
* Gebrauch:<br> java dosql <Pfad zu logger-properties> <pfad zu db.properties> <Datei mit sql-Ausdrücken> <Ausgabeformat (txt | html | xml)>(optional) <delimiter> <mit Spaltenüberschriften (true | false)>(optional) <Ausgabedatei>(optional)
*
*/
/* Änderungen
15.4.2004 MB wenn keine Ausgabedatei und Ausgabeformat txt, alle selects von SxJdbcClient
loggen lassen
6.4.2004 MB Code static main verlegt - nach erfolgreicher Durchführung ein system.out
**/
/**
 * Command line tool that executes an SQL script file (one or more SQL
 * statements) against the database configured in db.properties and prints
 * the result. Options are parsed via GetOpts; see {@code usage} below.
 */
public class Dosql {
	private static Connection myDb;
	private static DatabaseMetaData dbmd;
	private static Statement st; // Our statement to run queries with
	private static boolean done = false; // Added by CWJ to permit \q command
	private static String delim = "^";
	private static String header = "false";
	private static String logfile = "../conf/logging.properties";
	private static String tabelle = "";
	private static String dbpropfile = "../conf/db.properties";
	private static String outfile = "";
	private static String outFormat = "txt";
	private static String _dateiPfad = "";
	private static Logger logger =
		(Logger) Logger.getLogger(Dosql.class.toString());
	private static ExceptionHandler exceptionHandler =
		new ExceptionHandler(false);
	private static SxConnection myConnection = null;
	// NOTE(review): the class lives in package de.superx.etl.bin, so the
	// usage text names that package (the old text said de.superx.bin.Dosql).
	private static String usage =
		"-------------------------------------\n"
			+ "Gebrauch: java de.superx.etl.bin.Dosql -logger=<<Pfad zu logging.properties>> -dbproperties=<<Pfad zu db.properties>> "
			+ "-sqlfile:<Datei mit sql-Ausdrücken> -params:<Parameter, die in sql- oder Script-Dateien ersetzt werden; Syntax:param1=wert1|param2=wert2 etc> -outFormat:<Ausgabeformat (txt | html | xml)>(optional) -delim:<delimiter> -header:<mit Spaltenüberschriften (true | false)>(optional) -outfile:<Ausgabedatei>(optional) \n---------------------------------------------------";
	/**
	 * Prints a short description of the tool and its invocation syntax,
	 * then terminates the JVM with exit code 1.
	 */
	public static void instructions() {
		System.out.println("SuperX @version@\n");
		System.out.println(
			"\nDieses Javaprogramm führt ein beliebiges sql-Script mit einer oder mehr sql-Anweisungen aus");
		System.out.println(
			"Im Classpath muss superx@version@.jar sowie der zugehörige jdbc-Treiber sein.");
		System.out.println(
			"Befehl:\n java dosql <Dateipfad sql-Script> <delimiter>(optional) <Ausgabe der Feldüberschriften (optional, true oder false).");
		System.out.println(
			"Default:\n java dosql <Dateipfad sql-Script> <TAB> true");
		System.exit(1);
	}
	/**
	 * Entry point. Runs {@link #execute(String[])} and propagates a non-zero
	 * script result as the process exit code; exits with 1 on any exception.
	 */
	public static void main(String args[]) {
		int returnCode = 0;
		try {
			returnCode = execute(args);
		} catch (Exception e) {
			logger.severe(
				"Fehler beim sql-Script: " + _dateiPfad + " " + e.toString());
			e.printStackTrace();
			System.exit(1);
		}
		// Previously the result of executeQueries() was silently discarded;
		// now a failing script is visible to calling shell scripts.
		if (returnCode != 0)
			System.exit(returnCode);
	}
	/**
	 * Parses the command line, configures java.util.logging, connects to the
	 * database and runs the SQL script via SqlExecutor.
	 *
	 * @param args raw command line arguments (see {@code usage})
	 * @return the return code reported by SqlExecutor.executeQueries()
	 * @throws Exception if the logging configuration or script file is
	 *                   missing, or connection/script execution fails
	 */
	public static int execute(String[] args)
		throws Exception {
		String params = "";
		GetOpts.setOpts(args);
		String isdrin =
			GetOpts.isAllRequiredOptionsPresent(
				"-logger,-dbproperties,-sqlfile");
		if (isdrin != null) {
			System.err.println("Folgende Optionen fehlen: " + isdrin);
			System.err.println(usage);
			System.exit(1);
		}
		if (GetOpts.isPresent("-logger"))
			logfile = GetOpts.getValue("-logger");
		if (GetOpts.isPresent("-dbproperties"))
			dbpropfile = GetOpts.getValue("-dbproperties");
		if (GetOpts.isPresent("-sqlfile"))
			_dateiPfad = GetOpts.getValue("-sqlfile");
		if (GetOpts.isPresent("-outFormat"))
			outFormat = GetOpts.getValue("-outFormat");
		if (GetOpts.isPresent("-delim"))
			delim = GetOpts.getValue("-delim");
		if (GetOpts.isPresent("-header"))
			header = GetOpts.getValue("-header");
		if (GetOpts.isPresent("-outfile"))
			outfile = GetOpts.getValue("-outfile");
		if (GetOpts.isPresent("-params"))
			params = GetOpts.getValue("-params");
		if (delim.equals(""))
			delim = "^"; //default Delimiter
		int returnCode = 0;
		File f = new File(logfile);
		if (!f.exists()) {
			throw new IOException("Datei nicht gefunden: " + logfile);
		}
		FileInputStream ins = new FileInputStream(logfile);
		LogManager MyLogManager = java.util.logging.LogManager.getLogManager();
		try {
			MyLogManager.readConfiguration(ins);
		} finally {
			// close the config stream (it leaked in the original version)
			ins.close();
		}
		// note: the variable logfile is reused to hold the configured level
		logfile = MyLogManager.getProperty(".level");
		logger.info("Using Loggging-Level " + logfile);
		File sqlScriptFile = new File(_dateiPfad);
		getConnection(logger, dbpropfile);
		Properties paramProperties = null;
		if (!params.equals("")) {
			// "|"-separated key=value pairs become one property per line
			params = de.memtext.util.StringUtils.replace(params, "|", System.getProperty("line.separator"));
			paramProperties = EtlUtils.convertStringToProperty(params);
		}
		SqlExecutor mySqlExecutor = new SqlExecutor("default", myConnection, sqlScriptFile, paramProperties);
		if (!outfile.equals(""))
			mySqlExecutor.setOutfile(outfile);
		returnCode = mySqlExecutor.executeQueries();
		String output = mySqlExecutor.getOutString().toString();
		myConnection.close();
		System.out.println(
			"Dosql hat das Script " + _dateiPfad + " erfolgreich durchgeführt");
		System.out.println(output);
		logger.info("dosql erfolgreich beendet");
		return returnCode;
	}
	/**
	 * Opens the database connection described by the given property file and
	 * initialises the shared statement and metadata fields.
	 *
	 * @param logger   logger for progress and error output
	 * @param propFile path to the db.properties file
	 * @throws SQLException if no connection could be established
	 */
	public static void getConnection(Logger logger,String propFile) throws SQLException {
		myConnection = new SxConnection();
		myConnection.setPropfile(propFile);
		logger.config("Starting Connection...");
		try {
			myDb = myConnection.getConnection();
			st = myDb.createStatement();
			dbmd = myDb.getMetaData();
		} catch (Exception e) {
			e.printStackTrace();
			logger.severe("Keine DB-Verbindung: " + e.toString());
			throw new SQLException("Keine DB-Verbindung: " + e.toString());
		}
	}
}

70
src/de/superx/etl/bin/EtlJobExecutor.java

@ -0,0 +1,70 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.bin;
import java.sql.Connection;
import de.superx.etl.util.GetOpts;
import de.superx.etl.EtlActionJob;
import de.superx.etl.EtlUtils;
/**
 * Command line entry point for executing a named ETL job. Required options:
 * -dbproperties (path to db.properties) and -job (job name). Optional:
 * -params (comma separated key=value pairs) and -outfile.
 */
public class EtlJobExecutor {
	public EtlJobExecutor() {
	}
	/**
	 * Parses the options, initialises and executes the ETL job, and exits
	 * with the job's return code (1 on any error).
	 *
	 * @param args raw command line arguments
	 */
	public static void main(String[] args) {
		// NOTE(review): usage text corrected to the actual package
		// de.superx.etl.bin (it previously claimed de.superx.bin).
		String usage="usage: java de.superx.etl.bin.EtlJobExecutor -dbproperties:$DB_PROPERTIES -job:abc -params:TID=16000 (optional) -outfile:Ausgabedatei (optional)" ;
		GetOpts.setOpts(args);
		String isdrin = GetOpts.isAllRequiredOptionsPresent("-dbproperties,-job");
		if (isdrin != null) {
			// name the missing options explicitly (consistent with Dosql)
			System.err.println("Folgende Optionen fehlen: " + isdrin);
			System.err.println(usage);
			System.exit(1);
		}
		int returnCode = 0;
		String dbpropfile = GetOpts.getValue("-dbproperties");
		String job = GetOpts.getValue("-job");
		String params = null;
		if (GetOpts.isPresent("-params"))
			params = GetOpts.getValue("-params");
		if (params != null)
			// comma separated pairs become one key=value pair per line
			params = de.memtext.util.StringUtils.replace(params, ",", EtlUtils.NEWLINE);
		String outfile = null;
		if (GetOpts.isPresent("-outfile"))
			outfile = GetOpts.getValue("-outfile");
		EtlActionJob myJob = new EtlActionJob(job, "", dbpropfile); //TODO: Mandantid
		try {
			myJob.initJob(job, params);
		} catch (Exception e) {
			e.printStackTrace();
			System.err.println(myJob.getLogoutput());
			System.exit(1);
		}
		System.out.println("Job "+ job+" initialized");
		try {
			returnCode = myJob.execute(outfile);
		} catch (Exception e) {
			e.printStackTrace();
			System.err.println(myJob.getLogoutput());
			System.exit(1);
		}
		System.out.println("Job "+ job+" executed in " +myJob.getDuration()+" Sec.");
		System.exit(returnCode);
	}
}

908
src/de/superx/etl/bin/SxTransformer.java

@ -0,0 +1,908 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.bin;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.io.Writer;
import java.net.URI;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;
import java.util.StringTokenizer;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.xml.transform.Result;
import javax.xml.transform.Source;
import javax.xml.transform.TransformerException;
import javax.xml.transform.URIResolver;
import javax.xml.transform.sax.SAXResult;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import org.apache.fop.apps.FOUserAgent;
import org.apache.fop.apps.Fop;
import org.apache.fop.apps.FopConfParser;
import org.apache.fop.apps.FopFactory;
import org.apache.fop.apps.FopFactoryBuilder;
import org.apache.fop.apps.MimeConstants;
import org.jfor.jfor.converter.Converter;
import org.xml.sax.InputSource;
import de.superx.bin.SxDBUtils;
import de.superx.util.SqlStringUtils;
// ACHTUNG - PDF ERZEUGUNG WIEDER AKTIV DQ 27.12.2005!!!
/**
* @author Daniel Quathamer Projektgruppe SuperX SxTransformer.java @ Dieses
* Javaprogramm transformiert ResultSets in text, html oder xml und gibt
* das Ergebnis als Ausgabestrom für stdout oder eine Datei aus.
*
*
*
*/
public class SxTransformer {
	// Delimiter strings surrounding column headers, fields, rows and the
	// whole result set; they are (re)assigned per output format in
	// assignDelims(String) and partly mutated again inside the output loops.
	private String head_delim1 = "";
	private String head_delim2 = "\t";
	private String header_delim1 = "";
	private String header_delim2 = "\n";
	private String fld_delim1 = "";
	private String fld_delim2 = "\t";
	private String row_delim1 = "";
	private String row_delim2 = "\n";//we only use unix newline, even under
	// DOS
	private String rs_delim1 = "";
	private String rs_delim2 = "";
	private String delim = "\t";
	private String header = "false";
	private String outHeader = "";
	private String outFooter = "";
	public String format = "txt";
	public String params = "";
	public String[] XslPipeline;
	public String stylesheet = "";
	public PrintWriter myOutwriter;
	public OutputStream myOutputStream;
	public ResultSet outrs = null;
	public ResultSetMetaData outrsmd = null;
	public String quellstring = null;
	public String outfile = "";
	private File fopxconfFile=null;
	// Sets the FOP configuration file used by transformFile("pdf").
	public void setFopxconfFile(File fopxconfFile) {
		this.fopxconfFile = fopxconfFile;
	}
	Logger myLogger;
	String loglevel;
	//PrintStream outputstream=null;
	//TODO possibly switch to Saxon
	//TransletUtils.initFactory("net.sf.saxon.TransformerFactoryImpl",
	//	"org.apache.xalan.xsltc.trax.TransformerFactoryImpl");
	//System.out.println("Using xsl processor: "
	//	+ TransletUtils.getUsedProcessor());
	/**
	 * Creates a transformer writing to the given stream (typically stdout).
	 */
	public SxTransformer(Logger logger, PrintStream myStream) {
		super();
		myLogger = logger;
		myLogger.config("Starting the Transformer with Stdout-Output.");
		myOutwriter = new PrintWriter(myStream);
	}
	/**
	 * Creates a transformer writing to the named file; note this variant
	 * also lowers the logger's level to SEVERE.
	 */
	public SxTransformer(Logger logger, String myFile) {
		super();
		myLogger = logger;
		myLogger.config("Starting the Transformer with File-Output.");
		outfile = myFile;
		myLogger.setLevel(Level.SEVERE);
	}
	/**
	 * Creates a transformer writing through the given FileWriter (buffered).
	 */
	public SxTransformer(Logger logger, FileWriter myFilewriter) {
		super();
		myLogger = logger;
		myLogger.config("Starting the Transformer with File-Output.");
		myOutwriter = new PrintWriter(new BufferedWriter(myFilewriter));
	}
	/**
	 * Creates a transformer writing to an arbitrary PrintWriter
	 * (e.g. backed by a StringWriter).
	 */
	public SxTransformer(Logger logger, PrintWriter myWriter) {
		super();
		myLogger = logger;
		myLogger.config("Starting the Transformer with StringWriter-Output.");
		myOutwriter = myWriter;
	}
	/**
	 * Creates a transformer without any output target; the caller must set
	 * myOutwriter before using output methods.
	 */
	public SxTransformer(Logger logger) {
		super();
		myLogger = logger;
		myLogger.config("Starting the Transformer without Stream");
	}
	/**
	 * Streams the result set held in {@code outrs} to {@code myOutwriter},
	 * formatted as "txt", "html" or "xml" (see assignDelims). Optionally
	 * prints column labels when {@code header} equals "true"; closes both
	 * the result set and the writer when done.
	 * NOTE(review): the row loop is a do/while, i.e. it assumes the caller
	 * already positioned the cursor on the first row — confirm with callers.
	 *
	 * @param outFormat output format key understood by assignDelims
	 */
	public void printResult(String outFormat) {
		//loglevel=myLogger.getRootLogger().getLevel().toString().trim();
		assignDelims(outFormat);
		myLogger.config("Resultset wird formatiert");
		String f_wert = "";
		//ResultSetMetaData rsmd = rs.getMetaData();
		String zs = "";
		String headZs = "";
		int cols = 0;
		Object o = null;
		String[] lbl = null;
		// hard upper bound of 1254 usable column slots (indexing is 1-based)
		int[] types = new int[1255];
		// de.memtext.util.MemoryUtils.printfree();
		if (outrs != null) {
			try {
				cols = outrsmd.getColumnCount();
				lbl = new String[1255];
				zs = outHeader + rs_delim1;
				headZs = header_delim1;
				;
				// Print the result column names?
				for (int i = 1; i <= cols; i++) {
					lbl[i] = outrsmd.getColumnLabel(i);
					types[i] = outrsmd.getColumnType(i);
					if (outFormat.equals("xml")) {
						// xml wraps each field in <fld name='...'> instead
						// of using a header row
						fld_delim1 = "<fld name='" + lbl[i] + "'>";
						fld_delim2 = "</fld>";
					} else {
						if (i < cols)
							headZs += (head_delim1 + lbl[i] + head_delim2);
						else {
							headZs += (head_delim1 + lbl[i] + head_delim2);
							headZs += (header_delim2);
							//out.newLine();
						}
					}
				}
			} catch (SQLException e1) {
				myLogger.severe("Fehler beim Einlesen der Feldnamen: " + e1.toString());
				e1.printStackTrace();
			}
			if (header.equals("true")) zs += headZs;
			// try {
			myOutwriter.write(zs);
			myOutwriter.flush();
			/*
			 * } catch (IOException e) { myLogger.severe("Fehler beim Ausgeben
			 * der Feldnamen: " + e.toString());
			 * }
			 */
			//now the results
			StringBuffer line = new StringBuffer();
			try {
				int rowCounter = 0;
				do {
					line.setLength(0);
					line.append(row_delim1);
					for (int i = 1; i <= cols; i++) {
						if (outFormat.equals("xml")) {
							fld_delim1 = "<fld name='" + lbl[i] + "'>";
							fld_delim2 = "</fld>";
						}
						if (types[i] == Types.LONGVARCHAR) {
							// The Informix driver requires getString
							// instead of getObject here!
							o = outrs.getString(i);
						} else {
							o = outrs.getObject(i);
						}
						if (outrs.wasNull())
							if (i < cols)
								line.append(fld_delim1 + "" + fld_delim2);
							else {
								line.append(fld_delim1 + "" + fld_delim2);
								line.append(row_delim2);
								//out.newLine();
							}
						else {
							// cf. de.superx.common.Maske.getCSV
							f_wert = SxDBUtils.field_value(o);
							if (outFormat.equals("xml") && (f_wert.indexOf("&") > -1 || f_wert.indexOf("<") > -1 || f_wert.indexOf(">") > -1))
								f_wert = "<![CDATA[" + f_wert + "]]>";
							// if the field value happens to contain the delimiter, it is escaped with "\"
							if (outFormat.equals("txt")) {
								f_wert = SxDBUtils.prepareInformixCsv(f_wert);
								if (f_wert != null && (f_wert.indexOf(fld_delim2) > -1)) f_wert = de.memtext.util.StringUtils.replace(f_wert, fld_delim2, "\\" + fld_delim2);
							}
							if (i < cols)
								line.append(fld_delim1 + f_wert + fld_delim2);
							else {
								line.append(fld_delim1 + f_wert + fld_delim2);
								line.append(row_delim2);
							}
						}
					}
					myOutwriter.write(line.toString());
					rowCounter++;
					// flush every 10000 rows to bound memory use
					if (rowCounter > 10000) {
						de.memtext.util.MemoryUtils.printfree();
						myOutwriter.flush();
						rowCounter = 0;
					}
				} while (outrs.next()); // end of do/while
				myOutwriter.write(rs_delim2 + outFooter);
				myOutwriter.flush();
				outrs.close();
				myOutwriter.close();
			} /*
			   * catch (IOException e) { myLogger.severe("Fehler beim Ausgeben
			   * der Feldwerte: " + e.toString()); }
			   */
			catch (SQLException e) {
				// on SQL error mid-stream: still write the footer and close
				myLogger.warning("SQL-Fehler beim Ausgeben der Feldwerte: " + e.toString());
				try {
					myOutwriter.write(rs_delim2 + outFooter);
					myOutwriter.flush();
					myOutwriter.close();
					outrs.close();
				}
				/*
				 * catch (IOException e1) { myLogger.severe("Fehler beim
				 * Ausgeben der Feldwerte: " + e1.toString()); }
				 */
				catch (SQLException e1) {
					myLogger.warning("SQL-Fehler beim Ausgeben der Feldwerte: " + e1.toString());
				}
			}
		}
		else // when outrs == null: just flush and close the writer
		{
			myOutwriter.flush();
			myOutwriter.close();
		}
		myLogger.info("resultset printed");
	}
	/**
	 * Applies the XSL stylesheet referenced by {@code stylesheet} to the XML
	 * text in {@code quellstring} and writes the result to
	 * {@code myOutwriter}. Parameters from {@code params}
	 * (comma separated name=value pairs) are passed to the transformer.
	 * Errors are logged, not rethrown.
	 *
	 * @param methode output method handed to the transformer (e.g. "xml")
	 */
	public void transformString(String methode) throws TransformerException {
		try {
			javax.xml.transform.TransformerFactory tFactory = javax.xml.transform.TransformerFactory.newInstance();
			javax.xml.transform.Transformer transformer = tFactory.newTransformer(new javax.xml.transform.stream.StreamSource(stylesheet));
			StringReader s1 = new StringReader(quellstring);
			transformer.setOutputProperty(javax.xml.transform.OutputKeys.ENCODING, SqlStringUtils.getEncoding());
			transformer.setOutputProperty(javax.xml.transform.OutputKeys.METHOD, methode);
			transformer.setParameter("sx_client", format);
			String result = null;
			// ensure a trailing "," so the tokenizer sees every pair
			if (!params.endsWith(",")) params += ",";
			StringTokenizer st = new StringTokenizer(params, ",");
			for (; st.hasMoreTokens();) {
				String param = st.nextToken();
				if (!param.equals("")) {
					String paramname = param.substring(0, param.indexOf("="));
					String paramvalue = param.substring(param.indexOf("=") + 1, param.length());
					transformer.setParameter(paramname, paramvalue);
				}
			}
			transformer.transform(new javax.xml.transform.stream.StreamSource(s1), new javax.xml.transform.stream.StreamResult(myOutwriter));
		} catch (Exception e) {
			myLogger.severe("XSL-Transformation fehlgeschlagen: " + e.toString());
		}
	}
	/**
	 * Transforms the XML document referenced by {@code quellstring} (a file
	 * path/URL, fed to StreamSource) with the Saxon XSLT processor.
	 * Depending on {@code methode}: "pdf" renders via Apache FOP into
	 * {@code outfile}; "rtf" goes through a temporary "tmp.fo" file and the
	 * jfor Converter; "xls" is a no-op; anything else is a plain XSLT
	 * transformation into {@code outfile}.
	 *
	 * @param methode target format: "pdf", "rtf", "xls" or an XSLT output
	 *                method such as "xml"/"html"/"text"
	 */
	public void transformFile(String methode) throws TransformerException, Exception
	{
		javax.xml.transform.TransformerFactory tFactory = javax.xml.transform.TransformerFactory.newInstance("net.sf.saxon.TransformerFactoryImpl", null);
		javax.xml.transform.Transformer transformer = tFactory.newTransformer(new javax.xml.transform.stream.StreamSource(this.stylesheet));
		//StringReader s1 = new StringReader(quellstring);
		transformer.setOutputProperty(javax.xml.transform.OutputKeys.ENCODING, SqlStringUtils.getEncoding());
		// pdf/rtf are produced from intermediate XSL-FO, hence method "xml"
		if (methode.equals("pdf") || methode.equals("rtf"))
			transformer.setOutputProperty(javax.xml.transform.OutputKeys.METHOD, "xml");
		else
			transformer.setOutputProperty(javax.xml.transform.OutputKeys.METHOD, methode);
		transformer.setParameter("sx_client", format);
		transformer.setParameter("versionParam", "2.0");
		// String result = null;
		if (!params.endsWith(",")) params += ",";
		StringTokenizer st = new StringTokenizer(params, ",");
		for (; st.hasMoreTokens();) {
			String param = st.nextToken();
			if (!param.equals("")) {
				String paramname = param.substring(0, param.indexOf("="));
				String paramvalue = param.substring(param.indexOf("=") + 1, param.length());
				transformer.setParameter(paramname, paramvalue);
			}
		}
		if (methode.equals("pdf")) { // save parent logger level to restore it later
			Level logLevel = myLogger.getParent().getLevel();
			myLogger.getParent().setLevel(Level.SEVERE);
			// configure fopFactory as desired
			// current dir must be WEB-INF, or a fop.xconf was passed via -fopxconf beforehand
			FopFactory fopFactory ;
			FOUserAgent foUserAgent;
			// in fop.xconf fopFactory.setSourceResolution(96); // =96dpi (dots/pixels per Inch)
			if (fopxconfFile==null)
				fopxconfFile=new File ("conf" + File.separator + "fop.xconf");
			if (!fopxconfFile.exists())
			{
				String msg=fopxconfFile.getAbsolutePath()+" nicht gefunden";
				System.out.println(msg);
				throw new IllegalStateException(msg);
			}
			FopConfParser parser = new FopConfParser(fopxconfFile); //parsing configuration
			FopFactoryBuilder builder = parser.getFopFactoryBuilder(); //building the factory with the user options
			String fa=fopxconfFile.getCanonicalPath();
			// base URI is the grandparent dir of fop.xconf (e.g. the webapp root)
			URI uri=new File(fa).getParentFile().getParentFile().toURI();
			// System.out.println(uri);
			builder.setBaseURI(uri);
			builder.setStrictFOValidation(false);
			fopFactory = builder.build();
			foUserAgent = fopFactory.newFOUserAgent();
			try {
				myOutputStream = new java.io.FileOutputStream(outfile);
			} catch (FileNotFoundException e) {
				myLogger.severe("Ausgabedatei " + outfile + " kann nicht erzeugt werden:" + e.toString());
				System.exit(-1);
			}
			try {
				// Construct fop with desired output format
				Fop fop = fopFactory.newFop(MimeConstants.MIME_PDF, foUserAgent, myOutputStream);
				// Setup input for XSLT transformation
				Source src = new StreamSource(quellstring);
				// Resulting SAX events (the generated FO) must be piped through to FOP
				Result res = new SAXResult(fop.getDefaultHandler());
				// Start XSLT transformation and FOP processing
				try {
					transformer.transform(src, res);
					System.out.println("Success!");
				} catch (TransformerException ex) {
					throw new Exception(ex);
				}
			} catch (Exception e) {
				e.printStackTrace(System.err);
				myLogger.severe("FOP-Transformation Fehler: " + e.toString());
			} finally {
				myOutputStream.close();
				myLogger.getParent().setLevel(logLevel);
			}
		} else {
			if (methode.equals("rtf")) {
				String zielstring = "";
				myLogger.info("Ausgabedatei " + outfile + " vorbereiten");
				Writer myWriter = null;
				try {
					myWriter = new BufferedWriter(new FileWriter(outfile));
				} catch (IOException e) {
					e.printStackTrace();
				}
				try {
					myLogger.info("Transformiere nach fo ");
					// first XSLT to XSL-FO in a temp file, then jfor FO->RTF
					transformer.transform(new javax.xml.transform.stream.StreamSource(quellstring), new StreamResult("tmp.fo"));
					myLogger.info("Transformiere nach rtf ");
					new Converter(new InputSource("tmp.fo"), myWriter, Converter.createConverterOption());
				} catch (Exception e1) {
					e1.printStackTrace();
				}
			} else {
				if (methode.equals("xls")) {
					// xls: intentionally empty — no implementation here
				} else {
					// plain XSLT transformation (not fop or rtf)
					try {
						FileWriter myFile = new FileWriter(outfile);
						myOutwriter = new PrintWriter(myFile);
						transformer.transform(new javax.xml.transform.stream.StreamSource(quellstring), new javax.xml.transform.stream.StreamResult(myOutwriter));
					} catch (IOException e) {
						myLogger.severe("Datei " + outfile + " kann nicht erzeugt werden: " + e.toString());
					} catch (TransformerException e) {
						myLogger.severe("Datei " + outfile + " kann nicht transformiert werden: " + e.toString());
					}
					//new javax.xml.transform.stream.StreamResult( myOutwriter));
				}
			}
		}
	}
	/**
	 * Writes {@code quellstring} verbatim to {@code myOutwriter}.
	 */
	public void outputString() {
		BufferedWriter out = new BufferedWriter(myOutwriter);
		try {
			out.write(quellstring);
			out.flush();
		} catch (IOException e) {
			myLogger.severe("Fehler beim Ausgeben des Ergebnisses: " + e.toString());
		}
	}
	/**
	 * Like printResult, but builds and returns the formatted result set as a
	 * String instead of streaming it; labels are lower-cased in xml mode and
	 * field values are trimmed. Closes {@code outrs} when done.
	 *
	 * @param outFormat output format key understood by assignDelims
	 * @return the formatted result set
	 */
	public String getResult(String outFormat) throws SQLException {
		//loglevel=myLogger.getRootLogger().getLevel().toString().trim();
		assignDelims(outFormat);
		myLogger.config("Resultset wird formatiert");
		String f_wert = "";
		//ResultSetMetaData rsmd = rs.getMetaData();
		int cols = outrsmd.getColumnCount();
		// NOTE(review): only 255 label slots here vs. 1255 in printResult —
		// result sets with more than 254 columns would overflow; confirm.
		String[] lbl = new String[255];
		String zs = rs_delim1;
		String headZs = header_delim1;
		;
		// Print the result column names?
		for (int i = 1; i <= cols; i++) {
			lbl[i] = outrsmd.getColumnLabel(i);
			if (outFormat.equals("xml")) {
				fld_delim1 = "<fld name='" + lbl[i].toLowerCase() + "'>";
				fld_delim2 = "</fld>";
			} else {
				if (i < cols)
					headZs += (head_delim1 + lbl[i] + head_delim2);
				else {
					headZs += (head_delim1 + lbl[i] + head_delim2);
					headZs += (header_delim2);
					//out.newLine();
				}
			}
		}
		if (header.equals("true")) zs += headZs;
		//now the results
		try {
			while (outrs.next())
			{
				zs += (row_delim1);
				for (int i = 1; i <= cols; i++) {
					Object o = outrs.getObject(i);
					f_wert = SxDBUtils.field_value(o);
					if (outFormat.equals("xml")) {
						fld_delim1 = "<fld name='" + lbl[i].toLowerCase() + "'>";
						fld_delim2 = "</fld>\n";
					}
					if (outrs.wasNull())
						if (i < cols)
							zs += (fld_delim1 + "" + fld_delim2);
						else {
							zs += (fld_delim1 + "" + fld_delim2);
							zs += (row_delim2);
							//out.newLine();
						}
					else if (i < cols)
						zs += (fld_delim1 + f_wert.trim() + fld_delim2);
					else {
						zs += (fld_delim1 + f_wert.trim() + fld_delim2);
						zs += (row_delim2);
						// out.newLine();
					}
				}
			} // end of while
			zs += (rs_delim2);
		} catch (Exception e) {
			myLogger.severe("Fehler beim Ausgeben der Feldwerte: " + e.toString());
		}
		outrs.close();
		return zs;
	}
	/**
	 * (Re)assigns all delimiter fields for the given format. "html" wraps
	 * the result in a table, "xml" in &lt;rs&gt;/&lt;row&gt; elements;
	 * any other value leaves the current (txt) delimiters untouched.
	 */
	private void assignDelims(String outFormat) {
		if (outFormat.equals("html")) {
			outHeader = "<html><body>";
			outFooter = "</body></html>";
			rs_delim1 = "<table border='1'>\n";
			rs_delim2 = "</table>";
			head_delim1 = "<th>";
			head_delim2 = "</th>";
			header_delim1 = "<tr>";
			header_delim2 = "</tr>\n";
			row_delim1 = "<tr>";
			row_delim2 = "</tr>";
			fld_delim1 = "<td>";
			fld_delim2 = "</td>";
		}
		if (outFormat.equals("xml")) {
			outHeader = "<?xml version='1.0' encoding='" + SqlStringUtils.getEncoding() + "'?>";
			outFooter = "";
			rs_delim1 = "<rs>\n";
			rs_delim2 = "</rs>";
			head_delim1 = "";
			head_delim2 = "";
			header_delim1 = "";
			header_delim2 = "";
			row_delim1 = "<row>\n";
			row_delim2 = "</row>\n";
			fld_delim1 = "";
			fld_delim2 = "";
		}
	}
	/**
	 * @return the generic field delimiter
	 */
	public String getDelim() {
		return delim;
	}
	/**
	 * @return the string written before each field value
	 */
	public String getFld_delim1() {
		return fld_delim1;
	}
	/**
	 * @return the string written after each field value
	 */
	public String getFld_delim2() {
		return fld_delim2;
	}
	/**
	 * @return the client format passed as sx_client to the stylesheets
	 */
	public String getFormat() {
		return format;
	}
	/**
	 * @return the string written before each column label
	 */
	public String getHead_delim1() {
		return head_delim1;
	}
	/**
	 * @return the string written after each column label
	 */
	public String getHead_delim2() {
		return head_delim2;
	}
	/**
	 * @return "true" if column labels are emitted, otherwise "false"
	 */
	public String getHeader() {
		return header;
	}
	/**
	 * @return the string written before the header row
	 */
	public String getHeader_delim1() {
		return header_delim1;
	}
	/**
	 * @return the string written after the header row
	 */
	public String getHeader_delim2() {
		return header_delim2;
	}
	/**
	 * @return the result set to be formatted
	 */
	public ResultSet getOutrs() {
		return outrs;
	}
	/**
	 * @return the metadata of the result set to be formatted
	 */
	public ResultSetMetaData getOutrsmd() {
		return outrsmd;
	}
	/**
	 * @return the string written before each row
	 */
	public String getRow_delim1() {
		return row_delim1;
	}
	/**
	 * @return the string written after each row
	 */
	public String getRow_delim2() {
		return row_delim2;
	}
	/**
	 * @return the string written before the whole result set
	 */
	public String getRs_delim1() {
		return rs_delim1;
	}
	/**
	 * @return the string written after the whole result set
	 */
	public String getRs_delim2() {
		return rs_delim2;
	}
	/**
	 * @return the configured chain of XSL stylesheets
	 */
	public String[] getXslPipeline() {
		return XslPipeline;
	}
	/**
	 * Sets the generic field delimiter; also resets fld_delim1 and uses the
	 * value as field and head suffix delimiter.
	 */
	public void setDelim(String string) {
		delim = string;
		fld_delim1 = "";
		fld_delim2 = string;
		head_delim2 = string;
	}
	/**
	 * Sets the string written before each field value.
	 */
	public void setFld_delim1(String string) {
		fld_delim1 = string;
	}
	/**
	 * Sets the string written after each field value.
	 */
	public void setFld_delim2(String string) {
		fld_delim2 = string;
	}
	/**
	 * Sets the client format passed as sx_client to the stylesheets.
	 */
	public void setFormat(String string) {
		format = string;
	}
	/**
	 * Sets the string written before each column label.
	 */
	public void setHead_delim1(String string) {
		head_delim1 = string;
	}
	/**
	 * Sets the string written after each column label.
	 */
	public void setHead_delim2(String string) {
		head_delim2 = string;
	}
	/**
	 * Enables ("true") or disables column label output.
	 */
	public void setHeader(String string) {
		header = string;
	}
	/**
	 * Sets the string written before the header row.
	 */
	public void setHeader_delim1(String string) {
		header_delim1 = string;
	}
	/**
	 * Sets the string written after the header row.
	 */
	public void setHeader_delim2(String string) {
		header_delim2 = string;
	}
	/**
	 * Sets the result set to be formatted.
	 */
	public void setOutrs(ResultSet set) {
		outrs = set;
	}
	/**
	 * Sets the metadata of the result set to be formatted.
	 */
	public void setOutrsmd(ResultSetMetaData data) {
		outrsmd = data;
	}
	/**
	 * Sets the string written before each row.
	 */
	public void setRow_delim1(String string) {
		row_delim1 = string;
	}
	/**
	 * Sets the string written after each row.
	 */
	public void setRow_delim2(String string) {
		row_delim2 = string;
	}
	/**
	 * Sets the string written before the whole result set.
	 */
	public void setRs_delim1(String string) {
		rs_delim1 = string;
	}
	/**
	 * Sets the string written after the whole result set.
	 */
	public void setRs_delim2(String string) {
		rs_delim2 = string;
	}
	/**
	 * Sets the chain of XSL stylesheets.
	 */
	public void setXslPipeline(String[] strings) {
		XslPipeline = strings;
	}
	/**
	 * @return the footer written after the result set
	 */
	public String getOutFooter() {
		return outFooter;
	}
	/**
	 * @return the header written before the result set
	 */
	public String getOutHeader() {
		return outHeader;
	}
	/**
	 * Sets the footer written after the result set.
	 */
	public void setOutFooter(String string) {
		outFooter = string;
	}
	/**
	 * Sets the header written before the result set.
	 */
	public void setOutHeader(String string) {
		outHeader = string;
	}
	/**
	 * @return the comma separated name=value stylesheet parameters
	 */
	public String getParams() {
		return params;
	}
	/**
	 * Sets the comma separated name=value stylesheet parameters.
	 */
	public void setParams(String string) {
		params = string;
	}
	//-- for manual testing only (hard-coded developer paths) --//
	public static void main(String[] args) throws Exception {
		Logger log = Logger.getLogger(SxTransformer.class.getName());
		String rootDir = "/home/superx/tmp/";
		String quelle = rootDir + "Buchungsbericht_ids_Test.xml";
		String zielFile = rootDir + "testneu.pdf";
		String sxlSheet = "/home/superx/dev/edustore/webserver/tomcat/webapps/superx/xml/tabelle_fo_pdf.xsl";
		SxTransformer sxTrans = new SxTransformer(log, zielFile);
		sxTrans.quellstring = quelle;
		sxTrans.outfile = zielFile;
		sxTrans.stylesheet = sxlSheet;
		sxTrans.transformFile("pdf");
	}// end of method
	/**
	 * URIResolver that resolves hrefs as classpath resources; returns null
	 * (resolver chain continues) when the resource is not found.
	 */
	public class ClasspathUriResolver implements URIResolver {
		public Source resolve(String href, String base) throws TransformerException {
			Source source = null;
			InputStream inputStream = ClassLoader.getSystemResourceAsStream(href);
			if (inputStream != null) {
				source = new StreamSource(inputStream);
			}
			return source;
		}
	}
}// end of class

214
src/de/superx/etl/bin/UnloadRecords.java

@ -0,0 +1,214 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.bin;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.StringWriter;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.logging.LogManager;
import java.util.logging.Logger;
import java.util.StringTokenizer;
import de.superx.util.SqlStringUtils;
import de.superx.bin.SxConnection;
import de.superx.common.SuperX_el;
import de.superx.etl.QueryResultSerializer;
import de.superx.servlet.SxPools;
import de.superx.servlet.SxSQL_Server;
/**
* @author Daniel Quathamer Projektgruppe SuperX
 * UnloadRecords.java
* @
* Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.<br>
* Gebrauch:<br> java doquery <Pfad zu logger-properties> <pfad zu db.properties> <sql-Ausdruck> <Ausgabeformat (txt | html | xml)>(optional) <delimiter> <mit Spaltenüberschriften (true | false)>(optional) <Ausgabedatei>(optional)
*
*/
//Änderungen
/*
* 16.4.04 Wenn kein outfile angegeben, werden alle select results auf die Console geloggt
* 19.1.2006 dq: Unload großer Tabellen unter Postgres ermöglicht.
*/
/**
 * Command line tool that executes one SQL statement against the database
 * configured in db.properties and serializes the result set either to stdout
 * or to a file (txt, html or xml format).
 */
public class UnloadRecords {
	private static Connection myDb;
	private static Statement st; // Our statement to run queries with
	private static DatabaseMetaData dbmd; // This defines the structure of the database
	private static boolean done = false; // Added by CWJ to permit \q command
	private static String delim = "^";
	private static String header = "false";
	private static String outFormat = "txt";
	private static String logfile = "../conf/logging.properties";
	private static String tabelle = "";
	private static String dbpropfile = "../conf/db.properties";
	private static SxConnection myConnection = null;
	private static String db_driver;
	private static String mandantenID = "default";
	private static String outfile = "";
	// Bugfix: the logger was registered under Doquery.class (copy/paste
	// leftover); register it under this class so log configuration applies.
	private static Logger logger = Logger.getLogger(UnloadRecords.class.toString());
	private static String usage =
		"-------------------------------------\nGebrauch: java de.superx.bin.UnloadRecords $LOGGER_PROPERTIES $DB_PROPERTIES $sql (optional: )$outformat $DBDELIMITER $header $filename \n---------------------------------------------------";

	/**
	 * Runs the unload.
	 *
	 * @param args [0] logging.properties path, [1] db.properties path,
	 *             [2] SQL statement, optional: [3] output format (txt|html|xml),
	 *             [4] delimiter, [5] "true" to print column headers,
	 *             [6] output file (stdout when absent)
	 * @return return code of the serializer (0 = ok, 1 = error)
	 * @throws Exception when the logging configuration cannot be read or the
	 *                   database connection fails
	 */
	public static int go(String args[]) throws Exception {
		if (args.length > 0) {
			logfile = args[0].trim();
		} else {
			throw new IllegalArgumentException("Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich");
		}
		File f = new File(logfile);
		if (!f.exists()) {
			throw new IOException("Datei nicht gefunden: " + logfile);
		}
		FileInputStream ins = new FileInputStream(logfile);
		LogManager MyLogManager = java.util.logging.LogManager.getLogManager();
		MyLogManager.readConfiguration(ins);
		logfile = MyLogManager.getProperty(".level");
		logger.info("Using Loggging-Level " + logfile);
		String query = "";
		long numberOfRows = 0;
		int returnCode = 0;
		if (args.length > 1) {
			dbpropfile = args[1].trim();
		} else {
			logger.severe(
				"Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich");
			System.exit(1);
		}
		if (args.length <= 2) {
			logger.severe(
				"Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich");
			System.exit(1);
		}
		query = args[2].trim();
		if (args.length > 3) {
			outFormat = args[3].trim();
		}
		if (args.length > 4) {
			delim = args[4].trim();
		}
		if (args.length > 5) {
			header = args[5].trim();
		}
		if (args.length > 6) {
			outfile = args[6].trim();
		}
		if (delim.equals(""))
			delim = "^"; //default Delimiter
		boolean printColname = header.equalsIgnoreCase("true");
		FileWriter fw;
		StringWriter sw;
		long jetzt = new java.util.Date().getTime();
		getConnection(logger, dbpropfile);
		QueryResultSerializer myQueryResultSerializer = null;
		if (!outfile.equals("")) {
			try {
				fw = new FileWriter(outfile);
				// File output: encoding of the XML prolog follows the DB encoding.
				if (outFormat.equalsIgnoreCase("xml"))
					fw.write("<?xml version=\"1.0\" encoding=\"" + SqlStringUtils.getEncoding() + "\" ?>");
				myQueryResultSerializer = new QueryResultSerializer("default", myConnection, query, fw);
			} catch (IOException e) {
				logger.severe("Fehler beim Erstellen der Datei " + outfile);
				returnCode = 1;
			}
		} else {
			// stdout output is buffered in a StringWriter first.
			sw = new StringWriter();
			if (outFormat.equalsIgnoreCase("xml"))
				sw.write("<?xml version=\"1.0\" encoding=\"UTF-8\" ?>");
			myQueryResultSerializer = new QueryResultSerializer("default", myConnection, query, sw);
		}
		// Bugfix: when the output file could not be created the serializer stayed
		// null and the next call threw a NullPointerException; abort with the
		// error code instead.
		if (myQueryResultSerializer == null)
			return returnCode;
		numberOfRows = myQueryResultSerializer.unloadQueryResults("DOQUERY", outFormat, delim, printColname);
		returnCode = myQueryResultSerializer.getReturnCode();
		if (outfile.equals("")) {
			sw = myQueryResultSerializer.getOutputString();
			System.out.println(sw.toString());
		}
		long erstrecht = new java.util.Date().getTime();
		System.out.println(numberOfRows + " rows unloaded in " + (erstrecht - jetzt) / 1000 + " Sec.");
		System.out.println("returnCode= " + returnCode);
		myQueryResultSerializer = null;
		return returnCode;
	}

	/**
	 * Prints usage instructions and exits with code 1.
	 */
	public static void instructions() {
		System.out.println("SuperX @version@\n");
		System.out.println(
			"\nDieses Java-Programm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.\n");
		System.out.println(
			"Gebrauch:\n java doquery <Pfad zu logger-properties> <pfad zu db.properties> <sql-Ausdruck> <Ausgabeformat (txt | html | xml)>(optional) <delimiter> <mit Spaltenüberschriften (true | false)>(optional) <Ausgabedatei>(optional)\n");
		System.exit(1);
	}

	/**
	 * Command line entry point; exits with the return code of {@link #go}.
	 */
	public static void main(String args[]) {
		int returnCode = 0;
		try {
			returnCode = go(args);
		} catch (Exception ex) {
			System.err.println("Doquery Aufruf fehlgeschlagen.\n" + ex);
			ex.printStackTrace();
			System.exit(1);
		}
		System.out.println("Doquery Aufruf ausgeführt.\n");
		System.exit(returnCode);
	}

	/**
	 * Opens the database connection defined in the given property file and
	 * initializes the statement and metadata fields.
	 *
	 * @param logger   logger for progress/error messages
	 * @param propFile path to db.properties
	 * @throws SQLException when no connection can be established
	 */
	public static void getConnection(Logger logger, String propFile) throws SQLException {
		myConnection = new SxConnection();
		myConnection.setPropfile(propFile);
		logger.config("Starting Connection...");
		try {
			myDb = myConnection.getConnection();
			st = myDb.createStatement();
			dbmd = myDb.getMetaData();
		} catch (Exception e) {
			e.printStackTrace();
			logger.severe("Keine DB-Verbindung: " + e.toString());
			throw new SQLException("Keine DB-Verbindung: " + e.toString());
		}
		db_driver = myConnection.m_DriverClass;
	}
}

94
src/de/superx/etl/bin/UploadRecords.java

@ -0,0 +1,94 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.etl.bin;
import de.superx.etl.util.GetOpts;
import de.superx.etl.TableUploader;
/*
* @author Daniel Quathamer Projektgruppe SuperX
* upload_records.java
* Dieses Javaprogramm lädt Inhalte einer Datei in eine Tabelle hoch")
* DQ 5.1.2006 Upload vom XML-Dateien möglich
*
*/
/**
 * Command line tool that loads the contents of a file into a database table
 * via {@link TableUploader}. Options are parsed with {@link GetOpts}.
 */
public class UploadRecords {
	private static String usage =
		"-------------------------------------\n"
			+ "Gebrauch: java de.superx.bin.UploadRecords \n-dbproperties:<Pfad zu db.properties> \n"
			+ "-table:<Tabellenname> \n-unl:<Dateipfad Quelldatei>(optional, default ist Tabellenname.unl) \n-delim:<delimiter>(optional, default ist ^) \n-header:<true|false>(optional, mit Feldüberschriften, default ist false)\n"
			+ "-mode:<stop|exclude-row>(optional, default is stop) #Bei Fehlerhaften Daten kann das Hochladen gestoppt werden, oder der Datensatz wird übersprungen"
			+ "\n-inserts:<false|simple|batch>(optional, default is false) #Bei -inserts:simple und batch werden Die Rohdaten in Insert-sql-Statements übersetzt (nur für Debugging-Zwecke, sehr langsam. Der Modus exclude-field ist darüberhinaus nicht anwendbar)"
			+ "\n-encoding:<utf8,ISO-8859-1, default ist System.file.encoding>"
			+ "\n---------------------------------------------------";

	/**
	 * Entry point: parses the command line options, configures a
	 * TableUploader and uploads the source file. Exits with code 1 on any
	 * error, otherwise prints the number of lines loaded and the uploader's
	 * return code.
	 */
	public static void main(String args[]) {
		try {
			GetOpts.setOpts(args);
			// Bugfix: -unl is documented as optional (a default of
			// "<table>.unl" is applied below), so it must not be part of the
			// required options; previously the fallback was unreachable.
			String isdrin =
				GetOpts.isAllRequiredOptionsPresent("-dbproperties,-table");
			if (isdrin != null) {
				System.err.println("Folgende Optionen fehlen: " + isdrin);
				System.err.println(usage);
				System.exit(1);
			}
			TableUploader myUploader = new TableUploader();
			if (GetOpts.isPresent("-dbproperties"))
				myUploader.setDbpropfile(GetOpts.getValue("-dbproperties"));
			if (GetOpts.isPresent("-informat"))
				myUploader.setInFormat(GetOpts.getValue("-informat"));
			if (GetOpts.isPresent("-table"))
				myUploader.setTargetTable(GetOpts.getValue("-table"));
			if (GetOpts.isPresent("-unl"))
				myUploader.setSrcFile(GetOpts.getValue("-unl"));
			else
				myUploader.setSrcFile(myUploader.getTargetTable() + ".unl");
			if (GetOpts.isPresent("-header"))
				myUploader.setHeader(GetOpts.getValue("-header").equalsIgnoreCase("true"));
			if (GetOpts.isPresent("-delim"))
				myUploader.setDelim(GetOpts.getValue("-delim"));
			if (GetOpts.isPresent("-encoding")) {
				String encodingParam = GetOpts.getValue("-encoding");
				if (encodingParam != null && !encodingParam.equals(""))
					myUploader.setEncoding(encodingParam);
			} else
				myUploader.setEncoding(System.getProperty("file.encoding"));
			if (GetOpts.isPresent("-mode"))
				myUploader.setMode(GetOpts.getValue("-mode").toLowerCase());
			if (GetOpts.isPresent("-inserts"))
				myUploader.setInserts(GetOpts.getValue("-inserts"));
			long numberOfRows = 0;
			myUploader.setUploadConnection(myUploader.getConnection(null, myUploader.getDbpropfile()));
			numberOfRows = myUploader.uploadFile();
			System.out.println(numberOfRows + " lines loaded");
			System.out.println("File " + myUploader.getSrcFile() + " uploaded, returnCode=" + myUploader.getReturnCode());
			myUploader = null;
		} catch (Exception ex) {
			System.err.println("Upload fehlgeschlagen: " + ex);
			System.exit(1);
		}
	}
}

155
src/de/superx/etl/util/GetOpts.java

@ -0,0 +1,155 @@
package de.superx.etl.util;
import java.util.StringTokenizer;
/**
* Original von de.memtext.util.GetOpts,
* für Abwärtskompatibilität mit Kern 4.9/BI 2021.06
*
*/
/**
 * Static helper for parsing command line options of the form
 * {@code -name:value}. Copied from de.memtext.util.GetOpts for backward
 * compatibility with Kern 4.9 / BI 2021.06.
 */
public class GetOpts {
	private static String[] arguments = null;

	/** Utility class; do not instantiate. */
	private GetOpts() {
	}

	/**
	 * Stores the arguments so the short forms of the methods (e.g.
	 * {@link #isPresent(String)}) can be used without passing them again.
	 *
	 * @param args the command line arguments to analyse
	 */
	public static void setOpts(String args[]) {
		arguments = args;
	}

	/**
	 * Checks whether the arguments passed before via setOpts contain the
	 * given option (i.e. one argument that starts with the option string).
	 *
	 * @param option option name, e.g. "-table"
	 * @return true when present
	 */
	public static boolean isPresent(String option) {
		if (arguments == null)
			throw new IllegalStateException(
				"must either use setOpts before or call the long version of this method");
		return isPresent(arguments, option);
	}

	/**
	 * Checks whether the given arguments contain the option (one argument
	 * starting with the option string).
	 *
	 * @param args   argument array
	 * @param option option name
	 * @return true when present
	 */
	public static boolean isPresent(String args[], String option) {
		boolean result = false;
		for (int i = 0; i < args.length; i++)
			if (args[i] != null && args[i].startsWith(option))
				result = true;
		return result;
	}

	/**
	 * Returns the full argument for a named option from the arguments passed
	 * before with setOpts, e.g. getOpt("--log") returns "--log:true".
	 *
	 * @param option name/start of the option
	 * @return the whole matching argument
	 */
	public static String getOpt(String option) {
		if (arguments == null)
			throw new IllegalStateException(
				"must either use setOpts before or call the long version of this method");
		return getOpt(option, arguments);
	}

	/**
	 * Returns the full argument for a named option; when the option occurs
	 * several times the last occurrence wins.
	 *
	 * @param option name/start of the option
	 * @param args   argument array
	 * @return the whole matching argument
	 * @throws RuntimeException when the option is not present
	 */
	public static String getOpt(String option, String args[]) {
		if (args == null)
			throw new IllegalStateException("args must not be null");
		String result = null;
		for (int i = 0; i < args.length; i++)
			// Bugfix: skip null entries, consistent with isPresent().
			if (args[i] != null && args[i].startsWith(option))
				result = args[i];
		if (result == null)
			throw new RuntimeException("Option " + option + " not found!");
		return result;
	}

	/**
	 * Returns the value of an option from the arguments passed before with
	 * setOpts; getValue("--log") on "--log:true" returns "true".
	 *
	 * @param option option name
	 * @return the option's value, "" when the option has no value
	 */
	public static String getValue(String option) {
		if (arguments == null)
			throw new IllegalStateException(
				"must either use setOpts before or call the long version of this method");
		return getValue(option, arguments);
	}

	/**
	 * Returns the value of an option; getValue("--log", args) on "--log:true"
	 * returns "true".
	 *
	 * @param option option name
	 * @param args   argument array
	 * @return the option's value, "" when the option has no value
	 */
	public static String getValue(String option, String args[]) {
		String raw = getOpt(option, args);
		int pos = option.length();
		// Bugfix: guard the index before charAt(); a value-less option such as
		// "-flag" previously caused a StringIndexOutOfBoundsException.
		if (pos < raw.length() && raw.charAt(pos) == ':')
			pos++;
		return raw.substring(pos);
	}

	/**
	 * Checks that all required options are present, e.g. for
	 * "-loggingProperties,-dbProperties" both options must be set.
	 *
	 * @param options comma separated option names
	 * @return null when all are present, otherwise a comma separated list of
	 *         the missing options
	 */
	public static String isAllRequiredOptionsPresent(String options) {
		String result = null;
		StringTokenizer st = new StringTokenizer(options, ",");
		while (st.hasMoreTokens()) {
			String optionName = st.nextToken();
			if (!isPresent(optionName)) {
				if (result == null)
					result = optionName;
				else
					result += "," + optionName;
			}
		}
		return result;
	}
}

47
src/de/superx/sxrest/JobExecutor.java

@ -0,0 +1,47 @@
package de.superx.sxrest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Request;
import java.sql.SQLException;
import javax.servlet.http.HttpServletRequest;
import de.memtext.util.DateUtils;
import de.superx.common.SxUser;
import de.superx.etl.EtlActionJob;
import de.superx.servlet.SuperXManager;
// REST endpoint that triggers the execution of a named ETL action job.
@Path("/execute")
public class JobExecutor {
@Context
Request request;
// Executes the ETL job named by the path parameter. Requires an admin user
// in the session; returns "Fehlende Rechte" otherwise. On success the job's
// output is not collected, so an empty string is returned; on failure the
// exception text is returned.
// NOTE(review): despite its name "printLogs", this method runs the job.
@GET
@Path("/{param}")
@Produces(MediaType.TEXT_PLAIN)
public String printLogs(@PathParam("param") String job, @Context HttpServletRequest request) {
String returntext = "";
// The method parameter shadows the injected field "request" above.
SxUser user = (SxUser) request.getSession().getAttribute("user");
if (user == null || !user.isAdmin()) {
returntext = "Fehlende Rechte";
} else {
// Job is constructed with two empty string arguments -- their meaning is
// not visible here; TODO confirm against EtlActionJob.
EtlActionJob myJob = new EtlActionJob(job, "", "");
try {
myJob.execute();
} catch (Exception e) {
e.printStackTrace();
returntext = "Fehler " + e;
}
}
return returntext;
}
}

81
src/de/superx/sxrest/LaunchUpgrade.java

@ -0,0 +1,81 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.sxrest;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Request;
import de.memtext.util.DateUtils;
import de.superx.common.SxUser;
import de.superx.etl.EtlActionJob;
import de.superx.etl.EtlUtils;
import de.superx.servlet.SuperXManager;
import javax.servlet.http.HttpServletRequest;
/**
 * REST endpoint that runs the "&lt;component&gt;_upgrade" ETL job for the
 * component named in the path parameter. Admin rights are required.
 */
@Path("/launchupgrade")
public class LaunchUpgrade {
	@Context
	Request request;

	/**
	 * Runs the upgrade job and returns a plain text protocol.
	 *
	 * @param componentName component to upgrade (job name is derived as
	 *                      "&lt;componentName&gt;_upgrade")
	 * @param request       servlet request carrying the session
	 * @return success or failure message plus the collected job log
	 */
	@GET
	@Path("/{param}")
	@Produces(MediaType.TEXT_PLAIN)
	public String printXml(@PathParam("param") String componentName, @Context HttpServletRequest request) {
		String jobOutput = "";
		String jobLog = "";
		int returnCode = 0;
		SxUser user = (SxUser) request.getSession().getAttribute("user");
		String mandantenid = (String) request.getSession().getAttribute("MandantenID");
		if (mandantenid == null || mandantenid.equals(""))
			mandantenid = "default";
		if (user == null || !user.isAdmin()) {
			jobLog += "Fehlende Rechte";
			returnCode = 1;
		} else {
			String job = componentName + "_upgrade";
			// Pass the component's installation path as <NAME>_PFAD parameter.
			String params = componentName.toUpperCase() + "_PFAD=" + SuperXManager.getWEB_INFPfad() + EtlUtils.PATHSEP + "conf" + EtlUtils.PATHSEP + "edustore" + EtlUtils.PATHSEP + "db" + EtlUtils.PATHSEP + "module" + EtlUtils.PATHSEP + componentName;
			EtlActionJob myJob = new EtlActionJob(job); //TODO:Mandantid
			boolean initOk = true;
			try {
				myJob.initJob(job, params);
			} catch (Exception e) {
				initOk = false;
				returnCode = 1;
				jobLog += myJob.getActionLog().toString();
				jobLog += e.toString();
				jobLog += myJob.getLogoutput();
			}
			// Bugfix: previously the job was executed even when initJob() had
			// failed, and the error return code was overwritten by execute().
			if (initOk) {
				try {
					returnCode = myJob.execute("");
					jobOutput += myJob.getSw().toString();
					jobLog += myJob.getActionLog().toString();
				} catch (Exception e) {
					returnCode = 1;
					jobLog += myJob.getActionLog().toString();
					jobLog += e.toString();
					jobLog += myJob.getLogoutput();
				}
			}
		}
		if (returnCode == 0)
			return "Upgrade erfolgreich: " + EtlUtils.NEWLINE + jobLog;
		else
			return "Upgrade mit Fehler beendet: " + EtlUtils.NEWLINE + jobLog;
	}
}

183
src/de/superx/sxrest/MaskXml.java

@ -0,0 +1,183 @@
/*
* de.superx.etl - a package for controlling ETL routines
* Copyright (C) 2021 Daniel Quathamer <danielq@memtext.de>
*
* This package is licensed under the CampusSource License;
* http://www.campussource.de/org/license/
*/
package de.superx.sxrest;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.FormParam;
import javax.ws.rs.GET;
import javax.ws.rs.HEAD;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.core.Response;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Request;
import de.memtext.util.DateUtils;
import de.superx.common.SxUser;
import de.superx.etl.EtlActionJob;
import de.superx.etl.EtlUtils;
import de.superx.servlet.SuperXManager;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
/**
 * REST endpoint for exporting and importing the XML source of a mask via the
 * ETL jobs "sx_select_mask" and "sx_insert_mask". All operations require an
 * admin user in the session.
 */
@Path("/maskxml")
public class MaskXml {
	@Context
	Request request;

	/**
	 * Exports the XML source of the mask with the given tid by running the
	 * "sx_select_mask" job.
	 *
	 * @param tid     mask number
	 * @param request servlet request carrying the session
	 * @return the mask XML, or an error/permission message
	 */
	@GET
	@Path("/get/{param}")
	@Produces(MediaType.TEXT_PLAIN)
	public String printXml(@PathParam("param") String tid, @Context HttpServletRequest request) {
		String returntext = "";
		SxUser user = (SxUser) request.getSession().getAttribute("user");
		String mandantenid = (String) request.getSession().getAttribute("MandantenID");
		if (mandantenid == null || mandantenid.equals(""))
			mandantenid = "default";
		if (user == null || !user.isAdmin()) {
			returntext = "Fehlende Rechte";
		} else {
			String job = "sx_select_mask";
			String params = "TID=" + tid;
			int returnCode = 0;
			EtlActionJob myJob = new EtlActionJob(job); //TODO:Mandantid
			boolean initOk = true;
			try {
				myJob.initJob(job, params);
			} catch (Exception e) {
				initOk = false;
				returntext = e.toString();
				returntext += myJob.getLogoutput();
			}
			// Bugfix: only execute when initialization succeeded; previously a
			// failed init was followed by execute(), discarding the init error.
			if (initOk) {
				System.out.println("Job " + job + " initialized");
				try {
					returnCode = myJob.execute("");
					returntext = myJob.getSw().toString();
				} catch (Exception e) {
					returntext = e.toString();
					returntext += myJob.getLogoutput();
				}
			}
		}
		return returntext;
	}

	/**
	 * Debug/echo variant of the save operation; only returns the received
	 * parameters.
	 * NOTE(review): the path template declares "{params}" but the values are
	 * read from query parameters -- confirm the intended contract.
	 */
	@GET
	@Path("/saveget/{params}")
	@Produces(MediaType.TEXT_PLAIN)
	public String saveGetXml(@QueryParam("param") String tid,
			@QueryParam("Inhalt") String src,
			@Context HttpServletRequest request) {
		String returntext = "";
		SxUser user = (SxUser) request.getSession().getAttribute("user");
		String mandantenid = (String) request.getSession().getAttribute("MandantenID");
		if (mandantenid == null || mandantenid.equals(""))
			mandantenid = "default";
		if (user == null || !user.isAdmin()) {
			returntext = "Fehlende Rechte";
		} else {
			returntext = "tid=" + tid + "-src=" + src;
		}
		return returntext;
	}

	/**
	 * Imports a mask: verifies that the submitted XML contains the given tid,
	 * writes the XML to a temp file and runs the "sx_insert_mask" job on it.
	 *
	 * @param tid     mask number the XML must belong to
	 * @param src     the mask XML source
	 * @param request servlet request carrying the session
	 * @return job output or an error/permission message
	 */
	@POST
	@Path("/save")
	@Consumes("application/x-www-form-urlencoded")
	@Produces(MediaType.TEXT_PLAIN)
	public String saveXml(@FormParam("tid") String tid,
			@FormParam("Inhalt") String src,
			@Context HttpServletRequest request) {
		String returntext = "";
		int returnCode = 0;
		// Bugfix: removed the debug leftover 'tid="16000";' which overwrote the
		// submitted form parameter and saved every mask under tid 16000.
		SxUser user = (SxUser) request.getSession().getAttribute("user");
		String mandantenid = (String) request.getSession().getAttribute("MandantenID");
		String searchString = "/etlAction[@name=\"sx_select_mask\"]/unload[@name=\"unload_maskeninfo\"]/rs/row/fld[@name=\"tid\"]";
		if (mandantenid == null || mandantenid.equals(""))
			mandantenid = "default";
		if (user == null || !user.isAdmin()) {
			returntext = "Fehlende Rechte";
		} else {
			try {
				if (de.superx.etl.EtlUtils.isNodeValueInXml(src, searchString, tid)) {
					String job = "sx_insert_mask";
					returntext = "TID=" + tid;
					String params = "TID=" + tid;
					File temp;
					temp = File.createTempFile("myTempFile", ".xml");
					BufferedWriter bw = new BufferedWriter(new FileWriter(temp));
					bw.write(src);
					bw.close();
					params += EtlUtils.NEWLINE + "PATH_TO_INPUTFILE=" + temp.getAbsolutePath();
					params += EtlUtils.NEWLINE + "FORMAT=XML";
					EtlActionJob myJob = new EtlActionJob(job); //TODO:Mandantid
					myJob.initJob(job, params);
					returnCode = myJob.execute("");
					returntext = myJob.getSw().toString();
				} else
					returntext = "Übergebene TID " + tid + " entspricht nicht der XML-Datei ";
			} catch (Exception e) {
				returntext = e.toString();
				e.printStackTrace();
			}
		}
		return returntext;
	}
}

319
superx/edit/etl/etl_manager.jsp

@ -0,0 +1,319 @@
<%@ taglib uri="/WEB-INF/dbforms.tld" prefix="db" %>
<%@page pageEncoding="utf-8" contentType="text/html; charset=UTF-8" %>
<%@ page import ="de.superx.servlet.ServletUtils" %>
<%@ page import ="de.superx.servlet.SxPools" %>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>ETL Manager</title>
<link rel="stylesheet" href="../../style/bulma.css" />
<script type="text/javascript" src="/superx/xml/js/memtext/sx_functions.js"></script>
</head>
<script type="text/javascript">
// Fetches the application version from the REST endpoint and displays it in
// the element with id "version".
function getVersion()
{
	var versionRequest = new XMLHttpRequest();
	versionRequest.onreadystatechange = function() {
		var finished = (this.readyState == 4 && this.status == 200);
		if (finished) {
			document.getElementById("version").innerHTML = this.responseText;
		}
	};
	versionRequest.open("GET", "../../sxrest/version", true);
	versionRequest.send();
}
// Switches the mask management panel between export ("load") and import
// ("save") mode by rewriting the #MaskHeader / #MaskSubHeader elements.
function getMaskMenu(srctype)
{
// Export mode: a TID input plus a button that calls getMaskSrc().
if(srctype=="load")
{
document.getElementById("MaskHeader").innerHTML="Masken-Quellcode exportieren";
document.getElementById("MaskSubHeader").innerHTML="TID = <input type='text' size='10' id='Maskentid' />"+" <button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Quellcode der Maske mit der Nummer im Feld TID exportieren\" onclick=\"getMaskSrc(document.getElementById('Maskentid').value)\">exportieren...</button>";
}
// Import mode: a form posting to saveMask.jsp; saveMaskSrc() fills the
// hidden "src" field before submitting.
if(srctype=="save")
{
document.getElementById("MaskHeader").innerHTML="Masken-Quellcode importieren";
document.getElementById("MaskSubHeader").innerHTML="<form method=\"post\" name=\"saveMask\" action=\"../../edit/etl/saveMask.jsp\" target=\"_blank\"> TID = <input type='text' size='10' id='Maskentid' name='maskeninfo_id' /><input type='hidden' name='src' /><button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Quellcode der Maske mit der Nummer im Feld TID importieren\" onclick=\"saveMaskSrc(document.getElementById('Maskentid').value)\">importieren...</button></form>";
//document.getElementById("maskentid_for_upload").style.visibility="visible";
//document.getElementById("button_for_upload").style.visibility="visible";
//document.getElementById("MaskSubHeader").innerHTML="Maske TID="+tid;
//document.getElementById("MaskSubHeader").innerHTML="TID=<input type='text' size='10' id='Maskentid' />"+" <button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Quellcode der Maske mit der Nummer im Feld TID exportieren\" onclick=\"saveMaskSrc(document.getElementById('Maskentid').value)\">importieren...</button>";
}
}
// Loads the XML source of the mask with the given tid from the REST service
// and puts it into the #Inhalt textarea.
function getMaskSrc(tid)
{
	var maskRequest = new XMLHttpRequest();
	maskRequest.onreadystatechange = function() {
		var finished = (this.readyState == 4 && this.status == 200);
		if (finished) {
			document.getElementById("Inhalt").innerHTML = this.responseText;
		}
	};
	maskRequest.open("GET", "../../sxrest/maskxml/get/"+tid, true);
	maskRequest.send();
}
// Copies the XML from the #Inhalt textarea into the hidden "src" field of the
// saveMask form and submits it; aborts with an alert when the TID field or
// the content is empty.
function saveMaskSrc(tid)
{
	var maskForm = document.forms["saveMask"];
	maskForm.elements["src"].value = document.getElementById("Inhalt").value;
	var srcEmpty = (maskForm.elements["src"].value == "");
	var tidEmpty = (maskForm.elements["maskeninfo_id"].value == "");
	if (srcEmpty || tidEmpty) {
		alert("Bitte setzen Sie eine Nummer in das Feld TID und einen passenden XML ins Feld Inhalt");
		return false;
	}
	maskForm.submit();
}
// Renders the action menu for one component: header with name/version and two
// forms (upgrade, update/HLR) that post to jobexecutor.jsp in a new tab.
function getComponentMenu(systeminfo_id, componentName,componentUniquename,componentVersion)
{
document.getElementById("MaskHeader").innerHTML="Komponente "+ componentName+"<br />Version: "+componentVersion;
// Upgrade form: hidden fields carry systeminfo_id, unique name and action.
var menuLines="<form method=\"post\" name=\"execUpgrade\" action=\"../../edit/etl/jobexecutor.jsp\" target=\"_blank\"><input type='hidden' size='10' id='tid' name='systeminfo_id' value='"+systeminfo_id+"'/><input type='hidden' size='10' id='tid' name='componentUniquename' value='"+componentUniquename+"'/><input type='hidden' size='10' id='tid' name='componentAction' value='upgrade'/><button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Upgrade starten\" >Upgrade...</button></form>";
//"<p><button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Upgrade starten\" onclick=\"launchUpgrade('"+componentName+"')\">Upgrade starten</button></p>";
// Update (HLR) form: same fields with componentAction=update.
menuLines+="<form method=\"post\" name=\"execUpdate\" action=\"../../edit/etl/jobexecutor.jsp\" target=\"_blank\"><input type='hidden' size='10' id='tid' name='systeminfo_id' value='"+systeminfo_id+"'/><input type='hidden' size='10' id='tid' name='componentUniquename' value='"+componentUniquename+"'/><input type='hidden' size='10' id='tid' name='componentAction' value='update'/><button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Update starten\" >HLR starten</button></form>";
//<p><button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Update starten\" onclick=\"launchUpdate('"+componentName+"')\">HLR starten</button></p>";
document.getElementById("MaskSubHeader").innerHTML=menuLines;
//document.getElementById("MaskSubHeader").innerHTML="<form method=\"post\" name=\"saveMask\" action=\"../../edit/etl/jobexecutor.jsp\" target=\"_blank\"> TID = <input type='text' size='10' id='Maskentid' name='maskeninfo_id' /><input type='hidden' name='src' /><button class=\"button has-tooltip-multiline has-tooltip-right\" data-tooltip=\"Quellcode der Maske mit der Nummer im Feld TID importieren\" onclick=\"saveMaskSrc(document.getElementById('Maskentid').value)\">importieren...</button></form>";
}
</script>
<body>
<%
//init Variables:
String EntwicklungsmodusAn="";
String EntwicklungsmodusAus="checked";
Object mandantobject=request.getSession().getAttribute("MandantenID");
String mandantenid="";
if(mandantobject != null)
mandantenid=mandantobject.toString();
//der superx-Default-Mandant ist bei dbforms ein Leerstring
if(mandantenid.equals("default"))
mandantenid="";
String tab = request.getParameter("tab");
if(tab == null)
tab="";
%>
<!-- START NAV -->
<nav class="navbar is-white">
<div class="container">
<div class="navbar-brand">
<a class="navbar-item" href="etl_manager.jsp">ETL Manager</a>
<div class="navbar-burger burger" data-target="navMenu">
<span>dd</span>
<span>dd</span>
<span>dd</span>
</div>
</div>
<a class="navbar-item" href="etl_manager.jsp?tab=mask">
Masken
</a>
<!--
<a class="navbar-item" href="etl_manager.jsp?tab=action">
Komponenten
</a>
-->
</div>
</nav>
<!-- END NAV -->
<div class="container">
<div class="columns">
<div class="column is-3 ">
<aside class="menu is-hidden-mobile">
<p class="menu-label">
Untermenü
</p>
<ul class="menu-list">
<li><a class="is-active">Startseite</a></li>
<!--<li><a>Customers</a></li>
<li><a>Other</a></li>-->
</ul>
<%
if(tab.equals("mask"))
{
%>
<p class="menu-label">
Masken-Sicherung
</p>
<ul class="menu-list">
<li><a onClick="getMaskMenu('load');">Masken-Quellcode exportieren</a></li>
<li><a onClick="getMaskMenu('save');">Masken-Quellcode importieren</a></li>
</ul>
<%
} //Ende Masken Menü
%>
<%
if(tab.equals("action"))
{
%>
<p class="menu-label">
Komponentenverwaltung
</p>
<%@ include file="/edit/etl/systeminfo_list.inc" %>
<!--<ul class="menu-list">
<li><a onClick="getComponentMenu('kern');">Kernmodul</a></li>
<li><a onClick="getComponentMenu('etl');">ETL-Komponente</a></li>
</ul>-->
<%
} //Ende Action Menü
%>
</aside>
</div>
<div class="column is-9">
<%
if(tab.equals(""))
{
%>
<section class="hero is-info welcome is-small">
<div class="hero-body">
<div class="container">
<h1 class="title">
ETL Manager
</h1>
<h2 class="subtitle">
Diese Komponente dient der Masken-Verwaltung.
</h2>
<p class="title">Klicken Sie oben links auf "Masken", um zum Menü zu gelangen.</p>
</div>
</div>
</section>
<!--<section class="info-tiles">
<div class="tile is-ancestor has-text-centered">
<div class="tile is-parent">
<article class="tile is-child box">
<p class="title">Nützliche Funktion</p>
<p class="subtitle">welche?</p>
</div>
</article>
</div>
<div class="tile is-parent">
<article class="tile is-child box">
<p class="title">Weitere nützliche Funktion</p>
<p class="subtitle">welche</p>
</article>
</div>
<div class="tile is-parent">
<article class="tile is-child box">
<p class="title">Noch eine nützliche Funktion</p>
<p class="subtitle">der Webanwendung</p>
</article>
</div>
</div>
</section>-->
<div class="columns">
<div class="column is-6">
</div>
<div class="column is-6">
</div>
</div>
<%
} //Ende Tiles Startseite
%>
<%
if(tab.equals("mask"))
{
%>
<section class="section">
<div class="container" id="loadMask" >
<h1 class="title" id="MaskHeader">Masken-Verwaltung</h1>
<h2 class="subtitle">
<div id="MaskSubHeader">Wählen Sie links das Masken-Menü</div>
</h2>
<div class="bd-snippet-code highlight-full ">
<figure class="highlight"><div class="buttons has-addons is-right">
<button class="button is-small is-info" onClick="copyText('Inhalt');">Kopieren</button>
</div>
<textarea id="Inhalt" name="Inhalt" class="textarea" placeholder="Inhalt..." rows="20"></textarea>
</figure>
</div>
</div>
</section>
<%
} //Ende Mask Content
%>
<%
if(tab.equals("action"))
{
%>
<section class="section">
<div class="container">
<h1 class="title" id="MaskHeader">Komponenten</h1>
<h2 class="subtitle">
<div id="MaskSubHeader">Install / Upgrade / Hauptladeroutinen / Unterladeroutinen </div>
</h2>
<div class="bd-snippet-code highlight-full ">
</div>
</div>
</section>
<%
} //Ende Action Content
%>
</div>
</div>
</div>
<!--<script async type="text/javascript" src="../js/bulma.js"></script>-->
</body>
</html>

163
superx/edit/etl/jobexecutor.jsp

@ -0,0 +1,163 @@
<%@ taglib uri="/WEB-INF/dbforms.tld" prefix="db" %>
<%@page pageEncoding="utf-8" contentType="text/html; charset=UTF-8" %>
<%@ page import ="de.superx.servlet.ServletUtils" %>
<%@ page import ="de.superx.servlet.SxPools" %>
<%@ page import ="java.io.BufferedWriter" %>
<%@ page import ="java.io.File" %>
<%@ page import ="java.io.FileWriter" %>
<%@ page import ="java.io.IOException" %>
<%@ page import ="de.superx.etl.EtlActionJob" %>
<%@ page import ="de.superx.etl.EtlUtils" %>
<%@ page import ="de.superx.common.SxUser" %>
<%@ page import ="de.superx.servlet.SuperXManager" %>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Ausführung</title>
<link rel="stylesheet" href="../../style/bulma.css" />
<script type="text/javascript" src="/superx/xml/js/memtext/sx_functions.js"></script>
</head>
<body>
<%
	// Ensure form parameters are decoded as UTF-8.
	if (request.getCharacterEncoding() == null)
		request.setCharacterEncoding("UTF-8");
	// Bugfix: this declaration was commented out although userobject is used
	// below, which made the page fail to compile.
	Object userobject=request.getSession().getAttribute("UserID");
	String userid;
	String filter="";
	String sql="";
	String returntext="";
	int returnCode=0;
	// Not logged in: forward to the "not authenticated" page.
	if(userobject == null)
	{
%>
<jsp:forward page="/edit/not_authenticated.htm"/>
<%
	}
	else
		userid=userobject.toString();
	Object mandantobject=request.getSession().getAttribute("MandantenID");
	String mandantenid="";
	if(mandantobject != null)
		mandantenid=mandantobject.toString();
	// The superx default tenant is an empty string in dbforms.
	if(mandantenid.equals("default"))
		mandantenid="";
	String db_form_name="etl_job";
	String erlaubt="0";
%>
<%@ include file="/edit/check_authentication.inc" %>
<%
if(erlaubt.equals("0"))
{
%>
<jsp:forward page="/edit/not_authorized.htm"/>
<%
}
//init Variables:
String tid = request.getParameter("systeminfo_id");
String componentUniquename = request.getParameter("componentUniquename");
String componentAction = request.getParameter("componentAction");
SxUser user = (SxUser) request.getSession().getAttribute("user");
if (user == null || !user.isAdmin()) {
returnCode=1;
returntext="Fehlende Rechte";
}
else
{
try {
String job=componentUniquename+"_"+componentAction;
String params=componentUniquename.toUpperCase()+"_PFAD="+SuperXManager.getWEB_INFPfad()+EtlUtils.PATHSEP+"conf"+EtlUtils.PATHSEP+"edustore"+EtlUtils.PATHSEP+"db"+EtlUtils.PATHSEP+"module"+EtlUtils.PATHSEP+componentUniquename;
EtlActionJob myJob=new EtlActionJob(job); //TODO:Mandantid
myJob.initJob(job,params);
returnCode= myJob.execute("");
returntext+=myJob.getActionLog().toString();
} catch (Exception e) {
returnCode=1;
returntext=e.toString();
e.printStackTrace();
}
}
%>
<!-- START NAV -->
<nav class="navbar is-white">
<div class="container">
<div class="navbar-brand">
<a class="navbar-item" href="webapp_manager.jsp">Masken laden</a>
<div class="navbar-burger burger" data-target="navMenu">
<span>dd</span>
<span>dd</span>
<span>dd</span>
</div>
</div>
<!-- <div id="navMenu" class="navbar-menu">
<div class="navbar-start">
<a class="navbar-item" href="webapp_manager.jsp?tab=inst">
Installation /Patches
</a>
<a class="navbar-item" href="webapp_manager.jsp?tab=load">
Konnektoren
</a>-->
<a class="navbar-item" href="javascript:window.close()">
Schließen
</a>
</div>
</div>
</div>
</nav>
<!-- END NAV -->
<section class="section">
<div class="container">
<h1 class="title" id="MaskHeader">Protokoll</h1>
<h2 class="subtitle">
<div id="MaskSubHeader">Ergebnis der Ausführung</div>
</h2>
<p>Status: <%= "Code "+ returnCode+ "("+de.superx.etl.EtlUtils.translateReturnCode(returnCode)+")" %>, Logausgabe:
</p>
<div class="bd-snippet-code highlight-full ">
<figure class="highlight"><div class="buttons has-addons is-right">
<button class="button is-small is-info" onClick="copyText('Inhalt');">Kopieren</button>
</div>
<textarea id="Inhalt" name="Inhalt" class="textarea" placeholder="Inhalt..." rows="20"><%= returntext %>
</textarea>
</figure>
</div>
</div>
</section>
</body>
</html>

143
superx/edit/etl/saveMask.jsp

@@ -0,0 +1,143 @@
<%@ taglib uri="/WEB-INF/dbforms.tld" prefix="db" %>
<%@page pageEncoding="utf-8" contentType="text/html; charset=UTF-8" %>
<%@ page import ="de.superx.servlet.ServletUtils" %>
<%@ page import ="de.superx.servlet.SxPools" %>
<%@ page import ="java.io.BufferedWriter" %>
<%@ page import ="java.io.File" %>
<%@ page import ="java.io.FileWriter" %>
<%@ page import ="java.io.IOException" %>
<%@ page import ="java.nio.charset.StandardCharsets" %>
<%@ page import ="java.nio.file.Files" %>
<%@ page import ="de.superx.etl.EtlActionJob" %>
<%@ page import ="de.superx.etl.EtlUtils" %>
<%@ page import ="de.superx.common.SxUser" %>
<html>
<head>
<meta charset="utf-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1">
<title>Ausführung</title>
<link rel="stylesheet" href="../../style/bulma.css" />
<script type="text/javascript" src="/superx/xml/js/memtext/sx_functions.js"></script>
</head>
<body>
<%
/*
 * saveMask.jsp
 * Receives a mask definition as XML ('src' parameter), verifies it matches
 * the posted mask id ('maskeninfo_id'), writes it to a temp file and runs
 * the sx_insert_mask ETL job. Admin-only.
 */
//init Variables:
if (request.getCharacterEncoding() == null)
	request.setCharacterEncoding("UTF-8");
String tid = request.getParameter("maskeninfo_id");
String src = request.getParameter("src");
String returntext="";
int returnCode=0;
returntext=src;
SxUser user = (SxUser) request.getSession().getAttribute("user");
String mandantenid= (String) request.getSession().getAttribute("MandantenID");
// XPath used to verify that the posted XML really describes mask 'tid'
String searchString="/etlAction[@name=\"sx_select_mask\"]/unload[@name=\"unload_maskeninfo\"]/rs/row/fld[@name=\"tid\"]";
if(mandantenid==null || mandantenid.equals(""))
mandantenid="default";
if (user == null || !user.isAdmin()) {
returnCode=1;
returntext="Fehlende Rechte";
}
else
{
File temp=null;
try {
if(de.superx.etl.EtlUtils.isNodeValueInXml(src,searchString,tid))
{
String job="sx_insert_mask";
returntext="TID="+tid;
String params="TID="+tid;
temp = File.createTempFile("myTempFile", ".xml");
// FIX: write with an explicit UTF-8 charset; the previous FileWriter
// used the platform default encoding although the request body was
// decoded as UTF-8, corrupting non-ASCII mask content.
BufferedWriter bw = Files.newBufferedWriter(temp.toPath(), StandardCharsets.UTF_8);
bw.write(src);
bw.close();
params+=EtlUtils.NEWLINE+"PATH_TO_INPUTFILE="+temp.getAbsolutePath();
params+=EtlUtils.NEWLINE+"FORMAT=XML";
EtlActionJob myJob=new EtlActionJob(job); //TODO:Mandantid
myJob.initJob(job,params);
returnCode= myJob.execute("");
returntext+=myJob.getActionLog().toString();
}
else
{
returntext="Übergebene TID "+tid+" entspricht nicht der XML-Datei ";
returnCode=1;
}
} catch (Exception e) {
returnCode=1;
returntext=e.toString();
e.printStackTrace();
} finally {
// FIX: the temp file was never deleted before (resource leak,
// and it holds uploaded content).
if(temp!=null)
temp.delete();
}
}
%>
<!-- START NAV -->
<nav class="navbar is-white">
<div class="container">
<div class="navbar-brand">
Masken laden
<div class="navbar-burger burger" data-target="navMenu">
<span>dd</span>
<span>dd</span>
<span>dd</span>
</div>
</div>
<!-- <div id="navMenu" class="navbar-menu">
<div class="navbar-start">
<a class="navbar-item" href="webapp_manager.jsp?tab=inst">
Installation /Patches
</a>
<a class="navbar-item" href="webapp_manager.jsp?tab=load">
Konnektoren
</a>-->
<a class="navbar-item" href="javascript:window.close()">
Schließen
</a>
<!-- FIX: removed two stray </div> tags whose opening tags are inside the
commented-out navMenu block above; only .container is still open here. -->
</div>
</nav>
<!-- END NAV -->
<section class="section">
<div class="container">
<h1 class="title" id="MaskHeader">Protokoll</h1>
<h2 class="subtitle">
<div id="MaskSubHeader">Ergebnis der Ausführung</div>
</h2>
<p>Status: <%= "Code "+ returnCode+ "("+de.superx.etl.EtlUtils.translateReturnCode(returnCode)+")" %>, Logausgabe:
</p>
<div class="bd-snippet-code highlight-full ">
<figure class="highlight"><div class="buttons has-addons is-right">
<button class="button is-small is-info" onClick="copyText('Inhalt');">Kopieren</button>
</div>
<%-- FIX: escape '&' and '<' so user-supplied XML / log text cannot close
     the textarea and inject markup (XSS: 'src' is attacker-controlled). --%>
<textarea id="Inhalt" name="Inhalt" class="textarea" placeholder="Inhalt..." rows="20"><%= returntext==null ? "" : returntext.replace("&","&amp;").replace("<","&lt;") %>
</textarea>
</figure>
</div>
</div>
</section>
</body>
</html>

45
superx/edit/etl/systeminfo_list.inc

@@ -0,0 +1,45 @@
<%-- systeminfo_list.inc
     Renders a clickable <ul> of installed components (one <li> per
     'systeminfo' row). Each entry calls the JS function getComponentMenu()
     with the component's id, name, unique name and version; the latter two
     come from the component's matching 'db_version' row.
     Expects 'mandantenid' to be declared by the including page. --%>
<%
// Per-row values, refilled on every iteration of the outer form body.
String systeminfo_id="";
String componentName="";
String componentUniquename="";
String componentVersion="";
%>
<db:dbform tableName="systeminfo" maxRows="*" autoUpdate="false" captionResource="true" multipart="false" followUp="/edit/etl/etl_manager.jsp" filter="" orderBy="name"
dbConnectionName="<%= mandantenid %>" ><db:header>
<input type="hidden" name="${_csrf.parameterName}" value="${_csrf.token}"/>
<ul>
</db:header>
<db:errors/>
<db:body allowNew="false">
<%
// currentRow_systeminfo is provided by the enclosing db:body tag.
systeminfo_id=currentRow_systeminfo.get("tid").toString();
componentName=currentRow_systeminfo.get("name").toString().trim();
%>
<%-- Nested form: look up the (at most one) db_version row joined via
     db_version.systeminfo_id = systeminfo.tid to get name/version. --%>
<db:dbform orderBy="" childField="systeminfo_id" parentField="tid" tableName="db_version" maxRows="1" captionResource="true" followUp="" autoUpdate="false" multipart="false" dbConnectionName="<%= mandantenid %>">
<db:header/>
<db:errors/>
<db:body allowNew="false">
<%
// A component without a db_version entry is shown as "Unbekannt" (unknown).
if(currentRow_db_version!=null)
{
componentUniquename=currentRow_db_version.get("his_system").toString().trim();
componentVersion=currentRow_db_version.get("version").toString().trim();
}
else
{
componentUniquename="Unbekannt";
componentVersion="Unbekannt";
}
%>
</db:body>
<db:footer/>
</db:dbform>
<%-- NOTE(review): componentName/-Uniquename are interpolated into the JS
     call unescaped; a quote in the DB value would break the handler. --%>
<li>
<a onClick="<%= "getComponentMenu("+systeminfo_id+",'"+componentName+"','"+componentUniquename+"','"+componentVersion+"');"%>" style="color:blue;cursor: pointer;"><db:label nullFieldValue="" styleClass="clsInputStyle" fieldName="name"/></a>
</li>
</db:body>
<db:footer>
</ul>
</db:footer></db:dbform>
Loading…
Cancel
Save