From fdff2097f5b52dbca008bee0c3b234f49c604ee7 Mon Sep 17 00:00:00 2001
From: Daniel Quathamer
Date: Mon, 3 Oct 2022 10:53:04 +0200
Subject: [PATCH] Build package from repo (first steps) #1

---
 .gitignore | 1 +
 build.xml | 70 +
 src-modules/module/etl/bin/SQL_ENV_etl.sam | 16 +
 src-modules/module/etl/bin/etl_dosql.x | 115 ++
 .../module/etl/bin/etl_sx_execute_actions.x | 11 +
 .../module/etl/bin/etl_sx_insert_mask.x | 69 +
 .../module/etl/bin/etl_sx_select_mask.x | 46 +
 .../module/etl/bin/etl_sx_unload_records.x | 46 +
 .../module/etl/bin/etl_sx_upload_records.x | 46 +
 src-modules/module/etl/conf/build.xml | 40 +
 src-modules/module/etl/conf/etl.xml | 433 ++++++
 .../module/etl/conf/etl_step_fuellen.xsl | 1327 +++++++++++++++++
 src-modules/module/etl/conf/excludes.txt | 10 +
 src-modules/module/etl/conf/includes.txt | 4 +
 .../module/etl/masken/42000_felderinfo.unl | 6 +
 .../etl/masken/42000_maske_system_bez.unl | 1 +
 .../etl/masken/42000_masken_felder_bez.unl | 6 +
 .../module/etl/masken/42000_maskeninfo.unl | 92 ++
 .../etl/masken/42000_sachgeb_maske_bez.unl | 1 +
 .../module/etl/rohdaten/etl_unload.xml | 12 +
 .../schluesseltabellen/etl_step_fuellen.sql | 592 ++++++++
 .../etl/schluesseltabellen/etl_step_type.unl | 5 +
 .../etl/schluesseltabellen/fm_templates.unl | 440 ++++++
 .../fm_templates_fuellen.sql | 25 +
 .../schluesseltabellen/fm_templates_unload.x | 25 +
 .../schluesseltabellen/fm_templates_update.x | 16 +
 .../sachgebiete_fuellen.sql | 14 +
 .../sachgebiete_loeschen.sql | 3 +
 .../sx_stylesheets_fuellen.sql | 49 +
 .../schluesseltabellen/themenbaum_fuellen.sql | 22 +
 .../etl/ActionHandler/EtlActionHandler.java | 131 ++
 .../EtlActionHandlerDoquery.java | 48 +
 .../ActionHandler/EtlActionHandlerDosql.java | 68 +
 .../EtlActionHandlerExecuteMask.java | 51 +
 .../etl/ActionHandler/EtlActionHandlerI.java | 34 +
 .../ActionHandler/EtlActionHandlerMsg.java | 30 +
 .../EtlActionHandlerUnloadRecords.java | 43 +
 .../EtlActionHandlerUploadRecords.java | 58 +
 src/de/superx/etl/EtlAction.java | 200 +++
 src/de/superx/etl/EtlActionJob.java | 235 +++
 src/de/superx/etl/EtlStep.java | 239 +++
 src/de/superx/etl/EtlUtils.java | 336 +++++
 src/de/superx/etl/QueryResultSerializer.java | 437 ++++++
 src/de/superx/etl/SqlExecutor.java | 340 +++++
 src/de/superx/etl/TableUploader.java | 779 ++++++++++
 src/de/superx/etl/bin/Doquery.java | 212 +++
 src/de/superx/etl/bin/Dosql.java | 192 +++
 src/de/superx/etl/bin/EtlJobExecutor.java | 70 +
 src/de/superx/etl/bin/SxTransformer.java | 908 +++++++++++
 src/de/superx/etl/bin/UnloadRecords.java | 214 +++
 src/de/superx/etl/bin/UploadRecords.java | 94 ++
 src/de/superx/etl/util/GetOpts.java | 155 ++
 src/de/superx/sxrest/JobExecutor.java | 47 +
 src/de/superx/sxrest/LaunchUpgrade.java | 81 +
 src/de/superx/sxrest/MaskXml.java | 183 +++
 superx/edit/etl/etl_manager.jsp | 319 ++++
 superx/edit/etl/jobexecutor.jsp | 163 ++
 superx/edit/etl/saveMask.jsp | 143 ++
 superx/edit/etl/systeminfo_list.inc | 45 +
 59 files changed, 9398 insertions(+)
 create mode 100644 .gitignore
 create mode 100644 build.xml
 create mode 100644 src-modules/module/etl/bin/SQL_ENV_etl.sam
 create mode 100755 src-modules/module/etl/bin/etl_dosql.x
 create mode 100755 src-modules/module/etl/bin/etl_sx_execute_actions.x
 create mode 100755 src-modules/module/etl/bin/etl_sx_insert_mask.x
 create mode 100755 src-modules/module/etl/bin/etl_sx_select_mask.x
 create mode 100755 src-modules/module/etl/bin/etl_sx_unload_records.x
 create mode 100755 src-modules/module/etl/bin/etl_sx_upload_records.x
 create mode 100644 src-modules/module/etl/conf/build.xml
 create mode 100644 src-modules/module/etl/conf/etl.xml
 create mode 100644 src-modules/module/etl/conf/etl_step_fuellen.xsl
 create mode 100644 src-modules/module/etl/conf/excludes.txt
 create mode 100644 src-modules/module/etl/conf/includes.txt
 create mode 100644 src-modules/module/etl/masken/42000_felderinfo.unl
 create mode 100644 src-modules/module/etl/masken/42000_maske_system_bez.unl
 create mode 100644 src-modules/module/etl/masken/42000_masken_felder_bez.unl
 create mode 100644 src-modules/module/etl/masken/42000_maskeninfo.unl
 create mode 100644 src-modules/module/etl/masken/42000_sachgeb_maske_bez.unl
 create mode 100644 src-modules/module/etl/rohdaten/etl_unload.xml
 create mode 100644 src-modules/module/etl/schluesseltabellen/etl_step_fuellen.sql
 create mode 100644 src-modules/module/etl/schluesseltabellen/etl_step_type.unl
 create mode 100644 src-modules/module/etl/schluesseltabellen/fm_templates.unl
 create mode 100644 src-modules/module/etl/schluesseltabellen/fm_templates_fuellen.sql
 create mode 100755 src-modules/module/etl/schluesseltabellen/fm_templates_unload.x
 create mode 100755 src-modules/module/etl/schluesseltabellen/fm_templates_update.x
 create mode 100644 src-modules/module/etl/schluesseltabellen/sachgebiete_fuellen.sql
 create mode 100644 src-modules/module/etl/schluesseltabellen/sachgebiete_loeschen.sql
 create mode 100644 src-modules/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql
 create mode 100644 src-modules/module/etl/schluesseltabellen/themenbaum_fuellen.sql
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandler.java
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandlerDoquery.java
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandlerDosql.java
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandlerI.java
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandlerMsg.java
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandlerUnloadRecords.java
 create mode 100644 src/de/superx/etl/ActionHandler/EtlActionHandlerUploadRecords.java
 create mode 100644 src/de/superx/etl/EtlAction.java
 create mode 100644 src/de/superx/etl/EtlActionJob.java
 create mode 100644 src/de/superx/etl/EtlStep.java
 create mode 100644 src/de/superx/etl/EtlUtils.java
 create mode 100644 src/de/superx/etl/QueryResultSerializer.java
 create mode 100644 src/de/superx/etl/SqlExecutor.java
 create mode 100644 src/de/superx/etl/TableUploader.java
 create mode 100644 src/de/superx/etl/bin/Doquery.java
 create mode 100644 src/de/superx/etl/bin/Dosql.java
 create mode 100644 src/de/superx/etl/bin/EtlJobExecutor.java
 create mode 100644 src/de/superx/etl/bin/SxTransformer.java
 create mode 100644 src/de/superx/etl/bin/UnloadRecords.java
 create mode 100644 src/de/superx/etl/bin/UploadRecords.java
 create mode 100644 src/de/superx/etl/util/GetOpts.java
 create mode 100644 src/de/superx/sxrest/JobExecutor.java
 create mode 100644 src/de/superx/sxrest/LaunchUpgrade.java
 create mode 100644 src/de/superx/sxrest/MaskXml.java
 create mode 100644 superx/edit/etl/etl_manager.jsp
 create mode 100644 superx/edit/etl/jobexecutor.jsp
 create mode 100644 superx/edit/etl/saveMask.jsp
 create mode 100644 superx/edit/etl/systeminfo_list.inc

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..840e7d3
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+/classes/
diff --git a/build.xml b/build.xml
new file mode 100644
index 
0000000..b0b0e9d --- /dev/null +++ b/build.xml @@ -0,0 +1,70 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src-modules/module/etl/bin/SQL_ENV_etl.sam b/src-modules/module/etl/bin/SQL_ENV_etl.sam new file mode 100644 index 0000000..19b680e --- /dev/null +++ b/src-modules/module/etl/bin/SQL_ENV_etl.sam @@ -0,0 +1,16 @@ +#Beispielumgebung für das ETL-Modul: + +#SUPERX_MODULE=$SUPERX_DIR/db/module +#SUPERX_ROHDATEN=rohdaten + +ETL_PFAD=$SUPERX_MODULE/etl; export ETL_PFAD +ETL_ERRORDAT=$ETL_PFAD/etl_update.err; export ETL_ERRORDAT +#Hier stehen die Rohdaten (im Unterverzeichnis unl) +ETL_LOAD_PFAD=$ETL_PFAD/$SUPERX_ROHDATEN; export ETL_LOAD_PFAD +ETL_ERRORMAIL=$ERRORMAIL; export ETL_ERRORMAIL +ETL_LOGMAIL=$LOGMAIL; export ETL_LOGMAIL + +PATH=$ETL_PFAD/bin:$PATH +export PATH + + diff --git a/src-modules/module/etl/bin/etl_dosql.x b/src-modules/module/etl/bin/etl_dosql.x new file mode 100755 index 0000000..2d0d7fb --- /dev/null +++ b/src-modules/module/etl/bin/etl_dosql.x @@ -0,0 +1,115 @@ +#!/bin/bash + +#----------------------------- +# Shell-Kommando "etl_dosql" +#----------------------------- + +#------------------------------------------------------------------- +# Shellvariablen setzen und SQL-Anweisung(en) aus Datei +# in der SuperX-Datenbank ausfuehren. +#------------------------------------------------------------------- +#13.12.07 DQ Freemarker-Parsing auch mandantenfähig +#9.12.06 MB Erweiterung für Freemarker-Parsing +if [ "$1" = "" ] + then echo "Aufruf: etl_dosql
(optional) (optional) (optional, nur bei SX_CLIENT=jdbc)" + echo " " + echo "Aktion: etl_dosql führt die Kommandos in der Datei aus." + echo "Das Ergebnis kann mit Feldnamen () in eine Datei ausgegeben werden" + echo " " + exit 0 +fi +#. $SUPERX_DIR/db/bin/SQL_ENV +filename="$1" +header=$2 +outfile=$3 +params=$4 + +fgrep -i -s "freemarker template" $filename > /dev/null +FLAG1=$? +fgrep -i -s "FREEMARKER TEMPLATE" $filename > /dev/null +FLAG2=$? + +orgfilename="$filename" +tmpfilename="$filename"$MANDANTID.tmp$MANDANTENID.sql +if [ $FLAG1 -eq 0 -o $FLAG2 -eq 0 ] +then +rm -f $tmpfilename + + +#Anlegen einer temp. Kopie filename.tmp$MANDANTENID.sql +java -cp $JDBC_CLASSPATH $JAVA_OPTS de.superx.bin.FMParser -dbproperties:$DB_PROPERTIES -in:$filename -out:$tmpfilename +if [ ! -f $tmpfilename ] +then +echo "Error : etl_dosql abgebrochen" +exit 1 +fi + +filename="$tmpfilename" +fi + +fgrep -i -s "" $filename > /dev/null +FLAG1=$? +if [ $FLAG1 -eq 0 ] +then + CP=".:$JDBC_CLASSPATH:$XML_CLASSPATH" + if [ "$LANG" != "" ] + then LOCALE="-Duser.language=$LANG" + fi + java $LOCALE -cp $CP $JAVA_OPTS de.superx.etl.bin.Dosql -logger:$LOGGER_PROPERTIES -dbproperties:$DB_PROPERTIES -sqlfile:"$filename" -outFormat:"txt" -delim:$DBDELIMITER -header:$header -outfile:$outfile -params:$params + +else +case $SX_CLIENT in + +jdbc) + CP=".:$JDBC_CLASSPATH:$XML_CLASSPATH" + if [ "$LANG" != "" ] + then LOCALE="-Duser.language=$LANG" + fi + java $LOCALE -cp $CP $JAVA_OPTS de.superx.etl.bin.Dosql -logger:$LOGGER_PROPERTIES -dbproperties:$DB_PROPERTIES -sqlfile:"$filename" -outFormat:"txt" -delim:$DBDELIMITER -header:$header -outfile:$outfile -params:$params + +;; +psql) +if [ "$header" != "true" ] +then + tuples_only='-t' +fi + +if [ "$outfile" = "" ] +then + psql --dbname $DBNAME -f $filename $tuples_only + +else + echo "Ergebnis mit Fieldsep $DBDELIMITER klappt mit psql noch nicht" + echo "Bitte benutzen Sie SX_CLIENT=jdbc" + psql --dbname $DBNAME -P fieldsep="$DBDELIMITER" -f $filename $tuples_only -o $outfile + +fi + +;; +dbaccess) +if [ "$header" = "true" ] +then + echo "Mit dbaccess ist kein Export der Feldnamen möglich" + exit 0 +fi + +if [ "$outfile" = "" ] +then + $INF_BIN/dbaccess $DBACCESS_PARAMS $DBNAME $filename +else + $INF_BIN/dbaccess $DBACCESS_PARAMS $DBNAME $filename >$outfile +fi + +;; +hsql) + java -Xmx300M -cp .:$JDBC_CLASSPATH org.hsqldb.util.ScriptTool -database joolap -user admin -password ???? -script $filename +esac +fi + +if [ -f $tmpfilename -a "$FM_DEBUG" == "" ] +then +echo +#Zum Debuggen auskommentieren: +rm -f "$tmpfilename" +fi + diff --git a/src-modules/module/etl/bin/etl_sx_execute_actions.x b/src-modules/module/etl/bin/etl_sx_execute_actions.x new file mode 100755 index 0000000..143f6f8 --- /dev/null +++ b/src-modules/module/etl/bin/etl_sx_execute_actions.x @@ -0,0 +1,11 @@ +#!/bin/bash + +if [ "$1" = "" ] + then echo "Aufruf: sx_execute_actions.x MandantenID WEB_INF_PFAD MODULE_PFAD (z.B. 
/home/superx/webserver/tomcat/webapps/superx/WEB-INF/conf/edustore/db/module oder /home/superx/db/module) Job_uniquename outfile params (optional)" + + exit 0 +fi + + + +java $LOCALE -cp $JDBC_CLASSPATH:$XML_CLASSPATH $JAVA_OPTS -DSX_LOG_TO_TMP=true de.superx.bin.ActionExecutor -mandantenID:$1 -WEB_INF_PFAD:$2 -MODULE_PFAD:$3 -job_uniquename:$4 -outfile:$5 -params:$6 diff --git a/src-modules/module/etl/bin/etl_sx_insert_mask.x b/src-modules/module/etl/bin/etl_sx_insert_mask.x new file mode 100755 index 0000000..247821b --- /dev/null +++ b/src-modules/module/etl/bin/etl_sx_insert_mask.x @@ -0,0 +1,69 @@ +#!/bin/bash +#. $SUPERX_DIR/bin/SQL_ENV +#--------------------------------------------------------------------- +# Shell-Kommando "etl_sx_insert_mask.x" +# +#--------------------------------------------------------------------- + +##WEITER sicherheitshalber zurücksetzen +WEITER="" + + +if [ "$1" = "" ] + then echo "Aufruf: etl_sx_insert_mask.x " + echo " " + echo "Aktion: etl_sx_select_mask.x entlaedt alle Metadaten zur Maske mit der TID ." + echo " Mit Hilfe von sx_insert_mask koennen diese Daten importiert werden." + echo " " + exit 0 +fi + +#------------------------------------------------------------------- +#-- Metadaten entladen +#------------------------------------------------------------------- +TID=$1 + +if [ "$2" == "" ] + then outfile=$TID.xml +else + +outfile=$2 + +fi + + +echo "Maske Nummer $TID" +DOQUERY "select name from maskeninfo where tid = $1" false +echo "entladen" + +SX_CLIENT=jdbc +export SX_CLIENT +CP=".:$JDBC_CLASSPATH" + +CMD="java $JAVA_OPTS -cp $CP de.superx.etl.bin.EtlJobExecutor -dbproperties:$DB_PROPERTIES -job:sx_select_mask -outfile:$outfile -params:TID=$TID" + + +$CMD + + + + diff --git a/src-modules/module/etl/bin/etl_sx_unload_records.x b/src-modules/module/etl/bin/etl_sx_unload_records.x new file mode 100755 index 0000000..645e5b8 --- /dev/null +++ b/src-modules/module/etl/bin/etl_sx_unload_records.x @@ -0,0 +1,46 @@ +#!/bin/bash +#--------------------------------------------------------------------- +# Shell-Kommando "etl_sx_unload_records.x" +# +# Erstellt von D. Quathamer am 6.1.2020 +#--------------------------------------------------------------------- +if [ "$1" = "" ] + then echo "Aufruf: etl_sx_unload_records.x (optional) (optional)
(optional) " + echo " " + echo "Aktion: etl_sx_unload_records.x entlädt Inhalte einer Tabelle " + echo "Die Ausgabeformate sind txt (default), html und xml " + echo "html und xml sind nur möglich,wenn java installiert ist und JAVA_HOME gesetzt ist." + echo "Der Dateiname ist optional" + echo " " + exit 1 +fi + +table=$1 +filename=$2 +outformat=$3 +header=$4 +currpath=`pwd` +UNLLOG=$currpath/unload.err + +if [ "$filename" = "" ] + then filename="$currpath/$table.unl" +fi +if [ "$header" = "" ] + then header="false" +fi +if [ "$outformat" = "" ] + then outformat="txt" +fi +if [ "$LANG" != "" ] + then LOCALE="-Duser.language=$LANG" +fi + + +CP=".:$JDBC_CLASSPATH" + +echo "Unload $table to $filename" + + +java $LOCALE -cp "$CP" de.superx.etl.bin.Doquery $LOGGER_PROPERTIES $DB_PROPERTIES 'select * from '$table $outformat $DBDELIMITER $header $filename + + diff --git a/src-modules/module/etl/bin/etl_sx_upload_records.x b/src-modules/module/etl/bin/etl_sx_upload_records.x new file mode 100755 index 0000000..4372975 --- /dev/null +++ b/src-modules/module/etl/bin/etl_sx_upload_records.x @@ -0,0 +1,46 @@ +#!/bin/bash +#--------------------------------------------------------------------- +# Shell-Kommando "etl_sx_upload_records.x" +# +# Erstellt von D. Quathamer am 6.1.2020 +#--------------------------------------------------------------------- + + +if [ "$1" = "" ] + then echo "Aufruf: etl_sx_upload_records.x (optional,Default ist Tabellenname+.unl)
(optional) (optional) (optional)" + echo " " + echo "Aktion: etl_sx_upload_records.x lädt von einer Quelldatei in die Tabelle" + echo " " + exit 0 +fi + +tabelle=$1 +quelldatei=$2 +header=$3 +informat=$4 +encoding=$5 + +if [ "$encoding" = "" ] +then + if [ "$LANG" = "de_DE@euro" ] + then + encoding=ISO-8859-1 + fi + if [ "$LANG" = "de_DE.utf8" ] + then + encoding=UTF-8 + fi +fi + +if [ "$2" = "" ] +then + quelldatei=$tabelle.unl +fi + +echo "$tabelle wird aus der Datei $quelldatei gefüllt" +CP=".:$JDBC_CLASSPATH" + +CMD="java $JAVA_OPTS -cp $CP de.superx.etl.bin.UploadRecords -logger:$LOGGER_PROPERTIES -dbproperties:$DB_PROPERTIES -table:$tabelle -unl:$quelldatei -delim:$DBDELIMITER -header:$header -informat:$informat -encoding:$encoding" + +$CMD + diff --git a/src-modules/module/etl/conf/build.xml b/src-modules/module/etl/conf/build.xml new file mode 100644 index 0000000..96b850e --- /dev/null +++ b/src-modules/module/etl/conf/build.xml @@ -0,0 +1,40 @@ + + + + + + + + + + + File = ${ant.file} + + Script Parameters: + + all Modulscripte komplett erzeugen + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/src-modules/module/etl/conf/etl.xml b/src-modules/module/etl/conf/etl.xml new file mode 100644 index 0000000..61448c1 --- /dev/null +++ b/src-modules/module/etl/conf/etl.xml @@ -0,0 +1,433 @@ + + + + + + + + + + Schlüsseltabelle für Ladeschritte + + + + + + 1=Gruppierung, 2=Loadtable,3=nativeaction,4=loadmask,5=select + + 1=ja, d.h. von der Hochschule angepaßt, 0=nein, d.h. Auslieferung der Software + + + + +superx +superx +etl_step +tid +1 +etl_step_pk + + + + + + + + + +
+ + + + + Schlüsseltabelle für Arten von Ladeschritten, z.B. loadtable, nativeaction,unload + + + + + + + + + +superx +superx +etl_step_type +tid +1 +etl_step_type_pk + + + + + +
+ + + Schlüsseltabelle für Parameter für Arten von Ladeschritten + + + + + + + + + + + +superx +superx +etl_step_type_param +tid +1 +etl_step_type_param_pk + + + + + +
+ + + Schlüsseltabelle für Eigenschaften von Ladeschritten + + + + + + + + + +superx +superx +etl_step_property +tid +1 +etl_step_property_pk + + + + + +
+ + + Schlüsseltabelle für Beziehungen zwischen Ladeschritten, und Ausführungssteuerung + + + + + + + + + 1=ja, d.h. von der Hochschule angepaßt, 0=nein, d.h. Auslieferung der Software + + + +superx +superx +etl_step_relation +tid +1 +etl_step_relation_pk + + + + + +
+ + + Schlüsseltabelle für Ladejobs + + + + + + + 1=ja, d.h. von der Hochschule angepaßt, 0=nein, d.h. Auslieferung der Software + + + +superx +superx +etl_job +tid +1 +etl_job_pk + + + + + + + + +
+ + Schlüsseltabelle für Parameter für Ladejobs + + + + + + + + + + + +superx +superx +etl_job_param +tid +1 +etl_job_param_pk + + + + + +
+ + + + + + + + +Abfragen zur Administration + + + + + Laderoutinen verwalten + + $ETL_PFAD/masken + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+In diesem Formular können Sie Laderoutinen verwalten. + +tid + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+In diesem Formular können Sie Ladeschritte verwalten. + +tid + + + + + + + + + + + + + + + + + + +
+In diesem Formular können Sie Arten von Ladeschritten verwalten. + + + + + + +
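Only the descriptive text of the etl.xml table definitions survives above (the key tables for load jobs, job parameters, load steps, step types, step properties and step relations). As a rough orientation, the metadata model can be sketched as below. This is an approximation inferred from the temporary tables and INSERT statements in etl_step_fuellen.sql and from etl_step_type.unl further down in this patch; types, constraints and some column names (e.g. the handler column of etl_step_type) are assumptions rather than the shipped DDL, and etl_step_type_param is omitted because none of its columns are recoverable here.

-- Approximate sketch only (PostgreSQL dialect); the authoritative definitions come from etl.xml.
create table etl_job (                   -- one row per load job ("Ladejob")
    tid serial primary key,              -- sequence etl_job_pk
    uniquename varchar(255),
    caption varchar(255),
    systeminfo_id integer not null,      -- owning component (systeminfo)
    logfile varchar(255),
    custom_job smallint                  -- 1 = adapted by the institution, 0 = shipped release
);
create table etl_job_param (             -- job parameters, e.g. TID or PATH_TO_INPUTFILE
    tid serial primary key,
    etl_job_id integer references etl_job(tid),
    uniquename varchar(255) not null,
    name varchar(255),
    param_default varchar(255)
);
create table etl_step_type (             -- kinds of steps: LOAD, DOSQL, DOQUERY, UNLOAD, MSG
    tid integer primary key,             -- etl_step_type_pk
    uniquename varchar(255),
    caption varchar(255),
    handler varchar(255)                 -- column name assumed; Java class, e.g. de.superx.etl.ActionHandler.EtlActionHandlerDosql
);
create table etl_step (                  -- a single load step
    tid serial primary key,              -- sequence etl_step_pk
    uniquename varchar(255),
    caption varchar(255),
    systeminfo_id integer not null,
    step_type_id integer references etl_step_type(tid),
    logfile varchar(255),
    custom_step smallint
);
create table etl_step_property (         -- key/value properties of a step (select_stmt, target_table, format, ...)
    tid serial primary key,
    etl_step_id integer not null references etl_step(tid),
    prop_name varchar(255),
    prop_value text
);
create table etl_step_relation (         -- ties steps to a job, orders them and controls execution
    tid serial primary key,
    step_id integer not null references etl_step(tid),
    parent_step_id integer,              -- optional parent step (grouping)
    job_id integer not null references etl_job(tid),
    force_continue smallint default 1,
    step_active smallint default 1,
    sortnr smallint default 1,
    custom_step smallint default 1
);

etl_step_fuellen.sql seeds these tables so that re-runs are safe: existing jobs and steps are matched on uniquename plus systeminfo_id and updated in place (their tids stay stable), new ones are inserted, and job parameters, step properties and step relations are deleted and rebuilt on each run.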
+
+ diff --git a/src-modules/module/etl/conf/etl_step_fuellen.xsl b/src-modules/module/etl/conf/etl_step_fuellen.xsl new file mode 100644 index 0000000..0cb4ecb --- /dev/null +++ b/src-modules/module/etl/conf/etl_step_fuellen.xsl @@ -0,0 +1,1327 @@ + + + + + + + + + + + +' + + + + + +select sp_table_exists('etl_step') from xdummy; + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +<#assign etl_step_properties = [ + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +<#assign etl_step_properties = [ + + +]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +]]> + + + + + + +<@ETL_STEPS_FUELLEN /> + + + +]]> + + + + + + + + + + + + + + + +", "uniquename":" +", "name":" +", "type":" + +", "parent":" + + + +, + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +", "prop_name":" +msg", "prop_value":" + + + + + + + +", "prop_name":" +select_stmt", "prop_value":" + + + + + +", "prop_name":"PATH_TO_INPUTFILE", "prop_value":" + + + + + +", "prop_name":"target_table", "prop_value":" + + + + +", "prop_name":"path_to_inputfile", "prop_value":" + + + +", "prop_name":"truncateTargetTable", "prop_value":" + + + +", "prop_name":"format", "prop_value":" + + + + + + + + + +, + + + + + + + + + + + + + + + + + +<#assign etl_job_params = [ + {"etl_job":"]]> + ","param_name":" + + + + + ","param_name":" + + + + + +<#assign etl_steps = [ + +]]> + + + + + + +Load + + +Transformation + + +Aggregation + + +Test + + +Standdatum + + + + + + + + + + + + + + + DOSQL + + + + DOQUERY + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +$ + + + +_PFAD + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/src-modules/module/etl/conf/excludes.txt b/src-modules/module/etl/conf/excludes.txt
new file mode 100644
index 0000000..3ab2249
--- /dev/null
+++ b/src-modules/module/etl/conf/excludes.txt
@@ -0,0 +1,10 @@
+rohdaten/ETL_ENV
+rohdaten/*.properties
+preparation.sql
+finalize.sql
+rohdaten/*.err
+*.log
+*.err
+rohdaten/unl/*
+conf/customize.sql
+conf/*.log
diff --git a/src-modules/module/etl/conf/includes.txt b/src-modules/module/etl/conf/includes.txt
new file mode 100644
index 0000000..6abd85a
--- /dev/null
+++ b/src-modules/module/etl/conf/includes.txt
@@ -0,0 +1,4 @@
+doku/etl_modul/etl.html
+WEB-INF/conf/edustore/db/bin/SQL_ENV_etl.sam
+WEB-INF/lib/superx-etl0.2.jar
+
diff --git a/src-modules/module/etl/masken/42000_felderinfo.unl b/src-modules/module/etl/masken/42000_felderinfo.unl
new file mode 100644
index 0000000..d267cf5
--- /dev/null
+++ b/src-modules/module/etl/masken/42000_felderinfo.unl
@@ -0,0 +1,6 @@
+42000^Jobstatus^50^0^0^150^200^1^integer^200^0^1^<> select 0,'Release' from xdummy union select 1,'Eigene' from xdummy^^ ^
+42001^Laderoutine^0^0^0^150^80^1^integer^200^0^1^<> select tid,caption from etl_job order by 2;^^^
+42002^Arten von Ladeschritten^100^0^0^150^300^1^char^30^0^18^^^<>select '../edit/etl/etl_step_type_list.jsp' from xdummy;^
+42003^Name^20^0^0^150^150^1^sql^50^0^0^^^^
+42004^Komponente^30^0^0^150^200^1^integer^200^0^1^<> select tid,name from systeminfo order by 2;^^^
+42005^ETL-Manager^1000^0^0^150^300^1^char^30^0^18^^^<>select '../edit/etl/etl_manager.jsp' from xdummy;^
diff --git a/src-modules/module/etl/masken/42000_maske_system_bez.unl b/src-modules/module/etl/masken/42000_maske_system_bez.unl
new file mode 100644
index 0000000..5cda659
--- /dev/null
+++ b/src-modules/module/etl/masken/42000_maske_system_bez.unl
@@ -0,0 +1 @@
+42000^270^
diff --git a/src-modules/module/etl/masken/42000_masken_felder_bez.unl b/src-modules/module/etl/masken/42000_masken_felder_bez.unl
new file mode 100644
index 0000000..2aa3b27
--- /dev/null
+++ b/src-modules/module/etl/masken/42000_masken_felder_bez.unl
@@ -0,0 +1,6 @@
+42000^42000^
+42000^42001^
+42000^42002^
+42000^42003^
+42000^42004^
+42000^42005^
diff --git a/src-modules/module/etl/masken/42000_maskeninfo.unl b/src-modules/module/etl/masken/42000_maskeninfo.unl
new file mode 100644
index 0000000..ca9bc8f
--- /dev/null
+++ b/src-modules/module/etl/masken/42000_maskeninfo.unl
@@ -0,0 +1,92 @@
+42000^Laderoutinen verwalten^--Autor: D. 
Quathamer\ +--Datum: 2.8.2019\ +--freemarker template\ +create temp table tmp_ergebnis (\ +ord smallint,\ +tid integer, \ +uniquename varchar(255) ,\ +caption varchar(255),\ +systeminfo_id integer ,\ +systeminfo_str varchar(255),\ +logfile varchar(255),\ +custom_job smallint ,\ +letzter_lauf date,\ +nextedit varchar(255)\ +);\ +insert into tmp_ergebnis ( tid,\ + uniquename,\ + caption,\ + systeminfo_id,\ + logfile,\ + custom_job,\ + nextedit) \ +select tid,\ + uniquename,\ + caption,\ + systeminfo_id,\ + logfile,\ + custom_job,\ + ('../edit/etl/etl_job_edit.jsp|tid=' || J.tid)::varchar(255)\ +FROM etl_job J\ +where 1=1\ +/* and J.tid=<> */\ +/* and J.custom_job=<> */\ +/* and J.caption like '%<>%' */\ +/* and E.systeminfo_id=<> */\ +;\ +\ +update tmp_ergebnis set systeminfo_str=(select name from systeminfo where tid=tmp_ergebnis.systeminfo_id);\ +\ +\ +<@selectintotmp \ +select=" tid,\ + uniquename,\ + caption,\ + systeminfo_str,\ + logfile,\ + letzter_lauf,\ + custom_job,\ + nextedit"\ +source="tmp_ergebnis"\ +target="tmp_ergebnis2">\ +order by systeminfo_str,\ +caption\ +\ + <@informixnolog/>;\ + \ +drop table tmp_ergebnis;\ + \ +select systeminfo_str,\ + caption,\ + uniquename,\ + logfile,\ + custom_job,\ + letzter_lauf,\ +nextedit \ +from tmp_ergebnis2\ +;^XIL List\ + drop_and_delete movable_columns sizable_columns horizontal_scrolling\ + white_space_color=COLOR_WHITE fixed_columns=2\ + min_heading_height=35\ +Column CID=0 heading_text="Komponente" center_heading\ + row_selectable col_selectable heading_platform readonly\ + width=50 text_size=100\ +Column CID=0 heading_text="Name" center_heading\ + row_selectable col_selectable heading_platform readonly\ + width=50 text_size=100\ +Column CID=1 heading_text="Schlüssel" center_heading\ + row_selectable col_selectable heading_platform readonly\ + width=150 text_size=200\ +Column CID=1 heading_text="Logdatei" center_heading\ + row_selectable col_selectable heading_platform readonly\ + width=30 text_size=200\ +Column CID=1 heading_text="Eigene Laderoutine" center_heading\ + row_selectable col_selectable heading_platform readonly\ + width=5 text_size=200\ +Column CID=1 heading_text="Letzter Lauf" center_heading\ + row_selectable col_selectable heading_platform readonly\ + width=5 text_size=200\ +Column CID=1 heading_text="Bearbeiten" center_heading\ + row_selectable col_selectable heading_platform readonly\ + width=5 text_size=200\ +@@@^^^Suchen und Bearbeiten von Laderoutinen^drop table tmp_ergebnis2;^^1^440^360^0^1^^ diff --git a/src-modules/module/etl/masken/42000_sachgeb_maske_bez.unl b/src-modules/module/etl/masken/42000_sachgeb_maske_bez.unl new file mode 100644 index 0000000..5527b92 --- /dev/null +++ b/src-modules/module/etl/masken/42000_sachgeb_maske_bez.unl @@ -0,0 +1 @@ +270^42000^ diff --git a/src-modules/module/etl/rohdaten/etl_unload.xml b/src-modules/module/etl/rohdaten/etl_unload.xml new file mode 100644 index 0000000..3d364a2 --- /dev/null +++ b/src-modules/module/etl/rohdaten/etl_unload.xml @@ -0,0 +1,12 @@ + + +ETL-Modul + + + + + + + + + diff --git a/src-modules/module/etl/schluesseltabellen/etl_step_fuellen.sql b/src-modules/module/etl/schluesseltabellen/etl_step_fuellen.sql new file mode 100644 index 0000000..50b8f5a --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/etl_step_fuellen.sql @@ -0,0 +1,592 @@ +--Freemarker Template +<#include "SQL_lingua_franca"/> +<#include "SuperX_general"/> + + +<#assign etl_jobs = [ + {"uniquename":"sx_select_mask", "name":"Maske entladen", "systeminfo_id":9 ,"logfile":""}, + 
{"uniquename":"sx_insert_mask", "name":"Maske hochladen", "systeminfo_id":9 ,"logfile":""}, + {"uniquename":"kern_konstanten_update", "name":"Konstanten aktualisieren", "systeminfo_id":9 ,"logfile":""} + + + ] /> + +<#assign etl_job_params = [ + {"etl_job":"sx_select_mask","param_name":"TID", "name":"Maskennr.", "param_default":""}, + {"etl_job":"sx_select_mask","param_name":"PATH_TO_OUTPUTFILE", "name":"Ausgabedatei", "param_default":"$SUPERX_DIR/db/masken/$TID.xml"}, + {"etl_job":"sx_select_mask","param_name":"FORMAT", "name":"Ausgabeformat", "param_default":"XML"}, + + {"etl_job":"sx_insert_mask","param_name":"TID", "name":"Maskennr.", "param_default":""}, + {"etl_job":"sx_insert_mask","param_name":"PATH_TO_INPUTFILE", "name":"Eingabepfad", "param_default":"$SUPERX_DIR/db/masken/$TID.xml"}, + {"etl_job":"sx_insert_mask","param_name":"FORMAT", "name":"Format", "param_default":"XML"}, + {"etl_job":"sx_insert_mask","param_name":"SUPERX_DIR", "name":"Superx-Pfad zu WEB-INF/conf/edustore", "param_default":"$WEBAPP/WEB-INF/conf/edustore"}, + + {"etl_job":"kern_konstanten_update","param_name":"SUPERX_DIR", "name":"Superx-Pfad zu WEB-INF/conf/edustore", "param_default":"$WEBAPP/WEB-INF/conf/edustore"}, + {"etl_job":"kern_konstanten_update","param_name":"PATH_TO_INPUTFILE", "name":"Eingabepfad", "param_default":"$SUPERX_DIR/db/install/schluesseltabellen/kern_feste_konstanten_fuellen.sql"} + + + ] /> + +<#assign etl_steps = [ + {"etl_job":"sx_select_mask", "uniquename":"unload_masken_stammdaten", "name":"Masken-Daten entladen", "type":"MSG"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_maskeninfo", "name":"Maskeninfo entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_felderinfo", "name":"felderinfo entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_masken_felder_bez", "name":"masken_felder_bez entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_sachgeb_maske_bez", "name":"sachgeb_maske_bez entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_maske_system_bez", "name":"maske_system_bez entladen", "type":"UNLOAD", "parent":"unload_masken_stammdaten"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_themenbaum", "name":"themenbaum entladen", "type":"UNLOAD"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_sx_mask_style", "name":"sx_mask_style entladen", "type":"UNLOAD"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_sx_stylesheets", "name":"sx_stylesheets entladen", "type":"UNLOAD"}, + {"etl_job":"sx_select_mask", "uniquename":"unload_stylesheet_field", "name":"stylesheet_field entladen", "type":"UNLOAD"}, + + {"etl_job":"sx_insert_mask", "uniquename":"delete_maskeninfo", "name":"Maskeninfo löschen", "type":"DOQUERY" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_maskeninfo", "name":"Maskeninfo hochladen", "type":"LOAD" }, + + {"etl_job":"sx_insert_mask", "uniquename":"delete_felderinfo", "name":"felderinfo löschen", "type":"DOQUERY" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_felderinfo", "name":"felderinfo hochladen", "type":"LOAD" }, + + + {"etl_job":"sx_insert_mask", "uniquename":"delete_masken_felder_bez", "name":"masken_felder_bez löschen", "type":"DOQUERY" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_masken_felder_bez", "name":"masken_felder_bez hochladen", "type":"LOAD" }, + + + 
{"etl_job":"sx_insert_mask", "uniquename":"delete_sachgeb_maske_bez", "name":"sachgeb_maske_bez löschen", "type":"DOQUERY" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_sachgeb_maske_bez", "name":"sachgeb_maske_bez hochladen", "type":"LOAD" }, + + {"etl_job":"sx_insert_mask", "uniquename":"delete_maske_system_bez", "name":"maske_system_bez löschen", "type":"DOQUERY" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_maske_system_bez", "name":"maske_system_bez hochladen", "type":"LOAD" }, + + {"etl_job":"sx_insert_mask", "uniquename":"create_tmp_etl_themenbaum", "name":"Tabelle tmp_etl_themenbaum erzeugen", "type":"DOSQL" }, + {"etl_job":"sx_insert_mask", "uniquename":"create_tmp_etl_stylesheets", "name":"Tabelle tmp_etl_stylesheets erzeugen", "type":"DOSQL" }, + + {"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_themenbaum", "name":"tmp_etl_themenbaum hochladen", "type":"LOAD" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_stylesheets", "name":"tmp_etl_stylesheets hochladen", "type":"LOAD" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_mask_style", "name":"tmp_etl_mask_style hochladen", "type":"LOAD" }, + {"etl_job":"sx_insert_mask", "uniquename":"upload_tmp_etl_stylesheet_field", "name":"tmp_etl_stylesheet_field hochladen", "type":"LOAD" }, + + {"etl_job":"sx_insert_mask", "uniquename":"themenbaum_fuellen", "name":"themenbaum_fuellen", "type":"DOSQL" }, + {"etl_job":"sx_insert_mask", "uniquename":"sx_stylesheets_fuellen", "name":"sx_stylesheets_fuellen", "type":"DOSQL" }, + + + {"etl_job":"kern_konstanten_update", "uniquename":"kern_konstanten_update", "name":"Kern Konstanten aktualisieren", "type":"DOSQL" } + + + ] /> + +<#assign etl_step_properties = [ + {"etl_step":"unload_masken_stammdaten","prop_name":"msg", "prop_value":"Entlade Stammdaten Maske $TID" }, + {"etl_step":"unload_maskeninfo","prop_name":"select_stmt", "prop_value":"select tid,name,select_stmt,xil_proplist,chart_xtitel,chart_ytitel,erlaeuterung,cleanup_stmt,default_file,frontend,breite,hoehe,ampel,hilfe,hinweis from maskeninfo where tid=$TID" }, + {"etl_step":"unload_felderinfo","prop_name":"select_stmt", "prop_value":"select tid ,name,nummer,x,y,buttonbreite,feldbreite,zeilenanzahl,typ,laenge,obligatorisch,art,relation,attribut,defaultwert from felderinfo where tid in (select felderinfo_id from masken_felder_bez where maskeninfo_id = $TID) order by tid" }, + {"etl_step":"unload_masken_felder_bez","prop_name":"select_stmt", "prop_value":"select maskeninfo_id,felderinfo_id from masken_felder_bez where maskeninfo_id=$TID order by 1,2" }, + {"etl_step":"unload_sachgeb_maske_bez","prop_name":"select_stmt", "prop_value":"select sachgebiete_id,maskeninfo_id from sachgeb_maske_bez where maskeninfo_id=$TID order by 1,2" }, + {"etl_step":"unload_maske_system_bez","prop_name":"select_stmt", "prop_value":"select maskeninfo_id,systeminfo_id from maske_system_bez where maskeninfo_id=$TID order by 1,2" }, + {"etl_step":"unload_themenbaum","prop_name":"select_stmt", "prop_value":"select T.tid,name,maskeninfo_id,parent,(select name from themenbaum where tid=T.parent) as parent_name,gueltig_seit,gueltig_bis,erlaeuterung,sort,css_class from themenbaum T where maskeninfo_id=$TID order by 2,1" }, + {"etl_step":"unload_sx_mask_style","prop_name":"select_stmt", "prop_value":"select S.tid,S.maskeninfo_id,S.stylesheet_id,S.ord,(select filename from sx_stylesheets where tid=S.stylesheet_id) as stylesheet_filename from sx_mask_style S where maskeninfo_id=$TID order by 1,2,3" }, + 
{"etl_step":"unload_sx_stylesheets","prop_name":"select_stmt", "prop_value":"select S.tid,S.filename,S.caption,S.description,S.relation,S.useragent,S.contenttype from sx_stylesheets S, sx_mask_style M where S.tid=M.stylesheet_id and M.maskeninfo_id=$TID order by 1,2,3" }, + {"etl_step":"unload_stylesheet_field","prop_name":"select_stmt", "prop_value":"select F.tid,F.stylesheet_id,S.filename as stylesheet_filename,F.tablename,F.fieldname FROM stylesheet_field F, sx_stylesheets S, sx_mask_style M where F.stylesheet_id=S.tid and S.tid=M.stylesheet_id and M.maskeninfo_id=$TID order by 1,2,3,4,5" }, + + {"etl_step":"delete_maskeninfo","prop_name":"select_stmt", "prop_value":"delete from maskeninfo where tid=$TID" }, + {"etl_step":"upload_maskeninfo","prop_name":"target_table", "prop_value":"maskeninfo" }, + {"etl_step":"upload_maskeninfo","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_maskeninfo","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_maskeninfo\"]/rs/row" }, + {"etl_step":"upload_maskeninfo","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"delete_felderinfo","prop_name":"select_stmt", "prop_value":"delete from felderinfo where tid in (select felderinfo_id from masken_felder_bez where maskeninfo_id =$TID)" }, + {"etl_step":"upload_felderinfo","prop_name":"target_table", "prop_value":"felderinfo" }, + {"etl_step":"upload_felderinfo","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_felderinfo","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_felderinfo\"]/rs/row" }, + {"etl_step":"upload_felderinfo","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"delete_masken_felder_bez","prop_name":"select_stmt", "prop_value":"delete from masken_felder_bez where maskeninfo_id =$TID" }, + {"etl_step":"upload_masken_felder_bez","prop_name":"target_table", "prop_value":"masken_felder_bez" }, + {"etl_step":"upload_masken_felder_bez","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_masken_felder_bez","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_masken_felder_bez\"]/rs/row" }, + {"etl_step":"upload_masken_felder_bez","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"delete_sachgeb_maske_bez","prop_name":"select_stmt", "prop_value":"delete from sachgeb_maske_bez where maskeninfo_id =$TID" }, + {"etl_step":"upload_sachgeb_maske_bez","prop_name":"target_table", "prop_value":"sachgeb_maske_bez" }, + {"etl_step":"upload_sachgeb_maske_bez","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_sachgeb_maske_bez","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_sachgeb_maske_bez\"]/rs/row" }, + {"etl_step":"upload_sachgeb_maske_bez","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"delete_maske_system_bez","prop_name":"select_stmt", "prop_value":"delete from maske_system_bez where maskeninfo_id =$TID" }, + {"etl_step":"upload_maske_system_bez","prop_name":"target_table", "prop_value":"maske_system_bez" }, + {"etl_step":"upload_maske_system_bez","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_maske_system_bez","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_maske_system_bez\"]/rs/row" }, + {"etl_step":"upload_maske_system_bez","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + 
{"etl_step":"create_tmp_etl_themenbaum","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/create_tmp_etl_themenbaum.sql" }, + {"etl_step":"create_tmp_etl_stylesheets","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/create_tmp_etl_stylesheets.sql" }, + + {"etl_step":"upload_tmp_etl_themenbaum","prop_name":"target_table", "prop_value":"tmp_etl_themenbaum" }, + {"etl_step":"upload_tmp_etl_themenbaum","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_tmp_etl_themenbaum","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_themenbaum\"]/rs/row" }, + {"etl_step":"upload_tmp_etl_themenbaum","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"upload_tmp_etl_stylesheets","prop_name":"target_table", "prop_value":"tmp_etl_stylesheets" }, + {"etl_step":"upload_tmp_etl_stylesheets","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_tmp_etl_stylesheets","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_sx_stylesheets\"]/rs/row" }, + {"etl_step":"upload_tmp_etl_stylesheets","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"upload_tmp_etl_mask_style","prop_name":"target_table", "prop_value":"tmp_etl_mask_style" }, + {"etl_step":"upload_tmp_etl_mask_style","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_tmp_etl_mask_style","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_sx_mask_style\"]/rs/row" }, + {"etl_step":"upload_tmp_etl_mask_style","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"target_table", "prop_value":"tmp_etl_stylesheet_field" }, + {"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"format", "prop_value":"xml" }, + {"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"search_path", "prop_value":"/etlAction/unload [@name=\"unload_stylesheet_field\"]/rs/row" }, + {"etl_step":"upload_tmp_etl_stylesheet_field","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" }, + + {"etl_step":"themenbaum_fuellen","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/themenbaum_fuellen.sql" }, + {"etl_step":"sx_stylesheets_fuellen","prop_name":"PATH_TO_INPUTFILE", "prop_value":"$SUPERX_DIR/db/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql" }, + + {"etl_step":"kern_konstanten_update","prop_name":"path_to_inputfile", "prop_value":"$PATH_TO_INPUTFILE" } + + + + ] /> +<#assign testfaelle = [ + {"testcase":"test_sx_select_mask","assertion":1, "sql":"select count(*) from etl_job where uniquename='sx_select_mask'" }, + {"testcase":"test_sx_insert_mask","assertion":1, "sql":"select count(*) from etl_job where uniquename='sx_insert_mask'" }, + {"testcase":"test_sx_insert_mask_params","assertion":4, "sql":"select count(*) from etl_job_param P, etl_job J where J.tid=P.etl_job_id and J.uniquename='sx_insert_mask'" }, + {"testcase":"test_sx_insert_mask_steps","assertion":18, "sql":"select count(*) from etl_step S, etl_step_relation R, etl_job J where J.tid=R.job_id and S.tid=R.step_id and J.uniquename='sx_insert_mask'" }, + {"testcase":"test_sx_select_mask_steps","assertion":10, "sql":"select count(*) from etl_step S, etl_step_relation R, etl_job J where J.tid=R.job_id and S.tid=R.step_id and J.uniquename='sx_select_mask'" } + + ] /> + +create temp table tmp_etl_step( +tid INTEGER, 
+uniquename VARCHAR(255) , +caption VARCHAR(255) , +systeminfo_id INTEGER not null, +step_type INTEGER, +step_type_uniquename VARCHAR(255), +sortnr SMALLINT not null, +force_continue SMALLINT, +etl_job_id INTEGER , +parent_step_id INTEGER , +parent_step_uniquename varchar(255), +parent_job_uniquename varchar(255), +logfile varchar(255), +custom_step smallint, +already_exists smallint +) +; + +create temp table tmp_etl_job( + tid INTEGER, +uniquename VARCHAR(255) , +caption VARCHAR(255) , +systeminfo_id INTEGER not null, +logfile varchar(255), +already_exists smallint, +custom_job smallint +) +; + +create temp table tmp_etl_job_param( + tid SERIAL not null, +etl_job_id INTEGER , +uniquename VARCHAR(255) not null, +name VARCHAR(255) , +param_default VARCHAR(255) +) +; + + +create temp table tmp_etl_step_property( + tid SERIAL not null, +etl_step_id INTEGER not null, +prop_name VARCHAR(255) , +prop_value text + +) +; + +create temp table tmp_etl_step_relation( + tid SERIAL not null, +step_id INTEGER not null, +parent_step_id INTEGER , +job_id INTEGER not null, +force_continue SMALLINT default 1 , +step_active SMALLINT default 1, +sortnr SMALLINT default 1, +custom_step SMALLINT default 1 + +) +; + +<#foreach etl_job in etl_jobs> + +truncate table tmp_etl_job; +truncate table tmp_etl_step; +truncate table tmp_etl_job_param; +truncate table tmp_etl_step_property; +truncate table tmp_etl_step_relation; + +--tids der jobs dürfen sich nicht ändern, daher +-- +-- 1. vorh. Jobs updaten +-- 2. neue Jobs einfügen +-- 3. alte Jobs löschen + +insert into tmp_etl_job(uniquename,caption,systeminfo_id,logfile,already_exists,custom_job) +values ('${etl_job.uniquename}', +'${etl_job.name}', +${etl_job.systeminfo_id}, +'${etl_job.logfile}',0,0); + +<#if SQLdialect='Postgres'> +--Postgres Dialekt: + +update tmp_etl_job set tid=J.tid, +already_exists=1, +caption=J.caption, +logfile=J.logfile, +custom_job=J.custom_job +from etl_job J where J.uniquename=tmp_etl_job.uniquename +and J.systeminfo_id=tmp_etl_job.systeminfo_id +; + +<#else> + +--Informix Dialekt: +update tmp_etl_job set (tid, +already_exists, +caption, +logfile, +custom_job) + = ((select + tid, +1 as already_exists, +caption, +logfile, +custom_job +from etl_job J where J.uniquename=tmp_etl_job.uniquename +and J.systeminfo_id=tmp_etl_job.systeminfo_id)) +where 0 <(select count(*) +from etl_job J where J.uniquename=tmp_etl_job.uniquename +and J.systeminfo_id=tmp_etl_job.systeminfo_id) + +; + + + + + +--TODO Informix + +--neue jobs: +insert into etl_job(uniquename,caption,systeminfo_id,custom_job) +select uniquename,caption,systeminfo_id,custom_job +from tmp_etl_job +where already_exists=0; +--tid von neuen Jobs ermitteln: +update tmp_etl_job set tid=(select J.tid +from etl_job J where J.uniquename=tmp_etl_job.uniquename +and J.systeminfo_id=tmp_etl_job.systeminfo_id) +where already_exists=0 +; + +--TODO +--obsolete Jobs: sollen bei Deinstallation des Moduls entfernt werden + + +--Parameter: +<#foreach etl_job_param in etl_job_params> +<#if etl_job_param.etl_job==etl_job.uniquename> + +insert into tmp_etl_job_param( +etl_job_id , +uniquename, +name , +param_default) +select J.tid, +'${etl_job_param.param_name}', +'${etl_job_param.name}', +'${etl_job_param.param_default}' +from tmp_etl_job J +; + + + + + +--ETL-Schritte +<#assign sortnr=0 /> +<#foreach etl_step in etl_steps> +<#if etl_step.etl_job==etl_job.uniquename> + +<#assign sortnr=sortnr+1 /> + +insert into tmp_etl_step( +uniquename , +caption , +systeminfo_id , +step_type_uniquename, +sortnr, 
+force_continue, +etl_job_id , +parent_step_uniquename, +parent_job_uniquename, +logfile, +custom_step, +already_exists +) +select '${etl_step.uniquename}', +'${etl_step.name}', +${etl_job.systeminfo_id}, +'${etl_step.type}' as step_type_uniquename, +${sortnr}*10 as sortnr, +0 as force_continue, +J.tid as etl_job_id, +<#if etl_step.parent?exists && etl_step.parent !="" > +'${etl_step.parent}', +<#else> +'' as parent_step_uniquename, + +J.uniquename, +'${etl_job.logfile}' as logfile, +0, +0 +from etl_job J +where J.uniquename='${etl_job.uniquename}' +and J.systeminfo_id=${etl_job.systeminfo_id}; + + + + + --Ende steps eines job + + +--erst job-params einfügen: + +delete from etl_job_param +where etl_job_id in ( + SELECT distinct + etl_job_id +FROM tmp_etl_job_param ) + ; + +insert into etl_job_param +( + etl_job_id, + uniquename, + name, + param_default + ) + SELECT + etl_job_id, + uniquename, + name, + param_default +FROM tmp_etl_job_param + ; + +--nun steps einfügen: + + +update tmp_etl_step set step_type=(select T.tid from etl_step_type T +where T.uniquename=tmp_etl_step.step_type_uniquename); + +-- select * from tmp_etl_step +-- where step_type is null;--_uniquename from tmp_etl_step; + + +--vorhandene Steps erkennen: +<#if SQLdialect='Postgres'> +--Postgres Dialekt: + +update tmp_etl_step set tid=S.tid, +already_exists=1, +caption=S.caption, +logfile=S.logfile, +custom_step=S.custom_step +from etl_step S where S.uniquename=tmp_etl_step.uniquename +and S.systeminfo_id=tmp_etl_step.systeminfo_id +; +<#else> + +--Informix Dialekt: +update tmp_etl_step set (tid, +already_exists, +caption, +logfile, +custom_step) + = ((select + tid, +1 as already_exists, +caption, +logfile, +custom_step +from etl_step S where S.uniquename=tmp_etl_step.uniquename +and S.systeminfo_id=tmp_etl_step.systeminfo_id)) +where 0 <(select count(*) +from etl_step S where S.uniquename=tmp_etl_step.uniquename +and S.systeminfo_id=tmp_etl_step.systeminfo_id) +; + + + + + + +--neue Steps einfügen: +insert into etl_step( + uniquename, + caption, + systeminfo_id, + step_type_id, + logfile, + custom_step) +select + uniquename, + caption, + systeminfo_id, + step_type, + logfile, + custom_step +FROM tmp_etl_step +where already_exists=0 +; + +--tid von neuen steps ermitteln: +update tmp_etl_step set tid=(select S.tid +from etl_step S where S.uniquename=tmp_etl_step.uniquename +and S.systeminfo_id=tmp_etl_step.systeminfo_id) +where already_exists=0 +; + +--parent ermitteln: +update tmp_etl_step set parent_step_id=(select S.tid +from etl_step S where S.uniquename=tmp_etl_step.parent_step_uniquename +and S.systeminfo_id=tmp_etl_step.systeminfo_id) +; + + +delete from etl_step_property +where etl_step_id in (select T.tid +FROM tmp_etl_step T ) + ; +delete from etl_step_relation +where job_id in (select J.tid +FROM tmp_etl_job J) +and custom_step=0 + ; + + +--jetzt step-params: + +<#foreach etl_step_property in etl_step_properties> +insert into tmp_etl_step_property( + etl_step_id, + prop_name) +select + T.tid as etl_step_id, + '${etl_step_property.prop_name}' + +FROM tmp_etl_step T +where uniquename ='${etl_step_property.etl_step}' +; + + + +--einfügen in echte Tabelle: +insert into etl_step_property( + etl_step_id, + prop_name) +select + T.etl_step_id, + T.prop_name +FROM tmp_etl_step_property T +; + +--jetzt step-relation: +insert into tmp_etl_step_relation( + step_id, + parent_step_id, + job_id, + force_continue, + step_active, + sortnr, + custom_step +) +select + tid, + parent_step_id, + etl_job_id, + 0 as force_continue, + 1 
as step_active, + sortnr, + 0 +FROM tmp_etl_step + ; + +insert into etl_step_relation( + step_id, + parent_step_id, + job_id, + force_continue, + step_active, + sortnr, + custom_step +) +select + step_id, + parent_step_id, + job_id, + force_continue, + step_active, + sortnr, + custom_step +FROM tmp_etl_step_relation + ; + --Ende job + + +drop table tmp_etl_step; +drop table tmp_etl_job; +drop table tmp_etl_job_param; +drop table tmp_etl_step_property; +drop table tmp_etl_step_relation; + + +<#foreach testfall in testfaelle> +select 'testfall ${testfall.testcase}: ERFOLG' +from xdummy +where ${testfall.assertion}=(${testfall.sql}) +; +select 'testfall ${testfall.testcase}: FEHLER bei ${testfall.testcase}' +from xdummy +where ${testfall.assertion}!=(${testfall.sql}) +; + + + --Ende Testfälle + +<#if SQLdialect='Informix'> +--nun xupdates: +--bei informix kann man TEXT Spalten nicht updaten, daher per jdbc updaten: +--damit DOSQL nicht den xupdater beim Start dieses Scriptes anwirft, +--wird das in eine temp. Datei ausgelagert. +! echo "" > "./tmp"$MANDANTID".sql" +<#foreach etl_step_property in etl_step_properties> + +! echo '${etl_step_property.prop_value}' >>"./tmp"$MANDANTID".sql" + + + +! echo "" >> "./tmp"$MANDANTID".sql" +! DOSQL "./tmp"$MANDANTID".sql" + +<#else> +--Postgres: +<#foreach etl_step_property in etl_step_properties> + +update etl_step_property set prop_value='${etl_step_property.prop_value}' where prop_name='${etl_step_property.prop_name}' and etl_step_id=(select S.tid from etl_step S where S.uniquename='${etl_step_property.etl_step}'); + + + + + diff --git a/src-modules/module/etl/schluesseltabellen/etl_step_type.unl b/src-modules/module/etl/schluesseltabellen/etl_step_type.unl new file mode 100644 index 0000000..8c108c2 --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/etl_step_type.unl @@ -0,0 +1,5 @@ +1^LOAD^Tabelle hochladen^de.superx.etl.ActionHandler.EtlActionHandlerUploadRecords^ +2^DOSQL^SQL-Script ausführen^de.superx.etl.ActionHandler.EtlActionHandlerDosql^ +3^DOQUERY^SQL-Query ausführen^de.superx.etl.ActionHandler.EtlActionHandlerDoquery^ +4^UNLOAD^SQL-Ergebnis entladen^de.superx.etl.ActionHandler.EtlActionHandlerUnloadRecords^ +5^MSG^Logausgabe^de.superx.etl.ActionHandler.EtlActionHandlerMsg^ diff --git a/src-modules/module/etl/schluesseltabellen/fm_templates.unl b/src-modules/module/etl/schluesseltabellen/fm_templates.unl new file mode 100644 index 0000000..0073229 --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/fm_templates.unl @@ -0,0 +1,440 @@ +216^ETL_MAKROS^<#macro ETL_STEPS_FUELLEN>\ +\ +\ +create temp table tmp_etl_step(\ +tid INTEGER, \ +uniquename VARCHAR(255) , \ +caption VARCHAR(255) , \ +systeminfo_id INTEGER not null, \ +step_type INTEGER, \ +step_type_uniquename VARCHAR(255), \ +sortnr SMALLINT not null, \ +force_continue SMALLINT,\ +etl_job_id INTEGER , \ +parent_step_id INTEGER , \ +parent_step_uniquename varchar(255),\ +parent_job_uniquename varchar(255),\ +logfile varchar(255),\ +custom_step smallint,\ +already_exists smallint\ +) \ +;\ +\ +create temp table tmp_etl_job(\ + tid INTEGER, \ +uniquename VARCHAR(255) , \ +caption VARCHAR(255) , \ +systeminfo_id INTEGER not null,\ +logfile varchar(255),\ +already_exists smallint,\ +custom_job smallint\ +) \ +;\ +\ +create temp table tmp_etl_job_param(\ + tid SERIAL not null, \ +etl_job_id INTEGER , \ +uniquename VARCHAR(255) not null, \ +name VARCHAR(255) , \ +param_default VARCHAR(255) \ +) \ +;\ +\ +\ +create temp table tmp_etl_step_property(\ + tid SERIAL not null, \ 
+etl_step_id INTEGER not null, \ +prop_name VARCHAR(255) , \ +prop_value text\ +\ +) \ +;\ +\ +create temp table tmp_etl_step_relation(\ + tid SERIAL not null, \ +step_id INTEGER not null, \ +parent_step_id INTEGER , \ +job_id INTEGER not null, \ +force_continue SMALLINT default 1 , \ +step_active SMALLINT default 1, \ +sortnr SMALLINT default 1,\ +custom_step SMALLINT default 1\ +\ +) \ +;\ +\ +<#foreach etl_job in etl_jobs>\ +\ +truncate table tmp_etl_job;\ +truncate table tmp_etl_step;\ +truncate table tmp_etl_job_param;\ +truncate table tmp_etl_step_property;\ +truncate table tmp_etl_step_relation;\ +\ +--tids der jobs dürfen sich nicht ändern, daher \ +--\ +-- 1. vorh. Jobs updaten\ +-- 2. neue Jobs einfügen\ +-- 3. alte Jobs löschen\ +\ +insert into tmp_etl_job(uniquename,caption,systeminfo_id,logfile,already_exists,custom_job)\ +values ('${etl_job.uniquename}',\ +'${etl_job.name}',\ +${etl_job.systeminfo_id},\ +'${etl_job.logfile}',0,0);\ +\ +<#if SQLdialect='Postgres'>\ +--Postgres Dialekt:\ +\ +update tmp_etl_job set tid=J.tid,\ +already_exists=1,\ +caption=J.caption,\ +logfile=J.logfile,\ +custom_job=J.custom_job\ +from etl_job J where J.uniquename=tmp_etl_job.uniquename\ +and J.systeminfo_id=tmp_etl_job.systeminfo_id\ +;\ +\ +<#else>\ +\ +--Informix Dialekt:\ +update tmp_etl_job set (tid,\ +already_exists,\ +caption,\ +logfile,\ +custom_job) \ + = ((select \ + tid,\ +1 as already_exists,\ +caption,\ +logfile,\ +custom_job\ +from etl_job J where J.uniquename=tmp_etl_job.uniquename\ +and J.systeminfo_id=tmp_etl_job.systeminfo_id))\ +where 0 <(select count(*)\ +from etl_job J where J.uniquename=tmp_etl_job.uniquename\ +and J.systeminfo_id=tmp_etl_job.systeminfo_id)\ +\ +;\ +\ +\ +\ +\ +\ +--TODO Informix\ +\ +--neue jobs:\ +insert into etl_job(uniquename,caption,systeminfo_id,custom_job)\ +select uniquename,caption,systeminfo_id,custom_job\ +from tmp_etl_job\ +where already_exists=0;\ +--tid von neuen Jobs ermitteln:\ +update tmp_etl_job set tid=(select J.tid\ +from etl_job J where J.uniquename=tmp_etl_job.uniquename\ +and J.systeminfo_id=tmp_etl_job.systeminfo_id)\ +where already_exists=0\ +;\ +\ +--TODO\ +--obsolete Jobs: sollen bei Deinstallation des Moduls entfernt werden\ +\ +\ +--Parameter:\ +<#foreach etl_job_param in etl_job_params>\ +<#if etl_job_param.etl_job==etl_job.uniquename>\ +\ +insert into tmp_etl_job_param(\ +etl_job_id , \ +uniquename, \ +name , \ +param_default)\ +select J.tid,\ +'${etl_job_param.param_name}',\ +'${etl_job_param.name}',\ +'${etl_job_param.param_default}'\ +from tmp_etl_job J\ +;\ +\ +\ +\ +\ +\ +--ETL-Schritte \ +<#assign sortnr=0 />\ +<#foreach etl_step in etl_steps>\ +<#if etl_step.etl_job==etl_job.uniquename>\ +\ +<#assign sortnr=sortnr+1 />\ +\ +insert into tmp_etl_step(\ +uniquename , \ +caption , \ +systeminfo_id , \ +step_type_uniquename,\ +sortnr, \ +force_continue,\ +etl_job_id , \ +parent_step_uniquename,\ +parent_job_uniquename,\ +logfile,\ +custom_step,\ +already_exists\ +)\ +select '${etl_step.uniquename}',\ +'${etl_step.name}',\ +${etl_job.systeminfo_id},\ +'${etl_step.type}' as step_type_uniquename,\ +${sortnr}*10 as sortnr,\ +0 as force_continue,\ +J.tid as etl_job_id,\ +<#if etl_step.parent?exists && etl_step.parent !="" >\ +'${etl_step.parent}',\ +<#else>\ +'' as parent_step_uniquename,\ +\ +J.uniquename,\ +'${etl_job.logfile}' as logfile,\ +0,\ +0\ +from etl_job J\ +where J.uniquename='${etl_job.uniquename}'\ +and J.systeminfo_id=${etl_job.systeminfo_id};\ +\ +\ +\ +\ + --Ende steps eines job\ +\ +\ +--erst job-params 
einfügen:\ +\ +delete from etl_job_param\ +where etl_job_id in (\ + SELECT distinct \ + etl_job_id\ +FROM tmp_etl_job_param )\ + ;\ +\ +insert into etl_job_param\ +(\ + etl_job_id,\ + uniquename,\ + name,\ + param_default\ + )\ + SELECT \ + etl_job_id,\ + uniquename,\ + name,\ + param_default\ +FROM tmp_etl_job_param \ + ;\ +\ +--nun steps einfügen:\ +\ +\ +update tmp_etl_step set step_type=(select T.tid from etl_step_type T\ +where T.uniquename=tmp_etl_step.step_type_uniquename);\ +\ +select * from tmp_etl_step\ +where step_type is null;--_uniquename from tmp_etl_step;\ +\ +\ +--vorhandene Steps erkennen:\ +<#if SQLdialect='Postgres'>\ +--Postgres Dialekt:\ +\ +update tmp_etl_step set tid=S.tid,\ +already_exists=1,\ +caption=S.caption,\ +logfile=S.logfile,\ +custom_step=S.custom_step\ +from etl_step S where S.uniquename=tmp_etl_step.uniquename\ +and S.systeminfo_id=tmp_etl_step.systeminfo_id\ +;\ +<#else>\ +\ +--Informix Dialekt:\ +update tmp_etl_step set (tid,\ +already_exists,\ +caption,\ +logfile,\ +custom_step) \ + = ((select \ + tid,\ +1 as already_exists,\ +caption,\ +logfile,\ +custom_step\ +from etl_step S where S.uniquename=tmp_etl_step.uniquename\ +and S.systeminfo_id=tmp_etl_step.systeminfo_id))\ +where 0 <(select count(*)\ +from etl_step S where S.uniquename=tmp_etl_step.uniquename\ +and S.systeminfo_id=tmp_etl_step.systeminfo_id)\ +;\ +\ +\ +\ +\ +\ +\ +--neue Steps einfügen:\ +insert into etl_step(\ + uniquename,\ + caption,\ + systeminfo_id,\ + step_type_id,\ + logfile,\ + custom_step)\ +select \ + uniquename,\ + caption,\ + systeminfo_id,\ + step_type,\ + logfile,\ + custom_step\ +FROM tmp_etl_step \ +where already_exists=0\ +;\ +\ +--tid von neuen steps ermitteln:\ +update tmp_etl_step set tid=(select S.tid\ +from etl_step S where S.uniquename=tmp_etl_step.uniquename\ +and S.systeminfo_id=tmp_etl_step.systeminfo_id)\ +where already_exists=0\ +;\ +\ +--parent ermitteln:\ +update tmp_etl_step set parent_step_id=(select S.tid\ +from etl_step S where S.uniquename=tmp_etl_step.parent_step_uniquename\ +and S.systeminfo_id=tmp_etl_step.systeminfo_id)\ +;\ +\ +\ +delete from etl_step_property\ +where etl_step_id in (select T.tid\ +FROM tmp_etl_step T )\ + ;\ +delete from etl_step_relation\ +where job_id in (select J.tid\ +FROM tmp_etl_job J)\ +and custom_step=0\ + ;\ + \ +\ +--jetzt step-params:\ +\ +<#foreach etl_step_property in etl_step_properties>\ +insert into tmp_etl_step_property(\ + etl_step_id,\ + prop_name)\ +select \ + T.tid as etl_step_id,\ + '${etl_step_property.prop_name}'\ + \ +FROM tmp_etl_step T \ +where uniquename ='${etl_step_property.etl_step}'\ +;\ +\ +\ +\ +--einfügen in echte Tabelle:\ +insert into etl_step_property(\ + etl_step_id,\ + prop_name)\ +select \ + T.etl_step_id,\ + T.prop_name\ +FROM tmp_etl_step_property T \ +;\ +\ +--jetzt step-relation:\ +insert into tmp_etl_step_relation(\ + step_id,\ + parent_step_id,\ + job_id,\ + force_continue,\ + step_active,\ + sortnr,\ + custom_step\ +)\ +select \ + tid,\ + parent_step_id,\ + etl_job_id,\ + 0 as force_continue,\ + 1 as step_active,\ + sortnr,\ + 0\ +FROM tmp_etl_step \ + ;\ + \ +insert into etl_step_relation(\ + step_id,\ + parent_step_id,\ + job_id,\ + force_continue,\ + step_active,\ + sortnr,\ + custom_step\ +)\ +select \ + step_id,\ + parent_step_id,\ + job_id,\ + force_continue,\ + step_active,\ + sortnr,\ + custom_step\ +FROM tmp_etl_step_relation \ + ;\ + --Ende job\ +\ +\ +drop table tmp_etl_step;\ +drop table tmp_etl_job;\ +drop table tmp_etl_job_param;\ +drop table tmp_etl_step_property;\ 
+drop table tmp_etl_step_relation;\ +\ +<#if testfaelle?exists>\ +\ +<#foreach testfall in testfaelle>\ +select 'testfall ${testfall.testcase}: ERFOLG'\ +from xdummy\ +where ${testfall.assertion}=(${testfall.sql})\ +;\ +select 'testfall ${testfall.testcase}: FEHLER bei ${testfall.testcase}'\ +from xdummy\ +where ${testfall.assertion}!=(${testfall.sql})\ +;\ +\ +\ + --Ende Testfälle\ +\ +\ +<#if SQLdialect='Informix'> \ +--nun xupdates:\ +--bei informix kann man TEXT Spalten nicht updaten, daher per jdbc updaten:\ +--damit DOSQL nicht den xupdater beim Start dieses Scriptes anwirft,\ +--wird das in eine temp. Datei ausgelagert.\ +! echo "" > "./tmp"$MANDANTID".sql"\ +<#foreach etl_step_property in etl_step_properties>\ +\ +! echo '${etl_step_property.prop_value}' >>"./tmp"$MANDANTID".sql"\ +\ +\ +\ +! echo "" >> "./tmp"$MANDANTID".sql"\ +! DOSQL "./tmp"$MANDANTID".sql"\ +\ +<#else>\ +--Postgres:\ +<#foreach etl_step_property in etl_step_properties>\ +\ +update etl_step_property set prop_value='${etl_step_property.prop_value?replace("'", "''")}' where prop_name='${etl_step_property.prop_name}' and etl_step_id=(select S.tid from etl_step S where S.uniquename='${etl_step_property.etl_step?replace("'", "''")}');\ +\ +\ +\ +\ +^Datenbankunabhängigkeit^^1^ diff --git a/src-modules/module/etl/schluesseltabellen/fm_templates_fuellen.sql b/src-modules/module/etl/schluesseltabellen/fm_templates_fuellen.sql new file mode 100644 index 0000000..10dd661 --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/fm_templates_fuellen.sql @@ -0,0 +1,25 @@ +--freemarker template +--Werden immer ausgetauscht: +delete from fm_templates where id in (select id from tmp_templates); + + +<#if SQLdialect='Postgres'> +select sp_update_sequence('fm_templates'); + + +insert into fm_templates( + id, + content, + description, + comment, + version) + SELECT + id, + content, + description, + comment, + version +FROM tmp_templates; + +drop table tmp_templates; + diff --git a/src-modules/module/etl/schluesseltabellen/fm_templates_unload.x b/src-modules/module/etl/schluesseltabellen/fm_templates_unload.x new file mode 100755 index 0000000..10d8817 --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/fm_templates_unload.x @@ -0,0 +1,25 @@ +#!/bin/bash +#löscht alle nicht-ETL-templates und entlädt die Tabelle sortiert ins Rohdaten Verzeichnis (für git-diff). 
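+#
+# Flow: the local ./fm_templates.unl is loaded into a temp table tmp_templates,
+# the matching rows of fm_templates are then unloaded (ordered by tid, via the
+# jdbc client) back into ./fm_templates.unl, and the temp table is dropped again.
+# Assumed usage: run inside the module's schluesseltabellen directory with the
+# SuperX shell environment loaded (DOQUERY, sx_auto_upload_table.x, $DBDELIMITER).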
+ +DOQUERY "create table tmp_templates(tid integer, + id char(200) not null, +content text not null, +description char(200) , +comment char(200) , +version integer default 1 + +) +;" + +sx_auto_upload_table.x tmp_templates ./fm_templates.unl + + +SX_CLIENT=jdbc +export SX_CLIENT +#man_catalogue +DOQUERY "select * from fm_templates where id in (select T.id from tmp_templates T) order by tid" false $DBDELIMITER ./fm_templates.unl txt + +SX_CLIENT=psql +export SX_CLIENT +DOQUERY "drop table tmp_templates;" + diff --git a/src-modules/module/etl/schluesseltabellen/fm_templates_update.x b/src-modules/module/etl/schluesseltabellen/fm_templates_update.x new file mode 100755 index 0000000..82a8e99 --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/fm_templates_update.x @@ -0,0 +1,16 @@ +#!/bin/bash + +DOQUERY "create table tmp_templates(tid integer, + id char(200) not null, +content text not null, +description char(200) , +comment char(200) , +version integer +) +;" + +sx_auto_upload_table.x tmp_templates fm_templates.unl + +DOSQL fm_templates_fuellen.sql + + diff --git a/src-modules/module/etl/schluesseltabellen/sachgebiete_fuellen.sql b/src-modules/module/etl/schluesseltabellen/sachgebiete_fuellen.sql new file mode 100644 index 0000000..2e731dc --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/sachgebiete_fuellen.sql @@ -0,0 +1,14 @@ +--Das Sachgebiet 271 (Laderoutinen Administr.) wird installiert + + +delete from sachgebiete where tid in (271); +insert into sachgebiete(tid,name) values (271,'Laderoutinen Administr.'); + +delete from group_sachgeb_bez where sachgebiete_id in (270,271) +and groupinfo_id in (select G.tid from groupinfo G where G.name='Administratoren' or G.name='superx') ; + +insert into group_sachgeb_bez +select G.tid,270 from groupinfo G where G.name='Administratoren' or G.name='superx'; +insert into group_sachgeb_bez +select G.tid,271 from groupinfo G where G.name='Administratoren' or G.name='superx'; + diff --git a/src-modules/module/etl/schluesseltabellen/sachgebiete_loeschen.sql b/src-modules/module/etl/schluesseltabellen/sachgebiete_loeschen.sql new file mode 100644 index 0000000..224ff0e --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/sachgebiete_loeschen.sql @@ -0,0 +1,3 @@ +delete from sachgebiete where tid in (270,271); +delete from group_sachgeb_bez where sachgebiete_id in (270,271); +delete from user_sachgeb_bez where sachgebiete_id in (270,271); diff --git a/src-modules/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql b/src-modules/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql new file mode 100644 index 0000000..2c92a37 --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/sx_stylesheets_fuellen.sql @@ -0,0 +1,49 @@ + +--used in etl-job sx_insert_mask + + +--check stylesheets: +update tmp_etl_stylesheets set tid=(select S.tid +from sx_stylesheets S +where S.filename=tmp_etl_stylesheets.filename); + +--new Stylesheet? 
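+--(rows whose tid is still null after the update above do not exist in sx_stylesheets yet and are inserted below)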
+insert into sx_stylesheets(filename,caption,description,relation,contenttype) +select filename,caption,description,relation,contenttype +from tmp_etl_stylesheets +where tid is null; + +--lookup tid: +update tmp_etl_stylesheets set tid=(select S.tid +from sx_stylesheets S +where S.filename=tmp_etl_stylesheets.filename); + + +delete from sx_mask_style where maskeninfo_id in (select maskeninfo_id +from tmp_etl_mask_style); + +delete from stylesheet_field where stylesheet_id in (select tid +from tmp_etl_stylesheets); + + +--now insert: +insert into sx_mask_style ( maskeninfo_id, stylesheet_id, ord) +SELECT M.maskeninfo_id, S.tid, M.ord +FROM tmp_etl_mask_style M, sx_stylesheets S +where M.stylesheet_filename=S.filename +; + +insert into stylesheet_field (stylesheet_id, tablename, fieldname) +SELECT S.tid,F.tablename, F.fieldname +FROM tmp_etl_stylesheet_field F, sx_stylesheets S +where F.stylesheet_filename=S.filename +; + + + +drop table tmp_etl_stylesheets; +drop table tmp_etl_mask_style; +drop table tmp_etl_stylesheet_field; + + + diff --git a/src-modules/module/etl/schluesseltabellen/themenbaum_fuellen.sql b/src-modules/module/etl/schluesseltabellen/themenbaum_fuellen.sql new file mode 100644 index 0000000..7cbdb11 --- /dev/null +++ b/src-modules/module/etl/schluesseltabellen/themenbaum_fuellen.sql @@ -0,0 +1,22 @@ +--Freemarker Template + +<#if SQLdialect='Postgres'> +select sp_update_sequence('themenbaum'); + + +update tmp_etl_themenbaum set parent=(select min(tid) from themenbaum where name=tmp_etl_themenbaum.parent_name); + +--existiert bereits? +delete from tmp_etl_themenbaum +where exists(select T.tid +from themenbaum T where T.maskeninfo_id=tmp_etl_themenbaum.maskeninfo_id +and T.parent=tmp_etl_themenbaum.parent); + +--dann einfügen: + +insert into themenbaum (name,maskeninfo_id,parent,gueltig_seit,gueltig_bis) +select name,maskeninfo_id,parent,gueltig_seit,gueltig_bis from tmp_etl_themenbaum; + + +drop table tmp_etl_themenbaum; + diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandler.java b/src/de/superx/etl/ActionHandler/EtlActionHandler.java new file mode 100644 index 0000000..a0f1db8 --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandler.java @@ -0,0 +1,131 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Properties; +import java.util.logging.Logger; + +import de.superx.bin.SxConnection; +import de.superx.etl.EtlStep; + + + + +public class EtlActionHandler { + public Properties handlerSpecificProperties; + public Properties runTimeParams; + public Logger logger; + private String handlerType; + protected String logOutput=""; + protected long numberOfRows=0; + protected int returnCode; + protected SxConnection stepSxConnection; + protected Connection stepConnection; + public String propFile; + public StringWriter outputStringWriter; + + public EtlActionHandler(String handlerType, Properties hsp,Logger logger) { + this.handlerType=handlerType; + this.handlerSpecificProperties=hsp; + this.logger=logger; + } + public EtlActionHandler() { + //reflection API needs an empty constructor + } + + public void setReturnCode(int returnCode) { + this.returnCode=returnCode; + } + public int 
getReturnCode() { + + return returnCode; + } + + public void setNumberOfRows(long numberOfRows) { + this.numberOfRows=numberOfRows; + } + public long getNumberOfRows() { + + return numberOfRows; + } + + + public void setConnection(Connection con) { + // TODO Auto-generated method stub + + } + public void setLogOutput(String log) + { + logOutput=log; + } + public String getLogOutput() + { + return logOutput; + } + public String getPropFile() { + return propFile; + } + + + public void setPropFile(String propFile) { + this.propFile = propFile; + } + + + public StringWriter getOutputStringWriter() { + return outputStringWriter; + } + + + public void setOutputStringWriter(StringWriter sw) { + this.outputStringWriter = sw; + } + + + public Properties getHandlerSpecificProperties() { + return handlerSpecificProperties; + } + public void setHandlerSpecificProperties(Properties hsp) { + this.handlerSpecificProperties = hsp; + } + public Properties getRunTimeParams() { + return runTimeParams; + } + public void setRunTimeParams(Properties runTimeParams) { + this.runTimeParams = runTimeParams; + } + public Logger getLogger() { + return logger; + } + public void setLogger(Logger logger) { + this.logger = logger; + } + protected void getConnection() throws SQLException { + Statement st; + DatabaseMetaData dbmd; + stepSxConnection = new SxConnection(); + stepSxConnection.setPropfile(propFile); + logger.config("Starting Connection..."); + try { + stepConnection = stepSxConnection.getConnection(); + st = stepConnection.createStatement(); + dbmd = stepConnection.getMetaData(); + } catch (Exception e) { + e.printStackTrace(); + logger.severe("Keine DB-Verbindung: " + e.toString()); + throw new SQLException("Keine DB-Verbindung: " + e.toString()); + } + + + } +} diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandlerDoquery.java b/src/de/superx/etl/ActionHandler/EtlActionHandlerDoquery.java new file mode 100644 index 0000000..b05dba5 --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandlerDoquery.java @@ -0,0 +1,48 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; + +import de.superx.bin.SxConnection; +import de.superx.etl.QueryResultSerializer; +import de.superx.etl.SqlExecutor; + +public class EtlActionHandlerDoquery extends EtlActionHandler implements EtlActionHandlerI { + + public EtlActionHandlerDoquery() { + + } + @Override + public int execute(StringWriter sw, String mandantid, String stepUniquename,String outFormat) + throws SQLException, Exception + { + int returnCode=0; + long numberOfRows=0; + this.getConnection(); + String query=handlerSpecificProperties.getProperty("select_stmt"); + this.getConnection(); + SqlExecutor mySqlExecutor=new SqlExecutor("default", stepSxConnection,query,runTimeParams); + returnCode=mySqlExecutor.executeQueries(); + numberOfRows=mySqlExecutor.getNumberOfRows(); + super.setNumberOfRows(numberOfRows); + stepSxConnection.close(); + super.setReturnCode(returnCode); + + + //System.out.println(sw.toString()); + stepSxConnection.close(); + //super.setLogOutput(msg); + return returnCode; + } + +} diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandlerDosql.java 
b/src/de/superx/etl/ActionHandler/EtlActionHandlerDosql.java new file mode 100644 index 0000000..ad4c943 --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandlerDosql.java @@ -0,0 +1,68 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.File; +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; + +import de.superx.bin.SxConnection; +import de.superx.etl.QueryResultSerializer; +import de.superx.etl.SqlExecutor; + +public class EtlActionHandlerDosql extends EtlActionHandler implements EtlActionHandlerI { + + public EtlActionHandlerDosql() { + + } + @Override + public int execute(StringWriter sw, String mandantid, String stepUniquename,String outFormat) + throws SQLException, Exception + { + int returnCode=0; + String log=""; + long numberOfRows=0; + this.getConnection(); + String filename=handlerSpecificProperties.getProperty("PATH_TO_INPUTFILE"); + File sqlScriptFile = null; + try { + sqlScriptFile = new File(filename); + } catch (Exception e1) { + returnCode=1; + log+=" beim Öffnen der Datei "+ filename+": "+e1.toString(); + super.setLogOutput(log); + } + if(returnCode==0) + { + try { + SqlExecutor mySqlExecutor = new SqlExecutor("default", stepSxConnection,sqlScriptFile,runTimeParams); + returnCode=mySqlExecutor.executeQueries(); + String output=mySqlExecutor.getOutString().toString(); + numberOfRows=mySqlExecutor.getNumberOfRows(); + } catch (Exception e) { + returnCode=1; + log+=" beim Ausführen der Datei "+ filename+": "+e.toString(); + super.setLogOutput(log); + } + super.setNumberOfRows(numberOfRows); + } + stepSxConnection.close(); + super.setReturnCode(returnCode); + stepSxConnection.close(); + + if(returnCode==1) + throw new Exception("Fehler " + log); + //System.out.println(sw.toString()); + //super.setLogOutput(msg); + return returnCode; + } + +} diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java b/src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java new file mode 100644 index 0000000..cea989e --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandlerExecuteMask.java @@ -0,0 +1,51 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; + +import de.superx.bin.SxConnection; +import de.superx.etl.MaskExecutor; +import de.superx.etl.QueryResultSerializer; + +public class EtlActionHandlerExecuteMask extends EtlActionHandler implements EtlActionHandlerI { + + public EtlActionHandlerExecuteMask() { + + } + @Override + public int execute(StringWriter sw, String mandantid, String stepUniquename,String outFormat) + throws SQLException, Exception + { + int returnCode=0; + this.getConnection(); + int maskeninfo_id=new Integer(handlerSpecificProperties.getProperty("maskeninfo_id")); + String username=handlerSpecificProperties.getProperty("username"); + String mandantenId="default"; + Properties params=null ;//todo aus assertion lesen + MaskExecutor 
myMaskExecutor=new MaskExecutor(mandantenId, stepSxConnection,maskeninfo_id, username, + params,sw); + myMaskExecutor.setLogger(logger); + super.setNumberOfRows(myMaskExecutor.executeMask(mandantenId, + maskeninfo_id, + username, + params)); + super.setReturnCode(myMaskExecutor.getReturnCode()); + super.setOutputStringWriter(myMaskExecutor.getOutputString()); + + //System.out.println(sw.toString()); + stepSxConnection.close(); + //super.setLogOutput(msg); + return returnCode; + } + +} diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandlerI.java b/src/de/superx/etl/ActionHandler/EtlActionHandlerI.java new file mode 100644 index 0000000..84bd0b0 --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandlerI.java @@ -0,0 +1,34 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; + +import de.superx.bin.SxConnection; + +public interface EtlActionHandlerI { + + int getReturnCode(); + long getNumberOfRows(); + StringWriter getOutputStringWriter(); + + String getLogOutput(); + void setConnection(Connection con); + void setLogger(Logger logger); + void setHandlerSpecificProperties(Properties hsp); + void setRunTimeParams(Properties runTimeParams); + void setOutputStringWriter(StringWriter sw); + + int execute(StringWriter sw, String mandantid, String stepUniquename,String outFormat) throws SQLException,Exception; + void setPropFile(String propfile); + +} diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandlerMsg.java b/src/de/superx/etl/ActionHandler/EtlActionHandlerMsg.java new file mode 100644 index 0000000..07d9797 --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandlerMsg.java @@ -0,0 +1,30 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; + +public class EtlActionHandlerMsg extends EtlActionHandler implements EtlActionHandlerI { + + public EtlActionHandlerMsg() { + //reflection API needs an empty constructor + } + @Override + public int execute(StringWriter sw, String mandantid, String stepUniquename, String outFormat) + throws SQLException, Exception { + int ret=0; + String msg=handlerSpecificProperties.getProperty("msg"); + super.setLogOutput(stepUniquename+":"+ msg); + return ret; + } + +} diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandlerUnloadRecords.java b/src/de/superx/etl/ActionHandler/EtlActionHandlerUnloadRecords.java new file mode 100644 index 0000000..5999f1b --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandlerUnloadRecords.java @@ -0,0 +1,43 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.SQLException; +import 
java.util.Properties; +import java.util.logging.Logger; + +import de.superx.bin.SxConnection; +import de.superx.etl.QueryResultSerializer; + +public class EtlActionHandlerUnloadRecords extends EtlActionHandler implements EtlActionHandlerI { + + public EtlActionHandlerUnloadRecords() { + + } + @Override + public int execute(StringWriter sw, String mandantid, String stepUniquename,String outFormat) + throws SQLException, Exception + { + int returnCode=0; + this.getConnection(); + String query=handlerSpecificProperties.getProperty("select_stmt"); + QueryResultSerializer myQueryResultSerializer=new QueryResultSerializer("default", stepSxConnection,query,sw); + myQueryResultSerializer.setLogger(logger); + super.setNumberOfRows(myQueryResultSerializer.unloadQueryResults(stepUniquename,outFormat,"",true)); + super.setReturnCode(myQueryResultSerializer.getReturnCode()); + super.setOutputStringWriter(myQueryResultSerializer.getOutputString()); + + //System.out.println(sw.toString()); + stepSxConnection.close(); + //super.setLogOutput(msg); + return returnCode; + } + +} diff --git a/src/de/superx/etl/ActionHandler/EtlActionHandlerUploadRecords.java b/src/de/superx/etl/ActionHandler/EtlActionHandlerUploadRecords.java new file mode 100644 index 0000000..3f67962 --- /dev/null +++ b/src/de/superx/etl/ActionHandler/EtlActionHandlerUploadRecords.java @@ -0,0 +1,58 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.ActionHandler; + +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +import java.util.logging.Logger; + +import de.superx.bin.SxConnection; +import de.superx.etl.QueryResultSerializer; +import de.superx.etl.TableUploader; + +public class EtlActionHandlerUploadRecords extends EtlActionHandler implements EtlActionHandlerI { + + public EtlActionHandlerUploadRecords() { + + } + @Override + public int execute(StringWriter sw, String mandantid, String stepUniquename,String outFormat) + throws SQLException, Exception + { + int returnCode=0; + this.getConnection(); + String target_table=handlerSpecificProperties.getProperty("target_table"); + String inputfile=handlerSpecificProperties.getProperty("path_to_inputfile"); + String xml_search_path=handlerSpecificProperties.getProperty("search_path"); + String format=handlerSpecificProperties.getProperty("format"); + String truncateTargetTable=handlerSpecificProperties.getProperty("truncateTargetTable"); + + long numberOfRows=0; + //getConnection(logger, this.getPropfile()); + TableUploader myUploader=new TableUploader(); + //myUploader.setDbpropfile(this.getPropfile()); + myUploader.setInFormat(format); + myUploader.setTargetTable(target_table); + myUploader.setSrcFile(inputfile); + myUploader.setXml_search_path(xml_search_path); + myUploader.getConnection(stepSxConnection.getConnection(),null); + myUploader.setUploadConnection(stepSxConnection.getConnection()); + myUploader.setTruncateTargetTable(truncateTargetTable); + numberOfRows=myUploader.uploadFile(); + super.setLogOutput("Number of rows loaded: "+numberOfRows); + + + //System.out.println(sw.toString()); + stepSxConnection.close(); + //super.setLogOutput(msg); + return returnCode; + } + +} diff --git a/src/de/superx/etl/EtlAction.java b/src/de/superx/etl/EtlAction.java new file mode 100644 index 0000000..45188f3 --- /dev/null +++ 
b/src/de/superx/etl/EtlAction.java @@ -0,0 +1,200 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.PrintStream; +import java.io.StringWriter; +import java.io.UnsupportedEncodingException; +import java.sql.Connection; +import java.sql.SQLException; +import java.util.Properties; +import de.memtext.util.PropUtils; + +import javax.sql.DataSource; + +import de.superx.bin.SxConnection; +import de.superx.common.Sichten; + +public class EtlAction { + protected Properties params; + private boolean contOnError; + private String targetTable; + private String logoutput; + protected StringWriter actionOutput=new StringWriter(); + protected StringWriter actionLog=new StringWriter(); + + private String loglevel="INFO"; + private long starttime = new java.util.Date().getTime() ; + protected long endtime ; + private int returnCode; + private Connection dbconnection; + private Properties env; + private String uniquename; + protected String logfile; + protected String propfile; + private String encoding="UTF-8"; + protected DataSource dataSource; + + + public EtlAction(String uniquename, String logfile, String propfile) { + this.uniquename = uniquename; + this.logfile = logfile; + this.propfile=propfile; + this.addActionLog("Action initialized"+EtlUtils.NEWLINE); + + } + public EtlAction(String uniquename) { + this.uniquename = uniquename; + this.addActionLog("Action initialized"+EtlUtils.NEWLINE); + + } + public DataSource getDataSource() { + return dataSource; + } + public void setDataSource(DataSource dataSource) { + this.dataSource = dataSource; + } + public Connection getDbconnection() { + return dbconnection; + } + public void setDbconnection(Connection dbconnection) { + this.dbconnection = dbconnection; + } + + public String getUniquename() { + return uniquename; + } + public void setUniquename(String uniquename) { + this.uniquename = uniquename; + } + public String getLogfile() { + return logfile; + } + public void setLogfile(String logfile) { + this.logfile = logfile; + } + public Properties getEnv() { + return env; + } + public void setEnv(Properties env) { + this.env = env; + } + public Properties getParams() { + return params; + } + public void setParams(Properties params) { + this.params = params; + } + + public boolean isContOnError() { + return contOnError; + } + public void setContOnError(boolean contOnError) { + this.contOnError = contOnError; + } + public String getTargetTable() { + return targetTable; + } + public void setTargetTable(String targetTable) { + this.targetTable = targetTable; + } + public String getLogoutput() { + return logoutput; + } + protected void setLogoutput(String log) { + this.logoutput = log; + } + protected void appendLog(String log) { + this.logoutput+= log; + } + public String getLoglevel() { + return loglevel; + } + public void setLoglevel(String loglevel) { + this.loglevel = loglevel; + } + public long getStarttime() { + return starttime; + } + public void setStarttime(long starttime) { + this.starttime = starttime; + } + public long getEndtime() { + return endtime; + } + public void setEndtime(long endtime) { + this.endtime = 
endtime; + } + public long getDuration() { + return (this.endtime - this.starttime)/1000; + } + + + public String getPropfile() { + return propfile; + } + + public void setPropfile(String propfile) { + this.propfile = propfile; + } + + public int getReturnCode() { + return returnCode; + } + public StringWriter getActionOutput() { + return actionOutput; + } + public void setActionOutput(StringWriter actionOutput) { + this.actionOutput = actionOutput; + } + public StringWriter getActionLog() { + return actionLog; + } + + public void setActionLog(StringWriter actionLog) { + this.actionLog = actionLog; + } + public void addActionLog(String actionLog) { + + String log=de.memtext.util.DateUtils.getTodayString()+ " " + de.memtext.util.DateUtils.getNowString(); + this.actionLog.append(log+"- "+ actionLog+EtlUtils.NEWLINE); + } + protected void setReturnCode(int returnCode) { + this.returnCode = returnCode; + } + public int execute() throws SQLException, Exception + { + int returncode=0; + endtime=new java.util.Date().getTime() ; + return returncode; + } + + public Connection getConnection(Connection myConnection) throws Exception { + String myDefaultPropFile=de.superx.etl.EtlUtils.WEBINFDIR+de.superx.etl.EtlUtils.PATHSEP+"db.properties"; + if(myConnection==null) + { + if(propfile==null) + propfile=myDefaultPropFile; + SxConnection mySxConnection = null; + mySxConnection = new SxConnection(); + mySxConnection.setPropfile(propfile); + + myConnection = mySxConnection.getConnection(); + + + } + return myConnection; + } +} diff --git a/src/de/superx/etl/EtlActionJob.java b/src/de/superx/etl/EtlActionJob.java new file mode 100644 index 0000000..bba4e1d --- /dev/null +++ b/src/de/superx/etl/EtlActionJob.java @@ -0,0 +1,235 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl; + +import java.io.IOException; +import java.io.PrintStream; +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.util.Iterator; +import java.util.Properties; + +import javax.sql.DataSource; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowCallbackHandler; +import org.springframework.jdbc.datasource.DriverManagerDataSource; + +import de.memtext.rights.RightsKeyEntry; +import de.superx.common.SuperX_el; +import de.superx.common.SxResultRow; +import de.superx.common.SxResultSet; +import de.superx.common.SxSqlHelper; +import de.superx.servlet.SxPools; +import de.superx.util.SqlStringUtils; +import freemarker.template.TemplateBooleanModel; + +public class EtlActionJob extends EtlAction { + private String mandantenID; + private String jobTid; + private String jobUniquename; + private String jobLogfile; + private String jobCaption; + private Properties runTimeParams; + + private StringWriter sw; + public EtlActionJob(String uniquename, String logfile, String propfile) { + super(uniquename, logfile, propfile); + sw=new StringWriter(); + } + public EtlActionJob(String uniquename) { + super(uniquename); + sw=new StringWriter(); + } + + public StringWriter getSw() { + return sw; + } + public void setSw(StringWriter sw) { + this.sw = sw; + } + public void setRunTimeParams(String runTimeParamsArg) throws IOException { + if(runTimeParamsArg!=null) + this.runTimeParams = EtlUtils.convertStringToProperty(runTimeParamsArg); + } + + + public 
void initJob(String job,String runtimeParamsArg) throws Exception + { + long jetzt = new java.util.Date().getTime() ; + EtlUtils.initJobEnvironment(); + String paramName=null; + String paramCaption; + String paramDefault=""; + String paramProperties=""; + if(runtimeParamsArg==null) + runtimeParamsArg=""; + runtimeParamsArg="SUPERX_DIR="+EtlUtils.SUPERX_DIR+EtlUtils.NEWLINE+runtimeParamsArg; + //super.addActionLog("runtimeParamsArg:"+runtimeParamsArg); + this.setStarttime(jetzt); + Connection myConnection=this.getDbconnection(); + if(myConnection==null) + myConnection=this.getConnection(myConnection); + this.setDbconnection(myConnection); + if(!SqlStringUtils.checkValidKeyEntry(job)) + throw new SQLException("Invalid job "+job); + String sql = "select J.tid,J.uniquename,J.caption,J.logfile, P.uniquename as param_name,P.name as param_caption, P.param_default from etl_job J left outer join etl_job_param P on (J.tid=P.etl_job_id) where J.uniquename='"+job+"';"; + + SuperX_el el = new SuperX_el(); + SxSqlHelper sh=new SxSqlHelper(); + + sh.execute(sql, myConnection, el); + if (el.getError_String() != null + && !el.getError_String().trim().equals("")) + throw new SQLException("\nProblem bei Job DETAILS:" + "\n\n Meldung:" + + el.getError_String() + "\n sql:" + sql); + SxResultSet result= el.getResultSet(); + int rownr=0; + for (Iterator it = result.iterator(); it.hasNext();) { + rownr++; + SxResultRow row = (SxResultRow) it.next(); + jobTid=row.get(0).toString().trim(); + jobUniquename=row.get(1).toString().trim(); + jobCaption=row.get(2).toString().trim(); + jobLogfile=row.get(3).toString().trim(); + if(row.get(4)!=null) + paramName=row.get(4).toString().trim(); + if(row.get(5)!=null) + paramCaption=row.get(5).toString().trim(); + if(row.get(4)!=null) + paramDefault=row.get(6).toString().trim(); + if(paramName!=null) + paramProperties+=(paramName+"="+paramDefault+EtlUtils.NEWLINE); + + } + if(rownr==0) + throw new Exception("Job "+job+" unbekannt"); + //this.setLogfile(jobLogfile); + //this.appendLog("Job gefunden: "+jobCaption+EtlUtils.NEWLINE); + super.addActionLog("Job gefunden: "+jobCaption+EtlUtils.NEWLINE); + if(!paramProperties.equals("")) + this.setParams(EtlUtils.convertStringToProperty(paramProperties)); + if(runtimeParamsArg!=null) + { + this.setRunTimeParams(runtimeParamsArg); + super.addActionLog("Runtime Params: "+runtimeParamsArg); + } + if(runTimeParams!=null) + this.setParams(de.superx.etl.EtlUtils.mergeParamProperties(this.getParams(), runTimeParams)); + super.addActionLog("Job "+jobCaption+ " initialized"); + } + public int execute(String outfile) throws SQLException, Exception + { + Integer stepTid; + String stepUniquename; + String stepCaption; + String stepLogfile; + Integer stepSortnr; + Integer stepForceContinue; + Integer stepParentStepId; + String stepTypeUniquename; + String stepTypeCaption; + String stepTypeHandler; + + + int jobReturnCode=0; + String sql = "select S.tid,"+ + "S.uniquename,"+ + "S.caption,"+ + "S.systeminfo_id,"+ + "R.sortnr,"+ + "R.force_continue::integer as force_continue,"+ + "R.parent_step_id,"+ + "S.logfile,"+ + "T.uniquename,"+ + "T.caption,"+ + "T.handler"; + sql +=" FROM etl_step S, etl_step_type T, etl_step_relation R where T.tid=S.step_type_id and R.step_id=S.tid and R.job_id="+jobTid+" order by R.sortnr,R.tid;"; + + SuperX_el el = new SuperX_el(); + SxSqlHelper sh=new SxSqlHelper(); + + try { + sh.execute(sql, this.getConnection(null), el); + } catch (Exception e1) { + + super.addActionLog("Fehler beim SQL: 
"+sql+EtlUtils.NEWLINE+el.getError_String().trim()); + return 1; + } + if (el.getError_String() != null + && !el.getError_String().trim().equals("")) + throw new SQLException("\nProblem bei Steps DETAILS:" + "\n\n Meldung:" + + el.getError_String() + "\n sql:" + sql); + SxResultSet result= el.getResultSet(); + int rownr=0; + super.addActionLog("Steps found: "+ result.size()); + //erstmal nicht mit Zeit: executionTime=\""+de.memtext.util.DateUtils.getTodayString()+" " + de.memtext.util.DateUtils.getNowString()+"\" + sw.write("\n"); + for (Iterator it = result.iterator(); it.hasNext();) { + int stepReturnCode=0; + rownr++; + SxResultRow row = (SxResultRow) it.next(); + stepTid=(Integer) row.get(0); + stepUniquename=row.get(1).toString().trim(); + stepCaption=row.get(2).toString().trim(); + stepSortnr=(Integer) row.get(4); + stepForceContinue=(Integer) row.get(5); + stepParentStepId=(Integer) row.get(6); + stepLogfile=row.get(7).toString().trim(); + stepTypeUniquename=row.get(8).toString().trim();; + stepTypeCaption=row.get(9).toString().trim();; + stepTypeHandler=row.get(10).toString().trim();; + EtlStep myStep=new EtlStep(stepUniquename,jobLogfile,propfile); + myStep.setOutputStringWriter(sw); + myStep.initStep(jobUniquename, this.getParams(), stepTid, + stepUniquename, + stepLogfile, + stepCaption, + stepSortnr, + stepForceContinue, + stepParentStepId, + stepTypeUniquename, + stepTypeCaption, + stepTypeHandler); + stepReturnCode=myStep.execute(); + super.addActionLog(myStep.getActionLog().toString()); + sw=myStep.getOutputStringWriter(); + if(stepReturnCode!=0) + { + if(stepForceContinue.intValue()==0) + jobReturnCode=1; + break; + } + } + if(rownr==0) + throw new Exception("Steps unbekannt"); + sw.write("\n\n"); + if(outfile==null || outfile.equals("")) + { + System.out.println(sw.toString()); + System.out.println(super.getActionLog()); + } + else + { + try { + de.superx.etl.EtlUtils.saveFileContentsWithEncoding(outfile, sw.toString(), null); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + jobReturnCode=1; + } + } + long jetzt = new java.util.Date().getTime() ; + this.setEndtime(jetzt); + + + return jobReturnCode; + } +} diff --git a/src/de/superx/etl/EtlStep.java b/src/de/superx/etl/EtlStep.java new file mode 100644 index 0000000..6426be7 --- /dev/null +++ b/src/de/superx/etl/EtlStep.java @@ -0,0 +1,239 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl; + + +import java.io.InputStream; +import java.io.PrintStream; +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Enumeration; +import java.util.Iterator; +import java.util.Properties; +import java.util.logging.Logger; + +import javax.sql.DataSource; + +import org.springframework.jdbc.core.JdbcTemplate; +import org.springframework.jdbc.core.RowCallbackHandler; +import org.springframework.jdbc.datasource.DriverManagerDataSource; + +import de.superx.etl.ActionHandler.*; +import de.memtext.rights.RightsKeyEntry; +import de.memtext.util.GetOpts; +import de.superx.bin.Doquery; +import de.superx.bin.SxConnection; +import de.superx.common.SuperX_el; +import de.superx.common.SxResultRow; +import de.superx.common.SxResultSet; +import de.superx.common.SxSqlHelper; +import 
de.superx.servlet.SxPools; +import de.superx.util.SqlStringUtils; +import freemarker.template.TemplateBooleanModel; + +public class EtlStep extends EtlAction { + private String mandantenID; + private Integer stepTid; + private String stepUniquename; + private String stepLogfile; + private String stepCaption; + private Integer stepSortnr; + private Integer stepForceContinue; + private Integer stepParentStepId; + private String stepTypeUniquename; + private String stepTypeCaption; + private String stepTypeHandler; + private Properties jobRunTimeParams; + private Properties stepProperties; + private SxConnection stepSxConnection; + private Connection stepConnection; + public EtlStep(String uniquename, String logfile, String propfile) { + super(uniquename, logfile, propfile); + //this.setLogoutput("Step "+uniquename); + super.addActionLog("Step "+uniquename +" loaded"); + + } + public StringWriter getOutputStringWriter() { + return sw; + } + + + public void setOutputStringWriter(StringWriter sw) { + this.sw = sw; + } + private StringWriter sw; + + + + public void initStep(String job,Properties runTimeParams, Integer tid, + String uniquename, + String logfile, + String caption, + Integer sortnr, + Integer forceContinue, + Integer parentStepId, + String typeUniquename, + String typeCaption, + String typeHandler + ) throws Exception + { + jobRunTimeParams=runTimeParams; + stepTypeHandler=typeHandler; + stepTid=tid; + stepUniquename=uniquename; + stepLogfile=logfile; + stepCaption=caption; + stepSortnr=sortnr; + stepForceContinue=forceContinue; + stepParentStepId=parentStepId; + stepTypeUniquename=typeUniquename; + stepTypeCaption=typeCaption; + this.setLogfile(logfile); + super.addActionLog("Initializing Step: "+caption); + super.addActionLog("Handler: "+typeHandler); + //this.appendLog("Params: "+runTimeParams.toString()); + //get Params: + Connection myConnection=this.getDbconnection(); + if(myConnection==null) + myConnection=this.getConnection(myConnection); + this.setDbconnection(myConnection); + if(!SqlStringUtils.checkValidKeyEntry(job)) + throw new SQLException("Invalid job"); + String sql = "select tid,prop_name,prop_value from etl_step_property where etl_step_id="+tid.toString()+";"; + + SuperX_el el = new SuperX_el(); + SxSqlHelper sh=new SxSqlHelper(); + sh.execute(sql, myConnection, el); + if (el.getError_String() != null + && !el.getError_String().trim().equals("")) + throw new SQLException("\nProblem bei Step DETAILS:" + "\n\n Meldung:" + + el.getError_String() + "\n sql:" + sql); + SxResultSet result= el.getResultSet(); + stepProperties=null; + stepProperties=new Properties(); + int rownr=0; + for (Iterator it = result.iterator(); it.hasNext();) { + rownr++; + SxResultRow row = (SxResultRow) it.next(); + String paramName=row.get(1).toString().trim(); + String paramValue=row.get(2).toString().trim(); + String paramValueParsed=parseParams(paramValue); + stepProperties.setProperty(paramName, paramValueParsed); + super.addActionLog("Parameter "+paramName+":"+paramValueParsed); + + } + + } + private String parseParams(String paramValue) + { + String parsedParam=paramValue; + Enumeration runTimeParamNames = jobRunTimeParams.propertyNames(); + while (runTimeParamNames.hasMoreElements()) { + String runTimeParamName = (String)runTimeParamNames.nextElement(); + String runTimeParamValue = jobRunTimeParams.getProperty(runTimeParamName); + parsedParam=de.memtext.util.StringUtils.replace(parsedParam,"$"+runTimeParamName, runTimeParamValue); + + } + return parsedParam; + + } + + public int 
execute() + throws Exception + { + int returnCode=0; + long numberOfRows=0; + Logger logger =(Logger) Logger.getLogger(EtlStep.class.toString()); + + super.addActionLog("Executing Step "+this.stepUniquename); + Class handlerClass = Class.forName(stepTypeHandler); + Object handlerObject = handlerClass.newInstance(); + ((EtlActionHandlerI) handlerObject).setLogger(logger); + ((EtlActionHandlerI) handlerObject).setHandlerSpecificProperties(stepProperties); + ((EtlActionHandlerI) handlerObject).setRunTimeParams(jobRunTimeParams); + ((EtlActionHandlerI) handlerObject).setOutputStringWriter(this.getOutputStringWriter()); + ((EtlActionHandlerI) handlerObject).setPropFile(this.getPropfile()); + + try { + returnCode=((EtlActionHandlerI) handlerObject).execute(this.getOutputStringWriter(),"default",this.stepUniquename,"xml"); + } catch (Exception e) { + returnCode=1; + super.addActionLog("Fehler beim Step "+ this.stepUniquename+ ": "+e.toString()); + //TODO forceContinue auswerten + } + this.setOutputStringWriter(((EtlActionHandlerI) handlerObject).getOutputStringWriter()); + + super.addActionLog(((EtlActionHandlerI) handlerObject).getLogOutput()); + + + /* + if(stepTypeHandler.equals("de.superx.bin.UnloadRecords")) + { + getConnection(logger, this.getPropfile()); + String query=stepParams.getProperty("select_stmt"); + + sw=this.getSw(); + QueryResultSerializer myQueryResultSerializer=new QueryResultSerializer("default", stepSxConnection,query,sw); + myQueryResultSerializer.setLogger(logger); + NumberOfRows=myQueryResultSerializer.unloadQueryResults(this.stepUniquename,"xml","",true); + this.setReturnCode(myQueryResultSerializer.getReturnCode()); + + sw=myQueryResultSerializer.getOutputString(); + this.setSw(sw); + //System.out.println(sw.toString()); + stepSxConnection.close(); + } + if(stepTypeHandler.equals("de.superx.bin.Doquery")) + + { + String query=stepParams.getProperty("select_stmt"); + getConnection(logger, this.getPropfile()); + SqlExecutor mySqlExecutor=new SqlExecutor("default", stepSxConnection,query,jobRunTimeParams); + returnCode=mySqlExecutor.executeQueries(); + + stepSxConnection.close(); + } + + if(stepTypeHandler.equals("de.superx.bin.UploadRecords")) + + { + String target_table=stepParams.getProperty("target_table"); + String inputfile=stepParams.getProperty("path_to_inputfile"); + String xml_search_path=stepParams.getProperty("search_path"); + long numberOfRows=0; + //getConnection(logger, this.getPropfile()); + TableUploader myUploader=new TableUploader(); + myUploader.setDbpropfile(this.getPropfile()); + myUploader.setInFormat("xml"); + myUploader.setTargetTable(target_table); + myUploader.setSrcFile(inputfile); + myUploader.setXml_search_path(xml_search_path); + //myUploader.setUploadConnection(stepSxConnection.getConnection()); + myUploader.setUploadConnection(myUploader.getConnection(null,this.getPropfile())); + //SqlExecutor mySqlExecutor=new SqlExecutor("default", stepSxConnection,query); + numberOfRows=myUploader.uploadFile(); + this.appendLog("Number of rows loaded: "+numberOfRows); + + + } + if(stepTypeHandler.trim().equals("msg")) + + { + String msg=stepParams.getProperty("msg"); + + this.appendLog(msg); + }*/ + //logger.info(this.getLogoutput()); + return returnCode; + } + +} diff --git a/src/de/superx/etl/EtlUtils.java b/src/de/superx/etl/EtlUtils.java new file mode 100644 index 0000000..d50706e --- /dev/null +++ b/src/de/superx/etl/EtlUtils.java @@ -0,0 +1,336 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel 
Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.OutputStreamWriter; +import java.io.StringReader; +import java.io.StringWriter; +import java.io.UnsupportedEncodingException; +import java.net.URISyntaxException; +import java.util.Enumeration; +import java.util.NoSuchElementException; +import java.util.Properties; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; + +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.xml.sax.InputSource; +import org.xml.sax.SAXException; + +import de.superx.servlet.SuperXManager; + +/** + + */ +public class EtlUtils { +public static final String NEWLINE=System.getProperty("line.separator"); +public static final String PATHSEP=File.separator; +public static String WEBINFDIR=SuperXManager.getWEB_INFPfad(); +public static String SUPERX_DIR=(System.getProperties().containsKey("SUPERX_DIR")? System.getProperty("SUPERX_DIR"):""); + +public static void main(String args[]) { + + try { + String tidInXmlFile=""; + Document mydomres =de.superx.etl.EtlUtils.buildDocumentFromXmlFile("/home/superx/devel_module/community/tomcat/temp/myTempFile6460222908896375059.xml"); + XPathFactory factory = new net.sf.saxon.xpath.XPathFactoryImpl(); + + XPath xPath = factory.newXPath(); + //String searchPath="/etlAction[@name=\"sx_select_mask\"]/unload/row/fld[@name=\"tid\"]"; + String searchPath="/etlAction[@name=\"sx_select_mask\"]/unload[@name=\"unload_maskeninfo\"]/rs/row/fld[@name=\"tid\"]"; + + Node tidNode=(Node) xPath.compile(searchPath).evaluate( + mydomres, XPathConstants.NODE); + if(tidNode!=null) + tidInXmlFile=de.memtext.util.XMLUtils.getTheValue(tidNode); + else + System.out.println("node not found"); + System.out.println(tidInXmlFile); + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (XPathExpressionException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (ParserConfigurationException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (SAXException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } +} + +public static Properties convertStringToProperty(String inp) throws IOException +{ + Properties myProps = new Properties(); + myProps.load(new StringReader(inp)); + return myProps; +} +public static Properties mergeParamProperties(Properties params, Properties runTimeParams) + +{ + String parsedParam; + Enumeration runTimeParamNames = runTimeParams.propertyNames(); + while (runTimeParamNames.hasMoreElements()) { + String runTimeParamName = (String)runTimeParamNames.nextElement(); + String runTimeParamValue = runTimeParams.getProperty(runTimeParamName); + Enumeration paramNames = params.propertyNames(); + while (paramNames.hasMoreElements()) { + String paramName=(String)paramNames.nextElement(); 
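+ // a runtime parameter overrides a default parameter of the same name and is
+ // additionally substituted into any value that references it as "$"+name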
+ String paramValue=params.getProperty(paramName); + if(paramName.equals(runTimeParamName) ) + { + paramValue=runTimeParamValue; + params.setProperty(paramName, paramValue); + } + if(paramValue.indexOf("$"+runTimeParamName)>-1) + { + paramValue=de.memtext.util.StringUtils.replace(paramValue,"$"+runTimeParamName, runTimeParamValue); + params.setProperty(paramName, paramValue); + } + + } + } + return params; + +} +public static void initJobEnvironment() +{ + String initVar=""; + if(System.getProperty("WEBINFDIR") ==null) + { + + try { + initVar=de.superx.servlet.SuperXManager.getWEB_INFPfad(); + if(initVar==null || initVar.equals(".")) + { + //ermittle webinfdir + initVar=getWebinfDirectory(); + + } + } catch (Exception e) { + // do nothing, try another + } + + WEBINFDIR=initVar; + + } + + if(System.getProperty("SUPERX_DIR") ==null) + { + SUPERX_DIR=WEBINFDIR+PATHSEP+"conf"+PATHSEP+"edustore"; + + } + +} +private static String getJarName() +{ + return new File(EtlUtils.class.getProtectionDomain() + .getCodeSource() + .getLocation() + .getPath()) + .getName(); +} + +private static boolean runningFromJar() +{ + String jarName = getJarName(); + return jarName.contains(".jar"); +} + +public static String getWebinfDirectory() throws URISyntaxException +{ + if (runningFromJar()) + { + return getWebinfDirectoryFromJar(); + } else + { + return getWebinfDirectoryFromClass(); + } +} + +private static String getWebinfDirectoryFromClass() +{ + File f= new File(EtlUtils.class.getProtectionDomain() + .getCodeSource() + .getLocation() + .getPath()+PATHSEP+".."); + String class_path=f.getAbsolutePath(); + return class_path; + +} + +private static String getWebinfDirectoryFromJar() throws URISyntaxException +{ + String pathOfJarFile=new File(EtlUtils.class.getProtectionDomain().getCodeSource().getLocation().toURI().getPath()).getParent(); + String webinfDir=new File(pathOfJarFile+PATHSEP+"..").getAbsolutePath(); + return webinfDir; + +} +/* die folgenden 2 Methoden + * getFileContentsWithEncoding + * saveFileContentsWithEncoding + * sind in kern5.0 in de.superx.util.FileUtils + * wg. 
abwärtskompatiblität hierhin kopiert, sollten langfristig wieder weg + */ +public static String getFileContentsWithEncoding(String filePath, String encoding) { + File f = new File(filePath); + if (!f.exists()) { + System.out.println("Fehler: Datei " + filePath + " existiert nicht."); + return null; + } + String fileContents = ""; + if (encoding == null || encoding.trim().equals("")) { + encoding = System.getProperty("file.encoding"); + } + try { + // --- IputStream und OutputStream generieren ---// + FileInputStream fis = new FileInputStream(f); + // Wenn Quelldatei Unicode, dann speziellen Reader nutzen + BufferedReader in; + //BufferedReader ist schneller bei großen Dateien + in = new BufferedReader(new InputStreamReader(fis, encoding)); + // --- Output-Stream der temporären Datei erzeugen ---// + StringWriter out = new StringWriter(); + // --- Verarbeiten der Datei ---// + String text; + text = in.readLine(); + while (text != null) { // Datei nicht leer + out.write(text); + out.write(System.getProperty("line.separator")); + text = in.readLine(); + } + if (!(out == null)) { + fileContents = out.toString(); + } + } catch (FileNotFoundException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (UnsupportedEncodingException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + return fileContents; + } + + public static void saveFileContentsWithEncoding(String filename, String contents, String encoding) throws + + FileNotFoundException, + IOException + { + + + File f = new File(filename); + BufferedReader in; + BufferedWriter out; + + //Default encoding ist utf-8 + if (encoding == null) encoding = System.getProperty("file.encoding"); + // --- Output-Stream der temporären Datei erzeugen ---// + out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(f), encoding)); + + out.write(contents); + + out.close(); + + + }//Ende der Methode + + +/*SAX Document aus XML-Datei erzeugen */ + public static Document buildDocumentFromXmlFile(String srcFile) + throws ParserConfigurationException, FileNotFoundException, SAXException, IOException { + Document mydomres; + org.xml.sax.InputSource is; + DocumentBuilderFactory dfactory = DocumentBuilderFactory.newInstance(); + DocumentBuilder builder = dfactory.newDocumentBuilder(); + FileInputStream in = new FileInputStream(srcFile); + + is=new org.xml.sax.InputSource(in); + mydomres = builder.newDocument(); + mydomres = builder.parse(is); + return mydomres; + } + /*** + * TODO: nach merge in master diese Methode löschen, und auf de.memtext.util.XMLUtils.parseXml(String) verweisen + * @author Witt This function parses XML-containing string into documents while + * preserving the namespaces and is primarily meant to be used withing + * (jUnit) test cases + * @param xmlString + * @return + * @throws ParserConfigurationException + * @throws SAXException + * @throws IOException + */ + public static Document parseXml(String xmlString) throws ParserConfigurationException, SAXException, IOException { + DocumentBuilderFactory myFactory = DocumentBuilderFactory.newInstance(); + myFactory.setNamespaceAware(true); + DocumentBuilder myBuilder; + myBuilder = myFactory.newDocumentBuilder(); + Document myDocument = myBuilder.parse(new InputSource(new StringReader(xmlString))); + return myDocument; + } + /** + * @param src + * @param tidInXmlFile + * @return + * @throws ParserConfigurationException + * @throws SAXException + * @throws 
IOException + * @throws XPathExpressionException + */ + public static boolean isNodeValueInXml(String src, String searchPath, String expectedValue) + throws ParserConfigurationException, SAXException, IOException, XPathExpressionException { + boolean b=false; + Document mydomres =de.superx.etl.EtlUtils.parseXml(src); + XPathFactory factory = new net.sf.saxon.xpath.XPathFactoryImpl(); + + XPath xPath = factory.newXPath(); + + Node myNode=(Node) xPath.compile(searchPath).evaluate( + mydomres, XPathConstants.NODE); + if(myNode!=null) + { + String foundValue=de.memtext.util.XMLUtils.getTheValue(myNode); + if(!(foundValue==null) && foundValue.trim().equals(expectedValue)) + b=true; + } + return b; + + } + public static String translateReturnCode(int returnCode) + { + String returnString="Fehlerhaft"; + if(returnCode==0) + returnString="Erfolg"; + return returnString; + } +} diff --git a/src/de/superx/etl/QueryResultSerializer.java b/src/de/superx/etl/QueryResultSerializer.java new file mode 100644 index 0000000..7d01626 --- /dev/null +++ b/src/de/superx/etl/QueryResultSerializer.java @@ -0,0 +1,437 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl; +import java.io.BufferedOutputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.JDBCType; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.logging.LogManager; +import java.util.logging.Logger; +import java.util.logging.Level; + +import de.memtext.util.StringUtils; +import de.superx.bin.SxConnection; +import de.superx.bin.SxDBUtils; +import de.superx.bin.SxJdbcClient; + +import java.util.Iterator; +import java.util.StringTokenizer; + +import de.superx.etl.bin.SxTransformer; +import de.superx.common.SxResultSet; +import de.superx.servlet.ServletUtils; +import de.superx.servlet.SxSQL_Server; +import de.superx.util.SqlStringUtils; +/** + * @author Daniel Quathamer Projektgruppe SuperX + * doquery.java + * @ + * Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.
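+ * (The class serializes the result of a query or of an existing ResultSet as CSV ("txt") or XML, either into a StringWriter or into a FileWriter.)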
+ * Gebrauch:
java doquery (optional) (optional) (optional) + * + + */ +/* + * SQL-Abfragen Ergebnis serialisieren nach CSV / XML + */ +public class QueryResultSerializer { + public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, String query,StringWriter sw) { + super(); + this.mySxConnection = mySxConnection; + this.query = prepareQuery(query); + this.mandantenId = mandantenId; + this.logger = Logger.getLogger("superx_" + mandantenId); + this.outputString=sw; + } + public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, String query,FileWriter fw) { + super(); + this.mySxConnection = mySxConnection; + this.query = prepareQuery(query); + this.mandantenId = mandantenId; + this.logger = Logger.getLogger("superx_" + mandantenId); + this.outputFile=fw; + } + public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, ResultSet rs,FileWriter fw) { + super(); + this.mySxConnection = mySxConnection; + this.myrs=rs; + this.mandantenId = mandantenId; + this.logger = Logger.getLogger("superx_" + mandantenId); + this.outputFile=fw; + } + public QueryResultSerializer(String mandantenId,SxConnection mySxConnection, ResultSet rs,StringWriter sw) { + super(); + this.mySxConnection = mySxConnection; + this.myrs=rs; + this.mandantenId = mandantenId; + this.logger = Logger.getLogger("superx_" + mandantenId); + this.outputString=sw; + } + public final String NEWLINE=System.getProperty("line.separator"); + private String outfile; + private SxConnection mySxConnection; + private String query; + private String CSVdelim; + private Boolean printColNames; + private String outFormat; + private Logger logger; + private String mandantenId="default"; + private Statement st; + private Connection myConnection; + private String[] colnames; + private int[] coltypes; + private int columnCount; + private StringBuffer outString=new StringBuffer(""); + //private PrintStream stringOutWriter=new PrintStream(outString); + //private PrintWriter output=new PrintWriter(stringOutWriter); + private String outputHeader=""; + private String outputFooter=""; + private BufferedOutputStream outputStream; + public StringWriter outputString; + public FileWriter outputFile; + private int returnCode; + private ResultSet myrs; + + public String getOutfile() { + return outfile; + } + public void setOutfile(String outfile) { + this.outfile = outfile; + } + public SxConnection getMySxConnection() { + return mySxConnection; + } + public void setMyConnection(SxConnection mySxConnection) { + this.mySxConnection = mySxConnection; + } + public String getQuery() { + return query; + } + public void setQuery(String query) { + this.query = query; + + } + + public String getOutFormat() { + return outFormat; + } + public void setOutFormat(String outFormat) { + this.outFormat = outFormat; + } + public Logger getLogger() { + return logger; + } + public void setLogger(Logger logger) { + this.logger = logger; + } + public String getMandantenId() { + return mandantenId; + } + public void setMandantenId(String mandantenId) { + this.mandantenId = mandantenId; + } + public String[] getColnames() { + return colnames; + } + public void setColnames(String[] colnames) { + this.colnames = colnames; + } + public int[] getColtypes() { + return coltypes; + } + public void setColtypes(int[] coltypes) { + this.coltypes = coltypes; + } + public StringWriter getOutputString() { + return outputString; + } + public void setOutputString(StringWriter output) { + this.outputString = output; + } + public FileWriter getOutputFile() { + 
return outputFile; + } + public void setOutputFile(FileWriter outputFile) { + this.outputFile = outputFile; + } + public int getColumnCount() { + return columnCount; + } + public void setColumnCount(int columnCount) { + this.columnCount = columnCount; + } + + + + public int getReturnCode() { + return returnCode; + } + public void setReturnCode(int returnCode) { + this.returnCode = returnCode; + } + protected ResultSet getMyrs() { + return myrs; + } + protected void setMyrs(ResultSet myrs) { + this.myrs = myrs; + } + public long unloadQueryResults(String uniquename, String outformat,String delim,boolean printColname) throws Exception + { + returnCode=0; + long numberOfRows=0; + String zs=""; + if(myrs==null) + { + SxResultSet rs = null; + + SxJdbcClient myClient = new SxJdbcClient(logger, mySxConnection.getPropfile(), query); + + + //logger.info("Executing sql: " + query); + myClient.Rs_executeALL(); + logger.info("Getting resultset") ; + myrs = myClient.getRs(); + //logger.info("Starting output for: " + query); + } + if(myrs != null ) + { + ResultSetMetaData rsmd = myrs.getMetaData(); + setColnamesAndTypes(rsmd); + + + if(outformat.equalsIgnoreCase("xml")) + { + zs+=""; + if(printColname) + zs+=this.getColnamesXML(); + } + else + { + //CSV: + if(printColname) + zs+=this.getColnamesCSV(delim)+"\n"; + } + //this.outputStream.w + //StringWriter sw = new StringWriter(); + //sw.write(zs); + //this.setOutputString(sw); + + try { + numberOfRows=printResult(outformat,zs,myrs,delim); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + returnCode=1; + } + } + return numberOfRows; + } + private long printResult(String outformat,String header,ResultSet outrs,String delim) throws SQLException, IOException + { + StringBuffer line = new StringBuffer(); + int types[]=this.getColtypes(); + String lbl[]=this.getColnames(); + String f_wert=null; + Object o = null; + int rowCounterBeforeFlush = 0; + long rowCounter = 0; + FileWriter fw = null; + StringWriter sw=null; + String rowDelim1=(outformat.equalsIgnoreCase("xml"))?"":""; + String rowDelim2=(outformat.equalsIgnoreCase("xml"))?""+NEWLINE:"\n"; //bei CSV ist Zeilentrenner immer \n, nicht DOS-Anpassung + String footer=(outformat.equalsIgnoreCase("xml"))?"":""; + + boolean writeFileOutput=(this.outputFile==null)?false:true; + int colNumber=this.getColumnCount(); + if(writeFileOutput) + { + fw=this.getOutputFile(); + fw.write(header); + } + else + { + sw=this.getOutputString(); + sw.write(header); + } + while (outrs.next()) + { + line.setLength(0); + line.append(rowDelim1); + for (int i = 1; i <= colNumber; i++) { + if (types[i-1] == Types.LONGVARCHAR) { + //Der Informix-Treiber verlangt hier getString + // statt getObject! + o = outrs.getString(i); + } else { + o = outrs.getObject(i); + } + //vergl. 
de.superx.common.Maske.getCSV + f_wert = SxDBUtils.field_value(o); + f_wert = prepareFieldValue(outformat,lbl[i-1],f_wert,delim); + + if (i < colNumber) + line.append( f_wert ); + else { + line.append(f_wert ); + line.append(rowDelim2); + } + + } + + if(writeFileOutput) + { + //fw=this.getOutputFile(); + + fw.write(line.toString()); + rowCounterBeforeFlush++; + if (rowCounterBeforeFlush > 10000) { + de.memtext.util.MemoryUtils.printfree(); + fw.flush(); + + rowCounterBeforeFlush = 0; + } + } + else + { + //sw=this.getOutputString(); + sw.write(line.toString()); + } + rowCounter++; + + + } ; //von while + outrs.close(); + + if(writeFileOutput) + { + if(outformat.equalsIgnoreCase("xml")) + fw.write(footer); + fw.flush(); + fw.close(); + } + else + { + if(outformat.equalsIgnoreCase("xml")) + sw.write(footer); + sw.close(); + this.setOutputString(sw); + } + return rowCounter; + + + + + + + } + private void setColnamesAndTypes(ResultSetMetaData rsmd) throws SQLException + { + int cols = rsmd.getColumnCount(); + int[] types = new int[cols]; + String[] lbl = new String[cols]; + + // Print the result column names? + for (int i = 0; i < cols; i++) { + lbl[i] = rsmd.getColumnLabel(i+1); + types[i] = rsmd.getColumnType(i+1); + } + + this.setColnames(lbl); + this.setColtypes(types); + this.setColumnCount(cols); + } + + + + private String getColnamesCSV(String delim) { + String headZs = ""; + int colNumber = this.getColnames().length; + for (int i = 0; i < colNumber; i++) { + headZs += ( this.getColnames()[i] +delim); + } + + return headZs; + } + private String getColnamesXML() { + String headZs = ""; + int colNumber = this.getColnames().length; + for (int i = 0; i < colNumber; i++) { + headZs += ""+NEWLINE; + + } + headZs+=""; + + return headZs; + } + private String prepareQuery(String query) + { + query = query.trim(); + //DOS produces unnecessary ".." around the stmt + if (query.length() > 0) { + + if (query.startsWith("\"")) + query = query.substring(1, query.length()); + if (query.endsWith("\"")) + query = query.substring(0, query.length() - 1); + } + return query; + } + private String prepareFieldValue(String outformat,String fieldName, String f_wert,String delim) + { + String fieldValue=""; + if (outformat.equals("xml")) + { + fieldValue+=""; + f_wert=encodeCDATA(f_wert); + if(f_wert.indexOf("&") > -1 || f_wert.indexOf("<") > -1 || f_wert.indexOf(">") > -1) + fieldValue += ""; + else + fieldValue += f_wert; + fieldValue+=""+NEWLINE; + } + //wenn der Feldwert zufällig das Trennzeichen enthält, wird es mit "\" maskiert + if (outformat.equals("txt")) { + fieldValue = SxDBUtils.prepareInformixCsv(f_wert); + if (fieldValue != null && (fieldValue.indexOf(delim) > -1)) fieldValue = de.memtext.util.StringUtils.replace(fieldValue, delim, "\\" + delim); + fieldValue+=delim; + } + + + return fieldValue; + } + /** + * Beim XML Export darf der Feldinhalt nicht CDATA-Tags enthalten, dies wrürde beim Import + * Probleme machen. 
Daher durch Platzhalter ersetzen + * + * @param Feldinhalt + * PG/IDS + * @return Feldinhalt (ersetzt) + */ + private String encodeCDATA(String x) + { + if (x==null) x=""; + x = StringUtils.replace(x,"", + "CDATAEND"); + return x; + + } +} diff --git a/src/de/superx/etl/SqlExecutor.java b/src/de/superx/etl/SqlExecutor.java new file mode 100644 index 0000000..d5f40e0 --- /dev/null +++ b/src/de/superx/etl/SqlExecutor.java @@ -0,0 +1,340 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl; +import java.io.BufferedOutputStream; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.JDBCType; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.util.logging.LogManager; +import java.util.logging.Logger; +import java.util.logging.Level; + +import de.memtext.util.DateUtils; +import de.memtext.util.StringUtils; +import de.superx.bin.FMParser; +import de.superx.bin.SxConnection; +import de.superx.bin.SxDBUtils; +import de.superx.bin.SxJdbcClient; + +import java.util.Enumeration; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Properties; +import java.util.StringTokenizer; + +import de.superx.etl.bin.SxTransformer; +import de.superx.bin.XUpdater; +import de.superx.common.FieldContainer; +import de.superx.servlet.ServletUtils; +import de.superx.servlet.SuperXManager; +import de.superx.servlet.SxPools; +import de.superx.servlet.SxSQL_Server; +import de.superx.util.SqlStringUtils; +/** + * @author Daniel Quathamer Projektgruppe SuperX + * doquery.java + * @ + * Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.
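+ *
+ * Vereinfachte Anwendungsskizze (Dateiname nur beispielhaft; mySxConnection ist eine
+ * bestehende SxConnection, vgl. Dosql.execute() als realer Aufrufer):
+ *   SqlExecutor ex = new SqlExecutor("default", mySxConnection, new File("script.sql"), null);
+ *   int rc = ex.executeQueries();
+ *   String ausgabe = ex.getOutString().toString();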
+ * Gebrauch:
java doquery (optional) (optional) (optional) + * + + */ +/* + * SQL-Abfragen Ergebnis serialisieren nach CSV / XML + */ +public class SqlExecutor { + public SqlExecutor(String mandantenId,SxConnection mySxConnection, String sqlScript, Properties params) { + super(); + this.mySxConnection = mySxConnection; + this.sqlScript = sqlScript; + this.mandantenId = mandantenId; + this.logger = Logger.getLogger("superx_" + mandantenId); + this.params=params; + + } + public SqlExecutor(String mandantenId,SxConnection mySxConnection, File sqlScriptFile, Properties params) throws IOException { + super(); + this.mySxConnection = mySxConnection; + this.mandantenId = mandantenId; + this.sqlScriptFile=sqlScriptFile; + this.logger = Logger.getLogger("superx_" + mandantenId); + this.params=params; + + if (!sqlScriptFile.exists()) { + throw new IOException("Datei nicht gefunden: " + sqlScriptFile.getAbsolutePath()); + } + BufferedReader in; + //--- InputStream generieren ---// + in = new BufferedReader(new InputStreamReader(new FileInputStream(sqlScriptFile))); + + //--- Verarbeiten der Datei ---// + String sql = ""; + String text = ""; + while ((text = in.readLine()) != null) { + //MB auskommentiert 27.4.05 + //löschte ( in + //insert into + //( + //tid,... + //if (text.length() > 1) + sql += NEWLINE + text.trim(); + } + in.close(); + this.sqlScript =sql; + + } + public final String NEWLINE=System.getProperty("line.separator"); + private SxConnection mySxConnection; + private String sqlScript; + private File sqlScriptFile; + private Logger logger; + private String mandantenId="default"; + private Statement st; + private Connection myConnection; + private StringBuffer outString=new StringBuffer(""); + //private PrintStream stringOutWriter=new PrintStream(outString); + //private PrintWriter output=new PrintWriter(stringOutWriter); + private int returnCode; + private String outfile=""; + private String CSVdelim="^"; + private boolean printColNames=false; + private String outFormat="txt"; + private long numberOfRows; + + private Properties params; + + + public SxConnection getMySxConnection() { + return mySxConnection; + } + public void setMyConnection(SxConnection mySxConnection) { + this.mySxConnection = mySxConnection; + } + public String getSqlScript() { + return sqlScript; + } + public void setSqlScript(String sqlScript) { + this.sqlScript = sqlScript; + + } + + public Logger getLogger() { + return logger; + } + public void setLogger(Logger logger) { + this.logger = logger; + } + public String getMandantenId() { + return mandantenId; + } + public void setMandantenId(String mandantenId) { + this.mandantenId = mandantenId; + } + + public String getOutfile() { + return outfile; + } + public void setOutfile(String outfile) { + this.outfile = outfile; + } + public String getCSVdelim() { + return CSVdelim; + } + public void setCSVdelim(String cSVdelim) { + CSVdelim = cSVdelim; + } + public boolean getPrintColNames() { + return printColNames; + } + public void setPrintColNames(boolean printColNames) { + this.printColNames = printColNames; + } + public String getOutFormat() { + return outFormat; + } + public void setOutFormat(String outFormat) { + this.outFormat = outFormat; + } + public int getReturnCode() { + return returnCode; + } + public void setReturnCode(int returnCode) { + this.returnCode = returnCode; + } + public long getNumberOfRows() { + return numberOfRows; + } + public void setNumberOfRows(long numberOfRows) { + this.numberOfRows = numberOfRows; + } + public Properties getParams() { + return params; + 
} + public void setParams(Properties params) { + this.params = params; + } + public StringBuffer getOutString() { + return outString; + } + public void setOutString(StringBuffer outString) { + this.outString = outString; + } + public int executeQueries() throws Exception + { + returnCode=0; + FileWriter fw; + StringWriter sw; + SxTransformer myTransformer; + QueryResultSerializer myQueryResultSerializer=null; + if (sqlScript.toLowerCase().indexOf("")>-1) + { + SxConnection myConnection = new SxConnection(); + myConnection.setPropfile(mySxConnection.getPropfile()); + Connection con = myConnection.getConnection(); + + new XUpdater().execute(con, myConnection.getDatabaseAbbr(), sqlScript,logger); + } + else + { + if(sqlScript.toLowerCase().indexOf("--freemarker template")>-1) + { + /* boolean isTemplate = getSelect_stmt().toUpperCase().indexOf("FREEMARKER TEMPLATE") > -1; + String select_string = SqlStringUtils.generateSQL(SxPools.get(mandantenID).getDatabaseAbbr(), + individualFields.getFormular(), getSelect_stmt() + "\n" + getCleanup_stmt()); + + // select_string=select_string.replaceAll("Fächer.elements", + // "Fächer.xx"); + // map.clear(); + if (isTemplate) { + SuperXManager.setLastFMMaskenSql( + "-- " + getMaskInfo() + " " + DateUtils.getNowString() + "\n" + select_string); + select_string = SxPools.get(mandantenID).getTemplateProcessor().process(map, (Integer) getId(), + getMaskInfo(), select_string, individualFields, SxPools.get(mandantenID).getRepository(), + SxPools.get(mandantenID).getSqlDialect()); + // danach enthält map alle vars inkl. sqlvars + } + // MB 07/2010 macht Probleme bei Freemarker hashes + // generell deaktiviert, hier falls doch noch in alten Masken + // vorhanden + select_string = SqlStringUtils.removeComment(select_string, "{", "}"); +*/ + /* alt: sqlScript=de.superx.bin.FMParser.simpleParser(mySxConnection.getPropfile(), sqlScript);*/ + //Die map und der FieldContainer sind null, werden nur in Masken genutzt + HashMap map = new HashMap(); + FieldContainer individualFields = new FieldContainer(); + if(SxPools.hasMandanten()){ + //Servlet Betrieb: + sqlScript= SxPools.get(mandantenId).getTemplateProcessor().process(map, -1, + "SQL-Script", sqlScript, individualFields, SxPools.get(mandantenId).getRepository(), + SxPools.get(mandantenId).getSqlDialect()); + } + else + { + //Kommandozeile: TODO hier sind noch keine FM Scripte mit SQLVAR möglich: + sqlScript=FMParser.simpleParser(mySxConnection.getPropfile(), sqlScript); + } + String keepGenerated=System.getProperty("FreemarkerKeepGenerated"); + if(keepGenerated!=null && keepGenerated.equalsIgnoreCase("true")) + { + FileWriter sqlScriptTmp ; + + if(sqlScriptFile==null) + { + File tempFile = File.createTempFile("fm_output", ".sql"); + sqlScriptTmp = new FileWriter(tempFile); + logger.info("FM generated SQL: "+tempFile.getAbsolutePath()); + } + else + sqlScriptTmp = new FileWriter(sqlScriptFile.getAbsolutePath()+".tmp.sql"); + sqlScriptTmp.write(sqlScript); + sqlScriptTmp.close(); + } + + } + sqlScript = sqlScript.trim(); + sqlScript = SqlStringUtils.removeComment(sqlScript, "{", "}"); + if (sqlScript.startsWith("\"")) + sqlScript = sqlScript.substring(1, sqlScript.length()); + if (sqlScript.endsWith("\"")) + sqlScript = sqlScript.substring(0, sqlScript.length() - 1); + //replace CLI-params: + if (params==null) + { + //do nothing + } + else + { + Enumeration paramEnum = params.propertyNames(); + while (paramEnum.hasMoreElements()) { + String paramName = (String)paramEnum.nextElement(); + String paramValue = 
params.getProperty(paramName); + sqlScript=de.memtext.util.StringUtils.replace(sqlScript,"$"+paramName, paramValue); + } + } + logger.config("Inhalt der sql-Datei: "+NEWLINE+"--" + sqlScript + "--"); + SxJdbcClient myClient = new SxJdbcClient(logger, mySxConnection.getPropfile(), sqlScript); + myClient.Rs_executeALL(); + ResultSet myrs = myClient.getRs(); + if (myrs != null) { + //Letzter SQL liefert Ergebnisse: + if(!outfile.equals("")) + { + try { + fw = new FileWriter(outfile); + if(outFormat.equalsIgnoreCase("xml")) + fw.write(""); + myQueryResultSerializer=new QueryResultSerializer("default", mySxConnection, myrs,fw); + numberOfRows=myQueryResultSerializer.unloadQueryResults("DOQUERY",outFormat,this.CSVdelim,printColNames); + returnCode=myQueryResultSerializer.getReturnCode(); + + } catch (IOException e) { + logger.severe ("Fehler beim Erstellen der Datei "+outfile); + returnCode=1; + } + + } + else + { + sw=new StringWriter(); + if(outFormat.equalsIgnoreCase("xml")) + sw.write(""); + myQueryResultSerializer=new QueryResultSerializer("default", mySxConnection, myrs,sw); + numberOfRows=myQueryResultSerializer.unloadQueryResults("DOQUERY",outFormat,this.CSVdelim,printColNames); + returnCode=myQueryResultSerializer.getReturnCode(); + outString= myQueryResultSerializer.getOutputString().getBuffer(); + + } + + + + + } else + logger.info("Keine Rückgabewerte aus SQL-Script"); + } + return returnCode; + } + + + +} diff --git a/src/de/superx/etl/TableUploader.java b/src/de/superx/etl/TableUploader.java new file mode 100644 index 0000000..7987014 --- /dev/null +++ b/src/de/superx/etl/TableUploader.java @@ -0,0 +1,779 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl; + +import java.io.BufferedReader; +import java.io.BufferedWriter; +import java.io.ByteArrayInputStream; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileOutputStream; +import java.io.FileReader; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStreamReader; +import java.io.OutputStreamWriter; +import java.io.PrintStream; +import java.io.Reader; +import java.io.UnsupportedEncodingException; +import java.nio.charset.CodingErrorAction; +import java.nio.charset.StandardCharsets; +import java.nio.file.Files; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.PreparedStatement; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Statement; +import java.sql.Types; +import java.text.ParseException; +import java.util.Iterator; +import java.util.Properties; + +import de.superx.util.FileUtils; +import de.superx.util.SqlStringUtils; + +import javax.xml.parsers.DocumentBuilder; +import javax.xml.parsers.DocumentBuilderFactory; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPathFactory; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; + +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.*; + + +import org.postgresql.PGConnection; +import org.postgresql.copy.CopyManager; + +import de.superx.bin.SxConnection; +import de.superx.bin.SxJdbcClient; +import de.superx.bin.UploadRecords; +import de.memtext.util.DateUtils; +import de.memtext.util.GetOpts; +import 
de.memtext.util.StringUtils; +import de.memtext.util.XMLUtils; +public class TableUploader { + private String logfile; + private String dbpropfile; + private String mode="stop"; + private String inFormat; + private String targetTable; + private String srcFile; + private boolean header=false; + + private String delim="^"; + private String encoding=SqlStringUtils.getEncoding(); + private String inserts=""; + private boolean truncateTargetTable=true; + private boolean continueAfterError; + private boolean removeTrailingDelim=true; + private boolean isPostgres; + private boolean useBatch=true; + private static int maxCols=1000; + private String[] insert_cols = new String[maxCols]; + private int[] insert_types = new int[maxCols]; + private int numberOfColumns; + public long numberOfRows; + private int returnCode; + private String xml_search_path; + private Connection uploadConnection; + private DatabaseMetaData dbmd; + private PreparedStatement pst; + + + public TableUploader() { + // TODO Auto-generated constructor stub + + } + + public Connection getUploadConnection() { + return uploadConnection; + } + public void setUploadConnection(Connection uploadConnection) { + this.uploadConnection = uploadConnection; + } + public boolean isRemoveTrailingDelim() { + return removeTrailingDelim; + } + public void setRemoveTrailingDelim(boolean removeTrailingDelim) { + this.removeTrailingDelim = removeTrailingDelim; + } + public void setHeader(boolean header) { + this.header = header; + } + + + public String getDbpropfile() { + return dbpropfile; + } + public void setDbpropfile(String dbpropfile) { + this.dbpropfile = dbpropfile; + } + public String getMode() { + return mode; + } + public void setMode(String mode) { + if (!mode.equals("stop") && !mode.equals("exclude-field")&& !mode.equals("transaction")) + mode = "exclude-row"; + this.mode = mode; + } + public String getInFormat() { + return inFormat; + } + public void setInFormat(String inFormat) { + this.inFormat = inFormat; + } + public String getTargetTable() { + return targetTable; + } + public void setTargetTable(String targetTable) { + this.targetTable = targetTable; + } + public String getSrcFile() { + return srcFile; + } + public void setSrcFile(String srcFile) { + this.srcFile = srcFile; + } + + public String getDelim() { + return delim; + } + public void setDelim(String delim) { + if (delim.equals("tab")) + delim = "\t"; //Tab + if (delim.equals("")) + delim = "^"; //default Delimiter + this.delim = delim; + } + public String getEncoding() { + return encoding; + } + public void setEncoding(String encoding) { + if(encoding==null || encoding.equals("")) + encoding="UTF-8"; + this.encoding = encoding; + } + public String getInserts() { + return inserts; + } + public void setInserts(String inserts) { + if(inserts.equalsIgnoreCase("batch")) + useBatch=true; + if(inserts.equalsIgnoreCase("simple")) + useBatch=false; + + this.inserts = inserts; + } + public boolean isTruncateTargetTable() { + return truncateTargetTable; + } + + public void setTruncateTargetTable(boolean truncateTargetTable) { + this.truncateTargetTable = truncateTargetTable; + } + public void setTruncateTargetTable(String truncateTargetTable) { + if(truncateTargetTable!=null) + {if(truncateTargetTable.equalsIgnoreCase("true")) + this.truncateTargetTable =true; + else + this.truncateTargetTable =false; + } + else + this.truncateTargetTable =false; + } + + public int getReturnCode() { + return returnCode; + } + + public void setReturnCode(int returnCode) { + this.returnCode = 
returnCode; + } + + public boolean isContinueAfterError() { + return continueAfterError; + } + public void setContinueAfterError(boolean continueAfterError) { + this.continueAfterError = continueAfterError; + } + public String getXml_search_path() { + return xml_search_path; + } + + public void setXml_search_path(String xml_search_path) { + this.xml_search_path = xml_search_path; + } + + public long uploadFile() throws Exception + { + String protokoll=""; + long numberOfRows=0; + returnCode=0; + try { + //dbmd=uploadConnection.getMetaData(); + String dbname=uploadConnection.getCatalog(); + if(truncateTargetTable) + { + Statement stm=uploadConnection.createStatement(); + stm.execute("delete from "+this.targetTable+";"); + stm.close(); + } + if(inFormat.equalsIgnoreCase("xml")) + { + numberOfRows=uploadXML(); + } + else + numberOfRows=uploadCSV(); + } catch (Exception e) { + returnCode=1; + throw new Exception(e); + } + return numberOfRows; + + } + private long uploadXML() throws Exception + { + String feld_wert; + String errmsg=""; + Document mydomres=null; + numberOfRows=0; + org.xml.sax.InputSource is; + NodeList rowlist; + + mydomres = de.superx.etl.EtlUtils.buildDocumentFromXmlFile(srcFile); + + if(xml_search_path==null) + { + rowlist = mydomres.getElementsByTagName("row"); + } + else + + { + //XPath xPath = XPathFactory.newInstance().newXPath(); + File inputFile = new File(srcFile); + //XPathFactory factory = XPathFactory.newInstance(); + XPathFactory factory = new net.sf.saxon.xpath.XPathFactoryImpl(); + + XPath xPath = factory.newXPath(); + //Document doc = builder.parse(inputFile); + //doc.getDocumentElement().normalize(); + rowlist=(NodeList) xPath.compile(xml_search_path).evaluate( + mydomres, XPathConstants.NODESET); + } + Node rownode; + initializeColumnSchema(); + String insertHead=createPreparedStatementHead(); + pst = uploadConnection.prepareStatement(insertHead); + if(useBatch) + pst.clearBatch(); + int anz_rows = rowlist.getLength(); + for (int zeilennr = 0; zeilennr < anz_rows; zeilennr++) { + //Schleife über jede Zeile des XML-Stroms + rownode = rowlist.item(zeilennr); + //pst.clearParameters(); + + for(int col=0; col < numberOfColumns;col++) + { + for (Iterator it = XMLUtils.getChildNodeIterator(rownode); it.hasNext();) { + Node fldNode = (Node) it.next(); + //System.out.println(XMLUtils.getTheValue(fldNode)); + if (XMLUtils.getAttribValue(fldNode,"name").equalsIgnoreCase(insert_cols[col])) { + //int p; + feld_wert=""; + try{ + feld_wert = XMLUtils.getTheValue(fldNode).trim(); + feld_wert=StringUtils.replace(feld_wert, + "CDATASTART", ""); + + + } + catch (IllegalArgumentException e) + { + //Node ist NULL, keine Warnung notwendig + } + + errmsg = feld_wert_to_pst(zeilennr, col, errmsg, feld_wert); + + } //Wenn Feldname übereinstimmt + } + } //Ende der Schleife über die Spalten + if(!errmsg.equals("") && mode.equals("stop")) + { + break; + } + if(useBatch) + pst.addBatch(); + else + pst.executeUpdate(); + + numberOfRows++; + + } //Ende der Schleife über die Zeilen + if(useBatch) + pst.executeBatch(); + return numberOfRows; + } + + + + private long uploadCSV() throws Exception + { + String line; + String line2; + File outFile=null; + String protokoll=""; + long numberOfRows=0; + if(isPostgres && !inserts.equalsIgnoreCase("simple") && !inserts.equalsIgnoreCase("batch")) + { + if(removeTrailingDelim) + srcFile=removeTrailingDelim(srcFile); + numberOfRows=uploadCSVinPostgres(srcFile,removeTrailingDelim); + } + else + numberOfRows=uploadCSVwithAnsiSQL(srcFile); + return 
numberOfRows; + + + } + private String removeTrailingDelim(String srcFile) throws UnsupportedEncodingException, FileNotFoundException, IOException { + String line; + File outFile; + String returnSrcFile=srcFile+".tmp"; + BufferedReader in2 = new BufferedReader(new InputStreamReader(new FileInputStream(srcFile), encoding)); + + outFile=new File(srcFile+".tmp"); + FileOutputStream out = new FileOutputStream(outFile, false); + PrintStream out2 = new PrintStream(out, true, encoding); + + + while ((line = in2.readLine()) != null) { + + if (line.endsWith(delim)) + line=line.substring(0,line.length()-delim.length()); + out2.println(line); + out2.flush(); + + } + return returnSrcFile; + } + private long uploadCSVinPostgres(String srcFile, boolean deleteSrcFile) { + long numOfRows=0; + String copySql = "COPY " + targetTable + " FROM STDIN WITH DELIMITER '" + delim + "' NULL '' ENCODING '"+ encoding+"'"; + copySql += header ? " HEADER" : ""; + + String srcFileContent=de.superx.etl.EtlUtils.getFileContentsWithEncoding(srcFile, encoding); + String msg=""; + try { + //dbmd=uploadConnection.getMetaData(); + String dbname=uploadConnection.getCatalog(); + Statement stm=uploadConnection.createStatement(); + int isIso=0; + ResultSet rs=stm.executeQuery("SELECT distinct 1 FROM pg_catalog.pg_database where datname='"+dbname+"' and datctype ilike '%euro%' or datctype ilike '%1252%' or datctype ilike '%8859%';"); + while (rs.next()) { + if(rs.getObject(1)!=null) + isIso= Integer.parseInt(rs.getObject(1).toString()); + + } + rs.close(); + stm.close(); + Reader in4=null; + final CopyManager cpm = ((PGConnection) uploadConnection).getCopyAPI(); + long anz = 0; + msg = ""; + + if(isIso==1) + { + String srcFileIso=srcFile+"_iso.tmp"; + String srcFileContentValidIso = FileUtils.convertToIso(srcFileContent,"postgres") ;//new String(srcFileContent.getBytes("ISO-8859-1")); + de.superx.etl.EtlUtils.saveFileContentsWithEncoding(srcFileIso, srcFileContentValidIso, "iso-8859-9"); + FileInputStream fis = new FileInputStream(srcFileIso); + in4 = new BufferedReader(new InputStreamReader(fis, "iso-8859-9")); + + } + else + { + FileReader in3 = new FileReader(srcFile); + in4 = new BufferedReader(in3); + + } + + numOfRows= cpm.copyIn(copySql, in4); + numberOfRows =numOfRows; + if(deleteSrcFile) + { + File outFile=new File(srcFile); + if(outFile!=null) + outFile.delete(); + } + } catch (Exception e) { + // TODO Auto-generated catch block + msg=e.toString(); + } + return numOfRows; + } + + private long uploadCSVwithAnsiSQL(String srcFile) throws SQLException, FileNotFoundException, IOException { + numberOfRows=0; + String text; + String text2; + String msg=""; + int zeilennr=1; + int fehlerSaetze=0; + + BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(srcFile), encoding)); + initializeColumnSchema(); + String insertHead=createPreparedStatementHead(); + pst = uploadConnection.prepareStatement(insertHead); + if(useBatch) + pst.clearBatch(); + + while ((text = in.readLine()) != null) { + if (text.endsWith("\\")) { + text=text.substring(0, text.length()-1); + text2 = in.readLine(); + if (text2 != null) { + text += "\n"+ text2; + while (text2.endsWith("\\")) { + text=text.substring(0, text.length()-1); + text2 = in.readLine(); + if (text2 != null) + text += "\n"+text2; + + } + } + } + + String prepare = + createPreparedInsertStatement(zeilennr, + insertHead, + text); + if(!prepare.equals("") && mode.equals("stop")) + { + msg=prepare; + break; + } + if(useBatch) + pst.addBatch(); + else + 
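+ //ohne Batch: jede Zeile sofort per executeUpdate() einfügen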
pst.executeUpdate(); + numberOfRows++; + + + + } + if(useBatch) + pst.executeBatch(); + //TODO: msg + return numberOfRows; + } + + private String createPreparedInsertStatement( + int line, + String insertHead, + String text) + throws SQLException { + int p; + int i=0; + int k=0; + String errmsg = ""; + String feld_wert; + //pst.clearParameters(); + do { + //ggf. Trennzeichen am Ende hinzufügen: + if(!text.endsWith(delim)) + text+= delim; + p = text.indexOf(delim, i); + //logger.config("Type "+types[k]); + //maskierte Trennzeichen abfangen: + if(p>0 && text.substring(p-1, p).equals("\\")) + p = text.indexOf(delim, p+1); + + if (p > -1 ) { + if(p==-1) + feld_wert = text.substring(i); + else + feld_wert = text.substring(i, p); + //wenn der Feldwert zufällig das Zeichen "\\n" enthält, wird es zu "\n" + if(feld_wert != null && (feld_wert.indexOf("\\\\n") >0 )) + { + feld_wert=de.memtext.util.StringUtils.replace(feld_wert, "\\\\n", "\\n"); + } + //wenn der Feldwert das Zeichen "\Trennzeichen" enthält, wird der \ entfernt + if(feld_wert != null && (feld_wert.indexOf("\\"+delim) >0 )) + { + feld_wert=de.memtext.util.StringUtils.replace(feld_wert, "\\", ""); + } + //wenn der Feldwert das Zeichen "\\" enthält, wird ein \ entfernt + if(feld_wert != null && (feld_wert.indexOf("\\\\") >0 )) + { + feld_wert=de.memtext.util.StringUtils.replace(feld_wert, "\\\\", "\\"); + } + + errmsg = feld_wert_to_pst(line,k, errmsg, feld_wert); + k++; + i = p + 1; + } + + } while (p > -1); + return errmsg; + } + private String feld_wert_to_pst(int line, int col, String errmsg, String feld_wert) throws SQLException { + + + if( col >= numberOfColumns) + errmsg+= "Anzahl Spalten in Datei ist "+col+", aber es sollten nur "+(numberOfColumns-1)+" Spalten sein. Bitte prüfen Sie das Trennzeichen"; + else + { + if (feld_wert.equals("")) + try { + pst.setNull(col + 1, insert_types[col]); + } catch (SQLException e1) { + errmsg += e1.toString(); + } else { + + switch (insert_types[col]) { + case Types.BIGINT : + case Types.TINYINT : + case Types.SMALLINT : + case Types.INTEGER : + + try { + int myInt = (int) Integer.parseInt(feld_wert.trim()); + pst.setInt(col + 1, myInt); + } catch (NumberFormatException e1) { + errmsg += e1.toString(); + setFieldToNull(col, insert_types, pst); + } catch (SQLException e1) { + errmsg += conversionException(line, col, feld_wert,e1.toString()); + setFieldToNull(col, insert_types, pst); + } + break; + case Types.FLOAT : + try { + float myFloat = + (float) Float.parseFloat(feld_wert.trim()); + pst.setFloat(col + 1, myFloat); + } catch (NumberFormatException e1) { + errmsg += conversionException(line, col, feld_wert,e1.toString()); + setFieldToNull(col, insert_types, pst); + } catch (SQLException e1) { + errmsg += conversionException(line, col, feld_wert,e1.toString()); + setFieldToNull(col, insert_types, pst); + } + break; + + case Types.REAL : + case Types.DOUBLE : + case Types.NUMERIC : + case Types.DECIMAL : + try { + double myDouble = + (double) Double.parseDouble(feld_wert.trim()); + pst.setDouble(col + 1, myDouble); + } catch (NumberFormatException e1) { + errmsg += conversionException(line, col, feld_wert,e1.toString()); + setFieldToNull(col, insert_types, pst); + } catch (SQLException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + break; + + case Types.CHAR : + case Types.VARCHAR : + default : + if(feld_wert.equals(" ")) + feld_wert=""; //Leerzeichen im UNL-File wird zu Leerstring + try { + pst.setString(col + 1, 
feld_wert); + } catch (SQLException e1) { + errmsg += conversionException(line, col, feld_wert,e1.toString()); + setFieldToNull(col, insert_types, pst); + } + break; + case Types.LONGVARCHAR : + ByteArrayInputStream by = + new ByteArrayInputStream(feld_wert.getBytes()); + pst.setAsciiStream( + col + 1, + by, + feld_wert.length()); + break; + case Types.DATE : + try { + java.util.Date datum = + DateUtils.parse(feld_wert.trim()); + feld_wert = DateUtils.formatUS(datum); + //Leider ist dieser Schritt wg java.sql.Date nötig + pst.setDate( + col + 1, + java.sql.Date.valueOf(feld_wert)); + + } catch (SQLException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } catch (ParseException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + catch (IllegalArgumentException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + + break; + case Types.TIME : + + try { + //Time zeit = (java.sql.Time) + //DateUtils.timeParse(feld_wert); + pst.setTime(col + 1, java.sql.Time.valueOf( + feld_wert.trim())); + } catch (SQLException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + catch (IllegalArgumentException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + + break; + case Types.TIMESTAMP : + try { + java.util.Date datum = + DateUtils.dateTimeParse(feld_wert.trim()); + feld_wert = DateUtils.dateTimeFormatUS(datum); + //Leider ist dieser Schritt wg java.sql.Date nötig + pst.setTimestamp( + col + 1, + java.sql.Timestamp.valueOf( + feld_wert + ".0")); + + } catch (SQLException e1) { + errmsg += conversionException(line, col,feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } catch (ParseException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + catch (IllegalArgumentException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + + break; + + case Types.BIT : + // Types.BOOLEAN gibt es im jdk 1.3 nicht + try { + boolean wf = + (boolean) Boolean.getBoolean(feld_wert.trim()); + pst.setBoolean(col + 1, wf); + } catch (SQLException e1) { + errmsg += conversionException(line, col, feld_wert, e1.toString()); + setFieldToNull(col, insert_types, pst); + } + //Boolean wird vom Informix-Treiber als OTHER (1111) erkannt + //Da aber default '' ist, klappt es trotzdem + break; + } + + } + } + return errmsg; + } + private void setFieldToNull( + int k, + int[] insert_types, + PreparedStatement pst) { + if (mode.equals("exclude-field")) + try { + pst.setNull(k + 1, insert_types[k]); + } catch (SQLException e3) { + System.err.println("Invalid Field " + (k + 1) + " could not be set to null"); + } + + } + private String conversionException(int line,int col, String field_value, String error) { + String err_msg = ""; + + err_msg = "Error in line "+line+" in Column " + (col + 1) + " "+insert_cols[col]+" value "+ field_value+ ": " + error.toString() + "; "; + + return err_msg; + } + private void initializeColumnSchema() throws SQLException + { + int i=0; + + ResultSet rs = null; + ResultSetMetaData rsmd = null; + String tabelle=targetTable; + if (!dbmd.storesLowerCaseIdentifiers()) + tabelle = tabelle.toUpperCase(); + 
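+ //Spaltennamen und JDBC-Typen der Zieltabelle aus den Datenbank-Metadaten lesen;
+ //sie bestimmen Reihenfolge und Typbehandlung des vorbereiteten INSERT (createPreparedStatementHead)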
rs =dbmd.getColumns(uploadConnection.getCatalog(), null, tabelle, null); + rsmd = rs.getMetaData(); + while (rs.next()) { + insert_cols[i] = rs.getObject("COLUMN_NAME").toString(); + insert_types[i] = rs.getInt("DATA_TYPE"); + i++; + } + numberOfColumns=i; + if(!dbmd.supportsBatchUpdates()) + useBatch=false; + + } + private String createPreparedStatementHead() throws SQLException + { + + String sql=null; + + String insert_head = "insert into " + targetTable+"("; + String insert_val=""; + for (int i = 0; i < numberOfColumns; i++) + { + insert_head += insert_cols[i] + ", "; + insert_val+="?, "; + } + insert_head = insert_head.substring(0, insert_head.length() - 2); + insert_val = insert_val.substring(0, insert_val.length() - 2); + insert_head +=") values( "; + sql=insert_head + insert_val+");"; + return sql; + + } + public Connection getConnection(Connection myConnection,String propfile) throws Exception { + + if(myConnection==null) + { + SxConnection mySxConnection = null; + mySxConnection = new SxConnection(); + mySxConnection.setPropfile(propfile); + + myConnection = mySxConnection.getConnection(); + + String db_driver = mySxConnection.m_DriverClass; + if(db_driver.equals("org.postgresql.Driver")) + isPostgres=true; + } + dbmd = myConnection.getMetaData(); + + return myConnection; + + } + +} diff --git a/src/de/superx/etl/bin/Doquery.java b/src/de/superx/etl/bin/Doquery.java new file mode 100644 index 0000000..ddd6596 --- /dev/null +++ b/src/de/superx/etl/bin/Doquery.java @@ -0,0 +1,212 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.bin; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.logging.LogManager; +import java.util.logging.Logger; +import java.util.StringTokenizer; +import de.superx.util.SqlStringUtils; +import de.superx.bin.SxConnection; +import de.superx.common.SuperX_el; +import de.superx.etl.QueryResultSerializer; +import de.superx.servlet.SxPools; +import de.superx.servlet.SxSQL_Server; + +/** + * @author Daniel Quathamer Projektgruppe SuperX + * doquery.java + * @ + * Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.
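+ *
+ * Aufrufskizze (Datei- und SQL-Angaben nur beispielhaft; Argumentreihenfolge wie in go() ausgewertet):
+ *   java de.superx.etl.bin.Doquery ../conf/logging.properties ../conf/db.properties "select 1" txt ^ false ergebnis.csv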
+ * Gebrauch:
java doquery (optional) (optional) (optional) + * + + */ + +public class Doquery { + private static Connection myDb; + private static Statement st; // Our statement to run queries with + private static DatabaseMetaData dbmd; + // This defines the structure of the database + private static boolean done = false; // Added by CWJ to permit \q command + private static String delim = "^"; + private static String header = "false"; + private static String outFormat = "txt"; + private static String logfile = "../conf/logging.properties"; + private static String tabelle = ""; + private static String dbpropfile = "../conf/db.properties"; + private static SxConnection myConnection = null; + private static String db_driver; + private static String mandantenID="default"; + private static String outfile = ""; + private static Logger logger = + (Logger) Logger.getLogger(Doquery.class.toString()); + private static String usage = + "-------------------------------------\nGebrauch: java de.superx.bin.Doquery $LOGGER_PROPERTIES $DB_PROPERTIES $sql (optional: )$outformat $DBDELIMITER $header $filename \n---------------------------------------------------"; + + public static int go(String args[]) + throws + Exception { + if (args.length > 0) { + logfile = args[0].trim(); + } else { + throw new IllegalArgumentException("Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich"); + + } + File f = new File(logfile); + if (!f.exists()) { + throw new IOException("Datei nicht gefunden: " + logfile); + } + + FileInputStream ins = new FileInputStream(logfile); + LogManager MyLogManager = java.util.logging.LogManager.getLogManager(); + MyLogManager.readConfiguration(ins); + logfile = MyLogManager.getProperty(".level"); + logger.info("Using Loggging-Level " + logfile); + String query = ""; + long numberOfRows=0; + int returnCode=0; + + if (args.length > 1) { + dbpropfile = args[1].trim(); + } else { + logger.severe( + "Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich"); + System.exit(1); + } + if (args.length <= 2) { + logger.severe( + "Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich"); + System.exit(1); + } + query=args[2].trim(); + + if (args.length > 3) { + outFormat = args[3].trim(); + } + + if (args.length > 4) { + delim = args[4].trim(); + } + if (args.length > 5) { + header = args[5].trim(); + + } + if (args.length > 6) { + outfile = args[6].trim(); + + } + if (delim.equals("")) + delim = "^"; //default Delimiter + + + long jetzt = new java.util.Date().getTime() ; + getConnection(logger, dbpropfile); + QueryResultSerializer myQueryResultSerializer = null; + FileWriter fw; + StringWriter sw; + boolean printColname=(header.equalsIgnoreCase("true")?true:false); + if(!outfile.equals("")) + { + try { + fw = new FileWriter(outfile); + if(outFormat.equalsIgnoreCase("xml")) + fw.write(""); + myQueryResultSerializer=new QueryResultSerializer("default", myConnection, query,fw); + + } catch (IOException e) { + logger.severe ("Fehler beim Erstellen der Datei "+outfile); + returnCode=1; + } + + } + else + { + sw=new StringWriter(); + if(outFormat.equalsIgnoreCase("xml")) + sw.write(""); + myQueryResultSerializer=new QueryResultSerializer("default", myConnection, query,sw); + } + + numberOfRows=myQueryResultSerializer.unloadQueryResults("DOQUERY",outFormat,delim,printColname); + returnCode=myQueryResultSerializer.getReturnCode(); + if(outfile.equals("")) + { 
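+ //keine Ausgabedatei angegeben: serialisiertes Ergebnis nach stdout ausgeben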
+ sw=myQueryResultSerializer.getOutputString(); + System.out.println(sw.toString()); + } + long erstrecht = new java.util.Date().getTime() ; + System.out.println( numberOfRows+" rows unloaded in "+(erstrecht-jetzt)/1000 +" Sec."); + System.out.println( "returnCode= "+returnCode); + myQueryResultSerializer = null; + return returnCode; + + + } + + /* + * Display some instructions on how to run the example + */ + public static void instructions() { + System.out.println("SuperX @version@\n"); + System.out.println( + "\nDieses Java-Programm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.\n"); + System.out.println( + "Gebrauch:\n java doquery (optional) (optional) (optional)\n"); + System.exit(1); + } + + public static void main(String args[]) { + int returnCode=0; + try { + returnCode=go(args); + + } catch (Exception ex) { + System.err.println("Doquery Aufruf fehlgeschlagen.\n" + ex); + ex.printStackTrace(); + System.exit(1); + } + System.out.println("Doquery Aufruf ausgeführt.\n" ); + System.exit(returnCode); + } + public static void getConnection(Logger logger,String propFile) throws SQLException { + myConnection = new SxConnection(); + myConnection.setPropfile(propFile); + logger.config("Starting Connection..."); + try { + myDb = myConnection.getConnection(); + st = myDb.createStatement(); + //st = myDb.createStatement(java.sql.ResultSet.TYPE_FORWARD_ONLY, + // java.sql.ResultSet.CONCUR_READ_ONLY); + //st.setFetchSize(100); + dbmd = myDb.getMetaData(); + //st = myDb.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, + //ResultSet.CONCUR_UPDATABLE); + } catch (Exception e) { + e.printStackTrace(); + logger.severe("Keine DB-Verbindung: " + e.toString()); + throw new SQLException("Keine DB-Verbindung: " + e.toString()); + } + db_driver = myConnection.m_DriverClass; + + } + + +} diff --git a/src/de/superx/etl/bin/Dosql.java b/src/de/superx/etl/bin/Dosql.java new file mode 100644 index 0000000..3b09ca7 --- /dev/null +++ b/src/de/superx/etl/bin/Dosql.java @@ -0,0 +1,192 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.bin; +import java.io.BufferedReader; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStreamReader; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Properties; +import java.util.StringTokenizer; +import java.util.logging.LogManager; +import java.util.logging.Logger; + +import de.memtext.util.ExceptionHandler; +import de.superx.etl.util.GetOpts; +import de.memtext.util.PropUtils; +import de.superx.bin.SxConnection; +import de.superx.etl.EtlUtils; +import de.superx.etl.SqlExecutor; + +/** + * @author Daniel Quathamer Projektgruppe SuperX + * Dosql.java + * @ + * Dieses Javaprogramm führt einen oder mehrere SQL-Ausdrück in einer Datei aus und gibt das Ergebnis aus.
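+ *
+ * Hinweis: Werte aus -params werden im SQL-Script als $NAME ersetzt; mehrere Parameter
+ * werden mit "|" getrennt, z.B. -params:TID=16000|NAME=wert (Werte nur beispielhaft).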
+ * Gebrauch:
java dosql (optional) (optional) (optional) + * + */ + +/* Änderungen + +15.4.2004 MB wenn keine Ausgabedatei und Ausgabeformat txt, alle selects von SxJdbcClient + loggen lassen +6.4.2004 MB Code static main verlegt - nach erfolgreicher Durchführung ein system.out + +**/ +public class Dosql { + private static Connection myDb; + private static DatabaseMetaData dbmd; + + private static Statement st; // Our statement to run queries with + private static boolean done = false; // Added by CWJ to permit \q command + private static String delim = "^"; + private static String header = "false"; + private static String logfile = "../conf/logging.properties"; + private static String tabelle = ""; + private static String dbpropfile = "../conf/db.properties"; + private static String outfile = ""; + private static String outFormat = "txt"; + private static String _dateiPfad = ""; + //LogUtils logger=null; + private static Logger logger = + (Logger) Logger.getLogger(Dosql.class.toString()); + //static Logger logger = Logger.getLogger(dosql.class); + private static ExceptionHandler exceptionHandler = + new ExceptionHandler(false); + private static SxConnection myConnection = null; + private static String usage = + "-------------------------------------\n" + + "Gebrauch: java de.superx.bin.Dosql -logger=<> -dbproperties=<> " + + "-sqlfile: -params: -outFormat:(optional) -delim: -header:(optional) -outfile:(optional) \n---------------------------------------------------"; + + /* + * Display some instructions on how to run the example + */ + public static void instructions() { + System.out.println("SuperX @version@\n"); + System.out.println( + "\nDieses Javaprogramm führt ein beliebiges sql-Script mit einer oder mehr sql-Anweisungen aus"); + System.out.println( + "Im Classpath muss superx@version@.jar sowie der zugehörige jdbc-Treiber sein."); + System.out.println( + "Befehl:\n java dosql (optional) true"); + System.exit(1); + } + + public static void main(String args[]) { + try { + execute(args); + } catch (Exception e) { + logger.severe( + "Fehler beim sql-Script: " + _dateiPfad + " " + e.toString()); + e.printStackTrace(); + System.exit(1); + } + + } + + public static void execute(String[] args) + throws Exception { + + String params = ""; + GetOpts.setOpts(args); + String isdrin = + GetOpts.isAllRequiredOptionsPresent( + "-logger,-dbproperties,-sqlfile"); + if (isdrin != null) { + System.err.println("Folgende Optionen fehlen: " + isdrin); + System.err.println(usage); + System.exit(1); + } + + //GetOpts myOpts=new GetOpts(); + if (GetOpts.isPresent("-logger")) + logfile = GetOpts.getValue("-logger"); + if (GetOpts.isPresent("-dbproperties")) + dbpropfile = GetOpts.getValue("-dbproperties"); + if (GetOpts.isPresent("-sqlfile")) + _dateiPfad = GetOpts.getValue("-sqlfile"); + if (GetOpts.isPresent("-outFormat")) + outFormat = GetOpts.getValue("-outFormat"); + if (GetOpts.isPresent("-delim")) + delim = GetOpts.getValue("-delim"); + if (GetOpts.isPresent("-header")) + header = GetOpts.getValue("-header"); + if (GetOpts.isPresent("-outfile")) + outfile = GetOpts.getValue("-outfile"); + if (GetOpts.isPresent("-params")) + params = GetOpts.getValue("-params"); + + if (delim.equals("")) + delim = "^"; //default Delimiter + + int returnCode=0; + File f = new File(logfile); + if (!f.exists()) { + throw new IOException("Datei nicht gefunden: " + logfile); + } + FileInputStream ins = new FileInputStream(logfile); + LogManager MyLogManager = java.util.logging.LogManager.getLogManager(); + 
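+ //Logging-Konfiguration aus der angegebenen Properties-Datei laden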
MyLogManager.readConfiguration(ins); + logfile = MyLogManager.getProperty(".level"); + logger.info("Using Loggging-Level " + logfile); + File sqlScriptFile = new File(_dateiPfad); + getConnection(logger, dbpropfile); + Properties paramProperties=null; + if(!params.equals("")) + { + params=de.memtext.util.StringUtils.replace(params, "|", System.getProperty("line.separator")); + paramProperties=EtlUtils.convertStringToProperty(params); + } + SqlExecutor mySqlExecutor=new SqlExecutor("default", myConnection,sqlScriptFile,paramProperties); + if(!outfile.equals("")) + mySqlExecutor.setOutfile(outfile); + returnCode=mySqlExecutor.executeQueries(); + String output=mySqlExecutor.getOutString().toString(); + + myConnection.close(); + + System.out.println( + "Dosql hat das Script " + _dateiPfad + " erfolgreich durchgeführt"); + System.out.println(output); + logger.info("dosql erfolgreich beendet"); + + } + public static void getConnection(Logger logger,String propFile) throws SQLException { + myConnection = new SxConnection(); + myConnection.setPropfile(propFile); + logger.config("Starting Connection..."); + try { + myDb = myConnection.getConnection(); + st = myDb.createStatement(); + //st = myDb.createStatement(java.sql.ResultSet.TYPE_FORWARD_ONLY, + // java.sql.ResultSet.CONCUR_READ_ONLY); + //st.setFetchSize(100); + dbmd = myDb.getMetaData(); + //st = myDb.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, + //ResultSet.CONCUR_UPDATABLE); + } catch (Exception e) { + e.printStackTrace(); + logger.severe("Keine DB-Verbindung: " + e.toString()); + throw new SQLException("Keine DB-Verbindung: " + e.toString()); + } + + + } +} + diff --git a/src/de/superx/etl/bin/EtlJobExecutor.java b/src/de/superx/etl/bin/EtlJobExecutor.java new file mode 100644 index 0000000..0b12bf8 --- /dev/null +++ b/src/de/superx/etl/bin/EtlJobExecutor.java @@ -0,0 +1,70 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.bin; + +import java.sql.Connection; + +import de.superx.etl.util.GetOpts; +import de.superx.etl.EtlActionJob; +import de.superx.etl.EtlUtils; + +public class EtlJobExecutor { + + public EtlJobExecutor() { + // TODO Auto-generated constructor stub + } + + public static void main(String[] args) { + String usage="usage: java de.superx.bin.EtlJobExecutor -dbproperties:$DB_PROPERTIES -job:abc -params:TID=16000 (optional) -outfile:Ausgabedatei (optional)" ; + GetOpts.setOpts(args); + + String isdrin = GetOpts.isAllRequiredOptionsPresent("-dbproperties,-job"); + if (isdrin != null) { + + System.err.println(usage); + System.exit(1); + } + int returnCode=0; + String dbpropfile = GetOpts.getValue("-dbproperties"); + String job = GetOpts.getValue("-job"); + String params =null; + if (GetOpts.isPresent("-params")) + params=GetOpts.getValue("-params"); + if(!(params==null)) + params=de.memtext.util.StringUtils.replace(params, ",", EtlUtils.NEWLINE); + String outfile=null; + if (GetOpts.isPresent("-outfile")) + outfile=GetOpts.getValue("-outfile"); + //System.out.println("param:"+params); + + EtlActionJob myJob=new EtlActionJob(job,"",dbpropfile); //TODO:Mandantid + try { + myJob.initJob(job,params); + } catch (Exception e) { + // TODO Auto-generated catch block + + e.printStackTrace(); + System.err.println(myJob.getLogoutput()); + System.exit(1); + } + System.out.println("Job "+ job+" initialized"); + try { + returnCode= 
myJob.execute(outfile); + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + System.err.println(myJob.getLogoutput()); + System.exit(1); + } + System.out.println("Job "+ job+" executed in " +myJob.getDuration()+" Sec."); + + System.exit(returnCode); + + } + +} diff --git a/src/de/superx/etl/bin/SxTransformer.java b/src/de/superx/etl/bin/SxTransformer.java new file mode 100644 index 0000000..5374cb7 --- /dev/null +++ b/src/de/superx/etl/bin/SxTransformer.java @@ -0,0 +1,908 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.bin; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.io.PrintStream; +import java.io.PrintWriter; +import java.io.StringReader; +import java.io.Writer; +import java.net.URI; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.SQLException; +import java.sql.Types; +import java.util.StringTokenizer; +import java.util.logging.Level; +import java.util.logging.Logger; + +import javax.xml.transform.Result; +import javax.xml.transform.Source; +import javax.xml.transform.TransformerException; +import javax.xml.transform.URIResolver; +import javax.xml.transform.sax.SAXResult; +import javax.xml.transform.stream.StreamResult; +import javax.xml.transform.stream.StreamSource; + +import org.apache.fop.apps.FOUserAgent; +import org.apache.fop.apps.Fop; +import org.apache.fop.apps.FopConfParser; +import org.apache.fop.apps.FopFactory; +import org.apache.fop.apps.FopFactoryBuilder; +import org.apache.fop.apps.MimeConstants; +import org.jfor.jfor.converter.Converter; +import org.xml.sax.InputSource; + +import de.superx.bin.SxDBUtils; +import de.superx.util.SqlStringUtils; + +// ACHTUNG - PDF ERZEUGUNG WIEDER AKTIV DQ 27.12.2005!!! + +/** + * @author Daniel Quathamer Projektgruppe SuperX SxTransformer.java @ Dieses + * Javaprogramm transformiert ResultSets in text, html oder xml und gibt + * das Ergebnis als Ausgabestrom für stdout oder eine Datei aus. + * + * + * + */ +public class SxTransformer { + + /** + * + */ + private String head_delim1 = ""; + + private String head_delim2 = "\t"; + + private String header_delim1 = ""; + + private String header_delim2 = "\n"; + + private String fld_delim1 = ""; + + private String fld_delim2 = "\t"; + + private String row_delim1 = ""; + + private String row_delim2 = "\n";//we only user unix newline, even under + // DOS + + private String rs_delim1 = ""; + + private String rs_delim2 = ""; + + private String delim = "\t"; + + private String header = "false"; + + private String outHeader = ""; + + private String outFooter = ""; + + public String format = "txt"; + + public String params = ""; + + public String[] XslPipeline; + + public String stylesheet = ""; + + public PrintWriter myOutwriter; + + public OutputStream myOutputStream; + + public ResultSet outrs = null; + + public ResultSetMetaData outrsmd = null; + + public String quellstring = null; + + public String outfile = ""; + private File fopxconfFile=null; + public void setFopxconfFile(File fopxconfFile) { + this.fopxconfFile = fopxconfFile; + } + + Logger myLogger; + + String loglevel; + + //PrintStream outputstream=null; + + //TODO ggfs. 
auf Saxon umstellen + //TransletUtils.initFactory("net.sf.saxon.TransformerFactoryImpl", + // "org.apache.xalan.xsltc.trax.TransformerFactoryImpl"); + //System.out.println("Using xsl processor: " + // + TransletUtils.getUsedProcessor()); + public SxTransformer(Logger logger, PrintStream myStream) { + super(); + myLogger = logger; + myLogger.config("Starting the Transformer with Stdout-Output."); + myOutwriter = new PrintWriter(myStream); + } + + public SxTransformer(Logger logger, String myFile) { + super(); + myLogger = logger; + myLogger.config("Starting the Transformer with File-Output."); + outfile = myFile; + myLogger.setLevel(Level.SEVERE); + + } + + public SxTransformer(Logger logger, FileWriter myFilewriter) { + super(); + myLogger = logger; + myLogger.config("Starting the Transformer with File-Output."); + myOutwriter = new PrintWriter(new BufferedWriter(myFilewriter)); + } + + public SxTransformer(Logger logger, PrintWriter myWriter) { + super(); + myLogger = logger; + myLogger.config("Starting the Transformer with StringWriter-Output."); + myOutwriter = myWriter; + } + public SxTransformer(Logger logger) { + super(); + myLogger = logger; + myLogger.config("Starting the Transformer without Stream"); + } + + public void printResult(String outFormat) { + //loglevel=myLogger.getRootLogger().getLevel().toString().trim(); + assignDelims(outFormat); + myLogger.config("Resultset wird formatiert"); + String f_wert = ""; + //ResultSetMetaData rsmd = rs.getMetaData(); + + String zs = ""; + String headZs = ""; + int cols = 0; + Object o = null; + String[] lbl = null; + int[] types = new int[1255]; + // de.memtext.util.MemoryUtils.printfree(); + if (outrs != null) { + + try { + cols = outrsmd.getColumnCount(); + lbl = new String[1255]; + zs = outHeader + rs_delim1; + headZs = header_delim1; + ; + // Print the result column names? + for (int i = 1; i <= cols; i++) { + lbl[i] = outrsmd.getColumnLabel(i); + types[i] = outrsmd.getColumnType(i); + if (outFormat.equals("xml")) { + fld_delim1 = ""; + fld_delim2 = ""; + } else { + + if (i < cols) + headZs += (head_delim1 + lbl[i] + head_delim2); + else { + headZs += (head_delim1 + lbl[i] + head_delim2); + headZs += (header_delim2); + //out.newLine(); + } + } + } + } catch (SQLException e1) { + myLogger.severe("Fehler beim Einlesen der Feldnamen: " + e1.toString()); + e1.printStackTrace(); + } + if (header.equals("true")) zs += headZs; + + // try { + myOutwriter.write(zs); + myOutwriter.flush(); + /* + * } catch (IOException e) { myLogger.severe("Fehler beim Ausgeben + * der Feldnamen: " + e.toString()); + * } + */ + + //now the results + StringBuffer line = new StringBuffer(); + try { + int rowCounter = 0; + + do { + + line.setLength(0); + + line.append(row_delim1); + for (int i = 1; i <= cols; i++) { + if (outFormat.equals("xml")) { + fld_delim1 = ""; + fld_delim2 = ""; + } + if (types[i] == Types.LONGVARCHAR) { + //Der Informix-Treiber verlangt hier getString + // statt getObject! + o = outrs.getString(i); + } else { + o = outrs.getObject(i); + } + if (outrs.wasNull()) + if (i < cols) + line.append(fld_delim1 + "" + fld_delim2); + else { + line.append(fld_delim1 + "" + fld_delim2); + line.append(row_delim2); + //out.newLine(); + } + else { + //vergl. 
de.superx.common.Maske.getCSV + f_wert = SxDBUtils.field_value(o); + if (outFormat.equals("xml") && (f_wert.indexOf("&") > -1 || f_wert.indexOf("<") > -1 || f_wert.indexOf(">") > -1)) + f_wert = ""; + //wenn der Feldwert zufällig das Trennzeichen enthält, wird es mit "\" maskiert + if (outFormat.equals("txt")) { + f_wert = SxDBUtils.prepareInformixCsv(f_wert); + if (f_wert != null && (f_wert.indexOf(fld_delim2) > -1)) f_wert = de.memtext.util.StringUtils.replace(f_wert, fld_delim2, "\\" + fld_delim2); + } + if (i < cols) + line.append(fld_delim1 + f_wert + fld_delim2); + else { + line.append(fld_delim1 + f_wert + fld_delim2); + line.append(row_delim2); + } + } + } + myOutwriter.write(line.toString()); + rowCounter++; + if (rowCounter > 10000) { + de.memtext.util.MemoryUtils.printfree(); + myOutwriter.flush(); + + rowCounter = 0; + } + } while (outrs.next()); //von while + + myOutwriter.write(rs_delim2 + outFooter); + + myOutwriter.flush(); + outrs.close(); + myOutwriter.close(); + + } /* + * catch (IOException e) { myLogger.severe("Fehler beim Ausgeben + * der Feldwerte: " + e.toString()); } + */ + catch (SQLException e) { + myLogger.warning("SQL-Fehler beim Ausgeben der Feldwerte: " + e.toString()); + try { + myOutwriter.write(rs_delim2 + outFooter); + myOutwriter.flush(); + myOutwriter.close(); + outrs.close(); + } + /* + * catch (IOException e1) { myLogger.severe("Fehler beim + * Ausgeben der Feldwerte: " + e1.toString()); } + */ + catch (SQLException e1) { + myLogger.warning("SQL-Fehler beim Ausgeben der Feldwerte: " + e1.toString()); + } + } + + } + + else //wenn outrs=null + { + + myOutwriter.flush(); + myOutwriter.close(); + + } + myLogger.info("resultset printed"); + + } + + public void transformString(String methode) throws TransformerException { + try { + + javax.xml.transform.TransformerFactory tFactory = javax.xml.transform.TransformerFactory.newInstance(); + + javax.xml.transform.Transformer transformer = tFactory.newTransformer(new javax.xml.transform.stream.StreamSource(stylesheet)); + StringReader s1 = new StringReader(quellstring); + transformer.setOutputProperty(javax.xml.transform.OutputKeys.ENCODING, SqlStringUtils.getEncoding()); + transformer.setOutputProperty(javax.xml.transform.OutputKeys.METHOD, methode); + transformer.setParameter("sx_client", format); + String result = null; + if (!params.endsWith(",")) params += ","; + StringTokenizer st = new StringTokenizer(params, ","); + for (; st.hasMoreTokens();) { + String param = st.nextToken(); + if (!param.equals("")) { + String paramname = param.substring(0, param.indexOf("=")); + String paramvalue = param.substring(param.indexOf("=") + 1, param.length()); + transformer.setParameter(paramname, paramvalue); + } + } + transformer.transform(new javax.xml.transform.stream.StreamSource(s1), new javax.xml.transform.stream.StreamResult(myOutwriter)); + } catch (Exception e) { + myLogger.severe("XSL-Transformation fehlgeschlagen: " + e.toString()); + + } + + } + + public void transformFile(String methode) throws TransformerException, Exception + { + + javax.xml.transform.TransformerFactory tFactory = javax.xml.transform.TransformerFactory.newInstance("net.sf.saxon.TransformerFactoryImpl", null); + + javax.xml.transform.Transformer transformer = tFactory.newTransformer(new javax.xml.transform.stream.StreamSource(this.stylesheet)); + //StringReader s1 = new StringReader(quellstring); + transformer.setOutputProperty(javax.xml.transform.OutputKeys.ENCODING, SqlStringUtils.getEncoding()); + if (methode.equals("pdf") || 
methode.equals("rtf")) + transformer.setOutputProperty(javax.xml.transform.OutputKeys.METHOD, "xml"); + else + transformer.setOutputProperty(javax.xml.transform.OutputKeys.METHOD, methode); + + transformer.setParameter("sx_client", format); + transformer.setParameter("versionParam", "2.0"); + + // String result = null; + if (!params.endsWith(",")) params += ","; + StringTokenizer st = new StringTokenizer(params, ","); + for (; st.hasMoreTokens();) { + String param = st.nextToken(); + if (!param.equals("")) { + String paramname = param.substring(0, param.indexOf("=")); + String paramvalue = param.substring(param.indexOf("=") + 1, param.length()); + transformer.setParameter(paramname, paramvalue); + } + } + + if (methode.equals("pdf")) { //Sichern um zurücksetzen zu können. + Level logLevel = myLogger.getParent().getLevel(); + myLogger.getParent().setLevel(Level.SEVERE); + + // configure fopFactory as desired + //akt. Pfad muss WEB-INF sein oder vorher als -fopxconf Param übergeben worden sein + + + FopFactory fopFactory ; + FOUserAgent foUserAgent; + + // in fop.xconf fopFactory.setSourceResolution(96); // =96dpi (dots/pixels per Inch) + if (fopxconfFile==null) + fopxconfFile=new File ("conf" + File.separator + "fop.xconf"); + if (!fopxconfFile.exists()) + { + String msg=fopxconfFile.getAbsolutePath()+" nicht gefunden"; + System.out.println(msg); + throw new IllegalStateException(msg); + } + + FopConfParser parser = new FopConfParser(fopxconfFile); //parsing configuration + + FopFactoryBuilder builder = parser.getFopFactoryBuilder(); //building the factory with the user options + String fa=fopxconfFile.getCanonicalPath(); + + URI uri=new File(fa).getParentFile().getParentFile().toURI(); + // System.out.println(uri); + builder.setBaseURI(uri); + builder.setStrictFOValidation(false); + fopFactory = builder.build(); + + foUserAgent = fopFactory.newFOUserAgent(); + + + try { + myOutputStream = new java.io.FileOutputStream(outfile); + } catch (FileNotFoundException e) { + myLogger.severe("Ausgabedatei " + outfile + " kann nicht erzeugt werden:" + e.toString()); + System.exit(-1); + } + + try { + // Construct fop with desired output format + Fop fop = fopFactory.newFop(MimeConstants.MIME_PDF, foUserAgent, myOutputStream); + + // Setup input for XSLT transformation + Source src = new StreamSource(quellstring); + + // Resulting SAX events (the generated FO) must be piped through to FOP + Result res = new SAXResult(fop.getDefaultHandler()); + + // Start XSLT transformation and FOP processing + try { + transformer.transform(src, res); + System.out.println("Success!"); + } catch (TransformerException ex) { + throw new Exception(ex); + } + } catch (Exception e) { + e.printStackTrace(System.err); + myLogger.severe("FOP-Transformation Fehler: " + e.toString()); + } finally { + myOutputStream.close(); + myLogger.getParent().setLevel(logLevel); + } + + } else { + if (methode.equals("rtf")) { + String zielstring = ""; + myLogger.info("Ausgabedatei " + outfile + " vorbereiten"); + Writer myWriter = null; + try { + myWriter = new BufferedWriter(new FileWriter(outfile)); + } catch (IOException e) { + e.printStackTrace(); + } + try { + myLogger.info("Transformiere nach fo "); + transformer.transform(new javax.xml.transform.stream.StreamSource(quellstring), new StreamResult("tmp.fo")); + + myLogger.info("Transformiere nach rtf "); + + new Converter(new InputSource("tmp.fo"), myWriter, Converter.createConverterOption()); + } catch (Exception e1) { + + e1.printStackTrace(); + } + } else { + if 
(methode.equals("xls")) { + + } else { + //Normal xml Transformation, not fop or rtf + try { + + FileWriter myFile = new FileWriter(outfile); + myOutwriter = new PrintWriter(myFile); + transformer.transform(new javax.xml.transform.stream.StreamSource(quellstring), new javax.xml.transform.stream.StreamResult(myOutwriter)); + } catch (IOException e) { + myLogger.severe("Datei " + outfile + " kann nicht erzeugt werden: " + e.toString()); + } catch (TransformerException e) { + myLogger.severe("Datei " + outfile + " kann nicht transformiert werden: " + e.toString()); + } + //new javax.xml.transform.stream.StreamResult( myOutwriter)); + } + } + } + + } + + public void outputString() { + BufferedWriter out = new BufferedWriter(myOutwriter); + try { + out.write(quellstring); + out.flush(); + } catch (IOException e) { + myLogger.severe("Fehler beim Ausgeben des Ergebnisses: " + e.toString()); + } + + } + + public String getResult(String outFormat) throws SQLException { + + //loglevel=myLogger.getRootLogger().getLevel().toString().trim(); + assignDelims(outFormat); + myLogger.config("Resultset wird formatiert"); + String f_wert = ""; + //ResultSetMetaData rsmd = rs.getMetaData(); + int cols = outrsmd.getColumnCount(); + String[] lbl = new String[255]; + String zs = rs_delim1; + String headZs = header_delim1; + ; + // Print the result column names? + for (int i = 1; i <= cols; i++) { + lbl[i] = outrsmd.getColumnLabel(i); + if (outFormat.equals("xml")) { + fld_delim1 = ""; + fld_delim2 = ""; + } else { + + if (i < cols) + headZs += (head_delim1 + lbl[i] + head_delim2); + else { + headZs += (head_delim1 + lbl[i] + head_delim2); + headZs += (header_delim2); + //out.newLine(); + } + } + } + if (header.equals("true")) zs += headZs; + + //now the results + try { + while (outrs.next()) + + { + zs += (row_delim1); + for (int i = 1; i <= cols; i++) { + Object o = outrs.getObject(i); + f_wert = SxDBUtils.field_value(o); + if (outFormat.equals("xml")) { + fld_delim1 = ""; + fld_delim2 = "\n"; + } + if (outrs.wasNull()) + if (i < cols) + zs += (fld_delim1 + "" + fld_delim2); + else { + zs += (fld_delim1 + "" + fld_delim2); + zs += (row_delim2); + //out.newLine(); + } + else if (i < cols) + zs += (fld_delim1 + f_wert.trim() + fld_delim2); + else { + zs += (fld_delim1 + f_wert.trim() + fld_delim2); + zs += (row_delim2); + + // out.newLine(); + } + + // } + // catch (IOException e) + // { + // myLogger.severe("Fehler beim Ausgeben der Feldwerte: + // "+e.toString()); + // + // } + + } + } //von while + zs += (rs_delim2); + + } catch (Exception e) { + myLogger.severe("Fehler beim Ausgeben der Feldwerte: " + e.toString()); + } + outrs.close(); + + return zs; + } + + private void assignDelims(String outFormat) { + if (outFormat.equals("html")) { + outHeader = ""; + outFooter = ""; + rs_delim1 = "\n"; + rs_delim2 = "
"; + head_delim1 = ""; + head_delim2 = ""; + header_delim1 = ""; + header_delim2 = "\n"; + row_delim1 = ""; + row_delim2 = ""; + fld_delim1 = ""; + fld_delim2 = ""; + } + if (outFormat.equals("xml")) { + outHeader = ""; + outFooter = ""; + rs_delim1 = "\n"; + rs_delim2 = ""; + head_delim1 = ""; + head_delim2 = ""; + header_delim1 = ""; + header_delim2 = ""; + row_delim1 = "\n"; + row_delim2 = "\n"; + fld_delim1 = ""; + fld_delim2 = ""; + } + } + + /** + * @return + */ + public String getDelim() { + return delim; + } + + /** + * @return + */ + public String getFld_delim1() { + return fld_delim1; + } + + /** + * @return + */ + public String getFld_delim2() { + return fld_delim2; + } + + /** + * @return + */ + public String getFormat() { + return format; + } + + /** + * @return + */ + public String getHead_delim1() { + return head_delim1; + } + + /** + * @return + */ + public String getHead_delim2() { + return head_delim2; + } + + /** + * @return + */ + public String getHeader() { + return header; + } + + /** + * @return + */ + public String getHeader_delim1() { + return header_delim1; + } + + /** + * @return + */ + public String getHeader_delim2() { + return header_delim2; + } + + /** + * @return + */ + public ResultSet getOutrs() { + return outrs; + } + + /** + * @return + */ + public ResultSetMetaData getOutrsmd() { + return outrsmd; + } + + /** + * @return + */ + public String getRow_delim1() { + return row_delim1; + } + + /** + * @return + */ + public String getRow_delim2() { + return row_delim2; + } + + /** + * @return + */ + public String getRs_delim1() { + return rs_delim1; + } + + /** + * @return + */ + public String getRs_delim2() { + return rs_delim2; + } + + /** + * @return + */ + public String[] getXslPipeline() { + return XslPipeline; + } + + /** + * @param string + */ + public void setDelim(String string) { + delim = string; + fld_delim1 = ""; + fld_delim2 = string; + head_delim2 = string; + } + + /** + * @param string + */ + public void setFld_delim1(String string) { + fld_delim1 = string; + } + + /** + * @param string + */ + public void setFld_delim2(String string) { + fld_delim2 = string; + } + + /** + * @param string + */ + public void setFormat(String string) { + format = string; + } + + /** + * @param string + */ + public void setHead_delim1(String string) { + head_delim1 = string; + } + + /** + * @param string + */ + public void setHead_delim2(String string) { + head_delim2 = string; + } + + /** + * @param string + */ + public void setHeader(String string) { + header = string; + } + + /** + * @param string + */ + public void setHeader_delim1(String string) { + header_delim1 = string; + } + + /** + * @param string + */ + public void setHeader_delim2(String string) { + header_delim2 = string; + } + + /** + * @param set + */ + + public void setOutrs(ResultSet set) { + outrs = set; + } + + /** + * @param data + */ + public void setOutrsmd(ResultSetMetaData data) { + outrsmd = data; + } + + /** + * @param string + */ + public void setRow_delim1(String string) { + row_delim1 = string; + } + + /** + * @param string + */ + public void setRow_delim2(String string) { + row_delim2 = string; + } + + /** + * @param string + */ + public void setRs_delim1(String string) { + rs_delim1 = string; + } + + /** + * @param string + */ + public void setRs_delim2(String string) { + rs_delim2 = string; + } + + /** + * @param strings + */ + public void setXslPipeline(String[] strings) { + XslPipeline = strings; + } + + /** + * @return + */ + public String getOutFooter() { + return outFooter; + } 
+ + /** + * @return + */ + public String getOutHeader() { + return outHeader; + } + + /** + * @param string + */ + public void setOutFooter(String string) { + outFooter = string; + } + + /** + * @param string + */ + public void setOutHeader(String string) { + outHeader = string; + } + + /** + * @return + */ + public String getParams() { + return params; + } + + /** + * @param string + */ + public void setParams(String string) { + params = string; + } + + //-- nur zum Testen --// + public static void main(String[] args) throws Exception { + Logger log = Logger.getLogger(SxTransformer.class.getName()); + String rootDir = "/home/superx/tmp/"; + String quelle = rootDir + "Buchungsbericht_ids_Test.xml"; + String zielFile = rootDir + "testneu.pdf"; + String sxlSheet = "/home/superx/dev/edustore/webserver/tomcat/webapps/superx/xml/tabelle_fo_pdf.xsl"; + + SxTransformer sxTrans = new SxTransformer(log, zielFile); + + sxTrans.quellstring = quelle; + sxTrans.outfile = zielFile; + sxTrans.stylesheet = sxlSheet; + sxTrans.transformFile("pdf"); + + + }//Ende der Methode + + public class ClasspathUriResolver implements URIResolver { + + public Source resolve(String href, String base) throws TransformerException { + Source source = null; + InputStream inputStream = ClassLoader.getSystemResourceAsStream(href); + if (inputStream != null) { + source = new StreamSource(inputStream); + } + return source; + } + } +}//Ende der Klasse + + diff --git a/src/de/superx/etl/bin/UnloadRecords.java b/src/de/superx/etl/bin/UnloadRecords.java new file mode 100644 index 0000000..a26b1d0 --- /dev/null +++ b/src/de/superx/etl/bin/UnloadRecords.java @@ -0,0 +1,214 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.bin; +import java.io.File; +import java.io.FileInputStream; +import java.io.FileNotFoundException; +import java.io.FileWriter; +import java.io.IOException; +import java.io.StringWriter; +import java.sql.Connection; +import java.sql.DatabaseMetaData; +import java.sql.ResultSet; +import java.sql.ResultSetMetaData; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.logging.LogManager; +import java.util.logging.Logger; +import java.util.StringTokenizer; +import de.superx.util.SqlStringUtils; +import de.superx.bin.SxConnection; +import de.superx.common.SuperX_el; +import de.superx.etl.QueryResultSerializer; +import de.superx.servlet.SxPools; +import de.superx.servlet.SxSQL_Server; + +/** + * @author Daniel Quathamer Projektgruppe SuperX + * doquery.java + * @ + * Dieses Javaprogramm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.
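// Editor's illustration, not part of the patch: calling UnloadRecords.go() with the
// positional arguments described in the usage notes that follow. Paths, query and
// output file name are invented.
public class UnloadRecordsSketch {
    public static void main(String[] args) throws Exception {
        String[] unloadArgs = {
            "../conf/logging.properties",        // logger properties
            "../conf/db.properties",             // db properties
            "select tid, name from systeminfo",  // SQL to execute
            "txt",                               // output format (txt or xml)
            "^",                                 // field delimiter
            "true",                              // print a header row
            "systeminfo.unl"                     // output file; leave it out to print to stdout instead
        };
        int rc = de.superx.etl.bin.UnloadRecords.go(unloadArgs);
        System.out.println("returnCode=" + rc);
    }
}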
+ * Gebrauch:
java doquery (optional) (optional) (optional) + * + + */ +//Änderungen +/* + * 16.4.04 Wenn kein outfile angegeben, werden alle select results auf die Console geloggt + * 19.1.2006 dq: Unload großer Tabellen unter Postgres ermöglicht. + */ +public class UnloadRecords { + private static Connection myDb; + private static Statement st; // Our statement to run queries with + private static DatabaseMetaData dbmd; + // This defines the structure of the database + private static boolean done = false; // Added by CWJ to permit \q command + private static String delim = "^"; + private static String header = "false"; + private static String outFormat = "txt"; + private static String logfile = "../conf/logging.properties"; + private static String tabelle = ""; + private static String dbpropfile = "../conf/db.properties"; + private static SxConnection myConnection = null; + private static String db_driver; + private static String mandantenID="default"; + private static String outfile = ""; + private static Logger logger = + (Logger) Logger.getLogger(Doquery.class.toString()); + private static String usage = + "-------------------------------------\nGebrauch: java de.superx.bin.UnloadRecords $LOGGER_PROPERTIES $DB_PROPERTIES $sql (optional: )$outformat $DBDELIMITER $header $filename \n---------------------------------------------------"; + + public static int go(String args[]) + throws + Exception { + if (args.length > 0) { + logfile = args[0].trim(); + } else { + throw new IllegalArgumentException("Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich"); + + } + File f = new File(logfile); + if (!f.exists()) { + throw new IOException("Datei nicht gefunden: " + logfile); + } + + FileInputStream ins = new FileInputStream(logfile); + LogManager MyLogManager = java.util.logging.LogManager.getLogManager(); + MyLogManager.readConfiguration(ins); + logfile = MyLogManager.getProperty(".level"); + logger.info("Using Loggging-Level " + logfile); + String query = ""; + long numberOfRows=0; + int returnCode=0; + + if (args.length > 1) { + dbpropfile = args[1].trim(); + } else { + logger.severe( + "Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich"); + System.exit(1); + } + if (args.length <= 2) { + logger.severe( + "Mindestens drei Parameter (Pfad zu den logger.properties, Pfad zu den db.properties, sql-String) erfoderlich"); + System.exit(1); + } + query=args[2].trim(); + + if (args.length > 3) { + outFormat = args[3].trim(); + } + + if (args.length > 4) { + delim = args[4].trim(); + } + if (args.length > 5) { + header = args[5].trim(); + + } + if (args.length > 6) { + outfile = args[6].trim(); + + } + if (delim.equals("")) + delim = "^"; //default Delimiter + + boolean printColname=(header.equalsIgnoreCase("true")?true:false); + FileWriter fw; + StringWriter sw; + long jetzt = new java.util.Date().getTime() ; + getConnection(logger, dbpropfile); + QueryResultSerializer myQueryResultSerializer = null; + if(!outfile.equals("")) + { + try { + fw = new FileWriter(outfile); + if(outFormat.equalsIgnoreCase("xml")) + fw.write(""); + myQueryResultSerializer=new QueryResultSerializer("default", myConnection, query,fw); + + } catch (IOException e) { + logger.severe ("Fehler beim Erstellen der Datei "+outfile); + returnCode=1; + } + + } + else + { + sw=new StringWriter(); + if(outFormat.equalsIgnoreCase("xml")) + sw.write(""); + myQueryResultSerializer=new QueryResultSerializer("default", myConnection, 
query,sw); + } + + numberOfRows=myQueryResultSerializer.unloadQueryResults("DOQUERY",outFormat,delim,printColname); + returnCode=myQueryResultSerializer.getReturnCode(); + if(outfile.equals("")) + { + sw=myQueryResultSerializer.getOutputString(); + System.out.println(sw.toString()); + } + long erstrecht = new java.util.Date().getTime() ; + System.out.println( numberOfRows+" rows unloaded in "+(erstrecht-jetzt)/1000 +" Sec."); + System.out.println( "returnCode= "+returnCode); + myQueryResultSerializer = null; + return returnCode; + + + } + + /* + * Display some instructions on how to run the example + */ + public static void instructions() { + System.out.println("SuperX @version@\n"); + System.out.println( + "\nDieses Java-Programm führt einen SQL-Ausdruck aus und gibt das Ergebnis aus.\n"); + System.out.println( + "Gebrauch:\n java doquery (optional) (optional) (optional)\n"); + System.exit(1); + } + + public static void main(String args[]) { + int returnCode=0; + try { + returnCode=go(args); + + } catch (Exception ex) { + System.err.println("Doquery Aufruf fehlgeschlagen.\n" + ex); + ex.printStackTrace(); + System.exit(1); + } + System.out.println("Doquery Aufruf ausgeführt.\n" ); + System.exit(returnCode); + } + public static void getConnection(Logger logger,String propFile) throws SQLException { + myConnection = new SxConnection(); + myConnection.setPropfile(propFile); + logger.config("Starting Connection..."); + try { + myDb = myConnection.getConnection(); + st = myDb.createStatement(); + //st = myDb.createStatement(java.sql.ResultSet.TYPE_FORWARD_ONLY, + // java.sql.ResultSet.CONCUR_READ_ONLY); + //st.setFetchSize(100); + dbmd = myDb.getMetaData(); + //st = myDb.createStatement(ResultSet.TYPE_SCROLL_INSENSITIVE, + //ResultSet.CONCUR_UPDATABLE); + } catch (Exception e) { + e.printStackTrace(); + logger.severe("Keine DB-Verbindung: " + e.toString()); + throw new SQLException("Keine DB-Verbindung: " + e.toString()); + } + db_driver = myConnection.m_DriverClass; + + } + + +} diff --git a/src/de/superx/etl/bin/UploadRecords.java b/src/de/superx/etl/bin/UploadRecords.java new file mode 100644 index 0000000..8570c2d --- /dev/null +++ b/src/de/superx/etl/bin/UploadRecords.java @@ -0,0 +1,94 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.etl.bin; +import de.superx.etl.util.GetOpts; +import de.superx.etl.TableUploader; + +/* + * @author Daniel Quathamer Projektgruppe SuperX + * upload_records.java +* Dieses Javaprogramm lädt Inhalte einer Datei in eine Tabelle hoch") +* DQ 5.1.2006 Upload vom XML-Dateien möglich + * + */ + +public class UploadRecords { + private static String usage = + "-------------------------------------\n" + + "Gebrauch: java de.superx.bin.UploadRecords \n-dbproperties: \n" + + "-table: \n-unl:(optional, default ist Tabellenname.unl) \n-delim:(optional, default ist ^) \n-header:(optional, mit Feldüberschriften, default ist false)\n" + + "-mode:(optional, default is stop) #Bei Fehlerhaften Daten kann das Hochladen gestoppt werden, oder der Datensatz wird übersprungen" + + "\n-inserts:(optional, default is false) #Bei -inserts:simple und batch werden Die Rohdaten in Insert-sql-Statements übersetzt (nur für Debugging-Zwecke, sehr langsam. 
Der Modus exclude-field ist darüberhinaus nicht anwendbar)" + + "\n-encoding:" + + "\n---------------------------------------------------"; + + public static void main(String args[]) { + try { + GetOpts.setOpts(args); + String isdrin = + GetOpts.isAllRequiredOptionsPresent("-dbproperties,-table,-unl"); + if (isdrin != null) { + System.err.println("Folgende Optionen fehlen: " + isdrin); + System.err.println(usage); + System.exit(1); + } + TableUploader myUploader=new TableUploader(); + //GetOpts myOpts=new GetOpts(); + if (GetOpts.isPresent("-dbproperties")) + myUploader.setDbpropfile(GetOpts.getValue("-dbproperties")); + if (GetOpts.isPresent("-informat")) + myUploader.setInFormat(GetOpts.getValue("-informat")); + if (GetOpts.isPresent("-table")) + myUploader.setTargetTable( GetOpts.getValue("-table")); + + if (GetOpts.isPresent("-unl")) + myUploader.setSrcFile(GetOpts.getValue("-unl")); + else + myUploader.setSrcFile(myUploader.getTargetTable() + ".unl"); + if (GetOpts.isPresent("-header")) + myUploader.setHeader(GetOpts.getValue("-header").equalsIgnoreCase("true")?true:false); + if (GetOpts.isPresent("-delim")) + myUploader.setDelim(GetOpts.getValue("-delim")); + if (GetOpts.isPresent("-encoding")) + { + String encodingParam=GetOpts.getValue("-encoding"); + + if(encodingParam != null && !encodingParam.equals("") ) + myUploader.setEncoding(encodingParam); + } + else + myUploader.setEncoding(System.getProperty("file.encoding")); + if (GetOpts.isPresent("-mode")) { + myUploader.setMode(GetOpts.getValue("-mode").toLowerCase()); + + } + if (GetOpts.isPresent("-inserts")) + myUploader.setInserts(GetOpts.getValue("-inserts")); + long jetzt = new java.util.Date().getTime() ; + long numberOfRows=0; + myUploader.setUploadConnection(myUploader.getConnection(null,myUploader.getDbpropfile())); + numberOfRows=myUploader.uploadFile(); + long erstrecht = new java.util.Date().getTime() ; + System.out.println(numberOfRows+" lines loaded"); + System.out.println("File "+myUploader.getSrcFile() +" uploaded, returnCode="+myUploader.getReturnCode()); + myUploader=null; + //if(protokoll.equals("")) + // protokoll= " in "+(erstrecht-jetzt)/1000 +" Sec."; + //System.out.println(protokoll); + +} catch (Exception ex) { + System.err.println("Upload fehlgeschlagen: " + ex); + System.exit(1); +} +} + } + + + + + diff --git a/src/de/superx/etl/util/GetOpts.java b/src/de/superx/etl/util/GetOpts.java new file mode 100644 index 0000000..1db8737 --- /dev/null +++ b/src/de/superx/etl/util/GetOpts.java @@ -0,0 +1,155 @@ +package de.superx.etl.util; + + +import java.util.StringTokenizer; + +/** + * Original von de.memtext.util.GetOpts, + * für Abwärtskompatibilität mit Kern 4.9/BI 2021.06 + * + */ +public class GetOpts { + private static String[] arguments = null; + + //don't instantiate + private GetOpts() { + + } + + /** + * If you pass the arguments you want to analyse to this static helper class + * using setOpts, you can later use the short form of the methods e.g. + * isPresent(String option) without having to pass the arguments again. + * + * @param args + */ + public static void setOpts(String args[]) { + arguments = args; + } + + /** + * checks if the arguments passed before by setOpts contain the given option + * + * @param String + * option + * @return true if arguments contain the option, i.e. 
one String which + * starts with the option-String + */ + public static boolean isPresent(String option) { + if (arguments == null) + throw new IllegalStateException( + "must either use setOpts before or call the long version of this method"); + return isPresent(arguments, option); + } + + /** + * checks if the arguments contain the given option + * + * @param args + * @param String + * option + * @return true if arguments contain the option, i.e. one String which + * starts with the option-String + */ + public static boolean isPresent(String args[], String option) { + boolean result = false; + for (int i = 0; i < args.length; i++) + if (args[i] != null && args[i].startsWith(option)) + result = true; + return result; + } + + /** + * Gets a named option from the arguments passed before with setOps. E.g. + * getOpt("--log") would return the "--log:true" in the arguments + * + * @param option - + * name/start of the option + * @return String whole option + */ + public static String getOpt(String option) { + if (arguments == null) + throw new IllegalStateException( + "must either use setOpts before or call the long version of this method"); + return getOpt(option, arguments); + } + + /** + * Gets a named option from the arguments. E.g. getOpt("--log") would return + * the "--log:true" in the arguments + * + * @param option - + * name/start of the option + * @return String whole option + */ + public static String getOpt(String option, String args[]) { + if (args == null) + throw new IllegalStateException("args must not be null"); + String result = null; + for (int i = 0; i < args.length; i++) + if (args[i].startsWith(option)) + result = args[i]; + if (result == null) + throw new RuntimeException("Option " + option + " not found!"); + return result; + } + + /** + * Gets the value of an option from the arguments passed before with setOpts + * if for example, you ask getValue("--log:") and the arguments passed + * before with setOpts contain a String "--log:true") "true" is returned + * + * @param String + * option + * @return String value of the option + */ + public static String getValue(String option) { + if (arguments == null) + throw new IllegalStateException( + "must either use setOpts before or call the long version of this method"); + return getValue(option, arguments); + } + + /** + * Gets the value of an option, if for example, you ask getValue("--log:") + * and the arguments passed before with setOpts contain a String + * "--log:true") "true" is returned + * + * @param String + * option + * @return String value of the option + */ + public static String getValue(String option, String args[]) { + String result = ""; + String raw = getOpt(option, args); + int pos = option.length(); + if (raw.charAt(pos) == ':') + pos++; + result = raw.substring(pos, raw.length()); + return result; + } + + /** + * Prüft, ob alle notwendigen Optionen angegeben sind, z.B. String + * "-loggingProperties:,-dbProperties" übergeben, die beiden müssen dabei + * sein + * + * @param options + * z.B. "-loggingProperties:,-dbProperties,..." 
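// Hedged usage sketch for this helper (editor's illustration, not part of the patch);
// the option values are invented, the behaviour follows the methods of this class.
import de.superx.etl.util.GetOpts;

public class GetOptsSketch {
    public static void main(String[] args) {
        String[] argv = { "-dbproperties:../conf/db.properties", "-table:systeminfo", "-header:true" };
        GetOpts.setOpts(argv);
        System.out.println(GetOpts.isAllRequiredOptionsPresent("-dbproperties,-table,-unl")); // "-unl" (missing)
        System.out.println(GetOpts.isPresent("-header"));  // true
        System.out.println(GetOpts.getValue("-table"));    // "systeminfo" (a ':' after the option name is stripped)
    }
}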
+ * @return null - alles OK, ansonsten String mit den fehlenden Optionen + */ + public static String isAllRequiredOptionsPresent(String options) { + String result = null; + StringTokenizer st = new StringTokenizer(options, ","); + for (; st.hasMoreTokens();) { + String optionName = st.nextToken(); + if (!isPresent(optionName)) { + if (result == null) + result = optionName; + else + result += "," + optionName; + } + } + return result; + } +} diff --git a/src/de/superx/sxrest/JobExecutor.java b/src/de/superx/sxrest/JobExecutor.java new file mode 100644 index 0000000..3eb3b33 --- /dev/null +++ b/src/de/superx/sxrest/JobExecutor.java @@ -0,0 +1,47 @@ +package de.superx.sxrest; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Request; + +import java.sql.SQLException; + +import javax.servlet.http.HttpServletRequest; +import de.memtext.util.DateUtils; +import de.superx.common.SxUser; +import de.superx.etl.EtlActionJob; +import de.superx.servlet.SuperXManager; + +@Path("/execute") + +public class JobExecutor { + @Context + Request request; + + @GET + @Path("/{param}") + @Produces(MediaType.TEXT_PLAIN) + public String printLogs(@PathParam("param") String job, @Context HttpServletRequest request) { + String returntext = ""; + SxUser user = (SxUser) request.getSession().getAttribute("user"); + if (user == null || !user.isAdmin()) { + returntext = "Fehlende Rechte"; + } else { + EtlActionJob myJob = new EtlActionJob(job, "", ""); + try { + myJob.execute(); + } catch (Exception e) { + e.printStackTrace(); + returntext = "Fehler " + e; + + } + + + } + return returntext; + } +} diff --git a/src/de/superx/sxrest/LaunchUpgrade.java b/src/de/superx/sxrest/LaunchUpgrade.java new file mode 100644 index 0000000..b504b72 --- /dev/null +++ b/src/de/superx/sxrest/LaunchUpgrade.java @@ -0,0 +1,81 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.sxrest; +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.Produces; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Request; +import de.memtext.util.DateUtils; +import de.superx.common.SxUser; +import de.superx.etl.EtlActionJob; +import de.superx.etl.EtlUtils; +import de.superx.servlet.SuperXManager; +import javax.servlet.http.HttpServletRequest; + + +@Path("/launchupgrade") + +public class LaunchUpgrade { + @Context + Request request; + + @GET + @Path("/{param}") + @Produces(MediaType.TEXT_PLAIN) + public String printXml(@PathParam("param") String componentName,@Context HttpServletRequest request) { + String jobOutput=""; + String jobLog=""; + int returnCode=0; + SxUser user = (SxUser) request.getSession().getAttribute("user"); + String mandantenid= (String) request.getSession().getAttribute("MandantenID"); + if(mandantenid==null || mandantenid.equals("")) + mandantenid="default"; + + if (user == null || !user.isAdmin()) { + jobLog+="Fehlende Rechte"; + returnCode=1; + } + else + { + String job=componentName+"_upgrade"; + String params=componentName.toUpperCase()+"_PFAD="+SuperXManager.getWEB_INFPfad()+EtlUtils.PATHSEP+"conf"+EtlUtils.PATHSEP+"edustore"+EtlUtils.PATHSEP+"db"+EtlUtils.PATHSEP+"module"+EtlUtils.PATHSEP+componentName; 
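// Editor's note, hedged reading of the params assembled above: for a hypothetical
// component "kern" and a typical Tomcat layout, the value would look roughly like
//   KERN_PFAD=<webapp>/WEB-INF/conf/edustore/db/module/kern
// initJob() below receives such KEY=VALUE pairs; where several are passed (see
// MaskXml.saveXml() further down) they are joined with EtlUtils.NEWLINE.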
+ + EtlActionJob myJob=new EtlActionJob(job); //TODO:Mandantid + try { + myJob.initJob(job,params); + } catch (Exception e) { + // TODO Auto-generated catch block + returnCode=1; + jobLog+=myJob.getActionLog().toString(); + jobLog+=e.toString(); + jobLog+=myJob.getLogoutput(); + + } + + try { + returnCode= myJob.execute(""); + jobOutput+=myJob.getSw().toString(); + jobLog+=myJob.getActionLog().toString(); + + } catch (Exception e) { + returnCode=1; + jobLog+=myJob.getActionLog().toString(); + jobLog+=e.toString(); + jobLog+=myJob.getLogoutput(); + } + } + if(returnCode==0) + return "Upgrade erfolgreich: "+EtlUtils.NEWLINE+jobLog; + else + return "Upgrade mit Fehler beendet: "+EtlUtils.NEWLINE+jobLog; + } + +} diff --git a/src/de/superx/sxrest/MaskXml.java b/src/de/superx/sxrest/MaskXml.java new file mode 100644 index 0000000..edfe164 --- /dev/null +++ b/src/de/superx/sxrest/MaskXml.java @@ -0,0 +1,183 @@ +/* + * de.superx.etl - a package for controlling ETL routines + * Copyright (C) 2021 Daniel Quathamer + * + * This package is licensed under the CampusSource License; + * http://www.campussource.de/org/license/ + */ +package de.superx.sxrest; +import javax.ws.rs.Consumes; +import javax.ws.rs.DELETE; +import javax.ws.rs.FormParam; +import javax.ws.rs.GET; +import javax.ws.rs.HEAD; +import javax.ws.rs.POST; +import javax.ws.rs.PUT; +import javax.ws.rs.Path; +import javax.ws.rs.PathParam; +import javax.ws.rs.core.Response; +import javax.xml.parsers.ParserConfigurationException; +import javax.xml.xpath.XPath; +import javax.xml.xpath.XPathConstants; +import javax.xml.xpath.XPathExpressionException; +import javax.xml.xpath.XPathFactory; + + +import org.w3c.dom.Document; +import org.w3c.dom.Node; +import org.w3c.dom.NodeList; +import org.xml.sax.SAXException; + +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.Context; +import javax.ws.rs.core.MediaType; +import javax.ws.rs.core.Request; +import de.memtext.util.DateUtils; +import de.superx.common.SxUser; +import de.superx.etl.EtlActionJob; +import de.superx.etl.EtlUtils; +import de.superx.servlet.SuperXManager; + +import java.io.BufferedWriter; +import java.io.File; +import java.io.FileWriter; +import java.io.IOException; + +import javax.servlet.http.HttpServletRequest; + + +@Path("/maskxml") + +public class MaskXml { + @Context + Request request; + + @GET + @Path("/get/{param}") + @Produces(MediaType.TEXT_PLAIN) + + + public String printXml(@PathParam("param") String tid,@Context HttpServletRequest request) { + String returntext=""; + SxUser user = (SxUser) request.getSession().getAttribute("user"); + String mandantenid= (String) request.getSession().getAttribute("MandantenID"); + if(mandantenid==null || mandantenid.equals("")) + mandantenid="default"; + + if (user == null || !user.isAdmin()) { + returntext="Fehlende Rechte"; + } + else + { + String job="sx_select_mask"; + String params="TID="+tid; + int returnCode=0; + EtlActionJob myJob=new EtlActionJob(job); //TODO:Mandantid + try { + myJob.initJob(job,params); + } catch (Exception e) { + // TODO Auto-generated catch block + + returntext=e.toString(); + returntext+=myJob.getLogoutput(); + + } + System.out.println("Job "+ job+" initialized"); + try { + returnCode= myJob.execute(""); + returntext=myJob.getSw().toString(); + + } catch (Exception e) { + // TODO Auto-generated catch block + returntext=e.toString(); + returntext+=myJob.getLogoutput(); + } + } + + return returntext; + } + @GET + @Path("/saveget/{params}") + @Produces(MediaType.TEXT_PLAIN) + + + 
public String saveGetXml(@QueryParam("param") String tid, + @QueryParam("Inhalt") String src, + @Context HttpServletRequest request) { + String returntext=""; + SxUser user = (SxUser) request.getSession().getAttribute("user"); + String mandantenid= (String) request.getSession().getAttribute("MandantenID"); + if(mandantenid==null || mandantenid.equals("")) + mandantenid="default"; + + if (user == null || !user.isAdmin()) { + returntext="Fehlende Rechte"; + } + else + { + returntext="tid="+tid+"-src="+src; + } + + return returntext; + } + @POST + @Path("/save") + @Consumes("application/x-www-form-urlencoded") + //@Consumes(MediaType.APPLICATION_FORM_URLENCODED) + @Produces(MediaType.TEXT_PLAIN) + + public String saveXml(@FormParam("tid") String tid, + @FormParam("Inhalt") String src, + @Context HttpServletRequest request) { + String returntext=""; + int returnCode=0; + tid="16000"; + SxUser user = (SxUser) request.getSession().getAttribute("user"); + String mandantenid= (String) request.getSession().getAttribute("MandantenID"); + String searchString="/etlAction[@name=\"sx_select_mask\"]/unload[@name=\"unload_maskeninfo\"]/rs/row/fld[@name=\"tid\"]"; + if(mandantenid==null || mandantenid.equals("")) + mandantenid="default"; + + if (user == null || !user.isAdmin()) { + returntext="Fehlende Rechte"; + + } + else + { + try { + if(de.superx.etl.EtlUtils.isNodeValueInXml(src,searchString,tid)) + { + String job="sx_insert_mask"; + returntext="TID="+tid; + String params="TID="+tid; + File temp; + temp = File.createTempFile("myTempFile", ".xml"); + + BufferedWriter bw = new BufferedWriter(new FileWriter(temp)); + bw.write(src); + + bw.close(); + params+=EtlUtils.NEWLINE+"PATH_TO_INPUTFILE="+temp.getAbsolutePath(); + params+=EtlUtils.NEWLINE+"FORMAT=XML"; + EtlActionJob myJob=new EtlActionJob(job); //TODO:Mandantid + myJob.initJob(job,params); + returnCode= myJob.execute(""); + returntext=myJob.getSw().toString(); + + + + } + else + returntext="Übergebene TID "+tid+" entspricht nicht der XML-Datei "; + } catch (Exception e) { + returntext=e.toString(); + e.printStackTrace(); + } + + + } + return returntext; + } + +} diff --git a/superx/edit/etl/etl_manager.jsp b/superx/edit/etl/etl_manager.jsp new file mode 100644 index 0000000..f48ed06 --- /dev/null +++ b/superx/edit/etl/etl_manager.jsp @@ -0,0 +1,319 @@ +<%@ taglib uri="/WEB-INF/dbforms.tld" prefix="db" %> +<%@page pageEncoding="utf-8" contentType="text/html; charset=UTF-8" %> +<%@ page import ="de.superx.servlet.ServletUtils" %> +<%@ page import ="de.superx.servlet.SxPools" %> + + + + + + + + ETL Manager + + + + + +<% +//init Variables: +String EntwicklungsmodusAn=""; +String EntwicklungsmodusAus="checked"; + +Object mandantobject=request.getSession().getAttribute("MandantenID"); +String mandantenid=""; +if(mandantobject != null) + mandantenid=mandantobject.toString(); +//der superx-Default-Mandant ist bei dbforms ein Leerstring +if(mandantenid.equals("default")) + mandantenid=""; + +String tab = request.getParameter("tab"); +if(tab == null) + tab=""; + + +%> + + + + +
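Both the sxrest handlers above and the JSP pages below drive ETL jobs through the same EtlActionJob sequence. A hedged sketch of that sequence, editor's illustration and not part of the patch; the job name and TID follow the saveXml() example above, the temp-file path is invented:

import de.superx.etl.EtlActionJob;
import de.superx.etl.EtlUtils;

public class EtlJobSketch {
    public static void main(String[] args) throws Exception {
        String job = "sx_insert_mask";
        String params = "TID=16000"
                + EtlUtils.NEWLINE + "PATH_TO_INPUTFILE=/tmp/mask16000.xml"
                + EtlUtils.NEWLINE + "FORMAT=XML";
        EtlActionJob myJob = new EtlActionJob(job);
        myJob.initJob(job, params);                 // resolve the job definition and its parameters
        int rc = myJob.execute("");                 // "" as in the handlers above (no output file)
        System.out.println(myJob.getSw());          // serialized job output, as read by MaskXml.printXml()
        System.out.println(myJob.getActionLog());   // step-by-step log, as shown by jobexecutor.jsp below
        System.out.println(EtlUtils.translateReturnCode(rc));
    }
}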
+
+
+ +
+
+<% +if(tab.equals("")) +{ +%> + +
+
+
+

+ ETL Manager +

+

+ Diese Komponente dient der Masken-Verwaltung. +

+

Klicken Sie oben links auf "Masken", um zum Menü zu gelangen.

+
+
+
+ +
+
+ +
+
+ +
+
+<% + } //Ende Tiles Startseite +%> +<% +if(tab.equals("mask")) +{ +%> +
+ + +
+

Masken-Verwaltung

+

+
Wählen Sie links das Masken-Menü
+

+ +
+
+ +
+ + + +
+
+ + +
+ + +
+<% + } //Ende Mask Content +%> + +<% +if(tab.equals("action")) +{ +%> +
+
+

Komponenten

+

+
Install / Upgrade / Hauptladeroutinen / Unterladeroutinen
+

+ +
+ +
+ +
+
+<% + } //Ende Action Content +%> +
+
+
+ + + + + diff --git a/superx/edit/etl/jobexecutor.jsp b/superx/edit/etl/jobexecutor.jsp new file mode 100644 index 0000000..989e658 --- /dev/null +++ b/superx/edit/etl/jobexecutor.jsp @@ -0,0 +1,163 @@ +<%@ taglib uri="/WEB-INF/dbforms.tld" prefix="db" %> +<%@page pageEncoding="utf-8" contentType="text/html; charset=UTF-8" %> +<%@ page import ="de.superx.servlet.ServletUtils" %> +<%@ page import ="de.superx.servlet.SxPools" %> +<%@ page import ="java.io.BufferedWriter" %> +<%@ page import ="java.io.File" %> +<%@ page import ="java.io.FileWriter" %> +<%@ page import ="java.io.IOException" %> +<%@ page import ="de.superx.etl.EtlActionJob" %> +<%@ page import ="de.superx.etl.EtlUtils" %> +<%@ page import ="de.superx.common.SxUser" %> +<%@ page import ="de.superx.servlet.SuperXManager" %> + + + + + + + + Ausführung + + + + + +<% +if (request.getCharacterEncoding() == null) + request.setCharacterEncoding("UTF-8"); + +//Object userobject=request.getSession().getAttribute("UserID"); +String userid; +String filter=""; +String sql=""; + String returntext=""; + int returnCode=0; + +if(userobject == null) +{ +%> + + +<% +} +else + userid=userobject.toString(); +Object mandantobject=request.getSession().getAttribute("MandantenID"); +String mandantenid=""; +if(mandantobject != null) + mandantenid=mandantobject.toString(); +//der superx-Default-Mandant ist bei dbforms ein Leerstring +if(mandantenid.equals("default")) + mandantenid=""; +String db_form_name="etl_job"; +String erlaubt="0"; +%> +<%@ include file="/edit/check_authentication.inc" %> +<% +if(erlaubt.equals("0")) +{ +%> + + +<% +} + + + +//init Variables: + +String tid = request.getParameter("systeminfo_id"); +String componentUniquename = request.getParameter("componentUniquename"); +String componentAction = request.getParameter("componentAction"); + + + SxUser user = (SxUser) request.getSession().getAttribute("user"); + + if (user == null || !user.isAdmin()) { + returnCode=1; + returntext="Fehlende Rechte"; + + } + else + { + try { + String job=componentUniquename+"_"+componentAction; + String params=componentUniquename.toUpperCase()+"_PFAD="+SuperXManager.getWEB_INFPfad()+EtlUtils.PATHSEP+"conf"+EtlUtils.PATHSEP+"edustore"+EtlUtils.PATHSEP+"db"+EtlUtils.PATHSEP+"module"+EtlUtils.PATHSEP+componentUniquename; + + EtlActionJob myJob=new EtlActionJob(job); //TODO:Mandantid + myJob.initJob(job,params); + returnCode= myJob.execute(""); + returntext+=myJob.getActionLog().toString(); + + + + + } catch (Exception e) { + returnCode=1; + returntext=e.toString(); + e.printStackTrace(); + } + + + } + + +%> + + + + +
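For orientation (editor's note, not part of the patch): the scriptlet below reads systeminfo_id, componentUniquename and componentAction from the request, so an invocation from the component list in systeminfo_list.inc looks roughly like

    /superx/edit/etl/jobexecutor.jsp?systeminfo_id=42&componentUniquename=kern&componentAction=upgrade

where the component name and id are invented; the executed job is then named componentUniquename + "_" + componentAction, here kern_upgrade.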
+
+ +

Protokoll

+

+
Ergebnis der Ausführung
+

+

Status: <%= "Code "+ returnCode+ "("+de.superx.etl.EtlUtils.translateReturnCode(returnCode)+")" %>, Logausgabe: +

+
+
+ +
+ + + +
+
+ +
+
+ + + diff --git a/superx/edit/etl/saveMask.jsp b/superx/edit/etl/saveMask.jsp new file mode 100644 index 0000000..bb87da4 --- /dev/null +++ b/superx/edit/etl/saveMask.jsp @@ -0,0 +1,143 @@ +<%@ taglib uri="/WEB-INF/dbforms.tld" prefix="db" %> +<%@page pageEncoding="utf-8" contentType="text/html; charset=UTF-8" %> +<%@ page import ="de.superx.servlet.ServletUtils" %> +<%@ page import ="de.superx.servlet.SxPools" %> +<%@ page import ="java.io.BufferedWriter" %> +<%@ page import ="java.io.File" %> +<%@ page import ="java.io.FileWriter" %> +<%@ page import ="java.io.IOException" %> +<%@ page import ="de.superx.etl.EtlActionJob" %> +<%@ page import ="de.superx.etl.EtlUtils" %> +<%@ page import ="de.superx.common.SxUser" %> + + + + + + + Ausführung + + + + + +<% +//init Variables: +if (request.getCharacterEncoding() == null) + request.setCharacterEncoding("UTF-8"); + +String tid = request.getParameter("maskeninfo_id"); +String src = request.getParameter("src"); + String returntext=""; + int returnCode=0; + returntext=src; + + SxUser user = (SxUser) request.getSession().getAttribute("user"); + String mandantenid= (String) request.getSession().getAttribute("MandantenID"); + String searchString="/etlAction[@name=\"sx_select_mask\"]/unload[@name=\"unload_maskeninfo\"]/rs/row/fld[@name=\"tid\"]"; + if(mandantenid==null || mandantenid.equals("")) + mandantenid="default"; + + if (user == null || !user.isAdmin()) { + returnCode=1; + returntext="Fehlende Rechte"; + + } + else + { + try { + if(de.superx.etl.EtlUtils.isNodeValueInXml(src,searchString,tid)) + { + String job="sx_insert_mask"; + returntext="TID="+tid; + String params="TID="+tid; + File temp; + temp = File.createTempFile("myTempFile", ".xml"); + + BufferedWriter bw = new BufferedWriter(new FileWriter(temp)); + bw.write(src); + + bw.close(); + params+=EtlUtils.NEWLINE+"PATH_TO_INPUTFILE="+temp.getAbsolutePath(); + params+=EtlUtils.NEWLINE+"FORMAT=XML"; + EtlActionJob myJob=new EtlActionJob(job); //TODO:Mandantid + myJob.initJob(job,params); + returnCode= myJob.execute(""); + returntext+=myJob.getActionLog().toString(); + + + + } + else + { + returntext="Übergebene TID "+tid+" entspricht nicht der XML-Datei "; + returnCode=1; + } + } catch (Exception e) { + returnCode=1; + returntext=e.toString(); + e.printStackTrace(); + } + + + } + + +%> + + + + +
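The scriptlet below uses the same TID consistency check as MaskXml.saveXml(): the posted XML is accepted only if the tid field inside the sx_select_mask document matches the submitted maskeninfo_id. A hedged, stand-alone sketch (editor's illustration, not part of the patch); the sample document is a guess at the minimal shape the XPath expects:

import de.superx.etl.EtlUtils;

public class MaskTidCheckSketch {
    public static void main(String[] args) throws Exception {
        String src = "<etlAction name=\"sx_select_mask\">"
                + "<unload name=\"unload_maskeninfo\"><rs><row>"
                + "<fld name=\"tid\">16000</fld>"
                + "</row></rs></unload></etlAction>";
        String search = "/etlAction[@name=\"sx_select_mask\"]"
                + "/unload[@name=\"unload_maskeninfo\"]/rs/row/fld[@name=\"tid\"]";
        // true when the tid in the document matches the submitted tid
        System.out.println(EtlUtils.isNodeValueInXml(src, search, "16000"));
    }
}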
+
+ +

Protokoll

+

+
Ergebnis der Ausführung
+

+

Status: <%= "Code "+ returnCode+ "("+de.superx.etl.EtlUtils.translateReturnCode(returnCode)+")" %>, Logausgabe: +

+
+
+ +
+ + + +
+
+ +
+
+ + + diff --git a/superx/edit/etl/systeminfo_list.inc b/superx/edit/etl/systeminfo_list.inc new file mode 100644 index 0000000..486b112 --- /dev/null +++ b/superx/edit/etl/systeminfo_list.inc @@ -0,0 +1,45 @@ +<% +String systeminfo_id=""; +String componentName=""; +String componentUniquename=""; +String componentVersion=""; +%> + + +
    + + + +<% +systeminfo_id=currentRow_systeminfo.get("tid").toString(); +componentName=currentRow_systeminfo.get("name").toString().trim(); +%> + + + + +<% +if(currentRow_db_version!=null) +{ +componentUniquename=currentRow_db_version.get("his_system").toString().trim(); +componentVersion=currentRow_db_version.get("version").toString().trim(); +} +else +{ +componentUniquename="Unbekannt"; +componentVersion="Unbekannt"; +} +%> + + + + +
  • +" style="color:blue;cursor: pointer;"> +
  • +
    + +
+
+