1977 changed files with 116063 additions and 46915 deletions
@@ -0,0 +1,88 @@
package de.superx.bianalysis;

import de.superx.bianalysis.models.DimensionAttribute;
import de.superx.bianalysis.models.Measure;

public class ColumnElement {

    public String caption;
    public String header;
    public String dimensionAttributeFilter;
    public Measure measure;
    public int columnNumber;

    public ColumnElement(String caption, String dimensionAttributeFilter) {
        this.caption = caption;
        this.dimensionAttributeFilter = dimensionAttributeFilter;
    }

    public ColumnElement(String caption, String dimensionAttributeFilter, Measure measure, int col) {
        this.caption = caption;
        this.dimensionAttributeFilter = dimensionAttributeFilter;
        this.measure = measure;
        this.columnNumber = col;
    }

    public ColumnElement(Measure measure, int index) {
        this.caption = "Kennzahl|" + measure.getId().composedId;
        this.header = "Kennzahl|" + measure.getCaption();
        this.measure = measure;
        this.columnNumber = index;
    }

    public ColumnElement(ColumnElement currentColumnElement) {
        this.caption = currentColumnElement.caption;
        this.dimensionAttributeFilter = currentColumnElement.dimensionAttributeFilter;
        this.measure = currentColumnElement.measure;
    }

    /**
     * Builds the attribute part of a column's 'field' member.
     *
     * The attribute part is a crucial component of the column's identifier
     * and typically consists of IDs and associated values.
     *
     * <p>Example of an attribute part:
     * <pre>
     * "conf:123: conf:124|weiblich"
     * </pre>
     * </p>
     *
     * <p>In the context of a complete 'field' member, it might appear as:
     * <pre>
     * "conf:123: conf:124|weiblich || Kennzahl|res:123"
     * </pre>
     * where the part before "||" is the attribute part and the part after it is the measure.</p>
     *
     * The 'field' member serves as a unique identifier for each column.
     *
     * @see ColumnElementBuilder For the complete column building process
     */
    public static String buildField(DimensionAttribute attr, String value) {
        // The conformed id takes precedence, so that we can merge reports
        String attrId = attr.getAttrConformedId();
        if(attrId == null) {
            attrId = attr.getStringId();
        }

        String dimId = attr.getDimConformedId();
        if(dimId == null) {
            dimId = attr.getDimId();
        }

        return dimId + ": " + attrId + "|" + value;
    }

    public static String buildHeader(DimensionAttribute attr, String value) {
        return attr.getCaption() + ": " + attr.getCaption() + "|" + value;
    }

    public static String buildFilter(DimensionAttribute attr, String value) {
        return attr.getDimensionTableAlias() + "." + attr.getColumnname() + " = '" + value + "'";
    }

    public void setHeader(String finalHeader) {
        this.header = finalHeader;
    }

}
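To make the 'field' format described in the Javadoc above concrete, here is a small, self-contained sketch (not part of the diff; the class name and variable names are illustrative). It splits the example value into its attribute part and measure part, mirroring how ExcelSheetBuilder.createHeaderGrid later parses the header strings with split("\\|\\|") and split("\\|").

public class FieldFormatSketch {
    public static void main(String[] args) {
        // Example 'field' value taken from the Javadoc of ColumnElement.buildField
        String field = "conf:123: conf:124|weiblich || Kennzahl|res:123";

        // Everything before "||" is the attribute part, everything after it is the measure part
        String[] parts = field.split("\\|\\|");
        String attributePart = parts[0].trim(); // "conf:123: conf:124|weiblich"
        String measurePart = parts[1].trim();   // "Kennzahl|res:123"

        // The attribute part itself has the shape "<dimension id>: <attribute id>|<value>"
        String value = attributePart.split("\\|")[1]; // "weiblich"

        System.out.println(attributePart + " / " + measurePart + " / " + value);
    }
}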
@@ -0,0 +1,157 @@
package de.superx.bianalysis;

import java.util.ArrayList;
import java.util.List;
import java.util.StringJoiner;

import org.apache.log4j.Logger;

import de.superx.bianalysis.models.DimensionAttribute;
import de.superx.bianalysis.models.Filter;
import de.superx.bianalysis.models.Measure;
import de.superx.common.NotYetImplementedException;

public class ColumnElementBuilder {

    private static Logger logger = Logger.getLogger(ColumnElementBuilder.class);

    /**
     * Let's assume we have two dimensions X and Y, each with one attribute: DA for X
     * and DB for Y. Both attributes have two possible values, DA1, DA2 and DB1, DB2. There
     * also exist two measures M1, M2.
     *
     * If the user wants to see all attributes and measures, the header of the cross table looks like this:
     *
     * +-----------------------+----------------------+
     * |          DA1          |         DA2          |
     * +-----------+-----------+-----------+----------+
     * |    DB1    |    DB2    |    DB1    |   DB2    |
     * +-----+-----+-----+-----+-----+-----+-----+----+
     * | M1  | M2  | M1  | M2  | M1  | M2  | M1  | M2 |
     * +=====+=====+=====+=====+=====+=====+=====+====+
     * |     |     |     |     |     |     |     |    |
     * +-----+-----+-----+-----+-----+-----+-----+----+
     *
     * This header would be defined as follows:
     *
     * "X: DA | DA1 || Y: DB | DB1 || Kennzahl| M1"
     * "X: DA | DA1 || Y: DB | DB1 || Kennzahl| M2"
     * "X: DA | DA1 || Y: DB | DB2 || Kennzahl| M1"
     * "X: DA | DA1 || Y: DB | DB2 || Kennzahl| M2"
     * "X: DA | DA2 || Y: DB | DB1 || Kennzahl| M1"
     * "X: DA | DA2 || Y: DB | DB1 || Kennzahl| M2"
     * "X: DA | DA2 || Y: DB | DB2 || Kennzahl| M1"
     * "X: DA | DA2 || Y: DB | DB2 || Kennzahl| M2"
     *
     * Every single line is represented by one 'ColumnElement'.
     * @throws NotYetImplementedException
     */
    public static List<ColumnElement> buildColumnElements(ReportMetadata metadata) {

        List<Filter> filters = metadata.filters;
        List<Measure> measures = metadata.measures;
        List<DimensionAttribute> dimensionAttributes = metadata.topDimensionAttributes;

        List<ColumnElement> columnElements = new ArrayList<ColumnElement>();
        final String HEADER_DIVIDER = " || ";
        final String KENNZAHL_IDENTIFIER = "Kennzahl|";

        if(measures == null || measures.isEmpty()) {
            // edge case 1: no measures were selected, simply return the empty columnElements list
            return columnElements;
        }

        // for every column there exists an offset of 'maxBridgeLvl' if a hierarchy attribute was selected
        int colStartPoint = metadata.getHierarchyAttributes().size() * metadata.maxBridgeLvl;
        if(dimensionAttributes == null || dimensionAttributes.isEmpty()) {
            // edge case 2: no dimension attributes were selected, only display the measures
            for (Measure measure : measures) {
                columnElements.add(new ColumnElement(measure, colStartPoint + columnElements.size()));
            }
            return columnElements;
        }

        // for every single column combination (one list of combined attribute values) we build one 'ColumnElement' object
        List<List<String>> dimAttrCombinations = cartesianProductOfDimensionAttributeValues(dimensionAttributes, filters);
        for (int i = 0; i < dimAttrCombinations.size(); i++) {
            StringJoiner captionJoiner = new StringJoiner(HEADER_DIVIDER);
            StringJoiner headerJoiner = new StringJoiner(HEADER_DIVIDER);
            StringJoiner filterJoiner = new StringJoiner(" AND ");
            List<String> comb = dimAttrCombinations.get(i);
            for(int j = 0; j < comb.size(); j++) {
                DimensionAttribute attr = dimensionAttributes.get(j);
                String value = comb.get(j);
                captionJoiner.add(ColumnElement.buildField(attr, value));
                headerJoiner.add(ColumnElement.buildHeader(attr, value));
                filterJoiner.add(ColumnElement.buildFilter(attr, value));
            }
            String partialCaption = captionJoiner.toString();
            String partialHeader = headerJoiner.toString();
            String filter = filterJoiner.toString();
            for (Measure measure : measures) {
                String finalCaption = partialCaption + HEADER_DIVIDER + KENNZAHL_IDENTIFIER + measure.getId().composedId;
                String finalHeader = partialHeader + HEADER_DIVIDER + KENNZAHL_IDENTIFIER + measure.getCaption();
                ColumnElement colElement = new ColumnElement(finalCaption, filter, measure, colStartPoint + columnElements.size());
                colElement.setHeader(finalHeader);
                columnElements.add(colElement);
            }
        }

        return columnElements;
    }

    /**
     * Computes all possible combinations of dimension attribute values.
     * Each individual combination corresponds to one specific column.
     *
     * Example
     * Input: DimensionAttributes = {DA, DB}, each with two possible values DA1, DA2, DB1, DB2, Filters = { }
     * Output: {{DA1, DB1}, {DA1, DB2}, {DA2, DB1}, {DA2, DB2}}
     *
     * If the user chooses the following Filter = {DB2}
     * Output: {{DA1, DB2}, {DA2, DB2}}
     *
     * @param dimensionAttributes The list of chosen dimension attributes.
     * @param filters The list of chosen filters.
     * @return A list containing the cartesian product of all possible combinations for a set of dimension attributes and filters.
     */
    private static List<List<String>> cartesianProductOfDimensionAttributeValues(List<DimensionAttribute> dimensionAttributes, List<Filter> filters){
        List<List<String>> allDimAttrVals = new ArrayList<>();
        for (DimensionAttribute attr : dimensionAttributes) {
            //if(attr.bridge != null) {
            //    continue;
            //}
            // did the user choose a filter for this attribute?
            Filter compoundFilter = Filter.findFilterById(filters, attr.getId());
            if(compoundFilter != null) {
                // if yes, only use the filter values
                allDimAttrVals.add(compoundFilter.filterValues);
            } else {
                // if no, use all possible attribute values
                allDimAttrVals.add(attr.getDimensionAttributeValues());
            }
        }
        // compute and return all possible column combinations
        return cartesian(allDimAttrVals);
    }

    private static List<List<String>> cartesian(List<List<String>> lists) {
        List<List<String>> result = new ArrayList<>();
        if(lists.size() == 0) {
            result.add(new ArrayList<>());
            return result;
        }
        List<String> curr = lists.get(0);
        List<List<String>> remainingLists = cartesian(lists.subList(1, lists.size()));
        for (String val : curr) {
            for (List<String> list : remainingLists) {
                List<String> resultList = new ArrayList<>();
                resultList.add(val);
                resultList.addAll(list);
                result.add(resultList);
            }
        }
        return result;
    }
}
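To see the recursion in cartesian(...) produce exactly the combinations listed in the Javadoc above, here is a minimal, self-contained sketch (not part of the diff; the class name and the literal values DA1/DA2/DB1/DB2 are illustrative placeholders taken from that Javadoc example):

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only: mirrors the recursive cartesian(...) helper above
// to show how {DA1, DA2} x {DB1, DB2} expands into the four column combinations.
public class CartesianSketch {

    static List<List<String>> cartesian(List<List<String>> lists) {
        List<List<String>> result = new ArrayList<>();
        if (lists.isEmpty()) {
            result.add(new ArrayList<>());
            return result;
        }
        for (String val : lists.get(0)) {
            for (List<String> rest : cartesian(lists.subList(1, lists.size()))) {
                List<String> combination = new ArrayList<>();
                combination.add(val);
                combination.addAll(rest);
                result.add(combination);
            }
        }
        return result;
    }

    public static void main(String[] args) {
        List<List<String>> input = List.of(List.of("DA1", "DA2"), List.of("DB1", "DB2"));
        // Prints: [[DA1, DB1], [DA1, DB2], [DA2, DB1], [DA2, DB2]]
        System.out.println(cartesian(input));
    }
}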
@@ -0,0 +1,415 @@
package de.superx.bianalysis;

import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.stream.Collectors;

import org.apache.poi.ss.usermodel.BorderStyle;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Font;
import org.apache.poi.ss.usermodel.Footer;
import org.apache.poi.ss.usermodel.Header;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.util.CellRangeAddress;
import org.apache.poi.xssf.usermodel.XSSFCellStyle;
import org.apache.poi.xssf.usermodel.XSSFSheet;
import org.apache.poi.xssf.usermodel.XSSFWorkbook;

import de.superx.bianalysis.models.InfoItem;
import de.superx.rest.model.Column;
import de.superx.rest.model.ColumnType;
import de.superx.rest.model.Result;
import de.superx.rest.model.ResultType;

public class ExcelSheetBuilder {

    private Result result;
    private XSSFWorkbook workbook;
    private XSSFSheet sheet;
    private String reportName;
    private String reportDescription;
    private String date;
    private int leftDimensionAttributes;
    private int topDimensionAttributes = 0;

    private List<Column> visibleColumns;
    private final boolean mergeCells = true;
    private final int startingRow = 1;

    private static HashMap<String, Integer> defaultStyles = new HashMap<>();

    public ExcelSheetBuilder(Result result) {
        this.result = result;
        this.visibleColumns = getVisibleColumns(result);
        this.workbook = new XSSFWorkbook();
        initializeDefaultStyles();

        leftDimensionAttributes = this.result.info.leftDimensionAttributes.size();
        if(this.result.info.topDimensionAttributes != null && this.result.info.topDimensionAttributes.size() > 0) {
            topDimensionAttributes = this.result.info.topDimensionAttributes.size();
        }
    }

    public XSSFWorkbook build() {
        int rowNum = startingRow;
        rowNum = createRowsFromGrid(createReportInfoGrid(), rowNum);
        rowNum += 2; // rows between report info and header
        int reportInfoEnd = rowNum;

        String[][] grid = createHeaderGrid();

        rowNum = createRowsFromGrid(grid, rowNum);
        rowNum = createDataRows(rowNum);
        rowNum = createTotalRow(rowNum);

        if(mergeCells) {
            mergeHeaderCells(grid, 0, reportInfoEnd);
        }

        styleHeaderCells(reportInfoEnd, grid);
        styleDataCells(reportInfoEnd + grid.length);
        styleTotalRowCells(rowNum);
        styleReportInfoCells();

        Footer footer = sheet.getFooter();
        Header header = sheet.getHeader();
        header.setLeft(reportName);
        header.setRight(this.date);
        footer.setRight("Seite &P von &N");

        return workbook;
    }

    private void initializeDefaultStyles() {
        // report info cells
        XSSFCellStyle infoStyle = workbook.createCellStyle();
        Font infoFont = workbook.createFont();
        infoFont.setBold(true);
        infoStyle.setFont(infoFont);

        defaultStyles.put("info", Integer.valueOf(infoStyle.getIndex()));

        // header
        XSSFCellStyle headerStyle = workbook.createCellStyle();
        headerStyle.setBorderBottom(BorderStyle.THIN);
        headerStyle.setBorderLeft(BorderStyle.THIN);
        headerStyle.setBorderRight(BorderStyle.THIN);
        headerStyle.setBorderTop(BorderStyle.THIN);
        Font headerFont = workbook.createFont();
        headerFont.setBold(true);
        headerStyle.setFont(headerFont);

        defaultStyles.put("header", Integer.valueOf(headerStyle.getIndex()));

        // data
        XSSFCellStyle dataStyle = workbook.createCellStyle();
        dataStyle.setBorderBottom(BorderStyle.HAIR);
        dataStyle.setBorderLeft(BorderStyle.HAIR);
        dataStyle.setBorderRight(BorderStyle.HAIR);
        dataStyle.setBorderTop(BorderStyle.HAIR);

        defaultStyles.put("data", Integer.valueOf(dataStyle.getIndex()));

        // total
        XSSFCellStyle totalStyle = workbook.createCellStyle();
        totalStyle.setBorderBottom(BorderStyle.THIN);
        totalStyle.setBorderLeft(BorderStyle.THIN);
        totalStyle.setBorderRight(BorderStyle.THIN);
        totalStyle.setBorderTop(BorderStyle.DOUBLE);
        Font totalFont = workbook.createFont();
        totalFont.setBold(true);
        totalStyle.setFont(totalFont);

        defaultStyles.put("total", Integer.valueOf(totalStyle.getIndex()));

    }

    private void styleTotalRowCells(int rowNum) {
        int current = rowNum;
        Row row = sheet.getRow(--current);
        for (int i = 0; i < visibleColumns.size(); i++) {
            Cell cell = row.getCell(i);
            cell.setCellStyle(getTotalCellStyle(workbook));
        }
    }

    private int createTotalRow(int startFrom) {
        de.superx.rest.model.Row totalRow = result.getTotalRow();
        int rowNum = startFrom;
        Row row = sheet.createRow(rowNum++);
        Cell labelCell = row.createCell(0);
        labelCell.setCellValue("Gesamt");
        for (int i = 1; i < visibleColumns.size(); i++) {
            Column col = visibleColumns.get(i);
            Cell cell = row.createCell(i);
            if(col.type.equals(ColumnType.StringColumn)) {
                cell.setCellValue("");
            } else {
                Object obj = totalRow.cells.get(col.field);
                if(obj == null) {
                    cell.setCellValue("");
                    continue;
                }
                Double value = Double.valueOf(String.valueOf(obj));
                cell.setCellValue(value.doubleValue());
            }
        }
        return rowNum;
    }

    private void styleReportInfoCells() {
        Row row = sheet.getRow(startingRow);
        Cell cell = row.getCell(0);
        cell.setCellStyle(workbook.getCellStyleAt(defaultStyles.get("info").intValue()));
    }

    private String[][] createReportInfoGrid() {

        List<List<String>> gridList = new ArrayList<>();
        gridList.add(List.of("Informationen zur BI-Analyse", ""));
        gridList.add(List.of("Name:", this.reportName));
        gridList.add(List.of("Beschreibung:", this.reportDescription));

        String sachgebiet = this.result.info.sachgebiete.stream().collect(Collectors.joining(", "));
        String theme = getInfoCaptions(this.result.info.facttables);
        String measures = getInfoCaptions(this.result.info.measures);
        String topAttributes = getInfoCaptions(this.result.info.topDimensionAttributes);
        String leftAttributes = getInfoCaptions(this.result.info.leftDimensionAttributes);
        String filter = this.result.info.filter.stream().collect(Collectors.joining(", "));
        String lastUpdateBad = this.result.info.lastUpdateBiad;

        if(sachgebiet != null) {
            gridList.add(List.of("Sachgebiet:", sachgebiet));
        }
        if(theme != null) {
            gridList.add(List.of("Thema:", theme));
        }
        if(measures != null) {
            gridList.add(List.of("Kennzahlen:", measures));
        }
        if(leftAttributes != null) {
            gridList.add(List.of("Zeilenattribute:", leftAttributes));
        }
        if(topAttributes != null) {
            gridList.add(List.of("Spaltenattribute:", topAttributes));
        }
        if(filter != null) {
            gridList.add(List.of("Filter:", filter));
        }
        if(lastUpdateBad != null) {
            gridList.add(List.of("Letztes Update von BI-Analyse-Daten:", lastUpdateBad));
        }
        if(result.resultType.equals(ResultType.FlatTable)) {
            gridList.add(List.of("Tabellentyp:", "Flache Tabelle"));
        } else if(result.resultType.equals(ResultType.DrilldownTableGroupable)) {
            gridList.add(List.of("Tabellentyp:", "Hierarchische Tabelle"));
        }
        return listToStringGrid(gridList);
    }

    private static String getInfoCaptions(List<InfoItem> infoItems) {
        if(infoItems != null && infoItems.size() > 0) {
            return infoItems.stream().map(f -> f.caption).collect(Collectors.joining(", "));
        }
        return "";
    }

    private static String[][] listToStringGrid(List<List<String>> list) {
        String[][] result = new String[list.size()][list.get(0).size()];
        for (int i = 0; i < result.length; i++) {
            for (int j = 0; j < result[i].length; j++) {
                result[i][j] = list.get(i).get(j);
            }
        }
        return result;
    }

    private static CellRangeAddress mergeCellByOffset(int firstRow, int lastRow, int firstCol, int lastCol, int xOffset, int yOffset) {
        return new CellRangeAddress(firstRow + yOffset, lastRow + yOffset, firstCol + xOffset, lastCol + xOffset);
    }

    private void styleHeaderCells(int start, String[][] grid) {
        for (int i = 0; i < grid.length; i++) {
            Row row = this.sheet.getRow(i + start);
            row.setHeightInPoints((short) 25);
            for (int j = 0; j < grid[i].length; j++) {
                Cell cell = row.getCell(j);
                cell.setCellStyle(getHeaderStyle(workbook));
            }
        }
    }

    private static CellStyle getHeaderStyle(XSSFWorkbook workbook) {
        return workbook.getCellStyleAt(defaultStyles.get("header").intValue());
    }

    private static CellStyle getDataCellStyle(XSSFWorkbook workbook) {
        return workbook.getCellStyleAt(defaultStyles.get("data").intValue());
    }

    private static CellStyle getTotalCellStyle(XSSFWorkbook workbook) {
        return workbook.getCellStyleAt(defaultStyles.get("total").intValue());
    }

    private void styleDataCells(int startDataCells) {
        for (int i = startDataCells; i < startDataCells + this.result.rows.size(); i++) {
            Row row = this.sheet.getRow(i);
            for (int j = 0; j < this.visibleColumns.size(); j++) {
                Cell cell = row.getCell(j);
                if(this.visibleColumns.get(j).groupable) {
                    cell.setCellStyle(getHeaderStyle(workbook));
                } else {
                    cell.setCellStyle(getDataCellStyle(workbook));
                }
            }
        }
    }

    private void mergeHeaderCells(String grid[][], int xOffset, int yOffset) {
        // merge header grid cells
        if(topDimensionAttributes > 0) {
            for(int i = 0; i < grid.length; i++) {
                String lastCell = "";
                int cellsToMerge = 0;
                for (int j = 0; j < grid[i].length; j++) {
                    String currentCell = grid[i][j];
                    if(!currentCell.equals(lastCell) && cellsToMerge > 0) {
                        sheet.addMergedRegion(mergeCellByOffset(i, i, j - cellsToMerge - 1, j - 1, xOffset, yOffset));
                    }
                    if(currentCell.equals(lastCell)) {
                        cellsToMerge++;
                    } else {
                        cellsToMerge = 0;
                    }
                    lastCell = currentCell;
                }
                if(cellsToMerge > 0) {
                    int j = grid[i].length - 1;
                    sheet.addMergedRegion(mergeCellByOffset(i, i, j - cellsToMerge, j, xOffset, yOffset));
                }
            }
        }

        // merge left header cols
        if(grid.length > 1) {
            for (int i = 0; i < leftDimensionAttributes; i++) {
                sheet.addMergedRegion(mergeCellByOffset(0, grid.length - 1, i, i, xOffset, yOffset));
            }
        }
    }

    private int createRowsFromGrid(String[][] grid, int startFrom) {
        if(grid == null) {
            return startFrom;
        }
        int rowNum = startFrom;
        for (int i = 0; i < grid.length; i++) {
            Row poiRow = sheet.createRow(rowNum++);
            int colNum = 0;
            for (int j = 0; j < grid[i].length; j++) {
                Cell cell = poiRow.createCell(colNum++);
                if(grid[i][j] != null && !grid[i][j].isBlank()) {
                    cell.setCellValue(grid[i][j]);
                } else {
                    cell.setBlank();
                }
            }
        }
        return rowNum;
    }

    private int createDataRows(int startFrom) {
        int rowNum = startFrom;
        // build cells from row data without the sum row
        List<de.superx.rest.model.Row> resultRows = result.rows.stream().filter(r -> r.aggregated != -1).collect(Collectors.toList());
        Row[] rows = new Row[resultRows.size()];
        for (int i = 0; i < resultRows.size(); i++) {
            rows[i] = sheet.createRow(rowNum++);
            rows[i].setHeightInPoints((short) 20);
        }
        for (int i = 0; i < visibleColumns.size(); i++) {
            Column col = visibleColumns.get(i);
            for(int j = 0; j < resultRows.size(); j++) {
                Object obj = resultRows.get(j).cells.get(col.field);
                String objVal = String.valueOf(obj);
                Cell cell = rows[j].createCell(i);
                if(obj == null) {
                    cell.setBlank();
                    continue;
                }
                if(col.type == ColumnType.IntegerColumn || col.type == ColumnType.DecimalColumn) {
                    Double value = Double.valueOf(objVal);
                    cell.setCellValue(value.doubleValue());
                } else {
                    cell.setCellValue(obj.toString());
                }
                //if(col.groupable) {
                //    cell.setCellStyle(style);
                //}
            }
        }
        return rowNum;
    }

    private String[][] createHeaderGrid(){
        int colSize = visibleColumns.size();
        int rowSize = topDimensionAttributes + 1;
        String[][] grid = new String[rowSize][colSize];

        for(int i = 0; i < colSize; i++) {
            Column column = this.visibleColumns.get(i);
            String[] columnHeader = column.header.split("\\|\\|");
            boolean isLeftDimensionAttributeColumn = columnHeader.length == 1 && !columnHeader[0].contains("|");
            for(int j = 0; j < rowSize; j++) {
                if(isLeftDimensionAttributeColumn) {
                    grid[j][i] = columnHeader[0];
                } else {
                    String header = columnHeader[j];
                    String[] headerValues = header.split("\\|");
                    grid[j][i] = headerValues[1];
                }
            }
        }
        return grid;
    }

    public ExcelSheetBuilder withFileName(String name) {
        this.sheet = workbook.createSheet(name);
        return this;
    }

    public ExcelSheetBuilder withReportName(String name) {
        this.reportName = replaceEmptyString(name, "Nicht gespeicherte BI-Analyse");
        return this;
    }

    public ExcelSheetBuilder withDescription(String description) {
        this.reportDescription = replaceEmptyString(description, "-");
        return this;
    }

    private static String replaceEmptyString(String value, String replacement) {
        if(value == null || value.isBlank()) {
            return replacement;
        }
        return value;
    }

    public ExcelSheetBuilder withDate(Date currentDate) {
        this.date = new SimpleDateFormat("dd.MM.yyyy HH:mm").format(currentDate);
        return this;
    }

    private static List<Column> getVisibleColumns(Result result) {
        return result.columns
            .stream()
            .filter(col -> !col.hidden)
            .collect(Collectors.toList());
    }

}
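For orientation, a minimal usage sketch of the builder above (not part of the diff). The sheet name, report name, description, output path, and the assumption that 'result' is an already populated de.superx.rest.model.Result are all illustrative. withFileName(...) has to be called before build(), because it creates the worksheet that build() writes into.

package de.superx.bianalysis; // assumption: the sketch lives next to ExcelSheetBuilder

import java.io.FileOutputStream;
import java.util.Date;

import org.apache.poi.xssf.usermodel.XSSFWorkbook;

import de.superx.rest.model.Result;

class ExcelExportSketch {

    // Builds the workbook via the fluent API and writes it to disk.
    static void export(Result result) throws Exception {
        XSSFWorkbook workbook = new ExcelSheetBuilder(result)
                .withFileName("BI-Analyse")               // sheet name (illustrative)
                .withReportName("Beispiel-Report")        // illustrative
                .withDescription("Beispiel-Beschreibung") // illustrative
                .withDate(new Date())
                .build();
        try (FileOutputStream out = new FileOutputStream("bi-analyse.xlsx")) {
            workbook.write(out);
        }
    }
}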
@@ -0,0 +1,21 @@
package de.superx.bianalysis;

import de.superx.bianalysis.metadata.Identifier;

public class FaultyMetadataException extends RuntimeException {

    private static final long serialVersionUID = -5959640234409065198L;

    public FaultyMetadataException(String message) {
        super(message);
    }

    public FaultyMetadataException(Identifier id) {
        super("Metadata Object with ID: '" + id.composedId + "' does not exist.");
    }

    public FaultyMetadataException(Identifier id, String metaType) {
        super("Metadata " + metaType + " with ID: '" + id.composedId + "' does not exist.");
    }

}
@@ -0,0 +1,57 @@
package de.superx.bianalysis;

import java.util.ArrayList;
import java.util.List;

import de.superx.bianalysis.metadata.Identifier;
import de.superx.bianalysis.models.Filter;
import de.superx.bianalysis.service.DbMetaAdapter;

public class ReportDefinition {

    public List<Identifier> factTableIds;
    public List<Identifier> leftDimensionAttributeIds;
    public List<Identifier> topDimensionAttributeIds;
    public List<Identifier> measureIds;
    public List<Filter> filters;
    public boolean hideEmptyColumns;

    public ReportDefinition() {
        this.factTableIds = new ArrayList<>();
        this.leftDimensionAttributeIds = new ArrayList<>();
        this.topDimensionAttributeIds = new ArrayList<>();
        this.measureIds = new ArrayList<>();
        this.filters = new ArrayList<>();
        this.hideEmptyColumns = false;
    }

    public ReportDefinition(ReportDefinition definition) {
        super();
        this.factTableIds = definition.factTableIds;
        this.topDimensionAttributeIds = definition.topDimensionAttributeIds;
        this.measureIds = definition.measureIds;
        this.filters = definition.filters;
        this.leftDimensionAttributeIds = new ArrayList<>();
        this.hideEmptyColumns = definition.hideEmptyColumns;
    }

    public ReportMetadata getReportMetadata(DbMetaAdapter dbAdapter, Identifier factTableId) {
        ReportMetadata reportMetadata = new ReportMetadata(this, factTableId, dbAdapter);
        return reportMetadata;
    }

    public static List<Identifier> getAttributesForDefinitions(List<ReportDefinition> definitions){
        List<Identifier> ids = new ArrayList<>();
        for (ReportDefinition def : definitions) {
            for (Identifier id : def.topDimensionAttributeIds) {
                ids.add(id);
            }
            for (Identifier id : def.leftDimensionAttributeIds) {
                ids.add(id);
            }
        }
        return ids;
    }

}
@@ -0,0 +1,338 @@
package de.superx.bianalysis;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import de.superx.bianalysis.metadata.Identifier;
import de.superx.bianalysis.models.DimensionAttribute;
import de.superx.bianalysis.models.FactTable;
import de.superx.bianalysis.models.Filter;
import de.superx.bianalysis.models.InfoItem;
import de.superx.bianalysis.models.Measure;
import de.superx.bianalysis.service.DbMetaAdapter;
import de.superx.jdbc.entity.Sachgebiet;

public class ReportMetadata {

    public final FactTable factTable;
    public final Sachgebiet sachgebiet;
    public final List<DimensionAttribute> leftDimensionAttributes;
    public final List<DimensionAttribute> topDimensionAttributes;
    public final List<Measure> measures;
    public final List<Filter> filters;

    public String lastBiadUpdate;

    // only used if a hierarchy dimension is present in the left dimension attributes
    public int maxBridgeLvl;
    public int minBridgeLvl;

    public DbMetaAdapter dbMetaAdapter;

    public boolean hideEmptyColumns;

    public ReportMetadata(ReportDefinition reportDefinition, Identifier factTableId, DbMetaAdapter dbAdapter) {
        this.dbMetaAdapter = dbAdapter;
        if(factTableId == null) { // merged report
            this.factTable = new FactTable();
            this.sachgebiet = new Sachgebiet();
        } else {
            this.factTable = dbAdapter.getFactTable(factTableId);
            this.sachgebiet = dbAdapter.getSachgebietById(this.factTable.getSachgebiettid());
        }
        List<DimensionAttribute> databaseOrderedLeftDimensionAttributes = dbAdapter.getDimensionAttributeMetadata(reportDefinition.leftDimensionAttributeIds, factTableId);
        this.leftDimensionAttributes = reorderDimensionAttributesToReportOrder(databaseOrderedLeftDimensionAttributes, reportDefinition, false);
        List<DimensionAttribute> databaseOrderedTopDimensionAttributes = dbAdapter.getDimensionAttributeMetadata(reportDefinition.topDimensionAttributeIds, factTableId);
        this.topDimensionAttributes = reorderDimensionAttributesToReportOrder(databaseOrderedTopDimensionAttributes, reportDefinition, true);
        List<Measure> databaseOrderedMeasures = dbAdapter.getMeasureMetadata(reportDefinition.measureIds);
        this.measures = reorderMeasuresToReportOrder(databaseOrderedMeasures, reportDefinition);
        if (reportDefinition.filters != null) {
            this.filters = dbAdapter.getFilterMetadata(reportDefinition.filters);
        } else {
            this.filters = new ArrayList<Filter>();
        }
        this.setTopDimensionAttributeValues(dbAdapter);
        if(factTableId != null) {
            this.setMaxBridgeLvl();
        } else {
            // for merged reports
            this.setMaxBridgeLvlForConformed(reportDefinition.factTableIds);
        }
        this.lastBiadUpdate = dbAdapter.getLastUpdate(440);
        this.hideEmptyColumns = reportDefinition.hideEmptyColumns;
    }

    public ReportMetadata(ReportMetadata metadata, List<DimensionAttribute> leftDimensionAttributes) {
        this.dbMetaAdapter = metadata.dbMetaAdapter;
        this.factTable = metadata.factTable;
        this.sachgebiet = metadata.sachgebiet;
        this.topDimensionAttributes = metadata.topDimensionAttributes;
        this.measures = metadata.measures;
        this.filters = metadata.filters;
        this.leftDimensionAttributes = leftDimensionAttributes;
        this.hideEmptyColumns = metadata.hideEmptyColumns;
    }

    public ReportMetadata() {
        this.factTable = new FactTable();
        this.sachgebiet = new Sachgebiet();
        this.leftDimensionAttributes = new ArrayList<>();
        this.topDimensionAttributes = new ArrayList<>();
        this.measures = new ArrayList<>();
        this.filters = new ArrayList<>();
    }

    public List<DimensionAttribute> getSortOrderLeftDimensionAttributes(){
        return leftDimensionAttributes.stream().filter(d -> d.getSortOrderColumn() != null).collect(Collectors.toList());
    }

    private void setMaxBridgeLvl() {
        List<DimensionAttribute> attrs = leftDimensionAttributes
            .stream()
            .filter(a -> a.isHierarchy())
            .collect(Collectors.toList());
        if(attrs.size() > 1) {
            throw new RuntimeException("NOT YET IMPLEMENTED: There can only be one hierarchy attribute.");
        }
        if(!attrs.isEmpty()) {
            this.maxBridgeLvl = dbMetaAdapter.getBridgeMaxLevel(attrs.get(0), this);
            this.minBridgeLvl = dbMetaAdapter.getBridgeMinLevel(getHierarchyFilter(), this.maxBridgeLvl, attrs.get(0).getTablename());
        }
    }

    private void setMaxBridgeLvlForConformed(List<Identifier> factTableIds) {
        List<DimensionAttribute> attrs = leftDimensionAttributes
            .stream()
            .filter(a -> a.isHierarchy())
            .collect(Collectors.toList());
        if(!attrs.isEmpty()) {
            DimensionAttribute attr = attrs.get(0);
            int lvl = 0;
            for (Identifier fact : factTableIds) {
                String name = dbMetaAdapter.getFactTableNameMaxBridgeLvl(fact, attr.getId());
                if(name == null || name.isBlank()) {
                    continue;
                }
                int value = -1;
                Identifier checkedAttr = dbMetaAdapter.checkIfFactTableHasDimensionAttribute(attr.getId(), fact);
                if (checkedAttr != null && !checkedAttr.equals(attr.getId())) {
                    DimensionAttribute rolePlayingAttribute = dbMetaAdapter.getDimensionAttributeMetadataById(checkedAttr);
                    value = dbMetaAdapter.getBridgeMaxLevel(rolePlayingAttribute, this, name);
                }
                if (value > lvl) {
                    lvl = value;
                }
            }
            this.maxBridgeLvl = lvl;
        }
    }

    private void setTopDimensionAttributeValues(DbMetaAdapter dbAdapter) {
        for(DimensionAttribute attr : this.topDimensionAttributes) {
            Filter filter = getFilterForDimensionAttribute(attr.getId());
            if(filter != null) {
                attr.setDimensionAttributeValues(filter.filterValues);
            } else {
                attr.setDimensionAttributeValues(dbAdapter.getDimensionAttributeValues(attr, null, null));
            }
        }
    }

    private Filter getFilterForDimensionAttribute(Identifier id) {
        return this.filters
            .stream()
            .filter(f -> f.dimensionAttributeId.equals(id))
            .findFirst()
            .orElse(null);
    }

    private static List<Measure> reorderMeasuresToReportOrder(List<Measure> measures, ReportDefinition reportDefinition) {
        List<Measure> orderedMeasures = new ArrayList<Measure>();
        reportDefinition.measureIds.forEach(measureId -> {
            Measure nextMeasure = measures
                .stream()
                .filter(measure -> measure.getId().equals(measureId))
                .findFirst()
                .orElse(null);
            orderedMeasures.add(nextMeasure);
        });
        return orderedMeasures;
    }

    public static List<DimensionAttribute> reorderDimensionAttributesToReportOrder(List<DimensionAttribute> dimensionAttributes, ReportDefinition reportDefinition, boolean isTopAttribute) {
        List<DimensionAttribute> orderedDimensionAttributes = new ArrayList<DimensionAttribute>();
        List<Identifier> attributeIds;
        if (isTopAttribute) {
            attributeIds = reportDefinition.topDimensionAttributeIds;
        } else {
            attributeIds = reportDefinition.leftDimensionAttributeIds;
        }
        attributeIds.forEach(attributeId -> {
            DimensionAttribute nextAttribute = dimensionAttributes
                .stream()
                .filter(dimensionAttribute -> dimensionAttribute.getId().equals(attributeId))
                .findFirst()
                .orElse(null);
            orderedDimensionAttributes.add(nextAttribute);
        });
        return orderedDimensionAttributes;
    }

    public DimensionAttribute getDimById(Identifier id) {
        DimensionAttribute attr = topDimensionAttributes
            .stream()
            .filter(a -> a.getDimensionId().equals(id))
            .findFirst()
            .orElse(null);

        if(attr != null) {
            return attr;
        }

        attr = leftDimensionAttributes
            .stream()
            .filter(a -> a.getDimensionId().equals(id))
            .findFirst()
            .orElse(null);

        return attr;
    }

    public DimensionAttribute getDimAttrById(Identifier id) {
        DimensionAttribute attr = topDimensionAttributes
            .stream()
            .filter(a -> a.getId().equals(id))
            .findFirst()
            .orElse(null);

        if(attr != null) {
            return attr;
        }

        attr = leftDimensionAttributes
            .stream()
            .filter(a -> a.getId().equals(id))
            .findFirst()
            .orElse(null);

        return attr;
    }

    /**
     * We want to join dimension tables only once. Therefore we need a list of unique ids of
     * the dimensions, otherwise we would duplicate our joins.
     */
    public List<DimensionAttribute> getUniqueDimensionAttributes(){

        Map<String, DimensionAttribute> joinTables = new HashMap<String, DimensionAttribute>();

        for (DimensionAttribute attr : leftDimensionAttributes) {
            if(!joinTables.containsKey(attr.getDimensionTableAlias())) {
                joinTables.put(attr.getDimensionTableAlias(), attr);
            }
        }

        for (DimensionAttribute attr : topDimensionAttributes) {
            if(!joinTables.containsKey(attr.getDimensionTableAlias())) {
                joinTables.put(attr.getDimensionTableAlias(), attr);
            }
        }

        // join the dimension if an attribute occurs in a filter
        for (Filter filter : filters) {
            Identifier attrId = filter.dimensionAttributeId;
            DimensionAttribute attr = dbMetaAdapter.getDimensionAttributeMetadataById(attrId);
            if(!joinTables.containsKey(attr.getDimensionTableAlias())) {
                joinTables.put(attr.getDimensionTableAlias(), attr);
            }
        }

        // join the dimension if a measure has a filter for an attribute
        for (Measure measure : measures) {
            if(measure.filterAttributeId != null) {
                Identifier attrId = measure.filterAttributeId;
                DimensionAttribute attr = dbMetaAdapter.getDimensionAttributeMetadataById(attrId);
                if(!joinTables.containsKey(attr.getDimensionTableAlias())) {
                    joinTables.put(attr.getDimensionTableAlias(), attr);
                }
            }
        }

        return new ArrayList<DimensionAttribute>(joinTables.values());
    }

    public List<InfoItem> getMeasureInfo() {
        if(measures != null && !measures.isEmpty()) {
            return measures.stream().map(m ->
                new InfoItem(m.getId().composedId, m.getCaption(), m.getDescription())).collect(Collectors.toList());
        }
        return null;
    }

    public List<InfoItem> getTopDimAttrAsInfo() {
        if(topDimensionAttributes != null && !topDimensionAttributes.isEmpty()) {
            return topDimensionAttributes.stream().map(m ->
                new InfoItem(m.getStringId(), m.getCaption(), m.getDescription())).collect(Collectors.toList());
        }
        return null;
    }

    public List<Filter> getFilterNoHierarchy() {
        List<Filter> filterNoBridge = new ArrayList<>();
        for (Filter filter : this.filters) {
            DimensionAttribute attr = dbMetaAdapter.getDimensionAttributeById(filter.dimensionAttributeId);
            if(!attr.isHierarchy()) {
                filterNoBridge.add(filter);
            }
        }
        return filterNoBridge;
    }

    public List<InfoItem> getLeftDimAttrAsInfo() {
        if(leftDimensionAttributes != null && !leftDimensionAttributes.isEmpty()) {
            return leftDimensionAttributes.stream().map(m ->
                new InfoItem(m.getStringId(), m.getCaption(), m.getDescription())).collect(Collectors.toList());
        }
        return null;
    }

    public List<String> getFilterAsInfo(){
        ArrayList<String> filterList = new ArrayList<String>();
        DimensionAttribute dimAttr;
        for (Filter filter : filters) {
            dimAttr = filter.getDimAttribute(this);
            if(dimAttr == null) {
                dimAttr = dbMetaAdapter.getDimensionAttributeById(filter.dimensionAttributeId);
            }
            filterList.add(" (" + dimAttr.getCaption() + ") " + filter.getValuesAsString());
        }
        return filterList;
    }

    public List<DimensionAttribute> getHierarchyAttributes() {
        return leftDimensionAttributes
            .stream()
            .filter(a -> a.isHierarchy())
            .collect(Collectors.toList());
    }

    public List<Filter> getHierarchyFilter(){
        List<Filter> hierarchyFilter = new ArrayList<>();
        for (Filter filter : this.filters) {
            if(isHierarchyFilter(filter)) {
                hierarchyFilter.add(filter);
            }
        }
        return hierarchyFilter;
    }

    public boolean isHierarchyFilter(Filter filter) {
        return dbMetaAdapter.isAttributeHierarchyBridge(filter.dimensionAttributeId);
    }

}
@@ -0,0 +1,536 @@
package de.superx.bianalysis;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

import javax.sql.DataSource;

import org.apache.log4j.Logger;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;

import de.superx.bianalysis.models.DimensionAttribute;
import de.superx.bianalysis.models.InfoItem;
import de.superx.bianalysis.models.Measure;
import de.superx.rest.model.Column;
import de.superx.rest.model.ColumnType;
import de.superx.rest.model.Item;
import de.superx.rest.model.Result;
import de.superx.rest.model.ResultType;
import de.superx.rest.model.Row;

public class ResultBuilder {

    private static final boolean IGNORE_SELF_LOOPS = true;

    private DataSource dataSource;

    private ReportMetadata reportMetadata;
    private List<ColumnElement> columnElements;

    Logger logger = Logger.getLogger(ResultBuilder.class);

    public ResultBuilder() {}

    // used for testing
    public ResultBuilder(ReportMetadata metadata, List<ColumnElement> columns) {
        this.reportMetadata = metadata;
        this.columnElements = columns;
    }

    public ResultBuilder(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    public void setReportMetadata(ReportMetadata reportMetadata) {
        this.reportMetadata = reportMetadata;
    }

    public void setColumnElements(List<ColumnElement> columnElements) {
        this.columnElements = columnElements;
    }

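    /*
     * Added note (illustrative, not generated output): assuming a single
     * hierarchy attribute with maxBridgeLvl = 3 and minBridgeLvl = 0, the
     * GROUP BY ROLLUP result read below contains rows such as
     *
     *   col0       col1        col2         -> row.aggregated
     *   "Fak. A"   "Inst. A1"  "Lehrst. X"      2   (most detailed row)
     *   "Fak. A"   "Inst. A1"  null             1   (aggregated over col2)
     *   "Fak. A"   null        null             0   (aggregated over col1 and col2)
     *   null       null        null            -1   (grand total row)
     *
     * The labels are made up; the point is that buildRowCells() starts at the
     * deepest level and decrements aggregationLvl once per null cell, which is
     * how detail rows, subtotal rows and the grand total row (aggregated == -1,
     * filtered out again in ExcelSheetBuilder.createDataRows) are told apart.
     */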
private Row buildRowCells(ResultSet rs) { |
||||
Row row = new Row(); |
||||
Map<String, Object> cells = new TreeMap<String, Object>(); |
||||
if (reportMetadata.leftDimensionAttributes != null && !reportMetadata.leftDimensionAttributes.isEmpty()) { |
||||
int aggregationLvl = reportMetadata.leftDimensionAttributes.size() -1; |
||||
for (DimensionAttribute dimensionAttribute : reportMetadata.leftDimensionAttributes) { |
||||
if(dimensionAttribute.isHierarchy()) { |
||||
try { |
||||
String prevLbl = ""; |
||||
int countLvl = 0; |
||||
aggregationLvl += reportMetadata.maxBridgeLvl - reportMetadata.minBridgeLvl - 1; |
||||
for (int i = reportMetadata.minBridgeLvl; i < reportMetadata.maxBridgeLvl; i++) { |
||||
Object cell = rs.getObject("col" + i); |
||||
String curLbl = (String) cell; |
||||
|
||||
if(cell == null) { |
||||
// An empty cell means a lower aggregation level because
|
||||
// of how the GROUP BY ROLLUP works.
|
||||
aggregationLvl--; |
||||
} |
||||
|
||||
if(IGNORE_SELF_LOOPS && |
||||
curLbl != null && |
||||
curLbl != "" && |
||||
curLbl.equals(prevLbl)) { |
||||
// If the cell label is equal to the previous cell label
|
||||
// then this row contains a self loop, meaning the node is
|
||||
// both its own parent and child. This happens due to the
|
||||
// GROUP BY ROLLUP part of the sql statement, which groups
|
||||
// columns in which the same node can appear right next to
|
||||
// each other in two columns.
|
||||
continue; |
||||
} |
||||
|
||||
String id = dimensionAttribute.getAttrConformedId(); |
||||
if(id == null) { |
||||
id = dimensionAttribute.getStringId(); |
||||
} |
||||
|
||||
String cellKey = id + " (Ebene " + countLvl + ")"; |
||||
|
||||
if(countLvl == 0) { |
||||
cellKey = dimensionAttribute.getAttrConformedId(); |
||||
} |
||||
|
||||
if(cell != null && cellKey != null) { |
||||
cells.put(cellKey, cell); |
||||
row.rowKey += cellKey + cell; |
||||
countLvl++; |
||||
} |
||||
|
||||
prevLbl = (String) cell; |
||||
} |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} else { |
||||
try { |
||||
Object val = rs.getObject(dimensionAttribute.getDimensionColumnAlias()); |
||||
if(val != null) { |
||||
String id = dimensionAttribute.getAttrConformedId(); |
||||
if(id == null) { |
||||
id = dimensionAttribute.getStringId(); |
||||
} |
||||
cells.put(id, val); |
||||
row.rowKey += id + val; |
||||
} else { |
||||
aggregationLvl--; |
||||
} |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
if(dimensionAttribute.getSortOrderColumn() != null) { |
||||
try { |
||||
String id = dimensionAttribute.getAttrConformedId(); |
||||
if(id == null) { |
||||
id = dimensionAttribute.getStringId(); |
||||
} |
||||
cells.put(id + "_sorting", rs.getObject(dimensionAttribute.getDimensionColumnAlias()+"_"+dimensionAttribute.getSortOrderColumn())); |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
try { |
||||
row.aggregated = aggregationLvl; |
||||
if(row.aggregated == -1) { |
||||
int colNum = this.reportMetadata.maxBridgeLvl + this.columnElements.size() - this.reportMetadata.measures.size(); |
||||
for (Measure measure : reportMetadata.measures) { |
||||
cells.put(getTotalCellHeaderPrefix(reportMetadata) + measure.getId().composedId, rs.getObject("col" + (colNum++))); |
||||
} |
||||
} |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
|
||||
} |
||||
if (!columnElements.isEmpty()) { |
||||
for (ColumnElement columnElement : columnElements) { |
||||
try { |
||||
cells.put(columnElement.caption, rs.getObject("col" + columnElement.columnNumber)); |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
} |
||||
row.cells = cells; |
||||
return row; |
||||
} |
||||
|
||||
|
||||
public static List<Column> buildColumns(ReportMetadata reportMetadata, List<ColumnElement> columnElements) { |
||||
List<Column> columns = new ArrayList<Column>(); |
||||
if (reportMetadata.leftDimensionAttributes != null) { |
||||
reportMetadata.leftDimensionAttributes.forEach((dimensionAttribute) -> { |
||||
String id = dimensionAttribute.getAttrConformedId(); |
||||
if(id == null) { |
||||
id = dimensionAttribute.getStringId(); |
||||
} |
||||
if(dimensionAttribute.isHierarchy()) { |
||||
int cnt = 0; |
||||
for (int i = reportMetadata.minBridgeLvl; i < reportMetadata.maxBridgeLvl - 1; i++) { |
||||
if(cnt == 0) { |
||||
columns.add(new Column(id, dimensionAttribute.getCaption(), ColumnType.StringColumn, true)); |
||||
} else { |
||||
String caption = id + " (Ebene " + cnt + ")"; |
||||
String header = dimensionAttribute.getCaption() + " (Ebene " + (cnt) + ")"; |
||||
columns.add(new Column(caption, header, ColumnType.HierarchyLevelColumn, true, id, false)); |
||||
} |
||||
cnt += 1; |
||||
} |
||||
} else { |
||||
if(dimensionAttribute.getSortOrderColumn() != null) { |
||||
columns.add(new Column(id, dimensionAttribute.getCaption(), ColumnType.StringColumn, true, dimensionAttribute.getStringId() + "_sorting", false)); |
||||
columns.add(new Column(id + "_sorting", dimensionAttribute.getCaption(), ColumnType.SortOrderColumn, false, dimensionAttribute.getStringId(), true)); |
||||
}else { |
||||
columns.add(new Column(id, dimensionAttribute.getCaption(), ColumnType.StringColumn, true)); |
||||
} |
||||
} |
||||
}); |
||||
} |
||||
if (!columnElements.isEmpty()) { |
||||
columnElements.forEach((columnElement) -> { |
||||
Column col = new Column(columnElement.caption, columnElement.header, columnElement.measure.getMeasureType(), false); |
||||
col.aggregation = columnElement.measure.getAggregationType(); |
||||
columns.add(col); |
||||
}); |
||||
} |
||||
return columns; |
||||
} |
||||
|
||||
public List<Row> getRowsForReport(String sqlStatement, Connection con) { |
||||
|
||||
List<Row> rows = null; |
||||
try (Statement stmt = con.createStatement(); |
||||
ResultSet rs = stmt.executeQuery(sqlStatement)) { |
||||
rows = buildRowList(rs); |
||||
} catch (SQLException e) { |
||||
System.out.println(sqlStatement); |
||||
throw new RuntimeException(e); |
||||
} |
||||
|
||||
return rows; |
||||
} |
||||
|
||||
public List<Row> buildRowList(ResultSet rs) throws SQLException { |
||||
List<Row> rows = new ArrayList<>(); |
||||
while (rs.next()) { |
||||
Row row = buildRowCells(rs); |
||||
rows.add(row); |
||||
} |
||||
List<Row> result = new ArrayList<>(); |
||||
if (reportMetadata.getHierarchyAttributes().size() > 0) { |
||||
HashMap<String, Row> keys = new HashMap<>(); |
||||
for (Row row : rows) { |
||||
if (keys.containsKey(row.rowKey)) { |
||||
Row currentRow = keys.get(row.rowKey ); |
||||
if (currentRow.aggregated > row.aggregated) { |
||||
keys.put(currentRow.rowKey , row); |
||||
} |
||||
// workaround: fix multiple aggregated rows, take only highest
|
||||
if(currentRow.aggregated == -1 && row.aggregated == -1) { |
||||
for(String cell : currentRow.cells.keySet()) { |
||||
Number currentCellVal = (Number) currentRow.cells.get(cell); |
||||
Number candidateVal = (Number) row.cells.get(cell); |
||||
if(candidateVal.doubleValue() > currentCellVal.doubleValue()) { |
||||
keys.put(currentRow.rowKey, row); |
||||
} |
||||
} |
||||
} |
||||
} else { |
||||
keys.put(row.rowKey, row); |
||||
} |
||||
} |
||||
rows = new ArrayList<Row>(keys.values()); |
||||
} |
||||
|
||||
for (Row row : rows) { |
||||
if(row != null) { |
||||
result.add(row); |
||||
} |
||||
} |
||||
return rows; |
||||
} |
||||
|
||||
public Result buildReport(List<Item> sqlStatements, boolean isCreateRight) { |
||||
JdbcTemplate jt = new JdbcTemplate(dataSource); |
||||
Result report; |
||||
report = new Result(); |
||||
if(isCreateRight) { |
||||
report.info.setSqlStatements(sqlStatements); |
||||
} |
||||
|
||||
String sql = findByLabel(sqlStatements, "noAggregatesSQL").value; |
||||
String sqlColumnTotal = findByLabel(sqlStatements, "totalsColumnSQL").value; |
||||
|
||||
List<Column> columns = buildColumns(reportMetadata, columnElements); |
||||
List<Row> rows = null; |
||||
List<Row> totalColumns = null; |
||||
|
||||
try (Connection con = jt.getDataSource().getConnection()){ |
||||
rows = getRowsForReport(sql, con); |
||||
} catch (Exception e) { |
||||
logger.error(e); |
||||
e.printStackTrace(); |
||||
report.info.setSegmentCaption(reportMetadata.factTable.getCaption()); |
||||
report.info.setErrorMessage(e.getCause().getMessage()); |
||||
return report; |
||||
} |
||||
|
||||
// add one column for each measure to the report with the total sum
|
||||
if(!reportMetadata.topDimensionAttributes.isEmpty()) { |
||||
try { |
||||
totalColumns = getTotalColumnResult(sqlColumnTotal, jt); |
||||
ResultBuilder.setTotalColumnToColumns(columns, reportMetadata); |
||||
ResultBuilder.setTotalColumnToRows(rows, totalColumns); |
||||
} catch (Exception e) { |
||||
e.printStackTrace(); |
||||
report.info.setErrorMessage("Die Gesamtspalte konnte nicht ermittelt werden."); |
||||
} |
||||
} |
||||
|
||||
setAttributesToReport(report, reportMetadata, rows, columns); |
||||
|
||||
try { |
||||
if(reportMetadata.hideEmptyColumns) { |
||||
removeEmptyColumns(columns, rows); |
||||
} |
||||
} catch(Exception e ) { |
||||
logger.error(e); |
||||
} |
||||
|
||||
return report; |
||||
} |
||||
|
||||
public static void removeEmptyColumns(List<Column> columns, List<Row> rows) { |
||||
HashMap<String, Integer> map = new HashMap<>(); |
||||
for (Column col : columns) { |
||||
if(!map.containsKey(col.field)) { |
||||
map.put(col.field, Integer.valueOf(-1)); |
||||
} |
||||
} |
||||
|
||||
for (Row row : rows) { |
||||
for (String cellKey : row.cells.keySet()) { |
||||
Object value = row.cells.get(cellKey); |
||||
if(value instanceof Number) { |
||||
Number val = (Number) value; |
||||
if(val.intValue() != 0) { |
||||
if(map.containsKey(cellKey)) { |
||||
map.remove(cellKey); |
||||
} |
||||
} |
||||
} else { |
||||
if(value == null) { |
||||
continue; |
||||
} |
||||
if(map.containsKey(cellKey)) { |
||||
map.remove(cellKey); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
|
||||
for (Row row : rows) { |
||||
for(String key : map.keySet()) { |
||||
if(row.cells.containsKey(key)) { |
||||
row.cells.remove(key); |
||||
} |
||||
} |
||||
} |
||||
|
||||
if(rows.size() > 1) { |
||||
for(Iterator<Column> iterator = columns.iterator(); iterator.hasNext(); ) { |
||||
if(map.containsKey(iterator.next().field)) |
||||
iterator.remove(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
public static void setAttributesToReport(Result report, ReportMetadata reportMetadata, List<Row> rows, List<Column> columns) { |
||||
report.setResultType(ResultType.DrilldownTableGroupable); |
||||
report.setRows(rows); |
||||
report.setColumns(columns); |
||||
if(reportMetadata.factTable.getCaption() != null) { |
||||
report.info.setSegmentCaption(reportMetadata.factTable.getCaption()); |
||||
InfoItem facttableInfo = new InfoItem (reportMetadata.factTable.getId().composedId, |
||||
reportMetadata.factTable.getCaption(), |
||||
reportMetadata.factTable.getDescription()); |
||||
report.info.addFacttable(facttableInfo); |
||||
report.info.addSachgebiet(reportMetadata.sachgebiet.name); |
||||
} |
||||
report.info.setMeasures(reportMetadata.getMeasureInfo()); |
||||
report.info.setLeftDimensionAttributes(reportMetadata.getLeftDimAttrAsInfo()); |
||||
report.info.setTopDimensionAttributes(reportMetadata.getTopDimAttrAsInfo()); |
||||
report.info.setFilter(reportMetadata.getFilterAsInfo()); |
||||
report.info.setLastUpdateBiad(reportMetadata.lastBiadUpdate); |
||||
report.info.hideEmptyColumns(reportMetadata.hideEmptyColumns); |
||||
} |
||||
|
||||
private static String getTotalCellHeaderPrefix(ReportMetadata reportMetadata) { |
||||
String totalCellHeaderPrefix = ""; |
||||
for (int i = 0; i < reportMetadata.topDimensionAttributes.size(); i++) { |
||||
DimensionAttribute attr = reportMetadata.topDimensionAttributes.get(i); |
||||
if(i == 0) { |
||||
totalCellHeaderPrefix += ColumnElement.buildField(attr, "Gesamt"); |
||||
} else { |
||||
totalCellHeaderPrefix += ColumnElement.buildField(attr, " "); |
||||
} |
||||
} |
||||
totalCellHeaderPrefix += " || Kennzahl|"; |
||||
return totalCellHeaderPrefix; |
||||
} |
||||
|
||||
private static String getTotalCellHeaderPrefixHeader(ReportMetadata reportMetadata) { |
||||
String totalCellHeaderPrefix = ""; |
||||
for (int i = 0; i < reportMetadata.topDimensionAttributes.size(); i++) { |
||||
DimensionAttribute attr = reportMetadata.topDimensionAttributes.get(i); |
||||
if(i == 0) { |
||||
totalCellHeaderPrefix += attr.getCaption() + ": " + attr.getCaption() + "| Gesamt "; |
||||
} else { |
||||
totalCellHeaderPrefix += " || " + attr.getCaption() + ": " + attr.getCaption() + "| "; |
||||
} |
||||
} |
||||
totalCellHeaderPrefix += " || Kennzahl|"; |
||||
return totalCellHeaderPrefix; |
||||
} |
||||
|
||||
public List<Row> getTotalColumnResult(String sqlStatement, JdbcTemplate jt) { |
||||
if(sqlStatement.isEmpty()) { |
||||
return null; |
||||
} |
||||
List<Row> rows = null; |
||||
rows = jt.query(sqlStatement, new Object[0], new RowMapper<Row>() { |
||||
@Override |
||||
public Row mapRow(ResultSet rs, int rowNum) { |
||||
Row row = new Row(); |
||||
Map<String, Object> cells = new TreeMap<String, Object>(); |
||||
int numCols = reportMetadata.maxBridgeLvl; |
||||
try { |
||||
for (DimensionAttribute attr : reportMetadata.leftDimensionAttributes) { |
||||
if(attr.isHierarchy()) { |
||||
String prevCell = ""; |
||||
for (int i = reportMetadata.minBridgeLvl; i < reportMetadata.maxBridgeLvl; i++) { |
||||
Object cell = rs.getObject("col" + i); |
||||
if(cell == null || cell.equals(prevCell)) { |
||||
continue; |
||||
} |
||||
String cellKey = attr.getStringId() + " (Ebene " + i + ")"; |
||||
if(i == 0) { |
||||
cellKey = attr.getStringId(); |
||||
} |
||||
row.rowKey += cellKey + cell; |
||||
prevCell = (String) cell; |
||||
} |
||||
} else { |
||||
Object val = rs.getObject(attr.getDimensionColumnAlias()); |
||||
if(val != null) { |
||||
String id = attr.getAttrConformedId(); |
||||
if(id == null) { |
||||
id = attr.getStringId(); |
||||
} |
||||
cells.put(id, val); |
||||
row.rowKey += id + val; |
||||
} |
||||
} |
||||
} |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
for (Measure measure : reportMetadata.measures) { |
||||
try { |
||||
String key = getTotalCellHeaderPrefix(reportMetadata) + measure.getId().composedId; |
||||
Object val = rs.getObject("col" + numCols++); |
||||
cells.put(key, val); |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
row.cells = cells; |
||||
return row; |
||||
} |
||||
}); |
||||
|
||||
if(reportMetadata.getHierarchyAttributes().size() > 0) { |
||||
HashMap<String, Row> rowKeyValue = new HashMap<>(); |
||||
for (Row row : rows) { |
||||
boolean replace = false; |
||||
if(rowKeyValue.containsKey(row.rowKey)) { |
||||
Row alreadyThere = rowKeyValue.get(row.rowKey); |
||||
for(String key : alreadyThere.cells.keySet()) { |
||||
Number candidateVal = (Number) row.cells.get(key); |
||||
if(candidateVal == null) { |
||||
continue; |
||||
} |
||||
Number alreadyVal = (Number) alreadyThere.cells.get(key); |
||||
if(alreadyVal == null) { |
||||
continue; |
||||
} |
||||
if(candidateVal.doubleValue() > alreadyVal.doubleValue()) { |
||||
replace = true; |
||||
} |
||||
} |
||||
if(replace) { |
||||
rowKeyValue.put(row.rowKey, row); |
||||
} |
||||
} else { |
||||
rowKeyValue.put(row.rowKey, row); |
||||
} |
||||
} |
||||
return new ArrayList<>(rowKeyValue.values()); |
||||
} |
||||
|
||||
return rows; |
||||
} |
||||
|
||||
public static void setTotalColumnToRows(List<Row> rows, List<Row> result) { |
||||
for (Row row : rows) { |
||||
for (Row r : result) { |
||||
if(r.rowKey.equals(row.rowKey)) { |
||||
row.cells.putAll(r.cells); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
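// Adds one hidden total column per measure, flagged via setHidden(true) and setTotalColumn(true). |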
||||
public static void setTotalColumnToColumns(List<Column> columns, ReportMetadata reportMetadata) { |
||||
for (Measure measure : reportMetadata.measures) { |
||||
String field = getTotalCellHeaderPrefix(reportMetadata) + measure.getId().composedId; |
||||
String header = getTotalCellHeaderPrefixHeader(reportMetadata) + measure.getCaption(); |
||||
Column col = new Column(field, header, measure.getMeasureType(), false); |
||||
col.setHidden(true); |
||||
col.setTotalColumn(true); |
||||
columns.add(col); |
||||
} |
||||
|
||||
} |
||||
|
||||
private static Item findByLabel(List<Item> items, String label) { |
||||
return items.stream() |
||||
.filter(s -> s.label.equals(label)) |
||||
.findAny() |
||||
.get(); |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,158 @@
@@ -0,0 +1,158 @@
|
||||
package de.superx.bianalysis; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
import java.util.StringJoiner; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.models.DimensionAttribute; |
||||
import de.superx.bianalysis.models.FactTable; |
||||
import de.superx.bianalysis.models.Filter; |
||||
import de.superx.bianalysis.models.InfoItem; |
||||
import de.superx.bianalysis.service.DbMetaAdapter; |
||||
import de.superx.jdbc.entity.Sachgebiet; |
||||
import de.superx.rest.model.Column; |
||||
import de.superx.rest.model.Result; |
||||
import de.superx.rest.model.Row; |
||||
|
||||
public class ResultMerger { |
||||
|
||||
private DbMetaAdapter dbAdapter; |
||||
|
||||
public ResultMerger(DbMetaAdapter dbAdapter) { |
||||
this.dbAdapter = dbAdapter; |
||||
} |
||||
|
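/** |
* Builds one merged Result from the per-facttable sub-results: columns are derived |
* from the combined metadata, rows are merged via mergeRows, empty columns are |
* removed if requested, and the info block is overridden with the merged fact |
* tables, sachgebiete and the first sub-result error (if any). |
*/ |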
||||
public Result buildMergedReport(ReportDefinition definition, List<Result> reportResults) { |
||||
|
||||
Result result = new Result(); |
||||
ReportMetadata metadata = new ReportMetadata(definition, null, dbAdapter); |
||||
|
||||
List<ColumnElement> columnElements = ColumnElementBuilder.buildColumnElements(metadata); |
||||
List<Column> columns = ResultBuilder.buildColumns(metadata, columnElements); |
||||
if(!metadata.topDimensionAttributes.isEmpty()) { |
||||
ResultBuilder.setTotalColumnToColumns(columns, metadata); |
||||
} |
||||
|
||||
// create list of merged rows |
||||
List<List<Row>> allRows = Result.getRowsFromReports(reportResults); |
||||
List<Row> rows = mergeRows(allRows); |
||||
|
||||
if(metadata.hideEmptyColumns) { |
||||
ResultBuilder.removeEmptyColumns(columns, rows); |
||||
} |
||||
|
||||
ResultBuilder.setAttributesToReport(result, metadata, rows, columns); |
||||
|
||||
// override merge report specific attributes |
||||
result.setSubResults(reportResults); |
||||
List<InfoItem> factTablesInfo = getFactTablesAsInfo(dbAdapter, definition.factTableIds); |
||||
result.info.setSegmentCaption(factTablesInfo.stream().map(f -> f.caption).collect(Collectors.joining(", "))); |
||||
result.info.setSachgebiete(getSachgebieteAsInfo(dbAdapter, definition.factTableIds)); |
||||
result.info.setFacttables(factTablesInfo); |
||||
|
||||
for (Result r : reportResults) { |
||||
if(r.info.error != null && !r.info.error.isBlank()) { |
||||
result.info.setErrorMessage(r.info.error); |
||||
break; |
||||
} |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
private static List<String> getSachgebieteAsInfo(DbMetaAdapter dbAdapter, List<Identifier> factTableIds) { |
||||
List<String> sachgebiete = new ArrayList<String>(); |
||||
List<Integer> tids = new ArrayList<>(); |
||||
for (Identifier id : factTableIds) { |
||||
FactTable factTable = dbAdapter.getFactTable(id); |
||||
Sachgebiet sachgebiet = dbAdapter.getSachgebietById(factTable.getSachgebiettid()); |
||||
Integer tid = sachgebiet.tid; |
||||
if(!tids.contains(tid)) { |
||||
tids.add(tid); |
||||
sachgebiete.add(sachgebiet.name.trim()); |
||||
} |
||||
} |
||||
return sachgebiete; |
||||
} |
||||
|
||||
private static List<InfoItem> getFactTablesAsInfo(DbMetaAdapter dbAdapter, List<Identifier> factTableIds) { |
||||
List<InfoItem> facttables = new ArrayList<InfoItem>(); |
||||
for (Identifier id : factTableIds) { |
||||
FactTable factTable = dbAdapter.getFactTable(id); |
||||
facttables.add(new InfoItem(factTable.getId().composedId, factTable.getCaption(), factTable.getDescription())); |
||||
} |
||||
return facttables; |
||||
} |
||||
|
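/** |
* Creates a copy of the report definition restricted to the given fact table: only |
* dimension attributes, measures and filters that the fact table actually provides |
* (checked via the DbMetaAdapter) are carried over. |
*/ |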
||||
public ReportDefinition createFactTableSpecificReportDefinition(ReportDefinition reportDefinition, |
||||
Identifier factTableId) { |
||||
ReportDefinition definition = new ReportDefinition(); |
||||
definition.hideEmptyColumns = reportDefinition.hideEmptyColumns; |
||||
definition.factTableIds.add(factTableId); |
||||
for (int i = 0; i < reportDefinition.leftDimensionAttributeIds.size(); i++) { |
||||
Identifier attr = reportDefinition.leftDimensionAttributeIds.get(i); |
||||
Identifier checkedAttr = dbAdapter.checkIfFactTableHasDimensionAttribute(attr, factTableId); |
||||
if(checkedAttr != null) { |
||||
definition.leftDimensionAttributeIds.add(checkedAttr); |
||||
} |
||||
} |
||||
for (int i = 0; i < reportDefinition.topDimensionAttributeIds.size(); i++) { |
||||
Identifier attr = reportDefinition.topDimensionAttributeIds.get(i); |
||||
Identifier checkedAttr = dbAdapter.checkIfFactTableHasDimensionAttribute(attr, factTableId); |
||||
if(checkedAttr != null) { |
||||
definition.topDimensionAttributeIds.add(checkedAttr); |
||||
} |
||||
} |
||||
for (int i = 0; i < reportDefinition.measureIds.size(); i++) { |
||||
Identifier measure = reportDefinition.measureIds.get(i); |
||||
if(dbAdapter.checkIfFactTableHasMeasure(measure, factTableId)) { |
||||
definition.measureIds.add(measure); |
||||
} |
||||
} |
||||
for (int i = 0; i < reportDefinition.filters.size(); i++) { |
||||
Filter filter = reportDefinition.filters.get(i); |
||||
Identifier checkedAttr = dbAdapter.checkIfFactTableHasDimensionAttribute(filter.dimensionAttributeId, factTableId); |
||||
if(checkedAttr != null) { |
||||
Filter roleFilter = new Filter(filter); |
||||
roleFilter.dimensionAttributeId = checkedAttr; |
||||
definition.filters.add(roleFilter); |
||||
} |
||||
} |
||||
return definition; |
||||
} |
||||
|
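/** |
* Merges the row lists of all sub-reports into one list. Rows are matched by their |
* rowKey; when a row with the same rowKey already exists, only the cells that are |
* not present yet are copied over. |
*/ |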
||||
public static List<Row> mergeRows(List<List<Row>> rows) { |
||||
List<Row> result = new ArrayList<>(); |
||||
|
||||
for (List<Row> inputRows : rows) { |
||||
for (Row row : inputRows) { |
||||
|
||||
Row rowRepl = new Row(row.aggregated); |
||||
rowRepl.rowKey = row.rowKey; |
||||
|
||||
for(String key : row.cells.keySet()){ |
||||
rowRepl.cells.put(key, row.cells.get(key)); |
||||
} |
||||
|
||||
if(!result.contains(rowRepl)) { |
||||
result.add(rowRepl); |
||||
} else { |
||||
// a row with the same rowKey already exists -> only add the missing cells |
||||
Row found = result.get(result.indexOf(rowRepl)); |
||||
for(String key : rowRepl.cells.keySet()){ |
||||
if(!found.cells.containsKey(key)) { |
||||
found.cells.put(key, rowRepl.cells.get(key)); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
return result; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,82 @@
@@ -0,0 +1,82 @@
|
||||
package de.superx.bianalysis; |
||||
|
||||
import java.util.ArrayList; |
||||
|
||||
|
||||
import org.springframework.data.annotation.Id; |
||||
import org.springframework.data.annotation.Transient; |
||||
import org.springframework.data.relational.core.mapping.Column; |
||||
import org.springframework.data.relational.core.mapping.Table; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty; |
||||
import com.fasterxml.jackson.core.JsonProcessingException; |
||||
import com.fasterxml.jackson.databind.ObjectMapper; |
||||
import com.fasterxml.jackson.databind.ObjectWriter; |
||||
import com.fasterxml.jackson.databind.json.JsonMapper; |
||||
|
||||
import de.superx.rest.model.Result; |
||||
import de.superx.rest.model.TreeNode; |
||||
|
||||
@Table(value ="metadata\".\"rw_report_definitions") |
||||
public class StoredReport { |
||||
|
||||
@Id |
||||
public int id; |
||||
|
||||
public String name; |
||||
|
||||
public String description; |
||||
|
||||
public String definition; |
||||
|
||||
@Column(value = "show_total_column") |
||||
@JsonProperty("show_total_column") |
||||
public int showTotalColumn; |
||||
|
||||
@Transient |
||||
public Boolean isReadOnly = Boolean.FALSE; |
||||
|
||||
@Transient |
||||
public ReportDefinition reportDefinition; |
||||
|
||||
@Transient |
||||
public Result exportedResult; |
||||
|
||||
@Transient |
||||
public ArrayList<TreeNode> hierarchy; |
||||
|
||||
public StoredReport(String name, ReportDefinition reportDefinition, Result exportedResult) { |
||||
super(); |
||||
this.name = name; |
||||
this.reportDefinition = reportDefinition; |
||||
this.exportedResult = exportedResult; |
||||
} |
||||
|
||||
public StoredReport() { |
||||
super(); |
||||
} |
||||
|
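// Serializes the transient reportDefinition into the persisted 'definition' JSON string; |
// setReportDefinitionFromJson below performs the inverse and clears 'definition'. |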
||||
public static void setReportDefinitionJson(StoredReport report) { |
||||
ObjectWriter ow = new ObjectMapper().writer(); |
||||
String reportDefinitionJson = null; |
||||
try { |
||||
reportDefinitionJson = ow.writeValueAsString(report.reportDefinition); |
||||
} catch (JsonProcessingException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
report.definition = reportDefinitionJson; |
||||
} |
||||
|
||||
public static void setReportDefinitionFromJson(StoredReport report) { |
||||
ObjectMapper mapper = JsonMapper.builder().findAndAddModules().build(); |
||||
ReportDefinition reportDefinition = null; |
||||
try { |
||||
reportDefinition = mapper.readValue(report.definition, ReportDefinition.class); |
||||
} catch (Exception e) { |
||||
e.printStackTrace(); |
||||
} |
||||
report.reportDefinition = reportDefinition; |
||||
report.definition = ""; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,302 @@
@@ -0,0 +1,302 @@
|
||||
package de.superx.bianalysis.bin; |
||||
|
||||
import java.io.BufferedReader; |
||||
import java.io.File; |
||||
import java.io.FileReader; |
||||
import java.io.IOException; |
||||
import java.io.PrintWriter; |
||||
import java.nio.file.Path; |
||||
import java.util.List; |
||||
|
||||
import org.apache.commons.cli.CommandLine; |
||||
import org.apache.commons.cli.CommandLineParser; |
||||
import org.apache.commons.cli.GnuParser; |
||||
import org.apache.commons.cli.HelpFormatter; |
||||
import org.apache.commons.cli.Option; |
||||
import org.apache.commons.cli.OptionBuilder; |
||||
import org.apache.commons.cli.Options; |
||||
import org.apache.commons.cli.ParseException; |
||||
import org.apache.log4j.BasicConfigurator; |
||||
import org.apache.log4j.Level; |
||||
import org.apache.log4j.Logger; |
||||
|
||||
import com.fasterxml.jackson.core.util.DefaultIndenter; |
||||
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; |
||||
import com.fasterxml.jackson.databind.ObjectMapper; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.metadata.MetaImport; |
||||
import de.superx.bianalysis.metadata.MetaJson; |
||||
import de.superx.bianalysis.metadata.MetadataImporter; |
||||
import de.superx.bianalysis.metadata.models.json.MetaDimension; |
||||
import de.superx.bianalysis.metadata.models.json.MetaDimensionAttribute; |
||||
import de.superx.bianalysis.metadata.models.json.MetaFact; |
||||
import de.superx.bianalysis.metadata.models.json.MetaMeasure; |
||||
import de.superx.bianalysis.metadata.models.json.MetaObject; |
||||
import de.superx.bianalysis.metadata.models.yml.MetaYml; |
||||
import de.superx.servlet.SuperXManager; |
||||
import de.superx.util.PathAndFileUtils; |
||||
|
||||
public class BiAnalysisCLI { |
||||
|
||||
private static final String DEFAULT_RELEASE_BRANCH = "2025_12"; |
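// Typical invocations (sketch; how the class is launched, e.g. via a wrapper script |
// or an explicit classpath, is not shown here): |
//   -m                  convert the metadata directory to SQL and print it to stdout |
//   -y <json-file>      print generated yml documentation for a metaimport json file |
//   -d <facttable|all>  write mediawiki documentation for one or all facttables |
//   -i <directories>    add missing ids and default_release values to json files |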
||||
|
||||
public static void main(String[] args) throws IOException { |
||||
Options options = createOptions(); |
||||
CommandLine parsedArgs = readArgs(args, options); |
||||
if(parsedArgs.hasOption("-i")) { |
||||
addMissingIdsInMetadataDir(parsedArgs); |
||||
} else if(parsedArgs.hasOption("-m")) { |
||||
convertJsonFilesToSql(); |
||||
} else if(parsedArgs.hasOption("-y")) { |
||||
generateYmlForJsonFile(parsedArgs); |
||||
} else if(parsedArgs.hasOption("-d")) { |
||||
generateWikiMarkdown(parsedArgs); |
||||
} else { |
||||
printHelp(options); |
||||
} |
||||
} |
||||
|
||||
private static void generateWikiMarkdown(CommandLine parsedArgs) throws IOException { |
||||
SuperXManager.setWEB_INFPfad(PathAndFileUtils.getWebinfPath()); |
||||
String facttable = parsedArgs.getOptionValue("d"); |
||||
String filePath = PathAndFileUtils.getReportGeneratorDir("hisinone"); |
||||
String ymlPath = PathAndFileUtils.getDbtTransformDirectory("hisinone") + File.separator + "docs_and_tests"; |
||||
MetadataImporter importer = new MetadataImporter(ymlPath); |
||||
Logger.getLogger(MetadataImporter.class).setLevel(Level.ERROR); |
||||
importer.deserializeMetadataFromJsonFiles(filePath); |
||||
|
||||
String docDirectory = PathAndFileUtils.getModulePath("biad"); |
||||
docDirectory = String.join(File.separator, docDirectory, "conf", "his1", "edustore_doc"); |
||||
|
||||
for(MetaFact fact : importer.getAllFactTables()) { |
||||
if("all".equals(facttable) || fact.getFacttable().equals(facttable)) { |
||||
PrintWriter writer = new PrintWriter(docDirectory + File.separator + fact.getFacttable() + "_mediawiki.txt", "UTF-8"); |
||||
writer.println("===Kennzahlen ==="); |
||||
writer.println(";Kennzahlen " + fact.getCaption()); |
||||
writer.println(); |
||||
writer.println("{| class=\"wikitable\"\n ! Kennzahl !! Beschreibung"); |
||||
writer.println("|-"); |
||||
for(int i = 0; i < fact.getMeasures().size(); i++) { |
||||
MetaMeasure m = fact.getMeasures().get(i); |
||||
writer.println("| "+m.getCaption()); |
||||
writer.println("| "+m.getDescription()); |
||||
if(i != fact.getMeasures().size() - 1) { |
||||
writer.println("|-"); |
||||
|
||||
} |
||||
} |
||||
writer.println("|-\n|}"); |
||||
writer.println(); |
||||
writer.println(); |
||||
|
||||
writer.println("===Dimension ==="); |
||||
writer.println(";"+fact.getCaption()); |
||||
writer.println(":"+fact.getDescription()); |
||||
writer.println(); |
||||
writer.println("'''Dimension und Dimensionsattribut'''"); |
||||
writer.println(); |
||||
for(MetaDimension d : fact.getDimensions()) { |
||||
|
||||
String dimCaption = d.getCaption(); |
||||
if((dimCaption == null || dimCaption.isBlank()) && d.getConformedDimension() != null) { |
||||
dimCaption = d.getConformedDimension().getCaption(); |
||||
} |
||||
|
||||
String dimDescription = d.getDescription(); |
||||
if((dimDescription == null || dimDescription.isBlank()) && d.getConformedDimension() != null) { |
||||
dimDescription = d.getConformedDimension().getDescription(); |
||||
} |
||||
|
||||
writer.println(";"+dimCaption); |
||||
writer.println(":"+dimDescription); |
||||
|
||||
List<MetaDimensionAttribute> attributes = d.getAttributes(); |
||||
if(attributes == null || attributes.size() == 0) { |
||||
attributes = d.getConformedDimension().getAttributes(); |
||||
} |
||||
|
||||
for(MetaDimensionAttribute a : attributes) { |
||||
String caption = a.getCaption() == null ? a.getConfDimAttrRef().getCaption() : a.getCaption(); |
||||
writer.println("*"+caption); |
||||
try { |
||||
String desc = a.getDescription() == null ? a.getConfDimAttrRef().getDescription() : a.getDescription(); |
||||
if(!desc.equals("null")) { |
||||
writer.println("*:"+desc); |
||||
} |
||||
} catch (Exception e) { |
||||
// no description available for this attribute; nothing to write |
||||
} |
||||
} |
||||
writer.println(); |
||||
} |
||||
writer.close(); |
||||
} |
||||
} |
||||
|
||||
} |
||||
|
||||
private static void generateYmlForJsonFile(CommandLine parsedArgs) { |
||||
String file = parsedArgs.getOptionValue("y"); |
||||
if(file == null || !new File(file).exists()) { |
||||
throw new RuntimeException("File " + file +" is not valid."); |
||||
} |
||||
MetadataImporter importer = new MetadataImporter(); |
||||
Logger.getLogger(MetadataImporter.class).setLevel(Level.ERROR); |
||||
importer.setShouldReadYMLDoc(false); |
||||
importer.deserializeMetadataFromJsonFiles(file); |
||||
MetaImport metaImport = importer.getMetaImports().get(0); |
||||
MetaYml yml = importer.createYMLFileForMetaJson(metaImport); |
||||
System.out.println(MetadataImporter.writeYmlToString(yml)); |
||||
} |
||||
|
||||
private static void addMissingIdsInMetadataDir(CommandLine parsedArgs) { |
||||
String[] files = parsedArgs.getOptionValues("i"); |
||||
BasicConfigurator.configure(); // initializes console logging to stdout |
||||
try { |
||||
MetadataImporter metaImporter = new MetadataImporter(); |
||||
metaImporter.setShouldReadYMLDoc(false); |
||||
metaImporter.deserializeMetadataFromJsonFiles(files); |
||||
for (MetaJson meta : metaImporter.getMetaJsons()) { |
||||
List<Identifier> allIds = meta.getIds(); |
||||
boolean isFileUpdateNecessary = false; |
||||
for (MetaObject obj : meta.getMetaObjects()) { |
||||
if(obj.getId() == null) { |
||||
Identifier id = Identifier.getNewIdentifierValue(allIds, obj.getNamespace()); |
||||
obj.setId(id); |
||||
allIds.add(id); |
||||
isFileUpdateNecessary = true; |
||||
} |
||||
if(obj.getDefaultRelease() == null) { |
||||
obj.setDefaultRelease(DEFAULT_RELEASE_BRANCH); |
||||
isFileUpdateNecessary = true; |
||||
} |
||||
} |
||||
if(isFileUpdateNecessary) { |
||||
writeMetaImportToFile(meta); |
||||
Logger.getRootLogger().info("Updated file " + meta.getFile().getPath()); |
||||
} |
||||
} |
||||
if (!metaImporter.errorMessages.isEmpty()) { |
||||
System.out.println(metaImporter.getPrintableErrorMessages()); |
||||
System.exit(1); |
||||
} |
||||
} catch (Exception e) { |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
} |
||||
|
||||
private static void convertJsonFilesToSql() { |
||||
SuperXManager.setWEB_INFPfad(PathAndFileUtils.getWebinfPath()); |
||||
String filePath = PathAndFileUtils.getReportGeneratorDir("hisinone"); |
||||
String ymlPath = PathAndFileUtils.getDbtTransformDirectory("hisinone"); |
||||
|
||||
String out = |
||||
"DROP TABLE IF EXISTS metadata.facttable; " + |
||||
"DROP TABLE IF EXISTS metadata.measure; " + |
||||
"DROP TABLE IF EXISTS metadata.measure_filter; " + |
||||
"DROP TABLE IF EXISTS metadata.dimension; " + |
||||
"DROP TABLE IF EXISTS metadata.dimension_attribute; "; |
||||
|
||||
Path schemaSqlDir = Path.of("superx", "WEB-INF", "conf", "edustore", "db", "install", "schluesseltabellen"); |
||||
|
||||
out += readLinesWithNewline(new File(schemaSqlDir.toString() + File.separator + "biad_create_meta_tables.sql")); |
||||
out += readLinesWithNewline(new File(schemaSqlDir.toString() + File.separator + "biad_alter_meta_tables.sql")); |
||||
out += readLinesWithNewline(new File(schemaSqlDir.toString() + File.separator + "biad_metadaten_fuellen.sql")); |
||||
|
||||
Logger.getLogger(MetadataImporter.class).setLevel(Level.ERROR); |
||||
MetadataImporter importer = new MetadataImporter(ymlPath); |
||||
importer.deserializeMetadataFromJsonFiles(filePath); |
||||
out += String.join("\n", importer.getAllUpsertStrings(false)); |
||||
|
||||
if(!importer.errorMessages.isEmpty()) { |
||||
System.out.println(importer.getPrintableErrorMessages()); |
||||
System.exit(1); |
||||
} else { |
||||
System.out.println(out); |
||||
|
||||
} |
||||
|
||||
} |
||||
|
||||
private static CommandLine readArgs(String[] args, Options options) { |
||||
CommandLineParser parser = new GnuParser(); |
||||
try { |
||||
return parser.parse(options, args, false); |
||||
} catch (ParseException e) { |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
private static void printHelp(Options options) { |
||||
HelpFormatter help = new HelpFormatter(); |
||||
help.setOptionComparator(null); |
||||
help.setWidth(200); |
||||
help.printHelp("This tool streamlines common tasks during development for the BIAnalysis.", options); |
||||
} |
||||
|
||||
private static Options createOptions() { |
||||
Options options = new Options(); |
||||
|
||||
OptionBuilder.withDescription("convert metadata directory to sql"); |
||||
OptionBuilder.withLongOpt("convert-metadata"); |
||||
Option outMeta = OptionBuilder.create("m"); |
||||
|
||||
OptionBuilder.withDescription("generate yml documentation for json file"); |
||||
OptionBuilder.withLongOpt("generate-yml"); |
||||
OptionBuilder.withArgName("json-file"); |
||||
OptionBuilder.hasArg(true); |
||||
Option generateYml = OptionBuilder.create("y"); |
||||
|
||||
|
||||
OptionBuilder.withDescription("generate wiki documentation for measures and dimensions"); |
||||
OptionBuilder.withLongOpt("generate-doc"); |
||||
OptionBuilder.withArgName("facttable"); |
||||
OptionBuilder.hasArg(true); |
||||
Option generateDoc = OptionBuilder.create("d"); |
||||
|
||||
OptionBuilder.withLongOpt("add-ids"); |
||||
OptionBuilder.withDescription("add missing ids to json files"); |
||||
OptionBuilder.withArgName("directories"); |
||||
OptionBuilder.hasArgs(); |
||||
Option updateIds = OptionBuilder.create("i"); |
||||
|
||||
options.addOption(updateIds); |
||||
options.addOption(generateYml); |
||||
options.addOption(generateDoc); |
||||
options.addOption(outMeta); |
||||
options.addOption(new Option("h", "help", false, "get help")); |
||||
|
||||
return options; |
||||
} |
||||
|
||||
public static void writeMetaImportToFile(MetaJson meta) { |
||||
ObjectMapper mapper = new ObjectMapper(); |
||||
DefaultPrettyPrinter.Indenter indenter = new DefaultIndenter(" ", DefaultIndenter.SYS_LF); |
||||
DefaultPrettyPrinter printer = new DefaultPrettyPrinter(); |
||||
printer.indentObjectsWith(indenter); |
||||
printer.indentArraysWith(indenter); |
||||
try { |
||||
mapper.writer(printer).writeValue(meta.getFile(), meta); |
||||
} catch (Exception e) { |
||||
throw new RuntimeException(e); |
||||
} |
||||
} |
||||
|
||||
private static String readLinesWithNewline(File file) { |
||||
String result = ""; |
||||
try (BufferedReader br = new BufferedReader(new FileReader(file))) { |
||||
String line; |
||||
while ((line = br.readLine()) != null) { |
||||
result += line + "\n"; |
||||
} |
||||
} catch (IOException e) { |
||||
e.printStackTrace(); |
||||
} |
||||
return result+"\n"; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,74 @@
@@ -0,0 +1,74 @@
|
||||
package de.superx.bianalysis.metadata; |
||||
|
||||
import java.util.Collections; |
||||
import java.util.List; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.databind.annotation.JsonSerialize; |
||||
|
||||
@JsonSerialize(using = IdentifierSerializer.class) |
||||
public class Identifier { |
||||
|
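// A composedId has the form "<namespace>:<numeric value>". |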
||||
private static final String ID_SEPARATOR = ":"; |
||||
|
||||
@JsonIgnore |
||||
public Integer value; |
||||
@JsonIgnore |
||||
public String namespace; |
||||
|
||||
public String composedId; |
||||
|
||||
public Identifier(String composedId) { |
||||
this.composedId = composedId; |
||||
String[] result = composedId.split(ID_SEPARATOR); |
||||
this.namespace = result[0]; |
||||
this.value = Integer.valueOf(result[1]); |
||||
} |
||||
|
||||
public Identifier(Identifier id) { |
||||
this.value = id.value; |
||||
this.namespace = id.namespace; |
||||
this.composedId = id.composedId; |
||||
} |
||||
|
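// Returns a new Identifier in the given namespace whose numeric value is one above the |
// highest value found in the given list (or 1 if the list contains no values). |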
||||
@JsonIgnore |
||||
public static Identifier getNewIdentifierValue(List<Identifier> list, String namespace) { |
||||
List<Integer> values = list |
||||
.stream() |
||||
.filter(i->i.value!=null) |
||||
.map(i->i.value) |
||||
.collect(Collectors.toList()); |
||||
Integer value; |
||||
if(values.isEmpty()) { |
||||
value = Integer.valueOf(1); |
||||
} else { |
||||
value = Integer.valueOf(Collections.max(values).intValue() + 1); |
||||
} |
||||
return new Identifier(namespace + ID_SEPARATOR + value); |
||||
} |
||||
|
||||
@Override |
||||
@JsonIgnore |
||||
public boolean equals(Object obj) { |
||||
if(obj == null || !(obj instanceof Identifier)) { |
||||
return false; |
||||
} |
||||
Identifier id = (Identifier) obj; |
||||
return id.composedId.equals(this.composedId); |
||||
} |
||||
|
||||
@Override |
||||
@JsonIgnore |
||||
public int hashCode() { |
||||
return this.value.hashCode() + this.namespace.hashCode(); |
||||
} |
||||
|
||||
@Override |
||||
public String toString() { |
||||
return composedId; |
||||
} |
||||
|
||||
|
||||
|
||||
} |
||||
@ -0,0 +1,27 @@
@@ -0,0 +1,27 @@
|
||||
package de.superx.bianalysis.metadata; |
||||
|
||||
import java.io.IOException; |
||||
|
||||
import com.fasterxml.jackson.core.JsonGenerator; |
||||
import com.fasterxml.jackson.core.JsonProcessingException; |
||||
import com.fasterxml.jackson.databind.SerializerProvider; |
||||
import com.fasterxml.jackson.databind.ser.std.StdSerializer; |
||||
|
||||
public class IdentifierSerializer extends StdSerializer<Identifier>{ |
||||
|
||||
public IdentifierSerializer() { |
||||
this(null); |
||||
} |
||||
|
||||
public IdentifierSerializer(Class<Identifier> t) { |
||||
super(t); |
||||
} |
||||
|
||||
@Override |
||||
public void serialize( |
||||
Identifier id, JsonGenerator jgen, SerializerProvider provider) |
||||
throws IOException, JsonProcessingException { |
||||
jgen.writeRawValue('"'+id.composedId+'"'); |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,148 @@
@@ -0,0 +1,148 @@
|
||||
package de.superx.bianalysis.metadata; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.HashMap; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
import java.util.Optional; |
||||
|
||||
import org.apache.log4j.Logger; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
|
||||
import de.superx.bianalysis.FaultyMetadataException; |
||||
import de.superx.bianalysis.metadata.models.json.MetaDimension; |
||||
import de.superx.bianalysis.metadata.models.json.MetaDimensionAttribute; |
||||
import de.superx.bianalysis.metadata.models.json.MetaFact; |
||||
import de.superx.bianalysis.metadata.models.json.MetaMeasure; |
||||
import de.superx.bianalysis.metadata.models.json.MetaMeasureFilter; |
||||
import de.superx.bianalysis.metadata.models.json.MetaObject; |
||||
|
||||
public class MetaImport extends MetaJson { |
||||
|
||||
public List<MetaFact> facts; |
||||
|
||||
private static Logger log = Logger.getLogger(MetaImport.class); |
||||
|
||||
@JsonIgnore |
||||
private Map<String, MetaDimensionAttribute> keysForMeasureFilter = new HashMap<>(); |
||||
|
||||
@JsonIgnore |
||||
public List<MetaDimension> conformedDimensions; |
||||
|
||||
@JsonIgnore |
||||
public void setConformedDimensions(List<MetaDimension> conformedDimensions){ |
||||
this.conformedDimensions = conformedDimensions; |
||||
if(this.conformedDimensions != null) { |
||||
for (MetaDimension dim : this.conformedDimensions) { |
||||
for (MetaDimensionAttribute attr : dim.getAttributes()) { |
||||
attr.setDimension(dim); |
||||
keysForMeasureFilter.put(dim.getDimension()+"."+attr.getDimColumn(), attr); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
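/** |
* Collects facts, dimensions, attributes, measures and measure filters into allMetaObj |
* and resolves their 'ref_to' references against the conformed dimensions. |
*/ |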
||||
@JsonIgnore |
||||
@Override |
||||
public void init() { |
||||
this.allMetaObj = new ArrayList<MetaObject>(); |
||||
for (MetaFact fact : this.facts) { |
||||
allMetaObj.add(fact); |
||||
for (MetaDimension dim : fact.getDimensions()) { |
||||
allMetaObj.add(dim); |
||||
if(dim.getRefTo() != null && !dim.getRefTo().isEmpty()) { |
||||
MetaDimension conformedDim = findByRef(dim); |
||||
dim.setConformedDimension(conformedDim); |
||||
} |
||||
for (MetaDimensionAttribute attr : dim.getAttributes()) { |
||||
allMetaObj.add(attr); |
||||
if(attr.getRefTo() != null && !attr.getRefTo().isEmpty()) { |
||||
attr.setConformedDimensionAttribute(findByRefAttr(dim.getConformedDimension().getDimension(), attr)); |
||||
} |
||||
keysForMeasureFilter.put(dim.getDimension()+"."+attr.getDimColumn(), attr); |
||||
} |
||||
} |
||||
if(fact.getMeasures() != null) { |
||||
for (MetaMeasure measure : fact.getMeasures()) { |
||||
allMetaObj.add(measure); |
||||
MetaMeasureFilter filter = measure.getFilter(); |
||||
if(filter != null) { |
||||
if(filter.getDimensionRef() != null && !filter.getDimensionRef().isBlank()) { |
||||
MetaDimensionAttribute attr = keysForMeasureFilter.get(filter.getDimensionRef()); |
||||
if(attr == null) { |
||||
throw new FaultyMetadataException("Could not resolve dimensionRef '" + filter.getDimensionRef() + |
||||
"' (" + file.getName() + " -> " + fact.getFacttable() + ")"); |
||||
} |
||||
filter.setAttribute(attr); |
||||
allMetaObj.add(filter); |
||||
} else if(filter.getFactColumnRef() != null && !filter.getFactColumnRef().isBlank()) { |
||||
allMetaObj.add(filter); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
private MetaDimensionAttribute findByRefAttr(String dimensionTable, MetaDimensionAttribute attribute) { |
||||
String attributeColumn = attribute.getRefTo(); |
||||
MetaDimensionAttribute confAttr = null; |
||||
for (MetaDimension confDim : this.conformedDimensions) { |
||||
if(!confDim.getDimension().equals(dimensionTable)) { |
||||
continue; |
||||
} |
||||
for (MetaDimensionAttribute attr : confDim.getAttributes()) { |
||||
if(attr.getDimColumn().equals(attributeColumn)) { |
||||
confAttr = attr; |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
if(confAttr == null) { |
||||
throw new FaultyMetadataException( |
||||
"Could not resolve attribute reference '" + attributeColumn + "' (" |
||||
+ file.getName() + " -> " |
||||
+ attribute.getDimension().getFact().getFacttable() + " -> " |
||||
+ attribute.getDimension().getRefTo() + ")" |
||||
); |
||||
} |
||||
return confAttr; |
||||
} |
||||
|
||||
|
||||
@JsonIgnore |
||||
private MetaDimension findByRef(MetaDimension dim) { |
||||
String refTo = dim.getRefTo(); |
||||
MetaDimension resolvedRefTo = null; |
||||
for (MetaDimension dimConf : this.conformedDimensions) { |
||||
if (dimConf.getDimension() == null) { |
||||
log.error("Missing dimension attribute for " + dimConf.getCaption()); |
||||
continue; |
||||
} |
||||
if (dimConf.getDimension().equals(refTo)) { |
||||
resolvedRefTo = dimConf; |
||||
break; |
||||
} |
||||
} |
||||
if (resolvedRefTo == null) { |
||||
throw new FaultyMetadataException("Could not resolve dimension reference '" + refTo + "' (" + file.getName() + " -> " + dim.getFact().getFacttable() + ")"); |
||||
} |
||||
return resolvedRefTo; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public List<MetaDimension> getDimensionsWithoutRefTo() { |
||||
List<MetaDimension> dims = new ArrayList<>(); |
||||
for (MetaFact fact : facts) { |
||||
for (MetaDimension dim : fact.getDimensions()) { |
||||
if(dim.getRefTo() == null) { |
||||
dims.add(dim); |
||||
} |
||||
} |
||||
} |
||||
return dims; |
||||
} |
||||
|
||||
|
||||
} |
||||
@ -0,0 +1,38 @@
@@ -0,0 +1,38 @@
|
||||
package de.superx.bianalysis.metadata; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonProperty; |
||||
|
||||
import de.superx.bianalysis.metadata.models.json.MetaDimension; |
||||
import de.superx.bianalysis.metadata.models.json.MetaDimensionAttribute; |
||||
import de.superx.bianalysis.metadata.models.json.MetaMeasure; |
||||
import de.superx.bianalysis.metadata.models.json.MetaObject; |
||||
|
||||
public class MetaImportConformedDimensions extends MetaJson { |
||||
|
||||
@JsonProperty("conformed_dimensions") |
||||
public List<MetaDimension> conformedDimensions; |
||||
|
||||
@Override |
||||
public void init() { |
||||
this.allMetaObj = new ArrayList<MetaObject>(); |
||||
for (MetaDimension metaDimension : this.conformedDimensions) { |
||||
metaDimension.setConformed(true); |
||||
this.allMetaObj.add(metaDimension); |
||||
for (MetaDimensionAttribute attr : metaDimension.getAttributes()) { |
||||
attr.setDimension(metaDimension); |
||||
this.allMetaObj.add(attr); |
||||
} |
||||
} |
||||
|
||||
for (MetaObject metaObject : allMetaObj) { |
||||
metaObject.setNamespace(this.namespace); |
||||
if(metaObject.getId() != null) { |
||||
metaObject.getId().namespace = this.namespace; |
||||
} |
||||
} |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,58 @@
@@ -0,0 +1,58 @@
|
||||
package de.superx.bianalysis.metadata; |
||||
|
||||
import java.io.File; |
||||
import java.util.List; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
|
||||
import de.superx.bianalysis.metadata.models.json.MetaObject; |
||||
|
||||
public abstract class MetaJson { |
||||
|
||||
public String namespace; |
||||
|
||||
@JsonIgnore |
||||
protected File file; |
||||
|
||||
@JsonIgnore |
||||
protected List<MetaObject> allMetaObj; |
||||
|
||||
@JsonIgnore |
||||
public abstract void init(); |
||||
|
||||
@JsonIgnore |
||||
public File getFile() { |
||||
return this.file; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public List<MetaObject> getMetaObjects() { |
||||
return this.allMetaObj; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public void setFile(File file) { |
||||
this.file = file; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public List<Identifier> getIds() { |
||||
return this.getMetaObjects() |
||||
.stream() |
||||
.map(o -> o.getId()) |
||||
.filter(i -> i != null && i.composedId != null) |
||||
.collect(Collectors.toList()); |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public void setNamespaceToMetaObjects() { |
||||
for (MetaObject metaObject : allMetaObj) { |
||||
metaObject.setNamespace(this.namespace); |
||||
if(metaObject.getId() != null) { |
||||
metaObject.getId().namespace = this.namespace; |
||||
} |
||||
} |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,593 @@
@@ -0,0 +1,593 @@
|
||||
package de.superx.bianalysis.metadata; |
||||
|
||||
import java.io.BufferedReader; |
||||
import java.io.File; |
||||
import java.io.FileReader; |
||||
import java.io.FilenameFilter; |
||||
import java.sql.Connection; |
||||
import java.sql.ResultSet; |
||||
import java.sql.Statement; |
||||
import java.util.ArrayList; |
||||
import java.util.HashMap; |
||||
import java.util.Iterator; |
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import javax.sql.DataSource; |
||||
|
||||
import org.apache.commons.lang.exception.ExceptionUtils; |
||||
import org.apache.log4j.Logger; |
||||
import org.springframework.jdbc.core.JdbcTemplate; |
||||
|
||||
import com.fasterxml.jackson.core.util.DefaultIndenter; |
||||
import com.fasterxml.jackson.core.util.DefaultPrettyPrinter; |
||||
import com.fasterxml.jackson.databind.JsonMappingException; |
||||
import com.fasterxml.jackson.databind.ObjectMapper; |
||||
import com.fasterxml.jackson.databind.json.JsonMapper; |
||||
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; |
||||
import com.fasterxml.jackson.dataformat.yaml.YAMLGenerator; |
||||
|
||||
import de.superx.bianalysis.StoredReport; |
||||
import de.superx.bianalysis.metadata.models.json.MetaDimension; |
||||
import de.superx.bianalysis.metadata.models.json.MetaDimensionAttribute; |
||||
import de.superx.bianalysis.metadata.models.json.MetaFact; |
||||
import de.superx.bianalysis.metadata.models.json.MetaObject; |
||||
import de.superx.bianalysis.metadata.models.yml.MetaYml; |
||||
import de.superx.bianalysis.metadata.models.yml.MetaYmlModel; |
||||
import de.superx.bianalysis.metadata.models.yml.MetaYmlModelColumns; |
||||
import de.superx.util.PathAndFileUtils; |
||||
|
||||
/** |
||||
* Provides functionality for updating the tables in the metadata schema. |
||||
* The tables are updated by reading the metadata information from various |
||||
* metaimport.json files and transforming that information into executable sql. |
||||
* |
||||
* The BIAnalysis Tool uses the tables to read information about the different |
||||
* meta objects and more importantly to figure out their relationships, e.g. what |
||||
* dimension is part of which facttable or which attribute belongs to which |
||||
* dimension. |
||||
* |
||||
* To learn more about the metadata concept for the BIAnalysis Tool see: |
||||
* doc\bi_analysis\report_wizard\metadaten.adoc |
||||
* |
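* <p>A minimal usage sketch (the "hisinone" project name is only an example): |
* <pre> |
* MetadataImporter importer = new MetadataImporter(); |
* importer.deserializeMetadataFromJsonFiles(PathAndFileUtils.getReportGeneratorDir("hisinone")); |
* List&lt;String&gt; upserts = importer.getAllUpsertStrings(true); |
* </pre> |
* </p> |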
||||
*/ |
||||
public final class MetadataImporter { |
||||
|
||||
/** |
||||
* Each file containing metadata information must have the following file suffix. |
||||
*/ |
||||
private static final String METAIMPORT_FILE_SUFFIX = "_metaimport.json"; |
||||
|
||||
protected static final String CONFORMED_DIMENSIONS_FILE_SUFFIX = "conformed_dimensions" + METAIMPORT_FILE_SUFFIX; |
||||
|
||||
/** |
||||
* Holds all MetaImport objects with which this instance was initialized. |
||||
* (One MetaImport object corresponds to exactly one deserialized json file) |
||||
*/ |
||||
private List<MetaJson> metaImports = new ArrayList<>(); |
||||
|
||||
public List<String> errorMessages = new ArrayList<>(); |
||||
|
||||
/** |
||||
* SQL String for deleting from all metadata tables except 'custom' releases. |
||||
*/ |
||||
public static final String TRUNCATE_METADATA_SQL = |
||||
"DELETE FROM metadata.facttable WHERE default_release != 'custom' or default_release is null; " + |
||||
"DELETE FROM metadata.measure WHERE default_release != 'custom' or default_release is null; " + |
||||
"DELETE FROM metadata.measure_filter WHERE default_release != 'custom' or default_release is null; " + |
||||
"DELETE FROM metadata.dimension WHERE default_release != 'custom' or default_release is null; " + |
||||
"DELETE FROM metadata.dimension_attribute WHERE default_release != 'custom' or default_release is null; "; |
||||
|
||||
private static Logger log = Logger.getLogger(MetadataImporter.class); |
||||
|
||||
private boolean shouldReadYMLDoc = true; |
||||
|
||||
private String ymlDir = ""; |
||||
|
||||
public MetadataImporter() {} |
||||
|
||||
public MetadataImporter(String ymlDir) {this.ymlDir = ymlDir;} |
||||
|
||||
/** |
||||
* Calling this method initializes the MetadataImporter by deserializing all unique meta objects |
||||
* from the provided json files. Faulty json files are ignored. |
||||
* |
||||
* @param paths Path(s) to the metadata file(s). A path can point to a directory or a file. |
||||
* Multiple paths and/or directories can be provided. |
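* <p>Example (the paths are illustrative): |
* <pre> |
* importer.deserializeMetadataFromJsonFiles("path/to/metadata_dir", "path/to/xyz_metaimport.json"); |
* </pre> |
* </p> |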
||||
*/ |
||||
public void deserializeMetadataFromJsonFiles(String... paths) { |
||||
|
||||
ObjectMapper mapper = JsonMapper.builder().findAndAddModules().build(); |
||||
List<MetaImportConformedDimensions> conformedDimension = new ArrayList<>(); |
||||
List<MetaDimension> conformedDims = new ArrayList<>(); |
||||
|
||||
for (String path : paths) { |
||||
List<File> metaFiles = readMetaImportFiles(path); |
||||
for (File file : metaFiles) { |
||||
MetaJson meta = null; |
||||
try{ |
||||
if(file.getName().endsWith(CONFORMED_DIMENSIONS_FILE_SUFFIX)) { |
||||
meta = mapper.readValue(file, MetaImportConformedDimensions.class); |
||||
conformedDimension.add((MetaImportConformedDimensions) meta); |
||||
} else { |
||||
meta = mapper.readValue(file, MetaImport.class); |
||||
conformedDims.addAll(((MetaImport) meta).getDimensionsWithoutRefTo()); |
||||
} |
||||
|
||||
} catch(JsonMappingException e) { |
||||
String message = "Could not deserialize metadata from file: " + file.getName() + "\n"; |
||||
message += e.getMessage(); |
||||
errorMessages.add(message); |
||||
} catch(Exception e) { |
||||
errorMessages.add(ExceptionUtils.getFullStackTrace(e)); |
||||
} |
||||
if(meta != null) { |
||||
log.info("Read metadata from file: " + file.getName()); |
||||
meta.setFile(file); |
||||
metaImports.add(meta); |
||||
} |
||||
} |
||||
} |
||||
|
||||
// gather all conformed dimensions |
||||
List<MetaDimension> confDims = new ArrayList<>(); |
||||
confDims.addAll(conformedDims); |
||||
for (MetaImportConformedDimensions conf : conformedDimension) { |
||||
confDims.addAll(conf.conformedDimensions); |
||||
} |
||||
|
||||
// resolve conformed references ('ref_to' attributes) |
||||
for (MetaJson metaJson : metaImports) { |
||||
if (conformedDimension.size() > 0 && metaJson instanceof MetaImport) { |
||||
((MetaImport) metaJson).setConformedDimensions(confDims); |
||||
} |
||||
try { |
||||
metaJson.init(); |
||||
metaJson.setNamespaceToMetaObjects(); |
||||
} catch (Exception e) { |
||||
errorMessages.add(ExceptionUtils.getFullStackTrace(e)); |
||||
} |
||||
} |
||||
|
||||
if(shouldReadYMLDoc) { |
||||
addDescriptionsFromYMLFiles(); |
||||
} |
||||
|
||||
} |
||||
|
||||
public List<String> readStoredReports() { |
||||
List<String> result = new ArrayList<>(); |
||||
try { |
||||
String dir = PathAndFileUtils.getStoredReportDir("hisinone"); |
||||
File[] files = new File(dir).listFiles(); |
||||
if(files == null) { |
||||
return result; |
||||
} |
||||
for (File file : files) { |
||||
try { |
||||
ObjectMapper mapper = JsonMapper.builder().findAndAddModules().build(); |
||||
StoredReport report = mapper.readValue(file, StoredReport.class); |
||||
UpsertStringBuilder builder = new UpsertStringBuilder() |
||||
.forTable("metadata", "rw_report_definitions") |
||||
.withIntCol("id", Integer.valueOf(report.id)) |
||||
.withStringCol("name", report.name) |
||||
.withStringCol("definition", report.definition) |
||||
.withIntCol("show_total_column", Integer.valueOf(report.showTotalColumn)); |
||||
result.add(builder.build(true)); |
||||
} catch (JsonMappingException e) { |
||||
String message = "Could not deserialize stored report from file: " + file.getName() + "\n"; |
||||
message += e.getMessage(); |
||||
errorMessages.add(message); |
||||
} |
||||
} |
||||
// After inserting the stored reports with fixed ids we need to re-sync the |
// id sequence of the rw_report_definitions table. |
||||
if(result.size() != 0) { |
||||
result.add("SELECT setval(pg_get_serial_sequence('metadata.rw_report_definitions', 'id')," |
||||
+ "(SELECT max(id) FROM metadata.rw_report_definitions ));"); |
||||
} |
||||
} catch(Exception e) { |
||||
errorMessages.add("Unable to read stored report:\n"); |
||||
errorMessages.add(ExceptionUtils.getFullStackTrace(e)); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
public void addDescriptionsFromYMLFiles() { |
||||
String dir = ymlDir; |
||||
if(ymlDir == null || ymlDir.isBlank()) { |
||||
dir = PathAndFileUtils.getDbtTransformDirectory("hisinone"); |
||||
} |
||||
HashMap<String, String> map = getMarkdownDefinitions(dir); |
||||
addYMLDescriptionsToMetaObjects(dir, map); |
||||
} |
||||
|
||||
public void addYMLDescriptionsToMetaObjects(String ymlDir, HashMap<String, String> mdDefs){ |
||||
log.info("Adding descriptions from yml files"); |
||||
HashMap<String, String> descriptions = createDescriptions(new File(ymlDir), mdDefs); |
||||
List<MetaObject> objs = getAllMetaObjectsWithConformed(); |
||||
for (MetaObject metaObj : objs ) { |
||||
String docIdentifier = metaObj.getDocIdentifier(); |
||||
if(docIdentifier == null || docIdentifier.isBlank()) { |
||||
continue; |
||||
} |
||||
// only use yml doc if json description does not exist |
||||
if(metaObj.getDescription() == null || metaObj.getDescription().isBlank()) { |
||||
String desc = descriptions.get(docIdentifier); |
||||
if(desc == null) { |
||||
log.warn("Missing yml description for: " + docIdentifier); |
||||
} else { |
||||
metaObj.setDescription(desc); |
||||
if(desc.isBlank()) { |
||||
log.warn("Empty yml description for MetaObject: " + docIdentifier); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
public MetaYml createYMLFileForMetaJson(MetaJson metaJson) { |
||||
|
||||
MetaYml newYml = new MetaYml(); |
||||
List<MetaYmlModel> newYmlModels = new ArrayList<>(); |
||||
newYml.setVersion(1); |
||||
newYml.setModels(newYmlModels); |
||||
|
||||
if(metaJson instanceof MetaImport) { |
||||
MetaImport metaimport = (MetaImport) metaJson; |
||||
for (MetaFact fact : metaimport.facts) { |
||||
MetaYmlModel factModel = new MetaYmlModel(fact.getFacttable(), " "); |
||||
newYmlModels.add(factModel); |
||||
List<MetaYmlModelColumns> factCols = new ArrayList<>(); |
||||
for(MetaDimension dim : fact.getDimensions()) { |
||||
if(dim.getRefTo() == null) { |
||||
factCols.add(new MetaYmlModelColumns(dim.getFactColumn(), " ", "not_null")); |
||||
MetaYmlModel dimModel = new MetaYmlModel(dim.getDimension(), " "); |
||||
newYmlModels.add(dimModel); |
||||
List<MetaYmlModelColumns> dimCols = new ArrayList<>(); |
||||
for(MetaDimensionAttribute attr : dim.getAttributes()) { |
||||
dimCols.add(new MetaYmlModelColumns(attr.getDimColumn(), " ", "not_null")); |
||||
} |
||||
dimModel.setColumns(dimCols); |
||||
} |
||||
} |
||||
factModel.setColumns(factCols); |
||||
} |
||||
} else { |
||||
MetaImportConformedDimensions metaimport = (MetaImportConformedDimensions) metaJson; |
||||
for(MetaDimension dim : metaimport.conformedDimensions) { |
||||
MetaYmlModel dimModel = new MetaYmlModel(dim.getDimension(), " "); |
||||
newYmlModels.add(dimModel); |
||||
List<MetaYmlModelColumns> dimCols = new ArrayList<>(); |
||||
for(MetaDimensionAttribute attr : dim.getAttributes()) { |
||||
dimCols.add(new MetaYmlModelColumns(attr.getDimColumn(), " ", "not_null")); |
||||
} |
||||
dimModel.setColumns(dimCols); |
||||
} |
||||
} |
||||
return newYml; |
||||
} |
||||
|
||||
private HashMap<String, String> createDescriptions(File startDir, HashMap<String, String> mdDefs){ |
||||
HashMap<String, String> result = new HashMap<>(); |
||||
for (MetaYml yml : getDescriptionYMLs(startDir)) { |
||||
for (MetaYmlModel model : yml.getModels()) { |
||||
String modelName = model.getName(); |
||||
String modelDesc = model.getDescription(); |
||||
result.put(modelName, getDescription(modelDesc, mdDefs)); |
||||
for (MetaYmlModelColumns column : model.getColumns()) { |
||||
String colName = column.getName(); |
||||
String colDesc = column.getDescription(); |
||||
result.put(modelName + "." + colName, getDescription(colDesc, mdDefs)); |
||||
} |
||||
} |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
private static String getDescription(String desc, HashMap<String, String> mdDefs) { |
||||
if(desc == null) { |
||||
return ""; |
||||
} |
||||
if(desc.startsWith("{{")) { |
||||
String[] parts = desc.split("\""); |
||||
String docRef = parts[1]; |
||||
return mdDefs.get(docRef); |
||||
} |
||||
return desc; |
||||
} |
||||
|
||||
private List<MetaYml> getDescriptionYMLs(File startDir){ |
||||
List<MetaYml> ymls = new ArrayList<>(); |
||||
List<String> files = getFiles(startDir, "", ".yml"); |
||||
ObjectMapper mapperYml = new ObjectMapper(new YAMLFactory()); |
||||
for (String f : files) { |
||||
File file = new File(startDir + File.separator + f); |
||||
MetaYml doc = null; |
||||
try { |
||||
doc = mapperYml.readValue(file, MetaYml.class); |
||||
} catch (Exception e) { |
||||
String message = "Could not read documentation from file: " + file.getName() + "\n"; |
||||
errorMessages.add(message); |
||||
errorMessages.add(ExceptionUtils.getFullStackTrace(e)); |
||||
} |
||||
if(doc != null) { |
||||
log.info("Read documentation from file: " + file.getName()); |
||||
ymls.add(doc); |
||||
} |
||||
} |
||||
return ymls; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* Gathers all metadata json files. |
||||
* |
||||
* @param path A path to a metadata json file or a directory containing metadata json files. |
||||
* @return A list of files matching the metadata json suffix. |
||||
*/ |
||||
private static List<File> readMetaImportFiles(String path) { |
||||
File metaimportPath = new File(PathAndFileUtils.getDbtJsonPath(path)); |
||||
List<File> metaimportFiles = new ArrayList<>(); |
||||
if (metaimportPath.isDirectory()) { |
||||
metaimportPath.list(new FilenameFilter() { |
||||
@Override |
||||
public boolean accept(File dir, String name) { |
||||
if (name.endsWith(METAIMPORT_FILE_SUFFIX)) { |
||||
File file = new File(dir.getAbsolutePath() + File.separator + name); |
||||
metaimportFiles.add(file); |
||||
return true; |
||||
} |
||||
return false; |
||||
} |
||||
}); |
||||
} else { |
||||
metaimportFiles.add(metaimportPath); |
||||
} |
||||
return metaimportFiles; |
||||
} |
||||
|
||||
private static List<String> getFiles(File startDir, String subDir, String extension) { |
||||
List<String> filtered = new ArrayList<String>(); |
||||
for (File file : startDir.listFiles()) { |
||||
String name = file.getName(); |
||||
if(file.isDirectory()) { |
||||
filtered.addAll(getFiles(file, subDir + File.separator + name, extension)); |
||||
} |
||||
String filename = name.strip().toLowerCase(); |
||||
if(filename.endsWith(extension)) { |
||||
filtered.add(subDir + File.separator + name); |
||||
} |
||||
} |
||||
return filtered; |
||||
} |
||||
|
||||
||||
/** |
||||
* Generates the sql upsert strings for all unique, deserialized MetaObjects. |
||||
* |
||||
* @param hasOnConflictConstruct If set to true generates upsert strings with the postgres-specific "ON CONFLICT" clause. |
||||
* @return All the generated upserts from the metadata files. |
||||
*/ |
||||
public List<String> getAllUpsertStrings(boolean hasOnConflictConstruct) { |
||||
|
||||
List<String> upsertStmts = new ArrayList<>(); |
||||
List<Identifier> ids = new ArrayList<>(); |
||||
|
||||
for (MetaJson meta : metaImports) { |
||||
for(MetaObject obj: meta.getMetaObjects()) { |
||||
Identifier id = obj.getId(); |
||||
if(id == null) { |
||||
String message = String.format("Missing ID for Element '%s' in file: %s.", obj.getCaption(), meta.getFile().getAbsolutePath()); |
||||
errorMessages.add(message); |
||||
continue; |
||||
} |
||||
if(ids.contains(id)) { |
||||
String message = String.format("Duplicate ID '%s'. Ignoring Element '%s'.", obj.getId().composedId, obj.getCaption()); |
||||
errorMessages.add(message); |
||||
continue; |
||||
} |
||||
ids.add(obj.getId()); |
||||
String stmt = obj.getUpsertBuilder().build(hasOnConflictConstruct); |
||||
upsertStmts.add(stmt); |
||||
} |
||||
} |
||||
return upsertStmts; |
||||
} |
||||
|
||||
public void updateMetadataForH2Database(DataSource dataSource) throws Exception { |
||||
String metaFilesDir = String.join(File.separator, new String[] {"test", "resources", "db", "fixtures", "reportwizard", "metadata"}); |
||||
deserializeMetadataFromJsonFiles(metaFilesDir); |
||||
JdbcTemplate jt = new JdbcTemplate(dataSource); |
||||
String upserts = String.join("\n", getAllUpsertStrings(false)); |
||||
jt.execute(upserts); |
||||
} |
||||
|
||||
/** |
||||
* Updates tables in the metadata schema. |
||||
* |
||||
* @param project Name of the project whose metadata files are read (e.g. "hisinone"). |
||||
* @param dataSource The datasource on which the sql is executed. |
||||
* @throws Exception |
||||
*/ |
||||
public void updateMetadataSchema(String project, DataSource dataSource) throws Exception { |
||||
String metaFilesDir = PathAndFileUtils.getReportGeneratorDir(project); |
||||
deserializeMetadataFromJsonFiles(metaFilesDir); |
||||
|
||||
try (Connection con = dataSource.getConnection()) { |
||||
try (Statement st = con.createStatement()) { |
||||
log.info("Update Metadata for BIAnalysis."); |
||||
st.execute(TRUNCATE_METADATA_SQL); |
||||
List<String> upserts = getAllUpsertStrings(true); |
||||
upserts.addAll(readStoredReports()); |
||||
for (String sql : upserts) { |
||||
log.info(sql); |
||||
try (Statement stUpsert = con.createStatement()) { |
||||
stUpsert.execute(sql); |
||||
} catch (Exception e) { |
||||
throw e; |
||||
} |
||||
} |
||||
} |
||||
|
||||
|
||||
// execute sql in "attributes_sql" in metadata files to build the attributes dynamically |
||||
for (MetaJson i : this.metaImports) { |
||||
for (MetaObject obj : i.getMetaObjects()) { |
||||
if (!(obj instanceof MetaDimension)) continue; |
||||
MetaDimension dim = (MetaDimension) obj; |
||||
if (dim.getAttributesSql() == null) continue; |
||||
|
||||
String sqlDone = ""; |
||||
String sql = "select param_val from unload_params where param_id = '" + dim.getAttributesSql() + "';"; |
||||
try (Statement stAttr = con.createStatement(); ResultSet rs = stAttr.executeQuery(sql)) { |
||||
if(rs.next()) { |
||||
sqlDone = rs.getString("param_val"); |
||||
} |
||||
} |
||||
|
||||
try (Statement stAttr = con.createStatement(); ResultSet rs = stAttr.executeQuery(sqlDone)) { |
||||
int numAttributes = 0; |
||||
while (rs.next()) { |
||||
|
||||
MetaDimensionAttribute attribute = new MetaDimensionAttribute(); |
||||
attribute.setDimension(dim); |
||||
attribute.setCaption(rs.getString("caption")); |
||||
attribute.setDimColumn(rs.getString("dim_column")); |
||||
|
||||
// create a new 'on the fly' identifier for the new metadata attribute |
||||
Identifier id = Identifier.getNewIdentifierValue(i.getIds(), dim.getNamespace()); |
||||
Integer val = Integer.valueOf(id.value.intValue() + numAttributes); |
||||
attribute.setId(new Identifier(dim.getNamespace() + ":" +val)); |
||||
numAttributes++; |
||||
|
||||
String stmt = attribute.getUpsertBuilder().build(true); |
||||
try (Statement stUpsert = con.createStatement()) { |
||||
stUpsert.execute(stmt); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
} |
||||
} |
||||
|
||||
public static String writeYmlToString(MetaYml yml) { |
||||
YAMLFactory yf = new YAMLFactory() |
||||
.enable(YAMLGenerator.Feature.MINIMIZE_QUOTES) |
||||
.disable(YAMLGenerator.Feature.WRITE_DOC_START_MARKER); |
||||
ObjectMapper mapper = new ObjectMapper(yf); |
||||
DefaultPrettyPrinter.Indenter indenter = new DefaultIndenter(" ", DefaultIndenter.SYS_LF); |
||||
DefaultPrettyPrinter printer = new DefaultPrettyPrinter(); |
||||
printer.indentObjectsWith(indenter); |
||||
printer.indentArraysWith(indenter); |
||||
try { |
||||
return mapper.writer(printer).writeValueAsString(yml); |
||||
} catch (Exception e) { |
||||
throw new RuntimeException(e); |
||||
} |
||||
} |
||||
|
||||
public String getPrintableErrorMessages() { |
||||
String output = ""; |
||||
if(!errorMessages.isEmpty()) { |
||||
output += "The following errors occurred:\n"; |
||||
for (String message : errorMessages) { |
||||
output += message + "\n"; |
||||
} |
||||
} |
||||
return output; |
||||
} |
||||
|
||||
public HashMap<String, String> getMarkdownDefinitions(String ymlDir) { |
||||
List<String> files = getFiles(new File(ymlDir), "", ".md"); |
||||
HashMap<String, String> map = new HashMap<>(); |
||||
for (String file : files) { |
||||
try { |
||||
try (BufferedReader br = new BufferedReader(new FileReader(ymlDir + File.separator + file))) { |
||||
String line; |
||||
String key = null; |
||||
boolean readHeading = false; |
||||
while ((line = br.readLine()) != null) { |
||||
if (line.startsWith("{% docs ")) { |
||||
key = line.split(" ")[2]; |
||||
map.put(key, ""); |
||||
} else if(key != null && line.startsWith("# ")) { |
||||
readHeading = true; |
||||
} else { |
||||
if(readHeading && !line.isBlank()) { |
||||
map.put(key, line); |
||||
key = null; |
||||
readHeading = false; |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} catch (Exception e) { |
||||
String message = "ERROR getting markdown definitions from file: " + file + "\n"; |
||||
errorMessages.add(message); |
||||
errorMessages.add(ExceptionUtils.getFullStackTrace(e)); |
||||
} |
||||
} |
||||
return map; |
||||
} |
||||
|
||||
public Optional<MetaImport> getMetaImport(String fileName) { |
||||
return metaImports.stream() |
||||
.filter(json -> (json instanceof MetaImport) && json.file.getName().equals(fileName)) |
||||
.map(json -> (MetaImport) json) |
||||
.findFirst(); |
||||
} |
||||
|
||||
public Optional<MetaJson> getMetaJson(String fileName) { |
||||
return metaImports.stream() |
||||
.filter(json -> json.file.getName().equals(fileName)) |
||||
.findFirst(); |
||||
} |
||||
|
||||
public List<MetaImport> getMetaImports() { |
||||
return metaImports.stream() |
||||
.filter(json -> (json instanceof MetaImport)) |
||||
.map(json -> (MetaImport) json) |
||||
.collect(Collectors.toList()); |
||||
} |
||||
|
||||
public List<MetaJson> getMetaJsons() { |
||||
return metaImports.stream().collect(Collectors.toList()); |
||||
} |
||||
|
||||
public List<MetaObject> getAllMetaObjects(){ |
||||
return metaImports.stream() |
||||
.filter(json -> (json instanceof MetaImport)) |
||||
.map(meta -> ((MetaImport) meta).getMetaObjects()) |
||||
.flatMap(List::stream) |
||||
.collect(Collectors.toList()); |
||||
} |
||||
|
||||
public List<MetaObject> getAllMetaObjectsWithConformed(){ |
||||
return metaImports.stream() |
||||
.map(meta -> meta.getMetaObjects()) |
||||
.flatMap(List::stream) |
||||
.collect(Collectors.toList()); |
||||
} |
||||
|
||||
public List<MetaFact> getAllFactTables(){ |
||||
return metaImports.stream() |
||||
.filter(json -> (json instanceof MetaImport)) |
||||
.map(meta -> ((MetaImport) meta).facts) |
||||
.flatMap(List::stream) |
||||
.collect(Collectors.toList()); |
||||
} |
||||
|
||||
public void setShouldReadYMLDoc(boolean shouldReadYMLDoc) { |
||||
this.shouldReadYMLDoc = shouldReadYMLDoc; |
||||
} |
||||
|
||||
|
||||
} |
||||
@@ -0,0 +1,99 @@
|
||||
package de.superx.bianalysis.metadata; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
import java.util.StringJoiner; |
||||
|
||||
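/** |
* Fluent builder for INSERT statements against the metadata schema, optionally |
* with an ON CONFLICT(id) clause; nested builders are appended to the result. |
* Values are embedded as SQL literals, so only trusted metadata values should |
* be passed in. |
* |
* <p>Illustrative usage (column values are invented): |
* <pre> |
* String sql = new UpsertStringBuilder() |
*     .forTable("metadata", "facttable") |
*     .withStringCol("caption", "Studierende") |
*     .withIntCol("sachgebiettid", 16) |
*     .build(true); |
* // INSERT INTO metadata.facttable(caption, sachgebiettid) |
* //   VALUES('Studierende', 16) ON CONFLICT(id) DO NOTHING; |
* </pre> |
* </p> |
*/ |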
public class UpsertStringBuilder { |
||||
|
||||
private StringJoiner values; |
||||
private StringJoiner columns; |
||||
private StringJoiner onConflict; |
||||
private String schema; |
||||
private String tablename; |
||||
|
||||
private List<UpsertStringBuilder> builders = new ArrayList<>(); |
||||
|
||||
public UpsertStringBuilder() { |
||||
values = new StringJoiner(", "); |
||||
columns = new StringJoiner(", "); |
||||
onConflict = new StringJoiner(", "); |
||||
} |
||||
|
||||
public void addUpsertStringBuilder(UpsertStringBuilder builder) { |
||||
this.builders.add(builder); |
||||
} |
||||
|
||||
public UpsertStringBuilder forTable(String schema, String tablename) { |
||||
this.tablename = tablename; |
||||
this.schema = schema; |
||||
return this; |
||||
} |
||||
|
||||
public UpsertStringBuilder withStringCol(String colName, String value) { |
||||
appendToSelection(colName); |
||||
if(value == null) { |
||||
values.add("null"); |
||||
} else { |
||||
values.add("'"+value+"'"); |
||||
} |
||||
return this; |
||||
} |
||||
|
||||
public UpsertStringBuilder withStringCol(String colName, Object value) { |
||||
if(value != null) { |
||||
return this.withStringCol(colName, value.toString()); |
||||
} |
||||
return this.withStringCol(colName, "unknown"); |
||||
} |
||||
|
||||
public UpsertStringBuilder withStringCol(String colName, String value, String defaultVal) { |
||||
if(value != null) { |
||||
return this.withStringCol(colName, value); |
||||
} |
||||
return this.withStringCol(colName, defaultVal); |
||||
} |
||||
|
||||
public UpsertStringBuilder withStringCol(String colName, Object value, Object defaultVal) { |
||||
if(value != null) { |
||||
return this.withStringCol(colName, value); |
||||
} |
||||
return this.withStringCol(colName, defaultVal); |
||||
} |
||||
|
||||
public UpsertStringBuilder withIntCol(String colName, Integer value) { |
||||
appendToSelection(colName); |
||||
values.add(String.valueOf(value)); |
||||
return this; |
||||
} |
||||
|
||||
public UpsertStringBuilder withIdCol(String colName, Identifier id) { |
||||
if(id != null) { |
||||
return this.withStringCol(colName, id.composedId); |
||||
} |
||||
return this.withStringCol(colName, null); |
||||
} |
||||
|
||||
private void appendToSelection(String colName) { |
||||
onConflict.add(String.format("%s = EXCLUDED.%s", colName, colName)); |
||||
columns.add(colName); |
||||
} |
||||
|
||||
public String build(boolean hasOnConflictConstruct) { |
||||
String result = "INSERT INTO %s.%s(%s) VALUES(%s)"; |
||||
result = String.format(result, this.schema, this.tablename, this.columns, this.values); |
||||
if(hasOnConflictConstruct) { |
||||
// TODO: log a message when an id conflict is ignored |
//result += " ON CONFLICT(id) DO UPDATE SET " + this.onConflict; |
||||
result += " ON CONFLICT(id) DO NOTHING"; |
||||
} |
||||
result += ";\n"; |
||||
if(this.builders.size() > 0) { |
||||
for (UpsertStringBuilder upsertStringBuilder : builders) { |
||||
result += upsertStringBuilder.build(hasOnConflictConstruct); |
||||
} |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,269 @@
|
||||
package de.superx.bianalysis.metadata.models.json; |
||||
|
||||
import java.util.ArrayList; |
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.metadata.UpsertStringBuilder; |
||||
|
||||
import java.util.List; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.annotation.JsonInclude; |
||||
import com.fasterxml.jackson.annotation.JsonProperty; |
||||
import com.fasterxml.jackson.annotation.JsonPropertyOrder; |
||||
|
||||
@JsonInclude(Include.NON_DEFAULT) |
||||
@JsonPropertyOrder({ "id", "default_release", "ref_to", "caption", "dimension", "fact_column", "alias", "bridge", "attributes"}) |
||||
public class MetaDimension extends MetaObject { |
||||
|
||||
@JsonProperty("ref_to") |
||||
private String refTo; |
||||
private String dimension; |
||||
@JsonProperty("fact_column") |
||||
private String factColumn; |
||||
private String alias; |
||||
private String view; |
||||
@JsonProperty("id_column") |
||||
private String idColumn; |
||||
private List<MetaDimensionAttribute> attributes; |
||||
|
||||
@JsonProperty("is_hierarchy") |
||||
private boolean isHierarchy; |
||||
@JsonProperty("is_historical") |
||||
private boolean isHistorical; |
||||
|
||||
@JsonProperty("attributes_sql") |
||||
private String attributesSql; |
||||
|
||||
@JsonIgnore |
||||
private MetaFact fact; |
||||
|
||||
// true if the dimension is from the conformed_dimensions_metaimport.json |
||||
@JsonIgnore |
||||
private boolean isConformed = false; |
||||
|
||||
// Set only if isConformed is false: the corresponding dimension from the |
// conformed_dimensions_metaimport.json, referenced via 'ref_to'. |
||||
@JsonIgnore |
||||
private MetaDimension conformedDimension; |
||||
|
||||
public MetaDimension() { |
||||
super("dimension"); |
||||
} |
||||
|
||||
public void setConformedDimension(MetaDimension dimension) { |
||||
this.conformedDimension = dimension; |
||||
} |
||||
|
||||
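/** |
* Builds the upsert for this dimension. Without a conformed reference the |
* dimension's own values are written; with one, blank or unset fields |
* (caption, joincolumn, alias, flags, tablename, id_column) fall back to the |
* referenced conformed dimension and its id is stored in the 'conformed' column. |
*/ |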
@Override |
||||
public UpsertStringBuilder getUpsertBuilder() { |
||||
Identifier factId = (isConformed) ? null : this.fact.id; |
||||
UpsertStringBuilder builder = new UpsertStringBuilder(); |
||||
if(conformedDimension == null) { |
||||
builder = super.getUpsert() |
||||
.withIdCol("facttable_id", factId) |
||||
.withStringCol("joincolumn", this.factColumn) |
||||
.withStringCol("alias", this.alias) |
||||
.withStringCol("is_hierarchy", String.valueOf(this.isHierarchy)) |
||||
.withStringCol("is_historical", String.valueOf(this.isHistorical)) |
||||
//.withStringCol("attributes_sql", this.attributesSql)
|
||||
.withStringCol("tablename", this.dimension) |
||||
.withStringCol("id_column", this.idColumn); |
||||
} else { |
||||
builder = new UpsertStringBuilder() |
||||
.forTable("metadata", this.sourceTable) |
||||
.withStringCol("namespace", this.namespace) |
||||
.withIdCol("id", this.id) |
||||
.withIntCol("default_release", Integer.valueOf(1)); |
||||
builder = builder.withIdCol("facttable_id", factId); |
||||
|
||||
if(this.idColumn != null && !this.idColumn.isBlank()) { |
||||
builder = builder.withStringCol("id_column", idColumn); |
||||
} else { |
||||
builder = builder.withStringCol("id_column", this.conformedDimension.getIdColumn()); |
||||
} |
||||
|
||||
if(this.caption != null && !this.caption.isBlank()) { |
||||
builder = builder.withStringCol("caption", caption); |
||||
} else { |
||||
builder = builder.withStringCol("caption", this.conformedDimension.getCaption()); |
||||
} |
||||
|
||||
if(this.factColumn != null && !this.factColumn.isBlank()) { |
||||
builder = builder.withStringCol("joincolumn", this.factColumn); |
||||
} else { |
||||
builder = builder.withStringCol("joincolumn", this.conformedDimension.getFactColumn()); |
||||
} |
||||
|
||||
if(this.alias != null && !this.alias.isBlank()) { |
||||
builder = builder.withStringCol("alias", this.alias); |
||||
} else { |
||||
builder = builder.withStringCol("alias", this.conformedDimension.getAlias()); |
||||
} |
||||
|
||||
if(this.isHierarchy) { |
||||
builder = builder.withStringCol("is_hierarchy", String.valueOf(isHierarchy)); |
||||
} else { |
||||
builder = builder.withStringCol("is_hierarchy", String.valueOf(conformedDimension.isHierarchy)); |
||||
} |
||||
|
||||
if(this.isHistorical) { |
||||
builder = builder.withStringCol("is_historical", String.valueOf(isHistorical)); |
||||
} else { |
||||
builder = builder.withStringCol("is_historical", String.valueOf(conformedDimension.isHistorical)); |
||||
} |
||||
|
||||
if(this.conformedDimension.getDimension() != null && !this.conformedDimension.getDimension().isBlank()) { |
||||
|
||||
if(view != null && !view.isBlank()) { |
||||
builder = builder.withStringCol("tablename", this.view); |
||||
} else { |
||||
builder = builder.withStringCol("tablename", this.conformedDimension.getDimension()); |
||||
} |
||||
|
||||
} else { |
||||
builder = builder.withStringCol("tablename", this.dimension); |
||||
} |
||||
builder = builder.withIdCol("conformed", this.conformedDimension.id); |
||||
} |
||||
|
||||
if(this.description != null && !this.description.isBlank()) { |
||||
builder = builder.withStringCol("description", this.description); |
||||
} else { |
||||
if(conformedDimension != null) { |
||||
builder = builder.withStringCol("description", conformedDimension.getDescription()); |
||||
} else { |
||||
builder = builder.withStringCol("description", ""); |
||||
} |
||||
} |
||||
return builder; |
||||
} |
||||
|
||||
public String getRefTo() { |
||||
return refTo; |
||||
} |
||||
|
||||
public void setRefTo(String refTo) { |
||||
this.refTo = refTo; |
||||
} |
||||
|
||||
public String getDimension() { |
||||
return dimension; |
||||
} |
||||
|
||||
public void setDimension(String dimension) { |
||||
this.dimension = dimension; |
||||
} |
||||
|
||||
public String getAlias() { |
||||
return alias; |
||||
} |
||||
|
||||
public void setAlias(String alias) { |
||||
this.alias = alias; |
||||
} |
||||
|
||||
public List<MetaDimensionAttribute> getAttributes() { |
||||
if(this.attributes == null) { |
||||
return new ArrayList<>(); |
||||
} |
||||
return attributes; |
||||
} |
||||
|
||||
public void setAttributes(List<MetaDimensionAttribute> attributes) { |
||||
for (MetaDimensionAttribute metaDimensionAttribute : attributes) { |
||||
metaDimensionAttribute.setDimension(this); |
||||
} |
||||
this.attributes = attributes; |
||||
} |
||||
|
||||
public MetaFact getFact() { |
||||
return fact; |
||||
} |
||||
|
||||
public void setFact(MetaFact fact) { |
||||
this.fact = fact; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public boolean isConformed() { |
||||
return isConformed; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public void setConformed(boolean isConformed) { |
||||
this.isConformed = isConformed; |
||||
} |
||||
|
||||
public MetaDimension getConformedDimension() { |
||||
return conformedDimension; |
||||
} |
||||
|
||||
public String getFactColumn() { |
||||
return factColumn; |
||||
} |
||||
|
||||
public void setFactColumn(String factColumn) { |
||||
this.factColumn = factColumn; |
||||
} |
||||
|
||||
public void addAttribute(MetaDimensionAttribute metaDimensionAttribute) { |
||||
if(this.attributes == null) { |
||||
this.attributes = new ArrayList<>(); |
||||
} |
||||
this.attributes.add(metaDimensionAttribute); |
||||
} |
||||
|
||||
@Override |
||||
@JsonIgnore |
||||
public String getDocIdentifier() { |
||||
if(this.conformedDimension != null) { |
||||
return conformedDimension.getDocIdentifier(); |
||||
} |
||||
return this.dimension; |
||||
} |
||||
|
||||
public boolean isHierarchy() { |
||||
return isHierarchy; |
||||
} |
||||
|
||||
public void setHierarchy(boolean isHierarchy) { |
||||
this.isHierarchy = isHierarchy; |
||||
} |
||||
|
||||
public boolean isHistorical() { |
||||
return isHistorical; |
||||
} |
||||
|
||||
public void setHistorical(boolean isHistorical) { |
||||
this.isHistorical = isHistorical; |
||||
} |
||||
|
||||
public String getView() { |
||||
return view; |
||||
} |
||||
|
||||
public void setView(String view) { |
||||
this.view = view; |
||||
} |
||||
|
||||
public String getIdColumn() { |
||||
return idColumn; |
||||
} |
||||
|
||||
public void setIdColumn(String idColumn) { |
||||
this.idColumn = idColumn; |
||||
} |
||||
|
||||
public String getAttributesSql() { |
||||
return attributesSql; |
||||
} |
||||
|
||||
public void setAttributesSql(String attributesSql) { |
||||
this.attributesSql = attributesSql; |
||||
} |
||||
|
||||
|
||||
} |
||||
@@ -0,0 +1,179 @@
|
||||
package de.superx.bianalysis.metadata.models.json; |
||||
|
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.annotation.JsonInclude; |
||||
import com.fasterxml.jackson.annotation.JsonProperty; |
||||
import com.fasterxml.jackson.annotation.JsonPropertyOrder; |
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include; |
||||
|
||||
import de.superx.bianalysis.metadata.UpsertStringBuilder; |
||||
|
||||
@JsonInclude(Include.NON_DEFAULT) |
||||
@JsonPropertyOrder({ "id", "default_release", "ref", "caption", "dim_column"}) |
||||
public class MetaDimensionAttribute extends MetaObject { |
||||
|
||||
@JsonProperty("dim_column") |
||||
private String dimColumn; |
||||
|
||||
@JsonProperty("sort_order_column") |
||||
private String sortOrderColumn; |
||||
|
||||
@JsonProperty("hierarchical_filter") |
||||
private boolean hierarchicalFilter; |
||||
|
||||
@JsonIgnore |
||||
private MetaDimension dimension; |
||||
|
||||
@JsonIgnore |
||||
private MetaDimensionAttribute confDimAttrRef; |
||||
|
||||
@JsonProperty("ref_to") |
||||
private String refTo; |
||||
|
||||
@JsonProperty("filter_selection") |
||||
private String filterSelection; |
||||
|
||||
public MetaDimensionAttribute() { |
||||
super("dimension_attribute"); |
||||
} |
||||
|
||||
public MetaDimensionAttribute(String attrColumn) { |
||||
super("dimension_attribute"); |
||||
this.refTo = attrColumn; |
||||
//this.refTo = dimensionTable + "." + attrColumn; |
||||
} |
||||
|
||||
public void setConformedDimensionAttribute(MetaDimensionAttribute attribute) { |
||||
this.confDimAttrRef = attribute; |
||||
} |
||||
|
||||
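/** |
* Builds the upsert for this attribute. Analogous to MetaDimension: when a |
* conformed attribute is referenced, blank fields (caption, columnname, |
* filter_selection, sort_order_column) fall back to the conformed attribute |
* and its id is written to the 'conformed' column. |
*/ |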
@Override |
||||
public UpsertStringBuilder getUpsertBuilder() { |
||||
UpsertStringBuilder builder = new UpsertStringBuilder(); |
||||
if(confDimAttrRef == null) { |
||||
builder = super.getUpsert() |
||||
.withIdCol("dimension_id", this.dimension.id) |
||||
.withStringCol("columnname", this.dimColumn) |
||||
.withStringCol("sort_order_column", this.sortOrderColumn) |
||||
.withStringCol("filter_selection", this.filterSelection); |
||||
} else { |
||||
builder = new UpsertStringBuilder() |
||||
.forTable("metadata", this.sourceTable) |
||||
.withStringCol("namespace", this.namespace) |
||||
.withIdCol("id", this.id) |
||||
.withIntCol("default_release", Integer.valueOf(1)); |
||||
|
||||
if(getCaption() != null && !getCaption().isBlank()) { |
||||
builder = builder.withStringCol("caption", caption); |
||||
} else { |
||||
builder = builder.withStringCol("caption", confDimAttrRef.getCaption()); |
||||
} |
||||
|
||||
if(getDimColumn() != null && !getDimColumn().isBlank()) { |
||||
builder = builder.withStringCol("columnname", this.dimColumn); |
||||
} else { |
||||
builder = builder.withStringCol("columnname", confDimAttrRef.getDimColumn()); |
||||
} |
||||
|
||||
if(getFilterSelection() != null && !getFilterSelection().isBlank()) { |
||||
builder = builder.withStringCol("filter_selection", this.filterSelection); |
||||
} else { |
||||
builder = builder.withStringCol("filter_selection", confDimAttrRef.getFilterSelection()); |
||||
} |
||||
|
||||
if(getSortOrderColumn() != null && !getSortOrderColumn().isBlank()) { |
||||
builder = builder.withStringCol("sort_order_column", this.sortOrderColumn); |
||||
} else { |
||||
builder = builder.withStringCol("sort_order_column", confDimAttrRef.getSortOrderColumn()); |
||||
} |
||||
|
||||
builder = builder.withIdCol("dimension_id", this.dimension.id); |
||||
builder = builder.withIdCol("conformed", this.confDimAttrRef.id); |
||||
} |
||||
|
||||
if(confDimAttrRef != null && confDimAttrRef.hierarchicalFilter) { |
||||
builder = builder.withStringCol("hierarchical_filter", String.valueOf(confDimAttrRef.hierarchicalFilter)); |
||||
} else { |
||||
builder = builder.withStringCol("hierarchical_filter", String.valueOf(hierarchicalFilter)); |
||||
} |
||||
|
||||
if(this.description != null && !this.description.isBlank()) { |
||||
builder = builder.withStringCol("description", this.description); |
||||
} else { |
||||
if(confDimAttrRef != null) { |
||||
builder = builder.withStringCol("description", confDimAttrRef.getDescription()); |
||||
} else { |
||||
builder = builder.withStringCol("description", ""); |
||||
} |
||||
} |
||||
|
||||
return builder; |
||||
} |
||||
|
||||
public String getDimColumn() { |
||||
return dimColumn; |
||||
} |
||||
|
||||
public void setDimColumn(String dimColumn) { |
||||
this.dimColumn = dimColumn; |
||||
} |
||||
|
||||
public String getSortOrderColumn() { |
||||
return sortOrderColumn; |
||||
} |
||||
|
||||
public void setSortOrderColumn(String sortOrderColumn) { |
||||
this.sortOrderColumn = sortOrderColumn; |
||||
} |
||||
|
||||
public String getFilterSelection() { |
||||
return filterSelection; |
||||
} |
||||
|
||||
public void setFilterSelection(String filterSelection) { |
||||
this.filterSelection = filterSelection; |
||||
} |
||||
|
||||
public MetaDimension getDimension() { |
||||
return dimension; |
||||
} |
||||
|
||||
public void setDimension(MetaDimension dimension) { |
||||
this.dimension = dimension; |
||||
} |
||||
|
||||
public MetaDimensionAttribute getConfDimAttrRef() { |
||||
return confDimAttrRef; |
||||
} |
||||
|
||||
public void setConfDimAttrRef(MetaDimensionAttribute confDimAttrRef) { |
||||
this.confDimAttrRef = confDimAttrRef; |
||||
} |
||||
|
||||
public String getRefTo() { |
||||
return refTo; |
||||
} |
||||
|
||||
public void setRefTo(String refTo) { |
||||
this.refTo = refTo; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
@Override |
||||
public String getDocIdentifier() { |
||||
if(refTo != null && confDimAttrRef != null) { |
||||
return this.confDimAttrRef.getDocIdentifier(); |
||||
} |
||||
return this.dimension.getDocIdentifier()+"."+this.dimColumn; |
||||
} |
||||
|
||||
public boolean isHierarchicalFilter() { |
||||
return hierarchicalFilter; |
||||
} |
||||
|
||||
public void setHierarchicalFilter(boolean hierarchicalFilter) { |
||||
this.hierarchicalFilter = hierarchicalFilter; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,77 @@
|
||||
package de.superx.bianalysis.metadata.models.json; |
||||
|
||||
import java.util.List; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.annotation.JsonPropertyOrder; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.metadata.UpsertStringBuilder; |
||||
|
||||
@JsonPropertyOrder({ "id", "default_release", "caption", "sachgebiettid", "facttable", "conformed_dimensions", "dimensions", "measures" }) |
||||
public class MetaFact extends MetaObject { |
||||
|
||||
private Integer sachgebiettid; |
||||
private String facttable; |
||||
private List<MetaDimension> dimensions; |
||||
private List<MetaMeasure> measures; |
||||
|
||||
public MetaFact() { |
||||
super("facttable"); |
||||
} |
||||
|
||||
@Override |
||||
public UpsertStringBuilder getUpsertBuilder() { |
||||
UpsertStringBuilder builder = super.getUpsert() |
||||
.withIntCol("sachgebiettid", this.sachgebiettid) |
||||
.withStringCol("tablename", this.facttable) |
||||
.withStringCol("description", super.getDescription()); |
||||
|
||||
return builder; |
||||
} |
||||
|
||||
public Integer getSachgebiettid() { |
||||
return sachgebiettid; |
||||
} |
||||
|
||||
public void setSachgebiettid(Integer sachgebiettid) { |
||||
this.sachgebiettid = sachgebiettid; |
||||
} |
||||
|
||||
public String getFacttable() { |
||||
return facttable; |
||||
} |
||||
|
||||
public void setFacttable(String facttable) { |
||||
this.facttable = facttable; |
||||
} |
||||
|
||||
public List<MetaDimension> getDimensions() { |
||||
return dimensions; |
||||
} |
||||
|
||||
public void setDimensions(List<MetaDimension> dimensions) { |
||||
for (MetaDimension metaDimension : dimensions) { |
||||
metaDimension.setFact(this); |
||||
} |
||||
this.dimensions = dimensions; |
||||
} |
||||
|
||||
public List<MetaMeasure> getMeasures() { |
||||
return measures; |
||||
} |
||||
|
||||
public void setMeasures(List<MetaMeasure> measures) { |
||||
for (MetaMeasure metaMeasure : measures) { |
||||
metaMeasure.setFact(this); |
||||
} |
||||
this.measures = measures; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
@Override |
||||
public String getDocIdentifier() { |
||||
return this.facttable; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,82 @@
|
||||
package de.superx.bianalysis.metadata.models.json; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.annotation.JsonPropertyOrder; |
||||
|
||||
import de.superx.bianalysis.metadata.UpsertStringBuilder; |
||||
import de.superx.rest.model.ColumnType; |
||||
|
||||
@JsonPropertyOrder({ "id", "default_release"} ) |
||||
public class MetaMeasure extends MetaObject { |
||||
|
||||
private String factcolumn; |
||||
private String aggregation; |
||||
private ColumnType type; |
||||
private MetaMeasureFilter filter; |
||||
|
||||
@JsonIgnore |
||||
private MetaFact fact; |
||||
|
||||
public MetaMeasure() { |
||||
super("measure"); |
||||
} |
||||
|
||||
@Override |
||||
public UpsertStringBuilder getUpsertBuilder() { |
||||
return super.getUpsert() |
||||
.withIdCol("facttable_id", (this.fact != null) ? this.fact.id : null) |
||||
.withIdCol("measure_filter_id", (this.filter != null) ? this.filter.id : null) |
||||
.withStringCol("columnname", this.factcolumn) |
||||
.withStringCol("aggregation_type", this.aggregation) |
||||
.withStringCol("description", this.description) |
||||
.withStringCol("measure_type", this.type, ColumnType.IntegerColumn); |
||||
} |
||||
|
||||
public String getFactcolumn() { |
||||
return factcolumn; |
||||
} |
||||
|
||||
public void setFactcolumn(String factcolumn) { |
||||
this.factcolumn = factcolumn; |
||||
} |
||||
|
||||
public String getAggregation() { |
||||
return aggregation; |
||||
} |
||||
|
||||
public void setAggregation(String aggregation) { |
||||
this.aggregation = aggregation; |
||||
} |
||||
|
||||
public ColumnType getType() { |
||||
return type; |
||||
} |
||||
|
||||
public void setType(ColumnType type) { |
||||
this.type = type; |
||||
} |
||||
|
||||
public MetaMeasureFilter getFilter() { |
||||
return filter; |
||||
} |
||||
|
||||
public void setFilter(MetaMeasureFilter filter) { |
||||
this.filter = filter; |
||||
} |
||||
|
||||
public MetaFact getFact() { |
||||
return fact; |
||||
} |
||||
|
||||
public void setFact(MetaFact fact) { |
||||
this.fact = fact; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
@Override |
||||
public String getDocIdentifier() { |
||||
return this.factcolumn; |
||||
} |
||||
|
||||
} |
||||
|
||||
@@ -0,0 +1,106 @@
|
||||
package de.superx.bianalysis.metadata.models.json; |
||||
|
||||
import java.util.List; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonFormat; |
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.annotation.JsonPropertyOrder; |
||||
|
||||
import de.superx.bianalysis.metadata.UpsertStringBuilder; |
||||
|
||||
@JsonPropertyOrder({ "id", "default_release"} ) |
||||
public class MetaMeasureFilter extends MetaObject { |
||||
|
||||
private String dimensionRef; |
||||
|
||||
private String factColumnRef; |
||||
|
||||
@JsonFormat(with = JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY) |
||||
private List<String> included; |
||||
|
||||
@JsonFormat(with = JsonFormat.Feature.ACCEPT_SINGLE_VALUE_AS_ARRAY) |
||||
private List<String> excluded; |
||||
|
||||
@JsonIgnore |
||||
private MetaDimensionAttribute attribute; |
||||
|
||||
public MetaMeasureFilter() { |
||||
super("measure_filter"); |
||||
} |
||||
|
||||
@Override |
||||
public UpsertStringBuilder getUpsertBuilder() { |
||||
|
||||
UpsertStringBuilder builder = super.getUpsert(); |
||||
builder.withStringCol("included_values", concatValues(included)); |
||||
builder.withStringCol("excluded_values", concatValues(excluded)); |
||||
|
||||
if(this.dimensionRef != null) { |
||||
builder.withIdCol("dimension_attribute_id", this.attribute.id); |
||||
} else if(this.factColumnRef != null){ |
||||
builder.withStringCol("fact_column_filter", this.factColumnRef); |
||||
} |
||||
|
||||
return builder; |
||||
} |
||||
|
||||
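/** |
* Joins the filter values with doubled single quotes so the list can later be |
* embedded inside another SQL string literal, e.g. ["1", "2"] becomes |
* ''1'', ''2''. Returns null for a missing or empty list. |
*/ |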
private static String concatValues(List<String> values) { |
||||
if (values == null || values.isEmpty()) { |
||||
return null; |
||||
} |
||||
String result = ""; |
||||
int size = values.size(); |
||||
for (int i = 0; i < size - 1; i++) { |
||||
result += "''" + values.get(i) + "'', "; |
||||
} |
||||
result += "''" + values.get(size - 1) + "''"; |
||||
return result; |
||||
} |
||||
|
||||
public String getDimensionRef() { |
||||
return dimensionRef; |
||||
} |
||||
|
||||
public void setDimensionRef(String dimensionRef) { |
||||
this.dimensionRef = dimensionRef; |
||||
} |
||||
|
||||
public List<String> getIncluded() { |
||||
return included; |
||||
} |
||||
|
||||
public void setIncluded(List<String> included) { |
||||
this.included = included; |
||||
} |
||||
|
||||
public List<String> getExcluded() { |
||||
return excluded; |
||||
} |
||||
|
||||
public void setExcluded(List<String> excluded) { |
||||
this.excluded = excluded; |
||||
} |
||||
|
||||
public MetaDimensionAttribute getAttribute() { |
||||
return attribute; |
||||
} |
||||
|
||||
public void setAttribute(MetaDimensionAttribute attribute) { |
||||
this.attribute = attribute; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
@Override |
||||
public String getDocIdentifier() { |
||||
return ""; |
||||
} |
||||
|
||||
public String getFactColumnRef() { |
||||
return factColumnRef; |
||||
} |
||||
|
||||
public void setFactColumnRef(String factColumnRef) { |
||||
this.factColumnRef = factColumnRef; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,112 @@
|
||||
package de.superx.bianalysis.metadata.models.json; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; |
||||
import com.fasterxml.jackson.annotation.JsonInclude; |
||||
import com.fasterxml.jackson.annotation.JsonInclude.Include; |
||||
import com.fasterxml.jackson.annotation.JsonProperty; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.metadata.UpsertStringBuilder; |
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true) |
||||
@JsonInclude(Include.NON_NULL) |
||||
public abstract class MetaObject { |
||||
|
||||
protected Identifier id; |
||||
protected String caption; |
||||
protected String description; |
||||
|
||||
@JsonProperty("default_release") |
||||
protected String defaultRelease; |
||||
|
||||
@JsonIgnore |
||||
protected String sourceTable; |
||||
|
||||
@JsonIgnore |
||||
protected String namespace; |
||||
|
||||
protected MetaObject(String sourceTable) { |
||||
this.sourceTable = sourceTable; |
||||
} |
||||
|
||||
/** |
||||
* Returns the documentation identifier for a specific meta object. |
||||
* The Identifier is used in a *.md file and is referenced in the yml |
||||
* file like the following: '{{ doc("<identifier>") }}'. |
||||
*/ |
||||
@JsonIgnore |
||||
public abstract String getDocIdentifier(); |
||||
|
||||
@JsonIgnore |
||||
public abstract UpsertStringBuilder getUpsertBuilder(); |
||||
|
||||
@JsonIgnore |
||||
protected UpsertStringBuilder getUpsert() { |
||||
return new UpsertStringBuilder() |
||||
.forTable("metadata", this.sourceTable) |
||||
.withStringCol("namespace", this.namespace) |
||||
.withIdCol("id", this.id) |
||||
.withStringCol("default_release", this.defaultRelease) |
||||
.withStringCol("caption", this.caption); |
||||
} |
||||
|
||||
@Override |
||||
public boolean equals(Object obj) { |
||||
if(!(obj instanceof MetaObject)) { |
||||
return false; |
||||
} else if(this.id == null || ((MetaObject)obj).id == null) { |
return false; |
} |
return this.id.equals(((MetaObject)obj).id); |
||||
} |
||||
|
||||
public Identifier getId() { |
||||
return id; |
||||
} |
||||
|
||||
public void setId(Identifier id) { |
||||
this.id = id; |
||||
} |
||||
|
||||
public String getCaption() { |
||||
return caption; |
||||
} |
||||
|
||||
public void setCaption(String caption) { |
||||
this.caption = caption; |
||||
} |
||||
|
||||
public String getSourceTable() { |
||||
return sourceTable; |
||||
} |
||||
|
||||
public void setSourceTable(String sourceTable) { |
||||
this.sourceTable = sourceTable; |
||||
} |
||||
|
||||
public String getNamespace() { |
||||
return namespace; |
||||
} |
||||
|
||||
public void setNamespace(String namespace) { |
||||
this.namespace = namespace; |
||||
} |
||||
|
||||
public String getDescription() { |
||||
return description; |
||||
} |
||||
|
||||
public void setDescription(String description) { |
||||
this.description = description; |
||||
} |
||||
|
||||
public String getDefaultRelease() { |
||||
return defaultRelease; |
||||
} |
||||
|
||||
public void setDefaultRelease(String defaultRelease) { |
||||
this.defaultRelease = defaultRelease; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,32 @@
|
||||
package de.superx.bianalysis.metadata.models.yml; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; |
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true) |
||||
public class MetaYml { |
||||
|
||||
private int version; |
||||
private List<MetaYmlModel> models; |
||||
|
||||
public int getVersion() { |
||||
return version; |
||||
} |
||||
|
||||
public void setVersion(int version) { |
||||
this.version = version; |
||||
} |
||||
|
||||
public List<MetaYmlModel> getModels() { |
||||
if(this.models == null) { |
||||
return new ArrayList<MetaYmlModel>(); |
||||
} |
||||
return models; |
||||
} |
||||
|
||||
public void setModels(List<MetaYmlModel> models) { |
||||
this.models = models; |
||||
} |
||||
} |
||||
@@ -0,0 +1,49 @@
|
||||
package de.superx.bianalysis.metadata.models.yml; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; |
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true) |
||||
public class MetaYmlModel { |
||||
|
||||
private String name; |
||||
private String description; |
||||
private List<MetaYmlModelColumns> columns; |
||||
|
||||
public MetaYmlModel() { } |
||||
|
||||
public MetaYmlModel(String name, String description) { |
||||
super(); |
||||
this.name = name; |
||||
this.description = description; |
||||
} |
||||
|
||||
public String getName() { |
||||
return name; |
||||
} |
||||
|
||||
public void setName(String name) { |
||||
this.name = name; |
||||
} |
||||
|
||||
public String getDescription() { |
||||
return description; |
||||
} |
||||
|
||||
public void setDescription(String description) { |
||||
this.description = description; |
||||
} |
||||
|
||||
public List<MetaYmlModelColumns> getColumns() { |
||||
if(this.columns == null) { |
||||
return new ArrayList<MetaYmlModelColumns>(); |
||||
} |
||||
return columns; |
||||
} |
||||
|
||||
public void setColumns(List<MetaYmlModelColumns> columns) { |
||||
this.columns = columns; |
||||
} |
||||
} |
||||
@@ -0,0 +1,52 @@
|
||||
package de.superx.bianalysis.metadata.models.yml; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnoreProperties; |
||||
|
||||
@JsonIgnoreProperties(ignoreUnknown = true) |
||||
public class MetaYmlModelColumns { |
||||
|
||||
private String name; |
||||
private String description; |
||||
private List<Object> tests; |
||||
|
||||
public MetaYmlModelColumns() {} |
||||
|
||||
public MetaYmlModelColumns(String name, String description, String test) { |
||||
super(); |
||||
this.name = name; |
||||
this.description = description; |
||||
this.tests = new ArrayList<Object>(); |
||||
this.tests.add(test); |
||||
} |
||||
|
||||
public MetaYmlModelColumns(String name, String description) { |
||||
super(); |
||||
this.name = name; |
||||
this.description = description; |
||||
} |
||||
|
||||
public String getName() { |
||||
return name; |
||||
} |
||||
public void setName(String name) { |
||||
this.name = name; |
||||
} |
||||
public String getDescription() { |
||||
return description; |
||||
} |
||||
public void setDescription(String description) { |
||||
this.description = description; |
||||
} |
||||
|
||||
public List<Object> getTests() { |
||||
return tests; |
||||
} |
||||
|
||||
public void setTests(List<Object> tests) { |
||||
this.tests = tests; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,94 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.springframework.data.annotation.Transient; |
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.DimensionDto; |
||||
|
||||
public class Dimension { |
||||
|
||||
private DimensionDto dimensionDto; |
||||
|
||||
@Transient |
||||
public String conformedCaption; |
||||
|
||||
@Transient |
||||
public String conformedDescription; |
||||
|
||||
@Transient |
||||
public List<DimensionAttribute> dimensionAttributes; |
||||
|
||||
public Dimension(DimensionDto dimDto) { |
||||
this.setDimensionDto(dimDto); |
||||
} |
||||
|
||||
public void setDimensionAttributes(List<DimensionAttribute> lda) { |
||||
for (DimensionAttribute dimensionAttribute : lda) { |
||||
dimensionAttribute.setDimensionColumnAlias(dimensionAttribute.getColumnname() + "_" + getId().value); |
||||
} |
||||
this.dimensionAttributes = lda; |
||||
} |
||||
|
||||
public boolean isHidden() { |
||||
if(this.dimensionDto.isHidden == null) { |
||||
return false; |
||||
} |
||||
return this.dimensionDto.isHidden.booleanValue(); |
||||
} |
||||
|
||||
public DimensionDto getDimensionDto() { |
||||
return dimensionDto; |
||||
} |
||||
|
||||
public void setDimensionDto(DimensionDto dimensionDto) { |
||||
this.dimensionDto = dimensionDto; |
||||
} |
||||
|
||||
public Identifier getId() { |
||||
return this.dimensionDto.id; |
||||
} |
||||
|
||||
public String getCaption() { |
||||
return this.dimensionDto.caption; |
||||
} |
||||
|
||||
public Identifier getFactTableId() { |
||||
return this.dimensionDto.factTableId; |
||||
} |
||||
|
||||
public String getTablename() { |
||||
return this.dimensionDto.tablename; |
||||
} |
||||
|
||||
public String getJoincolumn() { |
||||
return this.dimensionDto.joincolumn; |
||||
} |
||||
|
||||
public String getAlias() { |
||||
return this.dimensionDto.alias; |
||||
} |
||||
|
||||
public String getDescription() { |
||||
return this.dimensionDto.description; |
||||
} |
||||
|
||||
public boolean isHierarchy() { |
||||
if(this.dimensionDto.isHierarchy == null) return false; |
||||
return this.dimensionDto.isHierarchy.booleanValue(); |
||||
} |
||||
|
||||
public boolean isHistorical() { |
||||
if(this.dimensionDto.isHistorical == null) return false; |
||||
return this.dimensionDto.isHistorical.booleanValue(); |
||||
} |
||||
|
||||
public String getConformed() { |
||||
return this.dimensionDto.conformed; |
||||
} |
||||
|
||||
public String getIdColumn() { |
||||
return this.dimensionDto.idColumn; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,276 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
import java.util.Comparator; |
||||
import java.util.List; |
||||
import java.util.Objects; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
import com.fasterxml.jackson.annotation.JsonProperty; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.AttributeDto; |
||||
|
||||
public class DimensionAttribute { |
||||
|
||||
public static final List<String> SPECIAL_VALUES = List.of("n. v.", "k.A.", "k.a.", "unbekannt", "Unbekannt", "ungültig", "Ungültig"); |
||||
|
||||
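/** |
* Orders attribute values so that the special placeholder values above come |
* first and all remaining values follow in their natural order, e.g. |
* "unbekannt" sorts before "Aachen". |
*/ |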
public static final Comparator<String> SPECIAL_VALUE_COMPARATOR = (a1, a2) -> { |
||||
if (a1.equals(a2)) { |
||||
return 0; |
||||
} |
||||
// keep a stable, contract-consistent order among the special values themselves |
if (DimensionAttribute.SPECIAL_VALUES.contains(a1) && DimensionAttribute.SPECIAL_VALUES.contains(a2)) { |
return Integer.compare(DimensionAttribute.SPECIAL_VALUES.indexOf(a1), DimensionAttribute.SPECIAL_VALUES.indexOf(a2)); |
} |
if (DimensionAttribute.SPECIAL_VALUES.contains(a1)) { |
||||
return -1; |
||||
} |
||||
if (DimensionAttribute.SPECIAL_VALUES.contains(a2)) { |
||||
return 1; |
||||
} |
||||
return a1.compareTo(a2); |
||||
}; |
||||
|
||||
public static String specialValueListForSql() { |
||||
String result = String.join("', '", SPECIAL_VALUES); |
||||
return "'" + result + "'"; |
||||
} |
||||
|
||||
|
||||
private String conformedCaption; |
||||
|
||||
private String conformedDescription; |
||||
|
||||
@JsonIgnore |
||||
private AttributeDto attributeTable; |
||||
|
||||
@JsonIgnore |
||||
private String dimCaption; |
||||
|
||||
@JsonIgnore |
||||
private String dimId; |
||||
|
||||
@JsonIgnore |
||||
private String dimConformedId; |
||||
|
||||
@JsonIgnore |
||||
private String tablename; |
||||
|
||||
@JsonIgnore |
||||
private String joincolumn; |
||||
|
||||
private boolean isHierarchy; |
||||
|
||||
@JsonIgnore |
||||
private boolean isHistorical; |
||||
|
||||
@JsonIgnore |
||||
private String dimensionTableAlias; |
||||
|
||||
@JsonIgnore |
||||
private String dimensionColumnAlias; |
||||
|
||||
private List<String> dimensionAttributeValues; |
||||
|
||||
@JsonIgnore |
||||
private String dimIdJoinColumn; |
||||
|
||||
public DimensionAttribute() { |
||||
super(); |
||||
} |
||||
|
||||
public DimensionAttribute(AttributeDto attributeTable) { |
||||
this.attributeTable = attributeTable; |
||||
} |
||||
|
||||
public void setDimension(Dimension dim) { |
||||
this.dimCaption = dim.getCaption(); |
||||
this.dimId = dim.getId().composedId; |
||||
this.tablename = dim.getTablename(); |
||||
this.joincolumn = dim.getJoincolumn(); |
||||
this.isHierarchy = dim.isHierarchy(); |
||||
this.isHistorical = dim.isHistorical(); |
||||
this.dimIdJoinColumn = dim.getIdColumn(); |
||||
if(dim.getAlias() != null){ |
||||
this.dimensionTableAlias = dim.getAlias(); |
||||
} else { |
||||
this.dimensionTableAlias = generateDimensionTableAlias(joincolumn); |
||||
} |
||||
this.dimensionColumnAlias = getColumnname() + "_" + getId().value; |
||||
} |
||||
|
||||
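/** |
* Derives the dimension table alias from the fact table's join column by |
* stripping a trailing "_id", e.g. an (invented) join column "geschlecht_id" |
* yields the alias "geschlecht". |
*/ |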
public static String generateDimensionTableAlias(String joincolumn) { |
||||
if (joincolumn != null) { |
||||
return joincolumn.replaceFirst("_id$", ""); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
@Override |
||||
public int hashCode() { |
||||
return Objects.hash(getId()); |
||||
} |
||||
|
||||
@Override |
||||
public boolean equals(Object obj) { |
||||
if (this == obj) return true; |
||||
if (obj == null) return false; |
||||
if (getClass() != obj.getClass()) return false; |
||||
DimensionAttribute other = (DimensionAttribute) obj; |
||||
return Objects.equals(this.getId(), other.getId()); |
||||
} |
||||
|
||||
public String getCaption() { |
||||
return this.attributeTable.caption; |
||||
} |
||||
|
||||
public boolean isHidden() { |
||||
if(this.attributeTable.isHidden == null) { |
||||
return false; |
||||
} |
||||
return this.attributeTable.isHidden.booleanValue(); |
||||
} |
||||
|
||||
public String getColumnname() { |
||||
return this.attributeTable.columnname; |
||||
} |
||||
|
||||
public String getSortOrderColumn() { |
||||
return this.attributeTable.sortOrderColumn; |
||||
} |
||||
|
||||
public String getFilterSelection() { |
||||
return this.attributeTable.filterSelection; |
||||
} |
||||
|
||||
public String getDimId() { |
||||
return dimId; |
||||
} |
||||
|
||||
public void setDimId(String dimId) { |
||||
this.dimId = dimId; |
||||
} |
||||
|
||||
public String getDimConformedId() { |
||||
return dimConformedId; |
||||
} |
||||
|
||||
public void setDimConformedId(String dimConformedId) { |
||||
this.dimConformedId = dimConformedId; |
||||
} |
||||
|
||||
public String getAttrConformedId() { |
||||
return this.attributeTable.attrConformedId; |
||||
} |
||||
|
||||
public Identifier getId() { |
||||
return this.attributeTable.id; |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public String getStringId() { |
||||
return this.attributeTable.id.composedId; |
||||
} |
||||
|
||||
public String getDimCaption() { |
||||
return dimCaption; |
||||
} |
||||
|
||||
public void setDimCaption(String dimCaption) { |
||||
this.dimCaption = dimCaption; |
||||
} |
||||
|
||||
public String getDimensionTableAlias() { |
||||
return dimensionTableAlias; |
||||
} |
||||
|
||||
public void setDimensionTableAlias(String dimensionTableAlias) { |
||||
this.dimensionTableAlias = dimensionTableAlias; |
||||
} |
||||
|
||||
public String getDimensionColumnAlias() { |
||||
return dimensionColumnAlias; |
||||
} |
||||
|
||||
public void setDimensionColumnAlias(String dimensionColumnAlias) { |
||||
this.dimensionColumnAlias = dimensionColumnAlias; |
||||
} |
||||
|
||||
public String getConformedCaption() { |
||||
return conformedCaption; |
||||
} |
||||
|
||||
public void setConformedCaption(String conformedCaption) { |
||||
this.conformedCaption = conformedCaption; |
||||
} |
||||
|
||||
public String getConformedDescription() { |
||||
return conformedDescription; |
||||
} |
||||
|
||||
public void setConformedDescription(String conformedDescription) { |
||||
this.conformedDescription = conformedDescription; |
||||
} |
||||
|
||||
public String getDescription() { |
||||
return this.attributeTable.description; |
||||
} |
||||
|
||||
public Identifier getDimensionId() { |
||||
return this.attributeTable.dimensionId; |
||||
} |
||||
|
||||
public String getTablename() { |
||||
return tablename; |
||||
} |
||||
|
||||
public void setTablename(String tablename) { |
||||
this.tablename = tablename; |
||||
} |
||||
|
||||
public String getJoincolumn() { |
||||
return joincolumn; |
||||
} |
||||
|
||||
public void setJoincolumn(String joincolumn) { |
||||
this.joincolumn = joincolumn; |
||||
} |
||||
|
||||
@JsonProperty(value="isHierarchy") |
||||
public boolean isHierarchy() { |
||||
return isHierarchy; |
||||
} |
||||
|
||||
public boolean isHistorical() { |
||||
return isHistorical; |
||||
} |
||||
|
||||
public void setHierarchy(boolean isHierarchy) { |
||||
this.isHierarchy = isHierarchy; |
||||
} |
||||
|
||||
public void setHistorical(boolean isHistorical) { |
||||
this.isHistorical = isHistorical; |
||||
} |
||||
|
||||
public boolean isHierarchicalFilter() { |
||||
if(this.attributeTable.hierarchicalFilter == null) { |
return false; |
} |
return this.attributeTable.hierarchicalFilter.booleanValue(); |
||||
} |
||||
|
||||
public List<String> getDimensionAttributeValues() { |
||||
return dimensionAttributeValues; |
||||
} |
||||
|
||||
public void setDimensionAttributeValues(List<String> dimensionAttributeValues) { |
||||
this.dimensionAttributeValues = dimensionAttributeValues; |
||||
} |
||||
|
||||
public void setDimIdJoinColumn(String idColumn) { |
||||
this.dimIdJoinColumn = idColumn; |
||||
} |
||||
|
||||
public String getDimIdJoinColumn() { |
||||
return this.dimIdJoinColumn; |
||||
} |
||||
|
||||
public void setAttrConformedId(String stringId) { |
||||
this.attributeTable.attrConformedId = stringId; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,71 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.springframework.data.annotation.Id; |
||||
import org.springframework.data.annotation.Transient; |
||||
import org.springframework.data.relational.core.mapping.Table; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.FactDto; |
||||
import de.superx.jdbc.entity.Sachgebiet; |
||||
|
||||
@Table(value ="metadata\".\"facttable") |
||||
public class FactTable { |
||||
|
||||
private FactDto factDto; |
||||
|
||||
@Transient |
||||
private Sachgebiet sachgebiet; |
||||
|
||||
@Transient |
||||
private List<Dimension> conformedDimensions; |
||||
|
||||
public FactTable() {} |
||||
|
||||
public FactTable(FactDto factDto) { |
||||
this.factDto = factDto; |
||||
} |
||||
|
||||
public Identifier getId() { |
||||
return this.factDto.id; |
||||
} |
||||
|
||||
public String getCaption() { |
||||
if(this.factDto != null) { |
||||
return this.factDto.caption; |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
public int getSachgebiettid() { |
||||
return this.factDto.sachgebiettid.intValue(); |
||||
} |
||||
|
||||
public String getDescription() { |
||||
return this.factDto.description; |
||||
} |
||||
|
||||
public String getTablename() { |
||||
return this.factDto.tablename; |
||||
} |
||||
|
||||
public List<Dimension> getConformedDimensions() { |
||||
return conformedDimensions; |
||||
} |
||||
|
||||
public void setConformedDimensions(List<Dimension> conformedDimensions) { |
||||
this.conformedDimensions = conformedDimensions; |
||||
} |
||||
|
||||
public Sachgebiet getSachgebiet() { |
||||
return sachgebiet; |
||||
} |
||||
|
||||
public void setSachgebiet(Sachgebiet sachgebiet) { |
||||
this.sachgebiet = sachgebiet; |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,89 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
import java.util.List; |
||||
import java.util.StringJoiner; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
|
||||
import de.superx.bianalysis.ReportMetadata; |
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
|
||||
public class Filter { |
||||
|
||||
public Identifier dimensionAttributeId; |
||||
public List<String> filterValues; |
||||
public String columnname; |
||||
public String tablename; |
||||
public String joincolumn; |
||||
public String dimensionTableAlias; |
||||
|
||||
public Filter() { |
||||
super(); |
||||
} |
||||
|
||||
public Filter(List<String> values, Identifier dimAttrId) { |
||||
this.filterValues = values; |
||||
this.dimensionAttributeId = dimAttrId; |
||||
} |
||||
|
||||
public Filter(Filter filter) { |
||||
this.dimensionAttributeId = filter.dimensionAttributeId; |
||||
this.filterValues = filter.filterValues; |
||||
this.columnname = filter.columnname; |
||||
this.tablename = filter.tablename; |
||||
this.joincolumn = filter.joincolumn; |
||||
this.dimensionTableAlias = filter.dimensionTableAlias; |
||||
} |
||||
|
||||
public void setDimensionAttribute(DimensionAttribute attr) { |
||||
this.columnname = attr.getColumnname(); |
||||
} |
||||
|
||||
public void setDimension(Dimension dim) { |
||||
this.tablename = dim.getTablename(); |
||||
this.joincolumn = dim.getJoincolumn(); |
||||
if(dim.getAlias() != null) { |
||||
this.dimensionTableAlias = dim.getAlias(); |
||||
} else { |
||||
this.dimensionTableAlias = joincolumn.replaceFirst("_id$", ""); |
||||
} |
||||
} |
||||
|
||||
public DimensionAttribute getDimAttribute(ReportMetadata reportMetadata) { |
||||
return reportMetadata.getDimAttrById(this.dimensionAttributeId); |
||||
} |
||||
|
||||
@Override |
||||
public String toString() { |
||||
return String.valueOf(this.dimensionAttributeId); |
||||
} |
||||
|
||||
public static Filter findFilterById(List<Filter> filters, Identifier id) { |
||||
return filters |
||||
.stream() |
||||
.filter(f -> f.dimensionAttributeId.equals(id)) |
||||
.findFirst() |
||||
.orElse(null); |
||||
} |
||||
|
||||
@JsonIgnore |
||||
public String getValuesAsString() { |
||||
if(this.filterValues == null || this.filterValues.isEmpty()) { |
||||
return null; |
||||
} |
||||
return String.join(", ", this.filterValues); |
||||
} |
||||
|
||||
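/** |
* Like getValuesAsString(), but wraps each value in single quotes so the |
* result can be used directly in a SQL IN (...) list, e.g. 'a', 'b'. |
*/ |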
@JsonIgnore |
||||
public String getValues() { |
||||
if(this.filterValues == null || this.filterValues.isEmpty()) { |
||||
return null; |
||||
} |
||||
StringJoiner joiner = new StringJoiner(", "); |
||||
for (String value : filterValues) { |
||||
joiner.add("'"+value+"'"); |
||||
} |
||||
return joiner.toString(); |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,89 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
|
||||
import de.superx.rest.model.Item; |
||||
|
||||
public class Info { |
||||
|
||||
public String segmentCaption; |
||||
public String lastUpdateBiad; |
||||
|
||||
|
||||
public List<String> sachgebiete = new ArrayList<String>(); |
||||
public List<InfoItem> facttables = new ArrayList<InfoItem>(); |
||||
public List<InfoItem> measures = new ArrayList<InfoItem>(); |
||||
public List<InfoItem> leftDimensionAttributes = new ArrayList<InfoItem>(); |
||||
public List<InfoItem> topDimensionAttributes = new ArrayList<InfoItem>(); |
||||
public List<String> filter = new ArrayList<String>(); |
||||
public String hideEmptyColumns; |
||||
|
||||
public List<Item> sqlStatements = new ArrayList<Item>(); |
||||
|
||||
public String error; |
||||
|
||||
|
||||
public void addSachgebiet(String sachgebiet) { |
||||
sachgebiete.add(sachgebiet); |
||||
} |
||||
|
||||
public void addFacttable(InfoItem facttable) { |
||||
facttables.add(facttable); |
||||
} |
||||
|
||||
public void setMeasures(List<InfoItem> measures) { |
||||
this.measures = measures; |
||||
} |
||||
|
||||
public void setLeftDimensionAttributes(List<InfoItem> leftDimensionAttributes) { |
||||
this.leftDimensionAttributes = leftDimensionAttributes; |
||||
} |
||||
|
||||
public void setTopDimensionAttributes(List<InfoItem> topDimensionAttributes) { |
||||
this.topDimensionAttributes = topDimensionAttributes; |
||||
} |
||||
|
||||
public void setSachgebiete(List<String> sachgebiete) { |
||||
this.sachgebiete = sachgebiete; |
||||
} |
||||
|
||||
public void setFacttables(List<InfoItem> facttables) { |
||||
this.facttables = facttables; |
||||
} |
||||
|
||||
public void setFilter(List<String> filter) { |
||||
this.filter = filter; |
||||
} |
||||
|
||||
public void setSqlStatements(List<Item> sqlStatements) { |
||||
this.sqlStatements = sqlStatements; |
||||
} |
||||
|
||||
public void setLastUpdateBiad(String lastUpdateBiad) { |
||||
this.lastUpdateBiad = lastUpdateBiad; |
||||
} |
||||
|
||||
public void setErrorMessage(String error) { |
||||
this.error = error; |
||||
} |
||||
|
||||
public void setSegmentCaption(String segmentCaption) { |
||||
this.segmentCaption = segmentCaption; |
||||
} |
||||
|
||||
public void hideEmptyColumns(boolean hideEmptyColumns) { |
||||
if(hideEmptyColumns) { |
||||
this.hideEmptyColumns = "Ja"; |
||||
} else { |
||||
this.hideEmptyColumns = "Nein"; |
||||
} |
||||
} |
||||
|
||||
public void setHideEmptyColumns(String hideEmptyColumns) { |
||||
this.hideEmptyColumns = hideEmptyColumns; |
||||
} |
||||
|
||||
|
||||
} |
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
public class InfoItem { |
||||
|
||||
public String id; |
||||
public String caption; |
||||
public String description; |
||||
|
||||
public InfoItem(String id, String caption, String description) { |
||||
this.id = id; |
||||
this.caption = caption; |
||||
this.description = description; |
||||
} |
||||
|
||||
public InfoItem() { |
||||
super(); |
||||
} |
||||
|
||||
} |
||||
@@ -0,0 +1,130 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
import com.fasterxml.jackson.annotation.JsonIgnore; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.MeasureDto; |
||||
import de.superx.bianalysis.repository.dto.MeasureFilterDto; |
||||
import de.superx.rest.model.ColumnType; |
||||
|
||||
public class Measure { |
||||
|
||||
private MeasureDto measureDto; |
||||
|
||||
@JsonIgnore |
||||
public String filterTablename; |
||||
|
||||
@JsonIgnore |
||||
public String filterJoincolumn; |
||||
|
||||
@JsonIgnore |
||||
public String filterColumnname; |
||||
|
||||
@JsonIgnore |
||||
public String filterInclude; |
||||
|
||||
@JsonIgnore |
||||
public String filterExclude; |
||||
|
||||
@JsonIgnore |
||||
public String filterDimensionTableAlias; |
||||
|
||||
@JsonIgnore |
||||
public String filterCondition; |
||||
|
||||
@JsonIgnore |
||||
public String factColumnFilter; |
||||
|
||||
@JsonIgnore |
||||
public Identifier filterAttributeId; |
||||
|
||||
public Measure() { |
||||
super(); |
||||
} |
||||
|
||||
public Measure(MeasureDto measureDTO) { |
||||
this.measureDto = measureDTO; |
||||
} |
||||
|
||||
public void setMeasureFilterAttributes(MeasureFilterDto filter, DimensionAttribute attribute, Dimension dimension) { |
||||
this.filterInclude = filter.includedValues; |
||||
this.filterExclude = filter.excludedValues; |
||||
this.filterTablename = dimension.getTablename(); |
||||
this.filterJoincolumn = dimension.getJoincolumn(); |
||||
this.filterColumnname = attribute.getColumnname(); |
||||
this.filterAttributeId = attribute.getId(); |
||||
if (dimension.getAlias() != null) { |
||||
this.filterDimensionTableAlias = dimension.getAlias(); |
||||
} else { |
||||
this.filterDimensionTableAlias = generateFilterDimensionTableAlias(filterJoincolumn); |
||||
} |
||||
this.filterCondition = generateFilterCondition(); |
||||
} |
||||
|
||||
public void setFactColumnFilter(MeasureFilterDto filter) { |
||||
this.factColumnFilter = filter.factColumnFilter; |
||||
this.filterInclude = filter.includedValues; |
||||
this.filterExclude = filter.excludedValues; |
||||
this.filterCondition = generateFilterCondition(); |
||||
} |
||||
|
||||
private static String generateFilterDimensionTableAlias(String filterJoincolumn) { |
||||
if (filterJoincolumn != null) { |
||||
return filterJoincolumn.replaceFirst("_id$", ""); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
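/** |
* Builds the measure filter's WHERE fragment from the include/exclude lists, |
* e.g. (invented values) status.status_kz IN (''1'') AND status.status_kz NOT IN (''9''). |
* Returns null when no measure filter id is set. |
*/ |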
private String generateFilterCondition() { |
||||
if (this.measureDto.measureFilterId != null && this.measureDto.measureFilterId.value != null) { |
||||
StringBuilder filterConditionStatement = new StringBuilder(); |
||||
String tableDotColumn = this.filterDimensionTableAlias + "." + this.filterColumnname; |
||||
if(factColumnFilter != null && !factColumnFilter.isBlank()) { |
||||
tableDotColumn = factColumnFilter; |
||||
} |
||||
if (this.filterInclude != null) { |
||||
filterConditionStatement.append(tableDotColumn + " IN (" |
||||
+ this.filterInclude + ")"); |
||||
} |
||||
if (this.filterInclude != null && this.filterExclude != null) { |
||||
filterConditionStatement.append(" AND "); |
||||
} |
||||
if (this.filterExclude != null) { |
||||
filterConditionStatement.append(tableDotColumn |
||||
+ " NOT IN (" + this.filterExclude + ")"); |
||||
} |
||||
return filterConditionStatement.toString(); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
public Identifier getId() { |
||||
return this.measureDto.id; |
||||
} |
||||
|
||||
public String getCaption() { |
||||
return this.measureDto.caption; |
||||
} |
||||
|
||||
public String getColumnname() { |
||||
return this.measureDto.columnname; |
||||
} |
||||
|
||||
public String getDescription() { |
||||
return this.measureDto.description; |
||||
} |
||||
|
||||
public String getAggregationType() { |
||||
return this.measureDto.aggregationType; |
||||
} |
||||
|
||||
public ColumnType getMeasureType() { |
||||
return this.measureDto.measureType; |
||||
} |
||||
|
||||
public Identifier getMeasureFilterId() { |
||||
return this.measureDto.measureFilterId; |
||||
} |
||||
|
||||
|
||||
} |
||||
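The Measure class above assembles its SQL filter condition from the include/exclude lists in generateFilterCondition(). The following is a minimal illustrative sketch (not part of the change set) of that behaviour; the identifier, column name and value lists are made up, and it assumes that new Identifier(...) yields a non-null value field so that the condition is actually generated.

import de.superx.bianalysis.metadata.Identifier;
import de.superx.bianalysis.models.Measure;
import de.superx.bianalysis.repository.dto.MeasureDto;
import de.superx.bianalysis.repository.dto.MeasureFilterDto;

public class MeasureFilterSketch {
    public static void main(String[] args) {
        MeasureDto dto = new MeasureDto();
        dto.measureFilterId = new Identifier("conf:42"); // hypothetical filter id
        Measure measure = new Measure(dto);

        MeasureFilterDto filter = new MeasureFilterDto();
        filter.factColumnFilter = "f.status";   // hypothetical fact column
        filter.includedValues = "'1','2'";
        filter.excludedValues = "'9'";
        measure.setFactColumnFilter(filter);

        // Expected shape (assuming the Identifier above carries a non-null value):
        // f.status IN ('1','2') AND f.status NOT IN ('9')
        System.out.println(measure.filterCondition);
    }
}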
@ -0,0 +1,29 @@
@@ -0,0 +1,29 @@
|
||||
package de.superx.bianalysis.models; |
||||
|
||||
public enum Right { |
||||
|
||||
VIEW_REPORT("RIGHT_CS_BIA_ANALYSIS_VIEW_ANALYSIS_TABLE"), |
||||
CREATE_ANALYSIS("RIGHT_CS_BIA_ANALYSIS_CREATE_ANALYSIS"); |
||||
|
||||
private String string; |
||||
|
||||
Right(String string) { |
||||
this.setString(string); |
||||
} |
||||
|
||||
public String getString() { |
||||
return string; |
||||
} |
||||
|
||||
public void setString(String string) { |
||||
this.string = string; |
||||
} |
||||
|
||||
public static String getPrintableRights(Right... rights) { |
||||
String out = ""; |
||||
for (Right right : rights) { |
||||
out += right.toString(); |
||||
} |
||||
return out; |
||||
} |
||||
} |
||||
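A small sketch of what the Right enum yields at runtime; note that getPrintableRights concatenates the enum constant names (toString) without a separator, not the RIGHT_CS_... keys.

import de.superx.bianalysis.models.Right;

public class RightSketch {
    public static void main(String[] args) {
        System.out.println(Right.VIEW_REPORT.getString());
        // RIGHT_CS_BIA_ANALYSIS_VIEW_ANALYSIS_TABLE
        System.out.println(Right.getPrintableRights(Right.VIEW_REPORT, Right.CREATE_ANALYSIS));
        // VIEW_REPORTCREATE_ANALYSIS (enum names, no delimiter)
    }
}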
@ -0,0 +1,21 @@
@@ -0,0 +1,21 @@
|
||||
package de.superx.bianalysis.models; |
|
||||
public enum RightParam { |
|
||||
TOPIC_AREA("bianalysis.topic_area"), |
TOPIC("bianalysis.topic"); |
|
||||
private String string; |
|
||||
RightParam(String string) { |
this.setString(string); |
} |
|
||||
public String getString() { |
return string; |
} |
|
||||
public void setString(String string) { |
this.string = string; |
} |
} |
||||
@ -0,0 +1,42 @@
@@ -0,0 +1,42 @@
|
||||
package de.superx.bianalysis.repository; |
||||
|
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
|
||||
import org.springframework.data.jdbc.repository.query.Query; |
||||
import org.springframework.data.repository.CrudRepository; |
||||
import org.springframework.data.repository.RepositoryDefinition; |
||||
import org.springframework.data.repository.query.Param; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.AttributeDto; |
||||
import de.superx.jdbc.repository.BiaAdminCrudRepository; |
||||
|
||||
@RepositoryDefinition(domainClass = AttributeDto.class, idClass = Identifier.class) |
||||
public interface DimensionAttributeRepository extends BiaAdminCrudRepository<AttributeDto> { |
||||
|
||||
List<AttributeDto> findByDimensionId(Identifier dimensionId); |
||||
|
||||
Optional<AttributeDto> findById(Identifier id); |
||||
|
||||
@Query( |
||||
"SELECT da.id" |
||||
+ " FROM metadata.dimension_attribute da" |
||||
+ " LEFT JOIN metadata.dimension d" |
||||
+ " ON d.id = da.dimension_id" |
||||
+ " WHERE da.conformed = :confAttrId" |
||||
+ " AND facttable_id = :factId" |
||||
) |
||||
List<Identifier> findAttributesByConformedAttributeAndFactTable(@Param("confAttrId") String confAttrId, @Param("factId") String factId); |
||||
|
||||
@Query( |
||||
"SELECT da.id" |
||||
+ " FROM metadata.dimension_attribute da" |
||||
+ " LEFT JOIN metadata.dimension d" |
||||
+ " ON d.id = da.dimension_id" |
||||
+ " WHERE da.id = :attrId" |
||||
+ " AND d.facttable_id = :factId" |
||||
) |
||||
Identifier findAttributesByIdAndFactTable(@Param("attrId") String confAttrId, @Param("factId") String factId); |
||||
|
||||
} |
||||
@ -0,0 +1,50 @@
@@ -0,0 +1,50 @@
|
||||
package de.superx.bianalysis.repository; |
||||
|
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
|
||||
import org.springframework.data.jdbc.repository.query.Query; |
||||
import org.springframework.data.repository.CrudRepository; |
||||
import org.springframework.data.repository.RepositoryDefinition; |
||||
import org.springframework.data.repository.query.Param; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.DimensionDto; |
||||
import de.superx.jdbc.repository.BiaAdminCrudRepository; |
||||
|
||||
@RepositoryDefinition(domainClass = DimensionDto.class, idClass = Identifier.class) |
||||
public interface DimensionRepository extends BiaAdminCrudRepository<DimensionDto> { |
||||
|
||||
List<DimensionDto> findByFactTableId(Identifier factTableId); |
||||
|
||||
Optional<DimensionDto> findById(Identifier id); |
||||
|
||||
@Override |
||||
List<DimensionDto> findAll(); |
||||
|
||||
@Query( |
||||
" SELECT d.id" |
||||
+ " FROM metadata.dimension d" |
||||
+ " LEFT JOIN metadata.dimension_attribute da" |
||||
+ " ON da.dimension_id = d.id" |
||||
+ " WHERE da.id is null" |
||||
+ " AND d.conformed = :confDim" |
||||
+ " AND d.facttable_id = :factId" |
||||
) |
||||
List<Identifier> getRolePlayingIds(@Param("confDim") String confDim, @Param("factId") String factId); |
||||
|
||||
@Query( |
||||
"SELECT dimension_id " |
||||
+ "FROM metadata.dimension_attribute da " |
||||
+ "WHERE id = :attrId" |
||||
) |
||||
Identifier findDimensionIdForAttribute(@Param("attrId") String attrId); |
||||
|
||||
@Query( |
||||
"SELECT conformed" |
||||
+ " FROM metadata.dimension" |
||||
+ " WHERE facttable_id = :factId" |
||||
+ " AND conformed IS NOT NULL" |
||||
) |
||||
List<Identifier> getUsedConformedDimensionsByFactTable(@Param("factId") String factId); |
||||
} |
||||
@ -0,0 +1,47 @@
@@ -0,0 +1,47 @@
|
||||
package de.superx.bianalysis.repository; |
||||
|
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
import org.springframework.data.jdbc.repository.query.Query; |
||||
import org.springframework.data.repository.CrudRepository; |
||||
import org.springframework.data.repository.RepositoryDefinition; |
||||
import org.springframework.data.repository.query.Param; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.FactDto; |
||||
import de.superx.jdbc.repository.BiaAdminCrudRepository; |
||||
|
||||
@RepositoryDefinition(domainClass = FactDto.class, idClass = Identifier.class) |
||||
public interface FactRepository extends BiaAdminCrudRepository<FactDto> { |
||||
|
||||
@Override |
||||
List<FactDto> findAll(); |
||||
|
||||
Optional<FactDto> findById(Identifier id); |
||||
|
||||
Optional<FactDto> findByTablename(String tablename); |
||||
|
||||
@Query( |
||||
"SELECT COUNT(*) > 0" |
||||
+ " FROM metadata.facttable f" |
||||
+ " LEFT JOIN metadata.measure m" |
||||
+ " ON m.facttable_id = f.id" |
||||
+ " WHERE f.id = :factId" |
||||
+ " AND m.id = :measureId" |
||||
) |
||||
boolean hasFactTableMeasure(@Param("factId") String factId, @Param("measureId") String measureId); |
||||
|
||||
@Query( |
||||
"SELECT f.tablename" |
||||
+ " FROM metadata.dimension_attribute da" |
||||
+ " LEFT JOIN metadata.dimension d" |
||||
+ " ON d.id = da.dimension_id" |
||||
+ " LEFT JOIN metadata.facttable f" |
||||
+ " ON f.id = d.facttable_id" |
||||
+ " WHERE f.id = :factId" |
||||
+ " AND (da.conformed = :attrId OR da.id = :attrId)" |
||||
) |
||||
String getFactTableNameForAttribute(@Param("factId") String factId, @Param("attrId") String attrId); |
||||
|
||||
} |
||||
|
||||
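A hedged usage sketch for FactRepository; the service class, its wiring and the table name and ids passed in are illustrative only, while the repository methods themselves are the ones declared above.

import java.util.Optional;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import de.superx.bianalysis.repository.FactRepository;
import de.superx.bianalysis.repository.dto.FactDto;

@Service
public class FactLookupSketch {

    @Autowired
    FactRepository factRepository;

    public String describe(String factId, String measureId) {
        // findByTablename is a derived query; the table name here is a placeholder
        Optional<FactDto> fact = factRepository.findByTablename("fact_studierende");
        // hasFactTableMeasure runs the @Query defined above
        boolean hasMeasure = factRepository.hasFactTableMeasure(factId, measureId);
        return fact.map(f -> f.caption).orElse("unknown") + " / hasMeasure=" + hasMeasure;
    }
}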
@ -0,0 +1,13 @@
@@ -0,0 +1,13 @@
|
||||
package de.superx.bianalysis.repository; |
|
||||
import org.springframework.data.repository.CrudRepository; |
import org.springframework.data.repository.RepositoryDefinition; |
|
||||
import de.superx.bianalysis.metadata.Identifier; |
import de.superx.bianalysis.repository.dto.MeasureFilterDto; |
import de.superx.jdbc.repository.BiaAdminCrudRepository; |
|
||||
@RepositoryDefinition(domainClass = MeasureFilterDto.class, idClass = Identifier.class) |
public interface MeasureFilterRepository extends BiaAdminCrudRepository<MeasureFilterDto> { |
|
||||
} |
||||
@ -0,0 +1,17 @@
@@ -0,0 +1,17 @@
|
||||
package de.superx.bianalysis.repository; |
||||
|
||||
import java.util.List; |
||||
|
||||
import org.springframework.data.repository.CrudRepository; |
||||
import org.springframework.data.repository.RepositoryDefinition; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.repository.dto.MeasureDto; |
||||
import de.superx.jdbc.repository.BiaAdminCrudRepository; |
||||
|
||||
@RepositoryDefinition(domainClass = MeasureDto.class, idClass = Identifier.class) |
||||
public interface MeasureRepository extends BiaAdminCrudRepository<MeasureDto> { |
||||
|
||||
List<MeasureDto> findByFactTableId(Identifier id); |
||||
|
||||
} |
||||
@ -0,0 +1,23 @@
@@ -0,0 +1,23 @@
|
||||
package de.superx.bianalysis.repository; |
||||
|
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
|
||||
import org.springframework.data.repository.CrudRepository; |
||||
import org.springframework.data.repository.RepositoryDefinition; |
||||
|
||||
import de.superx.bianalysis.StoredReport; |
||||
|
||||
@RepositoryDefinition(domainClass = StoredReport.class, idClass = Integer.class) |
||||
public interface StoredReportRepository extends CrudRepository<StoredReport, Integer> { |
||||
|
||||
Optional<StoredReport> findByName(String name); |
||||
|
||||
Optional<StoredReport> findById(int id); |
||||
|
||||
void deleteById(int id); |
||||
|
||||
@Override |
||||
List<StoredReport> findAll(); |
||||
|
||||
} |
||||
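A short sketch of how StoredReportRepository might be used from a service; the lookup-by-name plus delete flow and the surrounding class are illustrative, only the repository methods are taken from the interface above.

import java.util.Optional;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import de.superx.bianalysis.StoredReport;
import de.superx.bianalysis.repository.StoredReportRepository;

@Service
public class StoredReportCleanupSketch {

    @Autowired
    StoredReportRepository storedReportRepository;

    public void deleteIfPresent(String name) {
        Optional<StoredReport> report = storedReportRepository.findByName(name);
        // StoredReport exposes its id as an int, which matches deleteById(int)
        report.ifPresent(r -> storedReportRepository.deleteById(r.id));
    }
}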
@ -0,0 +1,74 @@
@@ -0,0 +1,74 @@
|
||||
package de.superx.bianalysis.repository.dto; |
||||
|
||||
import org.springframework.data.annotation.Id; |
||||
import org.springframework.data.relational.core.mapping.Column; |
||||
import org.springframework.data.relational.core.mapping.Table; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.jdbc.entity.EntityBase; |
||||
import de.superx.jdbc.model.DynamicFieldType; |
||||
import de.superx.jdbc.model.EntityDescriptor; |
||||
import de.superx.jdbc.model.TableRef; |
||||
import de.superx.rest.model.ColumnType; |
||||
import de.superx.rest.model.FieldType; |
||||
|
||||
@Table(schema = "metadata", value = "dimension_attribute") |
||||
public class AttributeDto extends EntityBase { |
||||
|
||||
@Id |
||||
@DynamicFieldType(label="ID", readOnly = true, visibleInSimplifiedForm = false) |
||||
public Identifier id; |
||||
|
||||
@EntityDescriptor |
||||
@DynamicFieldType(label="Titel") |
||||
public String caption; |
||||
|
||||
@DynamicFieldType(label="Beschreibung", editControlType=FieldType.TextArea) |
||||
public String description; |
||||
|
||||
@DynamicFieldType(label="Dimension", readOnly = true, visibleInSimplifiedForm = false) |
||||
@TableRef(schema = "metadata", table = "dimension", keyField = "id", labelField = "caption") |
||||
@Column(value = "dimension_id") |
||||
public Identifier dimensionId; |
||||
|
||||
@DynamicFieldType(label="Spaltenname", readOnly = true) |
||||
public String columnname; |
||||
|
||||
@DynamicFieldType(label="Sortierspalte", readOnly = true, visibleInSimplifiedForm = false) |
||||
@Column(value = "sort_order_column") |
||||
public String sortOrderColumn; |
||||
|
||||
@DynamicFieldType(label="Filter-Auswahl", visibleInSimplifiedForm = false) |
||||
@Column(value = "filter_selection") |
||||
public String filterSelection; |
||||
|
||||
@DynamicFieldType(label="Hierarchie", editControlType=FieldType.Select, columnType = ColumnType.BooleanColumnBiAnalysis, readOnly = true) |
||||
@Column(value = "hierarchical_filter") |
||||
public Boolean hierarchicalFilter; |
||||
|
||||
@DynamicFieldType(label="Ausgeblendet", editControlType=FieldType.Select, columnType = ColumnType.BooleanColumnBiAnalysis) |
||||
@Column(value = "is_hidden") |
||||
public Boolean isHidden; |
||||
|
||||
@DynamicFieldType(label="Conformed Attribute", readOnly = true, visibleInSimplifiedForm = false) |
||||
@TableRef(schema = "metadata", table = "dimension_attribute", keyField = "id", labelField = "caption") |
||||
@Column(value = "conformed") |
||||
public String attrConformedId; |
||||
|
||||
@DynamicFieldType(label="Auslieferungsversion", visibleInSimplifiedForm = false) |
||||
@Column(value = "default_release") |
||||
public String defaultRelease; |
||||
|
||||
public AttributeDto() {} |
||||
|
||||
@Override |
||||
public boolean canBeCreatedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public boolean canBeDeletedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,79 @@
@@ -0,0 +1,79 @@
|
||||
package de.superx.bianalysis.repository.dto; |
||||
|
||||
import org.springframework.data.annotation.Id; |
||||
import org.springframework.data.relational.core.mapping.Column; |
||||
import org.springframework.data.relational.core.mapping.Table; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.jdbc.entity.EntityBase; |
||||
import de.superx.jdbc.model.DynamicFieldType; |
||||
import de.superx.jdbc.model.EntityDescriptor; |
||||
import de.superx.rest.model.ColumnType; |
||||
import de.superx.rest.model.FieldType; |
||||
import de.superx.jdbc.model.TableRef; |
||||
|
||||
@Table(schema="metadata", value = "dimension") |
||||
public class DimensionDto extends EntityBase{ |
||||
|
||||
@Id |
||||
@DynamicFieldType(label="ID", readOnly = true) |
||||
public Identifier id; |
||||
|
||||
@EntityDescriptor |
||||
@DynamicFieldType(label="Titel") |
||||
public String caption; |
||||
|
||||
@DynamicFieldType(label="Beschreibung", editControlType=FieldType.TextArea) |
||||
public String description; |
||||
|
||||
@DynamicFieldType(label="Faktentabelle", readOnly = true, visibleInSimplifiedForm = false) |
||||
@TableRef(schema = "metadata", table = "facttable", keyField = "id", labelField = "caption") |
||||
@Column(value = "facttable_id") |
||||
public Identifier factTableId; |
||||
|
||||
@DynamicFieldType(label="Tabellenname", readOnly = true) |
||||
public String tablename; |
||||
|
||||
@DynamicFieldType(label="Join-Spalte", readOnly = true, visibleInSimplifiedForm = false) |
||||
public String joincolumn; |
||||
|
||||
@DynamicFieldType(label="Join-Alias", readOnly = true, visibleInSimplifiedForm = false) |
||||
public String alias; |
||||
|
||||
@DynamicFieldType(label="Hierarchie", editControlType=FieldType.Select, columnType = ColumnType.BooleanColumnBiAnalysis, readOnly = true) |
||||
@Column(value = "is_hierarchy") |
||||
public Boolean isHierarchy; |
||||
|
||||
@DynamicFieldType(label="Historisch", editControlType=FieldType.Select, columnType = ColumnType.BooleanColumnBiAnalysis, readOnly = true) |
||||
@Column(value = "is_historical") |
||||
public Boolean isHistorical; |
||||
|
||||
@DynamicFieldType(label="Conformed Dimension", readOnly = true, visibleInSimplifiedForm = false) |
||||
@TableRef(schema = "metadata", table = "dimension", keyField = "id", labelField = "caption") |
||||
public String conformed; |
||||
|
||||
@DynamicFieldType(label="ID Spalte", readOnly = true, visibleInSimplifiedForm = false) |
||||
@Column(value = "id_column") |
||||
public String idColumn; |
||||
|
||||
@DynamicFieldType(label="Auslieferungsversion", visibleInSimplifiedForm = false) |
||||
@Column(value = "default_release") |
||||
public String defaultRelease; |
||||
|
||||
@DynamicFieldType(label="Ausgeblendet", editControlType=FieldType.Select, columnType = ColumnType.BooleanColumnBiAnalysis) |
||||
@Column(value = "is_hidden") |
||||
public Boolean isHidden; |
||||
|
||||
public DimensionDto() {} |
||||
|
||||
@Override |
||||
public boolean canBeCreatedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public boolean canBeDeletedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,50 @@
@@ -0,0 +1,50 @@
|
||||
package de.superx.bianalysis.repository.dto; |
||||
|
||||
import org.springframework.data.annotation.Id; |
||||
import org.springframework.data.relational.core.mapping.Column; |
||||
import org.springframework.data.relational.core.mapping.Table; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.jdbc.entity.EntityBase; |
||||
import de.superx.jdbc.model.DynamicFieldType; |
||||
import de.superx.jdbc.model.EntityDescriptor; |
||||
import de.superx.jdbc.model.TableRef; |
||||
import de.superx.rest.model.FieldType; |
||||
|
||||
@Table(schema = "metadata", value = "facttable") |
||||
public class FactDto extends EntityBase { |
||||
|
||||
@Id |
||||
@DynamicFieldType(label="ID", readOnly = true, visibleInSimplifiedForm = false) |
||||
public Identifier id; |
||||
|
||||
@EntityDescriptor |
||||
@DynamicFieldType(label="Titel") |
||||
public String caption; |
||||
|
||||
@DynamicFieldType(label="Beschreibung", editControlType=FieldType.TextArea) |
||||
public String description; |
||||
|
||||
@DynamicFieldType(label = "Sachgebiet", editControlType = FieldType.Select) |
||||
@TableRef(table = "sachgebiete", keyField = "tid", labelField = "name") |
||||
public Integer sachgebiettid; |
||||
|
||||
@DynamicFieldType(label="Tabellenname", readOnly = true) |
||||
public String tablename; |
||||
|
||||
@DynamicFieldType(label="Auslieferungsversion", visibleInSimplifiedForm = false) |
||||
@Column(value = "default_release") |
||||
public String defaultRelease; |
||||
|
||||
public FactDto() {} |
||||
|
||||
@Override |
||||
public boolean canBeCreatedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public boolean canBeDeletedByUser() { |
||||
return false; |
||||
} |
||||
} |
||||
@ -0,0 +1,66 @@
@@ -0,0 +1,66 @@
|
||||
package de.superx.bianalysis.repository.dto; |
||||
|
||||
import org.springframework.data.annotation.Id; |
||||
import org.springframework.data.relational.core.mapping.Column; |
||||
import org.springframework.data.relational.core.mapping.Table; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.jdbc.entity.EntityBase; |
||||
import de.superx.jdbc.model.DynamicFieldType; |
||||
import de.superx.jdbc.model.EntityDescriptor; |
||||
import de.superx.rest.model.ColumnType; |
||||
import de.superx.rest.model.FieldType; |
||||
import de.superx.jdbc.model.TableRef; |
||||
|
||||
@Table(schema = "metadata", value = "measure") |
||||
public class MeasureDto extends EntityBase { |
||||
|
||||
@Id |
||||
@DynamicFieldType(label="ID", readOnly = true, visibleInSimplifiedForm = false) |
||||
public Identifier id; |
||||
|
||||
@EntityDescriptor |
||||
@DynamicFieldType(label="Titel") |
||||
public String caption; |
||||
|
||||
@DynamicFieldType(label="Beschreibung", editControlType=FieldType.TextArea) |
||||
public String description; |
||||
|
||||
@DynamicFieldType(label="Spaltenname") |
||||
public String columnname; |
||||
|
||||
@DynamicFieldType(label="Faktentabelle", readOnly = true, visibleInSimplifiedForm = false) |
||||
@TableRef(schema = "metadata", table = "facttable", keyField = "id", labelField = "caption") |
||||
@Column(value = "facttable_id") |
||||
public Identifier factTableId; |
||||
|
||||
@DynamicFieldType(label="Filter", readOnly = true, visibleInSimplifiedForm = false) |
||||
@TableRef(schema = "metadata", table = "measure_filter", keyField = "id", labelField = "caption") |
||||
@Column(value = "measure_filter_id") |
||||
public Identifier measureFilterId; |
||||
|
||||
@DynamicFieldType(label="Aggregationstyp") |
||||
@Column(value = "aggregation_type") |
||||
public String aggregationType; |
||||
|
||||
@DynamicFieldType(label="Datentyp", readOnly = true, visibleInSimplifiedForm = false) |
||||
@Column(value = "measure_type") |
||||
public ColumnType measureType; |
||||
|
||||
@DynamicFieldType(label="Auslieferungsversion", visibleInSimplifiedForm = false) |
||||
@Column(value = "default_release") |
||||
public String defaultRelease; |
||||
|
||||
public MeasureDto() {} |
||||
|
||||
@Override |
||||
public boolean canBeCreatedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public boolean canBeDeletedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,61 @@
@@ -0,0 +1,61 @@
|
||||
package de.superx.bianalysis.repository.dto; |
||||
|
||||
import org.springframework.data.annotation.Id; |
||||
import org.springframework.data.relational.core.mapping.Column; |
||||
import org.springframework.data.relational.core.mapping.Table; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.jdbc.entity.EntityBase; |
||||
import de.superx.jdbc.model.DynamicFieldType; |
||||
import de.superx.jdbc.model.EntityDescriptor; |
||||
import de.superx.rest.model.FieldType; |
||||
import de.superx.jdbc.model.TableRef; |
||||
|
||||
@Table(schema = "metadata", value = "measure_filter") |
||||
public class MeasureFilterDto extends EntityBase { |
||||
|
||||
@Id |
||||
@DynamicFieldType(label="ID", readOnly = true, visibleInSimplifiedForm = false) |
||||
public Identifier id; |
||||
|
||||
@EntityDescriptor |
||||
@DynamicFieldType(label="Titel") |
||||
public String caption; |
||||
|
||||
@DynamicFieldType(label="Beschreibung", editControlType=FieldType.TextArea) |
||||
public String description; |
||||
|
||||
@DynamicFieldType(label="Attribute", readOnly = true, visibleInSimplifiedForm = false) |
||||
@TableRef(schema = "metadata", table = "dimension_attribute", keyField = "id", labelField = "caption") |
||||
@Column(value = "dimension_attribute_id") |
||||
public Identifier dimensionAttributeId; |
||||
|
||||
@DynamicFieldType(label="Faktenspalte Filter", readOnly = true) |
||||
@Column(value = "fact_column_filter") |
||||
public String factColumnFilter; |
||||
|
||||
@DynamicFieldType(label="Einbezogene Werte", readOnly = true) |
||||
@Column(value = "included_values") |
||||
public String includedValues; |
||||
|
||||
@DynamicFieldType(label="Ausgeschlossene Werte", readOnly = true) |
||||
@Column(value = "excluded_values") |
||||
public String excludedValues; |
||||
|
||||
@DynamicFieldType(label="Auslieferungsversion", visibleInSimplifiedForm = false) |
||||
@Column(value = "default_release") |
||||
public String defaultRelease; |
||||
|
||||
public MeasureFilterDto() {} |
||||
|
||||
@Override |
||||
public boolean canBeCreatedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
@Override |
||||
public boolean canBeDeletedByUser() { |
||||
return false; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,277 @@
@@ -0,0 +1,277 @@
|
||||
package de.superx.bianalysis.rest; |
||||
|
||||
import java.io.ByteArrayOutputStream; |
||||
import java.text.SimpleDateFormat; |
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
import java.util.Base64; |
||||
import java.util.Date; |
||||
import org.apache.log4j.Logger; |
||||
import org.apache.poi.xssf.usermodel.XSSFWorkbook; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.web.bind.annotation.RequestBody; |
||||
import org.springframework.web.bind.annotation.RequestMapping; |
||||
import org.springframework.web.bind.annotation.RequestMethod; |
||||
import org.springframework.web.bind.annotation.RequestParam; |
||||
import org.springframework.web.bind.annotation.RestController; |
||||
|
||||
import de.superx.bianalysis.ExcelSheetBuilder; |
||||
import de.superx.bianalysis.ReportDefinition; |
||||
import de.superx.bianalysis.StoredReport; |
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.models.Dimension; |
||||
import de.superx.bianalysis.models.DimensionAttribute; |
||||
import de.superx.bianalysis.models.FactTable; |
||||
import de.superx.bianalysis.models.Measure; |
||||
import de.superx.bianalysis.models.Right; |
||||
import de.superx.bianalysis.service.BiAnalysisManager; |
||||
import de.superx.bianalysis.service.BiAnalysisRightService; |
||||
import de.superx.bianalysis.service.DbMetaAdapter; |
||||
import de.superx.common.NotYetImplementedException; |
||||
import de.superx.rest.RestControllerBase; |
||||
import de.superx.rest.model.Download; |
||||
import de.superx.rest.model.Result; |
||||
import de.superx.rest.model.ResultType; |
||||
import de.superx.rest.model.Row; |
||||
|
||||
@RestController |
||||
@RequestMapping("/api/reportwizard") |
||||
public class BiAnalysisApi extends RestControllerBase { |
||||
|
||||
/* Author: Robin Wübbeling |
 * Note: this code was put together quickly as a rough first draft |
 * TODO: filters |
 * TODO: calculated measures |
 * TODO: measure filters currently only possible via a single dimension attribute |
 * TODO: build the strings more cleanly with placeholders |
 */ |
||||
|
||||
static Logger logger = Logger.getLogger(BiAnalysisApi.class); |
||||
|
||||
@Autowired |
||||
DbMetaAdapter dbAdapter; |
||||
|
||||
@Autowired |
||||
BiAnalysisRightService rightsService; |
||||
|
||||
@Autowired |
||||
BiAnalysisManager biAnalysisManager; |
||||
|
||||
@Override |
||||
protected Logger getLogger() { |
||||
return logger; |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/facttables") |
||||
public List<FactTable> listFactTables() throws NotYetImplementedException { |
||||
List<Integer> sachgebiete = rightsService.getSachgebiete(Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
List<Identifier> factTables = rightsService.getFactTables(Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
List<FactTable> facts = dbAdapter.getFactTables(sachgebiete, factTables); |
||||
return facts; |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/dimensions") |
||||
public List<Dimension> listDimensions(@RequestParam(value = "facttable_id") String facttable_id) { |
||||
int sachgebietTid = dbAdapter.getSachgebietByFactTableId(facttable_id); |
||||
rightsService.checkSachgebiet(sachgebietTid, Right.CREATE_ANALYSIS); |
||||
rightsService.checkFactTable(new Identifier(facttable_id), Right.CREATE_ANALYSIS); |
||||
return dbAdapter.getDimensions(new Identifier(facttable_id)); |
||||
} |
||||
|
||||
// TODO: make it clear from the naming that this may be a reduced list
|
// granted vs. allowed
|
// TODO: switch to camelCase
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/dimensionAttributeValues") |
||||
public List<String> listAttributeValues (@RequestParam(value = "attribute_id") List<Identifier> attribute_id, @RequestParam(value = "facts") List<Identifier> facts) { |
||||
List<Integer> tids = rightsService.getSachgebiete(Right.CREATE_ANALYSIS); |
||||
List<Identifier> factTables = rightsService.getFactTables(Right.CREATE_ANALYSIS); |
||||
List<DimensionAttribute> attributes = dbAdapter.getAllowedDimensionAttributes(attribute_id, tids, factTables); |
||||
return dbAdapter.getDimensionAttributeValues(attributes, facts); |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/dimensionAttributeValuesHierarchy") |
||||
public List<List<Object>> listAttributeValuesHierarchy(@RequestParam(value = "attribute_id") String attribute_id) { |
||||
rightsService.checkCreateRights(); |
||||
return dbAdapter.getDimensionAttributeValuesHierarchy(new Identifier(attribute_id)); |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/measures") |
||||
public List<Measure> listMeasures(@RequestParam(value = "facttable_id") String facttable_id) { |
||||
rightsService.checkSachgebiet(dbAdapter.getSachgebietByFactTableId(facttable_id), Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
rightsService.checkFactTable(new Identifier(facttable_id), Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
return dbAdapter.getMeasures(new Identifier(facttable_id)); |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/findReportDefinition") |
||||
public List<StoredReport> findReportDefinition( |
||||
@RequestParam(value = "title") Optional<String> title, |
||||
@RequestParam(value = "sach") Optional<Integer> sach, |
||||
@RequestParam(value = "facts") Optional<List<String>> facts) { |
||||
|
||||
List<Integer> allowedSachgebiete = rightsService.getSachgebiete(Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
List<Identifier> allowedFacts = rightsService.getFactTables(Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
|
||||
List<StoredReport> result = new ArrayList<>(); |
||||
for (StoredReport report : dbAdapter.findAllStoredReports()) { |
||||
|
||||
List<Integer> sachgebieteOfReport = dbAdapter.getSachgebieteForReport(report.reportDefinition); |
||||
if (!allowedSachgebiete.isEmpty() && !allowedSachgebiete.containsAll(sachgebieteOfReport)) { |
||||
continue; |
||||
} |
||||
|
||||
if (title.isPresent()) { |
||||
if (!report.name.toLowerCase().contains(title.get().toLowerCase())) { |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
if (sach.isPresent()) { |
||||
if (!sachgebieteOfReport.contains(sach.get())) { |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
if(allowedFacts != null && allowedFacts.size() > 0) { |
||||
boolean isFactAllowed = true; |
||||
for (Identifier reportFactId : report.reportDefinition.factTableIds) { |
||||
if(!allowedFacts.isEmpty() && !allowedFacts.contains(reportFactId)) { |
||||
isFactAllowed = false; |
||||
} |
||||
} |
||||
if (!isFactAllowed) { |
||||
continue; |
||||
} |
||||
} |
||||
|
||||
if (facts.isPresent()) { |
||||
boolean isFactMissing = false; |
||||
for (Identifier reportFactId : report.reportDefinition.factTableIds) { |
||||
if (!facts.get().contains(reportFactId.composedId)) { |
||||
isFactMissing = true; |
||||
break; |
||||
} |
||||
} |
||||
if (isFactMissing) { |
||||
continue; |
||||
} |
||||
} |
||||
result.add(report); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/getStoredReport") |
||||
public StoredReport getStoredReport(@RequestParam(value = "id") int id) { |
||||
Optional<StoredReport> storedReportOpt = dbAdapter.findById(id); |
||||
if(storedReportOpt.isPresent()) { |
||||
StoredReport storedReport = storedReportOpt.get(); |
||||
checkCreateOrViewRightForFactTables(storedReport.reportDefinition.factTableIds); |
||||
try { |
||||
storedReport.exportedResult = biAnalysisManager.createResult(storedReport.reportDefinition, dbAdapter); |
||||
} catch (Exception e) { |
||||
logger.error("Couldn't create report", e); |
||||
e.printStackTrace(); |
||||
} |
||||
|
||||
storedReport.isReadOnly = Boolean.valueOf(!this.rightsService.isCreateRight()); |
||||
return storedReport; |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.POST, path = "/report") |
||||
public Result getReport(@RequestBody final ReportDefinition reportDefinition) throws Exception { |
||||
List<Identifier> factTableIds = reportDefinition.factTableIds; |
||||
checkCreateRightForFactTables(factTableIds); |
||||
return biAnalysisManager.createResult(reportDefinition, dbAdapter); |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.POST, path = "/persistReportDefinition") |
||||
public int persistReportDefinition(@RequestBody final StoredReport storedReport) throws Exception { |
||||
StoredReport.setReportDefinitionJson(storedReport); |
||||
List<Identifier> factTableIds = storedReport.reportDefinition.factTableIds; |
||||
checkCreateRightForFactTables(factTableIds); |
||||
return dbAdapter.saveReportDefinition(storedReport); |
||||
} |
||||
|
||||
@RequestMapping(value = "/report/download", method = RequestMethod.POST) |
||||
public Download getFile(@RequestBody final StoredReport storedReport) throws Exception { |
||||
checkCreateOrViewRightForFactTables(storedReport.reportDefinition.factTableIds); |
||||
Date date = new Date(); |
||||
String fileName = "BI-Analyse_"; |
||||
if(storedReport.id != 0) { |
||||
fileName += dbAdapter.findById(storedReport.id).get().name + "_"; |
||||
if(fileName.length() > 206) { |
||||
fileName = fileName.substring(0, 160); |
||||
} |
||||
fileName = fileName.replaceAll("[^a-zA-Z0-9äöüÄÖÜß_]+", "_"); |
||||
} |
||||
fileName += new SimpleDateFormat("yyyyMMdd_HHmmss").format(date); |
||||
if(!storedReport.exportedResult.resultType.equals(ResultType.FlatTable)) { |
||||
Row totalRow = storedReport.exportedResult.getTotalRow(); |
||||
storedReport.exportedResult.rows = (BiAnalysisManager.hierarchyToRows(storedReport.hierarchy)); |
||||
storedReport.exportedResult.rows.add(totalRow); |
||||
} |
||||
XSSFWorkbook workbook = new ExcelSheetBuilder(storedReport.exportedResult) |
||||
.withFileName(fileName) |
||||
.withReportName(storedReport.name) |
||||
.withDescription(storedReport.description) |
||||
.withDate(date) |
||||
.build(); |
||||
ByteArrayOutputStream bos = new ByteArrayOutputStream(); |
||||
workbook.write(bos); |
||||
String base64String = Base64.getEncoder().encodeToString(bos.toByteArray()); |
||||
String contentType = "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"; |
||||
return new Download(fileName, contentType, base64String); |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.POST, path = "/deleteReportDefinition") |
||||
public boolean deleteReportDefinition(@RequestBody final int id) throws Exception { |
||||
try { |
||||
Optional<StoredReport> reportOpt = dbAdapter.findById(id); |
||||
if(reportOpt.isEmpty()) { |
||||
throw new Exception("FEHLER: Berichtskonfiguration konnte nicht gefunden werden."); |
||||
} |
||||
checkCreateRightForFactTables(reportOpt.get().reportDefinition.factTableIds); |
||||
dbAdapter.deleteById(id); |
||||
return true; |
||||
} catch(Exception e) { |
||||
throw new Exception("FEHLER: Berichtskonfiguration konnte nicht gelöscht werden.", e); |
||||
} |
||||
} |
||||
|
||||
@RequestMapping(method = RequestMethod.GET, path = "/reportDefinitions") |
||||
public List<StoredReport> listReportDefinitions() throws Exception { |
||||
rightsService.checkCreateOrViewRights(); |
||||
List<StoredReport> storedReports = null; |
||||
try { |
||||
storedReports = dbAdapter.findAllStoredReports(); |
||||
// TODO: clarify with Marnie whether this is needed for overwriting
|
||||
} catch (Exception e) { |
||||
e.printStackTrace(); |
||||
if (e.getCause().getMessage().contains("FEHLER: Relation »metadata.rw_report_definitions« existiert nicht")) { |
||||
throw new NotYetImplementedException("Bitte installieren Sie zuerst die Komponente 'BI-Analyse-Daten' und führen Sie anschließend den Konnektor aus."); |
||||
} |
||||
throw e; |
||||
} |
||||
return storedReports; |
||||
} |
||||
|
||||
private void checkCreateOrViewRightForFactTables(List<Identifier> factTableIds) { |
||||
for (Identifier factId : factTableIds) { |
||||
int sachgebiet = dbAdapter.getSachgebietByFactTableId(factId.composedId); |
||||
rightsService.checkSachgebiet(sachgebiet, Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
rightsService.checkFactTable(factId, Right.CREATE_ANALYSIS, Right.VIEW_REPORT); |
||||
} |
||||
} |
||||
|
||||
private void checkCreateRightForFactTables(List<Identifier> factTableIds) { |
||||
for (Identifier factId : factTableIds) { |
||||
int sachgebiet = dbAdapter.getSachgebietByFactTableId(factId.composedId); |
||||
rightsService.checkSachgebiet(sachgebiet, Right.CREATE_ANALYSIS); |
||||
rightsService.checkFactTable(factId, Right.CREATE_ANALYSIS); |
||||
} |
||||
} |
||||
|
||||
} |
||||
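An illustrative client-side sketch of the report wizard endpoints above; host, port and the fact table id are placeholders, authentication (which the rights checks require) is omitted, and the raw JSON is fetched as a String to avoid assumptions about the serialised shape.

import org.springframework.web.client.RestTemplate;

public class ReportWizardClientSketch {
    public static void main(String[] args) {
        RestTemplate rest = new RestTemplate();
        String base = "http://localhost:8080/api/reportwizard"; // placeholder host/port

        // GET /facttables -> fact tables the current user is allowed to analyse
        String factTables = rest.getForObject(base + "/facttables", String.class);

        // GET /measures?facttable_id=... -> measures of one fact table (id is hypothetical)
        String measures = rest.getForObject(base + "/measures?facttable_id={id}",
                String.class, "fact:studierende");

        System.out.println(factTables);
        System.out.println(measures);
    }
}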
@ -0,0 +1,141 @@
@@ -0,0 +1,141 @@
|
||||
package de.superx.bianalysis.service; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
|
||||
import org.apache.log4j.Logger; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import de.superx.bianalysis.ColumnElement; |
||||
import de.superx.bianalysis.ColumnElementBuilder; |
||||
import de.superx.bianalysis.ReportDefinition; |
||||
import de.superx.bianalysis.ReportMetadata; |
||||
import de.superx.bianalysis.ResultBuilder; |
||||
import de.superx.bianalysis.ResultMerger; |
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.sqlgeneration.SQLGenerator; |
||||
import de.superx.bianalysis.sqlgeneration.SQLGeneratorTotals; |
||||
import de.superx.common.NotYetImplementedException; |
||||
import de.superx.rest.model.Item; |
||||
import de.superx.rest.model.Result; |
||||
import de.superx.rest.model.Row; |
||||
import de.superx.rest.model.TreeNode; |
||||
|
||||
@Service |
||||
public class BiAnalysisManager { |
||||
|
||||
static Logger logger = Logger.getLogger(BiAnalysisManager.class); |
||||
|
||||
@Autowired |
||||
BiAnalysisRightService biAnalysisRightService; |
||||
|
||||
public Result createResult(ReportDefinition reportDefinition, DbMetaAdapter dbAdapter) throws Exception { |
||||
|
||||
List<Result> results = new ArrayList<>(); |
||||
ResultMerger resultMerger = new ResultMerger(dbAdapter); |
||||
|
||||
for (Identifier factTableId : reportDefinition.factTableIds) { |
||||
ReportDefinition definition = resultMerger.createFactTableSpecificReportDefinition(reportDefinition, factTableId); |
||||
if(definition.leftDimensionAttributeIds.isEmpty() || |
||||
definition.measureIds.isEmpty()) { |
||||
continue; |
||||
} |
||||
try { |
||||
biAnalysisRightService.checkCreateOrViewRights(); |
||||
ReportMetadata metadata = new ReportMetadata(definition, factTableId, dbAdapter); |
||||
checkColLimit(reportDefinition, dbAdapter, metadata); |
||||
Result reportSegment = getReportData(metadata, dbAdapter); |
||||
results.add(reportSegment); |
||||
} catch (Exception e) { |
||||
logger.error("Couldn't create report", e); |
||||
throw e; |
||||
} |
||||
} |
||||
Result result; |
||||
if(reportDefinition.factTableIds.size() > 1) { |
||||
result = resultMerger.buildMergedReport(reportDefinition, results); |
||||
} else { |
||||
result = results.get(0); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
private static void checkColLimit(ReportDefinition reportDefinition, DbMetaAdapter dbAdapter, ReportMetadata metadata) throws NotYetImplementedException { |
||||
final int POSTGRES_MAX_COL_LIMIT = 1664; |
||||
int resultCols = dbAdapter.getColNumbers(metadata.topDimensionAttributes, metadata.filters); |
||||
resultCols *= reportDefinition.measureIds.size(); |
||||
if(resultCols > POSTGRES_MAX_COL_LIMIT - 1) { |
||||
throw new NotYetImplementedException("FEHLER: Ihre Anfrage überschreitet das Spaltenlimit. " |
||||
+ "Bitte wählen Sie eine andere Kombination an Attributen."); |
||||
} |
||||
} |
||||
|
||||
public static String getSqlStatement(ReportDefinition definition, DbMetaAdapter dbAdapter) { |
||||
String sqlStatement = ""; |
||||
ReportMetadata reportMetadata = definition.getReportMetadata(dbAdapter, definition.factTableIds.get(0)); |
||||
List<ColumnElement> columnElements = ColumnElementBuilder.buildColumnElements(reportMetadata); |
||||
SQLGenerator sqlGenerator = new SQLGenerator(reportMetadata, columnElements); |
||||
sqlStatement = sqlGenerator.buildSqlStatement(); |
||||
return sqlStatement; |
||||
} |
||||
|
||||
private Result getReportData(ReportMetadata metadata, DbMetaAdapter dbAdapter) throws Exception { |
||||
List<ColumnElement> columnElements = ColumnElementBuilder.buildColumnElements(metadata); |
||||
|
||||
List<Item> sqlStatements = new ArrayList<>(); |
||||
String reportSQL = new SQLGenerator(metadata, columnElements).buildFormattedSqlStatement(); |
||||
String totalsColumnSQL = SQLGeneratorTotals.generateTotalsColumnSQL(metadata); |
||||
sqlStatements.add(new Item("noAggregatesSQL", reportSQL)); |
||||
sqlStatements.add(new Item("totalsColumnSQL", totalsColumnSQL)); |
||||
|
||||
ResultBuilder resultBuilder = new ResultBuilder(dbAdapter.getDataSource()); |
||||
resultBuilder.setColumnElements(columnElements); |
||||
resultBuilder.setReportMetadata(metadata); |
||||
|
||||
Result report = resultBuilder.buildReport(sqlStatements, biAnalysisRightService.isCreateRight()); |
||||
return report; |
||||
} |
||||
|
||||
public static List<Row> hierarchyToRows(ArrayList<TreeNode> hierarchy) { |
||||
List<Row> rows = new ArrayList<Row>(); |
||||
for (TreeNode treeNode : hierarchy) { |
||||
nodeToRow(treeNode, rows); |
||||
} |
||||
return rows; |
||||
} |
||||
|
||||
private static List<Row> nodeToRow(TreeNode<Map<String, Object>> treeNode, List<Row> rows) { |
||||
splitHierarchyColumn(treeNode); |
||||
rows.add(new Row(treeNode.data, true)); |
||||
for (TreeNode child: treeNode.children) { |
||||
List<Row> childRows = nodeToRow(child, rows); |
||||
for (Row row : childRows) { |
||||
if (!rows.contains(row)){ |
||||
rows.add(row); |
||||
} |
||||
} |
||||
} |
||||
return rows; |
||||
} |
||||
|
||||
// In the TreeNode the hierarchy is represented in a single column; it has to be split back into the original columns
|
||||
private static void splitHierarchyColumn(TreeNode<Map<String, Object>> node) { |
||||
String realColumn = node.data.get("column").toString(); |
||||
if( realColumn.contains(" (Ebene ")) { |
||||
String[] splittedString = realColumn.split(" \\(Ebene "); |
||||
String mainColumn = splittedString[0]; |
||||
node.data.put(realColumn, node.data.get(mainColumn)); |
||||
node.data.put(mainColumn, ""); |
||||
} |
||||
} |
||||
|
||||
public static String getTotalsColumnSqlStatement(ReportDefinition definition, DbMetaAdapter dbAdapter) { |
||||
String sqlStatement = ""; |
||||
ReportMetadata metadata = new ReportMetadata(definition, definition.factTableIds.get(0), dbAdapter); |
||||
sqlStatement = SQLGeneratorTotals.generateTotalsColumnSQL(metadata); |
||||
return sqlStatement; |
||||
} |
||||
|
||||
} |
||||
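A worked example of the column-limit check in checkColLimit: PostgreSQL caps a result at 1664 columns, and the check multiplies the number of columns produced by the top dimension attributes with the number of selected measures. The counts below are made up.

public class ColumnLimitSketch {
    public static void main(String[] args) {
        final int POSTGRES_MAX_COL_LIMIT = 1664;
        int topDimensionColumns = 400; // columns spanned by the top attributes (made-up)
        int measureCount = 5;          // selected measures (made-up)
        int resultCols = topDimensionColumns * measureCount; // 2000
        boolean exceedsLimit = resultCols > POSTGRES_MAX_COL_LIMIT - 1;
        System.out.println("columns=" + resultCols + ", exceedsLimit=" + exceedsLimit); // exceedsLimit=true
    }
}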
@ -0,0 +1,135 @@
@@ -0,0 +1,135 @@
|
||||
package de.superx.bianalysis.service; |
||||
|
||||
import java.util.ArrayList; |
||||
import java.util.List; |
||||
import java.util.Map; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import org.apache.commons.lang3.StringUtils; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.models.Right; |
||||
import de.superx.bianalysis.models.RightParam; |
||||
import de.superx.common.AccessDeniedException; |
||||
import de.superx.common.SxUser; |
||||
import de.superx.spring.service.UserService; |
||||
|
||||
@Service() |
||||
public class BiAnalysisRightService { |
||||
|
||||
@Autowired |
||||
DbMetaAdapter dbAdapter; |
||||
|
||||
@Autowired |
||||
UserService userService; |
||||
|
||||
public void checkCreateRights() { |
||||
SxUser user = (SxUser) userService.currentUserDetails(); |
||||
if ( user != null && !user.getHis1Rights().contains("RIGHT_CS_BIA_ANALYSIS_CREATE_ANALYSIS") ) { |
||||
throw new AccessDeniedException("No right to access Create functions"); |
||||
} |
||||
} |
||||
|
||||
public boolean isCreateRight() { |
||||
SxUser user = (SxUser) userService.currentUserDetails(); |
||||
if ( user != null && !user.getHis1Rights().contains("RIGHT_CS_BIA_ANALYSIS_CREATE_ANALYSIS") ) { |
||||
return false; |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
public void checkCreateOrViewRights() { |
||||
SxUser user = (SxUser) userService.currentUserDetails(); |
||||
if ( user != null && !user.getHis1Rights().contains("RIGHT_CS_BIA_ANALYSIS_CREATE_ANALYSIS") && |
||||
!user.getHis1Rights().contains("RIGHT_CS_BIA_ANALYSIS_VIEW_ANALYSIS_TABLE") ) { |
||||
throw new AccessDeniedException("No right to access Create and View functions"); |
||||
} |
||||
} |
||||
|
||||
public List<Integer> getSachgebiete(Right... rights) { |
||||
List<String> values = getRightParamValues(RightParam.TOPIC_AREA, rights); |
||||
List<String> valuesForTopics = dbAdapter.getSachgebieteForFactTables(getRightParamValues(RightParam.TOPIC, rights)); |
||||
values.addAll(valuesForTopics); |
||||
if (values.isEmpty()) { |
||||
return new ArrayList<>(); |
||||
} |
||||
List<Integer> sachgebietValues = values.stream() |
||||
.map(Integer::parseInt).collect(Collectors.toList()); |
||||
return sachgebietValues; |
||||
} |
||||
|
||||
public List<Identifier> getFactTables(Right... rights) { |
||||
List<String> values = getRightParamValues(RightParam.TOPIC, rights); |
||||
if (values.isEmpty()) { |
||||
return new ArrayList<>(); |
||||
} |
||||
List<Identifier> factsValues = values.stream() |
||||
.map(value -> new Identifier(value)).collect(Collectors.toList()); |
||||
return factsValues; |
||||
} |
||||
|
||||
public void checkSachgebiet(int sachgebiet, Right... rights) { |
||||
List<Integer> sachgebiete = getSachgebiete(rights); |
||||
if (sachgebiete.isEmpty()) { |
||||
return; |
||||
} else if (!sachgebiete.contains(Integer.valueOf(sachgebiet))){ |
||||
throw new AccessDeniedException("No right to access Sachgebiet " + sachgebiet); |
||||
} |
||||
} |
||||
|
||||
public void checkFactTable(Identifier fact, Right... rights) { |
||||
List<Identifier> factTables = getFactTables(rights); |
||||
if (factTables.isEmpty()) { |
||||
return; |
||||
} else if (!factTables.contains(fact)){ |
||||
throw new AccessDeniedException("No right to access Fact Table " + fact.composedId); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Return the values of a RightParam for a given array of Rights. |
||||
* @param param RightParam which can be assigned to multiple Rights. |
||||
* @param rights Rights which may or may not contain the RightParam. |
||||
* @return List of values for the RightParam (first occurrence for multiple Rights). |
||||
*/ |
||||
public List<String> getRightParamValues(RightParam param, Right... rights) { |
||||
SxUser user = (SxUser) userService.currentUserDetails(); |
||||
if(user == null) { |
||||
return new ArrayList<>(); |
||||
} |
||||
|
||||
Map<String, Map<String,String>> rightsMap = user.getRightsMap(); |
||||
Map<String, String> rightParamMap = null; |
||||
boolean noRights = true; |
||||
for (Right right : rights) { |
||||
if(rightsMap.containsKey(right.getString())) { |
||||
rightParamMap = rightsMap.get(right.getString()); |
||||
noRights = false; |
||||
if(rightParamMap != null) { |
||||
break; |
||||
} |
||||
} |
||||
} |
||||
|
||||
if(noRights) { |
||||
throw new AccessDeniedException("Missing rights: " + Right.getPrintableRights(rights)); |
||||
} |
||||
|
||||
if(rightParamMap == null || rightParamMap.isEmpty()) { |
||||
return new ArrayList<>(); |
||||
} |
||||
|
||||
String paramValues = rightParamMap.get(param.getString()); |
||||
if (paramValues != null) { |
||||
List<String> paramValuesResult = new ArrayList<>(); |
||||
for (String string : StringUtils.split(paramValues, ',')) { |
||||
paramValuesResult.add(string); |
||||
} |
||||
return paramValuesResult; |
||||
} |
||||
return new ArrayList<>(); |
||||
} |
||||
|
||||
} |
||||
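A sketch of the value format getRightParamValues and getSachgebiete work with: a right maps to a parameter map whose values are comma-separated lists. The map content below is invented; the split-and-parse steps mirror the service code.

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.apache.commons.lang3.StringUtils;

public class RightParamValueSketch {
    public static void main(String[] args) {
        // e.g. RIGHT_CS_BIA_ANALYSIS_CREATE_ANALYSIS -> { "bianalysis.topic_area" : "10,20,30" }
        Map<String, String> rightParamMap = Map.of("bianalysis.topic_area", "10,20,30");

        List<String> values = new ArrayList<>();
        for (String value : StringUtils.split(rightParamMap.get("bianalysis.topic_area"), ',')) {
            values.add(value);
        }
        List<Integer> sachgebiete = values.stream().map(Integer::parseInt).collect(Collectors.toList());
        System.out.println(sachgebiete); // [10, 20, 30]
    }
}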
@ -0,0 +1,637 @@
@@ -0,0 +1,637 @@
|
||||
package de.superx.bianalysis.service; |
||||
|
||||
import java.sql.ResultSet; |
||||
import java.sql.SQLException; |
||||
import java.text.SimpleDateFormat; |
||||
import java.util.ArrayList; |
||||
import java.util.Date; |
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
import java.util.StringJoiner; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import javax.sql.DataSource; |
||||
|
||||
import org.springframework.beans.factory.InitializingBean; |
||||
import org.springframework.beans.factory.annotation.Autowired; |
||||
import org.springframework.jdbc.core.JdbcTemplate; |
||||
import org.springframework.jdbc.core.RowMapper; |
||||
import org.springframework.stereotype.Service; |
||||
|
||||
import de.superx.bianalysis.FaultyMetadataException; |
||||
import de.superx.bianalysis.ReportDefinition; |
||||
import de.superx.bianalysis.ReportMetadata; |
||||
import de.superx.bianalysis.StoredReport; |
||||
import de.superx.bianalysis.metadata.Identifier; |
||||
import de.superx.bianalysis.models.Dimension; |
||||
import de.superx.bianalysis.models.DimensionAttribute; |
||||
import de.superx.bianalysis.models.FactTable; |
||||
import de.superx.bianalysis.models.Filter; |
||||
import de.superx.bianalysis.models.Measure; |
||||
import de.superx.bianalysis.repository.DimensionAttributeRepository; |
||||
import de.superx.bianalysis.repository.DimensionRepository; |
||||
import de.superx.bianalysis.repository.FactRepository; |
||||
import de.superx.bianalysis.repository.MeasureFilterRepository; |
||||
import de.superx.bianalysis.repository.MeasureRepository; |
||||
import de.superx.bianalysis.repository.StoredReportRepository; |
||||
import de.superx.bianalysis.repository.dto.AttributeDto; |
||||
import de.superx.bianalysis.repository.dto.DimensionDto; |
||||
import de.superx.bianalysis.repository.dto.FactDto; |
||||
import de.superx.bianalysis.repository.dto.MeasureDto; |
||||
import de.superx.bianalysis.repository.dto.MeasureFilterDto; |
||||
import de.superx.common.NotYetImplementedException; |
||||
import de.superx.jdbc.entity.Sachgebiet; |
||||
import de.superx.jdbc.entity.Systeminfo; |
||||
import de.superx.jdbc.repository.SachgebieteRepository; |
||||
import de.superx.jdbc.repository.SysteminfoRepository; |
||||
|
||||
@Service() |
||||
public class DbMetaAdapter implements InitializingBean { |
||||
|
||||
@Autowired |
||||
FactRepository factRepository; |
||||
|
||||
@Autowired |
||||
MeasureRepository measureRepository; |
||||
|
||||
@Autowired |
||||
DimensionRepository dimensionRepository; |
||||
|
||||
@Autowired |
||||
DimensionAttributeRepository dimensionAttrRepo; |
||||
|
||||
@Autowired |
||||
MeasureFilterRepository measureFilterRepo; |
||||
|
||||
@Autowired |
||||
StoredReportRepository storedReportRepository; |
||||
|
||||
@Autowired |
||||
SachgebieteRepository sachgebieterepository; |
||||
|
||||
@Autowired |
||||
SysteminfoRepository systeminfoRepository; |
||||
|
||||
@Autowired |
||||
DataSource dataSource; |
||||
|
||||
private JdbcTemplate jt; |
||||
|
||||
@Override |
||||
public void afterPropertiesSet() throws Exception { |
||||
this.jt = new JdbcTemplate(dataSource); |
||||
} |
||||
|
||||
public List<FactTable> getFactTables(List<Integer> sachgebieteTids, List<Identifier> facts) throws NotYetImplementedException { |
||||
try { |
||||
List<FactTable> factTables = this.factRepository.findAll() |
||||
.stream() |
||||
.filter(f -> getSachgebietForFacttable(f.sachgebiettid.intValue()).tid.intValue() != -1) |
||||
.filter(f -> sachgebieteTids.isEmpty() || sachgebieteTids.contains(Integer.valueOf(f.sachgebiettid.intValue()))) |
||||
.filter(f -> facts.isEmpty() || facts.contains(f.id) || !hasSachgebietTopicRestrictions(facts, f.sachgebiettid.intValue()) ) |
||||
.map(f -> { |
||||
FactTable fact = new FactTable(f); |
||||
fact.setSachgebiet(getSachgebietForFacttable(f.sachgebiettid.intValue())); |
||||
fact.setConformedDimensions(getConformedDimensionsForFacttable(f.id)); |
||||
return fact; |
||||
}) |
||||
.collect(Collectors.toList()); |
||||
return factTables; |
||||
} catch (Exception e) { |
||||
|
||||
e.printStackTrace(); |
||||
if (e.getCause().getMessage().contains("FEHLER: Relation »metadata.facttable« existiert nicht")) { |
||||
throw new NotYetImplementedException("Bitte installieren Sie zuerst die Komponente 'BI-Analyse-Daten' und führen Sie anschließend den Konnektor aus."); |
||||
} |
||||
throw e; |
||||
} |
||||
} |
||||
|
||||
private boolean hasSachgebietTopicRestrictions(List<Identifier> facts, int tid) { |
||||
for (Identifier factId : facts) { |
||||
if(getFactTable(factId).getSachgebiettid() == tid) { |
||||
return true; |
||||
} |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
public List<Dimension> getConformedDimensionsForFacttable(Identifier factId){ |
||||
List<Dimension> conformedDimension = new ArrayList<>(); |
||||
List<Identifier> ids = dimensionRepository.getUsedConformedDimensionsByFactTable(factId.composedId); |
||||
for (Identifier id : ids) { |
||||
Dimension dimension = getDimension(id); |
||||
List<DimensionAttribute> attributes = getAttributesOfDimension(dimension.getId()); |
||||
|
||||
dimension.setDimensionAttributes(attributes); |
||||
for(DimensionAttribute a : attributes) { |
||||
a.setDimension(dimension); |
||||
} |
||||
conformedDimension.add(getDimension(id)); |
||||
} |
||||
return conformedDimension; |
||||
} |
||||
|
||||
public List<DimensionAttribute> getDimensionAttributeMetadata(List<Identifier> attributeIds, Identifier factId) { |
||||
|
||||
if (attributeIds == null || attributeIds.size() <= 0 ) { |
||||
return null; |
||||
} |
||||
|
||||
List<DimensionAttribute> result = new ArrayList<DimensionAttribute>(); |
||||
for (Identifier id : attributeIds) { |
||||
Optional<AttributeDto> optAttribute = dimensionAttrRepo.findById(id); |
||||
if(optAttribute.isEmpty()) { |
||||
throw new FaultyMetadataException(id, "Attribute"); |
||||
} |
||||
|
||||
DimensionAttribute attr = new DimensionAttribute(optAttribute.get()); |
||||
Dimension dim = null; |
||||
|
||||
if(factId != null) { |
||||
// case 1: id is role playing -> rpId is null because the attribute array of the conf dim is not empty
|
||||
// case 2: id is NOT role playing because the attribute array of the conf dim is empty
|
||||
Identifier rpId = getRolePlayingDimensionWithNoAttributes(id.composedId, factId.composedId); |
||||
if(rpId != null) { |
||||
dim = getDimension(rpId); |
||||
} |
||||
} |
||||
|
||||
if(dim == null) { |
||||
dim = getDimension(attr.getDimensionId()); |
||||
} |
||||
|
||||
attr.setDimension(dim); |
||||
|
||||
if(attr.getAttrConformedId() != null) { |
||||
Optional<AttributeDto> optAttributeConf = dimensionAttrRepo.findById(new Identifier(attr.getAttrConformedId())); |
||||
if(optAttributeConf.isPresent()) { |
||||
Dimension dimConf = getDimension(optAttributeConf.get().dimensionId); |
||||
attr.setDimConformedId(dimConf.getId().composedId); |
||||
} |
||||
} |
||||
result.add(attr); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
public List<Measure> getMeasureMetadata(List<Identifier> measureIds) { |
||||
if (measureIds != null && measureIds.size() > 0 ) { |
||||
List<Measure> result = new ArrayList<Measure>(); |
||||
for (Identifier id : measureIds) { |
||||
Measure measure = getMeasure(id); |
||||
if(measure.getMeasureFilterId() != null) { |
||||
MeasureFilterDto filter = measureFilterRepo.findById(measure.getMeasureFilterId()).get(); |
||||
if(filter.dimensionAttributeId != null) { |
||||
DimensionAttribute attribute = getDimensionAttributeById(filter.dimensionAttributeId); |
||||
Dimension dimension = getDimension(attribute.getDimensionId()); |
||||
measure.setMeasureFilterAttributes(filter, attribute, dimension); |
||||
} else if(filter.factColumnFilter != null) { |
||||
measure.setFactColumnFilter(filter); |
||||
} |
||||
} |
||||
result.add(measure); |
||||
} |
||||
return result; |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
|
||||
|
||||
public List<Dimension> getDimensions(Identifier factTableId) { |
||||
List<Dimension> dimensions = new ArrayList<>(); |
||||
for (DimensionDto dimensionDto : dimensionRepository.findByFactTableId(factTableId)) { |
||||
if(dimensionDto.isHidden != null && dimensionDto.isHidden.booleanValue()) { |
||||
continue; |
||||
} |
||||
Dimension dimension = new Dimension(dimensionDto); |
||||
dimensions.add(dimension); |
||||
List<DimensionAttribute> attr = getAttributesOfDimension(dimension.getId()); |
||||
dimension.setDimensionAttributes(attr); |
||||
if(dimension.getConformed() != null) { |
||||
Dimension dimConf = getDimension(new Identifier(dimension.getConformed())); |
||||
dimension.conformedCaption = dimConf.getCaption(); |
||||
dimension.conformedDescription = dimConf.getDescription(); |
||||
if(attr.isEmpty() && !dimension.getConformed().isEmpty()) { |
||||
attr = getAttributesOfDimension(new Identifier(dimension.getConformed())); |
||||
for( DimensionAttribute a : attr) { |
||||
a.setAttrConformedId(a.getStringId()); |
||||
a.setHierarchy(dimConf.isHierarchy()); |
||||
} |
||||
dimension.setDimensionAttributes(attr); |
||||
continue; |
||||
} |
||||
} |
||||
for(DimensionAttribute a : attr) { |
||||
if(a.getAttrConformedId() != null) { |
||||
DimensionAttribute attribute = getDimensionAttributeById(new Identifier(a.getAttrConformedId())); |
||||
a.setConformedCaption(attribute.getCaption()); |
||||
a.setConformedDescription(attribute.getDescription()); |
||||
} |
||||
a.setDimension(dimension); |
||||
} |
||||
} |
||||
|
||||
return dimensions; |
||||
} |
||||
|
||||
public List<DimensionAttribute> getAllowedDimensionAttributes(List<Identifier> ids, List<Integer> sachgebietTids, List<Identifier> factTables){ |
||||
List<DimensionAttribute> attributes = new ArrayList<>(); |
||||
for (Identifier id : ids) { |
||||
DimensionAttribute attr = getDimensionAttributeById(id); |
||||
Dimension dim = getDimension(attr.getDimensionId()); |
||||
attr.setDimension(dim); |
||||
Optional<FactDto> factOpt = factRepository.findById(dim.getId()); |
||||
if(factOpt.isPresent()) { |
||||
if(!factTables.contains(factOpt.get().id)) { |
||||
continue; |
||||
} |
||||
Integer sachgebiettsTid = Integer.valueOf(factOpt.get().sachgebiettid.intValue()); |
||||
if(!sachgebietTids.isEmpty() && !sachgebietTids.contains(sachgebiettsTid)) { |
||||
continue; |
||||
} |
||||
} |
||||
attributes.add(attr); |
||||
} |
||||
return attributes; |
||||
} |
||||
|
||||
public List<String> getDimensionAttributeValues(List<DimensionAttribute> attributes, List<Identifier> factTables) { |
||||
List<String> result = new ArrayList<String>(); |
||||
List<String> tables = new ArrayList<String>(); |
||||
for (DimensionAttribute attr : attributes) { |
||||
if(tables.contains(attr.getTablename())) { |
||||
continue; |
||||
} |
||||
tables.add(attr.getTablename()); |
||||
for(Identifier factId : factTables) { |
||||
FactTable fact = getFactTable(factId); |
||||
Dimension dim = getDimension(attr.getDimensionId()); |
||||
List<String> values = getDimensionAttributeValues(attr, dim, fact); |
||||
result.addAll(values); |
||||
} |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
public List<List<Object>> getDimensionAttributeValuesHierarchy(Identifier attribute_id) { |
||||
DimensionAttribute attr = getDimensionAttributeById(attribute_id); |
||||
Dimension dim = getDimension(attr.getDimensionId()); |
||||
return getDimensionAttributeValuesHierarchy(attr.getColumnname(), dim.getTablename()); |
||||
} |
||||
|
||||
public List<Filter> getFilterMetadata(List<Filter> filters) { |
||||
for (Filter filter : filters) { |
||||
DimensionAttribute attr = getDimensionAttributeById(filter.dimensionAttributeId); |
||||
Dimension dim = getDimension(attr.getDimensionId()); |
||||
attr.setDimension(dim); |
||||
filter.setDimension(dim); |
||||
filter.setDimensionAttribute(attr); |
||||
} |
||||
return filters; |
||||
} |
||||
|
||||
public List<String> getDimensionAttributeValues(DimensionAttribute attr, Dimension dim, FactTable factTable) { |
||||
String sortOrderColumnName = attr.getSortOrderColumn() != null ? attr.getSortOrderColumn(): attr.getColumnname(); |
||||
|
||||
String templateSql = |
||||
"SELECT DISTINCT d.%s AS value, d.%s, " + |
||||
"array_position(array[%s], d.%s::text) " + |
||||
"FROM presentation.%s d"; |
||||
|
||||
if(dim != null |
||||
&& attr.getFilterSelection() != null |
||||
&& attr.getFilterSelection().equals("show_existing_only")) { |
||||
|
||||
templateSql += " INNER JOIN presentation." + factTable.getTablename() |
||||
+ " f ON d.id = f." + dim.getJoincolumn(); |
||||
} |
||||
|
||||
templateSql += " WHERE d.%s IS NOT NULL "; |
||||
|
||||
if(dim != null |
||||
&& attr.getFilterSelection() != null |
||||
&& attr.getFilterSelection().equals("show_range")) { |
||||
|
||||
templateSql += " AND d.id BETWEEN (" |
||||
+ "SELECT MIN(" + dim.getJoincolumn() + ")" |
||||
+ " FROM presentation." + factTable.getTablename() |
||||
+ ") AND (" |
||||
+ "SELECT MAX(" + dim.getJoincolumn() |
||||
+ " FROM presentation." + factTable.getTablename() + ")"; |
||||
} |
||||
|
||||
// TODO: sometimes we need DESC
|
||||
templateSql += " ORDER BY 3, 2 ASC;"; |
||||
|
||||
String query = String.format( |
||||
templateSql, |
||||
attr.getColumnname(), |
||||
sortOrderColumnName, |
||||
DimensionAttribute.specialValueListForSql(), |
||||
attr.getColumnname(), |
||||
attr.getTablename(), |
||||
attr.getColumnname() |
||||
); |
||||
|
||||
List<String> values = jt.query(query, (rs, rowNum) -> rs.getString("value")) |
||||
.stream().distinct().collect(Collectors.toList()); |
||||
|
||||
return values; |
||||
} |
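// Illustrative sketch of a query the method above can produce, assuming a hypothetical
// attribute column "geschlecht" on dimension table "dim_person" with filterSelection
// "show_existing_only" and a hypothetical fact table "fact_studierende" joined via "person_id":
//
//   SELECT DISTINCT d.geschlecht AS value, d.geschlecht,
//          array_position(array[<special values>], d.geschlecht::text)
//   FROM presentation.dim_person d
//   INNER JOIN presentation.fact_studierende f ON d.id = f.person_id
//   WHERE d.geschlecht IS NOT NULL
//   ORDER BY 3, 2 ASC;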
||||
|
||||
public List<List<Object>> getDimensionAttributeValuesHierarchy(String columname, String tablename) { |
||||
String query = "select distinct id, parent_id, "+columname+" from presentation." + tablename; |
||||
List<List<Object>> values = jt.query(query, |
||||
new Object[0], |
||||
new RowMapper<List<Object>>() { |
||||
@Override |
||||
public List<Object> mapRow(ResultSet rs, int rowNum) throws SQLException { |
||||
List<Object> result = new ArrayList<>(); |
||||
result.add(Integer.valueOf(rs.getInt("id"))); |
||||
result.add(Integer.valueOf(rs.getInt("parent_id"))); |
||||
result.add(rs.getString(columname)); |
||||
return result; |
||||
} |
||||
} |
||||
); |
||||
|
||||
return values; |
||||
} |
||||
|
||||
public DimensionAttribute getDimensionAttributeMetadataById(Identifier id) { |
||||
DimensionAttribute attr = getDimensionAttributeById(id); |
||||
Dimension dim = getDimension(attr.getDimensionId()); |
||||
attr.setDimension(dim); |
||||
return attr; |
||||
} |
||||
|
||||
public Sachgebiet getSachgebietById(int sachgebietId) { |
||||
return this.sachgebieterepository.findById(Integer.valueOf(sachgebietId)).get(); |
||||
} |
||||
|
||||
public int saveReportDefinition(StoredReport report) { |
||||
StoredReport savedStoredReport = this.storedReportRepository.save(report); |
||||
return savedStoredReport.id; |
||||
} |
||||
|
||||
public int getBridgeMaxLevel(DimensionAttribute bridgeAttr, ReportMetadata metadata) { |
||||
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); |
||||
String sql = buildMaxHierarchyLvlSQL(bridgeAttr, metadata.factTable.getTablename(), metadata.getFilterNoHierarchy(), bridgeAttr.getTablename()); |
||||
int value = jdbcTemplate.queryForObject(sql, Integer.class).intValue() + 1; |
||||
return value; |
||||
} |
||||
|
||||
public int getBridgeMaxLevel(DimensionAttribute bridgeAttr, ReportMetadata metadata, String factTableName) { |
||||
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); |
||||
String sql = buildMaxHierarchyLvlSQL(bridgeAttr, factTableName, metadata.getFilterNoHierarchy(), bridgeAttr.getTablename()); |
||||
int value = jdbcTemplate.queryForObject(sql, Integer.class).intValue() + 1; |
||||
return value; |
||||
} |
||||
|
||||
public int getBridgeMinLevel(List<Filter> filters, int maxLvl, String dimTable) { |
||||
if(filters == null || filters.isEmpty() ) { |
||||
return 0; |
||||
} |
||||
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); |
||||
List<String> filterValues = new ArrayList<>(); |
||||
|
||||
for (Filter filter : filters) { |
||||
filterValues.add("ancestor_" + filter.columnname + "[%s] IN ( " + filter.getValues() + " )"); |
||||
} |
||||
|
||||
for (int i = 0; i < maxLvl; i++) { |
||||
String sql = "select count(*) from presentation." + dimTable + "_hierarchy" |
||||
+ " where "; |
||||
StringJoiner filterJoiner = new StringJoiner(" OR "); |
||||
|
||||
for (String string : filterValues) { |
||||
filterJoiner.add(String.format(string, Integer.valueOf(i+1))); |
||||
} |
||||
|
||||
sql += filterJoiner.toString(); |
||||
int value = jdbcTemplate.queryForObject(sql, Integer.class).intValue(); |
||||
if(value > 0) { |
||||
return i; |
||||
} |
||||
} |
||||
return -1; |
||||
} |
||||
|
||||
public boolean isAttributeHierarchyBridge(Identifier leftDimensionAttributeId) { |
||||
Optional<AttributeDto> dimAttrOpt = this.dimensionAttrRepo.findById(leftDimensionAttributeId); |
||||
if(dimAttrOpt.isEmpty()) { |
||||
return false; |
||||
} |
||||
Optional<DimensionDto> dimOpt = this.dimensionRepository.findById(dimAttrOpt.get().dimensionId); |
||||
if(dimOpt.isEmpty()) { |
||||
return false; |
||||
} |
||||
|
||||
if(dimOpt.get().isHierarchy == null) return false; |
||||
return dimOpt.get().isHierarchy.booleanValue(); |
||||
} |
||||
|
||||
public int getColNumbers(List<DimensionAttribute> topDimensionAttributes, List<Filter> filters) { |
||||
JdbcTemplate jdbcTemplate = new JdbcTemplate(dataSource); |
||||
int num = 1; |
||||
for (DimensionAttribute dimensionAttribute : topDimensionAttributes) { |
||||
if(filters != null && filters.size() > 0) { |
||||
Filter filter = Filter.findFilterById(filters, dimensionAttribute.getId()); |
||||
if(filter != null && filter.filterValues.size() > 0) { |
||||
num *= filter.filterValues.size(); |
||||
continue; |
||||
} |
||||
} |
||||
String sql = String.format("SELECT count(DISTINCT %s) FROM presentation.%s WHERE %s IS NOT NULL", dimensionAttribute.getColumnname(), dimensionAttribute.getTablename(), dimensionAttribute.getColumnname()); |
||||
int value = jdbcTemplate.queryForObject(sql, new Object[] {}, Integer.class).intValue(); |
||||
num *= value; |
||||
} |
||||
return num; |
||||
} |
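// Worked example for getColNumbers (hypothetical numbers): with two top dimension attributes,
// "geschlecht" having 2 distinct non-null values and "semester" restricted by a filter to
// 3 values, the method returns 2 * 3 = 6.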
||||
|
||||
/** |
||||
* Given a conformed attribute and a fact table, this method determines whether |
||||
* the conformed attribute belongs to a role-playing dimension that has no |
||||
* attributes of its own. |
||||
* |
||||
* If so, the id of that role-playing dimension is returned; otherwise null. |
||||
*/ |
||||
public Identifier getRolePlayingDimensionWithNoAttributes(String attrId, String factId) { |
||||
Identifier confDimId = dimensionRepository.findDimensionIdForAttribute(attrId); |
||||
System.out.println(attrId); |
||||
List<Identifier> preids = dimensionRepository.getRolePlayingIds(confDimId.composedId, factId); |
||||
if(preids.size() == 1) { |
||||
return preids.get(0); |
||||
} else if (preids.size() > 1) { |
||||
throw new RuntimeException("Not yet implemented: Can't use conformed dimension " |
||||
+ "more than once for the same facttable."); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
public Identifier checkIfFactTableHasDimensionAttribute(Identifier conformedAttrId, Identifier fact) { |
||||
Identifier rpId = getRolePlayingDimensionWithNoAttributes(conformedAttrId.composedId, fact.composedId); |
||||
|
||||
if(rpId != null) { |
||||
return conformedAttrId; |
||||
} |
||||
List<Identifier> ids = dimensionAttrRepo |
||||
.findAttributesByConformedAttributeAndFactTable(conformedAttrId.composedId, fact.composedId); |
||||
if (ids.size() == 1) { |
||||
return ids.get(0); |
||||
} else if (ids.size() > 1) { |
||||
throw new FaultyMetadataException("Für die Faktentabelle mit der ID '" + fact.composedId |
||||
+ "' existieren zwei Role-Playing Dimensions " + "zugehörig zu der Conformed Dimension mit der ID '" |
||||
+ conformedAttrId.composedId + "'. " + "Dieser Fall ist zurzeit noch nicht umgesetzt."); |
||||
} else { |
||||
Identifier id = dimensionAttrRepo.findAttributesByIdAndFactTable(conformedAttrId.composedId, fact.composedId); |
||||
return id; |
||||
} |
||||
} |
||||
|
||||
public boolean checkIfFactTableHasMeasure(Identifier measure, Identifier fact) { |
||||
return factRepository.hasFactTableMeasure(fact.composedId, measure.composedId); |
||||
} |
||||
|
||||
public String getLastUpdate(int tid) { |
||||
Optional<Systeminfo> systeminfoOpt = systeminfoRepository.findById(Integer.valueOf(tid)); |
||||
if(systeminfoOpt.isPresent()) { |
||||
Date lastUpdate = systeminfoOpt.get().datum; |
||||
return new SimpleDateFormat("dd.MM.yyyy HH:mm").format(lastUpdate); |
||||
} |
||||
return "Unknown"; |
||||
} |
||||
|
||||
public String getFactTableNameMaxBridgeLvl(Identifier fact, Identifier attr) { |
||||
return factRepository.getFactTableNameForAttribute(fact.composedId, attr.composedId); |
||||
} |
||||
|
||||
public DataSource getDataSource() { |
||||
return this.dataSource; |
||||
} |
||||
|
||||
public int getSachgebietByFactTableId(String factTableId) { |
||||
Optional<FactDto> fact = this.factRepository.findById(new Identifier(factTableId)); |
||||
if(fact.isPresent()) { |
||||
return fact.get().sachgebiettid.intValue(); |
||||
} |
||||
return -1; |
||||
} |
||||
|
||||
public List<Integer> getSachgebieteForReport(ReportDefinition reportDefinition) { |
||||
List<Integer> result = new ArrayList<>(); |
||||
for (Identifier factTableId : reportDefinition.factTableIds) { |
||||
int sachgebiet = getSachgebietByFactTableId(factTableId.composedId); |
||||
result.add(Integer.valueOf(sachgebiet)); |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
public List<StoredReport> findAllStoredReports(){ |
||||
List<StoredReport> reports = new ArrayList<>(); |
||||
for (StoredReport report : this.storedReportRepository.findAll()) { |
||||
StoredReport.setReportDefinitionFromJson(report); |
||||
reports.add(report); |
||||
} |
||||
return reports; |
||||
} |
||||
|
||||
public Optional<StoredReport> findById(int id) { |
||||
Optional<StoredReport> report = storedReportRepository.findById(id); |
||||
if(report.isPresent()) { |
||||
StoredReport.setReportDefinitionFromJson(report.get()); |
||||
} |
||||
return report; |
||||
} |
||||
|
||||
public void deleteById(int id) { |
||||
storedReportRepository.deleteById(id); |
||||
} |
||||
|
||||
private Sachgebiet getSachgebietForFacttable(int sachgebiettid) { |
||||
Optional<Sachgebiet> sachgebiet = sachgebieterepository |
||||
.findById(Integer.valueOf(sachgebiettid)); |
||||
if(sachgebiet.isPresent()) { |
||||
return sachgebiet.get(); |
||||
} |
||||
return new Sachgebiet(Integer.valueOf(-1), "Unknown", null); |
||||
} |
||||
|
||||
public List<String> getSachgebieteForFactTables(List<String> rightParamValues) { |
||||
List<String> result = new ArrayList<>(); |
||||
for (String id : rightParamValues) { |
||||
int sachgebiet = getSachgebietByFactTableId(id); |
||||
if(sachgebiet != -1) { |
||||
result.add(String.valueOf(sachgebiet)); |
||||
} |
||||
} |
||||
return result; |
||||
} |
||||
|
||||
public List<DimensionAttribute> getAttributesOfDimension(Identifier dimId){ |
||||
List<AttributeDto> attr = dimensionAttrRepo.findByDimensionId(dimId); |
||||
List<DimensionAttribute> attributes = attr.stream() |
||||
.map(a -> new DimensionAttribute(a)) |
||||
.filter(a -> !a.isHidden()) |
||||
.collect(Collectors.toList()); |
||||
return attributes; |
||||
} |
||||
|
||||
public Dimension getDimension(Identifier id) { |
||||
Optional<DimensionDto> dimOpt = dimensionRepository.findById(id); |
||||
if(dimOpt.isEmpty()) { |
||||
throw new FaultyMetadataException(id, "Dimension"); |
||||
} |
||||
Dimension dimension = new Dimension(dimOpt.get()); |
||||
return dimension; |
||||
} |
||||
|
||||
public Measure getMeasure(Identifier id) { |
||||
Optional<MeasureDto> measureOpt = measureRepository.findById(id); |
||||
if(measureOpt.isEmpty()) { |
||||
throw new FaultyMetadataException(id, "Measure"); |
||||
} |
||||
Measure measure = new Measure(measureOpt.get()); |
||||
return measure; |
||||
} |
||||
|
||||
public DimensionAttribute getDimensionAttributeById(Identifier dimAttrId) { |
||||
Optional<AttributeDto> optAttr = this.dimensionAttrRepo.findById(dimAttrId); |
||||
if(optAttr.isEmpty()) { |
||||
throw new FaultyMetadataException(dimAttrId, "Attribute"); |
||||
} |
||||
return new DimensionAttribute(optAttr.get()); |
||||
} |
||||
|
||||
public List<Measure> getMeasures(Identifier factTableId) { |
||||
List<Measure> measures = new ArrayList<>(); |
||||
for (MeasureDto measureDto : measureRepository.findByFactTableId(factTableId)) { |
||||
measures.add(new Measure(measureDto)); |
||||
} |
||||
return measures; |
||||
} |
||||
|
||||
public FactTable getFactTable(Identifier factTableId) { |
||||
Optional<FactDto> optFact = factRepository.findById(factTableId); |
||||
if(optFact.isEmpty()) { |
||||
throw new FaultyMetadataException(factTableId, "Fact"); |
||||
} |
||||
return new FactTable(optFact.get()); |
||||
} |
||||
|
||||
/** |
||||
* Generates the SQL that returns the maximum depth of a hierarchy. |
||||
* (Unfortunately there seems to be no way to determine this without an additional SQL query.) |
||||
*/ |
||||
private static String buildMaxHierarchyLvlSQL(DimensionAttribute bridgeAttr, String factTable, List<Filter> filters, String dimTab) { |
||||
// TODO: filters
|
||||
String sql = "SELECT MAX(lvl) " |
||||
+ "FROM presentation."+ factTable + " fw " |
||||
+ "LEFT JOIN presentation."+dimTab+"_hierarchy h " |
||||
+ "ON h." + bridgeAttr.getDimIdJoinColumn() +" = fw."+bridgeAttr.getJoincolumn(); |
||||
return sql; |
||||
} |
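// Illustrative sketch of the statement generated by buildMaxHierarchyLvlSQL (hypothetical
// fact table "fact_studierende", hierarchy dimension table "dim_orgunit" and join columns):
//
//   SELECT MAX(lvl) FROM presentation.fact_studierende fw
//   LEFT JOIN presentation.dim_orgunit_hierarchy h ON h.orgunit_id = fw.orgunit_id
//
// The callers add 1 to the result to obtain the number of hierarchy levels.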
||||
|
||||
} |
||||
@ -0,0 +1,378 @@
@@ -0,0 +1,378 @@
|
||||
package de.superx.bianalysis.sqlgeneration; |
||||
|
||||
import java.util.List; |
||||
import java.util.StringJoiner; |
||||
|
||||
import de.superx.bianalysis.ColumnElement; |
||||
import de.superx.bianalysis.ReportMetadata; |
||||
import de.superx.bianalysis.models.DimensionAttribute; |
||||
import de.superx.bianalysis.models.Filter; |
||||
import de.superx.bianalysis.models.Measure; |
||||
|
||||
/** |
||||
* Let's consider the following example for the SQL generation: |
||||
* |
||||
* Dimensions: X, Y, Z with one attribute each |
||||
* Attributes |
||||
* - X: DA |
||||
* - values: DA1, DA2 |
||||
* - Y: DB |
||||
* - values: DB1, DB2 |
||||
* - Z: DC |
||||
* - values: DC1, DC2 |
||||
* Measures: |
||||
* - M1: count on col_a |
||||
* - M2: sum on col_b |
||||
* |
||||
* For the simplest use case (all attributes and measures selected without any |
||||
* filters or bridge tables) the generated table would look like this: |
||||
* |
||||
* +---------+-----------------------+----------------------+ |
||||
* | | DA1 | DA2 | |
||||
* | |-----------+-----------+-----------+----------+ |
||||
* | DC | DB1 | DB2 | DB1 | DB2 | |
||||
* | |-----+-----+-----+-----+-----+-----+-----+----+ |
||||
* | | M1 | M2 | M1 | M2 | M1 | M2 | M1 | M2 | |
||||
* +=========+=====+=====+=====+=====+=====+=====+=====+====+ |
||||
* | DC1 | | | | | | | | | |
||||
* +---------+-----+-----+-----+-----+-----+-----+-----+----+ |
||||
* | DC2 | | | | | | | | | |
||||
* +---------+-----+-----+-----+-----+-----+-----+-----+----+ |
||||
* |
||||
* and the generated SQL would look like this: |
||||
* |
||||
* SELECT |
||||
* DC, |
||||
* COUNT(col_a) FILTER (WHERE DA = 'DA1' AND DB = 'DB1') as "col0", |
||||
* SUM(col_b) FILTER (WHERE DA = 'DA1' AND DB = 'DB1') as "col1", |
||||
* COUNT(col_a) FILTER (WHERE DA = 'DA1' AND DB = 'DB2') as "col2", |
||||
* SUM(col_b) FILTER (WHERE DA = 'DA1' AND DB = 'DB2') as "col3", |
||||
* COUNT(col_a) FILTER (WHERE DA = 'DA2' AND DB = 'DB1') as "col4", |
||||
* SUM(col_b) FILTER (WHERE DA = 'DA2' AND DB = 'DB1') as "col5", |
||||
* COUNT(col_a) FILTER (WHERE DA = 'DA2' AND DB = 'DB2') as "col6", |
||||
* SUM(col_b) FILTER (WHERE DA = 'DA2' AND DB = 'DB2') as "col7" |
||||
* FROM |
||||
* presentation.fact_table |
||||
* JOIN presentation.dim_a |
||||
* ON fact_table.dim_a = dim_a.id |
||||
* JOIN presentation.dim_b |
||||
* ON fact_table.dim_b = dim_b.id |
||||
* JOIN presentation.dim_c |
||||
* ON fact_table.dim_c = dim_c.id |
||||
* GROUP BY dim_c.DC |
||||
* |
||||
* |
||||
* !! Special Cases: |
||||
* |
||||
* 1. Filtering Attributes |
||||
* The user filters attribute DA to the single value DA1. |
||||
* In this case the select section shrinks to the following four columns: |
||||
* |
||||
* COUNT(col_a) FILTER (WHERE DA = 'DA1' AND DB = 'DB1') as "col0", |
||||
* SUM(col_b) FILTER (WHERE DA = 'DA1' AND DB = 'DB1') as "col1", |
||||
* COUNT(col_a) FILTER (WHERE DA = 'DA1' AND DB = 'DB2') as "col2", |
||||
* SUM(col_b) FILTER (WHERE DA = 'DA1' AND DB = 'DB2') as "col3" |
||||
* |
||||
* and the following where clause would be appended: |
||||
* |
||||
* WHERE dim_a.DA IN ('DA1') |
||||
* |
||||
* 2. Measures with Built-in Filter |
||||
* Consider that measure M1 should only count rows with the value DA1 of attribute DA. |
||||
* In this case the measure's filter condition is prepended to the filter condition |
||||
* of each column generated for this measure: |
||||
* |
||||
* COUNT(col_a) FILTER (WHERE col_a IN ('DA1') AND DA = 'DA1' AND DB = 'DB1') as "col0", |
||||
* ... |
||||
* COUNT(col_a) FILTER (WHERE col_a IN ('DA1') AND DA = 'DA1' AND DB = 'DB2') as "col2", |
||||
* ... |
||||
* COUNT(col_a) FILTER (WHERE col_a IN ('DA1') AND DA = 'DA2' AND DB = 'DB1') as "col4", |
||||
* ... |
||||
* COUNT(col_a) FILTER (WHERE col_a IN ('DA1') AND DA = 'DA2' AND DB = 'DB2') as "col6", |
||||
* ... |
||||
* |
||||
* The filter condition for a measure can be either an IN or NOT IN condition. |
||||
* |
||||
*/ |
||||
public class SQLGenerator { |
||||
|
||||
public ReportMetadata reportMetadata; |
||||
public List<ColumnElement> columnElements; |
||||
public char formatSql = ' '; |
||||
private final static String HIERARCHY_MODEL_SUFFIX = "_hierarchy"; |
||||
|
||||
public SQLGenerator(ReportMetadata reportMetadata, List<ColumnElement> columnElements) { |
||||
this.reportMetadata = reportMetadata; |
||||
this.columnElements = columnElements; |
||||
} |
||||
|
||||
public SQLGenerator(ReportMetadata reportMetadata) { |
||||
this.reportMetadata = reportMetadata; |
||||
} |
||||
|
||||
public String buildFormattedSqlStatement() { |
||||
formatSql = '\n'; |
||||
return buildSqlStatement(); |
||||
} |
||||
|
||||
public String buildSqlStatement() { |
||||
StringBuilder statement = new StringBuilder(); |
||||
statement.append("SELECT "); |
||||
statement.append(buildSelectSection()); |
||||
statement.append(formatSql + "FROM presentation." + reportMetadata.factTable.getTablename() ); |
||||
statement.append(buildJoinSection()); |
||||
statement.append(buildFilterSection()); |
||||
statement.append(buildGroupBySection()); |
||||
statement.append(buildOrderBySection()); |
||||
return statement.toString(); |
||||
} |
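// Usage sketch (for illustration only; the ReportMetadata and ColumnElement instances are
// assumed to be prepared by the caller, e.g. by the metadata service and the ColumnElementBuilder):
//
//   SQLGenerator generator = new SQLGenerator(reportMetadata, columnElements);
//   String sql = generator.buildFormattedSqlStatement();
//   // -> SELECT ... FROM presentation.<fact table> JOIN ... [WHERE ...] GROUP BY ROLLUP (...) ORDER BY ...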
||||
|
||||
public String buildSelectSection() { |
||||
StringJoiner columns = new StringJoiner(", "); |
||||
String dimensionAttributesStatement = selectDimensionAttributes(); |
||||
if (dimensionAttributesStatement != null && !dimensionAttributesStatement.isBlank() ) { |
||||
columns.add(dimensionAttributesStatement); |
||||
} |
||||
StringJoiner measuresStatementJoiner = new StringJoiner(", "); |
||||
columnElements.forEach((columnElement) -> { |
||||
measuresStatementJoiner.add(selectMeasure(columnElement)); |
||||
}); |
||||
String measuresStatement = measuresStatementJoiner.toString(); |
||||
if (measuresStatement != null && !measuresStatement.isBlank()) { |
||||
columns.add(measuresStatement); |
||||
} |
||||
return columns.toString(); |
||||
} |
||||
|
||||
|
||||
public String selectDimensionAttributes() { |
||||
if (reportMetadata.leftDimensionAttributes == null) { |
||||
return null; |
||||
} |
||||
StringJoiner columns = new StringJoiner(", "); |
||||
for (DimensionAttribute attribute : reportMetadata.leftDimensionAttributes) { |
||||
|
||||
String columnName = attribute.getColumnname(); |
||||
String tableAlias = attribute.getDimensionTableAlias(); |
||||
String columnAlias = attribute.getDimensionColumnAlias(); |
||||
|
||||
if(attribute.isHierarchy()) { |
||||
|
||||
// Build select expressions for each hierarchy level (ancestor node),
|
||||
// assigning aliases col0, col1, etc.
|
||||
StringBuilder resultBuilder = new StringBuilder(); |
||||
for (int i = reportMetadata.minBridgeLvl; i < reportMetadata.maxBridgeLvl; i++) { |
||||
resultBuilder |
||||
.append(attribute.getDimensionTableAlias()) |
||||
.append(".ancestor_") |
||||
.append(columnName) |
||||
.append('[').append(i + 1).append(']') |
||||
.append(" AS \"col").append(i).append("\""); |
||||
|
||||
if (i < reportMetadata.maxBridgeLvl - 1) { |
||||
resultBuilder.append(", "); |
||||
} |
||||
} |
||||
columns.add(resultBuilder.toString()); |
||||
|
||||
} else { |
||||
columns.add(String.format("%s.%s AS %s", tableAlias, columnName, columnAlias)); |
||||
|
||||
String sortOrderColumn = attribute.getSortOrderColumn(); |
||||
if (sortOrderColumn != null) { |
||||
columns.add(String.format("%s.%s AS %s_%s", |
||||
tableAlias, sortOrderColumn, columnAlias, sortOrderColumn)); |
||||
} |
||||
} |
||||
} |
||||
return columns.toString(); |
||||
} |
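// Illustrative output of selectDimensionAttributes() for a hypothetical hierarchy attribute
// "name" with table alias "dim_orgunit", minBridgeLvl = 1 and maxBridgeLvl = 3:
//
//   dim_orgunit.ancestor_name[2] AS "col1", dim_orgunit.ancestor_name[3] AS "col2"
//
// A non-hierarchy attribute is selected as "<tableAlias>.<column> AS <columnAlias>" instead,
// optionally followed by its sort order column.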
||||
|
||||
public String getMeasureTablePart(String factTableTablename, Measure measure, List<DimensionAttribute> dimensionAttributes) { |
||||
String result = ""; |
||||
String tableCol = factTableTablename + "." + measure.getColumnname(); |
||||
if(measure.getAggregationType().equals("sum")) { |
||||
result = "SUM(" + tableCol + ")"; |
||||
} else if (measure.getAggregationType().equals("count")) { |
||||
result = "COUNT(" + tableCol + ")"; |
||||
} else if (measure.getAggregationType().equals("distinct-count")) { |
||||
result = "COUNT(distinct(" + tableCol + "))"; |
||||
} else if (measure.getAggregationType().equals("avg")) { |
||||
result = "AVG(" + tableCol + ")"; |
||||
} else if (measure.getAggregationType().equals("min")) { |
||||
result = "MIN(" + tableCol + ")"; |
||||
} else if (measure.getAggregationType().equals("max")) { |
||||
result = "MAX(" + tableCol + ")"; |
||||
} else if (measure.getAggregationType().equals("std")) { |
||||
result = "STDDEV_SAMP(" + tableCol + ")"; |
||||
} else if (measure.getAggregationType().equals("var")) { |
||||
result = "VAR_SAMP(" + tableCol + ")"; |
||||
} |
||||
return result; |
||||
} |
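// Example for getMeasureTablePart (hypothetical fact table "fact_studierende" and measure
// column "matrikel_nr"): the aggregation type "distinct-count" yields
// COUNT(distinct(fact_studierende.matrikel_nr)); an unknown aggregation type yields an empty string.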
||||
|
||||
public String selectMeasure(ColumnElement columnElement) { |
||||
String factTableTablename = reportMetadata.factTable.getTablename(); |
||||
StringBuilder measureSelect = new StringBuilder(); |
||||
Measure measure = columnElement.measure; |
||||
measureSelect.append(getMeasureTablePart(factTableTablename, measure, reportMetadata.leftDimensionAttributes)); // TODO: add top dimension attributes
|
||||
if ( measure.filterCondition != null ) { |
||||
// if there exists a filter condition for a specific measure, prepend it to the column filter condition
|
||||
measureSelect.append(formatSql+ "FILTER (WHERE " + measure.filterCondition); |
||||
if (columnElement.dimensionAttributeFilter != null) { |
||||
measureSelect.append(" AND " + columnElement.dimensionAttributeFilter); |
||||
} |
||||
measureSelect.append(")"); |
||||
} else if (columnElement.dimensionAttributeFilter != null) { |
||||
measureSelect.append(formatSql + "FILTER (WHERE " + columnElement.dimensionAttributeFilter + ")"); |
||||
} |
||||
if (measureSelect.length() != 0) { |
||||
measureSelect.append(" AS \"col" + columnElement.columnNumber + "\""); |
||||
} |
||||
return measureSelect.toString(); |
||||
} |
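// Illustrative result of selectMeasure (all names hypothetical) for a count measure with a
// built-in filter combined with a column filter:
//
//   COUNT(fact_studierende.matrikel_nr)
//   FILTER (WHERE status IN ('eingeschrieben') AND dim_person.geschlecht = 'weiblich') AS "col0"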
||||
|
||||
public String buildJoinSection() { |
||||
StringBuilder statement = new StringBuilder(); |
||||
for (DimensionAttribute attr : reportMetadata.getUniqueDimensionAttributes()) { |
||||
|
||||
String joinColumn = "id"; |
||||
if( attr.getDimIdJoinColumn() != null |
||||
&& !attr.getDimIdJoinColumn().isBlank()) { |
||||
// Hierarchy dimension models must always be joined on an id column.
|
||||
// See the "hierarchy_dim.sql" dbt macro for implementation details.
|
||||
// For other models, the default join column can be customized in the metadata JSON files
|
||||
// using the "id_column" attribute.
|
||||
joinColumn = attr.getDimIdJoinColumn(); |
||||
} |
||||
|
||||
String dimensionTable = attr.getTablename(); |
||||
boolean isTopAttribute = reportMetadata.topDimensionAttributes.contains(attr); |
||||
if(attr.isHierarchy() && !isTopAttribute) { |
||||
// Hierarchy dimension tables use a dedicated join suffix.
|
||||
// For example, dim_orgunit is joined as dim_orgunit_hierarchy.
|
||||
// This hierarchy table contains all node paths in the hierarchy tree.
|
||||
// For additional details, see the "hierarchy_dim.sql" macro.
|
||||
dimensionTable += HIERARCHY_MODEL_SUFFIX; |
||||
} |
||||
|
||||
String join = String.format( |
||||
" JOIN presentation.%s AS %s ON %s.%s = %s.%s", |
||||
dimensionTable, |
||||
attr.getDimensionTableAlias(), |
||||
reportMetadata.factTable.getTablename(), |
||||
attr.getJoincolumn(), |
||||
attr.getDimensionTableAlias(), |
||||
joinColumn |
||||
); |
||||
statement.append(join); |
||||
/* TODO user input for historical keys: |
||||
1. is_current |
||||
2. last_known |
||||
3. specific date: (ANY_DATE BETWEEN %s.valid_from AND %s.valid_to) |
||||
*/ |
||||
if(attr.isHistorical()) { |
||||
String currentFilter = String.format( |
||||
" AND %s.is_current = true ", |
||||
attr.getDimensionTableAlias() |
||||
); |
||||
statement.append(currentFilter); |
||||
} |
||||
} |
||||
return statement.toString(); |
||||
} |
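// Illustrative joins produced by buildJoinSection() (hypothetical table and column names):
// a plain dimension attribute and a hierarchy attribute on the left axis would roughly produce
//
//   JOIN presentation.dim_person AS dim_person ON fact_studierende.person_id = dim_person.id
//   JOIN presentation.dim_orgunit_hierarchy AS dim_orgunit ON fact_studierende.orgunit_id = dim_orgunit.orgunit_id
//
// For historized dimensions " AND <alias>.is_current = true" is appended to the join condition.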
||||
|
||||
public String buildFilterSection() { |
||||
if (reportMetadata.filters == null || reportMetadata.filters.size() <= 0) { |
||||
return ""; |
||||
} |
||||
StringBuilder statement = new StringBuilder(" WHERE "); |
||||
StringJoiner groups = new StringJoiner(" AND "); |
||||
|
||||
for (Filter filter : reportMetadata.filters) { |
||||
|
||||
if(reportMetadata.isHierarchyFilter(filter)) { |
||||
StringBuilder resultBuilder = new StringBuilder(); |
||||
|
||||
for (int i = reportMetadata.minBridgeLvl; i < reportMetadata.maxBridgeLvl; i++) { |
||||
resultBuilder |
||||
.append(filter.joincolumn) |
||||
.append(".ancestor_") |
||||
.append(filter.columnname) |
||||
.append('[').append(i).append("] IN (") |
||||
.append(filter.getValues()) |
||||
.append(')'); |
||||
|
||||
if (i < reportMetadata.maxBridgeLvl - 1) { |
||||
resultBuilder.append(" OR "); |
||||
} |
||||
} |
||||
|
||||
groups.add(resultBuilder.toString()); |
||||
|
||||
} else { |
||||
groups.add(filter.dimensionTableAlias + "." + filter.columnname + " IN (" + filter.getValues() + ")"); |
||||
} |
||||
} |
||||
|
||||
statement.append(groups.toString()); |
||||
if(groups.length() == 0) { |
||||
return ""; |
||||
} |
||||
return statement.toString(); |
||||
} |
||||
|
||||
public String buildGroupBySection() { |
||||
if(reportMetadata.leftDimensionAttributes == null || reportMetadata.leftDimensionAttributes.size() <= 0) { |
||||
return ""; |
||||
} |
||||
StringBuilder statement = new StringBuilder("GROUP BY ROLLUP ("); |
||||
StringJoiner groups = new StringJoiner(", "); |
||||
for (DimensionAttribute attr : reportMetadata.leftDimensionAttributes) { |
||||
if(attr.isHierarchy()) { |
||||
// Group by the "colN" aliases generated for the ancestor levels of each hierarchy attribute (see selectDimensionAttributes()).
|
||||
int numOfHierarchyAttributes = reportMetadata.getHierarchyAttributes().size(); |
||||
for (int i = 0; i < numOfHierarchyAttributes; i++) { |
||||
for (int j = 0; j < reportMetadata.maxBridgeLvl; j++) { |
||||
if(j < reportMetadata.minBridgeLvl) { |
||||
continue; |
||||
} |
||||
groups.add("col"+(j + (i * reportMetadata.maxBridgeLvl))); |
||||
} |
||||
} |
||||
} else { |
||||
groups.add(attr.getDimensionTableAlias() + "." + attr.getColumnname()); |
||||
if(attr.getSortOrderColumn() != null) { |
||||
groups.add(attr.getDimensionTableAlias() + "." + attr.getSortOrderColumn()); |
||||
} |
||||
} |
||||
} |
||||
statement.append(groups.toString()); |
||||
if(groups.length() == 0) { |
||||
return ""; |
||||
} |
||||
statement.append(")"); |
||||
return formatSql + statement.toString(); |
||||
} |
||||
|
||||
public StringJoiner buildOrderBySection() { |
||||
StringJoiner orderCols = new StringJoiner(", ", " ORDER BY ", ""); |
||||
orderCols.setEmptyValue(""); |
||||
for (DimensionAttribute attr : reportMetadata.leftDimensionAttributes) { |
||||
if(attr.isHierarchy()) { |
||||
for (int i = reportMetadata.minBridgeLvl; i < reportMetadata.maxBridgeLvl; i++) { |
||||
orderCols.add("col" + i); |
||||
} |
||||
continue; |
||||
} |
||||
if(attr.getSortOrderColumn() != null) { |
||||
orderCols.add(attr.getDimensionTableAlias() + "." + attr.getSortOrderColumn()); |
||||
} else { |
||||
orderCols.add(attr.getDimensionColumnAlias()); |
||||
} |
||||
} |
||||
return orderCols; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,82 @@
@@ -0,0 +1,82 @@
|
||||
package de.superx.bianalysis.sqlgeneration; |
||||
|
||||
import java.util.List; |
||||
import java.util.StringJoiner; |
||||
|
||||
import de.superx.bianalysis.ReportMetadata; |
||||
import de.superx.bianalysis.models.DimensionAttribute; |
||||
import de.superx.bianalysis.models.Filter; |
||||
import de.superx.bianalysis.models.Measure; |
||||
|
||||
public class SQLGeneratorTotals { |
||||
|
||||
public SQLGeneratorTotals() {} |
||||
|
||||
public static String buildTotalsColumnSQL(SQLGenerator generator, int maxBridgeLvlOfReport) { |
||||
ReportMetadata metadata = generator.reportMetadata; |
||||
StringBuilder statement = new StringBuilder(); |
||||
statement.append("SELECT "); |
||||
statement.append(buildSelectSectionForTotalsCol(generator, metadata, maxBridgeLvlOfReport)); |
||||
statement.append(generator.formatSql + "FROM presentation." + metadata.factTable.getTablename() ); |
||||
statement.append(generator.buildJoinSection()); |
||||
statement.append(buildFilterSectionForTotalsCol(generator, metadata)); |
||||
statement.append(generator.buildGroupBySection()); |
||||
return statement.toString(); |
||||
} |
||||
|
||||
private static String buildFilterSectionForTotalsCol(SQLGenerator generator, ReportMetadata metadata) { |
||||
if(metadata.topDimensionAttributes.size() == 0 && metadata.filters.size() == 0) { |
||||
return ""; |
||||
} |
||||
StringBuilder where = new StringBuilder(" WHERE "); |
||||
StringJoiner groups = new StringJoiner(" AND "); |
||||
for (DimensionAttribute attribute : metadata.topDimensionAttributes) { |
||||
groups.add(attribute.getDimensionTableAlias() + "." + attribute.getColumnname() + " IN (" + getValues(attribute.getDimensionAttributeValues()) + ")"); |
||||
} |
||||
for (Filter filter : generator.reportMetadata.filters) { |
||||
groups.add(filter.dimensionTableAlias + "." + filter.columnname + " IN (" + filter.getValues() + ")"); |
||||
} |
||||
where.append(groups); |
||||
return where.toString(); |
||||
} |
||||
|
||||
private static String buildSelectSectionForTotalsCol(SQLGenerator generator, ReportMetadata metadata, int maxBridgeLvlOfReport) { |
||||
StringJoiner columns = new StringJoiner(", "); |
||||
columns.add(generator.selectDimensionAttributes()); |
||||
//columns.add(metadata.aggregationLvl + " AS aggregationLvl");
|
||||
int numCols = generator.reportMetadata.maxBridgeLvl; |
||||
for (Measure measure : metadata.measures) { |
||||
String value = generator.getMeasureTablePart(metadata.factTable.getTablename(), measure, metadata.leftDimensionAttributes); |
||||
if ( measure.filterCondition != null ) { |
||||
value += " FILTER (WHERE " + measure.filterCondition +")"; |
||||
} |
||||
value += " AS \"col" + (numCols++) + "\""; |
||||
columns.add(value); |
||||
} |
||||
String selectSection = columns.toString(); |
||||
return selectSection; |
||||
} |
||||
|
||||
private static String getValues(List<String> values) { |
||||
if(values == null || values.isEmpty()) { |
||||
return null; |
||||
} |
||||
StringJoiner joiner = new StringJoiner(", "); |
||||
for (String value : values) { |
||||
joiner.add("'"+value+"'"); |
||||
} |
||||
return joiner.toString(); |
||||
} |
||||
|
||||
public static String generateTotalsColumnSQL(ReportMetadata metadata) { |
||||
if(metadata.topDimensionAttributes.isEmpty()) { |
||||
return ""; |
||||
} |
||||
|
||||
SQLGenerator generator = new SQLGenerator(metadata); |
||||
String finalSQL = buildTotalsColumnSQL(generator, metadata.maxBridgeLvl) |
||||
+ generator.buildOrderBySection(); |
||||
return finalSQL; |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,510 @@
@@ -0,0 +1,510 @@
|
||||
package de.superx.bin; |
||||
|
||||
import static de.superx.servlet.SxSQL_Server.DEFAULT_MANDANTEN_ID; |
||||
|
||||
import java.io.File; |
||||
import java.util.Arrays; |
||||
import java.util.LinkedList; |
||||
import java.util.List; |
||||
import java.util.stream.Collectors; |
||||
|
||||
import javax.sql.DataSource; |
||||
|
||||
import org.apache.commons.cli.CommandLine; |
||||
import org.apache.commons.cli.CommandLineParser; |
||||
import org.apache.commons.cli.GnuParser; |
||||
import org.apache.commons.cli.HelpFormatter; |
||||
import org.apache.commons.cli.Option; |
||||
import org.apache.commons.cli.Options; |
||||
import org.apache.commons.cli.ParseException; |
||||
import org.apache.log4j.BasicConfigurator; |
||||
import org.apache.log4j.Level; |
||||
import org.apache.log4j.Logger; |
||||
import org.springframework.batch.core.ExitStatus; |
||||
import org.springframework.beans.BeansException; |
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext; |
||||
import org.springframework.context.support.GenericApplicationContext; |
||||
import de.superx.job.ContainerNode; |
||||
import de.superx.rest.EtlJobApi; |
||||
import de.superx.rest.model.job.Component; |
||||
import de.superx.rest.model.job.JobExecutionStatus; |
||||
import de.superx.rest.model.job.StepExecutionStatus; |
||||
import de.superx.servlet.SuperXManager; |
||||
import de.superx.servlet.SxPools; |
||||
import de.superx.spring.HisInOneConfiguration; |
||||
import de.superx.spring.batch.His1DataSources; |
||||
import de.superx.spring.cli.config.CLIConfig; |
||||
import de.superx.spring.config.BatchConfig; |
||||
import de.superx.spring.config.DataJdbcConfiguration; |
||||
import de.superx.spring.config.ServiceConfig; |
||||
import de.superx.spring.service.BatchJobDescriptionAdapter; |
||||
import de.superx.spring.service.EntityJobDescriptionSource; |
||||
import de.superx.util.PathAndFileUtils; |
||||
|
||||
/*** |
||||
* This class provides functionality to run the component actions of the |
||||
* 'modernized component administration' |
||||
* of the HISinOne-BI via command line. That includes |
||||
* - listing ETL jobs |
||||
* - installing components |
||||
* - updating components |
||||
* - deinstalling components |
||||
* - running "Hauptladeroutine" ETL jobs |
||||
* - running "Unterladeroutine" ETL jobs |
||||
* The class is just meant to be a frontend, so it uses the same implementation as the web application |
||||
* (which happens to be the "EtlJobApi" bean). |
||||
* However, a few things have to be considered: |
||||
* - The "jobLauncher" bean from the "BatchConfig" class works asynchronously, which is fine for the |
||||
* web application context, but in the CLI context, it is overridden by a synchronous implementation |
||||
* (in fact the tweaks to get the configuration ready for the CLI context are placed in the |
||||
* "CLIConfig" class). |
||||
* - In the web application context, the HISinOne-BI code in the HISinOne web application writes the |
||||
* database configuration to a file, which is then consumed by the "superx" application (the actual |
||||
* HISinOne-BI application). The command line application needs this file too, so before using it, you |
||||
* should start the web application once so that the file is in place. |
||||
* @author witt |
||||
* |
||||
*/ |
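// Usage sketch (for illustration only; classpath and job names are installation-specific and
// therefore only indicated schematically; the option names are the ones defined in createOptions()):
//
//   java -cp <classpath containing his1_databases.properties> de.superx.bin.ComponentAdminCLI -li
//   java -cp <...> de.superx.bin.ComponentAdminCLI -i kern
//   java -cp <...> de.superx.bin.ComponentAdminCLI -e <etl job id> -lg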
||||
public class ComponentAdminCLI { |
||||
|
||||
private static GenericApplicationContext APPLICATION_CONTEXT = null; |
||||
|
||||
private static String HELP_STRING = "Use this tool to run component actions via command line. " |
||||
+ "It needs the config file 'his1_databases.properties' inside the classpath; " |
||||
+ "this file gets written automatically when starting the web application."; |
||||
|
||||
private static boolean FILESYSTEM = false; |
||||
|
||||
static Logger logger = Logger.getLogger(ComponentAdminCLI.class); |
||||
|
||||
public static void main(String[] args) { |
||||
BasicConfigurator.configure(); // initializes console logging to stdout
|
||||
System.setProperty(SuperXManager.SUPER_X_HISINONE_VERSION, "non-empty-value"); |
||||
Options options = createOptions(); |
||||
CommandLine parsedArgs = parseArgs(args, options); |
||||
logOptions(parsedArgs); |
||||
if (parsedArgs.hasOption("h")) { |
||||
printHelp(options); |
||||
System.exit(0); |
||||
} |
||||
if(parsedArgs.hasOption("f")) { |
||||
FILESYSTEM = true; |
||||
} |
||||
initSuperXManager(); |
||||
if(parsedArgs.hasOption("s")) { |
||||
initTablesForEmptyDB(); |
||||
} |
||||
if(parsedArgs.hasOption("lg")) { |
||||
setBatchLoggerToOneFile(); |
||||
} |
||||
SuperXManager.initKettleEnv(createContext()); |
||||
if (parsedArgs.hasOption("la")) { |
||||
printAllJobs(); |
||||
} else if (parsedArgs.hasOption("li")) { |
||||
printInstallableJobs(); |
||||
} else if (parsedArgs.hasOption("le")) { |
||||
printEtlJobs(); |
||||
} else if (parsedArgs.hasOption("i")) { |
||||
installComponents(parsedArgs); |
||||
} else if (parsedArgs.hasOption("d")) { |
||||
deinstallComponent(parsedArgs); |
||||
} else if (parsedArgs.hasOption("u")) { |
||||
upgradeComponents(parsedArgs); |
||||
} else if (parsedArgs.hasOption("ua")) { |
||||
upgradeAll(); |
||||
} else if (parsedArgs.hasOption("e")) { |
||||
etlJobs(parsedArgs); |
||||
} else if (parsedArgs.hasOption("r")) { |
||||
reloadModule(); |
||||
} else if (parsedArgs.hasOption("if")) { |
||||
installFunctions(parsedArgs); |
||||
} else { |
||||
printHelp(options); |
||||
} |
||||
} |
||||
|
||||
private static void logOptions(CommandLine parsedArgs) { |
||||
logger.info("Starting with the following options:"); |
||||
for (Option opt : parsedArgs.getOptions()) { |
||||
logger.info(opt); |
||||
if(opt.getValues() != null && opt.getValues().length > 0) { |
||||
logger.info("Values for option " + opt.getOpt() + ": " + Arrays.asList(opt.getValues())); |
||||
} |
||||
} |
||||
} |
||||
|
||||
private static void initTablesForEmptyDB() { |
||||
try { |
||||
GenericApplicationContext context = createContext(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
ContainerNode root = EntityJobDescriptionSource.getPreKernInstallJob(); |
||||
componentApi.executeJob("eduetl", root); |
||||
} catch (Exception e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
private static void setBatchLoggerToOneFile() { |
||||
try { |
||||
GenericApplicationContext context = createContext(); |
||||
BatchJobDescriptionAdapter bjda = context.getBean(BatchJobDescriptionAdapter.class); |
||||
bjda.setLogJobToFile(false); |
||||
} catch (Exception e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
private static void installComponents(CommandLine parsedArgs) { |
||||
String[] components = parsedArgs.getOptionValues("i"); |
||||
String currentComp = null; |
||||
try (GenericApplicationContext context = createContext()) { |
||||
initSxPools(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
for (String comp : components) { |
||||
try { |
||||
currentComp = comp; |
||||
Long jobStartStatus = Long.valueOf(-1); |
||||
if(!FILESYSTEM) { |
||||
logger.info("EXECUTING: install job from database"); |
||||
jobStartStatus = componentApi.executeInstall(comp); |
||||
} else { |
||||
// disable mondrian step
|
||||
logger.info("EXECUTING: install job from filesystem for " + currentComp); |
||||
jobStartStatus = componentApi.executeInstallForQAMuster(comp); |
||||
logger.info("EXECUTING: workaround upgrade job from filesystem for " + currentComp); |
||||
jobStartStatus = componentApi.executeUpgradeForQAMuster(comp); |
||||
} |
||||
handleStartResult(jobStartStatus, componentApi); |
||||
if(comp.equals("kern") && FILESYSTEM) { |
||||
DataSource dataSource = context.getBean(His1DataSources.class).get("eduetl"); |
||||
SuperXManager.setWebInfFilePath(dataSource); |
||||
} |
||||
} catch (Exception e) { |
||||
logger.error("ERROR installing component " + comp, e); |
||||
} |
||||
} |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} catch (Exception e) { |
||||
handleJobException(e, currentComp); |
||||
} |
||||
} |
||||
|
||||
private static void reloadModule() { |
||||
try (GenericApplicationContext context = createContext()){ |
||||
initSxPools(); |
||||
initSuperXManager(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
componentApi.writeJobsToDb(); |
||||
} |
||||
} |
||||
|
||||
private static void printHelp(Options options) { |
||||
HelpFormatter help = new HelpFormatter(); |
||||
help.printHelp(HELP_STRING, options); |
||||
} |
||||
|
||||
private static void printInstallableJobs() { |
||||
try (GenericApplicationContext context = createContext()) { |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
List<Component> installJobs = componentApi.getInstallJobs(); |
||||
for (Component comp : installJobs) { |
||||
printDetails(comp); |
||||
System.out.println(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
private static void printEtlJobs() { |
||||
try (GenericApplicationContext context = createContext()) { |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
List<Component> etlJobs = componentApi.getEtlJobs(); |
||||
for (Component etlJob : etlJobs) { |
||||
printDetails(etlJob); |
||||
System.out.println(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
private static void printDetails(Component comp) { |
||||
System.out.println("abbrev: " + comp.getAbbreviation()); |
||||
System.out.println("name: " + comp.getName()); |
||||
System.out.println("database: " + comp.getDatabase()); |
||||
System.out.println("systeminfo_id: " + comp.getSysteminfoId()); |
||||
System.out.println("installed: " + comp.isInstalled()); |
||||
} |
||||
|
||||
private static void deinstallComponent(CommandLine parsedArgs) { |
||||
String comp = parsedArgs.getOptionValue("d"); |
||||
System.out.println(comp); |
||||
try (GenericApplicationContext context = createContext()) { |
||||
initSxPools(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
Long jobStartStatus = componentApi.executeUninstall(comp); |
||||
handleStartResult(jobStartStatus, componentApi); |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} catch (Exception e) { |
||||
handleJobException(e, comp); |
||||
} |
||||
} |
||||
|
||||
private static void upgradeComponents(CommandLine parsedArgs) { |
||||
String[] components = parsedArgs.getOptionValues("u"); |
||||
String currentComp = null; |
||||
try (GenericApplicationContext context = createContext()) { |
||||
initSxPools(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
for (String comp : components) { |
||||
currentComp = comp; |
||||
Long jobStartStatus = componentApi.executeUpgrade(comp); |
||||
if (jobStartStatus.longValue() == -1) { |
||||
logger.warn(comp + " not installed. Skipping upgrade."); |
||||
continue; |
||||
} |
||||
handleStartResult(jobStartStatus, componentApi); |
||||
} |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} catch (Exception e) { |
||||
handleJobException(e, currentComp); |
||||
} |
||||
} |
||||
|
||||
private static void installFunctions(CommandLine parsedArgs) { |
||||
String[] components = parsedArgs.getOptionValues("if"); |
||||
try (GenericApplicationContext context = createContext()) { |
||||
initSxPools(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
// If "all" is the only given component, install
|
||||
// functions for all installed components
|
||||
if (components.length == 1 && "all".equals(components[0])) { |
||||
List<Component> installedComponents = componentApi.getInstallJobs(); |
||||
components = installedComponents.stream().map( |
||||
c -> c.getAbbreviation()).collect(Collectors.toList()).toArray(new String[] {}); |
||||
} |
||||
boolean exitFailure = false; |
||||
for (String comp : components) { |
||||
try { |
||||
componentApi.installModuleFunctions(comp); |
||||
} catch (Exception e) { |
||||
logger.error("ERROR installing functions for component " + comp, e); |
||||
exitFailure = true; |
||||
} |
||||
} |
||||
if(exitFailure) { |
||||
System.out.println(("Beim Ausführen einer Aktion ist ein Fehler aufgetreten:")); |
||||
System.exit(1); |
||||
} |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} |
||||
} |
||||
|
||||
private static void upgradeAll() { |
||||
try (GenericApplicationContext context = createContext()) { |
||||
initSxPools(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
Long jobStartStatus = componentApi.executeUpgradeAll(); |
||||
handleStartResult(jobStartStatus, componentApi); |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} catch (Exception e) { |
||||
handleJobException(e, null); |
||||
} |
||||
} |
||||
|
||||
private static void etlJobs(CommandLine parsedArgs) { |
||||
String[] jobIds = parsedArgs.getOptionValues("e"); |
||||
String currentJobId = null; |
||||
try (GenericApplicationContext context = createContext()) { |
||||
initSxPools(); |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
Long jobStartStatus; |
||||
for (String jobId : jobIds) { |
||||
try { |
||||
currentJobId = jobId; |
||||
if (isHauptladeroutine(jobId, componentApi)) { |
||||
jobStartStatus = componentApi.complete(jobId); |
||||
} else if (isLoadTransform(jobId, componentApi)) { |
||||
jobStartStatus = componentApi.load(jobId); |
||||
} else { |
||||
jobStartStatus = componentApi.executeJob(null, jobId); |
||||
} |
||||
handleStartResult(jobStartStatus, componentApi); |
||||
} catch (Exception e) { |
||||
logger.error("ERROR executing job " + jobId, e); |
||||
} |
||||
} |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} catch (Exception e) { |
||||
handleJobException(e, currentJobId); |
||||
} |
||||
} |
||||
|
||||
private static void handleStartResult(Long jobStartStatus, EtlJobApi componentApi) { |
||||
if (jobStartStatus.intValue() == -1) { |
||||
System.err.println("Aktion konnte nicht gestartet werden: Es läuft bereits eine Aktion"); |
||||
} |
||||
try { |
||||
JobExecutionStatus es = componentApi.getStatus(jobStartStatus); |
||||
if ("FAILED".equals(es.exitStatus.getExitCode())) { |
||||
EtlJobApi.outputErrorSummary(es, System.err); |
||||
} |
||||
} catch (Exception e) { |
||||
System.err.println(("Beim Ausführen der Aktion ist ein Fehler aufgetreten:")); |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
private static void handleJobException(Exception e, String jobName) { |
||||
System.err.println("error while executing the job '" + jobName + "'"); |
||||
e.printStackTrace(); |
||||
} |
||||
|
||||
private static void handleBeansException(BeansException be) { |
||||
System.err.println("configuration error or error with resolving the bean '" + EtlJobApi.class.getCanonicalName() + "'"); |
||||
be.printStackTrace(); |
||||
} |
||||
|
||||
private static boolean isHauptladeroutine(String comp, EtlJobApi etlJob) { |
||||
List<Component> installJobs = etlJob.getEtlJobs(); |
||||
for (Component comp_meta : installJobs) { |
||||
if (comp_meta != null && comp_meta.getAbbreviation().equals(comp) && comp_meta.isDatabaseConnected()) { |
||||
return true; |
||||
} |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
private static boolean isLoadTransform(String comp, EtlJobApi etlJob) { |
||||
List<Component> installJobs = etlJob.getEtlJobs(); |
||||
for (Component comp_meta : installJobs) { |
||||
if (comp_meta != null && comp_meta.getAbbreviation().equals(comp) && !comp_meta.isDatabaseConnected()) { |
||||
return true; |
||||
} |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
private static void printAllJobs() { |
||||
try (GenericApplicationContext context = createContext()) { |
||||
EtlJobApi etlJob = context.getBean(EtlJobApi.class); |
||||
List<ContainerNode> allJobs = etlJob.getAllJobs(); |
||||
for (ContainerNode cn : allJobs) { |
||||
printDetails(cn); |
||||
System.out.println(); |
||||
} |
||||
} |
||||
} |
||||
|
||||
private static GenericApplicationContext createContext() { |
||||
/* |
||||
* https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/context/annotation/AnnotationConfigApplicationContext.html
|
||||
* quote: |
||||
* "In case of multiple @Configuration classes, @Bean methods defined in later classes will override those defined in earlier classes. |
||||
* This can be leveraged to deliberately override certain bean definitions via an extra @Configuration class." |
||||
* - so it's alright to override some beans via "CLIConfig" |
||||
*/ |
||||
if (APPLICATION_CONTEXT == null) { |
||||
APPLICATION_CONTEXT = new AnnotationConfigApplicationContext(BatchConfig.class, DataJdbcConfiguration.class, CLIConfig.class, ServiceConfig.class); |
||||
HisInOneConfiguration.configSuperXDbformsXML("pg", SuperXManager.getWEB_INFPfad() + File.separator + ".."); |
||||
} |
||||
// Override the JobDescriptionSource Bean if the -f flag is passed.
|
||||
if(FILESYSTEM) { |
||||
EtlJobApi etlJob = APPLICATION_CONTEXT.getBean(EtlJobApi.class); |
||||
EntityJobDescriptionSource entityJobDescriptionSource = APPLICATION_CONTEXT.getBean(EntityJobDescriptionSource.class); |
||||
etlJob.setJobDescriptionSource(entityJobDescriptionSource); |
||||
} |
||||
return APPLICATION_CONTEXT; |
||||
} |
||||
|
||||
private static void printDetails(ContainerNode cn) { |
||||
System.out.println("id: " + cn.id); |
||||
System.out.println("name: " + cn.name); |
||||
System.out.println("systeminfo_id: " + cn.systemInfoId); |
||||
System.out.println("tid: " + cn.tid); |
||||
System.out.println("type: " + cn.type); |
||||
} |
||||
|
||||
private static Options createOptions() { |
||||
Options options = new Options(); |
||||
Option opt; |
||||
opt = new Option("h", "help", false, "get help"); |
||||
options.addOption(opt); |
||||
opt = new Option("f", "filesystem", false, "load jobs from filesystem"); |
||||
options.addOption(opt); |
||||
opt = new Option("s", "setup", false, "setup minimal list of tables necessary for kern install"); |
||||
options.addOption(opt); |
||||
opt = new Option("la", "list-all", false, "list all available components"); |
||||
options.addOption(opt); |
||||
opt = new Option("li", "list-installables", false, "list all installable components"); |
||||
options.addOption(opt); |
||||
opt = new Option("le", "list-etl", false, "list all etl components"); |
||||
options.addOption(opt); |
||||
opt = new Option("i", "install", true, "install components"); |
||||
opt.setArgs(Option.UNLIMITED_VALUES); |
||||
options.addOption(opt); |
||||
opt = new Option("if", "install-functions", true, "install database functions for components (use 'all' to install functions for all installed components)"); |
||||
opt.setArgs(Option.UNLIMITED_VALUES); |
||||
options.addOption(opt); |
||||
opt = new Option("d", "deinstall", true, "de-/uninstall component"); |
||||
options.addOption(opt); |
||||
opt = new Option("u", "upgrade", true, "upgrade components"); |
||||
opt.setArgs(Option.UNLIMITED_VALUES); |
||||
options.addOption(opt); |
||||
opt = new Option("ua", "upgrade-all", false, "upgrade all installed components"); |
||||
options.addOption(opt); |
||||
opt = new Option("e", "etl", true, "run etl jobs"); |
||||
opt.setArgs(Option.UNLIMITED_VALUES); |
||||
options.addOption(opt); |
||||
opt = new Option("r", "reload-modules", false, "reload modules"); |
||||
options.addOption(opt); |
||||
//opt = new Option("re", "reload-module-etl", true, "reload module etl");
|
||||
//options.addOption(opt);
|
||||
opt = new Option("db", "database", true, "database system"); |
||||
options.addOption(opt); |
||||
opt = new Option("lg", "log-to-stdout", false, "log only to stdout and not to individual job files"); |
||||
options.addOption(opt); |
||||
return options; |
||||
} |
||||
|
||||
private static CommandLine parseArgs(String[] args, Options options) { |
||||
CommandLineParser parser = new GnuParser(); |
||||
try { |
||||
return parser.parse(options, args, false); |
||||
} catch (ParseException e) { |
||||
System.out.println("error while reading the command line parameters:"); |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
// Some actions require the SuperXManager; this is the place to initialize its static class attributes when needed.
|
||||
private static void initSuperXManager() { |
||||
try { |
||||
SuperXManager.setWEB_INFPfad(PathAndFileUtils.getWebinfPath()); |
||||
SuperXManager.setModuleDir(PathAndFileUtils.getWebinfPath() + File.separator + PathAndFileUtils.MODULE_PATH); |
||||
} catch(Exception e) { |
||||
System.out.println("error while initialising the SuperXManger:"); |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
} |
||||
|
||||
// sxPools need to be initialized, because spring batch ETL uses them to look up the database connections
|
||||
private static void initSxPools() { |
||||
try { |
||||
List<String> mandantenNamen = new LinkedList<String>(); |
||||
mandantenNamen.add(DEFAULT_MANDANTEN_ID); |
||||
SxPools.closeAll(); |
||||
SxPools.init(mandantenNamen); |
||||
SxPools.get(DEFAULT_MANDANTEN_ID).init(); |
||||
SxPools.get(DEFAULT_MANDANTEN_ID).initLogging(true, Level.DEBUG); |
||||
// also init kettle env, set plugin dir
|
||||
SuperXManager.initKettleEnv(APPLICATION_CONTEXT); |
||||
} catch (Exception e) { |
||||
System.out.println("error while initialising the SuperX pools:"); |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
} |
||||
|
||||
} |
||||
@ -0,0 +1,674 @@
@@ -0,0 +1,674 @@
|
||||
package de.superx.bin; |
||||
|
||||
import static de.superx.servlet.SxSQL_Server.DEFAULT_MANDANTEN_ID; |
||||
|
||||
import java.io.File; |
||||
import java.io.FileOutputStream; |
||||
import java.io.IOException; |
||||
import java.sql.Connection; |
||||
import java.sql.DatabaseMetaData; |
||||
import java.sql.JDBCType; |
||||
import java.sql.ResultSet; |
||||
import java.sql.SQLException; |
||||
import java.sql.Time; |
||||
import java.sql.Timestamp; |
||||
import java.util.ArrayList; |
||||
import java.util.Arrays; |
||||
import java.util.Date; |
||||
import java.util.LinkedList; |
||||
import java.util.List; |
||||
import java.util.Optional; |
||||
|
||||
import javax.sql.DataSource; |
||||
|
||||
import org.apache.commons.cli.CommandLine; |
||||
import org.apache.commons.cli.CommandLineParser; |
||||
import org.apache.commons.cli.GnuParser; |
||||
import org.apache.commons.cli.HelpFormatter; |
||||
import org.apache.commons.cli.Option; |
||||
import org.apache.commons.cli.Options; |
||||
import org.apache.commons.cli.ParseException; |
||||
import org.apache.log4j.Level; |
||||
import org.apache.log4j.Logger; |
||||
import org.apache.poi.xssf.usermodel.XSSFCell; |
||||
import org.apache.poi.xssf.usermodel.XSSFCellStyle; |
||||
import org.apache.poi.xssf.usermodel.XSSFDataFormat; |
||||
import org.apache.poi.xssf.usermodel.XSSFFont; |
||||
import org.apache.poi.xssf.usermodel.XSSFRow; |
||||
import org.apache.poi.xssf.usermodel.XSSFSheet; |
||||
import org.apache.poi.xssf.usermodel.XSSFWorkbook; |
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext; |
||||
import org.springframework.context.support.GenericApplicationContext; |
||||
import org.springframework.jdbc.core.JdbcTemplate; |
||||
import org.springframework.jdbc.core.RowCallbackHandler; |
||||
import org.springframework.jdbc.core.RowMapper; |
||||
|
||||
import de.superx.servlet.SuperXManager; |
||||
import de.superx.servlet.SxPools; |
||||
import de.superx.spring.batch.His1DataSources; |
||||
import de.superx.spring.cli.config.CLIConfig; |
||||
import de.superx.spring.config.BatchConfig; |
||||
import de.superx.spring.config.DataJdbcConfiguration; |
||||
import de.superx.spring.config.ServiceConfig; |
||||
|
||||
/** |
||||
* A utility for creating data-profiling statistics for database tables, |
* to be used in data warehouse design. |
* It can be used as a command line utility or embedded in an application. |
||||
*/ |
||||
public class DataProfiler { |
||||
|
||||
private final static String SQL_COUNT_NULL = "select count(*) from %s where %s is null"; |
||||
private final static String SQL_PERCENT_UNIQUE = "select\n" |
||||
+ " count(distinct %s) as unique_anz,\n" |
||||
+ " count(distinct %s)::float / count(*) * 100 as unique_percentage\n" |
||||
+ "from\n" |
||||
+ " %s;"; |
||||
private final static String SQL_RANKING = "select %s, count(*) as anz from %s where %s is not null group by %s order by 2 desc limit 10;"; |
||||
private final static String SQL_MIN_MAX_LEN = "select min(length(%s)) as min_length, max(length(%s)) as max_length\n" |
||||
+ "from %s\n" |
||||
+ "where %s is not null"; |
||||
private final static String SQL_COUNT_VALUE = "select count(*) from %s where %s = %s"; |
||||
private final static String SQL_MIN_MAX_AVG = "select min(%s), max(%s), avg(%s) from %s where %s is not null;"; |
||||
private final static String SQL_START_END = "select min(%s), max(%s) from %s where %s is not null"; |
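/* |
 * Illustrative note (not part of the original sources): the %s placeholders are filled |
 * via String.format with column and table names. For a hypothetical column "matrikel_nr" |
 * in a hypothetical table "student", SQL_PERCENT_UNIQUE expands to: |
 * |
 *   select |
 *     count(distinct matrikel_nr) as unique_anz, |
 *     count(distinct matrikel_nr)::float / count(*) * 100 as unique_percentage |
 *   from |
 *     student; |
 */ |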
||||
|
||||
private static GenericApplicationContext APPLICATION_CONTEXT = null; |
||||
|
||||
private static String HELP_STRING = "Use this tool to profile database tables for DWH design. " |
+ "It needs the config file 'his1_databases.properties' on the classpath; " |
+ "this file is written automatically when the web application starts."; |
||||
|
||||
|
||||
static Logger logger = Logger.getLogger(DataProfiler.class); |
||||
|
||||
private DataSource dataSource; |
||||
private String database; |
||||
private String schema; |
||||
private String[] tables; |
||||
|
||||
/** |
||||
* Instantiate a new DataProfiler. |
||||
* @param dataSource The DataSource from which to read the table statistics. |
||||
* @param schema The schema from which to read. If null, "public" is assumed. |
||||
* @param tables An Array of the names of the tables for which statistics should be created. |
||||
*/ |
||||
public DataProfiler(DataSource dataSource, String schema, String[] tables) { |
||||
try (Connection con = dataSource.getConnection()) { |
||||
this.database = con.getCatalog(); |
||||
} catch (SQLException e) { |
||||
logger.error("Couldn't read catalog", e); |
||||
} |
||||
this.schema = schema != null ? schema : "public"; |
||||
List<String> tableList = Arrays.asList(tables); |
||||
// TODO: Make sorting configurable?
|
||||
tableList.sort(null); |
||||
this.tables = tableList.toArray(new String[] {}); |
||||
this.dataSource = dataSource; |
||||
|
||||
} |
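/* |
 * Illustrative embedding sketch (not part of the original sources); the DataSource "ds", |
 * the schema and the table name are placeholders: |
 * |
 *   DataProfiler profiler = new DataProfiler(ds, "public", new String[] { "student" }); |
 *   profiler.outputExcel(profiler.createStatistics(), new File("student_profile.xlsx")); |
 * |
 * createStatistics() collects the per-table and per-column figures, outputExcel() writes |
 * one sheet per table. |
 */ |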
||||
|
||||
public static void main(String[] args) { |
||||
System.setProperty(SuperXManager.SUPER_X_HISINONE_VERSION, "non-empty-value"); |
||||
Options options = createOptions(); |
||||
CommandLine parsedArgs = parseArgs(args, options); |
||||
if (parsedArgs.hasOption("h")) { |
||||
printHelp(options); |
||||
System.exit(0); |
||||
} |
||||
|
||||
String database = null; |
||||
String schema = "public"; |
||||
String[] tables = null; |
||||
|
||||
if (parsedArgs.hasOption("d")) { |
||||
database = parsedArgs.getOptionValue('d'); |
||||
} |
||||
if (parsedArgs.hasOption("s")) { |
||||
schema = parsedArgs.getOptionValue('s'); |
||||
} |
||||
if (parsedArgs.hasOption("t")) { |
||||
tables = parsedArgs.getOptionValues('t'); |
||||
} |
||||
if (!parsedArgs.hasOption('d') || !parsedArgs.hasOption('t')) { |
||||
printHelp(options); |
||||
System.exit(0); |
||||
} |
||||
try (GenericApplicationContext context = createContext()) { |
||||
initSxPools(); |
||||
DataSource dataSource = context.getBean(His1DataSources.class).get(database); |
||||
DataProfiler profiler = new DataProfiler(dataSource, schema, tables); |
||||
profiler.outputExcel(profiler.createStatistics(), null); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Create the List of TableStatistics. |
||||
* @return List of TableStatistics. |
||||
*/ |
||||
public List<TableStatistic> createStatistics() { |
||||
List<TableStatistic> tableStats = new ArrayList<>(); |
||||
try { |
||||
JdbcTemplate jt = new JdbcTemplate(dataSource); |
||||
logger.info("Database: " + database); |
||||
logger.info("Schema: " + schema); |
||||
logger.info("Tables: " + Arrays.asList(tables)); |
||||
try (Connection con = dataSource.getConnection()) { |
||||
jt.execute("set search_path to " + schema); |
||||
DatabaseMetaData meta = con.getMetaData(); |
||||
for (String table : tables) { |
||||
long rowCount = jt.queryForObject("select count(*) from " + table, Long.class).longValue(); |
||||
TableStatistic tableStat = new TableStatistic(table, rowCount); |
||||
logger.info("Table " + table); |
||||
try(ResultSet columns = meta.getColumns(null, schema, table, null); |
||||
ResultSet exported = meta.getExportedKeys(null, schema, table); |
||||
ResultSet imported = meta.getImportedKeys(null, schema, table); |
||||
ResultSet pks = meta.getPrimaryKeys(null, schema, table)) { |
||||
while(columns.next()) { |
||||
ColumnStatistic columnStat = new ColumnStatistic(); |
||||
columnStat.name = columns.getString("COLUMN_NAME"); |
||||
columnStat.size = columns.getInt("COLUMN_SIZE"); |
||||
columnStat.decimalDigits = columns.getInt("DECIMAL_DIGITS"); |
||||
columnStat.type = JDBCType.valueOf(columns.getInt("DATA_TYPE")); |
||||
columnStat.comment = columns.getString("REMARKS"); |
||||
columnStat.isNullable = columns.getString("IS_NULLABLE").equalsIgnoreCase("yes"); |
||||
columnStat.isAutoincrement = columns.getString("IS_AUTOINCREMENT").equalsIgnoreCase("yes"); |
||||
columnStat.countNull = jt.queryForObject(String.format(SQL_COUNT_NULL, tableStat.name, columnStat.name), Long.class).longValue(); |
||||
columnStat.percentNull = (double) columnStat.countNull / (double) tableStat.rowCount * 100.0; |
||||
jt.query(String.format(SQL_PERCENT_UNIQUE, columnStat.name, columnStat.name, tableStat.name), new RowCallbackHandler() { |
||||
|
||||
@Override |
||||
public void processRow(ResultSet rs) throws SQLException { |
||||
columnStat.uniqueCount = rs.getLong(1); |
||||
columnStat.uniquePercent = rs.getDouble(2); |
||||
|
||||
} |
||||
}); |
||||
columnStat.ranking = jt.query(String.format(SQL_RANKING, columnStat.name, tableStat.name, columnStat.name, columnStat.name), new RowMapper<RankingEntry>() { |
||||
@Override |
||||
public RankingEntry mapRow(ResultSet rs, int rowNum) throws SQLException { |
||||
return new RankingEntry(rs.getString(1), rs.getInt(2)); |
||||
} |
||||
|
||||
}); |
||||
switch (columnStat.type) { |
||||
case CHAR: |
||||
case NCHAR: |
||||
case VARCHAR: |
||||
case NVARCHAR: |
||||
case LONGVARCHAR: |
||||
case LONGNVARCHAR: |
||||
jt.query(String.format(SQL_MIN_MAX_LEN, columnStat.name, columnStat.name, tableStat.name, columnStat.name), new RowCallbackHandler() { |
||||
@Override |
||||
public void processRow(ResultSet rs) throws SQLException { |
||||
columnStat.minLen = Optional.of(Integer.valueOf(rs.getInt(1))); |
||||
columnStat.maxLen = Optional.of(Integer.valueOf(rs.getInt(2))); |
||||
} |
||||
}); |
||||
columnStat.min_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, |
||||
"length(" + columnStat.name + ")", columnStat.minLen.get()), Integer.class)); |
||||
columnStat.max_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, |
||||
"length(" + columnStat.name + ")", columnStat.maxLen.get()), Integer.class)); |
||||
break; |
||||
case BIGINT: |
||||
case DECIMAL: |
||||
case DOUBLE: |
||||
case FLOAT: |
||||
case INTEGER: |
||||
case REAL: |
||||
case NUMERIC: |
||||
case SMALLINT: |
||||
case TINYINT: |
||||
jt.query(String.format(SQL_MIN_MAX_AVG, columnStat.name, columnStat.name, columnStat.name, tableStat.name, columnStat.name), new RowCallbackHandler() { |
||||
@Override |
||||
public void processRow(ResultSet rs) throws SQLException { |
||||
columnStat.min = Optional.of(Double.valueOf(rs.getDouble(1))); |
||||
columnStat.max = Optional.of(Double.valueOf(rs.getDouble(2))); |
||||
columnStat.avg = Optional.of(Double.valueOf(rs.getDouble(3))); |
||||
} |
||||
}); |
||||
columnStat.min_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, columnStat.min.get()), Integer.class)); |
||||
columnStat.max_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, columnStat.max.get()), Integer.class)); |
||||
break; |
||||
case DATE: |
||||
jt.query(String.format(SQL_START_END, columnStat.name, columnStat.name, tableStat.name, columnStat.name), new RowCallbackHandler() { |
||||
@Override |
||||
public void processRow(ResultSet rs) throws SQLException { |
||||
columnStat.earliestDate = Optional.ofNullable(rs.getDate(1)); |
||||
columnStat.latestDate = Optional.ofNullable(rs.getDate(2)); |
||||
} |
||||
}); |
||||
if (columnStat.earliestDate.isPresent()) { |
||||
columnStat.min_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, quote(columnStat.earliestDate.get())), Integer.class)); |
||||
} |
||||
if (columnStat.latestDate.isPresent()) { |
||||
columnStat.max_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, quote(columnStat.latestDate.get())), Integer.class)); |
||||
} |
||||
break; |
||||
case TIMESTAMP: |
||||
case TIMESTAMP_WITH_TIMEZONE: |
||||
jt.query(String.format(SQL_START_END, columnStat.name, columnStat.name, tableStat.name, columnStat.name), new RowCallbackHandler() { |
||||
@Override |
||||
public void processRow(ResultSet rs) throws SQLException { |
||||
columnStat.earliestTimestamp = Optional.ofNullable(rs.getTimestamp(1)); |
||||
columnStat.latestTimestamp = Optional.ofNullable(rs.getTimestamp(2)); |
||||
} |
||||
}); |
||||
if (columnStat.earliestTimestamp.isPresent()) { |
||||
columnStat.min_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, quote(columnStat.earliestTimestamp.get())), Integer.class)); |
||||
} |
||||
if (columnStat.latestTimestamp.isPresent()) { |
||||
columnStat.max_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, quote(columnStat.latestTimestamp.get())), Integer.class)); |
||||
} |
||||
break; |
||||
case TIME: |
||||
case TIME_WITH_TIMEZONE: |
||||
jt.query(String.format(SQL_START_END, columnStat.name, columnStat.name, tableStat.name, columnStat.name), new RowCallbackHandler() { |
||||
@Override |
||||
public void processRow(ResultSet rs) throws SQLException { |
||||
columnStat.earliestTime = Optional.ofNullable(rs.getTime(1)); |
||||
columnStat.latestTime = Optional.ofNullable(rs.getTime(2)); |
||||
} |
||||
}); |
||||
if (columnStat.earliestTime.isPresent()) { |
||||
columnStat.min_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, quote(columnStat.earliestTime.get())), Integer.class)); |
||||
} |
||||
if (columnStat.latestTime.isPresent()) { |
||||
columnStat.max_count = Optional.of(jt.queryForObject(String.format(SQL_COUNT_VALUE, tableStat.name, columnStat.name, quote(columnStat.latestTime.get())), Integer.class)); |
||||
} |
||||
break; |
||||
default: |
||||
} |
||||
tableStat.columns.add(columnStat); |
||||
} |
||||
while (exported.next()) { |
||||
String fromColumn = exported.getString("PKCOLUMN_NAME"); |
||||
String toTable = exported.getString("FKTABLE_NAME"); |
||||
String toColumn = exported .getString("FKCOLUMN_NAME"); |
||||
tableStat.exportedKeys.add(new ForeignKey(fromColumn, toTable, toColumn)); |
||||
|
||||
} |
||||
while (imported.next()) { |
||||
String fromColumn = imported.getString("FKCOLUMN_NAME"); |
||||
String toTable = imported.getString("PKTABLE_NAME"); |
||||
String toColumn = imported .getString("PKCOLUMN_NAME"); |
||||
tableStat.importedKeys.add(new ForeignKey(fromColumn, toTable, toColumn)); |
||||
|
||||
} |
||||
while (pks.next()) { |
||||
String column = pks.getString("COLUMN_NAME"); |
||||
tableStat.primaryKeys.add(column); |
||||
} |
||||
|
||||
} |
||||
tableStats.add(tableStat); |
||||
} |
||||
} |
||||
} catch (SQLException e) { |
||||
logger.error("SQL Fehler", e); |
||||
} |
||||
return tableStats; |
||||
} |
||||
|
||||
|
||||
/** |
||||
* Output statistic for a list of tables to an Excel file. |
||||
* The statistics of each table are written to a separate sheet. |
||||
* @param tableStats The list of TableStats |
||||
* @param outputFile The File to output to. If null output to current dir with a default file name. |
||||
*/ |
||||
public void outputExcel(List<TableStatistic> tableStats, File outputFile) { |
||||
XSSFWorkbook workbook = new XSSFWorkbook(); |
||||
XSSFDataFormat dataFormat = workbook.createDataFormat(); |
||||
XSSFCellStyle cellStyleDouble = workbook.createCellStyle(); |
||||
cellStyleDouble.setDataFormat(dataFormat.getFormat("0.##")); |
||||
XSSFCellStyle headerStyle = workbook.createCellStyle(); |
||||
XSSFFont bold = workbook.createFont(); |
||||
bold.setBold(true); |
||||
headerStyle.setFont(bold); |
||||
for (TableStatistic tableStat : tableStats) { |
||||
XSSFSheet sheet = workbook.createSheet("Table " + tableStat.name); |
||||
XSSFRow header = sheet.createRow(0); |
||||
XSSFCell cell = header.createCell(0); |
||||
cell.setCellValue("Database: " + database); |
||||
cell.setCellStyle(headerStyle); |
||||
cell = header.createCell(1); |
||||
cell.setCellStyle(headerStyle); |
||||
cell.setCellValue("Schema: " + schema); |
||||
XSSFRow first = sheet.createRow(2); |
||||
first.createCell(0).setCellValue("Table"); |
||||
first.getCell(0).setCellStyle(headerStyle); |
||||
first.createCell(1).setCellValue("Row Count"); |
||||
first.getCell(1).setCellStyle(headerStyle); |
||||
first.createCell(2).setCellValue("Column"); |
||||
first.getCell(2).setCellStyle(headerStyle); |
||||
first.createCell(3).setCellValue("Type"); |
||||
first.getCell(3).setCellStyle(headerStyle); |
||||
first.createCell(4).setCellValue("Size"); |
||||
first.getCell(4).setCellStyle(headerStyle); |
||||
first.createCell(5).setCellValue("Not Null"); |
||||
first.getCell(5).setCellStyle(headerStyle); |
||||
first.createCell(6).setCellValue("Autoincrement"); |
||||
first.getCell(6).setCellStyle(headerStyle); |
||||
first.createCell(7).setCellValue("Count NULL"); |
||||
first.getCell(7).setCellStyle(headerStyle); |
||||
first.createCell(8).setCellValue("% NULL"); |
||||
first.getCell(8).setCellStyle(headerStyle); |
||||
first.createCell(9).setCellValue("Count Unique"); |
||||
first.getCell(9).setCellStyle(headerStyle); |
||||
first.createCell(10).setCellValue("% Unique"); |
||||
first.getCell(10).setCellStyle(headerStyle); |
||||
first.createCell(11).setCellValue("Min Len"); |
||||
first.getCell(11).setCellStyle(headerStyle); |
||||
first.createCell(12).setCellValue("Max Len"); |
||||
first.getCell(12).setCellStyle(headerStyle); |
||||
first.createCell(13).setCellValue("Min"); |
||||
first.getCell(13).setCellStyle(headerStyle); |
||||
first.createCell(14).setCellValue("Max"); |
||||
first.getCell(14).setCellStyle(headerStyle); |
||||
first.createCell(15).setCellValue("Avg"); |
||||
first.getCell(15).setCellStyle(headerStyle); |
||||
first.createCell(16).setCellValue("Min Count"); |
||||
first.getCell(16).setCellStyle(headerStyle); |
||||
first.createCell(17).setCellValue("Max Count"); |
||||
first.getCell(17).setCellStyle(headerStyle); |
||||
first.createCell(18).setCellValue("Earliest"); |
||||
first.getCell(18).setCellStyle(headerStyle); |
||||
first.createCell(19).setCellValue("Latest"); |
||||
first.getCell(19).setCellStyle(headerStyle); |
||||
first.createCell(20).setCellValue("Comment"); |
||||
first.getCell(20).setCellStyle(headerStyle); |
||||
int row = 3; |
||||
XSSFRow tableRow = sheet.createRow(row); |
||||
tableRow.createCell(0).setCellValue(tableStat.name); |
||||
tableRow.getCell(0).setCellStyle(headerStyle); |
||||
tableRow.createCell(1).setCellValue(tableStat.rowCount); |
||||
tableRow.getCell(1).setCellStyle(headerStyle); |
||||
for (ColumnStatistic columnStat : tableStat.columns) { |
||||
row += 1; |
||||
XSSFRow descRow = sheet.createRow(row); |
||||
descRow.createCell(2).setCellValue(columnStat.name); |
||||
if (tableStat.primaryKeys.contains(columnStat.name)) { |
||||
descRow.getCell(2).setCellValue(columnStat.name + " (PK)"); |
||||
descRow.getCell(2).setCellStyle(headerStyle); |
||||
} |
||||
descRow.createCell(3).setCellValue(columnStat.type.getName()); |
||||
descRow.createCell(4).setCellValue(columnStat.size); |
||||
if (columnStat.decimalDigits != 0) { |
||||
descRow.getCell(4).setCellValue( |
||||
Double.valueOf(columnStat.size + "." + columnStat.decimalDigits).doubleValue() |
||||
); |
||||
descRow.getCell(4).setCellStyle(cellStyleDouble); |
||||
} |
||||
descRow.createCell(5).setCellValue(!columnStat.isNullable); |
||||
descRow.createCell(6).setCellValue(columnStat.isAutoincrement); |
||||
descRow.createCell(7).setCellValue(columnStat.countNull); |
||||
descRow.createCell(8).setCellValue(columnStat.percentNull); |
||||
descRow.getCell(8).setCellStyle(cellStyleDouble); |
||||
descRow.createCell(9).setCellValue(columnStat.uniqueCount); |
||||
descRow.createCell(10).setCellValue(columnStat.uniquePercent); |
||||
descRow.getCell(10).setCellStyle(cellStyleDouble); |
||||
if (columnStat.minLen.isPresent()) { |
||||
descRow.createCell(11).setCellValue(columnStat.minLen.get().doubleValue()); |
||||
} |
||||
if (columnStat.maxLen.isPresent()) { |
||||
descRow.createCell(12).setCellValue(columnStat.maxLen.get().doubleValue()); |
||||
} |
||||
if (columnStat.min.isPresent()) { |
||||
descRow.createCell(13).setCellValue(columnStat.min.get().doubleValue()); |
||||
descRow.getCell(13).setCellStyle(cellStyleDouble); |
||||
} |
||||
if (columnStat.max.isPresent()) { |
||||
descRow.createCell(14).setCellValue(columnStat.max.get().doubleValue()); |
||||
descRow.getCell(14).setCellStyle(cellStyleDouble); |
||||
} |
||||
if (columnStat.avg.isPresent()) { |
||||
descRow.createCell(15).setCellValue(columnStat.avg.get().doubleValue()); |
||||
descRow.getCell(15).setCellStyle(cellStyleDouble); |
||||
} |
||||
if (columnStat.min_count.isPresent()) { |
||||
descRow.createCell(16).setCellValue(columnStat.min_count.get().doubleValue()); |
||||
descRow.getCell(16).setCellStyle(cellStyleDouble); |
||||
} |
||||
if (columnStat.max_count.isPresent()) { |
||||
descRow.createCell(17).setCellValue(columnStat.max_count.get().doubleValue()); |
||||
descRow.getCell(17).setCellStyle(cellStyleDouble); |
||||
} |
||||
if (columnStat.earliestDate.isPresent()) { |
||||
descRow.createCell(18).setCellValue(columnStat.earliestDate.get().toString()); |
||||
} |
||||
if (columnStat.latestDate.isPresent()) { |
||||
descRow.createCell(19).setCellValue(columnStat.latestDate.get().toString()); |
||||
} |
||||
if (columnStat.earliestTime.isPresent()) { |
||||
descRow.createCell(18).setCellValue(columnStat.earliestTime.get().toString()); |
||||
} |
||||
if (columnStat.latestTime.isPresent()) { |
||||
descRow.createCell(19).setCellValue(columnStat.latestTime.get().toString()); |
||||
} |
||||
if (columnStat.earliestTimestamp.isPresent()) { |
||||
descRow.createCell(18).setCellValue(columnStat.earliestTimestamp.get().toString()); |
||||
} |
||||
if (columnStat.latestTimestamp.isPresent()) { |
||||
descRow.createCell(19).setCellValue(columnStat.latestTimestamp.get().toString()); |
||||
} |
||||
descRow.createCell(20).setCellValue(columnStat.comment); |
||||
} |
||||
for (int i = 0; i < 20; i++) { |
||||
sheet.autoSizeColumn(i); |
||||
} |
||||
XSSFRow frequHeader1 = sheet.createRow(row + 2); |
||||
XSSFRow frequHeader2 = sheet.createRow(row + 3); |
||||
frequHeader1.createCell(0).setCellValue("Frequency"); |
||||
frequHeader1.getCell(0).setCellStyle(headerStyle); |
||||
frequHeader2.createCell(0).setCellValue("Column"); |
||||
frequHeader2.getCell(0).setCellStyle(headerStyle); |
||||
for (int n = 1; n <= 10; n++) { |
||||
frequHeader1.createCell(n).setCellValue(n); |
||||
frequHeader1.getCell(n).setCellStyle(headerStyle); |
||||
} |
||||
for (int colNr = 0; colNr < tableStat.columns.size(); colNr++) { |
||||
XSSFRow frequRowLabel = sheet.createRow(row + 4 + 2 * colNr); |
||||
XSSFRow frequRowCount = sheet.createRow(row + 5 + 2 * colNr); |
||||
frequRowLabel.createCell(0).setCellValue(tableStat.columns.get(colNr).name); |
||||
for (int rankNr = 0; rankNr < tableStat.columns.get(colNr).ranking.size(); rankNr++) { |
||||
frequRowLabel.createCell(rankNr + 1).setCellValue(tableStat.columns.get(colNr).ranking.get(rankNr).label); |
||||
frequRowCount.createCell(rankNr + 1).setCellValue(tableStat.columns.get(colNr).ranking.get(rankNr).count); |
||||
} |
||||
} |
||||
|
||||
row = row + 2 * tableStat.columns.size() + 5; |
||||
XSSFRow exHeader1 = sheet.createRow(row); |
||||
exHeader1.createCell(0).setCellValue("Exported Keys"); |
||||
exHeader1.getCell(0).setCellStyle(headerStyle); |
||||
XSSFRow exHeader2 = sheet.createRow(row + 1); |
||||
exHeader2.createCell(0).setCellValue("From Column"); |
||||
exHeader2.getCell(0).setCellStyle(headerStyle); |
||||
exHeader2.createCell(1).setCellValue("To Table"); |
||||
exHeader2.getCell(1).setCellStyle(headerStyle); |
||||
exHeader2.createCell(2).setCellValue("To Column"); |
||||
exHeader2.getCell(2).setCellStyle(headerStyle); |
||||
for (int fkNr = 0; fkNr < tableStat.exportedKeys.size(); fkNr++) { |
||||
XSSFRow fkRow = sheet.createRow(row + 2 + fkNr); |
||||
fkRow.createCell(0).setCellValue(tableStat.exportedKeys.get(fkNr).fromColumn); |
||||
fkRow.createCell(1).setCellValue(tableStat.exportedKeys.get(fkNr).toTable); |
||||
fkRow.createCell(2).setCellValue(tableStat.exportedKeys.get(fkNr).toColumn); |
||||
} |
||||
row = row + 3 + tableStat.exportedKeys.size(); |
||||
XSSFRow imHeader1 = sheet.createRow(row); |
||||
imHeader1.createCell(0).setCellValue("Imported Keys"); |
||||
imHeader1.getCell(0).setCellStyle(headerStyle); |
||||
XSSFRow imHeader2 = sheet.createRow(row + 1); |
||||
imHeader2.createCell(0).setCellValue("From Table"); |
||||
imHeader2.getCell(0).setCellStyle(headerStyle); |
||||
imHeader2.createCell(1).setCellValue("From Column"); |
||||
imHeader2.getCell(1).setCellStyle(headerStyle); |
||||
imHeader2.createCell(2).setCellValue("To Column"); |
||||
imHeader2.getCell(2).setCellStyle(headerStyle); |
||||
for (int fkNr = 0; fkNr < tableStat.importedKeys.size(); fkNr++) { |
||||
XSSFRow fkRow = sheet.createRow(row + 2 + fkNr); |
||||
fkRow.createCell(0).setCellValue(tableStat.importedKeys.get(fkNr).toTable); |
||||
fkRow.createCell(1).setCellValue(tableStat.importedKeys.get(fkNr).toColumn); |
||||
fkRow.createCell(2).setCellValue(tableStat.importedKeys.get(fkNr).fromColumn); |
||||
} |
||||
} |
||||
|
||||
File currDir = new File("."); |
||||
String path = currDir.getAbsolutePath(); |
||||
String fileLocation = path.substring(0, path.length() - 1) + "db_profile_" + database + ".xlsx"; |
||||
if (outputFile != null) { |
||||
fileLocation = outputFile.getAbsolutePath(); |
||||
} |
||||
logger.info("Writing to " + fileLocation); |
||||
|
||||
FileOutputStream outputStream; |
||||
try { |
||||
outputStream = new FileOutputStream(fileLocation); |
||||
workbook.write(outputStream); |
||||
workbook.close(); |
||||
} catch (IOException e) { |
||||
logger.error("Couldn't write excel file", e); |
||||
} |
||||
} |
||||
|
||||
|
||||
private static Options createOptions() { |
||||
Options options = new Options(); |
||||
Option opt; |
||||
opt = new Option("h", "help", false, "get help"); |
||||
options.addOption(opt); |
||||
opt = new Option("t", "tables", true, "tables"); |
||||
opt.setArgs(Option.UNLIMITED_VALUES); |
||||
// opt.setRequired(true);
|
||||
options.addOption(opt); |
||||
opt = new Option("s", "schema", true, "schema"); |
||||
options.addOption(opt); |
||||
opt = new Option("d", "database", true, "database"); |
||||
// opt.setRequired(true);
|
||||
options.addOption(opt); |
||||
return options; |
||||
} |
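/* |
 * Illustrative invocation (classpath and names are placeholders); main() prints the help |
 * text and exits unless -d and -t are given, and -s falls back to "public": |
 * |
 *   java -cp superx.jar de.superx.bin.DataProfiler -d hisinone -s hisinone -t student exam |
 */ |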
||||
|
||||
private static CommandLine parseArgs(String[] args, Options options) { |
||||
CommandLineParser parser = new GnuParser(); |
||||
try { |
||||
return parser.parse(options, args, false); |
||||
} catch (ParseException e) { |
||||
System.out.println("error while reading the command line parameters:"); |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
private static void initSxPools() { |
||||
try { |
||||
List<String> mandantenNamen = new LinkedList<String>(); |
||||
mandantenNamen.add(DEFAULT_MANDANTEN_ID); |
||||
SxPools.closeAll(); |
||||
SxPools.init(mandantenNamen); |
||||
SxPools.get(DEFAULT_MANDANTEN_ID).init(); |
||||
SxPools.get(DEFAULT_MANDANTEN_ID).initLogging(true, Level.DEBUG); |
||||
// also init kettle env, set plugin dir
|
||||
SuperXManager.initKettleEnv(APPLICATION_CONTEXT); |
||||
} catch (Exception e) { |
||||
System.out.println("error while initialising the SuperX pools:"); |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
} |
||||
|
||||
private static void printHelp(Options options) { |
||||
HelpFormatter help = new HelpFormatter(); |
||||
help.printHelp(HELP_STRING, options); |
||||
} |
||||
|
||||
private static GenericApplicationContext createContext() { |
||||
/* |
||||
* https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/context/annotation/AnnotationConfigApplicationContext.html
|
||||
* quote: |
||||
* "In case of multiple @Configuration classes, @Bean methods defined in later classes will override those defined in earlier classes. |
||||
* This can be leveraged to deliberately override certain bean definitions via an extra @Configuration class." |
||||
* - so it's alright to override some beans via "CLIConfig" |
||||
*/ |
||||
if (APPLICATION_CONTEXT == null) { |
||||
APPLICATION_CONTEXT = new AnnotationConfigApplicationContext(BatchConfig.class, DataJdbcConfiguration.class, CLIConfig.class, ServiceConfig.class); |
||||
} |
||||
return APPLICATION_CONTEXT; |
||||
} |
||||
|
||||
private static String quote(Object o) { |
||||
return "'" + o + "'"; |
||||
} |
||||
} |
||||
|
||||
class TableStatistic { |
||||
|
||||
public String name; |
||||
public long rowCount; |
||||
public List<ColumnStatistic> columns; |
||||
public List<ForeignKey> exportedKeys; |
||||
public List<ForeignKey> importedKeys; |
||||
public List<String> primaryKeys; |
||||
|
||||
public TableStatistic(String name, long rowCount) { |
||||
this.name = name; |
||||
this.rowCount = rowCount; |
||||
this.columns = new ArrayList<>(); |
||||
this.exportedKeys = new ArrayList<>(); |
||||
this.importedKeys = new ArrayList<>(); |
||||
this.primaryKeys = new ArrayList<>(); |
||||
} |
||||
} |
||||
|
||||
class ColumnStatistic { |
||||
public String name; |
||||
public JDBCType type; |
||||
public int size; |
||||
public int decimalDigits; |
||||
public boolean isNullable; |
||||
public boolean isAutoincrement; |
||||
public String comment; |
||||
public long countNull; |
||||
public double percentNull; |
||||
public long uniqueCount; |
||||
public double uniquePercent; |
||||
public Optional<Integer> max_count = Optional.empty(); |
||||
public Optional<Integer> min_count = Optional.empty(); |
||||
public List<RankingEntry> ranking; |
||||
public Optional<Integer> minLen = Optional.empty(); |
||||
public Optional<Integer> maxLen = Optional.empty(); |
||||
public Optional<Double> min = Optional.empty(); |
||||
public Optional<Double> max = Optional.empty(); |
||||
public Optional<Double> avg = Optional.empty(); |
||||
public Optional<Date> earliestDate = Optional.empty(); |
||||
public Optional<Date> latestDate = Optional.empty(); |
||||
public Optional<Time> earliestTime = Optional.empty(); |
||||
public Optional<Time> latestTime = Optional.empty(); |
||||
public Optional<Timestamp> earliestTimestamp = Optional.empty(); |
||||
public Optional<Timestamp> latestTimestamp = Optional.empty(); |
||||
} |
||||
|
||||
class RankingEntry { |
||||
|
||||
public RankingEntry(String label, int count) { |
||||
this.label = label; |
||||
this.count = count; |
||||
} |
||||
|
||||
public String label; |
||||
public int count; |
||||
} |
||||
|
||||
|
||||
class ForeignKey { |
||||
|
||||
public ForeignKey(String fromColumn, String toTable, String toColumn) { |
||||
this.fromColumn = fromColumn; |
||||
this.toTable = toTable; |
||||
this.toColumn = toColumn; |
||||
} |
||||
|
||||
public String fromColumn; |
||||
public String toTable; |
||||
public String toColumn; |
||||
} |
||||
@ -0,0 +1,927 @@
@@ -0,0 +1,927 @@
|
||||
package de.superx.bin; |
||||
|
||||
import static de.superx.servlet.SxSQL_Server.DEFAULT_MANDANTEN_ID; |
||||
|
||||
import java.sql.ResultSet; |
||||
import java.sql.SQLException; |
||||
import java.sql.Timestamp; |
||||
import java.text.SimpleDateFormat; |
||||
import java.util.ArrayList; |
||||
import java.util.Date; |
||||
import java.util.HashMap; |
||||
import java.util.LinkedList; |
||||
import java.util.List; |
||||
import java.util.Random; |
||||
import java.util.regex.Matcher; |
||||
import java.util.regex.Pattern; |
||||
|
||||
import javax.sql.DataSource; |
||||
|
||||
import org.apache.commons.cli.CommandLine; |
||||
import org.apache.commons.cli.CommandLineParser; |
||||
import org.apache.commons.cli.GnuParser; |
||||
import org.apache.commons.cli.HelpFormatter; |
||||
import org.apache.commons.cli.Option; |
||||
import org.apache.commons.cli.Options; |
||||
import org.apache.commons.cli.ParseException; |
||||
import org.apache.log4j.Level; |
||||
import org.apache.log4j.Logger; |
||||
import org.springframework.batch.core.ExitStatus; |
||||
import org.springframework.beans.BeansException; |
||||
import org.springframework.context.ApplicationContext; |
||||
import org.springframework.context.annotation.AnnotationConfigApplicationContext; |
||||
import org.springframework.context.support.GenericApplicationContext; |
||||
import org.springframework.dao.DataIntegrityViolationException; |
||||
import org.springframework.jdbc.BadSqlGrammarException; |
||||
import org.springframework.jdbc.core.JdbcTemplate; |
||||
import org.springframework.jdbc.core.ResultSetExtractor; |
||||
import org.springframework.jdbc.core.RowCallbackHandler; |
||||
|
||||
import de.superx.rest.EtlJobApi; |
||||
import de.superx.rest.model.job.Component; |
||||
import de.superx.rest.model.job.JobExecutionStatus; |
||||
import de.superx.rest.model.job.StepExecutionStatus; |
||||
import de.superx.servlet.SuperXManager; |
||||
import de.superx.servlet.SxPools; |
||||
import de.superx.spring.batch.His1DataSources; |
||||
import de.superx.spring.cli.config.CLIConfig; |
||||
import de.superx.spring.config.BatchConfig; |
||||
import de.superx.spring.config.DataJdbcConfiguration; |
||||
|
||||
|
||||
public class EtlFuzzer { |
||||
|
||||
private static String HELP = "Use this tool to check for problems with incongruencies, " |
||||
+ "which may occur " |
||||
+ "while running etljobs. Note that Tomcat should not be running," |
||||
+ " due to resulting interferences. Additionally, " |
||||
+ "the usage of the latest version of the data sample in the source systems is recommended" |
||||
+ "WARNING: you can only run either all of the mbs modules at once or on of them, before you" |
||||
+ "must reset your mbs database. It will crash otherwise."; |
||||
|
||||
private static Logger log = Logger.getLogger(EtlFuzzer.class.getName()); |
||||
|
||||
private static int EXTREME_INT = 2100000000; |
||||
|
||||
private static Random random = new Random(); |
||||
|
||||
private static final String COLNAME_NULLABLE = "is_nullable"; |
||||
|
||||
private static final String COLNAME_COLUMN = "column_name"; |
||||
|
||||
private static final String COLNAME_DATATYPE = "data_type"; |
||||
|
||||
private static final String COLNAME_CHAR_MAX = "character_maximum_length"; |
||||
|
||||
private static final String COLNAME_PRECISION = "numeric_precision"; |
||||
|
||||
private static final String COLNAME_SCALE = "numeric_scale"; |
||||
|
||||
private static final String COLNAME_TABLE = "table_name"; |
||||
|
||||
private static final String COLNAME_RELNAME = "relname"; |
||||
|
||||
private static final String DB_MBS = "mbs"; |
||||
|
||||
private static final String DB_H1 = "hisinone"; |
||||
|
||||
private static int extreme_int_puffer = 10000; |
||||
|
||||
private static String SQL_GET_ACCESSED_TABLES_H1 = "Select * from pg_stat_all_tables where " |
||||
+ "(seq_scan > 0 or seq_tup_read > 0 or idx_scan > 0) and schemaname = 'hisinone'"; |
||||
|
||||
private static String SQL_GET_ACCESSED_TABLES_MBS = "Select * from pg_stat_all_tables where " |
||||
+ "(seq_scan > 0 or seq_tup_read > 0 or idx_scan > 0) and schemaname = 'mbs'"; |
||||
|
||||
private static String SQL_GET_TABLE_METADATA_BASE = "select column_name, data_type," |
||||
+ " is_nullable, numeric_precision, numeric_scale, character_maximum_length from information_schema.columns where table_name ="; |
||||
|
||||
private static String SQL_RESET_STATISTICS = "Select pg_stat_reset()"; |
||||
|
||||
private static String[] modulesH1 = {"kern", "cob", "prom", "sos", "res", "zul"}; |
||||
|
||||
private static String[] modulesMBS = {"fin","ivs","bau"}; |
||||
|
||||
private static final int MODULES_KERN_INDEX = 0; |
||||
private static final int MODULES_COB_INDEX = 1; |
||||
private static final int MODULES_PROM_INDEX = 2; |
||||
private static final int MODULES_SOS_INDEX = 3; |
||||
private static final int MODULES_RES_INDEX = 4; |
||||
private static final int MODULES_ZUL_INDEX = 5; |
||||
private static final int MODULES_FIN_INDEX = 0; |
||||
private static final int MODULES_IVS_INDEX = 1; |
||||
private static final int MODULES_BAU_INDEX = 2; |
||||
|
||||
private static int next_id = 0; |
||||
|
||||
private static int next_join_nr = 0; |
||||
|
||||
private static ArrayList<String> errors = new ArrayList<>(); |
||||
|
||||
private static void setNext_join_nr(int next_nr) { |
||||
EtlFuzzer.next_join_nr = next_nr; |
||||
} |
||||
|
||||
private static void setNext_id(int next_id) { |
||||
EtlFuzzer.next_id = next_id; |
||||
} |
||||
|
||||
static HashMap<String, String> error_tables_columns = new HashMap<String, String>(); |
||||
|
||||
private static ArrayList<String> error_keys = new ArrayList<>(); |
||||
|
||||
private static ApplicationContext applicationContext; |
||||
|
||||
private static GenericApplicationContext APPLICATION_CONTEXT = null; |
||||
|
||||
/** |
||||
* Generates a string of a given length consisting of the character 'x'. |
* @param length the desired string length |
||||
*/ |
||||
private static String StringGenerator(int length) { |
||||
|
||||
StringBuilder sb = new StringBuilder(); |
||||
for (int i = 0; i < length; i++) { |
||||
sb.append("x"); |
||||
} |
||||
return sb.toString(); |
||||
|
||||
} |
||||
|
||||
/** |
||||
* Generates a numeric value with {@code precision} digits in total, of which {@code scale} are decimal places. |
* @param precision total number of digits |
* @param scale number of decimal places |
* @return the generated numeric value as a string |
||||
*/ |
||||
private static String NumGen(int precision, int scale) { |
||||
StringBuilder sb = new StringBuilder(); |
||||
for (int i = 0; i < precision; i++) { |
||||
sb.append("9"); |
||||
} |
||||
String num = sb.toString(); |
||||
String firstPart = num.substring(0, precision-scale); |
||||
String secondPart = num.substring(precision-scale); |
||||
return firstPart + "." + secondPart; |
||||
} |
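/* |
 * Illustrative example (not part of the original sources): NumGen(5, 2) builds the digit |
 * string "99999" and splits it into "999" + "." + "99", i.e. it returns "999.99". |
 */ |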
||||
|
||||
/** |
||||
* Returns all tables that were accessed by an unload script. |
* @param jt JdbcTemplate used to run the query |
* @param tables ArrayList into which the table names are collected |
* @return ArrayList containing the table names |
||||
* @throws SQLException |
||||
*/ |
||||
private static ArrayList<String> getAccessedTablesH1(JdbcTemplate jt, ArrayList<String> tables) throws SQLException { |
||||
|
||||
ResultSetExtractor<Void> extractor = new ResultSetExtractor<Void>() { |
||||
@Override |
||||
public Void extractData(ResultSet rs) throws SQLException { |
||||
while(rs.next()) { |
||||
tables.add(rs.getString(COLNAME_RELNAME)); |
||||
} |
||||
return null; |
||||
} |
||||
}; |
||||
jt.query(SQL_GET_ACCESSED_TABLES_H1, extractor); |
||||
return tables; |
||||
} |
||||
|
||||
/** |
||||
* Returns all tables of the mbs source system that were accessed by ETL load routines. |
* @return ArrayList containing the table names |
||||
* @throws SQLException |
||||
*/ |
||||
private static ArrayList<String> getAccessedTablesMBS(JdbcTemplate jt, ArrayList<String> tables) throws SQLException { |
||||
|
||||
ResultSetExtractor<Void> extractor = new ResultSetExtractor<Void>() { |
||||
@Override |
||||
public Void extractData(ResultSet rs) throws SQLException { |
||||
while(rs.next()) { |
||||
tables.add(rs.getString(COLNAME_RELNAME)); |
||||
} |
||||
return null; |
||||
} |
||||
}; |
||||
jt.query(SQL_GET_ACCESSED_TABLES_MBS, extractor); |
||||
return tables; |
||||
} |
||||
|
||||
/** |
||||
* Returns the SQL statement for the metadata of a table from the hisinone source system. |
* @param table table name |
* @return SQL statement |
||||
*/ |
||||
private static String sqlTableMetaDataH1(String table) { |
||||
return SQL_GET_TABLE_METADATA_BASE + "'" + table + "'"; |
||||
} |
||||
|
||||
/** |
||||
* Returns the SQL statement for the metadata of a table from the mbs source system. |
* @param table table name |
* @return SQL statement |
||||
*/ |
||||
private static String sqlTableMetaDataMBS(String table) { |
||||
return String.format("%s '%s' and table_schema='mbs'",SQL_GET_TABLE_METADATA_BASE, table); |
||||
} |
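/* |
 * Illustrative example (the table name is a placeholder): sqlTableMetaDataMBS("buchung") yields |
 *   select column_name, data_type, is_nullable, numeric_precision, numeric_scale, |
 *   character_maximum_length from information_schema.columns |
 *   where table_name = 'buchung' and table_schema='mbs' |
 * (shown here wrapped over several lines for readability). |
 */ |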
||||
|
||||
/** |
||||
* For the HLR (Hauptladeroutine, i.e. the main load routine). |
||||
* @param <T> |
||||
* @param beanName |
||||
* @param requiredType |
||||
* @return |
||||
*/ |
||||
public static <T> T getBean(String beanName, Class<T> requiredType) { |
||||
if (applicationContext != null) { |
||||
return applicationContext.getBean(beanName, requiredType); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
/** |
||||
* For the HLR. |
||||
*/ |
||||
private static void initSxPools() { |
||||
try { |
||||
List<String> mandantenNamen = new LinkedList<String>(); |
||||
mandantenNamen.add(DEFAULT_MANDANTEN_ID); |
||||
SxPools.closeAll(); |
||||
SxPools.init(mandantenNamen); |
||||
SxPools.get(DEFAULT_MANDANTEN_ID).init(); |
||||
SxPools.get(DEFAULT_MANDANTEN_ID).initLogging(true, Level.DEBUG); |
||||
} catch (Exception e) { |
||||
System.out.println("error while initialising the SuperX pools:"); |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* For the HLR. |
||||
* @return |
||||
*/ |
||||
private static GenericApplicationContext createContext() { |
||||
/* |
||||
* https://docs.spring.io/spring-framework/docs/current/javadoc-api/org/springframework/context/annotation/AnnotationConfigApplicationContext.html
|
||||
* quote: |
||||
* "In case of multiple @Configuration classes, @Bean methods defined in later classes will override those defined in earlier classes. |
||||
* This can be leveraged to deliberately override certain bean definitions via an extra @Configuration class." |
||||
* - so it's alright to override some beans via "CLIConfig" |
||||
*/ |
||||
if (APPLICATION_CONTEXT == null) { |
||||
APPLICATION_CONTEXT = new AnnotationConfigApplicationContext(BatchConfig.class, DataJdbcConfiguration.class, CLIConfig.class); |
||||
} |
||||
return APPLICATION_CONTEXT; |
||||
} |
||||
|
||||
/** |
||||
* For the HLR. |
||||
* @param comp |
||||
* @param etlJob |
||||
* @return |
||||
*/ |
||||
private static boolean isHauptladeroutine(String comp, EtlJobApi etlJob) { |
||||
List<Component> installJobs = etlJob.getEtlJobs(); |
||||
for (Component comp_meta : installJobs) { |
||||
if (comp_meta != null && comp_meta.getAbbreviation().equals(comp)) { |
||||
return true; |
||||
} |
||||
} |
||||
return false; |
||||
} |
||||
|
||||
/** |
||||
* Creates a timestamp in a given format. |
* @param d either "day" (date only) or "now" (date and time) |
* @return the formatted timestamp, or an empty string for any other argument |
||||
*/ |
||||
private static String timestampMaker(String d) { |
||||
Date date = new Date(); |
||||
Timestamp ts=new Timestamp(date.getTime()); |
||||
SimpleDateFormat formatterNow = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); |
||||
SimpleDateFormat formatterDay = new SimpleDateFormat("yyyy-MM-dd"); |
||||
String time = ""; |
||||
|
||||
switch(d) { |
||||
case "day": |
||||
time = formatterDay.format(ts); |
||||
break; |
||||
case "now": |
||||
time = formatterNow.format(ts); |
||||
break; |
||||
} |
||||
return time; |
||||
} |
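/* |
 * Illustrative example: timestampMaker("day") returns the current date, e.g. "2024-01-31", |
 * timestampMaker("now") additionally includes the time, e.g. "2024-01-31 13:45:07"; any |
 * other argument yields an empty string. |
 */ |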
||||
|
||||
/** |
||||
* For the HLR. |
||||
* @param job_id |
||||
* @param context |
||||
*/ |
||||
private static void etlJob(String job_id, GenericApplicationContext context, String db) { |
||||
System.out.println(job_id); |
||||
try { |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
String database = db; |
||||
Long jobStartStatus; |
||||
if (isHauptladeroutine(job_id, componentApi)) { |
||||
jobStartStatus = componentApi.complete(job_id); |
||||
} else { |
||||
jobStartStatus = componentApi.executeJob(database, job_id); |
||||
} |
||||
handleStartResult(jobStartStatus, componentApi); |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} catch (Exception e) { |
||||
handleJobException(e, job_id); |
||||
} |
||||
} |
||||
|
||||
private static void unload(String job_id, GenericApplicationContext context, String db) { |
||||
System.out.println(job_id); |
||||
try { |
||||
EtlJobApi componentApi = context.getBean(EtlJobApi.class); |
||||
String database = db; |
||||
Long jobStartStatus; |
||||
if (isHauptladeroutine(job_id, componentApi)) { |
||||
jobStartStatus = componentApi.unload(job_id); |
||||
} else { |
||||
jobStartStatus = componentApi.executeJob(database, job_id); |
||||
} |
||||
handleStartResult(jobStartStatus, componentApi); |
||||
} catch (BeansException be) { |
||||
handleBeansException(be); |
||||
} catch (Exception e) { |
||||
handleJobException(e, job_id); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Finds the first integer value in a string that is enclosed in the characters »«. These characters are relevant for the error message. |
||||
* @param input |
||||
* @return |
||||
*/ |
||||
public static String findInt(String input) { |
||||
Pattern pattern = Pattern.compile("»\\d+«"); |
||||
Matcher matcher = pattern.matcher(input); |
||||
if (matcher.find()) { |
||||
String match = matcher.group(); |
||||
return match.substring(1, match.length() - 1); |
||||
} |
||||
return null; |
||||
} |
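/* |
 * Illustrative example (the message text is a placeholder): |
 *   findInt("Wert »2100001234« ist ungültig")  ->  "2100001234" |
 * If no »...«-enclosed number is found, the method returns null. |
 */ |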
||||
|
||||
|
||||
|
||||
|
||||
/** |
||||
* Collects the errors found in the given step execution status. |
||||
* @param ses |
||||
*/ |
||||
private static void collectErrors(StepExecutionStatus ses) { |
||||
for (StepExecutionStatus sec : ses.getChildren()) { |
||||
if(sec.getChildren() != null) { |
||||
for(StepExecutionStatus sec2 : sec.getChildren()) { |
||||
if (sec2.getExitStatus().getExitCode().equals("FAILED")) { |
||||
errors.add(sec2.getName()); |
||||
String etl_error = sec2.getExitStatus().getExitDescription(); |
||||
errors.add(etl_error); |
||||
if(error_tables_columns.containsKey(findInt(etl_error))) { |
||||
error_keys.add(findInt(etl_error)); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* For the HLR. |
||||
* @param jobStartStatus |
||||
* @param componentApi |
||||
*/ |
||||
private static void handleStartResult(Long jobStartStatus, EtlJobApi componentApi) { |
||||
if (jobStartStatus.intValue() == -1) { |
||||
System.out.println("Aktion konnte nicht gestartet werden: Es läuft bereits eine Aktion"); |
||||
System.exit(1); |
||||
} |
||||
try { |
||||
JobExecutionStatus es = componentApi.getStatus(jobStartStatus); |
||||
for(StepExecutionStatus ses : es.getStepExecutions()) { |
||||
collectErrors(ses); |
||||
} |
||||
|
||||
ExitStatus exst = es.exitStatus; |
||||
|
||||
if (exst.equals(ExitStatus.FAILED)) { |
||||
System.out.println(("Beim Ausführen der Aktion ist ein Fehler aufgetreten; Bitte prüfen Sie die Logdatei.")); |
||||
} |
||||
} catch (Exception e) { |
||||
System.out.println(("Beim Ausführen der Aktion ist ein Fehler aufgetreten:")); |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* For the HLR. |
||||
* @param e |
||||
* @param jobName |
||||
*/ |
||||
private static void handleJobException(Exception e, String jobName) { |
||||
System.out.println("error while executing the job '" + jobName + "'"); |
||||
e.printStackTrace(); |
||||
} |
||||
|
||||
/** |
||||
* For the HLR. |
||||
* @param be |
||||
*/ |
||||
private static void handleBeansException(BeansException be) { |
||||
System.out.println("configuration error or error with resolving the bean '" + EtlJobApi.class.getCanonicalName() + "'"); |
||||
be.printStackTrace(); |
||||
} |
||||
|
||||
/** |
||||
* Logs the contents of an ArrayList. |
||||
* @param list |
||||
*/ |
||||
private static void printList(ArrayList<String> list) { |
||||
for(int i = 0; i<list.size();i++) { |
||||
String text = list.get(i); |
||||
log.info(String.format(" %s", text)); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Logs the table and column names of the errors caused by the inserted integer values. |
||||
*/ |
||||
private static void printIntErrors() { |
||||
for(String key : error_keys) { |
||||
String intError = String.format("Der Wert: %s wurde initial in die Tabelle.Spalte: %s eingetragen.", key, error_tables_columns.get(key)); |
||||
log.info(intError); |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Finds the next larger value for an ID attribute. |
||||
* @param jt |
||||
* @param table |
||||
* @param column_name |
||||
*/ |
||||
private static void nextID(JdbcTemplate jt, String table, String column_name, String db) { |
||||
ResultSetExtractor<Void> extractor = new ResultSetExtractor<Void>() { |
||||
@Override |
||||
public Void extractData(ResultSet rs) throws SQLException { |
||||
while(rs.next()) { |
||||
setNext_id(rs.getInt(1)+1); |
||||
} |
||||
return null; |
||||
} |
||||
}; |
||||
if (db.equals(DB_H1)) { |
||||
String SQL_highest_id = String.format("Select max(%s) from %s", column_name, table); |
||||
jt.query(SQL_highest_id, extractor); |
||||
} else { |
||||
String SQL_highest_id = String.format("Select max(%s) from mbs.%s", column_name, table); |
||||
jt.query(SQL_highest_id, extractor); |
||||
} |
||||
} |
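/* |
 * Illustrative note (table and column names are placeholders): for db = DB_H1 this issues |
 * "Select max(id) from student" and stores the result + 1 in next_id; for the mbs case the |
 * table is prefixed with the "mbs." schema. |
 */ |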
||||
|
||||
private static void next_nr(JdbcTemplate jt, String table, String column_name) { |
||||
ResultSetExtractor<Void> extractor = new ResultSetExtractor<Void>() { |
||||
@Override |
||||
public Void extractData(ResultSet rs) throws SQLException { |
||||
while(rs.next()) { |
||||
setNext_join_nr(rs.getInt(1)+1); |
||||
} |
||||
return null; |
||||
} |
||||
}; |
||||
String SQL_highest_nr = String.format("select max(%s) from mbs.%s", column_name, table); |
||||
jt.query(SQL_highest_nr, extractor); |
||||
} |
||||
|
||||
public static String separateWithComma(ArrayList<String> array) { |
||||
StringBuilder sb = new StringBuilder(); |
||||
for (int i = 0; i < array.size(); i++) { |
||||
sb.append(array.get(i)); |
||||
if (i < array.size() - 1) { |
||||
sb.append(", "); |
||||
} |
||||
} |
||||
return sb.toString(); |
||||
} |
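/* |
 * Illustrative example: for a list containing "id", "name" and "betrag" (placeholder names) |
 * separateWithComma returns "id, name, betrag". |
 */ |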
||||
|
||||
/** |
||||
* Creates an INSERT SQL statement. |
* @param traps alternating list of column names and values |
* @param table target table |
* @param db target database (hisinone or mbs) |
* @return the INSERT statement |
||||
*/ |
||||
private static String createTrap(ArrayList<String> traps, String table, String db) { |
||||
String trap = ""; |
||||
ArrayList<String> columns = new ArrayList<>(); |
||||
ArrayList<String> values = new ArrayList<>(); |
||||
for (int i = 0;i<traps.size();i++) { |
||||
if (i % 2 == 0) { |
||||
columns.add(traps.get(i)); |
||||
} else { |
||||
values.add(traps.get(i)); |
||||
} |
||||
} |
||||
if (db.equals(DB_H1)) { |
||||
trap = String.format("Insert into %s (%s) values (%s)", table, separateWithComma(columns), separateWithComma(values)); |
||||
} else if (db.equals(DB_MBS)) { |
||||
trap = String.format("Insert into mbs.%s (%s) values (%s)", table, separateWithComma(columns), separateWithComma(values)); |
||||
} |
||||
return trap; |
||||
} |
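/* |
 * Illustrative example (names are placeholders): with traps = [id, 42, name, 'x'], |
 * table "konto" and db = DB_H1, the even positions become the column list and the odd |
 * positions the value list, so createTrap returns |
 *   Insert into konto (id, name) values (42, 'x') |
 * For DB_MBS the table is prefixed with "mbs.". |
 */ |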
||||
|
||||
/** |
||||
* |
||||
* Inserts the extreme values into the database. |
||||
* @param jt |
||||
* @param table |
||||
*/ |
||||
private static void insertTraps(JdbcTemplate jt, String table, String db) { |
||||
ResultSetExtractor<Void> extractor = new ResultSetExtractor<Void>() { |
||||
|
||||
@Override |
||||
public Void extractData(ResultSet rs) throws SQLException { |
||||
ArrayList<String> traps = new ArrayList<>(); |
||||
while(rs.next()) { |
||||
// first, check whether the column is nullable
|
||||
if(rs.getString(COLNAME_NULLABLE).equals("YES")) { |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add("null"); |
||||
continue; |
||||
//else generate extreme values
|
||||
} else if (rs.getString(COLNAME_NULLABLE).equals("NO")) { |
||||
//IDs
|
||||
if (rs.getString(COLNAME_COLUMN).equals("id") || rs.getString(COLNAME_COLUMN).endsWith("_id") || |
||||
rs.getString(COLNAME_COLUMN).equals("lid") || rs.getString(COLNAME_COLUMN).endsWith("_lid")) { |
||||
nextID(jt, table, rs.getString(COLNAME_COLUMN), db); |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(String.valueOf(next_id)); |
||||
continue; |
||||
//Join_Nr
|
||||
} else if (rs.getString(COLNAME_COLUMN).equals("join_nr")) { |
||||
next_nr(jt, table, rs.getString(COLNAME_COLUMN)); |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(String.valueOf(next_join_nr)); |
||||
//Int
|
||||
} else if (rs.getString(COLNAME_DATATYPE).equals("integer")) { |
||||
int randomNumber = EXTREME_INT + random.nextInt(extreme_int_puffer); |
||||
//String number = String.format("%d", Integer.valueOf(randomNumber));
|
||||
String number = Integer.toString(randomNumber); |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(number); |
||||
error_tables_columns.put(number, String.format("%s.%s", table, rs.getString(COLNAME_COLUMN))); |
||||
continue; |
||||
//String
|
||||
} else if (rs.getString(COLNAME_DATATYPE).equals("character varying")) { |
||||
int max_length = rs.getInt(COLNAME_CHAR_MAX); |
||||
String col = rs.getString(COLNAME_COLUMN); |
||||
if (rs.getObject(COLNAME_CHAR_MAX) != null) { |
||||
int length = max_length-table.length()-col.length()-2; |
||||
// if the table name alone is already longer than the maximum allowed length
|
||||
if (max_length <= table.length()) { |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(String.format("'%s'",StringGenerator(max_length))); |
||||
continue; |
||||
} |
||||
// if table name + column name is longer than the maximum allowed length
|
||||
if((table.length() + col.length()) >= max_length) { |
||||
length = max_length-table.length()-1; |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(String.format("'%s:%s'",table, StringGenerator(length))); |
||||
continue; |
||||
} |
||||
String longString = StringGenerator(length); |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(String.format("'%s.%s%s'",table,col, longString)); |
||||
continue; |
||||
} |
||||
int string_length = random.nextInt(501) + 2500; |
||||
String longString = StringGenerator(string_length); |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(String.format("'%s.%s:%s'",table,col, longString)); |
||||
continue; |
||||
//Timestamp
|
||||
} else if (rs.getString(COLNAME_DATATYPE).equals("timestamp without time zone")) { |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add("'" + timestampMaker("now") + "'"); |
||||
continue; |
||||
//NUMERIC
|
||||
} else if (rs.getString(COLNAME_DATATYPE).equals("numeric")) { |
||||
int precision = rs.getInt(COLNAME_PRECISION); |
||||
int scale = rs.getInt(COLNAME_SCALE); |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add(String.format("%s", NumGen(precision, scale))); |
||||
continue; |
||||
//Timestamp day only
|
||||
} else if (rs.getString(COLNAME_DATATYPE).equals("date")) { |
||||
traps.add(rs.getString(COLNAME_COLUMN)); |
||||
traps.add("'" + timestampMaker("day") + "'"); |
||||
continue; |
||||
} |
||||
} |
||||
} |
||||
if (db.equals(DB_H1)) { |
||||
String trap = createTrap(traps, table, db); |
||||
log.info(trap); |
||||
String disableTrigger = String.format("alter table %s disable trigger all;", table); |
||||
String enableTrigger = String.format("alter table %s enable trigger all;", table); |
||||
jt.update(disableTrigger); |
||||
jt.update(trap); |
||||
jt.update(enableTrigger); |
||||
} else if (db.equals(DB_MBS)) { |
||||
String trap = createTrap(traps, table, db); |
||||
log.info(trap); |
||||
String disableTrigger = String.format("alter table mbs.%s disable trigger all;", table); |
||||
String enableTrigger = String.format("alter table mbs.%s enable trigger all;", table); |
||||
jt.update(disableTrigger); |
||||
jt.update(trap); |
||||
jt.update(enableTrigger); |
||||
} |
||||
return null; |
||||
} |
||||
}; |
||||
if (db.equals(DB_H1)) { |
||||
String query = sqlTableMetaDataH1(table); |
||||
try { |
||||
jt.query(query, extractor); |
||||
} catch(Exception e) { |
||||
log.error("Fehler bei Statement (Vorbereitung/Setup):\n" + query + "\n" + e.getMessage()); |
||||
} |
||||
} else if (db.equals(DB_MBS)) { |
||||
String query = sqlTableMetaDataMBS(table); |
||||
try { |
||||
jt.query(query, extractor); |
||||
} catch(Exception e) { |
||||
log.error("Fehler bei Statement (Vorbereitung/Setup):\n" + query + "\n" + e.getMessage()); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Resets the Postgres statistics. |
||||
* @param jt |
||||
*/ |
||||
private static void resetStats(JdbcTemplate jt) { |
||||
log.info("Zurücksetzung der Statistiken gestartet..."); |
||||
|
||||
RowCallbackHandler handler = new RowCallbackHandler() { |
||||
@Override |
||||
public void processRow(ResultSet rs) throws SQLException { |
||||
//nothing
|
||||
} |
||||
}; |
||||
jt.query(SQL_RESET_STATISTICS, handler); |
||||
log.info("Statistiken erfolgreich zurückgesetzt"); |
||||
} |
||||
|
||||
/** |
||||
* Removes the extreme values from the database. |
||||
* @param jt |
||||
* @param tables |
||||
*/ |
||||
@SuppressWarnings("unused") |
||||
private static void removeTraps(JdbcTemplate jt, ArrayList<String> tables) { |
||||
|
||||
for (int i = 0; i<tables.size();i++){ |
||||
String table = tables.get(i); |
||||
if (!table.startsWith("tmp")) { |
||||
String deleteEntry = String.format("ALTER TABLE %s DISABLE TRIGGER ALL; delete from %s where id in (select max(id) from %s); alter table %s enable trigger all;", table,table,table,table); |
||||
jt.update(deleteEntry); |
||||
} |
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Finds incongruencies between the hisinone and eduetl databases. |
||||
*/ |
||||
public static void checkH1(String[] checked_modules) { |
||||
ArrayList<String> tables = new ArrayList<>(); |
||||
JdbcTemplate jt = new JdbcTemplate(); |
||||
|
||||
try (GenericApplicationContext context = createContext()){ |
||||
initSxPools(); |
||||
|
||||
// set up database access
|
||||
His1DataSources hds = context.getBean(His1DataSources.class); |
||||
DataSource ds = hds.get(DB_H1); |
||||
jt.setDataSource(ds); |
||||
|
||||
// reset statistics
|
||||
resetStats(jt); |
||||
|
||||
// run the unload scripts (analysis pass)
|
||||
log.info("Unloadscript (Analysedurchlauf) gestartet..."); |
||||
for(String m : checked_modules) { |
||||
unload(m,context, DB_H1); |
||||
} |
||||
|
||||
log.info("Unloadscript (Analysedurchlauf) erfolgreich abgeschlossen."); |
||||
|
||||
//Zugegriffene Tabellen abfangen
|
||||
tables = getAccessedTablesH1(jt, tables); |
||||
printList(tables); |
||||
log.info("zugegriffene Tabellen erfolgreich abgefangen"); |
||||
|
||||
//Extremwerte einfügen
|
||||
log.info("Einfügen von Extremwerten gestartet..."); |
||||
for(int i = 0; i<tables.size();i++) { |
||||
insertTraps(jt, tables.get(i), DB_H1); |
||||
} |
||||
log.info("Extremwerte erfolgreich eingefügt"); |
||||
|
||||
//Hauptladeroutine mit eingefügten Extremwerten durchführen (Fuzzingtestdurchlauf)
|
||||
log.info("Hauptladeroutine (Fuzzingtestdurchlauf) gestartet..."); |
||||
for(String m : checked_modules) { |
||||
etlJob(m,context, DB_H1); |
||||
} |
||||
log.info("Hauptladeroutine (Fuzzingtestdurchlauf) erfolgreich."); |
||||
printList(errors); |
||||
printIntErrors(); |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} catch (DataIntegrityViolationException e) { |
||||
e.printStackTrace(); |
||||
} catch (BadSqlGrammarException e) { |
||||
e.printStackTrace(); |
||||
} catch (Exception e){ |
||||
e.printStackTrace(); |
||||
} finally { |
||||
errors.clear(); |
||||
error_keys.clear(); |
||||
//removeTraps(jt,tables);
|
||||
//log.info("Einträge erfolgreich entfernt.");
|
||||
} |
||||
} |
||||
|
||||
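/** |
* Wie {@link #checkH1(String[])}, aber gegen die MBS-Datenbank. |
* @param checked_modules zu prüfende Module |
*/ |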
public static void checkMBS(String[] checked_modules) { |
||||
ArrayList<String> tables = new ArrayList<>(); |
||||
JdbcTemplate jt = new JdbcTemplate(); |
||||
try (GenericApplicationContext context = createContext()){ |
||||
initSxPools(); |
||||
//Datenbankzugriff erstellen
|
||||
His1DataSources hds = context.getBean(His1DataSources.class); |
||||
DataSource ds = hds.get(DB_MBS); |
||||
jt.setDataSource(ds); |
||||
//Statistiken zurücksetzen
|
||||
resetStats(jt); |
||||
//Unloadscript (Analysedurchlauf) durchführen
|
||||
log.info("Unloadscript (Analysedurchlauf) gestartet..."); |
||||
for(String m : checked_modules) { |
||||
unload(m, context, DB_MBS); |
||||
} |
||||
log.info("Unloadscript (Analysedurchlauf) erfolgreich abgeschlossen."); |
||||
//Zugegriffene Tabellen abfangen
|
||||
tables = getAccessedTablesMBS(jt, tables); |
||||
printList(tables); |
||||
log.info("zugegriffene Tabellen erfolgreich abgefangen"); |
||||
//Extremwerte einfügen
|
||||
log.info("Einfügen von Extremwerten gestartet..."); |
||||
for(int i = 0; i<tables.size();i++) { |
||||
insertTraps(jt, tables.get(i), DB_MBS); |
||||
} |
||||
log.info("Extremwerte erfolgreich eingefügt"); |
||||
//Hauptladeroutine mit eingefügten Extremwerten durchführen (Fuzzingtestdurchlauf)
|
||||
log.info("Hauptladeroutine (Fuzzingtestdurchlauf) gestartet..."); |
||||
for(String m : checked_modules) { |
||||
etlJob(m,context, DB_MBS); |
||||
} |
||||
log.info("Hauptladeroutine (Fuzzingtestdurchlauf) erfolgreich."); |
||||
printList(errors); |
||||
printIntErrors(); |
||||
} catch (SQLException e) { |
||||
e.printStackTrace(); |
||||
} catch (DataIntegrityViolationException e) { |
||||
e.printStackTrace(); |
||||
} catch (BadSqlGrammarException e) { |
||||
e.printStackTrace(); |
||||
} catch (Exception e){ |
||||
e.printStackTrace(); |
||||
} finally { |
||||
errors.clear(); |
||||
error_keys.clear(); |
||||
//removeTraps(jt,tables);
|
||||
//log.info("Einträge erfolgreich entfernt.");
|
||||
} |
||||
} |
||||
|
||||
/** |
||||
* Für CommandLine Skript. |
||||
* @param args |
||||
* @param options |
||||
* @return |
||||
*/ |
||||
private static CommandLine parseArgs(String[] args, Options options) { |
||||
CommandLineParser parser = new GnuParser(); |
||||
try { |
||||
return parser.parse(options, args, false); |
||||
} catch (ParseException e) { |
||||
System.out.println("error while reading the command line parameters:"); |
||||
e.printStackTrace(); |
||||
System.exit(1); |
||||
} |
||||
return null; |
||||
} |
||||
|
||||
/** |
||||
* Für CommandLine Skript. |
||||
* @return |
||||
*/ |
||||
private static Options createOptions() { |
||||
Options options = new Options(); |
||||
Option opt; |
||||
opt = new Option("h", "help", false, "get help"); |
||||
options.addOption(opt); |
||||
opt = new Option("rh1", "run-h1", false, "runs the script for all h1 modules"); |
||||
options.addOption(opt); |
||||
opt = new Option("rmbs", "run-mbs", false, "runs the script for all mbs modules"); |
||||
options.addOption(opt); |
||||
opt = new Option("rk", "run-kern", false, "runs the script for kern module"); |
||||
options.addOption(opt); |
||||
opt = new Option("rc", "run-cob", false, "runs the script for cob module"); |
||||
options.addOption(opt); |
||||
opt = new Option("rf", "run-fin", false, "runs the script for fin module"); |
||||
options.addOption(opt); |
||||
opt = new Option("ri", "run-ivs", false, "runs the script for ivs module"); |
||||
options.addOption(opt); |
||||
opt = new Option("rb", "run-bau", false, "runs the script for bau module"); |
||||
options.addOption(opt); |
||||
opt = new Option("rs", "run-sos", false, "runs the script for sos module"); |
||||
options.addOption(opt); |
||||
opt = new Option("rp", "run-prom", false, "runs the script for prom module"); |
||||
options.addOption(opt); |
||||
opt = new Option("rz", "run-zul", false, "runs the script for zul module"); |
||||
options.addOption(opt); |
||||
opt = new Option("rr", "run-res", false, "runs the script for res module"); |
||||
options.addOption(opt); |
||||
return options; |
||||
} |
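// Example invocations (editorial sketch; the jar and main-class name of this launcher are not |
// shown in this hunk and are therefore placeholders): |
//   java -cp superx.jar <MainClass> --run-kern   -> fuzz only the "kern" module against HISinOne |
//   java -cp superx.jar <MainClass> --run-mbs    -> run the check for all MBS modules |
// The options map directly onto the checkH1()/checkMBS() dispatch in main() below. |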
||||
|
||||
/** |
||||
* Für CommandLine Skript. |
||||
* @param options |
||||
*/ |
||||
private static void printHelp(Options options) { |
||||
HelpFormatter help = new HelpFormatter(); |
||||
help.printHelp(HELP, options); |
||||
} |
||||
|
||||
public static void main(String[] args) { |
||||
System.setProperty(SuperXManager.SUPER_X_HISINONE_VERSION, "non-empty-value"); |
||||
Options options = createOptions(); |
||||
CommandLine parsedArgs = parseArgs(args, options); |
||||
if (parsedArgs.hasOption("h")) { |
||||
printHelp(options); |
||||
} else if (parsedArgs.hasOption("rh1")) { |
||||
checkH1(modulesH1); |
||||
} else if (parsedArgs.hasOption("rmbs")) { |
||||
checkMBS(modulesMBS); |
||||
} else if (parsedArgs.hasOption("rk")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesH1[MODULES_KERN_INDEX]; |
||||
checkH1(module); |
||||
} else if (parsedArgs.hasOption("rc")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesH1[MODULES_COB_INDEX]; |
||||
checkH1(module); |
||||
} else if (parsedArgs.hasOption("rf")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesMBS[MODULES_FIN_INDEX]; |
||||
checkMBS(module); |
||||
} else if (parsedArgs.hasOption("ri")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesMBS[MODULES_IVS_INDEX]; |
||||
checkMBS(module); |
||||
} else if (parsedArgs.hasOption("rb")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesMBS[MODULES_BAU_INDEX]; |
||||
checkMBS(module); |
||||
} else if (parsedArgs.hasOption("rs")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesH1[MODULES_SOS_INDEX]; |
||||
checkH1(module); |
||||
} else if (parsedArgs.hasOption("rp")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesH1[MODULES_PROM_INDEX]; |
||||
checkH1(module); |
||||
} else if (parsedArgs.hasOption("rz")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesH1[MODULES_ZUL_INDEX]; |
||||
checkH1(module); |
||||
} else if (parsedArgs.hasOption("rr")) { |
||||
String[] module = new String[1]; |
||||
module[0] = modulesH1[MODULES_RES_INDEX]; |
||||
checkH1(module); |
||||
} else { |
||||
printHelp(options); |
||||
} |
||||
} |
||||
} |
||||
@ -0,0 +1,745 @@
@@ -0,0 +1,745 @@
|
||||
package de.superx.bin; |
||||
|
||||
|
||||
import java.io.BufferedReader; |
||||
import java.io.ByteArrayInputStream; |
||||
import java.io.File; |
||||
import java.io.FileInputStream; |
||||
import java.io.FileNotFoundException; |
||||
import java.io.FileOutputStream; |
||||
import java.io.FileReader; |
||||
import java.io.IOException; |
||||
import java.io.InputStreamReader; |
||||
import java.io.PrintStream; |
||||
import java.io.Reader; |
||||
import java.io.UnsupportedEncodingException; |
||||
import java.sql.Connection; |
||||
import java.sql.DatabaseMetaData; |
||||
import java.sql.PreparedStatement; |
||||
import java.sql.ResultSet; |
||||
import java.sql.ResultSetMetaData; |
||||
import java.sql.SQLException; |
||||
import java.sql.Types; |
||||
import java.text.ParseException; |
||||
import java.util.Hashtable; |
||||
import java.util.StringTokenizer; |
||||
|
||||
import org.postgresql.PGConnection; |
||||
import org.postgresql.copy.CopyManager; |
||||
|
||||
import de.memtext.util.DateUtils; |
||||
import de.memtext.util.GetOpts; |
||||
import de.memtext.util.GetOpts.Options; |
||||
import de.superx.bin.SxConnection.DriverClass; |
||||
/* |
||||
* |
||||
* Basierend auf de.superx.common.FileToTableUpload mit Unterstützung für Header |
||||
* |
||||
*/ |
||||
public class GxstageCSVImport { |
||||
private String logfile; |
||||
private String dbpropfile; |
||||
private String mode="stop"; |
||||
private String inFormat; |
||||
private String targetTable; |
||||
private String srcFile; |
||||
private static final boolean header=true; |
||||
|
||||
private String delim="^"; |
||||
private String encoding; |
||||
private String inserts=""; |
||||
private boolean continueAfterError; |
||||
private boolean removeTrailingDelim=true; |
||||
private boolean isPostgres; |
||||
private boolean useBatch=true; |
||||
private static int maxCols=1000; |
||||
private String[] insert_cols = new String[maxCols]; |
||||
private int[] insert_types = new int[maxCols]; |
||||
|
||||
private int numberOfColumns; |
||||
public long numberOfRows; |
||||
private Connection uploadConnection; |
||||
private DatabaseMetaData dbmd; |
||||
private PreparedStatement pst; |
||||
private static String usage = |
||||
"-------------------------------------\n" |
||||
+ "Gebrauch: java de.superx.bin.GxstageCSVImport \n-dbproperties:<Pfad zu db.properties> \n" |
||||
+ "-table:<Tabellenname> \n-unl:<Dateipfad Quelldatei>(optional, default ist Tabellenname.unl) \n-delim:<delimiter>(optional, default ist ^) \n-header:<true|false>(optional, mit Feldüberschriften, default ist false)\n" |
||||
+ "-mode:<stop|exclude-row>(optional, default is stop) #Bei Fehlerhaften Daten kann das Hochladen gestoppt werden, oder der Datensatz wird übersprungen" |
||||
+ "\n-inserts:<false|simple|batch>(optional, default is false) #Bei -inserts:simple und batch werden Die Rohdaten in Insert-sql-Statements übersetzt (nur für Debugging-Zwecke, sehr langsam. Der Modus exclude-field ist darüberhinaus nicht anwendbar)" |
||||
+ "\n-encoding:<utf8,ISO-8859-1, default ist System.file.encoding>" |
||||
+ "\n---------------------------------------------------"; |
||||
|
||||
public Connection getUploadConnection() { |
||||
return uploadConnection; |
||||
} |
||||
public void setUploadConnection(Connection uploadConnection) { |
||||
this.uploadConnection = uploadConnection; |
||||
} |
||||
public boolean isRemoveTrailingDelim() { |
||||
return removeTrailingDelim; |
||||
} |
||||
public void setRemoveTrailingDelim(boolean removeTrailingDelim) { |
||||
this.removeTrailingDelim = removeTrailingDelim; |
||||
} |
||||
|
||||
|
||||
|
||||
public String getDbpropfile() { |
||||
return dbpropfile; |
||||
} |
||||
public void setDbpropfile(String dbpropfile) { |
||||
this.dbpropfile = dbpropfile; |
||||
} |
||||
public String getMode() { |
||||
return mode; |
||||
} |
||||
public void setMode(String mode) { |
||||
if (!mode.equals("stop") && !mode.equals("exclude-field")&& !mode.equals("transaction")) |
||||
mode = "exclude-row"; |
||||
this.mode = mode; |
||||
} |
||||
public String getInFormat() { |
||||
return inFormat; |
||||
} |
||||
public void setInFormat(String inFormat) { |
||||
this.inFormat = inFormat; |
||||
} |
||||
public String getTargetTable() { |
||||
return targetTable; |
||||
} |
||||
public void setTargetTable(String targetTable) { |
||||
this.targetTable = targetTable; |
||||
} |
||||
public String getSrcFile() { |
||||
return srcFile; |
||||
} |
||||
public void setSrcFile(String srcFile) { |
||||
this.srcFile = srcFile; |
||||
} |
||||
|
||||
public String getDelim() { |
||||
return delim; |
||||
} |
||||
public void setDelim(String delim) { |
||||
if (delim.equals("tab")) |
||||
delim = "\t"; //Tab
|
||||
if (delim.equals("")) |
||||
delim = "^"; //default Delimiter
|
||||
this.delim = delim; |
||||
} |
||||
public String getEncoding() { |
||||
return encoding; |
||||
} |
||||
public void setEncoding(String encoding) { |
||||
if(encoding==null || encoding.equals("")) |
||||
encoding="UTF-8"; |
||||
this.encoding = encoding; |
||||
} |
||||
public String getInserts() { |
||||
return inserts; |
||||
} |
||||
public void setInserts(String inserts) { |
||||
if(inserts.equalsIgnoreCase("batch")) |
||||
useBatch=true; |
||||
if(inserts.equalsIgnoreCase("simple")) |
||||
useBatch=false; |
||||
|
||||
this.inserts = inserts; |
||||
} |
||||
public boolean isContinueAfterError() { |
||||
return continueAfterError; |
||||
} |
||||
public void setContinueAfterError(boolean continueAfterError) { |
||||
this.continueAfterError = continueAfterError; |
||||
} |
||||
public String uploadFile() throws Exception |
||||
{ |
||||
String protokoll=""; |
||||
if(inFormat!=null&&inFormat.equalsIgnoreCase("xml")) |
||||
{ |
||||
throw new IllegalArgumentException("xml nicht unterstützt"); |
||||
} |
||||
else |
||||
{ |
||||
protokoll=uploadCSV(); |
||||
} |
||||
if(protokoll.indexOf("Exception")>-1) |
||||
throw new Exception(protokoll); |
||||
return protokoll; |
||||
|
||||
} |
||||
|
||||
|
||||
private String uploadCSV() throws Exception |
||||
{ |
||||
String line; |
||||
String line2; |
||||
File outFile=null; |
||||
String protokoll=""; |
||||
if(isPostgres && !inserts.equalsIgnoreCase("simple") && !inserts.equalsIgnoreCase("batch")) |
||||
{ |
||||
if(removeTrailingDelim) |
||||
srcFile=removeTrailingDelim(srcFile); |
||||
|
||||
protokoll=uploadCSVinPostgres(srcFile,removeTrailingDelim); |
||||
|
||||
} |
||||
else |
||||
protokoll=uploadCSVwithAnsiSQL(srcFile); |
||||
return protokoll; |
||||
|
||||
|
||||
} |
||||
private String removeTrailingDelim(String srcFile) throws UnsupportedEncodingException, FileNotFoundException, IOException { |
||||
String line; |
||||
File outFile; |
||||
String returnSrcFile=srcFile+".tmp"; |
||||
BufferedReader in2 = new BufferedReader(new InputStreamReader(new FileInputStream(srcFile), encoding)); |
||||
|
||||
outFile=new File(srcFile+".tmp"); |
||||
FileOutputStream out = new FileOutputStream(outFile, false); |
||||
PrintStream out2 = new PrintStream(out, true, encoding); |
||||
|
||||
|
||||
while ((line = in2.readLine()) != null) { |
||||
|
||||
if (line.endsWith(delim)) |
||||
line=line.substring(0,line.length()-delim.length()); |
||||
out2.println(line); |
||||
out2.flush(); |
||||
|
||||
} |
||||
in2.close(); |
||||
out2.close(); |
||||
return returnSrcFile; |
||||
} |
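// Example (delimiter '^', values are placeholders): a line "4711^Meyer^w^" in stage_person.unl |
// is rewritten as "4711^Meyer^w" into stage_person.unl.tmp, and the .tmp file is what gets loaded. |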
||||
private String uploadCSVinPostgres(String srcFile, boolean deleteSrcFile) { |
||||
//neues Format für Postgres42-Treiber mit Format csv, geht aber auch mit Postgres9.2 Treiber
|
||||
/*copy target_table (b, a, c) |
||||
from file.csv |
||||
with (delimiter ',', format csv, header)*/ |
||||
// String test=" (hs_nr,gjahr,inst_ext,bund_fachgebiet,asp_akl,rest,rest_vj,zugang,abgang,abschr,histor_ahk,umbuch,zuschr,abschr_ges,datum,extkotr) ";
|
||||
String cols=""; |
||||
String msg=""; |
||||
boolean dataFound=true; |
||||
try |
||||
{ |
||||
if (header) |
||||
{ |
||||
|
||||
String headersInFile = getHeaderString(srcFile); |
||||
if (headersInFile!=null) |
||||
{ |
||||
cols=headersInFile.replace(delim,","); |
||||
cols=" ("+cols+") "; |
||||
} |
||||
else |
||||
{ |
||||
//headers ist null, Datei leer
|
||||
dataFound=false; |
||||
numberOfRows=0; |
||||
} |
||||
|
||||
|
||||
} |
||||
//default quote ist ", kann aber vorkommen, daher quote Zeichen auf nicht erwartetes Backspace Oktal 10 setzen
|
||||
String copySql = "COPY " + targetTable + cols+ " FROM STDIN (FORMAT csv, QUOTE '\b', DELIMITER '"+delim+"',NULL '',ENCODING '"+ encoding+"'"+(header?", HEADER true":"")+")"; |
||||
|
||||
if (dataFound) |
||||
{ |
||||
|
||||
final CopyManager cpm = ((PGConnection) uploadConnection).getCopyAPI(); |
||||
msg = ""; |
||||
FileReader in3 = new FileReader(srcFile); |
||||
Reader in4 = new BufferedReader(in3); |
||||
numberOfRows = cpm.copyIn(copySql, in4); |
||||
} |
||||
if(deleteSrcFile) |
||||
{ |
||||
File outFile=new File(srcFile); |
||||
if(outFile!=null) |
||||
outFile.delete(); |
||||
} |
||||
} catch (Exception e) { |
||||
|
||||
msg=e.toString(); |
||||
} |
||||
return msg; |
||||
} |
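// Illustration (placeholders: table "stage_person", header line "id^name^geschlecht", delimiter '^', |
// encoding UTF-8); the generated statement is roughly: |
//   COPY stage_person (id,name,geschlecht) FROM STDIN |
//     (FORMAT csv, QUOTE '\b', DELIMITER '^',NULL '',ENCODING 'UTF-8', HEADER true) |
// QUOTE is set to backspace ('\b') so that ordinary double quotes in the data are never treated |
// as CSV quoting characters. |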
||||
protected String getHeaderString(String srcFile) throws FileNotFoundException, UnsupportedEncodingException, IOException { |
||||
FileInputStream fileInputStream = new FileInputStream(srcFile); |
||||
InputStreamReader ir=new InputStreamReader(fileInputStream, encoding); |
||||
BufferedReader br = new BufferedReader(ir); |
||||
String headersInFile=br.readLine(); |
||||
br.close(); |
||||
ir.close(); |
||||
fileInputStream.close(); |
||||
return headersInFile; |
||||
} |
||||
|
||||
private String uploadCSVwithAnsiSQL(String srcFile) throws SQLException, FileNotFoundException, IOException { |
||||
numberOfRows=0; |
||||
String text; |
||||
String text2; |
||||
String msg=""; |
||||
int zeilennr=1; |
||||
int fehlerSaetze=0; |
||||
String headersInFile=null; |
||||
if (header) |
||||
{ headersInFile = getHeaderString(srcFile); |
||||
|
||||
} |
||||
BufferedReader in = new BufferedReader(new InputStreamReader(new FileInputStream(srcFile), encoding)); |
||||
initializeColumnSchema(headersInFile); |
||||
String insertHead=createPreparedStatementHead(headersInFile); |
||||
pst = uploadConnection.prepareStatement(insertHead); |
||||
if(useBatch) |
||||
pst.clearBatch(); |
||||
boolean isFirstRow=true; |
||||
while ((text = in.readLine()) != null) { |
||||
if (isFirstRow&&header) { isFirstRow=false; continue;} |
||||
if (text.endsWith("\\")) { |
||||
text=text.substring(0, text.length()-1); |
||||
text2 = in.readLine(); |
||||
if (text2 != null) { |
||||
text += "\n"+ text2; |
||||
while (text2.endsWith("\\")) { |
||||
text=text.substring(0, text.length()-1); |
||||
text2 = in.readLine(); |
||||
if (text2 != null) |
||||
text += "\n"+text2; |
||||
|
||||
} |
||||
} |
||||
} |
||||
|
||||
String prepare = |
||||
createPreparedInsertStatement(zeilennr, |
||||
insertHead, |
||||
text); |
||||
if(!prepare.equals("") && mode.equals("stop")) |
||||
{ |
||||
msg=prepare; |
||||
break; |
||||
} |
||||
if(useBatch) |
||||
pst.addBatch(); |
||||
else |
||||
pst.executeUpdate(); |
||||
numberOfRows++; |
||||
zeilennr++; // advance the line counter used in error messages |
||||
|
||||
|
||||
|
||||
} |
||||
if(useBatch) |
||||
pst.executeBatch(); |
||||
|
||||
return msg; |
||||
} |
||||
|
||||
private String createPreparedInsertStatement( |
||||
int line, |
||||
String insertHead, |
||||
String text) |
||||
throws SQLException { |
||||
int p; |
||||
int i=0; |
||||
int k=0; |
||||
String errmsg = ""; |
||||
String feld_wert; |
||||
//pst.clearParameters();
|
||||
do { |
||||
//ggf. Trennzeichen am Ende hinzufügen:
|
||||
if(!text.endsWith(delim)) |
||||
text+= delim; |
||||
p = text.indexOf(delim, i); |
||||
//logger.config("Type "+types[k]);
|
||||
//maskierte Trennzeichen abfangen:
|
||||
if(p>0 && text.substring(p-1, p).equals("\\")) |
||||
p = text.indexOf(delim, p+1); |
||||
|
||||
if (p > -1 ) { |
||||
// p cannot be -1 inside this branch, so read straight up to the next delimiter |
||||
feld_wert = text.substring(i, p); |
||||
//wenn der Feldwert zufällig das Zeichen "\\n" enthält, wird es zu "\n"
|
||||
if(feld_wert != null && (feld_wert.indexOf("\\\\n") >0 )) |
||||
{ |
||||
feld_wert=de.memtext.util.StringUtils.replace(feld_wert, "\\\\n", "\\n"); |
||||
} |
||||
//wenn der Feldwert das Zeichen "\Trennzeichen" enthält, wird der \ entfernt
|
||||
if(feld_wert != null && (feld_wert.indexOf("\\"+delim) >0 )) |
||||
{ |
||||
feld_wert=de.memtext.util.StringUtils.replace(feld_wert, "\\", ""); |
||||
} |
||||
//wenn der Feldwert das Zeichen "\\" enthält, wird ein \ entfernt
|
||||
if(feld_wert != null && (feld_wert.indexOf("\\\\") >0 )) |
||||
{ |
||||
feld_wert=de.memtext.util.StringUtils.replace(feld_wert, "\\\\", "\\"); |
||||
} |
||||
|
||||
errmsg = feld_wert_to_pst(line,k, errmsg, feld_wert); |
||||
k++; |
||||
i = p + 1; |
||||
} |
||||
|
||||
} while (p > -1); |
||||
return errmsg; |
||||
} |
||||
private String feld_wert_to_pst(int line, int col, String errmsg, String feld_wert) throws SQLException { |
||||
|
||||
|
||||
if( col >= numberOfColumns) |
||||
errmsg+= "Anzahl Spalten in Datei ist "+col+", aber es sollten nur "+(numberOfColumns-1)+" Spalten sein. Bitte prüfen Sie das Trennzeichen"; |
||||
else |
||||
{ |
||||
if (feld_wert.equals("")) |
||||
try { |
||||
pst.setNull(col + 1, insert_types[col]); |
||||
} catch (SQLException e1) { |
||||
errmsg += e1.toString(); |
||||
} else { |
||||
|
||||
switch (insert_types[col]) { |
||||
case Types.BIGINT : |
||||
case Types.TINYINT : |
||||
case Types.SMALLINT : |
||||
case Types.INTEGER : |
||||
|
||||
try { |
||||
int myInt = (int) Integer.parseInt(feld_wert.trim()); |
||||
pst.setInt(col + 1, myInt); |
||||
} catch (NumberFormatException e1) { |
||||
errmsg += e1.toString(); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col, feld_wert,e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
break; |
||||
case Types.FLOAT : |
||||
try { |
||||
float myFloat = |
||||
(float) Float.parseFloat(feld_wert.trim()); |
||||
pst.setFloat(col + 1, myFloat); |
||||
} catch (NumberFormatException e1) { |
||||
errmsg += conversionException(line, col, feld_wert,e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col, feld_wert,e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
break; |
||||
|
||||
case Types.REAL : |
||||
case Types.DOUBLE : |
||||
case Types.NUMERIC : |
||||
case Types.DECIMAL : |
||||
try { |
||||
double myDouble = |
||||
(double) Double.parseDouble(feld_wert.trim()); |
||||
pst.setDouble(col + 1, myDouble); |
||||
} catch (NumberFormatException e1) { |
||||
errmsg += conversionException(line, col, feld_wert,e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
break; |
||||
|
||||
case Types.CHAR : |
||||
case Types.VARCHAR : |
||||
default : |
||||
if(feld_wert.equals(" ")) |
||||
feld_wert=""; //Leerzeichen im UNL-File wird zu Leerstring
|
||||
try { |
||||
pst.setString(col + 1, feld_wert); |
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col, feld_wert,e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
break; |
||||
case Types.LONGVARCHAR : |
||||
ByteArrayInputStream by = |
||||
new ByteArrayInputStream(feld_wert.getBytes()); |
||||
pst.setAsciiStream( |
||||
col + 1, |
||||
by, |
||||
feld_wert.length()); |
||||
break; |
||||
case Types.DATE : |
||||
try { |
||||
java.util.Date datum = |
||||
DateUtils.parse(feld_wert.trim()); |
||||
feld_wert = DateUtils.formatUS(datum); |
||||
//Leider ist dieser Schritt wg java.sql.Date nötig
|
||||
pst.setDate( |
||||
col + 1, |
||||
java.sql.Date.valueOf(feld_wert)); |
||||
|
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} catch (ParseException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
catch (IllegalArgumentException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
|
||||
break; |
||||
case Types.TIME : |
||||
|
||||
try { |
||||
//Time zeit = (java.sql.Time)
|
||||
//DateUtils.timeParse(feld_wert);
|
||||
pst.setTime(col + 1, java.sql.Time.valueOf( |
||||
feld_wert.trim())); |
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
catch (IllegalArgumentException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
|
||||
break; |
||||
case Types.TIMESTAMP : |
||||
try { |
||||
java.util.Date datum = |
||||
DateUtils.dateTimeParse(feld_wert.trim()); |
||||
feld_wert = DateUtils.dateTimeFormatUS(datum); |
||||
//Leider ist dieser Schritt wg java.sql.Date nötig
|
||||
pst.setTimestamp( |
||||
col + 1, |
||||
java.sql.Timestamp.valueOf( |
||||
feld_wert + ".0")); |
||||
|
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col,feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} catch (ParseException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
catch (IllegalArgumentException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
|
||||
break; |
||||
|
||||
case Types.BIT : |
||||
// Types.BOOLEAN gibt es im jdk 1.3 nicht
|
||||
try { |
||||
boolean wf = |
||||
Boolean.parseBoolean(feld_wert.trim()); // parseBoolean: Boolean.getBoolean would read a system property, not the field value |
||||
pst.setBoolean(col + 1, wf); |
||||
} catch (SQLException e1) { |
||||
errmsg += conversionException(line, col, feld_wert, e1.toString()); |
||||
setFieldToNull(col, insert_types, pst); |
||||
} |
||||
//Boolean wird vom Informix-Treiber als OTHER (1111) erkannt
|
||||
//Da aber default '' ist, klappt es trotzdem
|
||||
break; |
||||
} |
||||
|
||||
} |
||||
} |
||||
return errmsg; |
||||
} |
||||
private void setFieldToNull( |
||||
int k, |
||||
int[] insert_types, |
||||
PreparedStatement pst) { |
||||
if (mode.equals("exclude-field")) |
||||
try { |
||||
pst.setNull(k + 1, insert_types[k]); |
||||
} catch (SQLException e3) { |
||||
System.err.println("Invalid Field " + (k + 1) + " could not be set to null"); |
||||
} |
||||
|
||||
} |
||||
private String conversionException(int line,int col, String field_value, String error) { |
||||
String err_msg = ""; |
||||
|
||||
err_msg = "Error in line "+line+" in Column " + (col + 1) + " "+insert_cols[col]+" value "+ field_value+ ": " + error.toString() + "; "; |
||||
|
||||
return err_msg; |
||||
} |
||||
private void initializeColumnSchema(String headersInFile) throws SQLException |
||||
{ |
||||
|
||||
|
||||
ResultSet rs = null; |
||||
ResultSetMetaData rsmd = null; |
||||
String tabelle=targetTable; |
||||
if (!dbmd.storesLowerCaseIdentifiers()) |
||||
tabelle = tabelle.toUpperCase(); |
||||
rs =dbmd.getColumns(uploadConnection.getCatalog(), null, tabelle, null); |
||||
rsmd = rs.getMetaData(); |
||||
Hashtable<String,Integer> fieldtypes=new Hashtable<String,Integer>(); |
||||
while (rs.next()) { |
||||
fieldtypes.put(rs.getObject("COLUMN_NAME").toString(),Integer.valueOf(rs.getInt("DATA_TYPE"))); |
||||
} |
||||
int i=0; |
||||
StringTokenizer st=new StringTokenizer(headersInFile,delim); |
||||
while (st.hasMoreTokens()) |
||||
{ |
||||
String colname=st.nextToken(); |
||||
insert_cols[i]= colname ; |
||||
insert_types[i] = fieldtypes.get(colname).intValue(); |
||||
i++; |
||||
} |
||||
|
||||
|
||||
numberOfColumns=i; |
||||
if(!dbmd.supportsBatchUpdates()) |
||||
useBatch=false; |
||||
|
||||
} |
||||
private String createPreparedStatementHeadOld() throws SQLException |
||||
{ |
||||
|
||||
String sql=null; |
||||
|
||||
String insert_head = "insert into " + targetTable+"("; |
||||
String insert_val=""; |
||||
for (int i = 0; i < numberOfColumns; i++) |
||||
{ |
||||
insert_head += insert_cols[i] + ", "; |
||||
insert_val+="?, "; |
||||
} |
||||
insert_head = insert_head.substring(0, insert_head.length() - 2); |
||||
insert_val = insert_val.substring(0, insert_val.length() - 2); |
||||
insert_head +=") values( "; |
||||
sql=insert_head + insert_val+");"; |
||||
return sql; |
||||
|
||||
} |
||||
|
||||
private String createPreparedStatementHead(String headersInFile) throws SQLException |
||||
{ |
||||
|
||||
String sql=null; |
||||
|
||||
String insert_head = "insert into " + targetTable+"("; |
||||
String insert_val=""; |
||||
if (headersInFile!=null) |
||||
{ |
||||
StringTokenizer st=new StringTokenizer(headersInFile,delim); |
||||
while (st.hasMoreTokens()) |
||||
{ |
||||
String colname=st.nextToken(); |
||||
insert_head += colname + ", "; |
||||
insert_val+="?, "; |
||||
} |
||||
} |
||||
else |
||||
{ |
||||
for (int i = 0; i < numberOfColumns; i++) |
||||
{ |
||||
insert_head += insert_cols[i] + ", "; |
||||
insert_val+="?, "; |
||||
} |
||||
} |
||||
insert_head = insert_head.substring(0, insert_head.length() - 2); |
||||
insert_val = insert_val.substring(0, insert_val.length() - 2); |
||||
insert_head +=") values( "; |
||||
sql=insert_head + insert_val+");"; |
||||
return sql; |
||||
|
||||
} |
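// Illustration (placeholders): for targetTable "stage_person" and the header line "id^name^geschlecht" |
// this yields: insert into stage_person(id, name, geschlecht) values( ?, ?, ?); |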
||||
public Connection getConnection(Connection myConnection,String propfile) throws Exception { |
||||
|
||||
if(myConnection==null) |
||||
{ |
||||
SxConnection mySxConnection = null; |
||||
mySxConnection = new SxConnection(); |
||||
mySxConnection.setPropfile(propfile); |
||||
|
||||
myConnection = mySxConnection.getConnection(); |
||||
|
||||
String db_driver = mySxConnection.m_DriverClass.stringValue(); |
||||
if(db_driver.equals(DriverClass.dc_postgre.stringValue())) |
||||
isPostgres=true; |
||||
} |
||||
dbmd = myConnection.getMetaData(); |
||||
|
||||
return myConnection; |
||||
|
||||
} |
||||
|
||||
public static void main(String args[]) { |
||||
try { |
||||
GetOpts.setOpts(args); |
||||
String isdrin = |
||||
GetOpts.isAllRequiredOptionsPresent(new Options[] {Options.opt_dbprops,Options.opt_table}); // -unl is optional per usage (default: <Tabellenname>.unl) |
||||
if (isdrin != null) { |
||||
System.err.println("Folgende Optionen fehlen: " + isdrin); |
||||
System.err.println(usage); |
||||
System.exit(1); |
||||
} |
||||
GxstageCSVImport myUploader=new GxstageCSVImport(); |
||||
//GetOpts myOpts=new GetOpts();
|
||||
if (GetOpts.isPresent(Options.opt_dbprops)) |
||||
myUploader.setDbpropfile(GetOpts.getValue(Options.opt_dbprops)); |
||||
if (GetOpts.isPresent(Options.opt_inFormat)) |
||||
myUploader.setInFormat(GetOpts.getValue(Options.opt_inFormat)); |
||||
if (GetOpts.isPresent(Options.opt_table)) |
||||
myUploader.setTargetTable( GetOpts.getValue(Options.opt_table)); |
||||
|
||||
if (GetOpts.isPresent(Options.opt_unl)) |
||||
myUploader.setSrcFile(GetOpts.getValue(Options.opt_unl)); |
||||
else |
||||
myUploader.setSrcFile(myUploader.getTargetTable() + ".unl"); |
||||
if (GetOpts.isPresent(Options.opt_header)&&!GetOpts.getValue(Options.opt_header).equalsIgnoreCase("true")) |
||||
{ |
||||
throw new IllegalArgumentException("nur header=true unterstützt"); |
||||
} |
||||
|
||||
|
||||
if (GetOpts.isPresent(Options.opt_delim)) |
||||
myUploader.setDelim(GetOpts.getValue(Options.opt_delim)); |
||||
if (GetOpts.isPresent(Options.opt_encoding)) |
||||
{ |
||||
String encodingParam=GetOpts.getValue(Options.opt_encoding); |
||||
|
||||
if(encodingParam != null && !encodingParam.equals("") ) |
||||
myUploader.setEncoding(encodingParam); |
||||
} |
||||
else |
||||
myUploader.setEncoding(System.getProperty("file.encoding")); |
||||
if (GetOpts.isPresent(Options.opt_mode)) { |
||||
myUploader.setMode(GetOpts.getValue(Options.opt_mode).toLowerCase()); |
||||
|
||||
} |
||||
if (GetOpts.isPresent(Options.opt_inserts)) |
||||
myUploader.setInserts(GetOpts.getValue(Options.opt_inserts)); |
||||
long jetzt = new java.util.Date().getTime() ; |
||||
myUploader.setUploadConnection(myUploader.getConnection(null,myUploader.getDbpropfile())); |
||||
if (new File(myUploader.getSrcFile()).length()==0) |
||||
{ |
||||
System.out.println("Nichts zu tun, Datei "+myUploader.getSrcFile()+" ist leer"); |
||||
} |
||||
else |
||||
{ |
||||
String protokoll=myUploader.uploadFile(); |
||||
long erstrecht = new java.util.Date().getTime() ; |
||||
System.out.println(myUploader.numberOfRows+" lines loaded"); |
||||
System.out.println("File "+myUploader.getSrcFile() +" uploaded"); |
||||
if(protokoll.equals("")) |
||||
protokoll= " in "+(erstrecht-jetzt)/1000 +" Sec."; |
||||
System.out.println(protokoll); |
||||
} |
||||
|
||||
} catch (Exception ex) { |
||||
System.err.println("Upload fehlgeschlagen: " + ex); |
||||
System.exit(1); |
||||
} |
||||
} |
||||
} |
||||
File diff suppressed because it is too large
@ -0,0 +1,550 @@
@@ -0,0 +1,550 @@
|
||||
package de.superx.bin; |
||||
|
||||
import java.awt.BorderLayout; |
||||
import java.awt.Container; |
||||
import java.awt.FlowLayout; |
||||
import java.awt.Font; |
||||
import java.awt.event.ActionEvent; |
||||
import java.awt.event.ActionListener; |
||||
import java.awt.event.ItemEvent; |
||||
import java.awt.event.ItemListener; |
||||
import java.awt.event.WindowAdapter; |
||||
import java.awt.event.WindowEvent; |
||||
import java.io.BufferedReader; |
||||
import java.io.FileInputStream; |
||||
import java.io.FileOutputStream; |
||||
import java.io.IOException; |
||||
import java.io.InputStreamReader; |
||||
import java.io.OutputStream; |
||||
import java.sql.Connection; |
||||
import java.sql.DatabaseMetaData; |
||||
import java.sql.DriverManager; |
||||
import java.sql.ResultSet; |
||||
import java.sql.Statement; |
||||
import java.util.LinkedList; |
||||
import java.util.Properties; |
||||
|
||||
import javax.swing.JButton; |
||||
import javax.swing.JComboBox; |
||||
import javax.swing.JFrame; |
||||
import javax.swing.JLabel; |
||||
import javax.swing.JOptionPane; |
||||
import javax.swing.JPanel; |
||||
import javax.swing.JPasswordField; |
||||
import javax.swing.JTextField; |
||||
|
||||
import de.memtext.util.CryptUtils; |
||||
import de.memtext.util.GetOpts; |
||||
import de.memtext.util.GetOpts.Options; |
||||
import de.memtext.widgets.LabeledComboBox; |
||||
import de.memtext.widgets.LabeledTextField; |
||||
import de.memtext.widgets.RadioButtonGroup; |
||||
import de.memtext.widgets.VerticalBox; |
||||
import de.memtext.widgets.WarningMessage; |
||||
import de.superx.util.ConnectionDialogCommon; |
||||
import de.superx.util.SqlStringUtils; |
||||
|
||||
public class PropAdminOld extends JFrame implements ActionListener, ItemListener { |
||||
static Container cp; |
||||
|
||||
static LinkedList logLevels = new LinkedList(); |
||||
|
||||
static JTextField tAdminPasswd, tdriver, turl, tadminUser, tRestrictedUser, tRestrictedPassword; |
||||
|
||||
static LabeledTextField ltfMaskCache; |
||||
|
||||
static LabeledTextField ltfUserCache; |
||||
|
||||
static JComboBox tname; |
||||
|
||||
static LabeledComboBox sqlLogLevel, xmlLogLevel; |
||||
|
||||
static LabeledTextField maxActive; |
||||
|
||||
static LabeledTextField minIdle; |
||||
|
||||
static LabeledTextField maxIdle; |
||||
|
||||
static RadioButtonGroup rbEntwicklungsmodus = new RadioButtonGroup(); |
||||
|
||||
static private String defaultDBDriver = "com.informix.jdbc.IfxDriver"; |
||||
|
||||
static private String defaultConnection = "jdbc:informix-sqli://<<hostname>>:<<Portnr>>:informixserver=<<informixserver>>;database=superx"; |
||||
|
||||
static private String defaultUser = "superx"; |
||||
|
||||
private static String dbpropfile = "db.properties"; |
||||
|
||||
static private String connTypes[][]; |
||||
|
||||
private static boolean isGuiWanted = true; |
||||
|
||||
static Properties props = new Properties(); |
||||
|
||||
static byte key[] = { (byte) 255, (byte) 221, (byte) 127, (byte) 109, (byte) 129 }; |
||||
|
||||
static int keyLength = key.length; |
||||
|
||||
private static String usage = "-------------------------------------\nGebrauch: java de.superx.bin.PropAdmin -dbproperties:<<Pfad zu den db-Properties>>(optional) \n---------------------------------------------------"; |
||||
|
||||
// private static String newAdminPassword;
|
||||
|
||||
public PropAdminOld() { |
||||
super("DB-Properties Admin @version@"); |
||||
ltfMaskCache = new LabeledTextField("Masken, die im Cache sein sollen", "select tid from maskeninfo where 1=0", 50); |
||||
ltfUserCache = new LabeledTextField("User, die im Cache sein sollen", "select tid from userinfo where 1=0", 50); |
||||
maxActive = new LabeledTextField("maxActive", 3); |
||||
minIdle = new LabeledTextField("minIdle", 3); |
||||
maxIdle = new LabeledTextField("maxIdle", 3); |
||||
|
||||
sqlLogLevel = new LabeledComboBox("Log Level SQL", logLevels); |
||||
xmlLogLevel = new LabeledComboBox("Log Level XML", logLevels); |
||||
sqlLogLevel.setSelectedIndex(1); |
||||
xmlLogLevel.setSelectedIndex(1); |
||||
rbEntwicklungsmodus.add(new JLabel("Entwicklungsmodus ")); |
||||
rbEntwicklungsmodus.add("an"); |
||||
rbEntwicklungsmodus.add("aus"); |
||||
rbEntwicklungsmodus.setSelection("an"); |
||||
JButton btnTestAdmin = new JButton("Verbindung testen"); |
||||
btnTestAdmin.addActionListener(this); |
||||
|
||||
cp = this.getContentPane(); |
||||
cp.setLayout(new BorderLayout()); |
||||
JPanel titel = new JPanel(); |
||||
JLabel ltitel = new JLabel("DB-Properties Admin für " + dbpropfile); |
||||
ltitel.setFont(new Font("Courier", Font.BOLD, 14)); |
||||
titel.add(ltitel); |
||||
cp.add(titel, "North"); |
||||
VerticalBox center = new VerticalBox(); |
||||
JPanel p0 = new JPanel(new FlowLayout(FlowLayout.LEFT)); |
||||
|
||||
tname = new JComboBox(); |
||||
|
||||
connTypes = ConnectionDialogCommon.getTypes(); |
||||
|
||||
for (int i = 0; i < connTypes.length; i++) { |
||||
tname.addItem(connTypes[i][0]); |
||||
} |
||||
// controls.add(types);
|
||||
|
||||
JLabel lname = new JLabel(" Driver :"); |
||||
lname.setFont(new Font("Courier", Font.BOLD, 12)); |
||||
// tname=new JTextField(30);
|
||||
JLabel c_name = new JLabel("(mögliche Datenbanksysteme für SuperX)"); |
||||
p0.add(lname); |
||||
p0.add(tname); |
||||
p0.add(c_name); |
||||
center.add(p0); |
||||
// JPanel center=new JPanel(new GridLayout(0,1));
|
||||
JPanel p1 = new JPanel(new FlowLayout(FlowLayout.LEFT)); |
||||
JLabel ldriver = new JLabel(" Driver Class:"); |
||||
ldriver.setFont(new Font("Courier", Font.BOLD, 12)); |
||||
tdriver = new JTextField(30); |
||||
JLabel c_driver = new JLabel("(muss im CLASSPATH stehen!)"); |
||||
p1.add(ldriver); |
||||
p1.add(tdriver); |
||||
p1.add(c_driver); |
||||
center.add(p1); |
||||
|
||||
JPanel p1b = new JPanel(new FlowLayout(FlowLayout.LEFT)); |
||||
JLabel lurl = new JLabel(" Connection URL:"); |
||||
lurl.setFont(new Font("Courier", Font.BOLD, 12)); |
||||
turl = new JTextField(50); |
||||
p1b.add(lurl); |
||||
p1b.add(turl); |
||||
center.add(p1b); |
||||
|
||||
JPanel p3 = new JPanel(new FlowLayout(FlowLayout.LEFT)); |
||||
JLabel luser = new JLabel(" Username:"); |
||||
luser.setFont(new Font("Courier", Font.BOLD, 12)); |
||||
tadminUser = new JTextField(10); |
||||
JLabel lpasswd = new JLabel(" Password:"); |
||||
lpasswd.setFont(new Font("Courier", Font.BOLD, 12)); |
||||
tAdminPasswd = new JPasswordField(10); |
||||
p3.add(luser); |
||||
p3.add(tadminUser); |
||||
p3.add(lpasswd); |
||||
p3.add(tAdminPasswd); |
||||
p3.add(btnTestAdmin); |
||||
center.add(p3); |
||||
JPanel p3b = new JPanel(new FlowLayout(FlowLayout.LEFT)); |
||||
JLabel lbl33 = new JLabel(" Eingeschränkter User:"); |
||||
lbl33.setFont(new Font("Courier", Font.BOLD, 12)); |
||||
p3b.add(lbl33); |
||||
|
||||
tRestrictedUser = new JTextField(10); |
||||
p3b.add(tRestrictedUser); |
||||
tRestrictedPassword = new JPasswordField(10); |
||||
p3b.add(new JLabel("Passwort")); |
||||
p3b.add(tRestrictedPassword); |
||||
JButton btnTest2 = new JButton("Verb.testen"); |
||||
btnTest2.addActionListener(this); |
||||
JButton btnSelectRights = new JButton("select-Rechte auf alle Tabellen"); |
||||
btnSelectRights.addActionListener(this); |
||||
|
||||
p3b.add(btnTest2); |
||||
p3b.add(btnSelectRights); |
||||
center.add(p3b); |
||||
center.add(sqlLogLevel); |
||||
center.add(xmlLogLevel); |
||||
center.addWithLeftAlignment(rbEntwicklungsmodus); |
||||
center.addWithLeftAlignment(new JLabel("<html>(Im Entwicklungsmodus werden alle SQL-Befehle von Abfragen einzeln an die Datenbank geschickt.<br>Das dauert etwas länger, ermöglicht aber bessere Fehlermeldungen.)<br>")); |
||||
|
||||
center.add(ltfMaskCache); |
||||
center.add(ltfUserCache); |
||||
|
||||
JPanel p4 = new JPanel(); |
||||
p4.add(new JLabel("<html>Der Apache ConnectionPool verwaltet die Anzahl benötigter Verbindungen dynamisch.<br>min/max idle gibt an wieviele Connections ständig bereit gehalten werden sollen.<br>maxActive gibt an wieviele Connections maximal gleichzeitig aktiv sein sollen.")); |
||||
center.add(p4); |
||||
JPanel p5 = new JPanel(); |
||||
p5.add(minIdle); |
||||
p5.add(maxIdle); |
||||
p5.add(maxActive); |
||||
center.add(p5); |
||||
cp.add(center, "Center"); |
||||
JButton OK = new JButton("Speichern"); |
||||
OK.addActionListener(this); |
||||
JPanel unten = new JPanel(); |
||||
|
||||
unten.add(OK); |
||||
cp.add(unten, "South"); |
||||
addWindowListener(new WindowAdapter() { |
||||
@Override |
||||
public void windowClosing(WindowEvent e) { |
||||
System.exit(0); |
||||
} |
||||
}); |
||||
|
||||
tname.addItemListener(this); |
||||
|
||||
this.pack(); |
||||
} |
||||
|
||||
private static void properties_einlesen() throws IOException { |
||||
if (!isGuiWanted) System.out.println("Lese ein: " + dbpropfile); |
||||
props = new Properties(); |
||||
FileInputStream is = new FileInputStream(dbpropfile); |
||||
|
||||
if (is != null) { |
||||
props.load(is); |
||||
is.close(); |
||||
} else { |
||||
if (isGuiWanted) JOptionPane.showMessageDialog(null, "Kann Properties nicht einlesen.", "DB-Prop Admin", JOptionPane.INFORMATION_MESSAGE); |
||||
System.out.println("kann properties nicht einlesen"); |
||||
} |
||||
// System.out.println(CryptUtils.decryptSimple(props.getProperty(
|
||||
// "connectionPassword")));
|
||||
} |
||||
|
||||
private static void initFormFromProps() { |
||||
String tdriverclass = props.getProperty("driverName"); |
||||
tdriver.setText(tdriverclass); |
||||
|
||||
// if(props.getProperty( "connectionURL" ).equals(""))
|
||||
// {
|
||||
// Default-Url:
|
||||
for (int i = 0; i < connTypes.length; i++) { |
||||
if (tdriverclass.equals(connTypes[i][1])) { |
||||
tname.setSelectedIndex(i); |
||||
} |
||||
} |
||||
if (props.getProperty("connectionURL") != null) turl.setText(props.getProperty("connectionURL")); |
||||
// }
|
||||
tadminUser.setText(props.getProperty("connectionName")); |
||||
tRestrictedUser.setText(props.getProperty("restrictedConnectionName")); |
||||
|
||||
if (props.getProperty("logLevelSQL") != null) sqlLogLevel.setSelectedItem(props.getProperty("logLevelSQL")); |
||||
if (props.getProperty("logLevelXML") != null) xmlLogLevel.setSelectedItem(props.getProperty("logLevelXML")); |
||||
if (props.getProperty("maskCache") != null) ltfMaskCache.setValue(props.getProperty("maskCache")); |
||||
if (props.getProperty("userCache") != null) ltfUserCache.setValue(props.getProperty("userCache")); |
||||
if (props.getProperty("minIdle") != null) |
||||
minIdle.setValue(props.getProperty("minIdle")); |
||||
else |
||||
minIdle.setValue("5"); |
||||
if (props.getProperty("maxIdle") != null) maxIdle.setValue(props.getProperty("maxIdle")); |
||||
if (props.getProperty("maxActive") != null) maxActive.setValue(props.getProperty("maxActive")); |
||||
if (props.getProperty("developmentMode") == null || props.getProperty("developmentMode").equals("true")) |
||||
rbEntwicklungsmodus.setSelection("an"); |
||||
else |
||||
rbEntwicklungsmodus.setSelection("aus"); |
||||
} |
||||
|
||||
@Override |
||||
public void actionPerformed(ActionEvent event) { |
||||
String cmd = event.getActionCommand(); |
||||
if (cmd.equals("Speichern")) |
||||
|
||||
{ |
||||
try { |
||||
formValuesToProps(); |
||||
saveProps(); |
||||
System.exit(0); |
||||
} catch (Exception e) { |
||||
System.out.println("Es ist ein Fehler aufgetreten."); |
||||
e.printStackTrace(); |
||||
WarningMessage.show(null, "Fehler: " + e, "SuperX"); |
||||
} |
||||
|
||||
} |
||||
if (cmd.equals("Verbindung testen")) { |
||||
try { |
||||
formValuesToProps(); |
||||
boolean isOk = testConnection(props.getProperty("connectionName"), tAdminPasswd.getText()); |
||||
|
||||
} catch (Exception e) { |
||||
WarningMessage.show(null, "Fehler:" + e, "PropAdmin"); |
||||
} |
||||
} |
||||
if (cmd.equals("Verb.testen")) { |
||||
try { |
||||
formValuesToProps(); |
||||
testConnection(props.getProperty("restrictedConnectionName"), tRestrictedPassword.getText()); |
||||
|
||||
} catch (Exception e) { |
||||
WarningMessage.show(null, "Fehler:" + e, "PropAdmin"); |
||||
} |
||||
} |
||||
if (cmd.equals("select-Rechte auf alle Tabellen")) { |
||||
try { |
||||
if (tRestrictedUser.getText() == null || tRestrictedUser.getText().trim().equals("")) |
||||
throw new IllegalArgumentException("Eingeschränkter user muss angegeben sein"); |
||||
if (tadminUser.getText() == null || tadminUser.getText().trim().equals("")) throw new IllegalArgumentException("User muss angegeben sein"); |
||||
if (tAdminPasswd.getText() == null || tAdminPasswd.getText().trim().equals("")) throw new IllegalArgumentException("Userpassword muss angegeben sein"); |
||||
// int result=JOptionPane.showConfirmDialog(this,
|
||||
// "Wollen Sie dem eingeschränktem User select-Rechte auf alle Tabellen geben?"
|
||||
// ,"PropAdmin",JOptionPane.YES_NO_OPTION);
|
||||
// if (result==JOptionPane.YES_OPTION)
|
||||
grantSelectToRestrictedUser(); |
||||
|
||||
} catch (Exception e) { |
||||
WarningMessage.show(null, "Fehler:" + e, "PropAdmin"); |
||||
} |
||||
} |
||||
|
||||
} |
||||
|
||||
private void grantSelectToRestrictedUser() throws Exception { |
||||
Class.forName(props.getProperty("driverName")); |
||||
|
||||
Connection conn = DriverManager.getConnection(props.getProperty("connectionURL"), tadminUser.getText(), tAdminPasswd.getText()); |
||||
Statement stm = conn.createStatement(); |
||||
ResultSet rs = null; |
||||
if (props.getProperty("driverName").indexOf("postgres") > -1) |
||||
rs = conn.getMetaData().getTables(null, null, null, null); |
||||
else |
||||
rs = conn.getMetaData().getTables("superx", "superx", null, null); |
||||
while (rs.next()) { |
||||
System.out.println(rs.getObject(3).toString()); |
||||
if (rs.getString(4) != null && (rs.getString(4).equals("TABLE") || rs.getString(4).equals("VIEW"))) |
||||
stm.execute("grant select on " + rs.getObject(3).toString() + " to " + tRestrictedUser.getText() + ";"); |
||||
} |
||||
rs.close(); |
||||
stm.close(); |
||||
conn.close(); |
||||
} |
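// For every TABLE/VIEW returned by the metadata query this issues, e.g. (names are placeholders): |
//   grant select on sos_stg_aggr to superx_lese; |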
||||
|
||||
private static boolean testConnection(String username, String password) { |
||||
boolean result = false; |
||||
try { |
||||
Class.forName(props.getProperty("driverName")); |
||||
if (props.getProperty("driverName").indexOf("postgres") > -1) { |
||||
props.put("charSet", SqlStringUtils.getEncoding().equals("xUTF-8") ? "UTF-8" : "Latin-1"); |
||||
props.put("DateStyle", "German, DMY"); |
||||
} else // Informix
|
||||
{ |
||||
props.put("GL_DATETIME", "%d.%m.%Y %T"); |
||||
props.put("CLIENT_LOCALE", SqlStringUtils.getEncoding().equals("xUTF-8") ? "UTF-8" : "de_de.8859-1"); |
||||
} |
||||
Connection conn = DriverManager.getConnection(props.getProperty("connectionURL"), username, password); |
||||
DatabaseMetaData dbmd = conn.getMetaData(); |
||||
|
||||
/* |
||||
* am 19.1.2006 auskommentiert, weil propadmin auch für DBen ausser |
||||
* superx genutzt wird. dq Statement stm=conn.createStatement(); if |
||||
* (props.getProperty("driverName").indexOf("postgres")>-1) |
||||
* |
||||
* stm.executeQuery("select date('1.1.2005');"); else |
||||
* stm.executeQuery("select 'xx' from xdummy"); |
||||
*/ |
||||
String msg = "Verbindung mit Datenbank " + conn.getCatalog() + " (" + dbmd.getDatabaseProductName() + " " + dbmd.getDatabaseProductVersion() + ") als " + username |
||||
+ " erfolgreich aufgebaut"; |
||||
if (isGuiWanted) |
||||
JOptionPane.showMessageDialog(null, msg, "DB-Prop Admin", JOptionPane.INFORMATION_MESSAGE); |
||||
else |
||||
System.out.println(msg); |
||||
// stm.close();
|
||||
conn.close(); |
||||
result = true; |
||||
} catch (Exception e) { |
||||
String msg = "Ein Fehler ist aufgetreten.\n" + e.toString(); |
||||
if (props.getProperty("driverName").indexOf("postgres") > -1 && e.toString().indexOf("Date Style") > -1) { |
||||
msg += "\nPrüfen Sie,ob Date Style auf dem Server auf German, DMY steht."; |
||||
} |
||||
if (isGuiWanted) |
||||
JOptionPane.showMessageDialog(null, msg, "DB-Prop Admin", JOptionPane.WARNING_MESSAGE); |
||||
else |
||||
System.out.println("Fehler: " + e.toString()); |
||||
|
||||
} |
||||
return result; |
||||
} |
||||
|
||||
private static void formValuesToProps() throws Exception { |
||||
|
||||
String driver = tdriver.getText(); |
||||
String name = tadminUser.getText(); |
||||
String passwd = tAdminPasswd.getText(); |
||||
String url = turl.getText(); |
||||
|
||||
if (driver == null || name == null || passwd == null || url == null || driver.equals("") || name.equals("") || url.equals("")) { |
||||
if (isGuiWanted) JOptionPane.showMessageDialog(null, "Bitte alle Felder ausfüllen!", "DB-Prop Admin", JOptionPane.INFORMATION_MESSAGE); |
||||
return; |
||||
} |
||||
|
||||
props.setProperty("connectionPassword", "sx_des" + CryptUtils.encryptStringDES(tAdminPasswd.getText())); |
||||
props.setProperty("connectionName", name); |
||||
props.setProperty("restrictedConnectionName", tRestrictedUser.getText()); |
||||
if (tRestrictedPassword.getText() != null && tRestrictedPassword.getText().length() > 1) |
||||
props.setProperty("restrictedConnectionPassword", "sx_des" + CryptUtils.encryptStringDES(tRestrictedPassword.getText())); |
||||
|
||||
props.setProperty("connectionURL", url); |
||||
props.setProperty("driverName", driver); |
||||
props.setProperty("minIdle", (String) minIdle.getValue()); |
||||
props.setProperty("maxIdle", (String) maxIdle.getValue()); |
||||
props.setProperty("maxActive", (String) maxActive.getValue()); |
||||
props.setProperty("maskCache", (String) ltfMaskCache.getValue()); |
||||
props.setProperty("userCache", (String) ltfUserCache.getValue()); |
||||
props.setProperty("logLevelSQL", sqlLogLevel.getSelectedItem().toString()); |
||||
props.setProperty("logLevelXML", xmlLogLevel.getSelectedItem().toString()); |
||||
props.setProperty("developmentMode", rbEntwicklungsmodus.getSelectedName().equals("an") ? "true" : "false"); |
||||
|
||||
} |
||||
|
||||
private static void saveProps() { |
||||
try { |
||||
props.remove("charSet"); |
||||
props.remove("DateStyle"); |
||||
|
||||
props.remove("GL_DATETIME"); |
||||
props.remove("CLIENT_LOCALE"); |
||||
OutputStream os = new FileOutputStream(dbpropfile); |
||||
props.store(os, "SuperX DB.properties"); |
||||
os.close(); |
||||
System.out.println("Änderung gespeichert in " + dbpropfile); |
||||
|
||||
} catch (IOException e) { |
||||
String fehler = "Konnte db.properties-Datei nicht speichern:" + e.toString(); |
||||
if (isGuiWanted) JOptionPane.showMessageDialog(null, fehler, "DB-Prop Admin", JOptionPane.WARNING_MESSAGE); |
||||
System.out.println(e.toString()); |
||||
System.exit(1); |
||||
} |
||||
} |
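// Editorial sketch of a db.properties file as written by saveProps(); the keys are the ones set in |
// formValuesToProps()/noGuiEdit(), all values below are placeholders: |
//   # SuperX DB.properties |
//   driverName=org.postgresql.Driver |
//   connectionURL=jdbc:postgresql://localhost:5432/superx |
//   connectionName=superx |
//   connectionPassword=sx_des<verschluesselt> |
//   restrictedConnectionName=superx_lese |
//   restrictedConnectionPassword=sx_des<verschluesselt> |
//   minIdle=5 |
//   maxIdle=10 |
//   maxActive=20 |
//   logLevelSQL=INFO |
//   logLevelXML=INFO |
//   developmentMode=true |
//   maskCache=select tid from maskeninfo where 1=0 |
//   userCache=select tid from userinfo where 1=0 |
// The connection-time properties charSet, DateStyle, GL_DATETIME and CLIENT_LOCALE are removed |
// before storing, as done above. |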
||||
|
||||
public static void main(String args[]) { |
||||
logLevels.add("SEVERE"); |
||||
logLevels.add("WARNING"); |
||||
logLevels.add("INFO"); |
||||
logLevels.add("FINE"); |
||||
logLevels.add("FINER"); |
||||
logLevels.add("FINEST"); |
||||
System.out.println("Umgebungsvariable LANG: " + System.getProperty("file.encoding")); |
||||
GetOpts.setOpts(args); |
||||
PropAdminOld propAdmin = null; |
||||
if (GetOpts.isPresent(Options.opt_dbprops)) dbpropfile = GetOpts.getValue(Options.opt_dbprops); |
||||
|
||||
if (GetOpts.isPresent(Options.opt_noguiVar0) || GetOpts.isPresent(Options.opt_noguiVar1) || GetOpts.isPresent(Options.opt_noguiVar2)) { |
||||
isGuiWanted = false; |
||||
} else { |
||||
try { |
||||
// wenn keine graphische Umgebung verfügbar ist
|
||||
// tritt ein Fehler auf.
|
||||
JFrame f = new JFrame(); |
||||
|
||||
} catch (Throwable e) { |
||||
System.out.println("Keine graphische Umgebung verfuegbar - starte Shell-Modus"); |
||||
isGuiWanted = false; |
||||
} |
||||
if (isGuiWanted) propAdmin = new PropAdminOld(); |
||||
} |
||||
|
||||
try { |
||||
properties_einlesen(); |
||||
if (isGuiWanted) { |
||||
initFormFromProps(); |
||||
} |
||||
} catch (IOException e) { |
||||
String fehler = "Konnte db.properties-Datei nicht finden.\nEs wird eine neue in \n" + dbpropfile + " \nangelegt."; |
||||
if (isGuiWanted) { |
||||
JOptionPane.showMessageDialog(null, fehler, "DB-Prop Admin", JOptionPane.INFORMATION_MESSAGE); |
||||
System.out.println(fehler); |
||||
tdriver.setText(defaultDBDriver); |
||||
tadminUser.setText(defaultUser); |
||||
turl.setText(defaultConnection); |
||||
|
||||
} else { |
||||
System.out.println("Sie koennen nur bestehende db.properties Dateien bearbeiten."); |
||||
System.out.println("Geben Sie den Parameter -dbproperties:/home/superx/../db.properties an"); |
||||
System.out.println(e); |
||||
System.exit(1); |
||||
} |
||||
} |
||||
|
||||
if (isGuiWanted) { |
||||
propAdmin.show(); |
||||
} else { |
||||
noGuiEdit(); |
||||
|
||||
} |
||||
} |
||||
|
||||
private static void noGuiEdit() { |
||||
try { |
||||
BufferedReader br = new BufferedReader(new InputStreamReader(System.in)); |
||||
|
||||
System.out.println("Die meisten Paramter koennen Sie mit dem vi bearbeiten."); |
||||
System.out.print("Passwort für uneingeschränkten User (erscheint auf Bildschirm!): "); |
||||
|
||||
String newAdminPassword = br.readLine(); |
||||
System.out.println(); |
||||
// System.out.println("you entered: " + pass);
|
||||
|
||||
if (testConnection(props.getProperty("connectionName"), newAdminPassword)) { |
||||
System.out.print("Einen Moment - Verschlüsselung läuft ..."); |
||||
props.setProperty("connectionPassword", "sx_des" + CryptUtils.encryptStringDES(newAdminPassword)); |
||||
System.out.println(" OK"); |
||||
saveProps(); |
||||
} |
||||
if (props.getProperty("restrictedConnectionName") != null) { |
||||
System.out.print("Passwort für eingeschraenkten User (erscheint auf Bildschirm!): "); |
||||
|
||||
String newPassword = br.readLine(); |
||||
|
||||
System.out.println(); |
||||
|
||||
if (testConnection(props.getProperty("restrictedConnectionName"), newPassword)) { |
||||
System.out.print("Einen Moment - Verschlüsselung läuft ..."); |
||||
props.setProperty("restrictedConnectionPassword", "sx_des" + CryptUtils.encryptStringDES(newPassword)); |
||||
System.out.println(" OK"); |
||||
saveProps(); |
||||
} |
||||
} |
||||
System.exit(0); |
||||
} catch (Exception e) { |
||||
e.printStackTrace(); |
||||
} |
||||
} |
||||
|
||||
@Override |
||||
public void itemStateChanged(ItemEvent e) { |
||||
|
||||
String s = (String) e.getItem(); |
||||
|
||||
for (int i = 0; i < connTypes.length; i++) { |
||||
if (s.equals(connTypes[i][0])) { |
||||
tdriver.setText(connTypes[i][1]); |
||||
// if(turl.getText().indexOf( ">>") < 0)
|
||||
turl.setText(connTypes[i][2]); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
Some files were not shown because too many files have changed in this diff