From 420aa4481d54f4aa3e60f58d76e00e69e2f26551 Mon Sep 17 00:00:00 2001
From: Marco Libanori
Date: Thu, 18 Jan 2024 14:25:53 +0100
Subject: [PATCH] [SONAR]

---
 .../spagobi/api/SelfServiceDataSetCRUD.java   |  12 +-
 .../serializer/DataStoreJSONSerializer.java   |   6 +-
 .../impl/sqldbcache/DataStoreStatistics.java  |   8 +-
 .../materializer/InMemoryMaterializer.java    |  10 +-
 .../GeoSpatialDimensionDatasetNormalizer.java |   4 +-
 .../SolrFacetPivotEvaluationStrategy.java     |  10 +-
 .../GeoSpatialDimensionDatasetValidator.java  |   8 +-
 .../validation/NumericColumnValidator.java    |   4 +-
 .../TimeDimensionDatasetValidator.java        |   4 +-
 .../HierarchyTreeNode.java                    |   4 +-
 .../service/rest/HierarchyMasterService.java  | 343 +++++++++++-------
 .../rest/HierarchyTechnicalService.java       |  56 +--
 .../MailDocumentDispatchChannel.java          |  39 +-
 .../UniqueMailDocumentDispatchChannel.java    | 229 ++++++------
 .../test/AbstractSQLDBCacheTest.java          |   6 +-
 .../engine/chart/util/DataSetTransformer.java |  52 +--
 .../engines/qbe/exporter/QbeXLSExporter.java  |  36 +-
 .../services/registry/LoadRegistryAction.java | 170 +++++----
 .../registry/UpdateRecordsAction.java         |  79 ++--
 .../datareader/FacetSolrDataReader.java       |  14 +-
 .../eng/qbe/statement/AbstractQbeDataSet.java |  20 +-
 .../birt/oda/impl/server/ResultSet.java       |  12 +-
 22 files changed, 601 insertions(+), 525 deletions(-)

diff --git a/knowage-core/src/main/java/it/eng/spagobi/api/SelfServiceDataSetCRUD.java b/knowage-core/src/main/java/it/eng/spagobi/api/SelfServiceDataSetCRUD.java
index 10c9746dffb..a8df2518fb3 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/api/SelfServiceDataSetCRUD.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/api/SelfServiceDataSetCRUD.java
@@ -2026,8 +2026,8 @@ private String guessColumnType(IDataStore dataStore, int columnIndex) {
 		boolean foundLong = false;
 		boolean foundInteger = false;
 		for (int i = 0; i < Math.min(ROWS_LIMIT_GUESS_TYPE_HEURISTIC, dataStore.getRecordsCount()); i++) {
-			IRecord record = dataStore.getRecordAt(i);
-			IField field = record.getFieldAt(columnIndex);
+			IRecord currRecord = dataStore.getRecordAt(i);
+			IField field = currRecord.getFieldAt(columnIndex);
 			Object value = field.getValue();
 			if ((value == null) || (value.toString().isEmpty())) {
 				continue;
@@ -2067,8 +2067,8 @@ private boolean isADate(JSONObject jsonConf, IDataStore dataStore, int columnIndex) throws JSONException {
 		String dateFormat = jsonConf.get(DataSetConstants.FILE_DATE_FORMAT).toString();
 		for (int i = 0; i < Math.min(10, dataStore.getRecordsCount()); i++) {
-			IRecord record = dataStore.getRecordAt(i);
-			IField field = record.getFieldAt(columnIndex);
+			IRecord currRecord = dataStore.getRecordAt(i);
+			IField field = currRecord.getFieldAt(columnIndex);
 			Object value = field.getValue();
 			if (value instanceof Date) {
 				if (value instanceof Timestamp)
@@ -2098,8 +2098,8 @@ private boolean isATimestamp(JSONObject jsonConf, IDataStore dataStore, int columnIndex) throws JSONException {
 		String timestampFormat = jsonConf.get(DataSetConstants.FILE_TIMESTAMP_FORMAT).toString();
 		for (int i = 0; i < Math.min(10, dataStore.getRecordsCount()); i++) {
-			IRecord record = dataStore.getRecordAt(i);
-			IField field = record.getFieldAt(columnIndex);
+			IRecord currRecord = dataStore.getRecordAt(i);
+			IField field = currRecord.getFieldAt(columnIndex);
 			Object value = field.getValue();
 			if (value instanceof Timestamp) {
 				continue;
 			}
diff --git a/knowage-core/src/main/java/it/eng/spagobi/commons/serializer/DataStoreJSONSerializer.java b/knowage-core/src/main/java/it/eng/spagobi/commons/serializer/DataStoreJSONSerializer.java
index 31f19a72528..f5baeea8b35 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/commons/serializer/DataStoreJSONSerializer.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/commons/serializer/DataStoreJSONSerializer.java
@@ -44,7 +44,7 @@ public Object serialize(Object o, Locale locale) throws SerializationException {
 		IField field;
 		JSONArray fieldsMetaDataJSON;
 		JSONObject fieldMetaDataJSON;
-		IRecord record;
+		IRecord currRecord;
 		JSONObject recordJSON;
 		int recNo;
 		IDataStore dataStore;
@@ -103,12 +103,12 @@ public Object serialize(Object o, Locale locale) throws SerializationException {
 			recNo = 0;
 			Iterator records = dataStore.iterator();
 			while(records.hasNext()) {
-				record = (IRecord)records.next();
+				currRecord = (IRecord)records.next();
 				recordJSON = new JSONObject();
 				recordJSON.put("id", ++recNo);

 				for(int i = 0; i < metadata.getJSONArray("fields").length(); i++) {
-					field = record.getFieldAt(i);
+					field = currRecord.getFieldAt(i);
 					recordJSON.put("column-" + (i+1), field.getValue().toString());
 				}

diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/cache/impl/sqldbcache/DataStoreStatistics.java b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/cache/impl/sqldbcache/DataStoreStatistics.java
index 661bc0116c6..b91944fffcd 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/cache/impl/sqldbcache/DataStoreStatistics.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/cache/impl/sqldbcache/DataStoreStatistics.java
@@ -109,7 +109,7 @@ private BigDecimal[] extimateFieldsMemorySize() {
 		return fieldsMemorySize;
 	}

-	private BigDecimal[] extimateFieldsMemorySize(IRecord record, BigDecimal[] fieldsMaxMemorySize) {
+	private BigDecimal[] extimateFieldsMemorySize(IRecord currRecord, BigDecimal[] fieldsMaxMemorySize) {

 		BigDecimal[] fieldsMemorySize = null;

@@ -128,8 +128,8 @@ private BigDecimal[] extimateFieldsMemorySize(IRecord record, BigDecimal[] field
 			if (fieldTypeName.contains("String") && extimateVarCharMemorySize) {
 				String value = "";
-				if (!(record.getFieldAt(i).getValue() instanceof String)) {
-					Object nonStringValue = record.getFieldAt(i).getValue();
+				if (!(currRecord.getFieldAt(i).getValue() instanceof String)) {
+					Object nonStringValue = currRecord.getFieldAt(i).getValue();
 					if (nonStringValue != null) {
 						value = nonStringValue.toString();
 					} else {
@@ -138,7 +138,7 @@ private BigDecimal[] extimateFieldsMemorySize(IRecord record, BigDecimal[] field
 					logger.debug("An unexpected error occured while extimating field [" + fmd.getName() + "] memory size whose type is equal to [" + fmd.getType().toString() + "]. Field forced to String");
 				} else {
-					value = (String) record.getFieldAt(i).getValue();
+					value = (String) currRecord.getFieldAt(i).getValue();
 				}
 				int valueLength = value != null ? value.length() : 0;
diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/measurecatalogue/materializer/InMemoryMaterializer.java b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/measurecatalogue/materializer/InMemoryMaterializer.java
index a9958697681..450f2d602e4 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/measurecatalogue/materializer/InMemoryMaterializer.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/measurecatalogue/materializer/InMemoryMaterializer.java
@@ -619,7 +619,7 @@ public InMemoryAggregator(IAggregationFunction aggreationFunction, int measureCo
 		public InMemoryAggregator() {
 		};

-		public void addRecord(IRecord record) {
+		public void addRecord(IRecord recordToAdd) {

 			// check if the record already exists
 			boolean recordFound = false;
@@ -628,22 +628,22 @@ public void addRecord(IRecord record) {
 				for (int j = 0; j < records.get(i).getFields().size(); j++) {
 					if (j != measureColumnIndex && !// checks only dimensions
-					(records.get(i).getFieldAt(j).equals(record.getFieldAt(j)))) {// if a dimension is not equal
+					(records.get(i).getFieldAt(j).equals(recordToAdd.getFieldAt(j)))) {// if a dimension is not equal
 						recordFound = false;
 						break;
 					}
 				}
 				if (recordFound) {
 					List recordsMeasuresValue = recordsMeasuresValues.get(i);
-					recordsMeasuresValue.add(record.getFieldAt(measureColumnIndex).getValue());// record found
+					recordsMeasuresValue.add(recordToAdd.getFieldAt(measureColumnIndex).getValue());// record found
 					break;
 				}
 			}
 			if (!recordFound) {
-				records.add(record);
+				records.add(recordToAdd);
 				List recordsMeasuresValue = new ArrayList();
-				recordsMeasuresValue.add(record.getFieldAt(measureColumnIndex).getValue());
+				recordsMeasuresValue.add(recordToAdd.getFieldAt(measureColumnIndex).getValue());
 				recordsMeasuresValues.add(recordsMeasuresValue);
 			}
 		}
diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/normalization/GeoSpatialDimensionDatasetNormalizer.java b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/normalization/GeoSpatialDimensionDatasetNormalizer.java
index a173dfc0678..7797c42cd10 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/normalization/GeoSpatialDimensionDatasetNormalizer.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/normalization/GeoSpatialDimensionDatasetNormalizer.java
@@ -115,8 +115,8 @@ public void checkCurrentHierarchyLevel(MetaModelWrapper metaModelWrapper, Hierar
 		// Get the first value of the datastore to validate
 		Iterator it = datastoreToValidate.iterator();
 		int columnIndex = datastoreToValidate.getMetaData().getFieldIndex(columnNameOnDataset);
-		IRecord record = (IRecord) it.next();
-		IField field = record.getFieldAt(columnIndex);
+		IRecord currRecord = (IRecord) it.next();
+		IField field = currRecord.getFieldAt(columnIndex);
 		fieldValue = field.getValue();
 		// then check if the value is ammissible for the Level members (default values used as identifiers values)
diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/strategy/SolrFacetPivotEvaluationStrategy.java b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/strategy/SolrFacetPivotEvaluationStrategy.java
index f3eb98a008e..b77d39e5512 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/strategy/SolrFacetPivotEvaluationStrategy.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/strategy/SolrFacetPivotEvaluationStrategy.java
@@ -238,7 +238,7 @@ private IDataStore appendCalculatedFieldColumnToSummaryRow(AbstractSelectionFiel
 	}

-	public String transformFormula(Record record, IMetaData metadata, String formula) {
+	public String transformFormula(Record currRecord, IMetaData metadata, String formula) {
 		formula = formula.replaceAll("\"", "");
 		for (int i = 0; i < metadata.getFieldCount(); i++) {
@@ -255,7 +255,7 @@ public String transformFormula(Record record, IMetaData metadata, String formula
 			Matcher m = r.matcher(formula);
 			while (m.find()) {
-				formula = formula.replace(m.group(), record.getFieldAt(i).getValue().toString());
+				formula = formula.replace(m.group(), currRecord.getFieldAt(i).getValue().toString());
 			}
 			pattern = "((?:AVG|MIN|MAX|SUM|COUNT_DISTINCT|COUNT|DISTINCT COUNT)\\()([a-zA-Z0-9\\-\\+\\/\\*\\_\\s\\$\\{\\}\\\"]*)(\\))";
@@ -263,7 +263,7 @@ public String transformFormula(Record record, IMetaData metadata, String formula
 			m = r.matcher(formula);
 			while (m.find()) {
-				formula = formula.replace(m.group(), record.getFieldAt(i).getValue().toString());
+				formula = formula.replace(m.group(), currRecord.getFieldAt(i).getValue().toString());
 			}
 		}
@@ -274,7 +274,7 @@ public String transformFormula(Record record, IMetaData metadata, String formula
 	}

-	public Map findBindings(Record record, IMetaData metadata, String formula) {
+	public Map findBindings(Record currRecord, IMetaData metadata, String formula) {
 		Map bindings = new HashMap<>();
 		bindings.put("parameters", new HashMap());
@@ -283,7 +283,7 @@ public Map findBindings(Record record, IMetaData metadata, Strin
 			if (formula.contains(metadata.getFieldName(i))) {
-				BigDecimal value = new BigDecimal(record.getFieldAt(i).getValue().toString());
+				BigDecimal value = new BigDecimal(currRecord.getFieldAt(i).getValue().toString());
 				bindings.put(metadata.getFieldName(i), value);
diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/GeoSpatialDimensionDatasetValidator.java b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/GeoSpatialDimensionDatasetValidator.java
index 2f4d2d45fee..9805680eea6 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/GeoSpatialDimensionDatasetValidator.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/GeoSpatialDimensionDatasetValidator.java
@@ -94,8 +94,8 @@ public ValidationErrors doValidateDataset(IDataStore dataStore,Map testValidationCriteria(MetaModelWrapper metaModelWrapper, Hie
 		Iterator it = datastoreToValidate.iterator();
 		int columnIndex = datastoreToValidate.getMetaData().getFieldIndex(columnNameOnDataset);
 		while( it.hasNext() ) {
-			IRecord record = (IRecord)it.next();
-			IField field = record.getFieldAt(columnIndex);
+			IRecord currRecord = (IRecord)it.next();
+			IField field = currRecord.getFieldAt(columnIndex);
 			fieldValue = field.getValue();
 			if (fieldValue != null){
 				if (fieldValue instanceof String){
diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/NumericColumnValidator.java b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/NumericColumnValidator.java
index e1e978565c5..e82c1516f72 100644
--- a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/NumericColumnValidator.java
+++ b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/NumericColumnValidator.java
@@ -60,8 +60,8 @@ public ValidationErrors doValidateDataset(IDataStore dataStore,
 		int columnIndex = dataStore.getMetaData().getFieldIndex(columnName);
 		int rowNumber = 0;
 		while( it.hasNext() ) {
-			IRecord record = (IRecord)it.next();
-			IField field = record.getFieldAt(columnIndex);
+			IRecord currRecord = (IRecord)it.next();
+			IField field = currRecord.getFieldAt(columnIndex);
(IRecord)it.next(); + IField field = currRecord.getFieldAt(columnIndex); Object fieldValue = field.getValue(); if(fieldValue != null) { if (!fieldValue.toString().isEmpty()){ diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/TimeDimensionDatasetValidator.java b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/TimeDimensionDatasetValidator.java index 0ec87281a49..4c768dbf2dc 100644 --- a/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/TimeDimensionDatasetValidator.java +++ b/knowage-core/src/main/java/it/eng/spagobi/tools/dataset/validation/TimeDimensionDatasetValidator.java @@ -86,8 +86,8 @@ public ValidationErrors doValidateDataset(IDataStore dataStore,Map getChildrensKeys() { * @return the node */ public HierarchyTreeNode getHierarchyNode(String key, boolean theLast, Integer levelToCheck, - HierarchyTreeNodeData data, IRecord record, IMetaData dsMeta, String prefix) { + HierarchyTreeNodeData data, IRecord currRecord, IMetaData dsMeta, String prefix) { /* As default, node toReturn is assigned to root. If toReturn will not be overridden, current node/leaf is attached to root. */ HierarchyTreeNode toReturn = this; @@ -249,7 +249,7 @@ public HierarchyTreeNode getHierarchyNode(String key, boolean theLast, Integer l String recordCdLev = null; if (levelToCheck > 0) { /* Retrieving record CD LEV for record when node is not root */ - recordCdLev = ((String) record + recordCdLev = ((String) currRecord .getFieldAt(dsMeta.getFieldIndex(prefix + HierarchyConstants.SUFFIX_CD_LEV + levelToCheck)) .getValue()).trim(); } diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyMasterService.java b/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyMasterService.java index 0e9a302e378..e90dbf37d56 100644 --- a/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyMasterService.java +++ b/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyMasterService.java @@ -96,30 +96,34 @@ public String getHierarchiesMaster(@QueryParam("dimension") String dimension) { IDataSourceDAO dataSourceDAO = DAOFactory.getDataSourceDAO(); IDataSource dataSource = dataSourceDAO.loadDataSourceByLabel(dataSourceName); if (dataSource == null) { - throw new SpagoBIServiceException("An unexpected error occured while retriving hierarchies names", "No datasource found for Hierarchies"); + throw new SpagoBIServiceException("An unexpected error occured while retriving hierarchies names", + "No datasource found for Hierarchies"); } // 3- execute query to get hierarchies names String hierarchyNameColumn = AbstractJDBCDataset.encapsulateColumnName("HIER_NM", dataSource); String typeColumn = AbstractJDBCDataset.encapsulateColumnName("HIER_TP", dataSource); - String hierarchyCodeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_CD, dataSource); - String hierarchyDescriptionColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_DS, dataSource); + String hierarchyCodeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_CD, + dataSource); + String hierarchyDescriptionColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_DS, + dataSource); String bkpColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.BKP_COLUMN, dataSource); String columns = hierarchyNameColumn + "," + typeColumn + "," + 
hierarchyDescriptionColumn + " "; - String queryText = "SELECT DISTINCT(" + hierarchyCodeColumn + ")," + columns + " FROM " + tableName + " WHERE " + typeColumn + "=\'MASTER\' AND (" - + bkpColumn + "= 0 OR " + bkpColumn + " IS NULL) ORDER BY " + hierarchyCodeColumn; + String queryText = "SELECT DISTINCT(" + hierarchyCodeColumn + ")," + columns + " FROM " + tableName + + " WHERE " + typeColumn + "=\'MASTER\' AND (" + bkpColumn + "= 0 OR " + bkpColumn + + " IS NULL) ORDER BY " + hierarchyCodeColumn; // IDataStore dataStore = dataSource.executeStatement("SELECT DISTINCT(" + hierarchyCodeColumn + ")," + columns + " FROM " + tableName + " WHERE " // + typeColumn + "=\'MASTER\' AND (" + bkpColumn + "= 0 OR " + bkpColumn + " IS NULL) ORDER BY " + hierarchyCodeColumn, null, null); IDataStore dataStore = dataSource.executeStatement(queryText, 0, 0); - for (Iterator iterator = dataStore.iterator(); iterator.hasNext();) { - IRecord record = (IRecord) iterator.next(); - IField field = record.getFieldAt(0); + for (Iterator iterator = dataStore.iterator(); iterator.hasNext();) { + IRecord currRecord = iterator.next(); + IField field = currRecord.getFieldAt(0); String hierarchyCode = (String) field.getValue(); - field = record.getFieldAt(1); + field = currRecord.getFieldAt(1); String hierarchyName = (String) field.getValue(); - field = record.getFieldAt(2); + field = currRecord.getFieldAt(2); String hierarchyType = (String) field.getValue(); - field = record.getFieldAt(3); + field = currRecord.getFieldAt(3); String hierarchyDescription = (String) field.getValue(); JSONObject hierarchy = new JSONObject(); @@ -133,7 +137,8 @@ public String getHierarchiesMaster(@QueryParam("dimension") String dimension) { } catch (Throwable t) { logger.error("An unexpected error occured while retriving automatic hierarchies names"); - throw new SpagoBIServiceException("An unexpected error occured while retriving automatic hierarchies names", t); + throw new SpagoBIServiceException("An unexpected error occured while retriving automatic hierarchies names", + t); } logger.debug("END"); return hierarchiesJSONArray.toString(); @@ -156,18 +161,23 @@ public String createHierarchyMaster(@Context HttpServletRequest req) throws SQLE String dimensionLabel = requestVal.getString("dimension"); String validityDate = (requestVal.isNull("validityDate")) ? null : requestVal.getString("validityDate"); String filterDate = (requestVal.isNull("filterDate")) ? null : requestVal.getString("filterDate"); - String filterHierarchy = (requestVal.isNull("filterHierarchy")) ? null : requestVal.getString("filterHierarchy"); - String filterHierType = (requestVal.isNull("filterHierType")) ? null : requestVal.getString("filterHierType"); - String optionalFilters = (requestVal.isNull("optionalFilters")) ? null : requestVal.getString("optionalFilters"); + String filterHierarchy = (requestVal.isNull("filterHierarchy")) ? null + : requestVal.getString("filterHierarchy"); + String filterHierType = (requestVal.isNull("filterHierType")) ? null + : requestVal.getString("filterHierType"); + String optionalFilters = (requestVal.isNull("optionalFilters")) ? 
null + : requestVal.getString("optionalFilters"); if (dimensionLabel == null) { - throw new SpagoBIServiceException("An unexpected error occured while creating hierarchy master", "wrong request parameters"); + throw new SpagoBIServiceException("An unexpected error occured while creating hierarchy master", + "wrong request parameters"); } IDataSource dataSource = HierarchyUtils.getDataSource(dimensionLabel); if (dataSource == null) { - throw new SpagoBIServiceException("An unexpected error occured while retriving hierarchies names", "No datasource found for Hierarchies"); + throw new SpagoBIServiceException("An unexpected error occured while retriving hierarchies names", + "No datasource found for Hierarchies"); } Hierarchies hierarchies = HierarchiesSingleton.getInstance(); @@ -175,7 +185,8 @@ public String createHierarchyMaster(@Context HttpServletRequest req) throws SQLE HashMap hierConfig = hierarchies.getConfig(dimensionLabel); - boolean forceNameAsLevel = Boolean.parseBoolean((String) hierConfig.get(HierarchyConstants.FORCE_NAME_AS_LEVEL)); + boolean forceNameAsLevel = Boolean + .parseBoolean((String) hierConfig.get(HierarchyConstants.FORCE_NAME_AS_LEVEL)); if (forceNameAsLevel) { // WORKAROUND: if code and name of hierarchy are equals put a prefix to make them different! @@ -218,22 +229,24 @@ public String createHierarchyMaster(@Context HttpServletRequest req) throws SQLE primaryKeyCount = HierarchyUtils.getCountId(primaryKey, hierTableName, dbConnection, dataSource); } - List metadataFields = new ArrayList(dimension.getMetadataFields()); + List metadataFields = new ArrayList<>(dimension.getMetadataFields()); Map metatadaFieldsMap = HierarchyUtils.getMetadataFieldsMap(metadataFields); - List generalFields = new ArrayList(hierarchy.getMetadataGeneralFields()); - List nodeFields = new ArrayList(hierarchy.getMetadataNodeFields()); + List generalFields = new ArrayList<>(hierarchy.getMetadataGeneralFields()); + List nodeFields = new ArrayList<>(hierarchy.getMetadataNodeFields()); boolean exludeHierLeaf = (filterHierarchy != null) ? true : false; - IDataStore dataStore = HierarchyUtils.getDimensionDataStore(dataSource, dimensionName, metadataFields, validityDate, optionalFilters, filterDate, - filterHierarchy, filterHierType, hierTableName, prefix, exludeHierLeaf); + IDataStore dataStore = HierarchyUtils.getDimensionDataStore(dataSource, dimensionName, metadataFields, + validityDate, optionalFilters, filterDate, filterHierarchy, filterHierType, hierTableName, prefix, + exludeHierLeaf); - Iterator iterator = dataStore.iterator(); + Iterator iterator = dataStore.iterator(); while (iterator.hasNext()) { // dataStore. - IRecord record = (IRecord) iterator.next(); + IRecord currRecord = iterator.next(); primaryKeyCount++; - insertHierarchyMaster(dbConnection, dataSource, record, dataStore, hierTableName, generalFields, nodeFields, metatadaFieldsMap, requestVal, - prefix, dimensionName, validityDate, hierConfig, primaryKey, primaryKeyCount); + insertHierarchyMaster(dbConnection, dataSource, currRecord, dataStore, hierTableName, generalFields, + nodeFields, metatadaFieldsMap, requestVal, prefix, dimensionName, validityDate, hierConfig, + primaryKey, primaryKeyCount); } saveHierarchyMasterConfiguration(dbConnection, dataSource, requestVal); @@ -273,21 +286,28 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws String validityTreeDate = requestVal.getString("validityTreeDate"); String validityDate = (requestVal.isNull("validityDate")) ? 
null : requestVal.getString("validityDate"); String filterDate = (requestVal.isNull("filterDate")) ? null : requestVal.getString("filterDate"); - String filterHierarchy = (requestVal.isNull("filterHierarchy")) ? null : requestVal.getString("filterHierarchy"); - String filterHierType = (requestVal.isNull("filterHierType")) ? null : requestVal.getString("filterHierType"); - String optionalFilters = (requestVal.isNull("optionalFilters")) ? null : requestVal.getString("optionalFilters"); + String filterHierarchy = (requestVal.isNull("filterHierarchy")) ? null + : requestVal.getString("filterHierarchy"); + String filterHierType = (requestVal.isNull("filterHierType")) ? null + : requestVal.getString("filterHierType"); + String optionalFilters = (requestVal.isNull("optionalFilters")) ? null + : requestVal.getString("optionalFilters"); String optionDate = (requestVal.isNull("optionDate")) ? null : requestVal.getString("optionDate"); - String optionHierarchy = (requestVal.isNull("optionHierarchy")) ? null : requestVal.getString("optionHierarchy"); - String optionHierType = (requestVal.isNull("optionHierType")) ? null : requestVal.getString("optionHierType"); + String optionHierarchy = (requestVal.isNull("optionHierarchy")) ? null + : requestVal.getString("optionHierarchy"); + String optionHierType = (requestVal.isNull("optionHierType")) ? null + : requestVal.getString("optionHierType"); if (dimensionLabel == null) { - throw new SpagoBIServiceException("An unexpected error occured while syncronize hierarchy master", "wrong request parameters"); + throw new SpagoBIServiceException("An unexpected error occured while syncronize hierarchy master", + "wrong request parameters"); } IDataSource dataSource = HierarchyUtils.getDataSource(dimensionLabel); if (dataSource == null) { - throw new SpagoBIServiceException("An unexpected error occured while retriving hierarchies names", "No datasource found for Hierarchies"); + throw new SpagoBIServiceException("An unexpected error occured while retriving hierarchies names", + "No datasource found for Hierarchies"); } dbConnection = dataSource.getConnection(); @@ -299,7 +319,8 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws Assert.assertNotNull(dimension, "Impossible to find a valid dimension with label [" + dimensionLabel + "]"); Hierarchy hierarchy = hierarchies.getHierarchy(dimensionLabel); - Assert.assertNotNull(hierarchy, "Impossible to find a valid hierarchy for dimension [" + dimensionLabel + "]"); + Assert.assertNotNull(hierarchy, + "Impossible to find a valid hierarchy for dimension [" + dimensionLabel + "]"); String dimensionName = dimension.getName(); String hierTableName = hierarchies.getHierarchyTableName(dimensionLabel); @@ -313,17 +334,17 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws HashMap hierConfig = hierarchies.getConfig(dimensionLabel); int numLevels = Integer.parseInt((String) hierConfig.get(HierarchyConstants.NUM_LEVELS)); - List metadataFields = new ArrayList(dimension.getMetadataFields()); + List metadataFields = new ArrayList<>(dimension.getMetadataFields()); Map metatadaFieldsMap = HierarchyUtils.getMetadataFieldsMap(metadataFields); - List generalFields = new ArrayList(hierarchy.getMetadataGeneralFields()); - List nodeFields = new ArrayList(hierarchy.getMetadataNodeFields()); + List generalFields = new ArrayList<>(hierarchy.getMetadataGeneralFields()); + List nodeFields = new ArrayList<>(hierarchy.getMetadataNodeFields()); List orderFields = null; for (int i = 0; i < 
nodeFields.size(); i++) { Field f = nodeFields.get(i); if (f.isOrderField()) { - orderFields = new LinkedList(); + orderFields = new LinkedList<>(); if (f.isSingleValue()) { orderFields.add(f.getId()); } else { @@ -350,16 +371,18 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws exludeHierLeaf = true; hierNameForDim = optionHierarchy; } - IDataStore dsNewDimensions = HierarchyUtils.getDimensionDataStore(dataSource, dimensionName, metadataFields, validityDate, optionalFilters, - validityTreeDate, hierNameForDim, filterHierType, hierTableName, prefix, exludeHierLeaf); + IDataStore dsNewDimensions = HierarchyUtils.getDimensionDataStore(dataSource, dimensionName, metadataFields, + validityDate, optionalFilters, validityTreeDate, hierNameForDim, filterHierType, hierTableName, + prefix, exludeHierLeaf); logger.error("#Records from dimension: " + dsNewDimensions.getRecordsCount()); // 3 - Get the dimension leaves already present into the original Hierarchy // IDataStore dsDimensionsFromHier = HierarchyUtils.getDimensionFromHierDataStore(dataSource, dimensionName, metadataFields, validityDate, // optionalFilters, validityTreeDate, filterHierarchy, filterHierType, hierTableName, prefix, false); - IDataStore dsDimensionsFromHier = HierarchyUtils.getDimensionFromHierDataStore(dataSource, dimensionName, metadataFields, validityDate, - optionalFilters, null, filterHierarchy, filterHierType, hierTableName, prefix, false); + IDataStore dsDimensionsFromHier = HierarchyUtils.getDimensionFromHierDataStore(dataSource, dimensionName, + metadataFields, validityDate, optionalFilters, null, filterHierarchy, filterHierType, hierTableName, + prefix, false); logger.error("#Records from hierarchy: " + dsDimensionsFromHier.getRecordsCount()); // 4 - Iterate on the dimensions' leaves used by the hierarchy datastore and check if the record is present into the dimension datastore: @@ -376,8 +399,8 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws } if (posID == -1) { logger.error("Impossible synchronize the hierarchy."); - throw new SpagoBIServiceException("Error", - "Impossible synchronize the hierarchy. Column " + prefix + HierarchyConstants.DIM_FILTER_FIELD + " not found into the resultset. "); + throw new SpagoBIServiceException("Error", "Impossible synchronize the hierarchy. Column " + prefix + + HierarchyConstants.DIM_FILTER_FIELD + " not found into the resultset. 
"); } Iterator iterFromHier = dsDimensionsFromHier.iterator(); @@ -405,15 +428,16 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws String backupHierName = HierarchyUtils.updateHierarchyForBackup(dataSource, dbConnection, paramsMap, true); // 5 - insert the new hierarchy (merged) - Iterator iterFromDim = dsNewDimensions.iterator(); + Iterator iterFromDim = dsNewDimensions.iterator(); int cont = 0; while (iterFromDim.hasNext()) { cont++; // iterate on dimension records - IRecord record = (IRecord) iterFromDim.next(); + IRecord currRecord = iterFromDim.next(); primaryKeyCount++; - insertHierarchyMaster(dbConnection, dataSource, record, dsNewDimensions, hierTableName, generalFields, nodeFields, metatadaFieldsMap, - requestVal, prefix, dimensionName, validityDate, hierConfig, primaryKey, primaryKeyCount); + insertHierarchyMaster(dbConnection, dataSource, currRecord, dsNewDimensions, hierTableName, + generalFields, nodeFields, metatadaFieldsMap, requestVal, prefix, dimensionName, validityDate, + hierConfig, primaryKey, primaryKeyCount); } logger.error("#Records inserted into master hierarchy: " + cont); @@ -426,7 +450,7 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws dbConnection.commit(); } catch (Throwable t) { - if (dbConnection.getAutoCommit() == false && dbConnection != null && !dbConnection.isClosed()) { + if (!dbConnection.getAutoCommit() && dbConnection != null && !dbConnection.isClosed()) { dbConnection.rollback(); } logger.error("An unexpected error occured while retriving dimension data"); @@ -440,22 +464,25 @@ public String syncronizeHierarchyMaster(@Context HttpServletRequest req) throws } - public static void updateOrderField(IDataSource dataSource, Connection databaseConnection, HashMap paramsMap, List listField) { + public static void updateOrderField(IDataSource dataSource, Connection databaseConnection, HashMap paramsMap, + List listField) { logger.debug("START"); String hierNameColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_NM, dataSource); String beginDtColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.BEGIN_DT, dataSource); String endDtColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.END_DT, dataSource); String hierTypeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_TP, dataSource); - String leafIdColumn = AbstractJDBCDataset.encapsulateColumnName(((String) paramsMap.get("prefix")) + "_" + HierarchyConstants.LEAF_ID, dataSource); + String leafIdColumn = AbstractJDBCDataset.encapsulateColumnName( + ((String) paramsMap.get("prefix")) + "_" + HierarchyConstants.LEAF_ID, dataSource); Date vDateConverted = Date.valueOf((String) paramsMap.get("validityDate")); - String srcTable = "(SELECT * FROM " + (String) paramsMap.get("hierarchyTable") + " WHERE " + hierNameColumn + "=?) SRC "; + String srcTable = "(SELECT * FROM " + (String) paramsMap.get("hierarchyTable") + " WHERE " + hierNameColumn + + "=?) SRC "; String updatePart = "UPDATE " + (String) paramsMap.get("hierarchyTable") + " DST, " + srcTable; - StringBuffer sb = new StringBuffer(); + StringBuilder sb = new StringBuilder(); for (int i = 0; i < listField.size(); i++) { String sep = i == (listField.size() - 1) ? " " : ", "; @@ -467,13 +494,15 @@ public static void updateOrderField(IDataSource dataSource, Connection databaseC String vDateWhereClause = " ? >= DST." + beginDtColumn + " AND ? <= DST." + endDtColumn; String joinClause = " DST." 
+ leafIdColumn + " = SRC." + leafIdColumn; - String wherePart = " WHERE DST." + hierNameColumn + "=? AND DST." + hierTypeColumn + "= ? AND " + vDateWhereClause + "AND " + joinClause; + String wherePart = " WHERE DST." + hierNameColumn + "=? AND DST." + hierTypeColumn + "= ? AND " + + vDateWhereClause + "AND " + joinClause; String updateQuery = updatePart + setPart + wherePart; logger.debug("The update query is [" + updateQuery + "]"); - try (Statement stmt = databaseConnection.createStatement(); PreparedStatement preparedStatement = databaseConnection.prepareStatement(updateQuery)) { + try (Statement stmt = databaseConnection.createStatement(); + PreparedStatement preparedStatement = databaseConnection.prepareStatement(updateQuery)) { preparedStatement.setString(1, (String) paramsMap.get("backupHierName")); preparedStatement.setString(2, (String) paramsMap.get("hierTargetName")); preparedStatement.setString(3, (String) paramsMap.get("hierTargetType")); @@ -492,18 +521,22 @@ public static void updateOrderField(IDataSource dataSource, Connection databaseC } - public void saveHierarchyMasterConfiguration(Connection dbConnection, IDataSource dataSource, JSONObject requestVal) throws SQLException, JSONException { + public void saveHierarchyMasterConfiguration(Connection dbConnection, IDataSource dataSource, JSONObject requestVal) + throws SQLException, JSONException { String hierCdColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_CD, dataSource); String hierNmColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_NM, dataSource); - String confColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_MASTERS_CONFIG, dataSource); - String idColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_MASTERS_CONFIG_ID, dataSource); - int countId = HierarchyUtils.getCountId(HierarchyConstants.HIER_MASTERS_CONFIG_ID, HierarchyConstants.HIER_MASTERS_CONFIG_TABLE, dbConnection, + String confColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_MASTERS_CONFIG, + dataSource); + String idColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_MASTERS_CONFIG_ID, dataSource); + int countId = HierarchyUtils.getCountId(HierarchyConstants.HIER_MASTERS_CONFIG_ID, + HierarchyConstants.HIER_MASTERS_CONFIG_TABLE, dbConnection, dataSource); String insertClause = idColumn + "," + hierCdColumn + "," + hierNmColumn + "," + confColumn; - String saveConfQuery = "INSERT INTO " + HierarchyConstants.HIER_MASTERS_CONFIG_TABLE + " (" + insertClause + ") VALUES (?,?,?,?)"; + String saveConfQuery = "INSERT INTO " + HierarchyConstants.HIER_MASTERS_CONFIG_TABLE + " (" + insertClause + + ") VALUES (?,?,?,?)"; logger.debug("Insert query is [" + saveConfQuery + "]"); @@ -526,9 +559,10 @@ public void saveHierarchyMasterConfiguration(Connection dbConnection, IDataSourc } - private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSource, IRecord record, IDataStore dataStore, String hTableName, - List generalFields, List nodeFields, Map metatadaFieldsMap, JSONObject requestVal, String prefix, - String dimensionName, String validityDate, HashMap hierConfig, String primaryKey, int primaryKeyCount) { + private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSource, IRecord recordToInsert, + IDataStore dataStore, String hTableName, List generalFields, List nodeFields, + Map metatadaFieldsMap, JSONObject requestVal, String prefix, String dimensionName, + String validityDate, HashMap 
hierConfig, String primaryKey, int primaryKeyCount) { logger.debug("START"); @@ -542,13 +576,13 @@ private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSour String sep = ","; // fieldsMap is necessary to keep track of the position for values we need to use later when replace the prep. stat. - Map fieldsMap = new HashMap(); + Map fieldsMap = new HashMap<>(); // typeMap is necessary to keep track of the position for types we need to use later when replace the prep. stat. - Map typeMap = new HashMap(); + Map typeMap = new HashMap<>(); // levelsMap is necessary to keep track of values for levels - Map levelsMap = new HashMap(); + Map levelsMap = new HashMap<>(); // this counter come across different logics to build the insert query and it's used to keep things sequential // int index = 0; @@ -573,21 +607,23 @@ private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSour * in this section we add columns and values related to hierarchy general fields specified in request JSON* **********************************************************************************************************/ - manageGeneralFieldsSection(dataSource, generalFields, record, metatadaFieldsMap, fieldsMap, typeMap, requestVal, columnsClause, valuesClause, sep); + manageGeneralFieldsSection(dataSource, generalFields, recordToInsert, metatadaFieldsMap, fieldsMap, typeMap, + requestVal, columnsClause, valuesClause, sep); /**************************************************************************************** * in this section we add columns and values related to levels specified in request JSON* ****************************************************************************************/ - manageLevelsSection(dataSource, nodeFields, record, metatadaFieldsMap, fieldsMap, levelsMap, requestVal, columnsClause, valuesClause, sep, prefix, - fillConfiguration, hierConfig); + manageLevelsSection(dataSource, nodeFields, recordToInsert, metatadaFieldsMap, fieldsMap, levelsMap, requestVal, + columnsClause, valuesClause, sep, prefix, fillConfiguration, hierConfig); /*********************************************************************** * in this section we add a recursive logic to calculate parents levels* ***********************************************************************/ - manageRecursiveSection(dbConnection, dataSource, nodeFields, record, metatadaFieldsMap, fieldsMap, levelsMap, requestVal, columnsClause, - valuesClause, sep, prefix, dimensionName, validityDate, fillConfiguration, hierConfig); + manageRecursiveSection(dbConnection, dataSource, nodeFields, recordToInsert, metatadaFieldsMap, fieldsMap, + levelsMap, requestVal, columnsClause, valuesClause, sep, prefix, dimensionName, validityDate, + fillConfiguration, hierConfig); checkMaxLevel(levelsMap, hierConfig); @@ -595,7 +631,8 @@ private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSour * in this section we add columns and values related to the leaf code and name* ******************************************************************************/ - manageLeafSection(dataSource, record, metatadaFieldsMap, fieldsMap, levelsMap, columnsClause, valuesClause, sep, prefix); + manageLeafSection(dataSource, recordToInsert, metatadaFieldsMap, fieldsMap, levelsMap, columnsClause, valuesClause, + sep, prefix); /****************************************************************************** * in this section we add columns and values related to the parent of the leaf* @@ -607,7 +644,8 @@ private void 
insertHierarchyMaster(Connection dbConnection, IDataSource dataSour * in this section we add column and value related to the leaf id that comes from id in dimension record* ********************************************************************************************************/ - manageLeafIdSection(dataSource, metatadaFieldsMap, record, fieldsMap, columnsClause, valuesClause, sep, prefix); + manageLeafIdSection(dataSource, metatadaFieldsMap, recordToInsert, fieldsMap, columnsClause, valuesClause, sep, + prefix); /************************************************************************ * in this section we add column and value related to the hierarchy type* @@ -625,7 +663,8 @@ private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSour * put together clauses in order to create the insert prepared statement and execute it* ***************************************************************************************/ - StringBuffer insertQuery = new StringBuffer("INSERT INTO " + hTableName + columnsClause + " VALUES " + valuesClause); + StringBuffer insertQuery = new StringBuffer( + "INSERT INTO " + hTableName + columnsClause + " VALUES " + valuesClause); logger.debug("The insert query is [" + insertQuery.toString() + "]"); @@ -649,7 +688,8 @@ private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSour try { insertPs.executeUpdate(); } catch (SQLException se) { - logger.error("Error while executing stmt: [" + insertQuery.toString() + "]\n with values: " + fieldsMap.values().toString()); + logger.error("Error while executing stmt: [" + insertQuery.toString() + "]\n with values: " + + fieldsMap.values().toString()); throw new SpagoBIServiceException("An unexpected error occured while inserting a new hierarchy", se); } finally { if (!insertPs.isClosed()) { @@ -667,9 +707,10 @@ private void insertHierarchyMaster(Connection dbConnection, IDataSource dataSour logger.debug("END"); } - private void manageGeneralFieldsSection(IDataSource dataSource, List generalFields, IRecord record, Map metatadaFieldsMap, - Map fieldsMap, Map typesMap, JSONObject requestVal, StringBuffer columnsClause, StringBuffer valuesClause, - String sep) throws JSONException, ParseException { + private void manageGeneralFieldsSection(IDataSource dataSource, List generalFields, IRecord recordToManage, + Map metatadaFieldsMap, Map fieldsMap, Map typesMap, + JSONObject requestVal, StringBuffer columnsClause, StringBuffer valuesClause, String sep) + throws JSONException, ParseException { int index = fieldsMap.size(); @@ -707,7 +748,7 @@ private void manageGeneralFieldsSection(IDataSource dataSource, List gene if (!requestVal.isNull(HierarchyConstants.BEGIN_DT)) { beginDtValue = requestVal.getString(HierarchyConstants.BEGIN_DT); } else { - Date dt = (Date) record.getFieldAt(metatadaFieldsMap.get(HierarchyConstants.BEGIN_DT)).getValue(); + Date dt = (Date) recordToManage.getFieldAt(metatadaFieldsMap.get(HierarchyConstants.BEGIN_DT)).getValue(); beginDtValue = dt.toString(); } // updating sql clauses for columns and values @@ -726,7 +767,7 @@ private void manageGeneralFieldsSection(IDataSource dataSource, List gene if (!requestVal.isNull(HierarchyConstants.END_DT)) { endDtValue = requestVal.getString(HierarchyConstants.END_DT); } else { - Date dt = (Date) record.getFieldAt(metatadaFieldsMap.get(HierarchyConstants.END_DT)).getValue(); + Date dt = (Date) recordToManage.getFieldAt(metatadaFieldsMap.get(HierarchyConstants.END_DT)).getValue(); endDtValue = dt.toString(); } @@ -741,9 +782,10 @@ private 
void manageGeneralFieldsSection(IDataSource dataSource, List gene } - private void manageLevelsSection(IDataSource dataSource, List nodeFields, IRecord record, Map metatadaFieldsMap, - Map fieldsMap, Map levelsMap, JSONObject requestVal, StringBuffer columnsClause, StringBuffer valuesClause, - String sep, String prefix, FillConfiguration fillConfiguration, HashMap hierConfig) throws JSONException { + private void manageLevelsSection(IDataSource dataSource, List nodeFields, IRecord recordToManage, + Map metatadaFieldsMap, Map fieldsMap, Map levelsMap, + JSONObject requestVal, StringBuffer columnsClause, StringBuffer valuesClause, String sep, String prefix, + FillConfiguration fillConfiguration, HashMap hierConfig) throws JSONException { // retrieve levels from request json if (requestVal.isNull("levels")) { @@ -771,14 +813,17 @@ private void manageLevelsSection(IDataSource dataSource, List nodeFields, // columns for code and name level // String cdColumn = AbstractJDBCDataset.encapsulateColumnName(prefix + "_CD_LEV" + lvlIndex, dataSource); // String nmColumn = AbstractJDBCDataset.encapsulateColumnName(prefix + "_NM_LEV" + lvlIndex, dataSource); - String cdColumn = AbstractJDBCDataset.encapsulateColumnName((String) hierConfig.get(HierarchyConstants.TREE_NODE_CD) + lvlIndex, dataSource); - String nmColumn = AbstractJDBCDataset.encapsulateColumnName((String) hierConfig.get(HierarchyConstants.TREE_NODE_NM) + lvlIndex, dataSource); + String cdColumn = AbstractJDBCDataset.encapsulateColumnName( + (String) hierConfig.get(HierarchyConstants.TREE_NODE_CD) + lvlIndex, dataSource); + String nmColumn = AbstractJDBCDataset.encapsulateColumnName( + (String) hierConfig.get(HierarchyConstants.TREE_NODE_NM) + lvlIndex, dataSource); // retrieve values to look for in dimension columns String cdLvl = lvl.getString("CD"); String nmLvl = lvl.getString("NM"); - logger.debug("In the level [" + lvlIndex + "] user has specified the code [" + cdLvl + "] and the name [" + nmLvl + "]"); + logger.debug("In the level [" + lvlIndex + "] user has specified the code [" + cdLvl + "] and the name [" + + nmLvl + "]"); Object cdValue = null; Object nmValue = null; @@ -789,8 +834,8 @@ private void manageLevelsSection(IDataSource dataSource, List nodeFields, nmValue = nmLvl; } else { // retrieve record fields looking at metafield position in the dimension - IField cdTmpField = record.getFieldAt(metatadaFieldsMap.get(cdLvl)); - IField nmTmpField = record.getFieldAt(metatadaFieldsMap.get(nmLvl)); + IField cdTmpField = recordToManage.getFieldAt(metatadaFieldsMap.get(cdLvl)); + IField nmTmpField = recordToManage.getFieldAt(metatadaFieldsMap.get(nmLvl)); // Filling logic: if the user has enabled the filling option, null values in a level are replaced by values from the previous level @@ -802,7 +847,8 @@ private void manageLevelsSection(IDataSource dataSource, List nodeFields, } concatNmValues += (nmValue == null) ? 
"" : nmValue; - logger.debug("For the level [" + lvlIndex + "] we are going to insert code [" + cdValue + "] and name [" + nmValue + "]"); + logger.debug("For the level [" + lvlIndex + "] we are going to insert code [" + cdValue + "] and name [" + + nmValue + "]"); // updating sql clauses for columns and values columnsClause.append(cdColumn + "," + nmColumn + sep); @@ -841,9 +887,10 @@ private void manageLevelsSection(IDataSource dataSource, List nodeFields, } } - private void manageRecursiveSection(Connection dbConnection, IDataSource dataSource, List nodeFields, IRecord record, - Map metatadaFieldsMap, Map fieldsMap, Map levelsMap, JSONObject requestVal, - StringBuffer columnsClause, StringBuffer valuesClause, String sep, String prefix, String dimensionName, String validityDate, + private void manageRecursiveSection(Connection dbConnection, IDataSource dataSource, List nodeFields, + IRecord recordToManage, Map metatadaFieldsMap, Map fieldsMap, + Map levelsMap, JSONObject requestVal, StringBuffer columnsClause, + StringBuffer valuesClause, String sep, String prefix, String dimensionName, String validityDate, FillConfiguration fillConfiguration, HashMap hierConfig) throws JSONException, SQLException { int index = fieldsMap.size(); @@ -851,7 +898,7 @@ private void manageRecursiveSection(Connection dbConnection, IDataSource dataSou if (!requestVal.isNull("recursive")) { - LinkedList recursiveValuesList = new LinkedList(); + LinkedList recursiveValuesList = new LinkedList<>(); // retrieve recursive object from request json JSONObject recursive = requestVal.getJSONObject("recursive"); @@ -861,7 +908,8 @@ private void manageRecursiveSection(Connection dbConnection, IDataSource dataSou String jsonRecursiveParentCd = recursive.getString(HierarchyConstants.JSON_CD_PARENT); String jsonRecursiveParentNm = recursive.getString(HierarchyConstants.JSON_NM_PARENT); - logger.debug("Parent field selected are [" + jsonRecursiveParentCd + "] and [" + jsonRecursiveParentNm + "]"); + logger.debug( + "Parent field selected are [" + jsonRecursiveParentCd + "] and [" + jsonRecursiveParentNm + "]"); // create columns for recursive fields selected in the json @@ -872,8 +920,8 @@ private void manageRecursiveSection(Connection dbConnection, IDataSource dataSou // get values from recursive selected fields - IField recursiveCdField = record.getFieldAt(metatadaFieldsMap.get(jsonRecursiveCd)); - IField recursiveNmField = record.getFieldAt(metatadaFieldsMap.get(jsonRecursiveNm)); + IField recursiveCdField = recordToManage.getFieldAt(metatadaFieldsMap.get(jsonRecursiveCd)); + IField recursiveNmField = recordToManage.getFieldAt(metatadaFieldsMap.get(jsonRecursiveNm)); Object recursiveCdValue = recursiveCdField.getValue(); Object recursiveNmValue = recursiveNmField.getValue(); @@ -888,8 +936,8 @@ private void manageRecursiveSection(Connection dbConnection, IDataSource dataSou // get values from parent fields - IField recursiveParentCdField = record.getFieldAt(metatadaFieldsMap.get(jsonRecursiveParentCd)); - IField recursiveParentNmField = record.getFieldAt(metatadaFieldsMap.get(jsonRecursiveParentNm)); + IField recursiveParentCdField = recordToManage.getFieldAt(metatadaFieldsMap.get(jsonRecursiveParentCd)); + IField recursiveParentNmField = recordToManage.getFieldAt(metatadaFieldsMap.get(jsonRecursiveParentNm)); Object recursiveParentCdValue = recursiveParentCdField.getValue(); Object recursiveParentNmValue = recursiveParentNmField.getValue(); @@ -898,8 +946,9 @@ private void manageRecursiveSection(Connection dbConnection, 
IDataSource dataSou if (recursiveParentCdValue != null) { - recursiveParentSelect(dbConnection, dataSource, recursiveValuesList, recursiveParentCdValue, recursiveParentNmValue, recursiveCdValue, - dimensionName, jsonRecursiveCd, jsonRecursiveNm, jsonRecursiveParentCd, jsonRecursiveParentNm, validityDate); + recursiveParentSelect(dbConnection, dataSource, recursiveValuesList, recursiveParentCdValue, + recursiveParentNmValue, recursiveCdValue, dimensionName, jsonRecursiveCd, jsonRecursiveNm, + jsonRecursiveParentCd, jsonRecursiveParentNm, validityDate); } int recursiveValuesSize = recursiveValuesList.size(); @@ -915,15 +964,18 @@ private void manageRecursiveSection(Connection dbConnection, IDataSource dataSou // columns for code and name level // String cdColumn = AbstractJDBCDataset.encapsulateColumnName(prefix + "_CD_LEV" + (lvlIndex), dataSource); // String nmColumn = AbstractJDBCDataset.encapsulateColumnName(prefix + "_NM_LEV" + (lvlIndex), dataSource); - String cdColumn = AbstractJDBCDataset.encapsulateColumnName((String) hierConfig.get(HierarchyConstants.TREE_NODE_CD) + lvlIndex, dataSource); - String nmColumn = AbstractJDBCDataset.encapsulateColumnName((String) hierConfig.get(HierarchyConstants.TREE_NODE_NM) + lvlIndex, dataSource); + String cdColumn = AbstractJDBCDataset.encapsulateColumnName( + (String) hierConfig.get(HierarchyConstants.TREE_NODE_CD) + lvlIndex, dataSource); + String nmColumn = AbstractJDBCDataset.encapsulateColumnName( + (String) hierConfig.get(HierarchyConstants.TREE_NODE_NM) + lvlIndex, dataSource); Object cdValue = ((recursiveValuesList.get(i)) != null) ? recursiveValuesList.get(i) : fillConfiguration.fillHandler(levelsMap, HierarchyConstants.CD_VALUE_POSITION); Object nmValue = ((recursiveValuesList.get(i + 1)) != null) ? recursiveValuesList.get(i + 1) : fillConfiguration.fillHandler(levelsMap, HierarchyConstants.NM_VALUE_POSITION); - logger.debug("In the level [" + lvlIndex + "] user has specified the code [" + cdValue + "] and the name [" + nmValue + "]"); + logger.debug("In the level [" + lvlIndex + "] user has specified the code [" + cdValue + + "] and the name [" + nmValue + "]"); concatNmValues += (nmValue == null) ? "" : nmValue; // updating sql clauses for columns and values @@ -948,9 +1000,11 @@ private void manageRecursiveSection(Connection dbConnection, IDataSource dataSou for (int n = 0; n < nodeFields.size(); n++) { Field f = nodeFields.get(n); if (f.isUniqueCode()) { - cdUniqueColumn = AbstractJDBCDataset.encapsulateColumnName(f.getId() + lvlIndex, dataSource); + cdUniqueColumn = AbstractJDBCDataset.encapsulateColumnName(f.getId() + lvlIndex, + dataSource); // cdUniqueValue = (cdValue == null || cdValue.equals("")) ? null : Helper.sha256(String.valueOf(Math.random()) + concatNmValues); - cdUniqueValue = (cdValue == null || cdValue.equals("")) ? null : Helper.sha256(concatNmValues); + cdUniqueValue = (cdValue == null || cdValue.equals("")) ? 
null + : Helper.sha256(concatNmValues); break; } } @@ -966,8 +1020,9 @@ private void manageRecursiveSection(Connection dbConnection, IDataSource dataSou } - private void manageLeafSection(IDataSource dataSource, IRecord record, Map metatadaFieldsMap, Map fieldsMap, - Map levelsMap, StringBuffer columnsClause, StringBuffer valuesClause, String sep, String prefix) { + private void manageLeafSection(IDataSource dataSource, IRecord recordToManage, Map metatadaFieldsMap, + Map fieldsMap, Map levelsMap, StringBuffer columnsClause, + StringBuffer valuesClause, String sep, String prefix) { int index = fieldsMap.size(); int lvlIndex = levelsMap.size(); @@ -983,7 +1038,8 @@ private void manageLeafSection(IDataSource dataSource, IRecord record, Map fieldsMap, Map levelsMap, StringBuffer columnsClause, - StringBuffer valuesClause, String sep) { + private void manageParentLeafSection(IDataSource dataSource, Map fieldsMap, + Map levelsMap, StringBuffer columnsClause, StringBuffer valuesClause, String sep) { int index = fieldsMap.size(); int lvlIndex = levelsMap.size(); @@ -1020,7 +1076,8 @@ private void manageParentLeafSection(IDataSource dataSource, Map metatadaFieldsMap, IRecord record, Map fieldsMap, - StringBuffer columnsClause, StringBuffer valuesClause, String sep, String prefix) { + private void manageLeafIdSection(IDataSource dataSource, Map metatadaFieldsMap, IRecord recordToManage, + Map fieldsMap, StringBuffer columnsClause, StringBuffer valuesClause, String sep, + String prefix) { int index = fieldsMap.size(); String leafIdColumn = AbstractJDBCDataset.encapsulateColumnName(prefix + "_LEAF_ID", dataSource); - IField leafIdTmpField = record.getFieldAt(metatadaFieldsMap.get(prefix + "_ID")); + IField leafIdTmpField = recordToManage.getFieldAt(metatadaFieldsMap.get(prefix + "_ID")); Object leafIdValue = leafIdTmpField.getValue(); @@ -1065,8 +1124,8 @@ private void manageLeafIdSection(IDataSource dataSource, Map me } - private void manageHierTypeSection(IDataSource dataSource, Map fieldsMap, StringBuffer columnsClause, StringBuffer valuesClause, - String sep) { + private void manageHierTypeSection(IDataSource dataSource, Map fieldsMap, + StringBuffer columnsClause, StringBuffer valuesClause, String sep) { String hierTypeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_TP, dataSource); logger.debug("Hierarchy tipe is [" + HierarchyConstants.HIER_TP_MASTER + "]"); @@ -1082,8 +1141,8 @@ private void manageHierTypeSection(IDataSource dataSource, Map } - private void manageMaxDepthSection(IDataSource dataSource, Map fieldsMap, Map levelsMap, StringBuffer columnsClause, - StringBuffer valuesClause) { + private void manageMaxDepthSection(IDataSource dataSource, Map fieldsMap, + Map levelsMap, StringBuffer columnsClause, StringBuffer valuesClause) { int index = fieldsMap.size(); int lvlIndex = levelsMap.size(); @@ -1105,8 +1164,9 @@ private void manageMaxDepthSection(IDataSource dataSource, Map * * @throws SQLException */ - private void recursiveParentSelect(Connection dbConnection, IDataSource dataSource, LinkedList parentValuesList, Object parentCdValue, - Object parentNmValue, Object oldCdValue, String dimensionName, String jsonRecursiveCd, String jsonRecursiveNm, String jsonRecursiveParentCd, + private void recursiveParentSelect(Connection dbConnection, IDataSource dataSource, + LinkedList parentValuesList, Object parentCdValue, Object parentNmValue, Object oldCdValue, + String dimensionName, String jsonRecursiveCd, String jsonRecursiveNm, String jsonRecursiveParentCd, 
String jsonRecursiveParentNm, String validityDate) throws SQLException { logger.debug("START"); @@ -1127,10 +1187,11 @@ private void recursiveParentSelect(Connection dbConnection, IDataSource dataSour String vDateConverted = HierarchyUtils.getConvertedDate(validityDate, dataSource); vDateWhereClause = vDateConverted + ">= " + beginDtColumn + " AND " + vDateConverted + " <= " + endDtColumn; } - String recursiveSelectClause = cdRecursiveColumn + "," + nmRecursiveColumn + "," + cdParentColumn + "," + nmParentColumn; + String recursiveSelectClause = cdRecursiveColumn + "," + nmRecursiveColumn + "," + cdParentColumn + "," + + nmParentColumn; - String recurisveSelect = "SELECT " + recursiveSelectClause + " FROM " + dimensionName + " WHERE " + cdRecursiveColumn + " = ? AND " + nmRecursiveColumn - + " = ? AND " + vDateWhereClause; + String recurisveSelect = "SELECT " + recursiveSelectClause + " FROM " + dimensionName + " WHERE " + + cdRecursiveColumn + " = ? AND " + nmRecursiveColumn + " = ? AND " + vDateWhereClause; logger.debug("Select query is [" + recurisveSelect + "]"); @@ -1138,7 +1199,8 @@ private void recursiveParentSelect(Connection dbConnection, IDataSource dataSour ps.setObject(1, parentCdValue); ps.setObject(2, parentNmValue); - logger.debug("PreparedStatment is using [" + parentCdValue + "] and [" + parentNmValue + "] with validity date [" + validityDate + "]"); + logger.debug("PreparedStatement is using [" + parentCdValue + "] and [" + parentNmValue + + "] with validity date [" + validityDate + "]"); ResultSet rs = ps.executeQuery(); @@ -1151,24 +1213,29 @@ private void recursiveParentSelect(Connection dbConnection, IDataSource dataSour parentValuesList.addFirst(newRecursiveNmValue); parentValuesList.addFirst(newRecursiveCdValue); - logger.debug("Result found! Creating a new recursive level with values [" + newRecursiveCdValue + "] and [" + newRecursiveNmValue + "]"); + logger.debug("Result found! Creating a new recursive level with values [" + newRecursiveCdValue + "] and [" + + newRecursiveNmValue + "]"); Object tmpParentCdValue = rs.getObject(jsonRecursiveParentCd); Object tmpParentNmValue = rs.getObject(jsonRecursiveParentNm); if (tmpParentCdValue != null) { - logger.debug("Check values validity. New value is [" + tmpParentCdValue + "] and old is [" + oldCdValue + "]"); + logger.debug("Check values validity. New value is [" + tmpParentCdValue + "] and old is [" + oldCdValue + + "]"); if (tmpParentCdValue.equals(oldCdValue)) { logger.error("Impossible to create recursive levels. A cycle found during recursive selections"); - throw new SQLException("Impossible to create recursive levels. A cycle found during recursive selections"); + throw new SQLException( + "Impossible to create recursive levels. 
A cycle found during recursive selections"); } - logger.debug("Look for another parent with values [" + tmpParentCdValue + "] and [" + tmpParentNmValue + "]"); + logger.debug("Look for another parent with values [" + tmpParentCdValue + "] and [" + tmpParentNmValue + + "]"); - recursiveParentSelect(dbConnection, dataSource, parentValuesList, tmpParentCdValue, tmpParentNmValue, newRecursiveCdValue, dimensionName, - jsonRecursiveCd, jsonRecursiveNm, jsonRecursiveParentCd, jsonRecursiveParentNm, validityDate); + recursiveParentSelect(dbConnection, dataSource, parentValuesList, tmpParentCdValue, tmpParentNmValue, + newRecursiveCdValue, dimensionName, jsonRecursiveCd, jsonRecursiveNm, jsonRecursiveParentCd, + jsonRecursiveParentNm, validityDate); } else { logger.debug("No parent found!"); logger.debug("END"); @@ -1186,7 +1253,8 @@ private void checkMaxLevel(Map levelsMap, HashMap hierConfig) int numLevels = Integer.parseInt((String) hierConfig.get(HierarchyConstants.NUM_LEVELS)); if (lvlIndex > numLevels) { - throw new SQLException("Creation failed. You have " + lvlIndex + " levels, but the maximum is " + numLevels + " levels"); + throw new SQLException( + "Creation failed. You have " + lvlIndex + " levels, but the maximum is " + numLevels + " levels"); } } @@ -1196,13 +1264,16 @@ private String getHierMasterConfig(IDataSource dataSource, Connection dbConnecti String hierCdColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_CD, dataSource); String hierNmColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_NM, dataSource); - String confColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_MASTERS_CONFIG, dataSource); + String confColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_MASTERS_CONFIG, + dataSource); String selectClause = hierCdColumn + "," + hierNmColumn + "," + confColumn; - String selectQuery = "SELECT " + selectClause + " FROM " + HierarchyConstants.HIER_MASTERS_CONFIG_TABLE + " WHERE HIER_NM = ? ORDER BY TIME_IN DESC "; + String selectQuery = "SELECT " + selectClause + " FROM " + HierarchyConstants.HIER_MASTERS_CONFIG_TABLE + + " WHERE HIER_NM = ? 
ORDER BY TIME_IN DESC "; - try (Statement stmt = dbConnection.createStatement(); PreparedStatement selectPs = dbConnection.prepareStatement(selectQuery)) { + try (Statement stmt = dbConnection.createStatement(); + PreparedStatement selectPs = dbConnection.prepareStatement(selectQuery)) { selectPs.setString(1, hierarchyName); diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyTechnicalService.java b/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyTechnicalService.java index 942096d805a..2548bf11652 100644 --- a/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyTechnicalService.java +++ b/knowage-core/src/main/java/it/eng/spagobi/tools/hierarchiesmanagement/service/rest/HierarchyTechnicalService.java @@ -17,6 +17,18 @@ */ package it.eng.spagobi.tools.hierarchiesmanagement.service.rest; +import java.util.Iterator; + +import javax.ws.rs.GET; +import javax.ws.rs.Path; +import javax.ws.rs.Produces; +import javax.ws.rs.QueryParam; +import javax.ws.rs.core.MediaType; + +import org.apache.log4j.Logger; +import org.json.JSONArray; +import org.json.JSONObject; + import it.eng.spagobi.commons.constants.CommunityFunctionalityConstants; import it.eng.spagobi.commons.dao.DAOFactory; import it.eng.spagobi.services.rest.annotations.UserConstraint; @@ -31,18 +43,6 @@ import it.eng.spagobi.tools.hierarchiesmanagement.utils.HierarchyConstants; import it.eng.spagobi.utilities.exceptions.SpagoBIServiceException; -import java.util.Iterator; - -import javax.ws.rs.GET; -import javax.ws.rs.Path; -import javax.ws.rs.Produces; -import javax.ws.rs.QueryParam; -import javax.ws.rs.core.MediaType; - -import org.apache.log4j.Logger; -import org.json.JSONArray; -import org.json.JSONObject; - /* * This class contains all REST services used for specific TECHNICAL hierarchy types */ @@ -70,32 +70,37 @@ public String getHierarchiesTechnical(@QueryParam("dimension") String dimension) IDataSourceDAO dataSourceDAO = DAOFactory.getDataSourceDAO(); IDataSource dataSource = dataSourceDAO.loadDataSourceByLabel(dataSourceName); if (dataSource == null) { - throw new SpagoBIServiceException("An unexpected error occured while retriving hierarchies names", "No datasource found for Hierarchies"); + throw new SpagoBIServiceException("An unexpected error occurred while retrieving hierarchies names", + "No datasource found for Hierarchies"); } // 3- execute query to get hierarchies names - String hierarchyCodeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_CD, dataSource); - String hierarchyNameColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_NM, dataSource); + String hierarchyCodeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_CD, + dataSource); + String hierarchyNameColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_NM, + dataSource); String typeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_TP, dataSource); - String hierarchyDescriptionColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_DS, dataSource); + String hierarchyDescriptionColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_DS, + dataSource); // String scopeColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.HIER_SCOPE, dataSource); String bkpColumn = AbstractJDBCDataset.encapsulateColumnName(HierarchyConstants.BKP_COLUMN, dataSource); String columns = 
hierarchyNameColumn + "," + typeColumn + "," + hierarchyDescriptionColumn + " "; - String queryText = "SELECT DISTINCT(" + hierarchyCodeColumn + ")," + columns + " FROM " + tableName + " WHERE " + typeColumn - + "=\'TECHNICAL\' AND (" + bkpColumn + "= 0 OR " + bkpColumn + " IS NULL) ORDER BY " + hierarchyCodeColumn; + String queryText = "SELECT DISTINCT(" + hierarchyCodeColumn + ")," + columns + " FROM " + tableName + + " WHERE " + typeColumn + "=\'TECHNICAL\' AND (" + bkpColumn + "= 0 OR " + bkpColumn + + " IS NULL) ORDER BY " + hierarchyCodeColumn; // IDataStore dataStore = dataSource.executeStatement("SELECT DISTINCT(" + hierarchyCodeColumn + ")," + columns + " FROM " + tableName + " WHERE " // + typeColumn + "=\"TECHNICAL\" AND " + bkpColumn + "= 0 ORDER BY " + hierarchyCodeColumn, 0, 0); IDataStore dataStore = dataSource.executeStatement(queryText, 0, 0); - for (Iterator iterator = dataStore.iterator(); iterator.hasNext();) { - IRecord record = (IRecord) iterator.next(); - IField field = record.getFieldAt(0); + for (Iterator iterator = dataStore.iterator(); iterator.hasNext();) { + IRecord currRecord = iterator.next(); + IField field = currRecord.getFieldAt(0); String hierarchyCode = (String) field.getValue(); - field = record.getFieldAt(1); + field = currRecord.getFieldAt(1); String hierarchyName = (String) field.getValue(); - field = record.getFieldAt(2); + field = currRecord.getFieldAt(2); String hierarchyType = (String) field.getValue(); - field = record.getFieldAt(3); + field = currRecord.getFieldAt(3); String hierarchyDescription = (String) field.getValue(); JSONObject hierarchy = new JSONObject(); hierarchy.put(HierarchyConstants.HIER_CD, hierarchyCode); @@ -108,7 +113,8 @@ public String getHierarchiesTechnical(@QueryParam("dimension") String dimension) } catch (Throwable t) { logger.error("An unexpected error occured while retriving custom hierarchies names"); - throw new SpagoBIServiceException("An unexpected error occured while retriving custom hierarchies names", t); + throw new SpagoBIServiceException("An unexpected error occured while retriving custom hierarchies names", + t); } logger.debug("END"); return hierarchiesJSONArray.toString(); diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/MailDocumentDispatchChannel.java b/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/MailDocumentDispatchChannel.java index fd6a25a2928..bac2c9fc971 100644 --- a/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/MailDocumentDispatchChannel.java +++ b/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/MailDocumentDispatchChannel.java @@ -35,7 +35,6 @@ import javax.activation.DataSource; import javax.mail.Message; import javax.mail.Multipart; -import javax.mail.PasswordAuthentication; import javax.mail.internet.InternetAddress; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMultipart; @@ -123,15 +122,15 @@ public boolean dispatch(BIObject document, byte[] executionOutput) { emailDispatchDataStore = dispatchContext.getEmailDispatchDataStore(); nameSuffix = dispatchContext.getNameSuffix(); descriptionSuffix = dispatchContext.getDescriptionSuffix(); - containedFileName = dispatchContext.getContainedFileName() != null && !dispatchContext.getContainedFileName().equals("") ? dispatchContext - .getContainedFileName() : document.getName(); - zipFileName = dispatchContext.getZipMailName() != null && !dispatchContext.getZipMailName().equals("") ? 
dispatchContext.getZipMailName() + containedFileName = dispatchContext.getContainedFileName() != null + && !dispatchContext.getContainedFileName().equals("") ? dispatchContext.getContainedFileName() + : document.getName(); + zipFileName = dispatchContext.getZipMailName() != null && !dispatchContext.getZipMailName().equals("") + ? dispatchContext.getZipMailName() : document.getName(); reportNameInSubject = dispatchContext.isReportNameInSubject(); - SessionFacade facade = MailSessionBuilder.newInstance() - .usingSchedulerProfile() - .build(); + SessionFacade facade = MailSessionBuilder.newInstance().usingSchedulerProfile().build(); String mailSubj = dispatchContext.getMailSubj(); mailSubj = StringUtilities.substituteParametersInString(mailSubj, parametersMap, null, false); @@ -174,7 +173,8 @@ public boolean dispatch(BIObject document, byte[] executionOutput) { } // else else { - sds = new SchedulerDataSource(executionOutput, contentType, containedFileName + nameSuffix + fileExtension); + sds = new SchedulerDataSource(executionOutput, contentType, + containedFileName + nameSuffix + fileExtension); mbp2.setDataHandler(new DataHandler(sds)); mbp2.setFileName(sds.getName()); } @@ -198,7 +198,8 @@ public boolean dispatch(BIObject document, byte[] executionOutput) { return true; } - private MimeBodyPart zipAttachment(byte[] attach, String containedFileName, String zipFileName, String nameSuffix, String fileExtension) { + private MimeBodyPart zipAttachment(byte[] attach, String containedFileName, String zipFileName, String nameSuffix, + String fileExtension) { MimeBodyPart messageBodyPart = null; try { @@ -271,7 +272,8 @@ private byte[] zipDocument(String fileZipName, byte[] content) { } - public static boolean canDispatch(DispatchContext dispatchContext, BIObject document, IDataStore emailDispatchDataStore) { + public static boolean canDispatch(DispatchContext dispatchContext, BIObject document, + IDataStore emailDispatchDataStore) { String[] recipients = findRecipients(dispatchContext, document, emailDispatchDataStore); return (recipients != null && recipients.length > 0); } @@ -363,7 +365,8 @@ private static List findRecipientsFromExpression(DispatchContext info, B return recipients; } - private static List findRecipientsFromDataSet(DispatchContext info, BIObject biobj, IDataStore dataStore) throws Exception { + private static List findRecipientsFromDataSet(DispatchContext info, BIObject biobj, IDataStore dataStore) + throws Exception { logger.debug("IN"); List recipients = new ArrayList(); if (info.isUseDataSet()) { @@ -388,24 +391,26 @@ private static List findRecipientsFromDataSet(DispatchContext info, BIOb } } if (parameter == null) { - throw new Exception("The document parameter with label [" + dsParameterLabel + "] was not found. Cannot filter the dataset."); + throw new Exception("The document parameter with label [" + dsParameterLabel + + "] was not found. Cannot filter the dataset."); } // considering the first value of the parameter List values = parameter.getParameterValues(); if (values == null || values.isEmpty()) { - throw new Exception("The document parameter with label [" + dsParameterLabel + "] has no values. Cannot filter the dataset."); + throw new Exception("The document parameter with label [" + dsParameterLabel + + "] has no values. 
Cannot filter the dataset."); } codeValue = (String) values.get(0); logger.debug("Using value [" + codeValue + "] for dataset filtering..."); - Iterator it = dataStore.iterator(); + Iterator it = dataStore.iterator(); while (it.hasNext()) { String recipient = null; - IRecord record = (IRecord) it.next(); + IRecord currRecord = it.next(); // the parameter value is used to filter on the first dataset field - IField valueField = record.getFieldAt(0); + IField valueField = currRecord.getFieldAt(0); Object valueObj = valueField.getValue(); String value = null; if (valueObj != null) @@ -413,7 +418,7 @@ private static List findRecipientsFromDataSet(DispatchContext info, BIOb if (codeValue.equals(value)) { logger.debug("Found value [" + codeValue + "] on the first field of a record of the dataset."); // recipient address is on the second dataset field - IField recipientField = record.getFieldAt(1); + IField recipientField = currRecord.getFieldAt(1); Object recipientFieldObj = recipientField.getValue(); if (recipientFieldObj != null) { recipient = recipientFieldObj.toString(); diff --git a/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/UniqueMailDocumentDispatchChannel.java b/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/UniqueMailDocumentDispatchChannel.java index 7cc69ef4c90..29404352e88 100644 --- a/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/UniqueMailDocumentDispatchChannel.java +++ b/knowage-core/src/main/java/it/eng/spagobi/tools/scheduler/dispatcher/UniqueMailDocumentDispatchChannel.java @@ -38,7 +38,6 @@ import javax.activation.DataSource; import javax.mail.Message; import javax.mail.Multipart; -import javax.mail.PasswordAuthentication; import javax.mail.internet.InternetAddress; import javax.mail.internet.MimeBodyPart; import javax.mail.internet.MimeMultipart; @@ -73,7 +72,6 @@ public class UniqueMailDocumentDispatchChannel implements IDocumentDispatchChann // logger component private static Logger logger = Logger.getLogger(UniqueMailDocumentDispatchChannel.class); - // COnfigurations stored in mailOptions Map public static final String MAIL_SUBJECT = "MAIL_SUBJECT"; public static final String RECIPIENTS = "RECIPIENTS"; @@ -93,26 +91,24 @@ public class UniqueMailDocumentDispatchChannel implements IDocumentDispatchChann public static final String DOCUMENT_LABELS = "DOCUMENT_LABELS"; public static final String IS_ZIP_DOCUMENT = "IS_ZIP_DOCUMENT"; - - - - public UniqueMailDocumentDispatchChannel(){}; + public UniqueMailDocumentDispatchChannel() { + } public UniqueMailDocumentDispatchChannel(DispatchContext dispatchContext) { this.dispatchContext = dispatchContext; try { IEngUserProfile userProfile = this.dispatchContext.getUserProfile(); - //gets the dataset data about the email address + // gets the dataset data about the email address IDataStore emailDispatchDataStore = null; if (dispatchContext.isUseDataSet()) { IDataSet dataSet = DAOFactory.getDataSetDAO().loadDataSetByLabel(dispatchContext.getDataSetLabel()); - //loadActiveDataSetByLabel(dispatchContext.getDataSetLabel()); + // loadActiveDataSetByLabel(dispatchContext.getDataSetLabel()); dataSet.setUserProfileAttributes(UserProfileUtils.getProfileAttributes(userProfile)); dataSet.loadData(); emailDispatchDataStore = dataSet.getDataStore(); } dispatchContext.setEmailDispatchDataStore(emailDispatchDataStore); - } catch(Throwable t) { + } catch (Throwable t) { throw new SpagoBIRuntimeException("Impossible to instatiate MailDocumentDispatchChannel class", t); } } @@ 
-128,13 +124,12 @@ public void close() { } @Override - public boolean canDispatch(BIObject document) { - return canDispatch(dispatchContext, document, dispatchContext.getEmailDispatchDataStore() ); + public boolean canDispatch(BIObject document) { + return canDispatch(dispatchContext, document, dispatchContext.getEmailDispatchDataStore()); } - /** - * dispatch in this case does not send mail, but store files in temporar folder + * dispatch in this case does not send mail, but stores files in a temporary folder */ @Override @@ -144,30 +139,31 @@ public boolean dispatch(BIObject document, byte[] executionOutput) { String containedFileName; logger.debug("IN"); - try{ + try { fileExtension = dispatchContext.getFileExtension(); nameSuffix = dispatchContext.getNameSuffix(); - containedFileName = dispatchContext.getContainedFileName() != null && !dispatchContext.getContainedFileName().equals("")? - dispatchContext.getContainedFileName() : document.getName(); + containedFileName = dispatchContext.getContainedFileName() != null + && !dispatchContext.getContainedFileName().equals("") ? dispatchContext.getContainedFileName() + : document.getName(); - //check if temp folder is already created otherwise create it + // check if temp folder is already created otherwise create it String tempFolderPath = dispatchContext.getTempFolderPath(); - File folder = new File(tempFolderPath); + File folder = new File(tempFolderPath); - if(!folder.exists()){ - logger.debug("Temporary Folder not retrieved: "+folder.getAbsolutePath()); - throw new Exception("Temporary Folder not retrieved: "+folder.getAbsolutePath()); + if (!folder.exists()) { + logger.debug("Temporary Folder not retrieved: " + folder.getAbsolutePath()); + throw new Exception("Temporary Folder not retrieved: " + folder.getAbsolutePath()); } - logger.debug("Temporary Folder retrieved: "+folder.getAbsolutePath()); + logger.debug("Temporary Folder retrieved: " + folder.getAbsolutePath()); // create file inside temp directory String fileToCreate = containedFileName + nameSuffix + fileExtension; - logger.debug("File to store in temporary folder: "+fileToCreate); - String pathToCreate = folder.getAbsolutePath()+File.separator+fileToCreate; + logger.debug("File to store in temporary folder: " + fileToCreate); + String pathToCreate = folder.getAbsolutePath() + File.separator + fileToCreate; FileOutputStream fileOuputStream = new FileOutputStream(pathToCreate); fileOuputStream.write(executionOutput); @@ -175,52 +171,50 @@ public boolean dispatch(BIObject document, byte[] executionOutput) { logger.debug("File stored"); - } catch (Exception e) { - logger.error("Error while sending schedule result mail",e); + logger.error("Error while sending schedule result mail", e); return false; - }finally{ + } finally { logger.debug("OUT"); } return true; } - -/** AFter all files are stored in temporary tabe takes them and sens as zip or as separate attachments - * - * @param mailOptions - * @return - */ + /** + * After all the files are stored in the temporary folder, takes them and sends them as a zip or as separate attachments + * + * @param mailOptions + * @return + */ public boolean sendFiles(Map mailOptions, String allDocumentLabels) { logger.debug("IN"); - try{ - String tempFolderPath = (String)mailOptions.get(TEMP_FOLDER_PATH); + try { + String tempFolderPath = (String) mailOptions.get(TEMP_FOLDER_PATH); File tempFolder = new File(tempFolderPath); - if(!tempFolder.exists() || !tempFolder.isDirectory()){ - logger.error("Temp Folder "+tempFolderPath+" does not exist or is not a directory: stop 
sending mail"); + if (!tempFolder.exists() || !tempFolder.isDirectory()) { + logger.error( + "Temp Folder " + tempFolderPath + " does not exist or is not a directory: stop sending mail"); return false; } - SessionFacade facade = MailSessionBuilder.newInstance() - .usingSchedulerProfile() - .build(); + SessionFacade facade = MailSessionBuilder.newInstance().usingSchedulerProfile().build(); - String mailSubj = mailOptions.get(MAIL_SUBJECT) != null ? (String)mailOptions.get(MAIL_SUBJECT) : null; - Map parametersMap = mailOptions.get(PARAMETERS_MAP) != null ? (Map)mailOptions.get(PARAMETERS_MAP) : null; + String mailSubj = mailOptions.get(MAIL_SUBJECT) != null ? (String) mailOptions.get(MAIL_SUBJECT) : null; + Map parametersMap = mailOptions.get(PARAMETERS_MAP) != null ? (Map) mailOptions.get(PARAMETERS_MAP) : null; mailSubj = StringUtilities.substituteParametersInString(mailSubj, parametersMap, null, false); - String mailTxt = mailOptions.get(MAIL_TXT) != null ? (String)mailOptions.get(MAIL_TXT) : null; - String[] recipients = mailOptions.get(RECIPIENTS) != null ? (String[])mailOptions.get(RECIPIENTS) : null; + String mailTxt = mailOptions.get(MAIL_TXT) != null ? (String) mailOptions.get(MAIL_TXT) : null; + String[] recipients = mailOptions.get(RECIPIENTS) != null ? (String[]) mailOptions.get(RECIPIENTS) : null; // create a message Message msg = facade.createNewMimeMessage(); InternetAddress[] addressTo = new InternetAddress[recipients.length]; - for (int i = 0; i < recipients.length; i++) { + for (int i = 0; i < recipients.length; i++) { addressTo[i] = new InternetAddress(recipients[i]); } msg.setRecipients(Message.RecipientType.TO, addressTo); @@ -229,13 +223,19 @@ public boolean sendFiles(Map mailOptions, String allDocumentLabe String subject = mailSubj; String nameSuffix = mailOptions.get(NAME_SUFFIX) != null ? (String) mailOptions.get(NAME_SUFFIX) : null; - Boolean reportNameInSubject =mailOptions.get(REPORT_NAME_IN_SUBJECT) != null && !mailOptions.get(REPORT_NAME_IN_SUBJECT).toString().equals("") ? (Boolean) mailOptions.get(REPORT_NAME_IN_SUBJECT) : null; - //Boolean descriptionSuffix =mailOptions.get(DESCRIPTION_SUFFIX) != null && !mailOptions.get(DESCRIPTION_SUFFIX).toString().equals("")? (Boolean) mailOptions.get(DESCRIPTION_SUFFIX) : null; - String zipFileName=mailOptions.get(ZIP_FILE_NAME) != null ? (String) mailOptions.get(ZIP_FILE_NAME) : "Zipped Documents"; - String contentType=mailOptions.get(CONTENT_TYPE) != null ? (String) mailOptions.get(CONTENT_TYPE) : null; - String fileExtension = mailOptions.get(FILE_EXTENSION) != null ? (String)mailOptions.get(FILE_EXTENSION) : null; - - if(reportNameInSubject){ + Boolean reportNameInSubject = mailOptions.get(REPORT_NAME_IN_SUBJECT) != null + && !mailOptions.get(REPORT_NAME_IN_SUBJECT).toString().equals("") + ? (Boolean) mailOptions.get(REPORT_NAME_IN_SUBJECT) + : null; + // Boolean descriptionSuffix =mailOptions.get(DESCRIPTION_SUFFIX) != null && !mailOptions.get(DESCRIPTION_SUFFIX).toString().equals("")? (Boolean) + // mailOptions.get(DESCRIPTION_SUFFIX) : null; + String zipFileName = mailOptions.get(ZIP_FILE_NAME) != null ? (String) mailOptions.get(ZIP_FILE_NAME) + : "Zipped Documents"; + String contentType = mailOptions.get(CONTENT_TYPE) != null ? (String) mailOptions.get(CONTENT_TYPE) : null; + String fileExtension = mailOptions.get(FILE_EXTENSION) != null ? 
(String) mailOptions.get(FILE_EXTENSION) + : null; + + if (reportNameInSubject) { subject += " " + nameSuffix; } @@ -246,40 +246,40 @@ public boolean sendFiles(Map mailOptions, String allDocumentLabe // attach the file to the message - boolean isZipDocument= mailOptions.get(IS_ZIP_DOCUMENT) != null ? (Boolean) mailOptions.get(IS_ZIP_DOCUMENT) : false; - zipFileName = mailOptions.get(ZIP_FILE_NAME) != null ? (String) mailOptions.get(ZIP_FILE_NAME) : "Zipped Documents"; + boolean isZipDocument = mailOptions.get(IS_ZIP_DOCUMENT) != null + ? (Boolean) mailOptions.get(IS_ZIP_DOCUMENT) + : false; + zipFileName = mailOptions.get(ZIP_FILE_NAME) != null ? (String) mailOptions.get(ZIP_FILE_NAME) + : "Zipped Documents"; // create the Multipart and add its parts to it Multipart mp = new MimeMultipart(); mp.addBodyPart(mbp1); - if(isZipDocument){ + if (isZipDocument) { logger.debug("Make zip"); // create the second message part MimeBodyPart mbp2 = new MimeBodyPart(); mbp2 = zipAttachment(zipFileName, mailOptions, tempFolder); mp.addBodyPart(mbp2); - } - else{ + } else { logger.debug("Attach single files"); SchedulerDataSource sds = null; MimeBodyPart bodyPart = null; - try - { + try { String[] entries = tempFolder.list(); - for(int i=0;i mailOptions, String allDocumentLabe mp.addBodyPart(bodyPart); } - } - catch( Exception e ) - { + } catch (Exception e) { logger.error("Error while attaching files", e); } @@ -302,56 +300,48 @@ public boolean sendFiles(Map mailOptions, String allDocumentLabe // send message facade.sendMessage(msg); - logger.info("Mail sent for documents with labels ["+allDocumentLabels+"]"); + logger.info("Mail sent for documents with labels [" + allDocumentLabels + "]"); // logger.debug("delete tempFolder path "+tempFolder.getPath()); // boolean deleted = tempFolder.delete(); // logger.debug("Temp folder deleted "+deleted); } catch (Exception e) { - logger.error("Error while sending schedule result mail",e); + logger.error("Error while sending schedule result mail", e); return false; - }finally{ + } finally { logger.debug("OUT"); } return true; } - - - - - public static MimeBodyPart zipAttachment( String zipFileName, Map mailOptions, File tempFolder) - { + public static MimeBodyPart zipAttachment(String zipFileName, Map mailOptions, File tempFolder) { logger.debug("IN"); MimeBodyPart messageBodyPart = null; - try - { + try { - String nameSuffix = mailOptions.get(NAME_SUFFIX) != null ? (String)mailOptions.get(NAME_SUFFIX) : ""; + String nameSuffix = mailOptions.get(NAME_SUFFIX) != null ? 
(String) mailOptions.get(NAME_SUFFIX) : ""; byte[] buffer = new byte[4096]; // Create a buffer for copying int bytesRead; // the zip - String tempFolderPath = (String)mailOptions.get(TEMP_FOLDER_PATH); + String tempFolderPath = (String) mailOptions.get(TEMP_FOLDER_PATH); ByteArrayOutputStream bout = new ByteArrayOutputStream(); ZipOutputStream out = new ZipOutputStream(bout); + logger.debug("File zip to write: " + tempFolderPath + File.separator + "zippedFile.zip"); - logger.debug("File zip to write: "+tempFolderPath+File.separator+"zippedFile.zip"); - - //files to zip + // files to zip String[] entries = tempFolder.list(); - for (int i = 0; i < entries.length; i++) { - //File f = new File(tempFolder, entries[i]); - File f = new File(tempFolder+File.separator+entries[i]); + // File f = new File(tempFolder, entries[i]); + File f = new File(tempFolder + File.separator + entries[i]); if (f.isDirectory()) - continue;//Ignore directory - logger.debug("insert file: "+f.getName()); + continue;// Ignore directory + logger.debug("insert file: " + f.getName()); FileInputStream in = new FileInputStream(f); // Stream to read file ZipEntry entry = new ZipEntry(f.getName()); // Make a ZipEntry out.putNextEntry(entry); // Store entry @@ -362,15 +352,12 @@ public static MimeBodyPart zipAttachment( String zipFileName, Map mailOptions, F out.close(); messageBodyPart = new MimeBodyPart(); - DataSource source = new ByteArrayDataSource( bout.toByteArray(), "application/zip" ); - messageBodyPart.setDataHandler( new DataHandler( source ) ); - + DataSource source = new ByteArrayDataSource(bout.toByteArray(), "application/zip"); + messageBodyPart.setDataHandler(new DataHandler(source)); - messageBodyPart.setFileName( zipFileName+nameSuffix+".zip" ); + messageBodyPart.setFileName(zipFileName + nameSuffix + ".zip"); - } - catch( Exception e ) - { + } catch (Exception e) { logger.error("Error while creating the zip", e); return null; } @@ -383,14 +370,14 @@ public static MimeBodyPart zipAttachment( String zipFileName, Map mailOptions, F private byte[] zipDocument(String fileZipName, byte[] content) { logger.debug("IN"); - ByteArrayOutputStream bos=null; - ZipOutputStream zos=null; - ByteArrayInputStream in=null; - try{ + ByteArrayOutputStream bos = null; + ZipOutputStream zos = null; + ByteArrayInputStream in = null; + try { bos = new ByteArrayOutputStream(); zos = new ZipOutputStream(bos); - ZipEntry ze= new ZipEntry(fileZipName); + ZipEntry ze = new ZipEntry(fileZipName); zos.putNextEntry(ze); in = new ByteArrayInputStream(content); @@ -398,13 +385,12 @@ private byte[] zipDocument(String fileZipName, byte[] content) { zos.write(c); } - return bos.toByteArray(); - }catch(IOException ex){ - logger.error("Error zipping the document",ex); + } catch (IOException ex) { + logger.error("Error zipping the document", ex); return null; - }finally{ + } finally { if (bos != null) { try { bos.close(); @@ -429,13 +415,14 @@ private byte[] zipDocument(String fileZipName, byte[] content) { } } - public static boolean canDispatch(DispatchContext dispatchContext, BIObject document, IDataStore emailDispatchDataStore) { + + public static boolean canDispatch(DispatchContext dispatchContext, BIObject document, + IDataStore emailDispatchDataStore) { String[] recipients = findRecipients(dispatchContext, document, emailDispatchDataStore); return (recipients != null && recipients.length > 0); } - public static String[] findRecipients(DispatchContext info, BIObject biobj, - IDataStore dataStore) { + public static String[] 
findRecipients(DispatchContext info, BIObject biobj, IDataStore dataStore) { logger.debug("IN"); String[] toReturn = null; List recipients = new ArrayList(); @@ -512,7 +499,7 @@ private static List findRecipientsFromExpression(DispatchContext info, B } } // we must substitute parameter values on the expression - String recipientStr = StringUtilities.substituteParametersInString(expression, parametersMap,null, false); + String recipientStr = StringUtilities.substituteParametersInString(expression, parametersMap, null, false); logger.debug("The expression, after substitution, now is [" + recipientStr + "]."); String[] recipientsArray = recipientStr.split(","); logger.debug("Recipients found with expression: " + recipientsArray); @@ -522,8 +509,8 @@ private static List findRecipientsFromExpression(DispatchContext info, B return recipients; } - private static List findRecipientsFromDataSet(DispatchContext info, BIObject biobj, - IDataStore dataStore) throws Exception { + private static List findRecipientsFromDataSet(DispatchContext info, BIObject biobj, IDataStore dataStore) + throws Exception { logger.debug("IN"); List recipients = new ArrayList(); if (info.isUseDataSet()) { @@ -548,24 +535,26 @@ private static List findRecipientsFromDataSet(DispatchContext info, BIOb } } if (parameter == null) { - throw new Exception("The document parameter with label [" + dsParameterLabel + "] was not found. Cannot filter the dataset."); + throw new Exception("The document parameter with label [" + dsParameterLabel + + "] was not found. Cannot filter the dataset."); } // considering the first value of the parameter List values = parameter.getParameterValues(); if (values == null || values.isEmpty()) { - throw new Exception("The document parameter with label [" + dsParameterLabel + "] has no values. Cannot filter the dataset."); + throw new Exception("The document parameter with label [" + dsParameterLabel + + "] has no values. Cannot filter the dataset."); } codeValue = (String) values.get(0); logger.debug("Using value [" + codeValue + "] for dataset filtering..."); - Iterator it = dataStore.iterator(); + Iterator it = dataStore.iterator(); while (it.hasNext()) { String recipient = null; - IRecord record = (IRecord)it.next(); + IRecord currRecord = it.next(); // the parameter value is used to filter on the first dataset field - IField valueField = record.getFieldAt(0); + IField valueField = currRecord.getFieldAt(0); Object valueObj = valueField.getValue(); String value = null; if (valueObj != null) @@ -573,7 +562,7 @@ private static List findRecipientsFromDataSet(DispatchContext info, BIOb if (codeValue.equals(value)) { logger.debug("Found value [" + codeValue + "] on the first field of a record of the dataset."); // recipient address is on the second dataset field - IField recipientField = record.getFieldAt(1); + IField recipientField = currRecord.getFieldAt(1); Object recipientFieldObj = recipientField.getValue(); if (recipientFieldObj != null) { recipient = recipientFieldObj.toString(); @@ -631,9 +620,6 @@ public SchedulerDataSource(byte[] content, String contentType, String name) { } } - - - // Returns the contents of the file in a byte array. 
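// For reference, a minimal sketch of an equivalent helper on JDK 7+ (hypothetical name, not part of this
// patch): java.nio reads the whole file in one call and handles buffer sizing and stream closing itself.
//
//     private static byte[] getBytesFromFileNio(File file) throws IOException {
//         // Files.readAllBytes loads the entire file and closes the underlying stream internally
//         return java.nio.file.Files.readAllBytes(file.toPath());
//     }
//
// The hand-rolled read loop below is what the existing code base uses and is left unchanged by this patch.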
public static byte[] getBytesFromFile(File file) throws IOException { // Get the size of the file @@ -649,7 +635,7 @@ public static byte[] getBytesFromFile(File file) throws IOException { } // Create the byte array to hold the data - byte[] bytes = new byte[(int)length]; + byte[] bytes = new byte[(int) length]; // Read in the bytes int offset = 0; @@ -657,8 +643,7 @@ public static byte[] getBytesFromFile(File file) throws IOException { InputStream is = new FileInputStream(file); try { - while (offset < bytes.length - && (numRead=is.read(bytes, offset, bytes.length-offset)) >= 0) { + while (offset < bytes.length && (numRead = is.read(bytes, offset, bytes.length - offset)) >= 0) { offset += numRead; } } finally { diff --git a/knowage-core/src/test/java/it/eng/spagobi/dataset/cache/impl/sqldbcache/test/AbstractSQLDBCacheTest.java b/knowage-core/src/test/java/it/eng/spagobi/dataset/cache/impl/sqldbcache/test/AbstractSQLDBCacheTest.java index 34715d30eae..2b0591dc751 100644 --- a/knowage-core/src/test/java/it/eng/spagobi/dataset/cache/impl/sqldbcache/test/AbstractSQLDBCacheTest.java +++ b/knowage-core/src/test/java/it/eng/spagobi/dataset/cache/impl/sqldbcache/test/AbstractSQLDBCacheTest.java @@ -447,11 +447,11 @@ public void testSchemaRead() { fieldMetaData.setFieldType(FieldType.ATTRIBUTE); metadata.addFiedMeta(fieldMetaData); dataStore.setMetaData(metadata); - Record record = new Record(); + Record newRecord = new Record(); Field field = new Field(); field.setValue("try"); - record.appendField(field); - dataStore.appendRecord(record); + newRecord.appendField(field); + dataStore.appendRecord(newRecord); // persist the datastore as a table on db DatabaseDialect dialect = DatabaseDialect.get(dataSourceWriting.getHibDialectClass()); diff --git a/knowagecockpitengine/src/main/java/it/eng/spagobi/engine/chart/util/DataSetTransformer.java b/knowagecockpitengine/src/main/java/it/eng/spagobi/engine/chart/util/DataSetTransformer.java index 5f21a2cb2ee..6d1a325211e 100644 --- a/knowagecockpitengine/src/main/java/it/eng/spagobi/engine/chart/util/DataSetTransformer.java +++ b/knowagecockpitengine/src/main/java/it/eng/spagobi/engine/chart/util/DataSetTransformer.java @@ -280,10 +280,10 @@ public JSONObject toMatrix(List dataRows, Object columnsNeeded, Object s /** * Take each record from the 'dataRows' parameter, i.e. each record from the dataset and put it inside the local (temporary) 'records' variable. */ - Map record = (Map) dataRows.get(i); + Map currRecord = (Map) dataRows.get(i); - if (!allColumns.contains(record.get(rawColumnNameColumn))) { - allColumns.add((String) record.get(rawColumnNameColumn)); + if (!allColumns.contains(currRecord.get(rawColumnNameColumn))) { + allColumns.add((String) currRecord.get(rawColumnNameColumn)); } } @@ -307,17 +307,17 @@ public JSONObject toMatrix(List dataRows, Object columnsNeeded, Object s /** * Current record (row) from the map of maps of available data (primitive (not pivoted) dataset). */ - Map record = (Map) dataRows.get(i); + Map currRecord = (Map) dataRows.get(i); /** * Value (name) of the current record's row from the map of maps. Current row of the matrix. */ - String currentRow = (String) record.get(rawColumnNameRow); + String currentRow = (String) currRecord.get(rawColumnNameRow); /** * Value (name) of the current record's column from the map of maps. Current column of the matrix. 
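 * For example (values purely illustrative): a record whose row field is "IT" and whose column field is
 * "2019" selects the matrix cell at row "IT", column "2019", where its measure value is then stored.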
*/ - String currentColumn = (String) record.get(rawColumnNameColumn); + String currentColumn = (String) currRecord.get(rawColumnNameColumn); /** * Put a new map for the row that is not contained by the map of maps. @@ -326,7 +326,7 @@ public JSONObject toMatrix(List dataRows, Object columnsNeeded, Object s HashMap submapWithNewColumn = new HashMap<>(); - if (record.get(columnsMapper.get(serie + "_" + aggregationType)).getClass().toString() + if (currRecord.get(columnsMapper.get(serie + "_" + aggregationType)).getClass().toString() .equals("class java.lang.Integer")) { /** @@ -335,15 +335,15 @@ public JSONObject toMatrix(List dataRows, Object columnsNeeded, Object s * * NOTE: The same goes for other variables of the same name ('serieValueForXOfRowAndColumn') in the code afterwards. */ - Integer serieValueForXOfRowAndColumn = (int) record + Integer serieValueForXOfRowAndColumn = (int) currRecord .get(columnsMapper.get(serie + "_" + aggregationType)); submapWithNewColumn.put(currentColumn, Float.parseFloat(Integer.toString(serieValueForXOfRowAndColumn))); } else { - String serieValueForXOfRowAndColumn = (record.get(columnsMapper.get(serie + "_" + aggregationType))) - .toString(); + String serieValueForXOfRowAndColumn = (currRecord + .get(columnsMapper.get(serie + "_" + aggregationType))).toString(); submapWithNewColumn.put(currentColumn, Float.parseFloat(serieValueForXOfRowAndColumn)); } @@ -351,18 +351,18 @@ public JSONObject toMatrix(List dataRows, Object columnsNeeded, Object s } else { - if (record.get(columnsMapper.get(serie + "_" + aggregationType)).getClass().toString() + if (currRecord.get(columnsMapper.get(serie + "_" + aggregationType)).getClass().toString() .equals("class java.lang.Integer")) { - Integer serieValueForXOfRowAndColumn = (int) record + Integer serieValueForXOfRowAndColumn = (int) currRecord .get(columnsMapper.get(serie + "_" + aggregationType)); availableDataMapOfMaps.get(currentRow).put(currentColumn, Float.parseFloat(Integer.toString(serieValueForXOfRowAndColumn))); } else { - String serieValueForXOfRowAndColumn = (record.get(columnsMapper.get(serie + "_" + aggregationType))) - .toString(); + String serieValueForXOfRowAndColumn = (currRecord + .get(columnsMapper.get(serie + "_" + aggregationType))).toString(); availableDataMapOfMaps.get(currentRow).put(currentColumn, Float.parseFloat(serieValueForXOfRowAndColumn)); @@ -529,17 +529,17 @@ public JSONArray toTree(Object columnsNeeded, Object serie, Object dataColumnsMa */ for (int i = 0; i < dataRows.size(); i++) { Map row = (Map) dataRows.get(i); - HashMap record = new HashMap<>(); + HashMap newRecord = new HashMap<>(); /* For every record take these columns */ for (int j = 0; j < listColumns.size(); j++) { Object x = row.get(listColumns.get(j)); - record.put(columns.get(j).toString(), x.toString()); - record.put(serie.toString(), row.get(serieRawColumn).toString()); + newRecord.put(columns.get(j).toString(), x.toString()); + newRecord.put(serie.toString(), row.get(serieRawColumn).toString()); - result.put(new Integer(i), record); + result.put(i, newRecord); } } @@ -614,17 +614,17 @@ public JSONObject createTreeChart(Object columnsNeeded, Object serie, Object dat for (int i = 0; i < dataRows.size(); i++) { Map row = (Map) dataRows.get(i); - HashMap record = new HashMap<>(); + HashMap newRecord = new HashMap<>(); /* For every record take these columns */ for (int j = 0; j < listColumns.size(); j++) { Object x = row.get(listColumns.get(j)); - record.put(columns.get(j).toString(), x.toString()); + 
newRecord.put(columns.get(j).toString(), x.toString()); } - record.put(serie.toString(), row.get(serieRawColumn).toString()); + newRecord.put(serie.toString(), row.get(serieRawColumn).toString()); - result.put(new Integer(i), record); + result.put(i, newRecord); } JSONObject res = createTreeMap(columns, serie, result); @@ -1579,17 +1579,17 @@ public Map getData(List dataRows, Object serie, Object columnsNeeded, Ob if (dataRows != null) { for (int i = 0; i < dataRows.size(); i++) { Map row = (Map) dataRows.get(i); - HashMap record = new HashMap<>(); + HashMap newRecord = new HashMap<>(); /* For every record take these columns */ for (String column : listColumns) { Object x = row.get(column); - record.put(columns.get(columnsIndex.get(column)).toString(), x); + newRecord.put(columns.get(columnsIndex.get(column)).toString(), x); } - record.put(serie.toString(), row.get(serieRawColumn)); + newRecord.put(serie.toString(), row.get(serieRawColumn)); - firstresult.put(new Integer(i), record); + firstresult.put(i, newRecord); } } return firstresult; diff --git a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/exporter/QbeXLSExporter.java b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/exporter/QbeXLSExporter.java index 4dddac96c65..3f7babf67ac 100644 --- a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/exporter/QbeXLSExporter.java +++ b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/exporter/QbeXLSExporter.java @@ -87,17 +87,16 @@ public class QbeXLSExporter { public static final String ADDITIONAL_DATA_FIELDS_OPTIONS_SCALE_FACTOR = "measureScaleFactor"; private Locale locale; - private Map properties; + private final Map properties; IDataStore dataStore = null; Vector extractedFields = null; - Map decimalFormats = new HashMap(); + Map decimalFormats = new HashMap<>(); public QbeXLSExporter(IDataStore dataStore, Locale locale) { - super(); this.dataStore = dataStore; this.locale = locale; - this.properties = new HashMap(); + this.properties = new HashMap<>(); } public IDataStore getDataStore() { @@ -109,8 +108,7 @@ public void setDataStore(IDataStore dataStore) { } public QbeXLSExporter() { - super(); - this.properties = new HashMap(); + this.properties = new HashMap<>(); } public void setProperty(String propertyName, Object propertyValue) { @@ -152,7 +150,8 @@ public void fillSheet(Sheet sheet, Workbook wb, CreationHelper createHelper, int * * @return ... 
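 * (As the surrounding code shows, the returned array holds one CellStyle per dataset column;
 * fillSheetData reuses it below, falling back to a default style when an entry is null.)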
*/ - private CellStyle[] fillSheetHeader(Sheet sheet, Workbook workbook, CreationHelper createHelper, int beginRowHeaderData, int beginColumnHeaderData) { + private CellStyle[] fillSheetHeader(Sheet sheet, Workbook workbook, CreationHelper createHelper, + int beginRowHeaderData, int beginColumnHeaderData) { CellStyle[] cellTypes; @@ -174,7 +173,8 @@ private CellStyle[] fillSheetHeader(Sheet sheet, Workbook workbook, CreationHelp IFieldMetaData fieldMetaData = dataStoreMetaData.getFieldMeta(j); String format = (String) fieldMetaData.getProperty("format"); String alias = fieldMetaData.getAlias(); - String scaleFactorHeader = (String) fieldMetaData.getProperty(ADDITIONAL_DATA_FIELDS_OPTIONS_SCALE_FACTOR); + String scaleFactorHeader = (String) fieldMetaData + .getProperty(ADDITIONAL_DATA_FIELDS_OPTIONS_SCALE_FACTOR); String header; if (extractedFields != null && j < extractedFields.size() && extractedFields.get(j) != null) { @@ -308,16 +308,18 @@ public CellStyle buildCellStyle(Sheet sheet) { String cellColor = (String) this.getProperty(PROPERTY_CELL_COLOR); logger.debug("Cell color : " + cellColor); - short cellColorIndex = cellColor != null ? IndexedColors.valueOf(cellColor).getIndex() : IndexedColors.valueOf(DEFAULT_CELL_COLOR).getIndex(); + short cellColorIndex = cellColor != null ? IndexedColors.valueOf(cellColor).getIndex() + : IndexedColors.valueOf(DEFAULT_CELL_COLOR).getIndex(); font.setColor(cellColorIndex); cellStyle.setFont(font); return cellStyle; } - public void fillSheetData(Sheet sheet, Workbook wb, CreationHelper createHelper, CellStyle[] cellTypes, int beginRowData, int beginColumnData) { + public void fillSheetData(Sheet sheet, Workbook wb, CreationHelper createHelper, CellStyle[] cellTypes, + int beginRowData, int beginColumnData) { CellStyle dCellStyle = this.buildCellStyle(sheet); - Iterator it = dataStore.iterator(); + Iterator it = dataStore.iterator(); int rownum = beginRowData; short formatIndexInt = this.getBuiltinFormat("#,##0"); CellStyle cellStyleInt = this.buildCellStyle(sheet); // cellStyleInt is the default cell style for integers @@ -332,11 +334,11 @@ public void fillSheetData(Sheet sheet, Workbook wb, CreationHelper createHelper, while (it.hasNext()) { Row rowVal = sheet.getRow(rownum); - IRecord record = (IRecord) it.next(); - List fields = record.getFields(); + IRecord currRecord = it.next(); + List fields = currRecord.getFields(); int length = fields.size(); for (int fieldIndex = 0; fieldIndex < length; fieldIndex++) { - IField f = (IField) fields.get(fieldIndex); + IField f = fields.get(fieldIndex); if (f != null && f.getValue() != null) { Class c = d.getFieldType(fieldIndex); @@ -349,7 +351,8 @@ public void fillSheetData(Sheet sheet, Workbook wb, CreationHelper createHelper, if (Integer.class.isAssignableFrom(c) || Short.class.isAssignableFrom(c)) { logger.debug("Column [" + (fieldIndex + 1) + "] type is equal to [" + "INTEGER" + "]"); IFieldMetaData fieldMetaData = d.getFieldMeta(fieldIndex); - String scaleFactor = (String) fieldMetaData.getProperty(ADDITIONAL_DATA_FIELDS_OPTIONS_SCALE_FACTOR); + String scaleFactor = (String) fieldMetaData + .getProperty(ADDITIONAL_DATA_FIELDS_OPTIONS_SCALE_FACTOR); Number val = (Number) f.getValue(); Double doubleValue = MeasureScaleFactorOption.applyScaleFactor(val.doubleValue(), scaleFactor); cell.setCellValue(doubleValue); @@ -367,7 +370,8 @@ public void fillSheetData(Sheet sheet, Workbook wb, CreationHelper createHelper, } Number val = (Number) f.getValue(); Double value = val.doubleValue(); - String scaleFactor = 
(String) fieldMetaData.getProperty(ADDITIONAL_DATA_FIELDS_OPTIONS_SCALE_FACTOR); + String scaleFactor = (String) fieldMetaData + .getProperty(ADDITIONAL_DATA_FIELDS_OPTIONS_SCALE_FACTOR); cell.setCellValue(MeasureScaleFactorOption.applyScaleFactor(value, scaleFactor)); cell.setCellType(this.getCellTypeNumeric()); cell.setCellStyle((cellTypes[fieldIndex] != null) ? cellTypes[fieldIndex] : cs); diff --git a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/LoadRegistryAction.java b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/LoadRegistryAction.java index c449b24d875..689243cfd1c 100644 --- a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/LoadRegistryAction.java +++ b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/LoadRegistryAction.java @@ -183,22 +183,25 @@ private IDataSet getActiveQueryAsDataSet(Query q) { public IDataStore executeQuery(Integer start, Integer limit, Query filteredQuery) { IDataStore dataStore = null; - QbeEngineInstance qbeEngineInstance = (QbeEngineInstance) getAttributeFromSession(EngineConstants.ENGINE_INSTANCE); + QbeEngineInstance qbeEngineInstance = (QbeEngineInstance) getAttributeFromSession( + EngineConstants.ENGINE_INSTANCE); IStatement statement = getEngineInstance().getDataSource().createStatement(filteredQuery); IDataSet dataSet = getActiveQueryAsDataSet(filteredQuery); AbstractQbeDataSet qbeDataSet = (AbstractQbeDataSet) dataSet; // QueryGraph graph = statement.getQuery().getQueryGraph(); boolean valid = true; // GraphManager.getGraphValidatorInstance(QbeEngineConfig.getInstance().getGraphValidatorImpl()).isValid(graph, - // statement.getQuery().getQueryEntities(getDataSource())); + // statement.getQuery().getQueryEntities(getDataSource())); // logger.debug("QueryGraph valid = " + valid); if (!valid) { - throw new SpagoBIEngineServiceException(getActionName(), "error.mesage.description.relationship.not.enough"); + throw new SpagoBIEngineServiceException(getActionName(), + "error.mesage.description.relationship.not.enough"); } try { logger.debug("Executing query ..."); Integer maxSize = QbeEngineConfig.getInstance().getResultLimit(); - logger.debug("Configuration setting [" + "QBE.QBE-SQL-RESULT-LIMIT.value" + "] is equals to [" + (maxSize != null ? maxSize : "none") + "]"); + logger.debug("Configuration setting [" + "QBE.QBE-SQL-RESULT-LIMIT.value" + "] is equals to [" + + (maxSize != null ? 
maxSize : "none") + "]"); String jpaQueryStr = statement.getQueryString(); logger.debug("Executable query (HQL/JPQL): [" + jpaQueryStr + "]"); @@ -212,13 +215,15 @@ public IDataStore executeQuery(Integer start, Integer limit, Query filteredQuery dataStore = dataSet.getDataStore(); dataStore = new DecoratedDataStore(dataStore, qbeEngineInstance); changeAlias(dataStore); - Assert.assertNotNull(dataStore, "The dataStore returned by loadData method of the class [" + dataSet.getClass().getName() + "] cannot be null"); + Assert.assertNotNull(dataStore, "The dataStore returned by loadData method of the class [" + + dataSet.getClass().getName() + "] cannot be null"); } catch (Exception e) { logger.debug("Query execution aborted because of an internal exceptian"); SpagoBIEngineServiceException exception; String message; - message = "An error occurred in " + getActionName() + " service while executing query: [" + statement.getQueryString() + "]"; + message = "An error occurred in " + getActionName() + " service while executing query: [" + + statement.getQueryString() + "]"; exception = new SpagoBIEngineServiceException(getActionName(), message, e); exception.addHint("Check if the query is properly formed: [" + statement.getQueryString() + "]"); exception.addHint("Check connection configuration"); @@ -342,10 +347,10 @@ private void addSumRows(IDataStore dataStore) { summaryColorCellsArray = new JSONArray(); summaryCellsArray = new JSONArray(); - ArrayList columnsIndexToMerge = new ArrayList(); - ArrayList columnsIndexToEmpty = new ArrayList(); - ArrayList columnsIndexToAfter = new ArrayList(); - HashMap columnsIndexToSum2Counter = new HashMap(); + ArrayList columnsIndexToMerge = new ArrayList<>(); + ArrayList columnsIndexToEmpty = new ArrayList<>(); + ArrayList columnsIndexToAfter = new ArrayList<>(); + HashMap columnsIndexToSum2Counter = new HashMap<>(); // collect columns to merge and columns to sum and colummsn to empty: // -- columns to merge have merge attributes until a columns with summaryFunc is found @@ -359,11 +364,11 @@ private void addSumRows(IDataStore dataStore) { for (Iterator iterator = columns.iterator(); iterator.hasNext();) { Column column = (Column) iterator.next(); - if (column.isMerge() && summaryFuncFound == false) { + if (column.isMerge() && !summaryFuncFound) { columnsIndexToMerge.add(index); - } else if (summaryFuncFound == true && !column.isMeasure() && !measureFound) { + } else if (summaryFuncFound && !column.isMeasure() && !measureFound) { columnsIndexToEmpty.add(index); - } else if (summaryFuncFound == true && !column.isMeasure() && measureFound) { + } else if (summaryFuncFound && !column.isMeasure() && measureFound) { columnsIndexToAfter.add(index); } else if (column.isMeasure()) { columnsIndexToSum2Counter.put(index, 0); @@ -375,27 +380,27 @@ private void addSumRows(IDataStore dataStore) { } // Map to store previous merge values on iteration - HashMap previousMergeValues = new HashMap(); + HashMap previousMergeValues = new HashMap<>(); for (Iterator iterator = columnsIndexToMerge.iterator(); iterator.hasNext();) { Integer columnIndex = (Integer) iterator.next(); previousMergeValues.put(columnIndex, null); } - TreeMap recordsToAddMap = new TreeMap(); + TreeMap recordsToAddMap = new TreeMap<>(); int sumCounter = 0; // add total row only if grouping has more than one member // iterate on each store row for (int i = 0; i < dataStore.getRecordsCount(); i++) { - IRecord record = dataStore.getRecordAt(i); + IRecord currRecord = dataStore.getRecordAt(i); // get current values 
of column to merge - HashMap currentMergeValues = new HashMap(); + HashMap currentMergeValues = new HashMap<>(); // iterate on each column to merge and store values - for (Iterator iterator = columnsIndexToMerge.iterator(); iterator.hasNext();) { - Integer columnIndex = (Integer) iterator.next(); - Object value = record.getFieldAt(columnIndex).getValue(); + for (Iterator iterator = columnsIndexToMerge.iterator(); iterator.hasNext();) { + Integer columnIndex = iterator.next(); + Object value = currRecord.getFieldAt(columnIndex).getValue(); currentMergeValues.put(columnIndex, value); } @@ -406,9 +411,9 @@ private void addSumRows(IDataStore dataStore) { if (isEqual) { sumCounter++; - for (Iterator iterator = columnsIndexToSum2Counter.keySet().iterator(); iterator.hasNext();) { - Integer indexMeasure = (Integer) iterator.next(); - Object value = record.getFieldAt(indexMeasure).getValue(); + for (Iterator iterator = columnsIndexToSum2Counter.keySet().iterator(); iterator.hasNext();) { + Integer indexMeasure = iterator.next(); + Object value = currRecord.getFieldAt(indexMeasure).getValue(); // TODO treat the case this is not a number, should keep it to null if (value != null) { @@ -427,14 +432,14 @@ private void addSumRows(IDataStore dataStore) { // add a new record only if sumCounter > 0 if (sumCounter > 0) { - addTotalRecord(dataStore, i, columnsIndexToMerge, columnsIndexToEmpty, columnsIndexToAfter, columnsIndexToSum2Counter, previousMergeValues, - recordsToAddMap); + addTotalRecord(dataStore, i, columnsIndexToMerge, columnsIndexToEmpty, columnsIndexToAfter, + columnsIndexToSum2Counter, previousMergeValues, recordsToAddMap); } // put the counters to actual values - for (Iterator iterator = columnsIndexToSum2Counter.keySet().iterator(); iterator.hasNext();) { - Integer columnInd = (Integer) iterator.next(); - Object v = record.getFieldAt(columnInd).getValue(); + for (Iterator iterator = columnsIndexToSum2Counter.keySet().iterator(); iterator.hasNext();) { + Integer columnInd = iterator.next(); + Object v = currRecord.getFieldAt(columnInd).getValue(); columnsIndexToSum2Counter.put(columnInd, v); } @@ -447,13 +452,13 @@ private void addSumRows(IDataStore dataStore) { // add final total if last records were merged if (sumCounter > 0) { - addTotalRecord(dataStore, null, columnsIndexToMerge, columnsIndexToEmpty, columnsIndexToAfter, columnsIndexToSum2Counter, previousMergeValues, - recordsToAddMap); + addTotalRecord(dataStore, null, columnsIndexToMerge, columnsIndexToEmpty, columnsIndexToAfter, + columnsIndexToSum2Counter, previousMergeValues, recordsToAddMap); } // finally add the record (could not add them while cycling the store) - for (Iterator iterator = recordsToAddMap.keySet().iterator(); iterator.hasNext();) { - Integer indexR = (Integer) iterator.next(); + for (Iterator iterator = recordsToAddMap.keySet().iterator(); iterator.hasNext();) { + Integer indexR = iterator.next(); Record rec = recordsToAddMap.get(indexR); if (indexR == -1) { dataStore.appendRecord(rec); @@ -465,8 +470,9 @@ private void addSumRows(IDataStore dataStore) { logger.debug("OUT"); } - private void addTotalRecord(IDataStore dataStore, Integer currentIndexRow, ArrayList columnsIndexToMerge, ArrayList columnsIndexToEmpty, - ArrayList columnsIndexToAfter, HashMap columnsIndexToSum2Counter, HashMap previousMergeValues, + private void addTotalRecord(IDataStore dataStore, Integer currentIndexRow, ArrayList columnsIndexToMerge, + ArrayList columnsIndexToEmpty, ArrayList columnsIndexToAfter, + HashMap columnsIndexToSum2Counter, 
@@ -465,8 +470,9 @@ private void addSumRows(IDataStore dataStore) {
 		logger.debug("OUT");
 	}

-	private void addTotalRecord(IDataStore dataStore, Integer currentIndexRow, ArrayList columnsIndexToMerge, ArrayList columnsIndexToEmpty,
-			ArrayList columnsIndexToAfter, HashMap columnsIndexToSum2Counter, HashMap previousMergeValues,
+	private void addTotalRecord(IDataStore dataStore, Integer currentIndexRow, ArrayList columnsIndexToMerge,
+			ArrayList columnsIndexToEmpty, ArrayList columnsIndexToAfter,
+			HashMap columnsIndexToSum2Counter, HashMap previousMergeValues,
 			TreeMap recordsToAddMap) {
 		logger.debug("IN");
 		Record recordNew = new Record();
@@ -483,8 +489,8 @@ private void addTotalRecord(IDataStore dataStore, Integer currentIndexRow, Array
 		summaryRecordsAddedCounter++;

 		// insert fields for each column to merge
-		for (Iterator iterator = columnsIndexToMerge.iterator(); iterator.hasNext();) {
-			Integer columnIndex = (Integer) iterator.next();
+		for (Iterator<Integer> iterator = columnsIndexToMerge.iterator(); iterator.hasNext();) {
+			Integer columnIndex = iterator.next();
 			Field field = new Field();
 			Object valueToPut = previousMergeValues.get(columnIndex);
 			field.setValue(valueToPut);
@@ -617,9 +623,10 @@ private void setFieldsDefaultValue(JSONObject gridDataFeed) {

 	private void setFieldsKeyColumnProperty(JSONObject gridDataFeed) {

-		QbeEngineInstance qbeEngineInstance = (QbeEngineInstance) getAttributeFromSession(RegistryEngineStartAction.ENGINE_INSTANCE);
-		Assert.assertNotNull(qbeEngineInstance,
-				"It's not possible to execute " + this.getActionName() + " service before having properly created an instance of EngineInstance class");
+		QbeEngineInstance qbeEngineInstance = (QbeEngineInstance) getAttributeFromSession(
+				RegistryEngineStartAction.ENGINE_INSTANCE);
+		Assert.assertNotNull(qbeEngineInstance, "It's not possible to execute " + this.getActionName()
+				+ " service before having properly created an instance of EngineInstance class");
 		RegistryConfiguration registryConf = qbeEngineInstance.getRegistryConfiguration();
 		IDataSource genericDatasource = qbeEngineInstance.getDataSource();
 		String keyColumn = genericDatasource.getPersistenceManager().getKeyColumn(registryConf);
@@ -684,7 +691,8 @@ private void setNewSummaryColorCell(IDataStore dataStore, Integer row, Integer c
 			obj.put("column", column);
 			summaryColorCellsArray.put(obj);
 		} catch (JSONException e) {
-			logger.error("Error while tracing summary cell in row " + row + " and column " + column + ": " + e.getMessage());
+			logger.error(
+					"Error while tracing summary cell in row " + row + " and column " + column + ": " + e.getMessage());
 		}
 	}
@@ -701,7 +709,8 @@ private void setNewSummaryCell(IDataStore dataStore, Integer row, Integer column
 			obj.put("column", column);
 			summaryCellsArray.put(obj);
 		} catch (JSONException e) {
-			logger.error("Error while tracing summary cell in row " + row + " and column " + column + ": " + e.getMessage());
+			logger.error(
+					"Error while tracing summary cell in row " + row + " and column " + column + ": " + e.getMessage());
 		}
 	}
@@ -716,7 +725,7 @@ private void getColumnsInfos(Column column) {
 			infoObj.putOpt("size", size);
 			infoObj.putOpt("unsigned", unsigned);

-			if (size != null || unsigned != false) {
+			if (size != null || unsigned) {
 				columnsInfos.put(infoObj);
 			}
@@ -748,7 +757,7 @@ private Query buildQuery(String fieldName, String orderType) {
 		columnMaxSize = registryConfig.getColumnsMaxSize();

 		Iterator<Column> it = columns.iterator();
-		Map<String, String> fieldNameIdMap = new HashMap<String, String>();
+		Map<String, String> fieldNameIdMap = new HashMap<>();
 		while (it.hasNext()) {
 			Column column = it.next();
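The buildQuery change just above is the raw-type cleanup applied throughout this patch: declare the type arguments once on the left-hand side, then let the diamond operator infer them in the constructor call. A standalone sketch with hypothetical values:

    import java.util.HashMap;
    import java.util.Map;

    public class DiamondOperatorSketch {
        public static void main(String[] args) {
            // Raw types defeat compile-time checking: reads need casts and
            // wrongly-typed writes only surface at runtime.
            Map rawMap = new HashMap();
            rawMap.put("CUSTOMER", "id");
            String rawValue = (String) rawMap.get("CUSTOMER");

            // Parameterized declaration plus diamond: checked writes, cast-free reads.
            Map<String, String> fieldNameIdMap = new HashMap<>();
            fieldNameIdMap.put("CUSTOMER", "id");
            String fieldId = fieldNameIdMap.get("CUSTOMER");

            System.out.println(rawValue + " / " + fieldId);
        }
    }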
@@ -767,13 +776,13 @@ private Query buildQuery(String fieldName, String orderType) {
 				if (fieldName != null && orderType != null) {
 					sorter = name.equals(fieldName) ? orderType.toUpperCase() : null;
 				} else {
-					sorter = column.getSorter() != null && (column.getSorter().equalsIgnoreCase("ASC") || column.getSorter().equalsIgnoreCase("DESC"))
-							? column.getSorter().toUpperCase()
-							: null;
+					sorter = column.getSorter() != null && (column.getSorter().equalsIgnoreCase("ASC")
+							|| column.getSorter().equalsIgnoreCase("DESC")) ? column.getSorter().toUpperCase()
+									: null;
 				}

-				query.addSelectFiled(field.getUniqueName(), "NONE", field.getName(), true, true, false, sorter, field.getPropertyAsString("format"), null,
-						field.getJavaClass());
+				query.addSelectFiled(field.getUniqueName(), "NONE", field.getName(), true, true, false, sorter,
+						field.getPropertyAsString("format"), null, field.getJavaClass());
 				fieldNameIdMap.put(column.getField(), field.getUniqueName());
 			}
 		}
@@ -782,7 +791,7 @@ private Query buildQuery(String fieldName, String orderType) {
 		List filters = registryConfig.getFilters();

 		int i = 0;
-		ArrayList<ExpressionNode> expressionNodes = new ArrayList<ExpressionNode>();
+		ArrayList<ExpressionNode> expressionNodes = new ArrayList<>();
 		for (Iterator iterator = filters.iterator(); iterator.hasNext();) {
 			Filter filter = (Filter) iterator.next();
 			addFilter(i, query, env, fieldNameIdMap, filter, expressionNodes);
@@ -803,7 +812,8 @@ private Query buildQuery(String fieldName, String orderType) {
 		return query;
 	}

-	private void addSort(int i, Query query, Map env, Map<String, String> fieldNameIdMap, ArrayList<ExpressionNode> expressionNodes) {
+	private void addSort(int i, Query query, Map env, Map<String, String> fieldNameIdMap,
+			ArrayList<ExpressionNode> expressionNodes) {
 		logger.debug("IN");

 		// if(requestContainsAttribute("sort")){
@@ -820,7 +830,8 @@ private void addSort(int i, Query query, Map env, Map fieldNameIdMap,
 		logger.debug("OUT");
 	}

-	private void addFilter(int i, Query query, Map env, Map<String, String> fieldNameIdMap, Filter filter, ArrayList<ExpressionNode> expressionNodes) {
+	private void addFilter(int i, Query query, Map env, Map<String, String> fieldNameIdMap, Filter filter,
+			ArrayList<ExpressionNode> expressionNodes) {
 		logger.debug("IN");

 		// in case it is a driver
@@ -838,18 +849,22 @@ private void addFilter(int i, Query query, Map env, Map fieldNameIdMap, Filter
 			String[] valuesArr = value.toString().split(",", -1);

-			logger.debug("Set filter from analytical driver " + driverName + ": " + filter.getField() + "=" + value);
+			logger.debug(
+					"Set filter from analytical driver " + driverName + ": " + filter.getField() + "=" + value);

 			String fieldId = fieldNameIdMap.get(fieldName);
 			String[] fields = new String[] { fieldId };

-			WhereField.Operand left = new WhereField.Operand(fields, "driverName", AbstractStatement.OPERAND_TYPE_SIMPLE_FIELD, null, null);
+			WhereField.Operand left = new WhereField.Operand(fields, "driverName",
+					AbstractStatement.OPERAND_TYPE_SIMPLE_FIELD, null, null);

-			WhereField.Operand right = new WhereField.Operand(valuesArr, "value", AbstractStatement.OPERAND_TYPE_STATIC, null, null);
+			WhereField.Operand right = new WhereField.Operand(valuesArr, "value",
+					AbstractStatement.OPERAND_TYPE_STATIC, null, null);

 			if (valuesArr.length > 1) {
 				query.addWhereField("Driver_" + i, driverName, false, left, CriteriaConstants.IN, right, "AND");
 			} else {
-				query.addWhereField("Driver_" + i, driverName, false, left, CriteriaConstants.EQUALS_TO, right, "AND");
+				query.addWhereField("Driver_" + i, driverName, false, left, CriteriaConstants.EQUALS_TO, right,
+						"AND");
 			}

 			ExpressionNode newFilterNode = new ExpressionNode("NODE_CONST", "$F{" + "Driver_" + i + "}");
not allowed in query filter"); + throw new SpagoBIEngineServiceException(getActionName(), + "Character '?' not allowed in query filter"); } logger.debug("Set filter " + filter.getField() + "=" + value); @@ -878,15 +894,19 @@ else if (requestContainsAttribute(filter.getField()) || filter.isStatic()) { String[] fields = new String[] { fieldId }; String[] values = new String[] { value }; - WhereField.Operand left = new WhereField.Operand(fields, "filterName", AbstractStatement.OPERAND_TYPE_SIMPLE_FIELD, null, null); + WhereField.Operand left = new WhereField.Operand(fields, "filterName", + AbstractStatement.OPERAND_TYPE_SIMPLE_FIELD, null, null); - WhereField.Operand right = new WhereField.Operand(values, "value", AbstractStatement.OPERAND_TYPE_STATIC, null, null); + WhereField.Operand right = new WhereField.Operand(values, "value", + AbstractStatement.OPERAND_TYPE_STATIC, null, null); // if filter type is manual use it as string starting, else as equals if (filter.getPresentationType().equals(RegistryConfigurationXMLParser.PRESENTATION_TYPE_COMBO)) { - query.addWhereField("Filter_" + i, filter.getField(), false, left, CriteriaConstants.EQUALS_TO, right, "AND"); + query.addWhereField("Filter_" + i, filter.getField(), false, left, CriteriaConstants.EQUALS_TO, + right, "AND"); } else { - query.addWhereField("Filter_" + i, filter.getField(), false, left, CriteriaConstants.STARTS_WITH, right, "AND"); + query.addWhereField("Filter_" + i, filter.getField(), false, left, CriteriaConstants.STARTS_WITH, + right, "AND"); } ExpressionNode newFilterNode = new ExpressionNode("NODE_CONST", "$F{" + "Filter_" + i + "}"); @@ -903,13 +923,14 @@ private IModelField getColumnModelField(Column column, IModelEntity entity) { // In order to recover subentities the new way if DEFAULT_MAX_RECURSION_LEVEL is set to zero /* - * QbeEngineInstance engineInstance = getEngineInstance(); QbeTemplate template = engineInstance.getTemplate(); // takes the only datamart's name - * configured String modelName = (String) template.getDatamartNames().get(0); IModelStructure md = getDataSource().getModelStructure(); IModelEntity - * subEntity = md.getEntity(column.getSubEntity()); + * QbeEngineInstance engineInstance = getEngineInstance(); QbeTemplate template = engineInstance.getTemplate(); // takes the only datamart's name configured + * String modelName = (String) template.getDatamartNames().get(0); IModelStructure md = getDataSource().getModelStructure(); IModelEntity subEntity = + * md.getEntity(column.getSubEntity()); */ String entityUName = entity.getUniqueName(); - String subEntityKey = entityUName.substring(0, entityUName.lastIndexOf("::")) + "::" + column.getSubEntity() + "(" + column.getForeignKey() + ")"; + String subEntityKey = entityUName.substring(0, entityUName.lastIndexOf("::")) + "::" + column.getSubEntity() + + "(" + column.getForeignKey() + ")"; IModelEntity subEntity = entity.getSubEntity(subEntityKey); if (subEntity == null) { throw new SpagoBIEngineServiceException(getActionName(), @@ -943,11 +964,13 @@ private IModelEntity getSelectedEntity() { QbeTemplate template = engineInstance.getTemplate(); if (template.isComposite()) { // composite Qbe is not supported logger.error("Template is composite. This is not supported by the Registry engine"); - throw new SpagoBIEngineServiceException(getActionName(), "Template is composite. This is not supported by the Registry engine"); + throw new SpagoBIEngineServiceException(getActionName(), + "Template is composite. 
This is not supported by the Registry engine"); } // takes the only datamart's name configured String modelName = (String) template.getDatamartNames().get(0); - RegistryConfiguration registryConfig = (RegistryConfiguration) template.getProperty("registryConfiguration"); + RegistryConfiguration registryConfig = (RegistryConfiguration) template + .getProperty("registryConfiguration"); String entityName = registryConfig.getEntity(); int index = entityName.lastIndexOf("."); @@ -990,7 +1013,6 @@ class DecoratedDataStore implements IDataStore { private final RegistryConfiguration registryConfiguration; public DecoratedDataStore(IDataStore dataStore, QbeEngineInstance qbeEngineInstance) { - super(); this.dataStore = dataStore; this.template = qbeEngineInstance.getTemplate(); @@ -1113,13 +1135,13 @@ public void appendRecord(IRecord r) { } @Override - public void prependRecord(IRecord record) { - dataStore.prependRecord(record); + public void prependRecord(IRecord recordToPrepend) { + dataStore.prependRecord(recordToPrepend); } @Override - public void insertRecord(int recordIndex, IRecord record) { - dataStore.insertRecord(recordIndex, record); + public void insertRecord(int recordIndex, IRecord recordToInsert) { + dataStore.insertRecord(recordIndex, recordToInsert); } @Override @@ -1128,12 +1150,14 @@ public IDataStore aggregateAndFilterRecords(String sqlQuery, int offset, int fet } @Override - public IDataStore aggregateAndFilterRecords(String sqlQuery, int offset, int fetchSize, int maxRowCount, String dateFormatJava) { + public IDataStore aggregateAndFilterRecords(String sqlQuery, int offset, int fetchSize, int maxRowCount, + String dateFormatJava) { return dataStore.aggregateAndFilterRecords(sqlQuery, offset, fetchSize, maxRowCount, dateFormatJava); } @Override - public IDataStore aggregateAndFilterRecords(String sqlQuery, List values, int offset, int fetchSize, int maxRowCount, String dateFormatJava) { + public IDataStore aggregateAndFilterRecords(String sqlQuery, List values, int offset, int fetchSize, + int maxRowCount, String dateFormatJava) { return dataStore.aggregateAndFilterRecords(sqlQuery, values, offset, fetchSize, maxRowCount, dateFormatJava); } diff --git a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java index 12991e3b84d..406afcd1e75 100644 --- a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java +++ b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java @@ -84,7 +84,7 @@ public void service(SourceBean request, SourceBean response) { } } - Map properties = new HashMap(); + Map properties = new HashMap<>(); properties.put("keyField", keyColumn); properties.put("ids", arrays); @@ -102,7 +102,8 @@ public void service(SourceBean request, SourceBean response) { } catch (Throwable t) { errorHitsMonitor = MonitorFactory.start("QbeEngine.updateRecordsAction.errorHits"); errorHitsMonitor.stop(); - throw SpagoBIEngineServiceExceptionHandler.getInstance().getWrappedException(getActionName(), getEngineInstance(), t); + throw SpagoBIEngineServiceExceptionHandler.getInstance().getWrappedException(getActionName(), + getEngineInstance(), t); } finally { if (totalTimeMonitor != null) totalTimeMonitor.stop(); @@ -124,14 +125,14 @@ private Vector executeUpdate() throws Exception { } qbeEngineInstance = (QbeEngineInstance) 
diff --git a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java
index 12991e3b84d..406afcd1e75 100644
--- a/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java
+++ b/knowageqbeengine/src/main/java/it/eng/spagobi/engines/qbe/services/registry/UpdateRecordsAction.java
@@ -84,7 +84,7 @@ public void service(SourceBean request, SourceBean response) {
 				}
 			}

-			Map<String, Object> properties = new HashMap<String, Object>();
+			Map<String, Object> properties = new HashMap<>();
 			properties.put("keyField", keyColumn);
 			properties.put("ids", arrays);
@@ -102,7 +102,8 @@ public void service(SourceBean request, SourceBean response) {
 		} catch (Throwable t) {
 			errorHitsMonitor = MonitorFactory.start("QbeEngine.updateRecordsAction.errorHits");
 			errorHitsMonitor.stop();
-			throw SpagoBIEngineServiceExceptionHandler.getInstance().getWrappedException(getActionName(), getEngineInstance(), t);
+			throw SpagoBIEngineServiceExceptionHandler.getInstance().getWrappedException(getActionName(),
+					getEngineInstance(), t);
 		} finally {
 			if (totalTimeMonitor != null)
 				totalTimeMonitor.stop();
@@ -124,14 +125,14 @@ private Vector executeUpdate() throws Exception {
 		}

 		qbeEngineInstance = (QbeEngineInstance) getAttributeFromSession(RegistryEngineStartAction.ENGINE_INSTANCE);
-		Assert.assertNotNull(qbeEngineInstance,
-				"It's not possible to execute " + this.getActionName() + " service before having properly created an instance of EngineInstance class");
+		Assert.assertNotNull(qbeEngineInstance, "It's not possible to execute " + this.getActionName()
+				+ " service before having properly created an instance of EngineInstance class");

 		registryConf = qbeEngineInstance.getRegistryConfiguration();
-		Assert.assertNotNull(registryConf,
-				"It's not possible to execute " + this.getActionName() + " service before having properly created an instance of RegistryConfiguration class");
+		Assert.assertNotNull(registryConf, "It's not possible to execute " + this.getActionName()
+				+ " service before having properly created an instance of RegistryConfiguration class");

-		idsToReturn = new Vector<Integer>();
+		idsToReturn = new Vector<>();

 		for (int i = 0; i < modifiedRecords.length(); i++) {
 			JSONObject aRecord = modifiedRecords.getJSONObject(i);
@@ -155,13 +156,19 @@ private Vector executeUpdate() throws Exception {
 				autoLoadPK = true;
 			}

-			String tableForPkMax = registryConf.getConfiguration(RegistryConfiguration.Configuration.TABLE_FOR_PK_MAX);
-			String columnForPkMax = registryConf.getConfiguration(RegistryConfiguration.Configuration.COLUMN_FOR_PK_MAX);
+			String tableForPkMax = registryConf
+					.getConfiguration(RegistryConfiguration.Configuration.TABLE_FOR_PK_MAX);
+			String columnForPkMax = registryConf
+					.getConfiguration(RegistryConfiguration.Configuration.COLUMN_FOR_PK_MAX);

-			boolean enableAddRecords = registryConf.getConfiguration(RegistryConfiguration.Configuration.ENABLE_ADD_RECORDS) != null
-					&& registryConf.getConfiguration(RegistryConfiguration.Configuration.ENABLE_ADD_RECORDS).equalsIgnoreCase("true") ? true : false;
-			boolean enableButtons = registryConf.getConfiguration(RegistryConfiguration.Configuration.ENABLE_BUTTONs) != null
-					&& registryConf.getConfiguration(RegistryConfiguration.Configuration.ENABLE_BUTTONs).equalsIgnoreCase("true") ? true : false;
+			boolean enableAddRecords = registryConf
+					.getConfiguration(RegistryConfiguration.Configuration.ENABLE_ADD_RECORDS) != null
+					&& registryConf.getConfiguration(RegistryConfiguration.Configuration.ENABLE_ADD_RECORDS)
+							.equalsIgnoreCase("true");
+			boolean enableButtons = registryConf
+					.getConfiguration(RegistryConfiguration.Configuration.ENABLE_BUTTONs) != null
+					&& registryConf.getConfiguration(RegistryConfiguration.Configuration.ENABLE_BUTTONs)
+							.equalsIgnoreCase("true");

 			if (!enableAddRecords && !enableButtons) {
 				String message = "You are not allowed to execute operation adding new records";
@@ -173,7 +180,8 @@ private Vector executeUpdate() throws Exception {
 			if (columnForPkMax == null || columnForPkMax.trim().equals(""))
 				columnForPkMax = null;

-			Integer id = insertRecord(aRecord, qbeEngineInstance, registryConf, autoLoadPK, tableForPkMax, columnForPkMax);
+			Integer id = insertRecord(aRecord, qbeEngineInstance, registryConf, autoLoadPK, tableForPkMax,
+					columnForPkMax);
 			idsToReturn.add(id);
 		} else {
 			logger.debug("Update Row with id " + keyColumn + " = " + keyValueObject.toString());
@@ -186,53 +194,26 @@ private Vector executeUpdate() throws Exception {

 	}

-	/**
-	 * The Id column is the one not editable
-	 *
-	 * @param registryConfiguration
-	 * @return
-	 * @throws Exception
-	 */
-
-	// private String getFieldId(RegistryConfiguration registryConfiguration) throws Exception{
-	// logger.debug("IN");
-	// String toReturn = null;
-	// List columns = registryConfiguration.getColumns();
-	// for (Iterator iterator = columns.iterator(); iterator.hasNext() && toReturn == null;) {
-	// RegistryConfiguration.Column column = (RegistryConfiguration.Column) iterator.next();
-	// boolean editable = column.isEditable();
-	// if(!editable){
-	// toReturn = column.getField();
-	// }
-	// }
-	// logger.debug("ID field is "+toReturn);
-	//
-	// if(toReturn == null){
-	// logger.error("Cannot insert new record because no logical primary key could be found (field with editable = false)");
-	// throw new Exception("Cannot insert new record because no logical primary key could be found (field with editable = false)");
-	// }
-	//
-	// logger.debug("OUT");
-	// return toReturn;
-	// }
-
-	private void updateRecord(JSONObject aRecord, QbeEngineInstance qbeEngineInstance, RegistryConfiguration registryConf) {
+	private void updateRecord(JSONObject aRecord, QbeEngineInstance qbeEngineInstance,
+			RegistryConfiguration registryConf) {
 		logger.debug("IN");
 		IDataSource genericDatasource = qbeEngineInstance.getDataSource();
 		genericDatasource.getPersistenceManager().updateRecord(aRecord, registryConf);
 		logger.debug("OUT");
 	}

-	private Integer insertRecord(JSONObject aRecord, QbeEngineInstance qbeEngineInstance, RegistryConfiguration registryConf, boolean autoLoadPK,
-			String tableForPkMax, String columnForPkMax) {
+	private Integer insertRecord(JSONObject aRecord, QbeEngineInstance qbeEngineInstance,
+			RegistryConfiguration registryConf, boolean autoLoadPK, String tableForPkMax, String columnForPkMax) {
 		logger.debug("IN");
 		IDataSource genericDatasource = qbeEngineInstance.getDataSource();
-		Integer id = genericDatasource.getPersistenceManager().insertRecord(aRecord, registryConf, autoLoadPK, tableForPkMax, columnForPkMax);
+		Integer id = genericDatasource.getPersistenceManager().insertRecord(aRecord, registryConf, autoLoadPK,
+				tableForPkMax, columnForPkMax);
 		logger.debug("OUT");
 		return id;
 	}

-	private void addDefaultValuesRecord(JSONObject aRecord, QbeEngineInstance qbeEngineInstance, RegistryConfiguration registryConf) throws JSONException {
+	private void addDefaultValuesRecord(JSONObject aRecord, QbeEngineInstance qbeEngineInstance,
+			RegistryConfiguration registryConf) throws JSONException {
 		IDataSource genericDatasource = qbeEngineInstance.getDataSource();
 		genericDatasource.getPersistenceManager().addDefaultValueToRecord(aRecord, registryConf);
 	}
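The enableAddRecords/enableButtons assignments above also drop the redundant ternary: "condition ? true : false" is always equivalent to "condition" itself. A standalone illustration:

    public class RedundantTernarySketch {
        public static void main(String[] args) {
            String flag = "TRUE"; // stands in for a registry configuration value

            // Redundant: the condition already evaluates to the desired boolean.
            boolean verbose = flag != null && flag.equalsIgnoreCase("true") ? true : false;

            // Equivalent, and what static analysis suggests: use the condition directly.
            boolean direct = flag != null && flag.equalsIgnoreCase("true");

            System.out.println(verbose == direct); // prints true for any input
        }
    }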
diff --git a/knowageutils/src/main/java/it/eng/spagobi/tools/dataset/common/datareader/FacetSolrDataReader.java b/knowageutils/src/main/java/it/eng/spagobi/tools/dataset/common/datareader/FacetSolrDataReader.java
index c380a60afc2..b8bf57347ec 100644
--- a/knowageutils/src/main/java/it/eng/spagobi/tools/dataset/common/datareader/FacetSolrDataReader.java
+++ b/knowageutils/src/main/java/it/eng/spagobi/tools/dataset/common/datareader/FacetSolrDataReader.java
@@ -81,13 +81,13 @@ protected void addData(String data, IDataStore dataStore, IMetaData dataStoreMet
 			for (int j = 0; j < parsedData.size(); j++) {
 				if (maxResults <= 0 || rowFetched < maxResults) {
-					IRecord record = new Record(dataStore);
+					IRecord currRecord = new Record(dataStore);
 					Map aMap = (Map) parsedData.get(j);
 					for (Object key : aMap.keySet()) {
 						Object value = aMap.get(key);
-						record.appendField(new Field(value));
+						currRecord.appendField(new Field(value));
 					}
-					dataStore.appendRecord(record);
+					dataStore.appendRecord(currRecord);
 					rowFetched++;
 				}
 			}
@@ -95,13 +95,13 @@ protected void addData(String data, IDataStore dataStore, IMetaData dataStoreMet
 			for (int j = 0; j < parsedData.size(); j++) {
 				if (maxResults <= 0 || rowFetched < maxResults) {
-					IRecord record = new Record(dataStore);
+					IRecord currRecord = new Record(dataStore);
 					IField field = new Field(parsedData.get(j));
-					record.appendField(field);
+					currRecord.appendField(field);
 					field = new Field(parsedData.get(j + 1));
-					record.appendField(field);
-					dataStore.appendRecord(record);
+					currRecord.appendField(field);
+					dataStore.appendRecord(currRecord);
 					rowFetched++;
 				}
 				j = j + 1;
diff --git a/qbecore/src/main/java/it/eng/qbe/statement/AbstractQbeDataSet.java b/qbecore/src/main/java/it/eng/qbe/statement/AbstractQbeDataSet.java
index 0f5668dee66..8ccf4118055 100644
--- a/qbecore/src/main/java/it/eng/qbe/statement/AbstractQbeDataSet.java
+++ b/qbecore/src/main/java/it/eng/qbe/statement/AbstractQbeDataSet.java
@@ -108,7 +108,7 @@ public static IRecord toRecord(Object o, IMetaData dataStoreMeta) {
 			row = (Object[]) o;
 		}
 		LogMF.debug(logger, "Processing record {0}", Arrays.toString(row));
-		IRecord record = new Record();
+		IRecord newRecord = new Record();
 		for (int i = 0, j = 0; i < dataStoreMeta.getFieldCount(); i++) {
 			IFieldMetaData fieldMeta = dataStoreMeta.getFieldMeta(i);
 			Boolean calculated = (Boolean) fieldMeta.getProperty("calculated");
@@ -128,13 +128,13 @@ public static IRecord toRecord(Object o, IMetaData dataStoreMeta) {
 					s.useDelimiter("\\A");
 					String clobAsString = s.hasNext() ? s.next() : "";
s.next() : ""; - record.appendField(new Field(clobAsString)); + newRecord.appendField(new Field(clobAsString)); } if (row[j] != null) fieldMeta.setType(row[j].getClass()); } else { - record.appendField(new Field(row[j])); + newRecord.appendField(new Field(row[j])); if (row[j] != null) fieldMeta.setType(row[j].getClass()); } @@ -145,15 +145,15 @@ public static IRecord toRecord(Object o, IMetaData dataStoreMeta) { variable.reset(); } - record.appendField(new Field(variable.getValue())); + newRecord.appendField(new Field(variable.getValue())); if (variable.getValue() != null) fieldMeta.setType(variable.getValue().getClass()); } } - return record; + return newRecord; } - private void processCalculatedFields(IRecord record, IDataStore dataStore) { + private void processCalculatedFields(IRecord currRecord, IDataStore dataStore) { IMetaData dataStoreMeta; List calculatedFieldsMeta; @@ -182,9 +182,9 @@ private void processCalculatedFields(IRecord record, IDataStore dataStore) { Map dmFields = new HashMap(); Object[] columns = new Object[dataStoreMeta.getFieldCount()]; for (int j = 0; j < dataStoreMeta.getFieldCount(); j++) { - qFields.put(dataStoreMeta.getFieldMeta(j).getAlias(), record.getFieldAt(j).getValue()); - dmFields.put(dataStoreMeta.getFieldMeta(j).getProperty("uniqueName"), record.getFieldAt(j).getValue()); - columns[j] = record.getFieldAt(j).getValue(); + qFields.put(dataStoreMeta.getFieldMeta(j).getAlias(), currRecord.getFieldAt(j).getValue()); + dmFields.put(dataStoreMeta.getFieldMeta(j).getProperty("uniqueName"), currRecord.getFieldAt(j).getValue()); + columns[j] = currRecord.getFieldAt(j).getValue(); } groovyBindings.put("qFields", qFields); // key = alias @@ -210,7 +210,7 @@ private void processCalculatedFields(IRecord record, IDataStore dataStore) { logger.debug("Field [" + fieldMeta.getName() + "] is equals to [" + calculatedValue + "]"); variable.setValue(calculatedValue); - record.getFieldAt(dataStoreMeta.getFieldIndex(fieldMeta.getAlias())).setValue(variable.getValue()); + currRecord.getFieldAt(dataStoreMeta.getFieldIndex(fieldMeta.getAlias())).setValue(variable.getValue()); } } diff --git a/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java b/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java index f1cfcc87fbe..fb5f69e470f 100644 --- a/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java +++ b/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java @@ -144,9 +144,9 @@ public int getRow() throws OdaException */ public String getString( int index ) throws OdaException { logger.debug("IN getString"); - IRecord record = dataStore.getRecordAt(getRow()); + IRecord currRecord = dataStore.getRecordAt(getRow()); - if(record == null){ + if(currRecord == null){ logger.debug("ODA Exception Record null"); throw (OdaException) new OdaException("Impossible to read row [" + getRow() + "]. 
diff --git a/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java b/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java
index f1cfcc87fbe..fb5f69e470f 100644
--- a/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java
+++ b/spagobi.birt.oda/src/main/java/spagobi/birt/oda/impl/server/ResultSet.java
@@ -144,9 +144,9 @@ public int getRow() throws OdaException
 	 */
 	public String getString( int index ) throws OdaException {
 		logger.debug("IN getString");
-		IRecord record = dataStore.getRecordAt(getRow());
+		IRecord currRecord = dataStore.getRecordAt(getRow());

-		if(record == null){
+		if(currRecord == null){
 			logger.debug("ODA Exception Record null");
 			throw (OdaException) new OdaException("Impossible to read row [" + getRow() + "]. The resultset contains [" + dataStore.getRecordsCount() + "] rows");
 		}
@@ -158,7 +158,7 @@ public String getString( int index ) throws OdaException {
 		String toReturn = null;
 		try {
-			IField field = record.getFieldAt(fieldIndex);
+			IField field = currRecord.getFieldAt(fieldIndex);
 			toReturn = "" + field.getValue();
 		} catch (IndexOutOfBoundsException e) {
 			logger.warn("Column index not found in the record",e);
@@ -181,9 +181,9 @@ public String getString( String columnName ) throws OdaException {
 	 */
 	public int getInt( int index ) throws OdaException {
 		logger.debug("IN getInt");
-		IRecord record = dataStore.getRecordAt(getRow());
+		IRecord currRecord = dataStore.getRecordAt(getRow());

-		if(record == null){
+		if(currRecord == null){
 			throw (OdaException) new OdaException("Impossible to read row [" + getRow() + "]. The resultset contains [" + dataStore.getRecordsCount() + "] rows");
 		}
@@ -193,7 +193,7 @@ public int getInt( int index ) throws OdaException {
 		IField field = null;
 		int value = 0;
 		try {
-			field = record.getFieldAt(fieldIndex);
+			field = currRecord.getFieldAt(fieldIndex);
 			if(field == null){
 				throw (OdaException) new OdaException("Impossible to read column [" + (index-1) + "]. The resultset contains [" + dataStore.getMetaData().getFieldCount() + "] columns");
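A closing note on the recurring rename in this section ("record" becoming currRecord, newRecord, recordToPrepend, and so on): "record" has been a restricted identifier since record classes were introduced, so it can no longer be used as a type name, and static analyzers such as Sonar typically flag it as a variable name too. The convention this patch adopts is to name the variable after its role, as in this sketch:

    public class RestrictedIdentifierSketch {
        public static void main(String[] args) {
            // Still legal as a local variable, but flagged by lint rules on
            // restricted identifiers ('record' cannot be a class name anymore).
            String record = "row-1";

            // The patch's convention: describe the variable's role instead.
            String currRecord = record;
            System.out.println(currRecord);
        }
    }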