galovics commented on code in PR #3321:
URL: https://github.com/apache/fineract/pull/3321#discussion_r1275016683
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/service/database/DatabaseSpecificSQLGenerator.java:
##########
@@ -189,4 +207,49 @@ public String castJson(String sql) {
throw new IllegalStateException("Database type is not supported
for casting to json " + databaseTypeResolver.databaseType());
}
}
+
+ public static String alias(@NotNull String field, String alias) {
Review Comment:
How are these methods below specific to the database type?
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/service/database/DatabaseSpecificSQLGenerator.java:
##########
@@ -36,14 +42,26 @@ public DatabaseSpecificSQLGenerator(DatabaseTypeResolver
databaseTypeResolver) {
}
public String escape(String arg) {
- if (databaseTypeResolver.isMySQL()) {
+ return escape(databaseTypeResolver.databaseType(), arg);
+ }
+
+ public static String escape(DatabaseType dialect, String arg) {
Review Comment:
Why do we need to pass the dialect here but not everywhere else?
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/service/database/DatabaseSpecificSQLGenerator.java:
##########
@@ -36,14 +42,26 @@ public DatabaseSpecificSQLGenerator(DatabaseTypeResolver
databaseTypeResolver) {
}
public String escape(String arg) {
- if (databaseTypeResolver.isMySQL()) {
+ return escape(databaseTypeResolver.databaseType(), arg);
+ }
+
+ public static String escape(DatabaseType dialect, String arg) {
+ if (dialect.isMySql()) {
return format("`%s`", arg);
- } else if (databaseTypeResolver.isPostgreSQL()) {
+ } else if (dialect.isPostgres()) {
return format("\"%s\"", arg);
}
return arg;
}
+ public String formatValue(JdbcJavaType columnType, String value) {
+ return formatValue(databaseTypeResolver.databaseType(), columnType,
value);
+ }
+
+ public static String formatValue(DatabaseType dialect, JdbcJavaType
columnType, String value) {
Review Comment:
How is this specific again to a dialect?
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/data/SqlOperator.java:
##########
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.fineract.infrastructure.core.data;
+
+import static java.lang.String.format;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.alias;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.escape;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.formatValue;
+
+import jakarta.validation.constraints.NotNull;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import
org.apache.fineract.infrastructure.core.exception.PlatformServiceUnavailableException;
+import org.apache.fineract.infrastructure.core.service.database.DatabaseType;
+import org.apache.fineract.infrastructure.core.service.database.JdbcJavaType;
+
+@AllArgsConstructor
+@Getter
+public enum SqlOperator {
+
+ EQ("="), //
+ NEQ("<>"), //
+ GTE(">="), //
+ LTE("<="), //
+ GT(">"), //
+ LT("<"), //
+ LIKE("LIKE") { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s", definition, getSymbol(),
formatValue(dialect, columnType, "%" + values[0] + "%"));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s CONCAT('%%', %s, '%%')", definition,
getSymbol(), placeholder);
+ }
+ },
+ NLIKE("NOT LIKE") { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s", definition, getSymbol(),
formatValue(dialect, columnType, "%" + values[0] + "%"));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s CONCAT('%%', %s, '%%')", definition,
getSymbol(), placeholder);
+ }
+ },
+ BTW("BETWEEN", 2) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
formatValue(dialect, columnType, values[0]),
+ formatValue(dialect, columnType, values[1]));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
placeholder, placeholder);
+ }
+ },
+ NBTW("NOT BETWEEN", 2) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
formatValue(dialect, columnType, values[0]),
+ formatValue(dialect, columnType, values[1]));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
placeholder, placeholder);
+ }
+ },
+ IN("IN", -1) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s (%s)", definition, getSymbol(),
+ Arrays.stream(values).map(e -> formatValue(dialect,
columnType, e)).collect(Collectors.joining(", ")));
+ }
+
+ @Override
+ public boolean isPlaceholderSupported() {
+ return false;
+ }
+ },
+ NIN("NOT IN", -1) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s (%s)", definition, getSymbol(),
+ Arrays.stream(values).map(e -> formatValue(dialect,
columnType, e)).collect(Collectors.joining(", ")));
+ }
+
+ @Override
+ public boolean isPlaceholderSupported() {
+ return false;
+ }
+ },
+ NULL("IS NULL", 0), //
+ NNULL("IS NOT NULL", 0), //
+ ;
+
+ private final String symbol;
+ private final int paramCount;
+
+ SqlOperator(String symbol) {
+ this(symbol, 1);
+ }
+
+ public boolean isDefault() {
+ return this == getDefault();
+ }
+
+ public static SqlOperator getDefault() {
+ return EQ;
+ }
+
+ public boolean isListType() {
+ return paramCount < 0;
+ }
+
+ public String formatSql(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String alias, List<String> values) {
+ return formatSql(dialect, columnType, definition, alias, values ==
null ? null : values.toArray(String[]::new));
+ }
+
+ public String formatSql(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String alias, String... values) {
+ validateValues(values);
+ return formatImpl(dialect, columnType, alias(escape(dialect,
definition), alias), values);
+ }
+
+ protected String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return paramCount == 0 ? format("%s %s", definition, symbol)
+ : format("%s %s %s", definition, symbol, formatValue(dialect,
columnType, values[0]));
+ }
+
+ public boolean isPlaceholderSupported() {
+ return true;
+ }
+
+ public String formatPlaceholder(@NotNull DatabaseType dialect, String
definition, String alias) {
+ return formatPlaceholder(dialect, definition, alias, "?");
+ }
+
+ public String formatPlaceholder(@NotNull DatabaseType dialect, String
definition, String alias, String placeholder) {
+ return formatPlaceholderImpl(alias(escape(dialect, definition),
alias), placeholder);
+ }
+
+ protected String formatPlaceholderImpl(String definition, String
placeholder) {
+ if (!isPlaceholderSupported()) {
+ throw new UnsupportedOperationException("Placeholder is not
supported for this operator");
+ }
+ return paramCount == 0 ? format("%s %s", definition, symbol) :
format("%s %s %s", definition, symbol, placeholder);
+ }
+
+ public void validateValues(String... values) {
+ if (values == null ? paramCount != 0 : (paramCount < 0 ? values.length
< -paramCount : values.length != paramCount)) {
Review Comment:
This condition is hard to understand, unfortunately. Can't we do it
differently?
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/data/SqlOperator.java:
##########
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.fineract.infrastructure.core.data;
+
+import static java.lang.String.format;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.alias;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.escape;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.formatValue;
+
+import jakarta.validation.constraints.NotNull;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import
org.apache.fineract.infrastructure.core.exception.PlatformServiceUnavailableException;
+import org.apache.fineract.infrastructure.core.service.database.DatabaseType;
+import org.apache.fineract.infrastructure.core.service.database.JdbcJavaType;
+
+@AllArgsConstructor
+@Getter
+public enum SqlOperator {
+
+ EQ("="), //
+ NEQ("<>"), //
+ GTE(">="), //
+ LTE("<="), //
+ GT(">"), //
+ LT("<"), //
+ LIKE("LIKE") { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s", definition, getSymbol(),
formatValue(dialect, columnType, "%" + values[0] + "%"));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s CONCAT('%%', %s, '%%')", definition,
getSymbol(), placeholder);
+ }
+ },
+ NLIKE("NOT LIKE") { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s", definition, getSymbol(),
formatValue(dialect, columnType, "%" + values[0] + "%"));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s CONCAT('%%', %s, '%%')", definition,
getSymbol(), placeholder);
+ }
+ },
+ BTW("BETWEEN", 2) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
formatValue(dialect, columnType, values[0]),
+ formatValue(dialect, columnType, values[1]));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
placeholder, placeholder);
+ }
+ },
+ NBTW("NOT BETWEEN", 2) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
formatValue(dialect, columnType, values[0]),
+ formatValue(dialect, columnType, values[1]));
+ }
+
+ @Override
+ public String formatPlaceholderImpl(String definition, String
placeholder) {
+ return format("%s %s %s AND %s", definition, getSymbol(),
placeholder, placeholder);
+ }
+ },
+ IN("IN", -1) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s (%s)", definition, getSymbol(),
+ Arrays.stream(values).map(e -> formatValue(dialect,
columnType, e)).collect(Collectors.joining(", ")));
+ }
+
+ @Override
+ public boolean isPlaceholderSupported() {
+ return false;
+ }
+ },
+ NIN("NOT IN", -1) { //
+
+ @Override
+ public String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return format("%s %s (%s)", definition, getSymbol(),
+ Arrays.stream(values).map(e -> formatValue(dialect,
columnType, e)).collect(Collectors.joining(", ")));
+ }
+
+ @Override
+ public boolean isPlaceholderSupported() {
+ return false;
+ }
+ },
+ NULL("IS NULL", 0), //
+ NNULL("IS NOT NULL", 0), //
+ ;
+
+ private final String symbol;
+ private final int paramCount;
+
+ SqlOperator(String symbol) {
+ this(symbol, 1);
+ }
+
+ public boolean isDefault() {
+ return this == getDefault();
+ }
+
+ public static SqlOperator getDefault() {
+ return EQ;
+ }
+
+ public boolean isListType() {
+ return paramCount < 0;
+ }
+
+ public String formatSql(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String alias, List<String> values) {
+ return formatSql(dialect, columnType, definition, alias, values ==
null ? null : values.toArray(String[]::new));
+ }
+
+ public String formatSql(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String alias, String... values) {
+ validateValues(values);
+ return formatImpl(dialect, columnType, alias(escape(dialect,
definition), alias), values);
+ }
+
+ protected String formatImpl(@NotNull DatabaseType dialect, JdbcJavaType
columnType, String definition, String... values) {
+ return paramCount == 0 ? format("%s %s", definition, symbol)
+ : format("%s %s %s", definition, symbol, formatValue(dialect,
columnType, values[0]));
+ }
+
+ public boolean isPlaceholderSupported() {
+ return true;
+ }
+
+ public String formatPlaceholder(@NotNull DatabaseType dialect, String
definition, String alias) {
+ return formatPlaceholder(dialect, definition, alias, "?");
+ }
+
+ public String formatPlaceholder(@NotNull DatabaseType dialect, String
definition, String alias, String placeholder) {
+ return formatPlaceholderImpl(alias(escape(dialect, definition),
alias), placeholder);
+ }
+
+ protected String formatPlaceholderImpl(String definition, String
placeholder) {
+ if (!isPlaceholderSupported()) {
+ throw new UnsupportedOperationException("Placeholder is not
supported for this operator");
+ }
+ return paramCount == 0 ? format("%s %s", definition, symbol) :
format("%s %s %s", definition, symbol, placeholder);
+ }
+
+ public void validateValues(String... values) {
+ if (values == null ? paramCount != 0 : (paramCount < 0 ? values.length
< -paramCount : values.length != paramCount)) {
+ throw new
PlatformServiceUnavailableException("error.msg.database.operator.invalid",
+ "Number of parameters " + Arrays.toString(values) + " must
be " + Math.abs(paramCount) + " on " + this);
+ }
+ }
+
+ public void validateValues(List<String> values) {
+ int size = values == null ? 0 : values.size();
+ if (paramCount < 0 ? size < -paramCount : size != paramCount) {
Review Comment:
This condition is hard to understand, unfortunately. Can't we do it
differently?
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/core/serialization/DatatableCommandFromApiJsonDeserializer.java:
##########
@@ -34,161 +42,101 @@
import org.apache.fineract.infrastructure.core.exception.InvalidJsonException;
import
org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import
org.apache.fineract.infrastructure.core.exception.PlatformDataIntegrityException;
+import
org.apache.fineract.infrastructure.core.service.database.DatabaseTypeResolver;
+import org.apache.fineract.infrastructure.core.service.database.JdbcJavaType;
import org.apache.fineract.infrastructure.dataqueries.data.EntityTables;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class DatatableCommandFromApiJsonDeserializer {
- public static final String DATATABLE_NAME = "datatableName";
- public static final String ENTITY_SUB_TYPE = "entitySubType";
- public static final String APPTABLE_NAME = "apptableName";
- public static final String MULTI_ROW = "multiRow";
- public static final String COLUMNS = "columns";
- public static final String NAME = "name";
- public static final String TYPE = "type";
- public static final String LENGTH = "length";
- public static final String MANDATORY = "mandatory";
- public static final String CODE = "code";
- public static final String CHANGE_COLUMNS = "changeColumns";
- public static final String ADD_COLUMNS = "addColumns";
- public static final String DROP_COLUMNS = "dropColumns";
- public static final String AFTER = "after";
- public static final String NEW_CODE = "newCode";
- public static final String NEW_NAME = "newName";
- public static final String STRING = "string";
- public static final String NUMBER = "number";
- public static final String BOOLEAN = "boolean";
- public static final String DECIMAL = "decimal";
- public static final String DATE = "date";
- public static final String DATETIME = "datetime";
- public static final String TEXT = "text";
- public static final String DROPDOWN = "dropdown";
- private static final String DATATABLE_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,48}[a-zA-Z0-9]$";
- private static final String DATATABLE_COLUMN_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,}[a-zA-Z0-9]$";
- private static final String INDEXED = "indexed";
- private static final String UNIQUE = "unique";
+ public static final String DATATABLE_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,48}[a-zA-Z0-9]$";
+ public static final String DATATABLE_COLUMN_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,}[a-zA-Z0-9]$";
+
/**
* The parameters supported for this command.
*/
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE = new
HashSet<>(
- Arrays.asList(DATATABLE_NAME, ENTITY_SUB_TYPE, APPTABLE_NAME,
MULTI_ROW, COLUMNS));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, TYPE, LENGTH, MANDATORY, CODE, UNIQUE,
INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_UPDATE = new
HashSet<>(
- Arrays.asList(APPTABLE_NAME, ENTITY_SUB_TYPE, CHANGE_COLUMNS,
ADD_COLUMNS, DROP_COLUMNS));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_ADD_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, TYPE, LENGTH, MANDATORY, AFTER, CODE, UNIQUE,
INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CHANGE_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, NEW_NAME, LENGTH, MANDATORY, AFTER, CODE,
NEW_CODE, UNIQUE, INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_DROP_COLUMNS =
new HashSet<>(List.of(NAME));
- private static final Object[] SUPPORTED_COLUMN_TYPES = { STRING, NUMBER,
BOOLEAN, DECIMAL, DATE, DATETIME, TEXT, DROPDOWN };
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE =
Set.of(API_PARAM_DATATABLE_NAME, API_PARAM_SUBTYPE,
Review Comment:
Why was it necessary to rename it? Your PR is already way too big; this is
just bringing more unnecessary changes into scope.
##########
fineract-loan/src/main/java/org/apache/fineract/portfolio/loanaccount/domain/Loan.java:
##########
@@ -558,8 +558,8 @@ private Loan(final String accountNo, final Client client,
final Group group, fin
this.createStandingInstructionAtDisbursement =
createStandingInstructionAtDisbursement;
/*
- * During loan origination stage and before loan is approved
principal_amount, approved_principal and
- * principal_amount_demanded will same amount and that amount is same
as applicant loan demanded amount.
+ * During loan origination stage and before loan is approved
principal_amount, approved_principal and principal_amount_demanded will same
amount and that amount is same as applicant loan
Review Comment:
Please revert these unnecessary changes. I'm fine with javadoc changes if
needed, but these are polluting the PR.
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/service/database/JdbcJavaType.java:
##########
@@ -0,0 +1,321 @@
+package org.apache.fineract.infrastructure.core.service.database;
+
+import jakarta.validation.constraints.NotNull;
+import java.sql.JDBCType;
+import
org.apache.fineract.infrastructure.core.exception.PlatformServiceUnavailableException;
+
+public enum JdbcJavaType {
Review Comment:
Same as for JavaType.
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/dataqueries/service/ReadWriteNonCoreDataServiceImpl.java:
##########
@@ -19,31 +19,50 @@
package org.apache.fineract.infrastructure.dataqueries.service;
import static java.util.Arrays.asList;
-import static java.util.Locale.ENGLISH;
import static
org.apache.fineract.infrastructure.core.data.ApiParameterError.parameterErrorWithValue;
+import static org.apache.fineract.infrastructure.core.data.SqlOperator.EQ;
+import static
org.apache.fineract.infrastructure.core.serialization.DatatableCommandFromApiJsonDeserializer.DATATABLE_NAME_REGEX_PATTERN;
+import static
org.apache.fineract.infrastructure.core.service.database.JdbcJavaType.BIT;
+import static
org.apache.fineract.infrastructure.core.service.database.JdbcJavaType.DATE;
+import static
org.apache.fineract.infrastructure.core.service.database.JdbcJavaType.DATETIME;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_AFTER;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_CODE;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_INDEXED;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_LENGTH;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_MANDATORY;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_NAME;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_NEWCODE;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_NEWNAME;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_TYPE;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_TYPE_DROPDOWN;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_UNIQUE;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_ADDCOLUMNS;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_APPTABLE_NAME;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_CHANGECOLUMNS;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_COLUMNS;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_DATATABLE_NAME;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_DROPCOLUMNS;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_MULTIROW;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_PARAM_SUBTYPE;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.TABLE_COLUMN_CODE_MAPPINGS;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.TABLE_FIELD_ID;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.TABLE_REGISTERED_TABLE;
+import static
org.apache.fineract.portfolio.search.SearchConstants.API_PARAM_COLUMN_FILTERS;
import com.google.common.base.Splitter;
-import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;
import jakarta.persistence.PersistenceException;
+import jakarta.validation.constraints.NotNull;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.sql.Date;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.format.DateTimeFormatter;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Set;
+import java.util.*;
Review Comment:
I'm not sure how this didn't break, but let's not use * imports. Spotless
should break on these.
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/dataqueries/service/EntityDatatableChecksWritePlatformServiceImpl.java:
##########
@@ -105,11 +104,11 @@ public CommandProcessingResult createCheck(final
JsonCommand command) {
throw new
EntityDatatableCheckAlreadyExistsException(entity, status, datatableName);
}
} else {
- EntityTables entityTable = EntityTables.fromName(entity);
- if (EntityTables.LOAN == entityTable) {
+ EntityTables entityTable = EntityTables.fromEntityName(entity);
+ if (entityTable == EntityTables.LOAN) {
Review Comment:
So why did we flip the condition? To be honest, what you did is also my
preference, but it's unnecessary. The result is the same, yet it's adding an
extra change to your PR.
##########
fineract-provider/src/main/java/org/apache/fineract/portfolio/loanaccount/domain/LoanAccountDomainServiceJpa.java:
##########
@@ -173,9 +173,8 @@ public LoanTransaction makeRepayment(final
LoanTransactionType repaymentTransact
// TODO: Is it required to validate transaction date with meeting dates
// if repayments is synced with meeting?
/*
- * if(loan.isSyncDisbursementWithMeeting()){ // validate actual
disbursement date against meeting date
- * CalendarInstance calendarInstance =
this.calendarInstanceRepository.findCalendarInstaneByLoanId
- * (loan.getId(), CalendarEntityType.LOANS.getValue());
this.loanEventApiJsonValidator
+ * if(loan.isSyncDisbursementWithMeeting()){ // validate actual
disbursement date against meeting date CalendarInstance calendarInstance =
Review Comment:
Not needed. Please revert.
##########
fineract-provider/src/main/java/org/apache/fineract/portfolio/search/service/SearchUtil.java:
##########
@@ -0,0 +1,266 @@
+package org.apache.fineract.portfolio.search.service;
+
+import static java.util.Locale.ENGLISH;
+import static
org.apache.fineract.infrastructure.core.data.ApiParameterError.parameterErrorWithValue;
+import static
org.apache.fineract.infrastructure.dataqueries.api.DataTableApiConstant.API_FIELD_MANDATORY;
+import static
org.apache.fineract.portfolio.search.SearchConstants.API_PARAM_COLUMN;
+
+import com.google.common.base.Predicate;
+import com.google.gson.JsonObject;
+import jakarta.validation.constraints.NotNull;
+import java.math.BigDecimal;
+import java.sql.Date;
+import java.sql.Timestamp;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.*;
+import java.util.stream.Collectors;
+import org.apache.commons.lang3.BooleanUtils;
+import org.apache.commons.lang3.StringUtils;
+import org.apache.fineract.infrastructure.core.data.ApiParameterError;
+import org.apache.fineract.infrastructure.core.data.SqlOperator;
+import
org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
+import
org.apache.fineract.infrastructure.core.exception.PlatformDataIntegrityException;
+import org.apache.fineract.infrastructure.core.serialization.JsonParserHelper;
+import org.apache.fineract.infrastructure.core.service.database.DatabaseType;
+import org.apache.fineract.infrastructure.core.service.database.JdbcJavaType;
+import
org.apache.fineract.infrastructure.dataqueries.data.ResultsetColumnHeaderData;
+import org.apache.fineract.infrastructure.security.utils.SQLInjectionValidator;
+import org.apache.fineract.portfolio.search.data.AdvancedQueryData;
+import org.apache.fineract.portfolio.search.data.ColumnFilterData;
+import org.apache.fineract.portfolio.search.data.FilterData;
+import org.springframework.jdbc.support.rowset.SqlRowSet;
+
+public class SearchUtil {
Review Comment:
Woah, this class is really really complex and impossible to review. Can we
separate pieces out according to responsibilities?
##########
fineract-provider/src/main/java/org/apache/fineract/portfolio/savings/domain/search/SavingsTransactionSearch.java:
##########
@@ -43,7 +43,7 @@ public static class Filters {
@Data
public static class RangeFilter<T> {
- private RangeOperator operator;
+ private SqlOperator operator;
Review Comment:
Now this I don't like. We're exposing "SqlOperators" to higher layers
(i.e. the API), which shouldn't happen.
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/data/SqlOperator.java:
##########
@@ -0,0 +1,197 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.fineract.infrastructure.core.data;
+
+import static java.lang.String.format;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.alias;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.escape;
+import static
org.apache.fineract.infrastructure.core.service.database.DatabaseSpecificSQLGenerator.formatValue;
+
+import jakarta.validation.constraints.NotNull;
+import java.util.Arrays;
+import java.util.List;
+import java.util.stream.Collectors;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import
org.apache.fineract.infrastructure.core.exception.PlatformServiceUnavailableException;
+import org.apache.fineract.infrastructure.core.service.database.DatabaseType;
+import org.apache.fineract.infrastructure.core.service.database.JdbcJavaType;
+
+@AllArgsConstructor
+@Getter
+public enum SqlOperator {
Review Comment:
I think this class is way too complicated and is trying to solve a lot of
things that you don't need in order to meet your needs.
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/service/database/JavaType.java:
##########
@@ -0,0 +1,352 @@
+package org.apache.fineract.infrastructure.core.service.database;
+
+import jakarta.validation.constraints.NotNull;
+import java.io.InputStream;
+import java.io.Reader;
+import java.io.Serializable;
+import java.math.BigDecimal;
+import java.math.BigInteger;
+import java.sql.Time;
+import java.sql.Timestamp;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.OffsetDateTime;
+import java.time.OffsetTime;
+import java.util.Arrays;
+import java.util.Calendar;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Locale;
+import java.util.Map;
+import java.util.stream.Collectors;
+import org.apache.commons.lang3.StringUtils;
+import
org.apache.fineract.infrastructure.core.exception.PlatformServiceUnavailableException;
+import org.apache.fineract.organisation.monetary.domain.MoneyHelper;
+import org.springframework.data.domain.Persistable;
+
+public enum JavaType {
Review Comment:
Just like I said with the SqlOperator class, this is way way way too complex
for a single change. Let's shoot for the minimum you need instead of trying to
solve all the world's problems in a single breath.
##########
fineract-core/src/main/java/org/apache/fineract/infrastructure/core/service/database/DatabaseTypeResolver.java:
##########
@@ -58,10 +58,10 @@ public DatabaseType databaseType() {
}
public boolean isPostgreSQL() {
- return DatabaseType.POSTGRESQL.equals(currentDatabaseType.get());
+ return DatabaseType.POSTGRESQL == currentDatabaseType.get();
Review Comment:
Why was this necessary? It's not an issue, I'm just curious.
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/dataqueries/api/DatatablesApiResource.java:
##########
@@ -225,6 +228,18 @@ public String queryValues(@PathParam("datatable")
@Parameter(description = "data
return
this.toApiJsonSerializer.serializePretty(ApiParameterHelper.prettyPrint(uriInfo.getQueryParameters()),
result);
}
+ @POST
+ @Path("{datatable}/query")
+ @Produces({ MediaType.APPLICATION_JSON })
+ @Operation(summary = "Query Data Table values", description = "Query
values from a registered data table.")
+ @ApiResponses({
+ @ApiResponse(responseCode = "200", description = "OK", content =
@Content(schema = @Schema(implementation = List.class))) })
+ public String advancedQuery(@PathParam("datatable") @Parameter(description
= "datatable") final String datatable,
+ PagedRequest<AdvancedQueryData> queryRequest, @Context final
UriInfo uriInfo) {
+ final Page<JsonObject> result =
this.readWriteNonCoreDataService.queryDataTableAdvanced(datatable,
queryRequest);
+ return
this.toApiJsonSerializer.serializePretty(ApiParameterHelper.prettyPrint(uriInfo.getQueryParameters()),
result);
Review Comment:
Since this is a new API, you should simply return a Page<....> and it'll be
serialized automatically. That way you can drop all the openAPI annotations as
well.
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/dataqueries/service/EntityDatatableChecksReadPlatformServiceImpl.java:
##########
@@ -174,20 +174,22 @@ private List<DatatableChecksData> getDataTables() {
return this.jdbcTemplate.query(sql, this.registerDataTableMapper); //
NOSONAR
}
- protected static final class RegisterDataTableMapper implements
RowMapper<DatatableChecksData> {
+ protected final class RegisterDataTableMapper implements
RowMapper<DatatableChecksData> {
+
+ public static final String SELECT_FROM = " t.application_table_name as
entity, t.registered_table_name as tableName FROM x_registered_table t WHERE ";
@Override
public DatatableChecksData mapRow(final ResultSet rs,
@SuppressWarnings("unused") final int rowNum) throws SQLException {
-
final String entity = rs.getString("entity");
final String tableName = rs.getString("tableName");
return new DatatableChecksData(entity, tableName);
}
public String schema() {
- return " t.application_table_name as entity,
t.registered_table_name as tableName " + " from x_registered_table t "
- + " where application_table_name IN(
'm_client','m_group','m_savings_account','m_loan')";
+ String[] values =
EntityTables.getFiltered(EntityTables::hasCheck).stream().map(EntityTables::getName).toArray(String[]::new);
+ return SELECT_FROM +
SqlOperator.IN.formatSql(databaseTypeResolver.databaseType(),
JdbcJavaType.VARCHAR,
Review Comment:
This SqlOperator doesn't seem to be the best idea — it is very confusing to
use. Why don't we simply build the "IN" condition as a string here? Nothing
seems too hard about that as far as I can see.
##########
fineract-provider/src/main/java/org/apache/fineract/portfolio/loanaccount/domain/transactionprocessor/AbstractLoanRepaymentScheduleTransactionProcessor.java:
##########
@@ -48,8 +48,7 @@
import
org.apache.fineract.portfolio.loanaccount.domain.transactionprocessor.impl.InterestPrincipalPenaltyFeesOrderLoanRepaymentScheduleTransactionProcessor;
/**
- * Abstract implementation of {@link
LoanRepaymentScheduleTransactionProcessor} which is more convenient for concrete
- * implementations to extend.
+ * Abstract implementation of {@link
LoanRepaymentScheduleTransactionProcessor} which is more convenient for
concrete implementations to extend.
Review Comment:
Not needed. Please revert.
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/dataqueries/service/ReadReportingServiceImpl.java:
##########
@@ -383,10 +382,8 @@ public String schema(final Long reportId) {
return sql;
/*
- * used to only return reports that the use can run as done in
report UI but not necessary as there is a
- * read_report permission which should give user access to look
all reports + " where exists" +
- * " (select 'f'" + " from m_appuser_role ur " + " join m_role r
on r.id = ur.role_id" +
- * " left join m_role_permission rp on rp.role_id = r.id" +
+ * used to only return reports that the use can run as done in
report UI but not necessary as there is a read_report permission which should
give user access to look all reports +
Review Comment:
Why these changes again?
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/core/serialization/DatatableCommandFromApiJsonDeserializer.java:
##########
@@ -34,161 +42,101 @@
import org.apache.fineract.infrastructure.core.exception.InvalidJsonException;
import
org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import
org.apache.fineract.infrastructure.core.exception.PlatformDataIntegrityException;
+import
org.apache.fineract.infrastructure.core.service.database.DatabaseTypeResolver;
+import org.apache.fineract.infrastructure.core.service.database.JdbcJavaType;
import org.apache.fineract.infrastructure.dataqueries.data.EntityTables;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class DatatableCommandFromApiJsonDeserializer {
- public static final String DATATABLE_NAME = "datatableName";
- public static final String ENTITY_SUB_TYPE = "entitySubType";
- public static final String APPTABLE_NAME = "apptableName";
- public static final String MULTI_ROW = "multiRow";
- public static final String COLUMNS = "columns";
- public static final String NAME = "name";
- public static final String TYPE = "type";
- public static final String LENGTH = "length";
- public static final String MANDATORY = "mandatory";
- public static final String CODE = "code";
- public static final String CHANGE_COLUMNS = "changeColumns";
- public static final String ADD_COLUMNS = "addColumns";
- public static final String DROP_COLUMNS = "dropColumns";
- public static final String AFTER = "after";
- public static final String NEW_CODE = "newCode";
- public static final String NEW_NAME = "newName";
- public static final String STRING = "string";
- public static final String NUMBER = "number";
- public static final String BOOLEAN = "boolean";
- public static final String DECIMAL = "decimal";
- public static final String DATE = "date";
- public static final String DATETIME = "datetime";
- public static final String TEXT = "text";
- public static final String DROPDOWN = "dropdown";
- private static final String DATATABLE_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,48}[a-zA-Z0-9]$";
- private static final String DATATABLE_COLUMN_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,}[a-zA-Z0-9]$";
- private static final String INDEXED = "indexed";
- private static final String UNIQUE = "unique";
+ public static final String DATATABLE_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,48}[a-zA-Z0-9]$";
+ public static final String DATATABLE_COLUMN_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,}[a-zA-Z0-9]$";
+
/**
* The parameters supported for this command.
*/
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE = new
HashSet<>(
- Arrays.asList(DATATABLE_NAME, ENTITY_SUB_TYPE, APPTABLE_NAME,
MULTI_ROW, COLUMNS));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, TYPE, LENGTH, MANDATORY, CODE, UNIQUE,
INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_UPDATE = new
HashSet<>(
- Arrays.asList(APPTABLE_NAME, ENTITY_SUB_TYPE, CHANGE_COLUMNS,
ADD_COLUMNS, DROP_COLUMNS));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_ADD_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, TYPE, LENGTH, MANDATORY, AFTER, CODE, UNIQUE,
INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CHANGE_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, NEW_NAME, LENGTH, MANDATORY, AFTER, CODE,
NEW_CODE, UNIQUE, INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_DROP_COLUMNS =
new HashSet<>(List.of(NAME));
- private static final Object[] SUPPORTED_COLUMN_TYPES = { STRING, NUMBER,
BOOLEAN, DECIMAL, DATE, DATETIME, TEXT, DROPDOWN };
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE =
Set.of(API_PARAM_DATATABLE_NAME, API_PARAM_SUBTYPE,
+ API_PARAM_APPTABLE_NAME, API_PARAM_MULTIROW, API_PARAM_COLUMNS);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE_COLUMNS =
Set.of(API_FIELD_NAME, API_FIELD_TYPE, API_FIELD_LENGTH,
+ API_FIELD_MANDATORY, API_FIELD_CODE, API_FIELD_UNIQUE,
API_FIELD_INDEXED);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_UPDATE =
Set.of(API_PARAM_APPTABLE_NAME, API_PARAM_SUBTYPE,
+ API_PARAM_CHANGECOLUMNS, API_PARAM_ADDCOLUMNS,
API_PARAM_DROPCOLUMNS);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_ADD_COLUMNS =
Set.of(API_FIELD_NAME, API_FIELD_TYPE, API_FIELD_LENGTH,
+ API_FIELD_MANDATORY, API_FIELD_AFTER, API_FIELD_CODE,
API_FIELD_UNIQUE, API_FIELD_INDEXED);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_CHANGE_COLUMNS =
Set.of(API_FIELD_NAME, API_FIELD_NEWNAME, API_FIELD_LENGTH,
+ API_FIELD_MANDATORY, API_FIELD_AFTER, API_FIELD_CODE,
API_FIELD_NEWCODE, API_FIELD_UNIQUE, API_FIELD_INDEXED);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_DROP_COLUMNS =
Set.of(API_FIELD_NAME);
+ private static final Object[] SUPPORTED_COLUMN_TYPES = {
API_FIELD_TYPE_STRING, API_FIELD_TYPE_NUMBER, API_FIELD_TYPE_BOOLEAN,
+ API_FIELD_TYPE_DECIMAL, API_FIELD_TYPE_DATE,
API_FIELD_TYPE_DATETIME, API_FIELD_TYPE_TEXT, API_FIELD_TYPE_DROPDOWN };
private final FromJsonHelper fromApiJsonHelper;
+ private final DatabaseTypeResolver databaseTypeResolver;
@Autowired
- public DatatableCommandFromApiJsonDeserializer(final FromJsonHelper
fromApiJsonHelper) {
+ public DatatableCommandFromApiJsonDeserializer(final FromJsonHelper
fromApiJsonHelper, DatabaseTypeResolver databaseTypeResolver) {
this.fromApiJsonHelper = fromApiJsonHelper;
- }
-
- private void validateType(final DataValidatorBuilder baseDataValidator,
final JsonElement column) {
Review Comment:
Why was this method moved down in the class? Unnecessary change again.
##########
fineract-provider/src/main/java/org/apache/fineract/portfolio/savings/api/SavingsAccountTransactionsApiResource.java:
##########
@@ -137,6 +141,20 @@ public String searchTransactions(@PathParam("savingsId")
@Parameter(description
return toApiJsonSerializer.serialize(transactionsData);
}
+ @POST
+ @Path("query")
+ @Consumes({ MediaType.APPLICATION_JSON })
+ @Produces({ MediaType.APPLICATION_JSON })
+ @Operation(summary = "Search Savings Account Transactions")
+ @ApiResponses({
+ @ApiResponse(responseCode = "200", description = "OK", content =
@Content(schema = @Schema(implementation = List.class))) })
+ public String advancedQuery(@PathParam("savingsId") @Parameter(description
= "savingsId") final Long savingsId,
+ PagedRequest<AdvancedQueryRequest> queryRequest, @Context final
UriInfo uriInfo) {
+ final org.springframework.data.domain.Page<JsonObject> result =
transactionsSearchServiceImpl.queryAdvanced(savingsId,
+ queryRequest);
+ return
this.toApiJsonSerializer.serializePretty(ApiParameterHelper.prettyPrint(uriInfo.getQueryParameters()),
result);
Review Comment:
As I said for the datatables API, you can go with Page<...> since this is a
new API.
##########
fineract-provider/src/main/java/org/apache/fineract/infrastructure/core/serialization/DatatableCommandFromApiJsonDeserializer.java:
##########
@@ -34,161 +42,101 @@
import org.apache.fineract.infrastructure.core.exception.InvalidJsonException;
import
org.apache.fineract.infrastructure.core.exception.PlatformApiDataValidationException;
import
org.apache.fineract.infrastructure.core.exception.PlatformDataIntegrityException;
+import
org.apache.fineract.infrastructure.core.service.database.DatabaseTypeResolver;
+import org.apache.fineract.infrastructure.core.service.database.JdbcJavaType;
import org.apache.fineract.infrastructure.dataqueries.data.EntityTables;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
@Component
public class DatatableCommandFromApiJsonDeserializer {
- public static final String DATATABLE_NAME = "datatableName";
- public static final String ENTITY_SUB_TYPE = "entitySubType";
- public static final String APPTABLE_NAME = "apptableName";
- public static final String MULTI_ROW = "multiRow";
- public static final String COLUMNS = "columns";
- public static final String NAME = "name";
- public static final String TYPE = "type";
- public static final String LENGTH = "length";
- public static final String MANDATORY = "mandatory";
- public static final String CODE = "code";
- public static final String CHANGE_COLUMNS = "changeColumns";
- public static final String ADD_COLUMNS = "addColumns";
- public static final String DROP_COLUMNS = "dropColumns";
- public static final String AFTER = "after";
- public static final String NEW_CODE = "newCode";
- public static final String NEW_NAME = "newName";
- public static final String STRING = "string";
- public static final String NUMBER = "number";
- public static final String BOOLEAN = "boolean";
- public static final String DECIMAL = "decimal";
- public static final String DATE = "date";
- public static final String DATETIME = "datetime";
- public static final String TEXT = "text";
- public static final String DROPDOWN = "dropdown";
- private static final String DATATABLE_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,48}[a-zA-Z0-9]$";
- private static final String DATATABLE_COLUMN_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,}[a-zA-Z0-9]$";
- private static final String INDEXED = "indexed";
- private static final String UNIQUE = "unique";
+ public static final String DATATABLE_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,48}[a-zA-Z0-9]$";
+ public static final String DATATABLE_COLUMN_NAME_REGEX_PATTERN =
"^[a-zA-Z][a-zA-Z0-9\\-_\\s]{0,}[a-zA-Z0-9]$";
+
/**
* The parameters supported for this command.
*/
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE = new
HashSet<>(
- Arrays.asList(DATATABLE_NAME, ENTITY_SUB_TYPE, APPTABLE_NAME,
MULTI_ROW, COLUMNS));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, TYPE, LENGTH, MANDATORY, CODE, UNIQUE,
INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_UPDATE = new
HashSet<>(
- Arrays.asList(APPTABLE_NAME, ENTITY_SUB_TYPE, CHANGE_COLUMNS,
ADD_COLUMNS, DROP_COLUMNS));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_ADD_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, TYPE, LENGTH, MANDATORY, AFTER, CODE, UNIQUE,
INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_CHANGE_COLUMNS =
new HashSet<>(
- Arrays.asList(NAME, NEW_NAME, LENGTH, MANDATORY, AFTER, CODE,
NEW_CODE, UNIQUE, INDEXED));
- private static final Set<String> SUPPORTED_PARAMETERS_FOR_DROP_COLUMNS =
new HashSet<>(List.of(NAME));
- private static final Object[] SUPPORTED_COLUMN_TYPES = { STRING, NUMBER,
BOOLEAN, DECIMAL, DATE, DATETIME, TEXT, DROPDOWN };
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE =
Set.of(API_PARAM_DATATABLE_NAME, API_PARAM_SUBTYPE,
+ API_PARAM_APPTABLE_NAME, API_PARAM_MULTIROW, API_PARAM_COLUMNS);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_CREATE_COLUMNS =
Set.of(API_FIELD_NAME, API_FIELD_TYPE, API_FIELD_LENGTH,
+ API_FIELD_MANDATORY, API_FIELD_CODE, API_FIELD_UNIQUE,
API_FIELD_INDEXED);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_UPDATE =
Set.of(API_PARAM_APPTABLE_NAME, API_PARAM_SUBTYPE,
+ API_PARAM_CHANGECOLUMNS, API_PARAM_ADDCOLUMNS,
API_PARAM_DROPCOLUMNS);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_ADD_COLUMNS =
Set.of(API_FIELD_NAME, API_FIELD_TYPE, API_FIELD_LENGTH,
+ API_FIELD_MANDATORY, API_FIELD_AFTER, API_FIELD_CODE,
API_FIELD_UNIQUE, API_FIELD_INDEXED);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_CHANGE_COLUMNS =
Set.of(API_FIELD_NAME, API_FIELD_NEWNAME, API_FIELD_LENGTH,
+ API_FIELD_MANDATORY, API_FIELD_AFTER, API_FIELD_CODE,
API_FIELD_NEWCODE, API_FIELD_UNIQUE, API_FIELD_INDEXED);
+ private static final Set<String> SUPPORTED_PARAMETERS_FOR_DROP_COLUMNS =
Set.of(API_FIELD_NAME);
+ private static final Object[] SUPPORTED_COLUMN_TYPES = {
API_FIELD_TYPE_STRING, API_FIELD_TYPE_NUMBER, API_FIELD_TYPE_BOOLEAN,
+ API_FIELD_TYPE_DECIMAL, API_FIELD_TYPE_DATE,
API_FIELD_TYPE_DATETIME, API_FIELD_TYPE_TEXT, API_FIELD_TYPE_DROPDOWN };
private final FromJsonHelper fromApiJsonHelper;
+ private final DatabaseTypeResolver databaseTypeResolver;
@Autowired
- public DatatableCommandFromApiJsonDeserializer(final FromJsonHelper
fromApiJsonHelper) {
+ public DatatableCommandFromApiJsonDeserializer(final FromJsonHelper
fromApiJsonHelper, DatabaseTypeResolver databaseTypeResolver) {
this.fromApiJsonHelper = fromApiJsonHelper;
- }
-
- private void validateType(final DataValidatorBuilder baseDataValidator,
final JsonElement column) {
- final String type = this.fromApiJsonHelper.extractStringNamed(TYPE,
column);
-
baseDataValidator.reset().parameter(TYPE).value(type).notBlank().isOneOfTheseStringValues(SUPPORTED_COLUMN_TYPES);
-
- if (type != null && type.equalsIgnoreCase("String")) {
- if (this.fromApiJsonHelper.parameterExists(LENGTH, column)) {
- final String lengthStr =
this.fromApiJsonHelper.extractStringNamed(LENGTH, column);
- if (lengthStr != null && !StringUtils.isWhitespace(lengthStr)
&& StringUtils.isNumeric(lengthStr)
- && StringUtils.isNotBlank(lengthStr)) {
- final Integer length = Integer.parseInt(lengthStr);
-
baseDataValidator.reset().parameter(LENGTH).value(length).positiveAmount();
- } else if (StringUtils.isBlank(lengthStr) ||
StringUtils.isWhitespace(lengthStr)) {
-
baseDataValidator.reset().parameter(LENGTH).failWithCode("must.be.provided.when.type.is.String");
- } else if (!StringUtils.isNumeric(lengthStr)) {
-
baseDataValidator.reset().parameter(LENGTH).failWithCode("not.greater.than.zero");
- }
- } else {
-
baseDataValidator.reset().parameter(LENGTH).failWithCode("must.be.provided.when.type.is.String");
- }
- } else {
-
baseDataValidator.reset().parameter(LENGTH).mustBeBlankWhenParameterProvidedIs(TYPE,
type);
- }
-
- final String code = this.fromApiJsonHelper.extractStringNamed(CODE,
column);
- if (type != null && type.equalsIgnoreCase(DROPDOWN)) {
- if (code != null) {
-
baseDataValidator.reset().parameter(CODE).value(code).notBlank().matchesRegularExpression(DATATABLE_NAME_REGEX_PATTERN);
- } else {
-
baseDataValidator.reset().parameter(CODE).value(code).cantBeBlankWhenParameterProvidedIs(TYPE,
type);
- }
- } else {
-
baseDataValidator.reset().parameter(CODE).value(code).mustBeBlankWhenParameterProvided(TYPE,
type);
- }
+ this.databaseTypeResolver = databaseTypeResolver;
}
public void validateForCreate(final String json) {
if (StringUtils.isBlank(json)) {
throw new InvalidJsonException();
}
- final Type typeOfMap = new TypeToken<Map<String, Object>>() {
-
- }.getType();
+ final Type typeOfMap = new TypeToken<Map<String, Object>>()
{}.getType();
this.fromApiJsonHelper.checkForUnsupportedParameters(typeOfMap, json,
SUPPORTED_PARAMETERS_FOR_CREATE);
final List<ApiParameterError> dataValidationErrors = new ArrayList<>();
final DataValidatorBuilder baseDataValidator = new
DataValidatorBuilder(dataValidationErrors).resource("datatable");
final JsonElement element = this.fromApiJsonHelper.parse(json);
- final String datatableName =
this.fromApiJsonHelper.extractStringNamed(DATATABLE_NAME, element);
-
baseDataValidator.reset().parameter(DATATABLE_NAME).value(datatableName).notBlank().notExceedingLengthOf(50)
+ final String datatableName =
this.fromApiJsonHelper.extractStringNamed(API_PARAM_DATATABLE_NAME, element);
+
baseDataValidator.reset().parameter(API_PARAM_DATATABLE_NAME).value(datatableName).notBlank().notExceedingLengthOf(50)
.matchesRegularExpression(DATATABLE_NAME_REGEX_PATTERN);
- final String apptableName =
this.fromApiJsonHelper.extractStringNamed(APPTABLE_NAME, element);
-
baseDataValidator.reset().parameter(APPTABLE_NAME).value(apptableName).notBlank().notExceedingLengthOf(50)
- .isOneOfTheseStringValues(EntityTables.getEntitiesList());
+ final String apptableName =
this.fromApiJsonHelper.extractStringNamed(API_PARAM_APPTABLE_NAME, element);
+
baseDataValidator.reset().parameter(API_PARAM_APPTABLE_NAME).value(apptableName).notBlank().notExceedingLengthOf(50)
+ .isOneOfTheseStringValues(EntityTables.getEntityNames());
+
+ EntityTables entityTable = EntityTables.fromEntityName(apptableName);
+ validateEntitySubType(baseDataValidator, element, entityTable);
- validateEntitySubType(baseDataValidator, element, apptableName);
- final String fkColumnName = (apptableName != null) ?
apptableName.substring(2) + "_id" : "";
+ final String fkColumnName =
entityTable.getForeignKeyColumnNameOnDatatable();
- final Boolean multiRow =
this.fromApiJsonHelper.extractBooleanNamed(MULTI_ROW, element);
-
baseDataValidator.reset().parameter(MULTI_ROW).value(multiRow).ignoreIfNull().notBlank().isOneOfTheseValues(true,
false);
+ final Boolean multiRow =
this.fromApiJsonHelper.extractBooleanNamed(API_PARAM_MULTIROW, element);
+
baseDataValidator.reset().parameter(API_PARAM_MULTIROW).value(multiRow).ignoreIfNull().notBlank().isOneOfTheseValues(true,
false);
- final JsonArray columns =
this.fromApiJsonHelper.extractJsonArrayNamed(COLUMNS, element);
-
baseDataValidator.reset().parameter(COLUMNS).value(columns).notNull().jsonArrayNotEmpty();
+ final JsonArray columns =
this.fromApiJsonHelper.extractJsonArrayNamed(API_PARAM_COLUMNS, element);
+
baseDataValidator.reset().parameter(API_PARAM_COLUMNS).value(columns).notNull().jsonArrayNotEmpty();
if (columns != null) {
for (final JsonElement column : columns) {
this.fromApiJsonHelper.checkForUnsupportedParameters(column.getAsJsonObject(),
SUPPORTED_PARAMETERS_FOR_CREATE_COLUMNS);
- final String name =
this.fromApiJsonHelper.extractStringNamed(NAME, column);
-
baseDataValidator.reset().parameter(NAME).value(name).notBlank().isNotOneOfTheseValues("id",
fkColumnName)
-
.matchesRegularExpression(DATATABLE_COLUMN_NAME_REGEX_PATTERN);
+ final String name =
this.fromApiJsonHelper.extractStringNamed(API_FIELD_NAME, column);
+
baseDataValidator.reset().parameter(API_FIELD_NAME).value(name).notBlank()
+ .isNotOneOfTheseValues(TABLE_FIELD_ID,
fkColumnName).matchesRegularExpression(DATATABLE_COLUMN_NAME_REGEX_PATTERN);
validateType(baseDataValidator, column);
- final Boolean mandatory =
this.fromApiJsonHelper.extractBooleanNamed(MANDATORY, column);
- final Boolean unique =
this.fromApiJsonHelper.extractBooleanNamed(UNIQUE, column);
- final Boolean indexed =
this.fromApiJsonHelper.extractBooleanNamed(INDEXED, column);
-
baseDataValidator.reset().parameter(MANDATORY).value(mandatory).ignoreIfNull().notBlank().isOneOfTheseValues(true,
false);
-
baseDataValidator.reset().parameter(UNIQUE).value(unique).ignoreIfNull().notBlank().isOneOfTheseValues(true,
false);
-
baseDataValidator.reset().parameter(INDEXED).value(indexed).ignoreIfNull().notBlank().isOneOfTheseValues(true,
false);
+ final Boolean mandatory =
this.fromApiJsonHelper.extractBooleanNamed(API_FIELD_MANDATORY, column);
+ final Boolean unique =
this.fromApiJsonHelper.extractBooleanNamed(API_FIELD_UNIQUE, column);
+ final Boolean indexed =
this.fromApiJsonHelper.extractBooleanNamed(API_FIELD_INDEXED, column);
+
baseDataValidator.reset().parameter(API_FIELD_MANDATORY).value(mandatory).ignoreIfNull().notBlank().isOneOfTheseValues(true,
+ false);
+
baseDataValidator.reset().parameter(API_FIELD_UNIQUE).value(unique).ignoreIfNull().notBlank().isOneOfTheseValues(true,
+ false);
+
baseDataValidator.reset().parameter(API_FIELD_INDEXED).value(indexed).ignoreIfNull().notBlank().isOneOfTheseValues(true,
+ false);
}
}
throwExceptionIfValidationWarningsExist(dataValidationErrors);
}
- private void validateEntitySubType(final DataValidatorBuilder
baseDataValidator, final JsonElement element, final String apptableName) {
Review Comment:
Same with this. This has been moved down unnecessarily.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]