This is an automated email from the ASF dual-hosted git repository.
xxyu pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/kylin.git
The following commit(s) were added to refs/heads/main by this push:
new b207ca1 HOTFIX fix slf4j jar version conflict
b207ca1 is described below
commit b207ca1666213807177a4a6ae711efecb4ac9cbc
Author: Zhichao Zhang <[email protected]>
AuthorDate: Tue Aug 10 11:57:59 2021 +0800
HOTFIX fix slf4j jar version conflict
---
core-common/src/main/resources/kylin-defaults.properties | 2 +-
.../engine/spark/common/logging/SparkExecutorHdfsAppender.java | 6 ------
2 files changed, 1 insertion(+), 7 deletions(-)
diff --git a/core-common/src/main/resources/kylin-defaults.properties b/core-common/src/main/resources/kylin-defaults.properties
index c990ac2..1009ee6 100644
--- a/core-common/src/main/resources/kylin-defaults.properties
+++ b/core-common/src/main/resources/kylin-defaults.properties
@@ -286,7 +286,7 @@
kylin.query.spark-conf.spark.serializer=org.apache.spark.serializer.JavaSerializer
#kylin.query.spark-conf.spark.yarn.jars=hdfs://localhost:9000/spark2_jars/*
kylin.query.spark-conf.spark.hadoop.yarn.timeline-service.enabled=false
-kylin.query.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current -Dlog4j.configuration=spark-executor-log4j.properties -Dlog4j.debug -Dkylin.hdfs.working.dir=${kylin.env.hdfs-working-dir} -Dkylin.metadata.identifier=${kylin.metadata.url.identifier} -Dkylin.spark.category=sparder
+kylin.query.spark-conf.spark.executor.extraJavaOptions=-Dhdp.version=current -Dlog4j.configuration=spark-executor-log4j.properties -Dlog4j.debug -Dkylin.hdfs.working.dir=${kylin.env.hdfs-working-dir} -Dkylin.metadata.identifier=${kylin.metadata.url.identifier} -Dkylin.spark.category=sparder -Dkylin.spark.identifier={{APP_ID}}
# uncomment for HDP
#kylin.query.spark-conf.spark.driver.extraJavaOptions=-Dhdp.version=current
#kylin.query.spark-conf.spark.yarn.am.extraJavaOptions=-Dhdp.version=current
diff --git a/kylin-spark-project/kylin-spark-common/src/main/java/org/apache/kylin/engine/spark/common/logging/SparkExecutorHdfsAppender.java b/kylin-spark-project/kylin-spark-common/src/main/java/org/apache/kylin/engine/spark/common/logging/SparkExecutorHdfsAppender.java
index 0bbd3e7..f341340 100644
--- a/kylin-spark-project/kylin-spark-common/src/main/java/org/apache/kylin/engine/spark/common/logging/SparkExecutorHdfsAppender.java
+++ b/kylin-spark-project/kylin-spark-common/src/main/java/org/apache/kylin/engine/spark/common/logging/SparkExecutorHdfsAppender.java
@@ -30,7 +30,6 @@ import org.apache.log4j.helpers.LogLog;
import org.apache.log4j.spi.LoggingEvent;
import org.apache.spark.SparkEnv;
import org.apache.spark.deploy.SparkHadoopUtil;
-import org.apache.spark.deploy.yarn.YarnSparkHadoopUtil;
import scala.runtime.BoxedUnit;
import java.io.File;
@@ -112,11 +111,6 @@ public class SparkExecutorHdfsAppender extends AbstractHdfsLogAppender {
@Override
void init() {
- if (StringUtils.isBlank(this.identifier)) {
-            this.identifier = YarnSparkHadoopUtil.getContainerId().getApplicationAttemptId().getApplicationId()
-                    .toString();
- }
-
LogLog.warn("metadataIdentifier -> " + getMetadataIdentifier());
LogLog.warn("category -> " + getCategory());
LogLog.warn("identifier -> " + getIdentifier());