This is an automated email from the ASF dual-hosted git repository.

arvindsh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/fluo.git


The following commit(s) were added to refs/heads/master by this push:
     new 957eaed  Add Hadoop dirs to classpath and native lib path (#1101)
957eaed is described below

commit 957eaed4fed01cf93304b645cfcd27a42c5fb9d3
Author: Arvind Shyamsundar <[email protected]>
AuthorDate: Fri Jul 10 09:50:31 2020 -0700

    Add Hadoop dirs to classpath and native lib path (#1101)
    
    * Adds the $HADOOP_HOME/etc/hadoop directory to Fluo classpath, so that
      hdfs-site.xml can be located and loaded
    * Adds the Hadoop native library directory to the LD_LIBRARY_PATH /
      DYLD_LIBRARY_PATH for the OS
---
 modules/distribution/src/main/config/fluo-env.sh | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)

diff --git a/modules/distribution/src/main/config/fluo-env.sh b/modules/distribution/src/main/config/fluo-env.sh
index 116f6d2..6942188 100755
--- a/modules/distribution/src/main/config/fluo-env.sh
+++ b/modules/distribution/src/main/config/fluo-env.sh
@@ -42,6 +42,12 @@ JAVA_OPTS=("${FLUO_JAVA_OPTS[@]}" "-Dlog4j.configuration=file:${FLUO_LOG4J_CONFI
 
 export JAVA_OPTS
 
+## Add Hadoop native libraries to shared library paths given operating system
+case "$(uname)" in
+  Darwin) export DYLD_LIBRARY_PATH="${HADOOP_HOME}/lib/native:${DYLD_LIBRARY_PATH}" ;;
+  *)      export LD_LIBRARY_PATH="${HADOOP_HOME}/lib/native:${LD_LIBRARY_PATH}" ;;
+esac
+
 ##########################
 # Build CLASSPATH variable
 ##########################
@@ -51,7 +57,7 @@ export JAVA_OPTS
 # ways to setup the classpath with these jars.  Go to the end of the file for
 # more info.
 
-addToClasspath() 
+addToClasspath()
 {
   local dir=$1
   local filterRegex=$2
@@ -79,6 +85,11 @@ setupClasspathFromSystem()
   CLASSPATH="$lib/*"
   CLASSPATH="$CLASSPATH:$lib/log4j/*"
 
+  # Include Hadoop conf folder in classpath. This allows the Hadoop client calls
+  # inside Fluo to cleanly resolve and load hdfs-site.xml, which in turn allows
+  # the usage of a Highly Available (HA) HDFS for the Fluo dfsRoot.
+  CLASSPATH="$CLASSPATH:$HADOOP_HOME/etc/hadoop"
+
   #any jars matching this pattern is excluded from classpath
   
EXCLUDE_RE="(.*log4j.*)|(.*asm.*)|(.*guava.*)|(.*gson.*)|(.*hadoop-client-minicluster.*)"
 

Reply via email to