################################################################################
# Copyright (C) 2014 HPCC Systems.
#
# All rights reserved. This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
################################################################################
#Product Name: ${CMAKE_PROJECT_NAME}-${version}-${stagever}${packageRevisionArch}
#This version of H2H was built targeting Hadoop version: ${HADOOP_VER}
HADOOP_VERSION=${HADOOP_VER}
#HADOOP_CONF_DIR = location of Hadoop config files
#Use this default if Hadoop was installed via a deb/rpm install package
HADOOP_CONF_DIR=${HADOOP_STD_CONF_PATH}
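#For reference only: on many packaged (deb/rpm) installs the config directory is commonly
#/etc/hadoop/conf; adjust to your environment if it differs. Hypothetical example:
#HADOOP_CONF_DIR=/etc/hadoop/conf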
#HADOOP_SHARE_DIR = location of Hadoop share files
#IT IS VERY IMPORTANT TO SET THIS VARIABLE CORRECTLY
#This is used to set up the classpath for libhdfs.
#
# per Hadoop: http://hadoop.apache.org/docs/r2.3.0/hadoop-project-dist/hadoop-hdfs/LibHdfs.html
# One of the most common problems with libhdfs is the CLASSPATH is not set properly when calling a program that uses libhdfs.
# Make sure you set it to all the Hadoop jars needed to run Hadoop itself.
# Currently, there is no way to programmatically generate the classpath, but a good bet is to include all the jar files in
# $HADOOP_PREFIX and $HADOOP_PREFIX/lib as well as the right configuration directory containing hdfs-site.xml
# However, as it turns out, the jars live in different subfolders under the share directory.
#
# The share folder contains certain needed jar files such as:
# hadoop/hdfs/hadoop-hdfs-2.3.0.jar, hadoop/common/hadoop-common-2.3.0.jar, etc.
# NOTE: the contents of this path have changed with Hadoop's packaging strategy. Starting with Hadoop 2.x, the share
# folder only contains 32-bit binaries, and it is then the user's responsibility to build all necessary libs/jars.
# This path is used by the H2H connector to build the necessary libhdfs classpath.
#Example: HADOOP_SHARE_DIR=${HADOOP_STD_SHARE_PATH}
HADOOP_SHARE_DIR=${HADOOP_STD_SHARE_PATH}
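#Illustrative sketch only (not read by this file), assuming the standard Hadoop 2.x share layout described above;
#a wrapper script could assemble a libhdfs CLASSPATH roughly like this (the connector's actual logic may differ):
#  CLASSPATH=$HADOOP_CONF_DIR
#  for jar in $HADOOP_SHARE_DIR/hadoop/common/*.jar \
#             $HADOOP_SHARE_DIR/hadoop/common/lib/*.jar \
#             $HADOOP_SHARE_DIR/hadoop/hdfs/*.jar \
#             $HADOOP_SHARE_DIR/hadoop/hdfs/lib/*.jar; do
#    CLASSPATH=$CLASSPATH:$jar
#  done
#  export CLASSPATH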
#H2H searches for libhdfs at this location:
# ${H2H_LIBHDFS_PATH}
#If libhdfs resides elsewhere, append the correct location to H2H_LD_LIBRARY_PATH
#Example:
#H2H_LD_LIBRARY_PATH=$H2H_LD_LIBRARY_PATH:<path to libhdfs.so location>
H2H_LD_LIBRARY_PATH=${H2H_LIBHDFS_PATH}
#TARBALL_HADOOP_LOCATION = location of a tarball-based Hadoop installation
#If Hadoop was not installed via a deb/rpm install package,
#enable this variable.
#NOTE: from Hadoop version 2.2 until version 2.5.2, Hadoop stopped distributing pre-built 64-bit libraries
#This value is most likely only useful for Hadoop distributions prior to 2.2 or after 2.3
TARBALL_HADOOP_LOCATION=${TARBALLED_HADOOP_PATH}
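#Hypothetical example (adjust to wherever the Hadoop tarball was unpacked):
#TARBALL_HADOOP_LOCATION=/opt/hadoop-2.5.2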
#H2H default libjvm location:
# ${H2H_LIBJVM_PATH}
# If libjvm resides elsewhere, append the correct location to H2H_LD_LIBRARY_PATH
#Example:
#H2H_LD_LIBRARY_PATH=$H2H_LD_LIBRARY_PATH:${H2H_LIBJVM_PATH}
H2H_LD_LIBRARY_PATH=$H2H_LD_LIBRARY_PATH:${H2H_LIBJVM_PATH}
#LOGS_LOCATION = H2H log location
LOGS_LOCATION=$log
#DO NOT ALTER THIS LINE
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$H2H_LD_LIBRARY_PATH