Skip to content

Commit 61bfcaf

Browse files
authored
BIGTOP-3718: Fix MapReduce2 service-check in Ambari Mpack (apache#932)
The patch fixes the issues with the MapReduce2 service check and the "JAR does not exist" error. Change-Id: I59ad4d3ecbe88af7bea25bf7177502ebb3e2cf6b Signed-off-by: Yuqi Gu <[email protected]>
1 parent 9519f17 commit 61bfcaf

File tree

3 files changed

+16
-12
lines changed

3 files changed

+16
-12
lines changed

bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/mapred_service_check.py

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -119,11 +119,18 @@ def service_check(self, env):
119119
env.set_params(params)
120120

121121
jar_path = format("{hadoop_mapred2_jar_location}/{hadoopMapredExamplesJarName}")
122+
source_file = format("/etc/passwd")
122123
input_file = format("/user/{smokeuser}/mapredsmokeinput")
123124
output_file = format("/user/{smokeuser}/mapredsmokeoutput")
124125

125-
test_cmd = format("fs -test -e {output_file}")
126+
hdfs_put_cmd = format("fs -put {source_file} {input_file}")
126127
run_wordcount_job = format("jar {jar_path} wordcount {input_file} {output_file}")
128+
test_cmd = format("fs -test -e {output_file}")
129+
130+
ExecuteHadoop(hdfs_put_cmd,
131+
user=params.smokeuser,
132+
bin_dir=params.execute_path,
133+
conf_dir=params.hadoop_conf_dir)
127134

128135
params.HdfsResource(format("/user/{smokeuser}"),
129136
type="directory",
@@ -136,12 +143,6 @@ def service_check(self, env):
136143
type = "directory",
137144
dfs_type = params.dfs_type,
138145
)
139-
params.HdfsResource(input_file,
140-
action = "create_on_execute",
141-
type = "file",
142-
source = "/etc/passwd",
143-
dfs_type = params.dfs_type,
144-
)
145146
params.HdfsResource(None, action="execute")
146147

147148
# initialize the ticket

bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_linux.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -127,8 +127,11 @@ def get_spark_version(service_name, component_name, yarn_version):
127127
# these are used to render the classpath for picking up Spark classes
128128
# in the event that spark is not installed, then we must default to the vesrion of YARN installed
129129
# since it will still load classes from its own spark version
130-
spark_version = get_spark_version("SPARK", "SPARK_CLIENT", version)
131-
spark2_version = get_spark_version("SPARK2", "SPARK2_CLIENT", version)
130+
131+
# No Spark services in current Mpack;
132+
# TODO: Add Spark into stack;
133+
#spark_version = get_spark_version("SPARK", "SPARK_CLIENT", version)
134+
#spark2_version = get_spark_version("SPARK2", "SPARK2_CLIENT", version)
132135

133136
stack_supports_ranger_kerberos = check_stack_feature(StackFeature.RANGER_KERBEROS_SUPPORT, version_for_stack_feature_checks)
134137
stack_supports_ranger_audit_db = check_stack_feature(StackFeature.RANGER_AUDIT_DB_SUPPORT, version_for_stack_feature_checks)
@@ -294,8 +297,8 @@ def get_spark_version(service_name, component_name, yarn_version):
294297
nm_log_dir_to_mount_file = "/var/lib/ambari-agent/data/yarn/yarn_log_dir_mount.hist"
295298
nm_local_dir_to_mount_file = "/var/lib/ambari-agent/data/yarn/yarn_local_dir_mount.hist"
296299

297-
distrAppJarName = "hadoop-yarn-applications-distributedshell-2.*.jar"
298-
hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
300+
distrAppJarName = "hadoop-yarn-applications-distributedshell-3.*.jar"
301+
hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-3.*.jar"
299302

300303
entity_file_history_directory = "/tmp/entity-file-history/active"
301304

bigtop-packages/src/common/bigtop-ambari-mpack/bgtp-ambari-mpack/src/main/resources/stacks/BGTP/1.0/services/YARN/package/scripts/params_windows.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@
5555
hs_webui_address = format("{hs_host}:{hs_port}")
5656

5757
hadoop_mapred2_jar_location = os.path.join(os.environ["HADOOP_COMMON_HOME"], "share", "hadoop", "mapreduce")
58-
hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-2.*.jar"
58+
hadoopMapredExamplesJarName = "hadoop-mapreduce-examples-3.*.jar"
5959

6060
exclude_hosts = default("/clusterHostInfo/decom_nm_hosts", [])
6161
exclude_file_path = default("/configurations/yarn-site/yarn.resourcemanager.nodes.exclude-path","/etc/hadoop/conf/yarn.exclude")

0 commit comments

Comments
 (0)