Hi all,
I just set up HDP 2.3 on some virtual machines in Azure using Ambari 2.1.2.
I cannot get MapReduce to start: the History Server keeps failing with the following message:
stderr: /var/lib/ambari-agent/data/errors-200.txt
Traceback (most recent call last):
File "/var/lib/ambari-agent/cache/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py", line 176, in <module>
HistoryServer().execute()
File "/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py", line 219, in execute
method(env)
File "/var/lib/ambari-agent/cache/common-services/YARN/2.1.0.2.0/package/scripts/historyserver.py", line 99, in start
host_sys_prepped=params.host_sys_prepped)
File "/usr/lib/python2.6/site-packages/resource_management/libraries/functions/copy_tarball.py", line 199, in copy_to_hdfs
replace_existing_files=replace_existing_files,
File "/usr/lib/python2.6/site-packages/resource_management/core/base.py", line 154, in __init__
self.env.run()
File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 152, in run
self.run_action(resource, action)
File "/usr/lib/python2.6/site-packages/resource_management/core/environment.py", line 118, in run_action
provider_action()
File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 394, in action_create_on_execute
self.action_delayed("create")
File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 391, in action_delayed
self.get_hdfs_resource_executor().action_delayed(action_name, self)
File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 247, in action_delayed
self._create_resource()
File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 261, in _create_resource
self._create_file(self.main_resource.resource.target, source=self.main_resource.resource.source, mode=self.mode)
File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 314, in _create_file
self.util.run_command(target, 'CREATE', method='PUT', overwrite=True, assertable_result=False, file_to_put=source, **kwargs)
File "/usr/lib/python2.6/site-packages/resource_management/libraries/providers/hdfs_resource.py", line 202, in run_command
raise Fail(err_msg)
resource_management.core.exceptions.Fail: Execution of 'curl -sS -L -w '%{http_code}' -X PUT -T /usr/hdp/2.3.2.0-2950/hadoop/mapreduce.tar.gz 'http://ae-hdp-master-1:50070/webhdfs/v1/hdp/apps/2.3.2.0-2950/mapreduce/mapreduce.tar.gz?op=CREATE&user.name=hdfs&overwrite=True&permission=444'' returned status_code=403.
{
  "RemoteException": {
    "exception": "IOException",
    "javaClassName": "java.io.IOException",
    "message": "Failed to find datanode, suggest to check cluster health."
  }
}
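
Since the error itself suggests checking cluster health, this is what I was planning to run on the NameNode host (ae-hdp-master-1 from the error above) to see whether any DataNodes are actually registered and whether HDFS is still in safe mode (assuming the hdfs client is available there):

sudo -u hdfs hdfs dfsadmin -report
sudo -u hdfs hdfs dfsadmin -safemode get
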
It looks like some kind of security/permissions issue? Can you point me in the right direction?
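
In the meantime, if it does turn out to be a permissions problem, I can also try listing the target directory by hand as the hdfs user, along these lines (host and path taken from the error above):

curl -i 'http://ae-hdp-master-1:50070/webhdfs/v1/hdp/apps/2.3.2.0-2950/mapreduce?op=LISTSTATUS&user.name=hdfs'
sudo -u hdfs hdfs dfs -ls /hdp/apps/2.3.2.0-2950/
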
Kind regards,
Niels