Hi everyone. I tried many tips found in this forum and on the Internet, but nothing helped. I tried to start the NameNode both manually and from Ambari Server (version 1.7.0). No luck. My log file is below. I would appreciate any help. Cheers!
stderr: /var/lib/ambari-agent/data/errors-126.txt
Traceback (most recent call last):
File “/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py”, line 134, in <module>
NameNode().execute()
File “/usr/lib/python2.6/site-packages/resource_management/libraries/script/script.py”, line 123, in execute
method(env)
File “/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HDFS/package/scripts/namenode.py”, line 46, in start
namenode(action=”start”)
File “/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py”, line 33, in namenode
format_namenode()
File “/var/lib/ambari-agent/cache/stacks/HDP/2.0.6/services/HDFS/package/scripts/hdfs_namenode.py”, line 124, in format_namenode
path=”/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin”
File “/usr/lib/python2.6/site-packages/resource_management/core/base.py”, line 148, in __init__
self.env.run()
File “/usr/lib/python2.6/site-packages/resource_management/core/environment.py”, line 149, in run
self.run_action(resource, action)
File “/usr/lib/python2.6/site-packages/resource_management/core/environment.py”, line 115, in run_action
provider_action()
File “/usr/lib/python2.6/site-packages/resource_management/core/providers/system.py”, line 237, in action_run
path=self.resource.path)
File “/usr/lib/python2.6/site-packages/resource_management/core/shell.py”, line 36, in checked_call
return _call(command, logoutput, True, cwd, env, preexec_fn, user, wait_for_finish, timeout, path)
File “/usr/lib/python2.6/site-packages/resource_management/core/shell.py”, line 101, in _call
err_msg = Logger.get_protected_text(("Execution of '%s' returned %d. %s") % (command, code, out))
UnicodeDecodeError: 'ascii' codec can't decode byte 0xc5 in position 277: ordinal not in range(128)
stdout: /var/lib/ambari-agent/data/output-126.txt
2015-04-08 14:55:27,486 – Group[‘hadoop’] {‘ignore_failures': False}
2015-04-08 14:55:27,487 – Modifying group hadoop
2015-04-08 14:55:27,618 – Group[‘nobody’] {‘ignore_failures': False}
2015-04-08 14:55:27,618 – Modifying group nobody
2015-04-08 14:55:27,711 – Group[‘users’] {‘ignore_failures': False}
2015-04-08 14:55:27,712 – Modifying group users
2015-04-08 14:55:27,797 – Group[‘nagios’] {‘ignore_failures': False}
2015-04-08 14:55:27,798 – Modifying group nagios
2015-04-08 14:55:27,882 – User[‘nobody’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’nobody’]}
2015-04-08 14:55:27,882 – Modifying user nobody
2015-04-08 14:55:27,899 – User[‘mapred’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-08 14:55:27,900 – Modifying user mapred
2015-04-08 14:55:27,917 – User[‘nagios’] {‘gid': ‘nagios’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-08 14:55:27,917 – Modifying user nagios
2015-04-08 14:55:27,940 – User[‘hbase’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-08 14:55:27,940 – Modifying user hbase
2015-04-08 14:55:27,961 – User[‘ambari-qa’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’users’]}
2015-04-08 14:55:27,961 – Modifying user ambari-qa
2015-04-08 14:55:27,985 – User[‘zookeeper’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-08 14:55:27,986 – Modifying user zookeeper
2015-04-08 14:55:28,010 – User[‘tez’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’users’]}
2015-04-08 14:55:28,010 – Modifying user tez
2015-04-08 14:55:28,032 – User[‘hdfs’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-08 14:55:28,032 – Modifying user hdfs
2015-04-08 14:55:28,054 – User[‘yarn’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-08 14:55:28,054 – Modifying user yarn
2015-04-08 14:55:28,079 – File[‘/var/lib/ambari-agent/data/tmp/changeUid.sh’] {‘content': StaticFile(‘changeToSecureUid.sh’), ‘mode': 0555}
2015-04-08 14:55:28,081 – Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null’] {‘not_if': ‘test $(id -u ambari-qa) -gt 1000′}
2015-04-08 14:55:28,105 – Skipping Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null’] due to not_if
2015-04-08 14:55:28,106 – File[‘/var/lib/ambari-agent/data/tmp/changeUid.sh’] {‘content': StaticFile(‘changeToSecureUid.sh’), ‘mode': 0555}
2015-04-08 14:55:28,107 – Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null’] {‘not_if': ‘test $(id -u hbase) -gt 1000′}
2015-04-08 14:55:28,137 – Skipping Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null’] due to not_if
2015-04-08 14:55:28,137 – Directory[‘/etc/hadoop/conf.empty’] {‘owner': ‘root’, ‘group': ‘root’, ‘recursive': True}
2015-04-08 14:55:28,138 – Link[‘/etc/hadoop/conf’] {‘not_if': ‘ls /etc/hadoop/conf’, ‘to': ‘/etc/hadoop/conf.empty’}
2015-04-08 14:55:28,155 – Skipping Link[‘/etc/hadoop/conf’] due to not_if
2015-04-08 14:55:28,172 – File[‘/etc/hadoop/conf/hadoop-env.sh’] {‘content': InlineTemplate(…), ‘owner': ‘hdfs’}
2015-04-08 14:55:28,189 – Execute[‘/bin/echo 0 > /selinux/enforce’] {‘only_if': ‘test -f /selinux/enforce’}
2015-04-08 14:55:28,204 – Skipping Execute[‘/bin/echo 0 > /selinux/enforce’] due to only_if
2015-04-08 14:55:28,206 – Execute[‘mkdir -p /usr/hdp/current/hadoop-client/lib/native/Linux-i386-32; ln -sf /usr/hdp/current/hadoop-client/lib/libsnappy.so /usr/hdp/current/hadoop-client/lib/native/Linux-i386-32/libsnappy.so’] {}
2015-04-08 14:55:28,225 – Execute[‘mkdir -p /usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64; ln -sf /usr/hdp/current/hadoop-client/lib64/libsnappy.so /usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64/libsnappy.so’] {}
2015-04-08 14:55:28,243 – Directory[‘/var/log/hadoop’] {‘owner': ‘root’, ‘group': ‘hadoop’, ‘mode': 0775, ‘recursive': True}
2015-04-08 14:55:28,243 – Directory[‘/var/run/hadoop’] {‘owner': ‘root’, ‘group': ‘root’, ‘recursive': True}
2015-04-08 14:55:28,244 – Directory[‘/tmp/hadoop-hdfs’] {‘owner': ‘hdfs’, ‘recursive': True}
2015-04-08 14:55:28,250 – File[‘/etc/hadoop/conf/commons-logging.properties’] {‘content': Template(‘commons-logging.properties.j2′), ‘owner': ‘hdfs’}
2015-04-08 14:55:28,253 – File[‘/etc/hadoop/conf/health_check’] {‘content': Template(‘health_check-v2.j2′), ‘owner': ‘hdfs’}
2015-04-08 14:55:28,253 – File[‘/etc/hadoop/conf/log4j.properties’] {‘content': ‘…’, ‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘mode': 0644}
2015-04-08 14:55:28,260 – File[‘/etc/hadoop/conf/hadoop-metrics2.properties’] {‘content': Template(‘hadoop-metrics2.properties.j2′), ‘owner': ‘hdfs’}
2015-04-08 14:55:28,260 – File[‘/etc/hadoop/conf/task-log4j.properties’] {‘content': StaticFile(‘task-log4j.properties’), ‘mode': 0755}
2015-04-08 14:55:28,450 – Directory[‘/etc/security/limits.d’] {‘owner': ‘root’, ‘group': ‘root’, ‘recursive': True}
2015-04-08 14:55:28,457 – File[‘/etc/security/limits.d/hdfs.conf’] {‘content': Template(‘hdfs.conf.j2′), ‘owner': ‘root’, ‘group': ‘root’, ‘mode': 0644}
2015-04-08 14:55:28,458 – XmlConfig[‘hadoop-policy.xml’] {‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘conf_dir': ‘/etc/hadoop/conf’, ‘configuration_attributes': …, ‘configurations': …}
2015-04-08 14:55:28,473 – Generating config: /etc/hadoop/conf/hadoop-policy.xml
2015-04-08 14:55:28,474 – File[‘/etc/hadoop/conf/hadoop-policy.xml’] {‘owner': ‘hdfs’, ‘content': InlineTemplate(…), ‘group': ‘hadoop’, ‘mode': None, ‘encoding': ‘UTF-8′}
2015-04-08 14:55:28,475 – Writing File[‘/etc/hadoop/conf/hadoop-policy.xml’] because contents don’t match
2015-04-08 14:55:28,475 – XmlConfig[‘hdfs-site.xml’] {‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘conf_dir': ‘/etc/hadoop/conf’, ‘configuration_attributes': …, ‘configurations': …}
2015-04-08 14:55:28,489 – Generating config: /etc/hadoop/conf/hdfs-site.xml
2015-04-08 14:55:28,490 – File[‘/etc/hadoop/conf/hdfs-site.xml’] {‘owner': ‘hdfs’, ‘content': InlineTemplate(…), ‘group': ‘hadoop’, ‘mode': None, ‘encoding': ‘UTF-8′}
2015-04-08 14:55:28,492 – Writing File[‘/etc/hadoop/conf/hdfs-site.xml’] because contents don’t match
2015-04-08 14:55:28,492 – XmlConfig[‘core-site.xml’] {‘group': ‘hadoop’, ‘conf_dir': ‘/etc/hadoop/conf’, ‘mode': 0644, ‘configuration_attributes': …, ‘owner': ‘hdfs’, ‘configurations': …}
2015-04-08 14:55:28,506 – Generating config: /etc/hadoop/conf/core-site.xml
2015-04-08 14:55:28,507 – File[‘/etc/hadoop/conf/core-site.xml’] {‘owner': ‘hdfs’, ‘content': InlineTemplate(…), ‘group': ‘hadoop’, ‘mode': 0644, ‘encoding': ‘UTF-8′}
2015-04-08 14:55:28,508 – Writing File[‘/etc/hadoop/conf/core-site.xml’] because contents don’t match
2015-04-08 14:55:28,510 – File[‘/etc/hadoop/conf/slaves’] {‘content': Template(‘slaves.j2′), ‘owner': ‘hdfs’}
2015-04-08 14:55:28,511 – Directory[‘/hadoop/hdfs/namenode’] {‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘recursive': True, ‘mode': 0755}
2015-04-08 14:55:28,512 – File[‘/var/lib/ambari-agent/data/tmp/checkForFormat.sh’] {‘content': StaticFile(‘checkForFormat.sh’), ‘mode': 0755}
2015-04-08 14:55:28,513 – Execute[‘/var/lib/ambari-agent/data/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /usr/hdp/current/hadoop-client/bin /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode’] {‘path': [‘/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin’], ‘not_if': ‘test -d /var/run/hadoop/hdfs/namenode/formatted/ || test -d /var/lib/hdfs/namenode/formatted/’}
2015-04-02 15:06:02,613 – User[‘nobody’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’nobody’]}
2015-04-02 15:06:02,614 – Modifying user nobody
2015-04-02 15:06:02,630 – User[‘mapred’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-02 15:06:02,630 – Modifying user mapred
2015-04-02 15:06:02,650 – User[‘nagios’] {‘gid': ‘nagios’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-02 15:06:02,650 – Modifying user nagios
2015-04-02 15:06:02,672 – User[‘hbase’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-02 15:06:02,672 – Modifying user hbase
2015-04-02 15:06:02,693 – User[‘ambari-qa’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’users’]}
2015-04-02 15:06:02,694 – Modifying user ambari-qa
2015-04-02 15:06:02,709 – User[‘zookeeper’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-02 15:06:02,710 – Modifying user zookeeper
2015-04-02 15:06:02,732 – User[‘tez’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’users’]}
2015-04-02 15:06:02,732 – Modifying user tez
2015-04-02 15:06:02,749 – User[‘hdfs’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-02 15:06:02,750 – Modifying user hdfs
2015-04-02 15:06:02,765 – User[‘yarn’] {‘gid': ‘hadoop’, ‘ignore_failures': False, ‘groups': [u’hadoop’]}
2015-04-02 15:06:02,766 – Modifying user yarn
2015-04-02 15:06:02,787 – File[‘/var/lib/ambari-agent/data/tmp/changeUid.sh’] {‘content': StaticFile(‘changeToSecureUid.sh’), ‘mode': 0555}
2015-04-02 15:06:02,789 – Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null’] {‘not_if': ‘test $(id -u ambari-qa) -gt 1000′}
2015-04-02 15:06:02,803 – Skipping Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh ambari-qa /tmp/hadoop-ambari-qa,/tmp/hsperfdata_ambari-qa,/home/ambari-qa,/tmp/ambari-qa,/tmp/sqoop-ambari-qa 2>/dev/null’] due to not_if
2015-04-02 15:06:02,804 – File[‘/var/lib/ambari-agent/data/tmp/changeUid.sh’] {‘content': StaticFile(‘changeToSecureUid.sh’), ‘mode': 0555}
2015-04-02 15:06:02,805 – Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null’] {‘not_if': ‘test $(id -u hbase) -gt 1000′}
2015-04-02 15:06:02,824 – Skipping Execute[‘/var/lib/ambari-agent/data/tmp/changeUid.sh hbase /home/hbase,/tmp/hbase,/usr/bin/hbase,/var/log/hbase,/hadoop/hbase 2>/dev/null’] due to not_if
2015-04-02 15:06:02,824 – Directory[‘/etc/hadoop/conf.empty’] {‘owner': ‘root’, ‘group': ‘root’, ‘recursive': True}
2015-04-02 15:06:02,825 – Link[‘/etc/hadoop/conf’] {‘not_if': ‘ls /etc/hadoop/conf’, ‘to': ‘/etc/hadoop/conf.empty’}
2015-04-02 15:06:02,839 – Skipping Link[‘/etc/hadoop/conf’] due to not_if
2015-04-02 15:06:02,861 – File[‘/etc/hadoop/conf/hadoop-env.sh’] {‘content': InlineTemplate(…), ‘owner': ‘hdfs’}
2015-04-02 15:06:02,878 – Execute[‘/bin/echo 0 > /selinux/enforce’] {‘only_if': ‘test -f /selinux/enforce’}
2015-04-02 15:06:02,891 – Skipping Execute[‘/bin/echo 0 > /selinux/enforce’] due to only_if
2015-04-02 15:06:02,894 – Execute[‘mkdir -p /usr/hdp/current/hadoop-client/lib/native/Linux-i386-32; ln -sf /usr/hdp/current/hadoop-client/lib/libsnappy.so /usr/hdp/current/hadoop-client/lib/native/Linux-i386-32/libsnappy.so’] {}
2015-04-02 15:06:02,911 – Execute[‘mkdir -p /usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64; ln -sf /usr/hdp/current/hadoop-client/lib64/libsnappy.so /usr/hdp/current/hadoop-client/lib/native/Linux-amd64-64/libsnappy.so’] {}
2015-04-02 15:06:02,930 – Directory[‘/var/log/hadoop’] {‘owner': ‘root’, ‘group': ‘hadoop’, ‘mode': 0775, ‘recursive': True}
2015-04-02 15:06:02,931 – Directory[‘/var/run/hadoop’] {‘owner': ‘root’, ‘group': ‘root’, ‘recursive': True}
2015-04-02 15:06:02,931 – Directory[‘/tmp/hadoop-hdfs’] {‘owner': ‘hdfs’, ‘recursive': True}
2015-04-02 15:06:02,937 – File[‘/etc/hadoop/conf/commons-logging.properties’] {‘content': Template(‘commons-logging.properties.j2′), ‘owner': ‘hdfs’}
2015-04-02 15:06:02,940 – File[‘/etc/hadoop/conf/health_check’] {‘content': Template(‘health_check-v2.j2′), ‘owner': ‘hdfs’}
2015-04-02 15:06:02,940 – File[‘/etc/hadoop/conf/log4j.properties’] {‘content': ‘…’, ‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘mode': 0644}
2015-04-02 15:06:02,946 – File[‘/etc/hadoop/conf/hadoop-metrics2.properties’] {‘content': Template(‘hadoop-metrics2.properties.j2′), ‘owner': ‘hdfs’}
2015-04-02 15:06:02,947 – File[‘/etc/hadoop/conf/task-log4j.properties’] {‘content': StaticFile(‘task-log4j.properties’), ‘mode': 0755}
2015-04-02 15:06:03,158 – Directory[‘/etc/security/limits.d’] {‘owner': ‘root’, ‘group': ‘root’, ‘recursive': True}
2015-04-02 15:06:03,165 – File[‘/etc/security/limits.d/hdfs.conf’] {‘content': Template(‘hdfs.conf.j2′), ‘owner': ‘root’, ‘group': ‘root’, ‘mode': 0644}
2015-04-02 15:06:03,166 – XmlConfig[‘hadoop-policy.xml’] {‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘conf_dir': ‘/etc/hadoop/conf’, ‘configuration_attributes': …, ‘configurations': …}
2015-04-02 15:06:03,182 – Generating config: /etc/hadoop/conf/hadoop-policy.xml
2015-04-02 15:06:03,183 – File[‘/etc/hadoop/conf/hadoop-policy.xml’] {‘owner': ‘hdfs’, ‘content': InlineTemplate(…), ‘group': ‘hadoop’, ‘mode': None, ‘encoding': ‘UTF-8′}
2015-04-02 15:06:03,184 – Writing File[‘/etc/hadoop/conf/hadoop-policy.xml’] because contents don’t match
2015-04-02 15:06:03,184 – XmlConfig[‘hdfs-site.xml’] {‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘conf_dir': ‘/etc/hadoop/conf’, ‘configuration_attributes': …, ‘configurations': …}
2015-04-02 15:06:03,198 – Generating config: /etc/hadoop/conf/hdfs-site.xml
2015-04-02 15:06:03,199 – File[‘/etc/hadoop/conf/hdfs-site.xml’] {‘owner': ‘hdfs’, ‘content': InlineTemplate(…), ‘group': ‘hadoop’, ‘mode': None, ‘encoding': ‘UTF-8′}
2015-04-02 15:06:03,201 – Writing File[‘/etc/hadoop/conf/hdfs-site.xml’] because contents don’t match
2015-04-02 15:06:03,201 – XmlConfig[‘core-site.xml’] {‘group': ‘hadoop’, ‘conf_dir': ‘/etc/hadoop/conf’, ‘mode': 0644, ‘configuration_attributes': …, ‘owner': ‘hdfs’, ‘configurations': …}
2015-04-02 15:06:03,214 – Generating config: /etc/hadoop/conf/core-site.xml
2015-04-02 15:06:03,215 – File[‘/etc/hadoop/conf/core-site.xml’] {‘owner': ‘hdfs’, ‘content': InlineTemplate(…), ‘group': ‘hadoop’, ‘mode': 0644, ‘encoding': ‘UTF-8′}
2015-04-02 15:06:03,216 – Writing File[‘/etc/hadoop/conf/core-site.xml’] because contents don’t match
2015-04-02 15:06:03,218 – File[‘/etc/hadoop/conf/slaves’] {‘content': Template(‘slaves.j2′), ‘owner': ‘hdfs’}
2015-04-02 15:06:03,219 – Directory[‘/hadoop/hdfs/namenode’] {‘owner': ‘hdfs’, ‘group': ‘hadoop’, ‘recursive': True, ‘mode': 0755}
2015-04-02 15:06:03,220 – File[‘/var/lib/ambari-agent/data/tmp/checkForFormat.sh’] {‘content': StaticFile(‘checkForFormat.sh’), ‘mode': 0755}
2015-04-02 15:06:03,221 – Execute[‘/var/lib/ambari-agent/data/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /usr/hdp/current/hadoop-client/bin /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode’] {‘path': [‘/usr/sbin:/sbin:/usr/local/bin:/bin:/usr/bin’], ‘not_if': ‘test -d /var/run/hadoop/hdfs/namenode/formatted/ || test -d /var/lib/hdfs/namenode/formatted/’}
2015-04-02 15:06:03,234 – Skipping Execute[‘/var/lib/ambari-agent/data/tmp/checkForFormat.sh hdfs /etc/hadoop/conf /usr/hdp/current/hadoop-client/bin /var/run/hadoop/hdfs/namenode/formatted/ /var/lib/hdfs/namenode/formatted/ /hadoop/hdfs/namenode’] due to not_if
2015-04-02 15:06:03,234 – Directory[‘/var/lib/hdfs/namenode/formatted/’] {‘recursive': True}
2015-04-02 15:06:03,237 – File[‘/etc/hadoop/conf/dfs.exclude’] {‘owner': ‘hdfs’, ‘content': Template(‘exclude_hosts_list.j2′), ‘group': ‘hadoop’}
2015-04-02 15:06:03,239 – Directory[‘/var/run/hadoop/hdfs’] {‘owner': ‘hdfs’, ‘recursive': True}
2015-04-02 15:06:03,240 – Directory[‘/var/log/hadoop/hdfs’] {‘owner': ‘hdfs’, ‘recursive': True}
2015-04-02 15:06:03,241 - File['/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid'] {'action': ['delete'], 'not_if': 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1'}
2015-04-02 15:06:03,268 – Deleting File[‘/var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid’]
2015-04-02 15:06:03,268 - Execute['ulimit -c unlimited; su -s /bin/bash - hdfs -c 'export HADOOP_LIBEXEC_DIR=/usr/hdp/current/hadoop-client/libexec && /usr/hdp/current/hadoop-client/sbin/hadoop-daemon.sh --config /etc/hadoop/conf start namenode''] {'not_if': 'ls /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid >/dev/null 2>&1 && ps -p `cat /var/run/hadoop/hdfs/hadoop-hdfs-namenode.pid` >/dev/null 2>&1'}
2015-04-02 15:06:07,398 - Execute['su -s /bin/bash - hdfs -c 'export PATH=$PATH:/usr/hdp/current/hadoop-client/bin ; hdfs --config /etc/hadoop/conf dfsadmin -safemode get' | grep 'Safe mode is OFF''] {'tries': 40, 'only_if': None, 'try_sleep': 10}