Member since
09-02-2019
5
Posts
0
Kudos Received
0
Solutions
09-05-2019
06:10 PM
This is my
/etc/hosts File
127.0.0.1 localhost 127.0.1.1 master
192.168.56.101 master.hadoop.com 192.168.56.102 slave1.hadoop.com 192.168.56.103 slave2.hadoop.com 192.168.56.104 slave3.hadoop.com 192.168.56.105 slave4.hadoop.com 192.168.56.106 slave5.hadoop.com
# The following lines are desirable for IPv6 capable hosts ::1 ip6-localhost ip6-loopback fe00::0 ip6-localnet ff00::0 ip6-mcastprefix ff02::1 ip6-allnodes ff02::2 ip6-allrouters
ambari-agent.ini
[server] hostname=master.hadoop.com url_port=8440 secured_url_port=8441
[agent] logdir=/var/log/ambari-agent piddir=/var/run/ambari-agent prefix=/var/lib/ambari-agent/data ;loglevel=(DEBUG/INFO) loglevel=INFO data_cleanup_interval=86400 data_cleanup_max_age=2592000 data_cleanup_max_size_MB = 100 ping_port=8670 cache_dir=/var/lib/ambari-agent/cache tolerate_download_failures=true run_as_user=root parallel_execution=0 alert_grace_period=5 alert_kinit_timeout=14400000 system_resource_overrides=/etc/resource_overrides ; memory_threshold_soft_mb=400 ; memory_threshold_hard_mb=1000
[security] keysdir=/var/lib/ambari-agent/keys server_crt=ca.crt passphrase_env_var_name=AMBARI_PASSPHRASE ssl_verify_cert=0 force_https_protocol=PROTOCOL_TLSv1_2
[services] pidLookupPath=/var/run/
[heartbeat] state_interval_seconds=60 dirs=/etc/hadoop,/etc/hadoop/conf,/etc/hbase,/etc/hcatalog,/etc/hive,/etc/oozie, /etc/sqoop,/etc/ganglia, /var/run/hadoop,/var/run/zookeeper,/var/run/hbase,/var/run/templeton,/var/run/oozie, /var/log/hadoop,/var/log/zookeeper,/var/log/hbase,/var/run/templeton,/var/log/hive ; 0 - unlimited log_lines_count=300 idle_interval_min=1 idle_interval_max=10
[logging] syslog_enabled=0
ambari-server.properties
agent.package.install.task.timeout=1800 agent.stack.retry.on_repo_unavailability=false agent.stack.retry.tries=5 agent.task.timeout=900 agent.threadpool.size.max=25 ambari-server.user=root ambari.python.wrap=ambari-python-wrap bootstrap.dir=/var/run/ambari-server/bootstrap bootstrap.script=/usr/lib/python2.6/site-packages/ambari_server/bootstrap.py bootstrap.setup_agent.script=/usr/lib/python2.6/site-packages/ambari_server/setupAgent.py check_database_skipped=false client.threadpool.size.max=25 common.services.path=/var/lib/ambari-server/resources/common-services custom.action.definitions=/var/lib/ambari-server/resources/custom_action_definitions custom.mysql.jdbc.name=mysql-connector-java.jar custom.postgres.jdbc.name=postgresql-jdbc3.jar extensions.path=/var/lib/ambari-server/resources/extensions http.strict-transport-security=max-age=31536000 http.x-frame-options=DENY http.x-xss-protection=1; mode=block java.home=/usr/lib/jvm/java-8-openjdk-amd64 java.releases=jdk1.8,jdk1.7 jce.download.supported=true jdk.download.supported=true jdk1.7.desc=Oracle JDK 1.7 + Java Cryptography Extension (JCE) Policy Files 7 jdk1.7.dest-file=jdk-7u67-linux-x64.tar.gz jdk1.7.home=/usr/jdk64/ jdk1.7.jcpol-file=UnlimitedJCEPolicyJDK7.zip jdk1.7.jcpol-url=http://public-repo-1.hortonworks.com/ARTIFACTS/UnlimitedJCEPolicyJDK7.zip jdk1.7.re=(jdk.*)/jre jdk1.7.url=http://public-repo-1.hortonworks.com/ARTIFACTS/jdk-7u67-linux-x64.tar.gz jdk1.8.desc=Oracle JDK 1.8 + Java Cryptography Extension (JCE) Policy Files 8 jdk1.8.dest-file=jdk-8u77-linux-x64.tar.gz jdk1.8.home=/usr/lib/jvm/java-8-openjdk-amd64 jdk1.8.jcpol-file=jce_policy-8.zip jdk1.8.jcpol-url=http://public-repo-1.hortonworks.com/ARTIFACTS/jce_policy-8.zip jdk1.8.re=(jdk.*)/jre jdk1.8.url=http://public-repo-1.hortonworks.com/ARTIFACTS/jdk-8u77-linux-x64.tar.gz kerberos.keytab.cache.dir=/var/lib/ambari-server/data/cache metadata.path=/var/lib/ambari-server/resources/stacks mpacks.staging.path=/var/lib/ambari-server/resources/mpacks 
pid.dir=/var/run/ambari-server previous.custom.mysql.jdbc.name=mysql-connector-java.jar previous.custom.postgres.jdbc.name=postgresql-jdbc4.jar recommendations.artifacts.lifetime=1w recommendations.dir=/var/run/ambari-server/stack-recommendations resources.dir=/var/lib/ambari-server/resources rolling.upgrade.skip.packages.prefixes= security.server.disabled.ciphers=TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384|TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384|TLS_RSA_WITH_AES_256_CBC_SHA256|TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA384|TLS_ECDH_RSA_WITH_AES_256_CBC_SHA384|TLS_DHE_RSA_WITH_AES_256_CBC_SHA256|TLS_DHE_DSS_WITH_AES_256_CBC_SHA256|TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA|TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA|TLS_RSA_WITH_AES_256_CBC_SHA|TLS_ECDH_ECDSA_WITH_AES_256_CBC_SHA|TLS_ECDH_RSA_WITH_AES_256_CBC_SHA|TLS_DHE_RSA_WITH_AES_256_CBC_SHA|TLS_DHE_DSS_WITH_AES_256_CBC_SHA|TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256|TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256|TLS_RSA_WITH_AES_128_CBC_SHA256|TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA256|TLS_ECDH_RSA_WITH_AES_128_CBC_SHA256|TLS_DHE_RSA_WITH_AES_128_CBC_SHA256|TLS_DHE_DSS_WITH_AES_128_CBC_SHA256|TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA|TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA|TLS_RSA_WITH_AES_128_CBC_SHA|TLS_ECDH_ECDSA_WITH_AES_128_CBC_SHA|TLS_ECDH_RSA_WITH_AES_128_CBC_SHA|TLS_DHE_RSA_WITH_AES_128_CBC_SHA|TLS_DHE_DSS_WITH_AES_128_CBC_SHA|TLS_ECDHE_ECDSA_WITH_3DES_EDE_CBC_SHA|TLS_ECDHE_RSA_WITH_3DES_EDE_CBC_SHA|TLS_ECDH_ECDSA_WITH_3DES_EDE_CBC_SHA|TLS_ECDH_RSA_WITH_3DES_EDE_CBC_SHA|SSL_DHE_RSA_WITH_3DES_EDE_CBC_SHA|SSL_DHE_DSS_WITH_3DES_EDE_CBC_SHA|TLS_EMPTY_RENEGOTIATION_INFO_SCSV|TLS_DH_anon_WITH_AES_256_CBC_SHA256|TLS_ECDH_anon_WITH_AES_256_CBC_SHA|TLS_DH_anon_WITH_AES_256_CBC_SHA|TLS_DH_anon_WITH_AES_128_CBC_SHA256|TLS_ECDH_anon_WITH_AES_128_CBC_SHA|TLS_DH_anon_WITH_AES_128_CBC_SHA|TLS_ECDH_anon_WITH_3DES_EDE_CBC_SHA|SSL_DH_anon_WITH_3DES_EDE_CBC_SHA|SSL_RSA_WITH_DES_CBC_SHA|SSL_DHE_RSA_WITH_DES_CBC_SHA|SSL_DHE_DSS_WITH_DES_CBC_SHA|SSL_DH_anon_WITH_DES_CBC_SHA|SSL_R
SA_EXPORT_WITH_DES40_CBC_SHA|SSL_DHE_RSA_EXPORT_WITH_DES40_CBC_SHA|SSL_DHE_DSS_EXPORT_WITH_DES40_CBC_SHA|SSL_DH_anon_EXPORT_WITH_DES40_CBC_SHA|TLS_RSA_WITH_NULL_SHA256|TLS_ECDHE_ECDSA_WITH_NULL_SHA|TLS_ECDHE_RSA_WITH_NULL_SHA|SSL_RSA_WITH_NULL_SHA|TLS_ECDH_ECDSA_WITH_NULL_SHA|TLS_ECDH_RSA_WITH_NULL_SHA|TLS_ECDH_anon_WITH_NULL_SHA|SSL_RSA_WITH_NULL_MD5|TLS_KRB5_WITH_3DES_EDE_CBC_SHA|TLS_KRB5_WITH_3DES_EDE_CBC_MD5|TLS_KRB5_WITH_DES_CBC_SHA|TLS_KRB5_WITH_DES_CBC_MD5|TLS_KRB5_EXPORT_WITH_DES_CBC_40_SHA|TLS_KRB5_EXPORT_WITH_DES_CBC_40_MD5 security.server.keys_dir=/var/lib/ambari-server/keys server.connection.max.idle.millis=900000 server.execution.scheduler.isClustered=false server.execution.scheduler.maxDbConnections=5 server.execution.scheduler.maxThreads=5 server.execution.scheduler.misfire.toleration.minutes=480 server.fqdn.service.url=http://169.254.169.254/latest/meta-data/public-hostname server.http.session.inactive_timeout=1800 server.jdbc.connection-pool=c3p0 server.jdbc.connection-pool.acquisition-size=5 server.jdbc.connection-pool.idle-test-interval=7200 server.jdbc.connection-pool.max-age=0 server.jdbc.connection-pool.max-idle-time=14400 server.jdbc.connection-pool.max-idle-time-excess=0 server.jdbc.database=mysql server.jdbc.database_name=ambari server.jdbc.driver=com.mysql.jdbc.Driver server.jdbc.hostname=master.hadoop.com server.jdbc.port=3306 server.jdbc.postgres.schema=ambari server.jdbc.rca.driver=com.mysql.jdbc.Driver server.jdbc.rca.url=jdbc:mysql://localhost:3306/ambari server.jdbc.rca.user.name=ambari server.jdbc.rca.user.passwd=/etc/ambari-server/conf/password.dat server.jdbc.url=jdbc:mysql://localhost:3306/ambari server.jdbc.user.name=ambari server.jdbc.user.passwd=/etc/ambari-server/conf/password.dat server.os_family=ubuntu16 server.os_type=ubuntu16 server.persistence.type=remote server.stages.parallel=true server.task.timeout=1200 server.tmp.dir=/var/lib/ambari-server/data/tmp server.version.file=/var/lib/ambari-server/resources/version 
shared.resources.dir=/usr/lib/ambari-server/lib/ambari_commons/resources skip.service.checks=false stackadvisor.script=/var/lib/ambari-server/resources/scripts/stack_advisor.py ulimit.open.files=10000 user.inactivity.timeout.default=0 user.inactivity.timeout.role.readonly.default=0 views.ambari.request.connect.timeout.millis=30000 views.ambari.request.read.timeout.millis=45000 views.http.strict-transport-security=max-age=31536000 views.http.x-frame-options=SAMEORIGIN views.http.x-xss-protection=1; mode=block views.request.connect.timeout.millis=5000 views.request.read.timeout.millis=10000 webapp.dir=/usr/lib/ambari-server/web
hostname
master.hadoop.com
hostname -f
master.hadoop.com
Registration log for master.hadoop.com
========================== Creating target directory... ==========================
Command start time 2019-09-06 09:59:20
Connection to master.hadoop.com closed. SSH command execution finished host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:20
========================== Copying ambari sudo script... ==========================
Command start time 2019-09-06 09:59:20
scp /var/lib/ambari-server/ambari-sudo.sh host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:21
========================== Copying common functions script... ==========================
Command start time 2019-09-06 09:59:21
scp /usr/lib/python2.6/site-packages/ambari_commons host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:21
========================== Copying OS type check script... ==========================
Command start time 2019-09-06 09:59:21
scp /usr/lib/python2.6/site-packages/ambari_server/os_check_type.py host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:21
========================== Running OS type check... ==========================
Command start time 2019-09-06 09:59:21 Cluster primary/cluster OS family is ubuntu16 and local/current OS family is ubuntu16
Connection to master.hadoop.com closed. SSH command execution finished host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:22
========================== Checking 'sudo' package on remote host... ==========================
Command start time 2019-09-06 09:59:22
Connection to master.hadoop.com closed. SSH command execution finished host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:22
========================== Copying repo file to 'tmp' folder... ==========================
Command start time 2019-09-06 09:59:22
scp /etc/apt/sources.list.d/ambari.list host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:22
========================== Moving file to repo dir... ==========================
Command start time 2019-09-06 09:59:22
Connection to master.hadoop.com closed. SSH command execution finished host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:23
========================== Changing permissions for ambari.repo... ==========================
Command start time 2019-09-06 09:59:23
Connection to master.hadoop.com closed. SSH command execution finished host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:23
========================== Update apt cache of repository... ==========================
Command start time 2019-09-06 09:59:23
0% [Working] Hit:1 http://public-repo-1.hortonworks.com/ambari/ubuntu14/2.x/updates/2.4.2.0 Ambari InRelease
0% [Connecting to ppa.launchpad.net (91.189.95.83)] 0% [1 InRelease gpgv 3,190 B] [Connecting to ppa.launchpad.net (91.189.95.83)] 0% [Waiting for headers] Hit:2 http://ppa.launchpad.net/webupd8team/java/ubuntu xenial InRelease
0% [Working] 0% [2 InRelease gpgv 17.6 kB] 20% [Working]
Reading package lists... 0%
Reading package lists... 0%
Reading package lists... 0%
Reading package lists... 0%
Reading package lists... 5%
Reading package lists... Done
W: http://public-repo-1.hortonworks.com/ambari/ubuntu14/2.x/updates/2.4.2.0/dists/Ambari/InRelease: Signature by key DF52ED4F7A3A5882C0994C66B9733A7A07513CAD uses weak digest algorithm (SHA1)
Connection to master.hadoop.com closed. SSH command execution finished host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:24
========================== Copying setup script file... ==========================
Command start time 2019-09-06 09:59:24
scp /usr/lib/python2.6/site-packages/ambari_server/setupAgent.py host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 09:59:24
========================== Running setup agent script... ==========================
Command start time 2019-09-06 09:59:24 (' File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 496, in sendRequest raise IOError(\'Request to {0} failed due to {1}\'.format(url, str(exception))) IOError: Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> ERROR 2019-09-06 09:59:59,963 Controller.py:213 - Error:Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> WARNING 2019-09-06 09:59:59,963 Controller.py:214 - Sleeping for 0 seconds and then trying again INFO 2019-09-06 10:00:00,029 Controller.py:160 - Registering with master.hadoop.com (192.168.56.101) (agent=\'{"hardwareProfile": {"kernel": "Linux", "domain": "hadoop.com", "physicalprocessorcount": 4, "kernelrelease": "4.15.0-45-generic", "uptime_days": "0", "memorytotal": 16390376, "swapfree": "0.95 GB", "memorysize": 16390376, "osfamily": "ubuntu", "swapsize": "0.95 GB", "processorcount": 4, "netmask": "255.255.255.0", "timezone": "KST", "hardwareisa": "x86_64", "memoryfree": 14066700, "operatingsystem": "ubuntu", "kernelmajversion": "4.15", "kernelversion": "4.15.0", "macaddress": "30:9C:23:43:F8:99", "operatingsystemrelease": "16.04", "ipaddress": "192.168.56.101", "hostname": "master", "uptime_hours": "0", "fqdn": "master.hadoop.com", "id": "root", "architecture": "x86_64", "selinux": false, "mounts": [{"available": "8165428", "used": "0", "percent": "0%", "device": "udev", "mountpoint": "/dev", "type": "devtmpfs", "size": "8165428"}, {"available": "1629344", "used": "9696", "percent": "1%", "device": "tmpfs", "mountpoint": "/run", "type": "tmpfs", "size": "1639040"}, {"available": "903327312", "used": "8186120", "percent": "1%", "device": "/dev/sda1", "mountpoint": "/", "type": "ext4", "size": "960317832"}, {"available": "8194964", 
"used": "224", "percent": "1%", "device": "tmpfs", "mountpoint": "/dev/shm", "type": "tmpfs", "size": "8195188"}, {"available": "5116", "used": "4", "percent": "1%", "device": "tmpfs", "mountpoint": "/run/lock", "type": "tmpfs", "size": "5120"}, {"available": "1638996", "used": "44", "percent": "1%", "device": "tmpfs", "mountpoint": "/run/user/1000", "type": "tmpfs", "size": "1639040"}, {"available": "1639040", "used": "0", "percent": "0%", "device": "tmpfs", "mountpoint": "/run/user/0", "type": "tmpfs", "size": "1639040"}], "hardwaremodel": "x86_64", "uptime_seconds": "339", "interfaces": "enp0s31f6,lo"}, "currentPingPort": 8670, "prefix": "/var/lib/ambari-agent/data", "agentVersion": "2.4.2.0", "agentEnv": {"transparentHugePage": "madvise", "hostHealth": {"agentTimeStampAtReporting": 1567731600028, "activeJavaProcs": [], "liveServices": [{"status": "Unhealthy", "name": "ntp", "desc": "\\\\u25cf ntp.service\\\ Loaded: not-found (Reason: No such file or directory)\\\ Active: inactive (dead)\\\ "}]}, "reverseLookup": true, "alternatives": [], "umask": "18", "firewallName": "ufw", "stackFoldersAndFiles": [], "existingUsers": [], "firewallRunning": false}, "timestamp": 1567731599963, "hostname": "master.hadoop.com", "responseId": -1, "publicHostname": "master.hadoop.com"}\') INFO 2019-09-06 10:00:00,030 NetUtil.py:62 - Connecting to https://master.hadoop.com:8440/connection_info INFO 2019-09-06 10:00:00,043 security.py:100 - SSL Connect being called.. connecting to the server INFO 2019-09-06 10:00:00,044 security.py:67 - Insecure connection to https://master.hadoop.com:8441/ failed. Reconnecting using two-way SSL authentication.. 
INFO 2019-09-06 10:00:00,044 security.py:186 - Server certicate not exists, downloading INFO 2019-09-06 10:00:00,044 security.py:209 - Downloading server cert from https://master.hadoop.com:8440/cert/ca/ ERROR 2019-09-06 10:00:00,057 Controller.py:212 - Unable to connect to: https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com Traceback (most recent call last): File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 165, in registerWithServer ret = self.sendRequest(self.registerUrl, data) File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 496, in sendRequest raise IOError(\'Request to {0} failed due to {1}\'.format(url, str(exception))) IOError: Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> ERROR 2019-09-06 10:00:00,057 Controller.py:213 - Error:Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> WARNING 2019-09-06 10:00:00,057 Controller.py:214 - Sleeping for 9 seconds and then trying again ', None) (' File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 496, in sendRequest raise IOError(\'Request to {0} failed due to {1}\'.format(url, str(exception))) IOError: Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> ERROR 2019-09-06 09:59:59,963 Controller.py:213 - Error:Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> WARNING 2019-09-06 09:59:59,963 Controller.py:214 - Sleeping for 0 seconds and then trying again INFO 2019-09-06 10:00:00,029 Controller.py:160 - Registering with 
master.hadoop.com (192.168.56.101) (agent=\'{"hardwareProfile": {"kernel": "Linux", "domain": "hadoop.com", "physicalprocessorcount": 4, "kernelrelease": "4.15.0-45-generic", "uptime_days": "0", "memorytotal": 16390376, "swapfree": "0.95 GB", "memorysize": 16390376, "osfamily": "ubuntu", "swapsize": "0.95 GB", "processorcount": 4, "netmask": "255.255.255.0", "timezone": "KST", "hardwareisa": "x86_64", "memoryfree": 14066700, "operatingsystem": "ubuntu", "kernelmajversion": "4.15", "kernelversion": "4.15.0", "macaddress": "30:9C:23:43:F8:99", "operatingsystemrelease": "16.04", "ipaddress": "192.168.56.101", "hostname": "master", "uptime_hours": "0", "fqdn": "master.hadoop.com", "id": "root", "architecture": "x86_64", "selinux": false, "mounts": [{"available": "8165428", "used": "0", "percent": "0%", "device": "udev", "mountpoint": "/dev", "type": "devtmpfs", "size": "8165428"}, {"available": "1629344", "used": "9696", "percent": "1%", "device": "tmpfs", "mountpoint": "/run", "type": "tmpfs", "size": "1639040"}, {"available": "903327312", "used": "8186120", "percent": "1%", "device": "/dev/sda1", "mountpoint": "/", "type": "ext4", "size": "960317832"}, {"available": "8194964", "used": "224", "percent": "1%", "device": "tmpfs", "mountpoint": "/dev/shm", "type": "tmpfs", "size": "8195188"}, {"available": "5116", "used": "4", "percent": "1%", "device": "tmpfs", "mountpoint": "/run/lock", "type": "tmpfs", "size": "5120"}, {"available": "1638996", "used": "44", "percent": "1%", "device": "tmpfs", "mountpoint": "/run/user/1000", "type": "tmpfs", "size": "1639040"}, {"available": "1639040", "used": "0", "percent": "0%", "device": "tmpfs", "mountpoint": "/run/user/0", "type": "tmpfs", "size": "1639040"}], "hardwaremodel": "x86_64", "uptime_seconds": "339", "interfaces": "enp0s31f6,lo"}, "currentPingPort": 8670, "prefix": "/var/lib/ambari-agent/data", "agentVersion": "2.4.2.0", "agentEnv": {"transparentHugePage": "madvise", "hostHealth": {"agentTimeStampAtReporting": 
1567731600028, "activeJavaProcs": [], "liveServices": [{"status": "Unhealthy", "name": "ntp", "desc": "\\\\u25cf ntp.service\\\ Loaded: not-found (Reason: No such file or directory)\\\ Active: inactive (dead)\\\ "}]}, "reverseLookup": true, "alternatives": [], "umask": "18", "firewallName": "ufw", "stackFoldersAndFiles": [], "existingUsers": [], "firewallRunning": false}, "timestamp": 1567731599963, "hostname": "master.hadoop.com", "responseId": -1, "publicHostname": "master.hadoop.com"}\') INFO 2019-09-06 10:00:00,030 NetUtil.py:62 - Connecting to https://master.hadoop.com:8440/connection_info INFO 2019-09-06 10:00:00,043 security.py:100 - SSL Connect being called.. connecting to the server INFO 2019-09-06 10:00:00,044 security.py:67 - Insecure connection to https://master.hadoop.com:8441/ failed. Reconnecting using two-way SSL authentication.. INFO 2019-09-06 10:00:00,044 security.py:186 - Server certicate not exists, downloading INFO 2019-09-06 10:00:00,044 security.py:209 - Downloading server cert from https://master.hadoop.com:8440/cert/ca/ ERROR 2019-09-06 10:00:00,057 Controller.py:212 - Unable to connect to: https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com Traceback (most recent call last): File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 165, in registerWithServer ret = self.sendRequest(self.registerUrl, data) File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 496, in sendRequest raise IOError(\'Request to {0} failed due to {1}\'.format(url, str(exception))) IOError: Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> ERROR 2019-09-06 10:00:00,057 Controller.py:213 - Error:Request to https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)> WARNING 2019-09-06 
10:00:00,057 Controller.py:214 - Sleeping for 9 seconds and then trying again ', None)
Connection to master.hadoop.com closed. SSH command execution finished host=master.hadoop.com, exitcode=0 Command end time 2019-09-06 10:00:00
Registering with the server... Registration with the server failed.
What is the correct hostname (shown in red in the registration logs) for the registration process?
For this URL: https://master.hadoop.com:8441/agent/v1/register/master.hadoop.com
Thanks for advice
... View more
- Tags:
- Ambari
Labels:
- Labels:
-
Apache Ambari
09-05-2019
12:28 AM
Oh.. Thanks! I upgraded my Ambari version and changed my hostname from master to master.hadoop.com together!
... View more
09-02-2019
04:27 AM
Thanks for the advice. I changed my ambari-agent.ini file but received the same error..... Registering with the server...
Registration with the server failed. I try to change First. Hostname = master Second. Hostnme = master.hadoop.com and ambari-agent restart but i receive same error # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific [server] hostname=master.hadoop.com url_port=8440 secured_url_port=8441 [agent] logdir=/var/log/ambari-agent piddir=/var/run/ambari-agent prefix=/var/lib/ambari-agent/data ;loglevel=(DEBUG/INFO) loglevel=INFO data_cleanup_interval=86400 data_cleanup_max_age=2592000 data_cleanup_max_size_MB = 100 ping_port=8670 cache_dir=/var/lib/ambari-agent/cache tolerate_download_failures=true run_as_user=root parallel_execution=0 alert_grace_period=5 alert_kinit_timeout=14400000 system_resource_overrides=/etc/resource_overrides ; memory_threshold_soft_mb=400 ; memory_threshold_hard_mb=1000 [security] keysdir=/var/lib/ambari-agent/keys server_crt=ca.crt passphrase_env_var_name=AMBARI_PASSPHRASE ssl_verify_cert=0 [services] pidLookupPath=/var/run/ [heartbeat] state_interval_seconds=60 dirs=/etc/hadoop,/etc/hadoop/conf,/etc/hbase,/etc/hcatalog,/etc/hive,/etc/oozie, /etc/sqoop,/etc/ganglia, /var/run/hadoop,/var/run/zookeeper,/var/run/hbase,/var/run/templeton,/var/run/oozie, /var/log/hadoop,/var/log/zookeeper,/var/log/hbase,/var/run/templeton,/var/log/hive ; 0 - unlimited log_lines_count=300 idle_interval_min=1 
idle_interval_max=10 [logging] syslog_enabled=0
... View more
09-02-2019
01:09 AM
These are my error logs ......
==========================Creating target directory...==========================
Command start time 2019-09-02 17:01:05
Connection to master closed.SSH command execution finishedhost=master, exitcode=0Command end time 2019-09-02 17:01:05
==========================Copying ambari sudo script...==========================
Command start time 2019-09-02 17:01:05
scp /var/lib/ambari-server/ambari-sudo.shhost=master, exitcode=0Command end time 2019-09-02 17:01:05
==========================Copying common functions script...==========================
Command start time 2019-09-02 17:01:05
scp /usr/lib/python2.6/site-packages/ambari_commonshost=master, exitcode=0Command end time 2019-09-02 17:01:06
==========================Copying OS type check script...==========================
Command start time 2019-09-02 17:01:06
scp /usr/lib/python2.6/site-packages/ambari_server/os_check_type.pyhost=master, exitcode=0Command end time 2019-09-02 17:01:06
==========================Running OS type check...==========================
Command start time 2019-09-02 17:01:06Cluster primary/cluster OS family is ubuntu16 and local/current OS family is ubuntu16
Connection to master closed.SSH command execution finishedhost=master, exitcode=0Command end time 2019-09-02 17:01:06
==========================Checking 'sudo' package on remote host...==========================
Command start time 2019-09-02 17:01:06
Connection to master closed.SSH command execution finishedhost=master, exitcode=0Command end time 2019-09-02 17:01:06
==========================Copying repo file to 'tmp' folder...==========================
Command start time 2019-09-02 17:01:06
scp /etc/apt/sources.list.d/ambari.listhost=master, exitcode=0Command end time 2019-09-02 17:01:07
==========================Moving file to repo dir...==========================
Command start time 2019-09-02 17:01:07
Connection to master closed.SSH command execution finishedhost=master, exitcode=0Command end time 2019-09-02 17:01:07
==========================Changing permissions for ambari.repo...==========================
Command start time 2019-09-02 17:01:07
Connection to master closed.SSH command execution finishedhost=master, exitcode=0Command end time 2019-09-02 17:01:07
==========================Update apt cache of repository...==========================
Command start time 2019-09-02 17:01:07
0% [Working]0% [Connecting to public-repo-1.hortonworks.com] [Waiting for headers]Hit:1 http://ppa.launchpad.net/webupd8team/java/ubuntu xenial InRelease
0% [Connecting to public-repo-1.hortonworks.com]0% [1 InRelease gpgv 17.6 kB] [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]0% [Connecting to public-repo-1.hortonworks.com]Hit:2 http://public-repo-1.hortonworks.com/ambari/ubuntu14/2.x/updates/2.4.2.0 Ambari InRelease
0% [Working]0% [2 InRelease gpgv 3,190 B]20% [Working]
Reading package lists... 0%
Reading package lists... 0%
Reading package lists... 0%
Reading package lists... 0%
Reading package lists... 5%
Reading package lists... Done
W: http://public-repo-1.hortonworks.com/ambari/ubuntu14/2.x/updates/2.4.2.0/dists/Ambari/InRelease: Signature by key DF52ED4F7A3A5882C0994C66B9733A7A07513CAD uses weak digest algorithm (SHA1)
Connection to master closed.SSH command execution finishedhost=master, exitcode=0Command end time 2019-09-02 17:01:13
==========================Copying setup script file...==========================
Command start time 2019-09-02 17:01:13
scp /usr/lib/python2.6/site-packages/ambari_server/setupAgent.pyhost=master, exitcode=0Command end time 2019-09-02 17:01:13
==========================Running setup agent script...==========================
Command start time 2019-09-02 17:01:13('INFO 2019-09-02 17:01:47,369 logger.py:71 - call returned (0, \'\')INFO 2019-09-02 17:01:47,369 logger.py:71 - call[[\'test\', \'-w\', \'/run/user/0\']] {\'sudo\': True, \'timeout\': 5}INFO 2019-09-02 17:01:47,372 logger.py:71 - call returned (0, \'\')INFO 2019-09-02 17:01:47,378 Facter.py:194 - Directory: \'/etc/resource_overrides\' does not exist - it won\'t be used for gathering system resources.INFO 2019-09-02 17:01:47,449 Controller.py:160 - Registering with master (127.0.1.1) (agent=\'{"hardwareProfile": {"kernel": "Linux", "domain": "", "physicalprocessorcount": 4, "kernelrelease": "4.15.0-45-generic", "uptime_days": "3", "memorytotal": 16390376, "swapfree": "0.95 GB", "memorysize": 16390376, "osfamily": "ubuntu", "swapsize": "0.95 GB", "processorcount": 4, "netmask": null, "timezone": "KST", "hardwareisa": "x86_64", "memoryfree": 12196540, "operatingsystem": "ubuntu", "kernelmajversion": "4.15", "kernelversion": "4.15.0", "macaddress": "30:9C:23:43:F8:99", "operatingsystemrelease": "16.04", "ipaddress": "127.0.1.1", "hostname": "master", "uptime_hours": "73", "fqdn": "master", "id": "root", "architecture": "x86_64", "selinux": false, "mounts": [{"available": "8165416", "used": "0", "percent": "0%", "device": "udev", "mountpoint": "/dev", "type": "devtmpfs", "size": "8165416"}, {"available": "1627856", "used": "11184", "percent": "1%", "device": "tmpfs", "mountpoint": "/run", "type": "tmpfs", "size": "1639040"}, {"available": "903763276", "used": "7750156", "percent": "1%", "device": "/dev/sda1", "mountpoint": "/", "type": "ext4", "size": "960317832"}, {"available": "8195008", "used": "180", "percent": "1%", "device": "tmpfs", "mountpoint": "/dev/shm", "type": "tmpfs", "size": "8195188"}, {"available": "5116", "used": "4", "percent": "1%", "device": "tmpfs", "mountpoint": "/run/lock", "type": "tmpfs", "size": "5120"}, {"available": "1638984", "used": "56", "percent": "1%", "device": "tmpfs", "mountpoint": 
"/run/user/1000", "type": "tmpfs", "size": "1639040"}, {"available": "1639040", "used": "0", "percent": "0%", "device": "tmpfs", "mountpoint": "/run/user/0", "type": "tmpfs", "size": "1639040"}], "hardwaremodel": "x86_64", "uptime_seconds": "264311", "interfaces": "enp0s31f6,lo,wlx909f330d4aff"}, "currentPingPort": 8670, "prefix": "/var/lib/ambari-agent/data", "agentVersion": "2.4.2.0", "agentEnv": {"transparentHugePage": "madvise", "hostHealth": {"agentTimeStampAtReporting": 1567411307448, "activeJavaProcs": [], "liveServices": [{"status": "Healthy", "name": "ntp", "desc": ""}]}, "reverseLookup": true, "alternatives": [], "umask": "18", "firewallName": "ufw", "stackFoldersAndFiles": [], "existingUsers": [], "firewallRunning": false}, "timestamp": 1567411307380, "hostname": "master", "responseId": -1, "publicHostname": "master"}\')INFO 2019-09-02 17:01:47,449 NetUtil.py:62 - Connecting to https://192.168.56.101 master master.hadoop.com:8440/connection_infoWARNING 2019-09-02 17:01:47,460 NetUtil.py:85 - GET https://192.168.56.101 master master.hadoop.com:8440/connection_info -> 400, body:INFO 2019-09-02 17:01:47,461 security.py:100 - SSL Connect being called.. connecting to the serverINFO 2019-09-02 17:01:47,461 security.py:67 - Insecure connection to https://192.168.56.101 master master.hadoop.com:8441/ failed. 
Reconnecting using two-way SSL authentication..INFO 2019-09-02 17:01:47,461 security.py:186 - Server certicate not exists, downloadingINFO 2019-09-02 17:01:47,461 security.py:209 - Downloading server cert from https://192.168.56.101 master master.hadoop.com:8440/cert/ca/ERROR 2019-09-02 17:01:47,471 Controller.py:212 - Unable to connect to: https://192.168.56.101 master master.hadoop.com:8441/agent/v1/register/masterTraceback (most recent call last):File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 165, in registerWithServerret = self.sendRequest(self.registerUrl, data)File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 496, in sendRequestraise IOError(\'Request to {0} failed due to {1}\'.format(url, str(exception)))IOError: Request to https://192.168.56.101 master master.hadoop.com:8441/agent/v1/register/master failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)>ERROR 2019-09-02 17:01:47,471 Controller.py:213 - Error:Request to https://192.168.56.101 master master.hadoop.com:8441/agent/v1/register/master failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)>WARNING 2019-09-02 17:01:47,471 Controller.py:214 - Sleeping for 19 seconds and then trying again', None)('INFO 2019-09-02 17:01:47,369 logger.py:71 - call returned (0, \'\')INFO 2019-09-02 17:01:47,369 logger.py:71 - call[[\'test\', \'-w\', \'/run/user/0\']] {\'sudo\': True, \'timeout\': 5}INFO 2019-09-02 17:01:47,372 logger.py:71 - call returned (0, \'\')INFO 2019-09-02 17:01:47,378 Facter.py:194 - Directory: \'/etc/resource_overrides\' does not exist - it won\'t be used for gathering system resources.INFO 2019-09-02 17:01:47,449 Controller.py:160 - Registering with master (127.0.1.1) (agent=\'{"hardwareProfile": {"kernel": "Linux", "domain": "", "physicalprocessorcount": 4, "kernelrelease": "4.15.0-45-generic", "uptime_days": "3", "memorytotal": 16390376, "swapfree": 
"0.95 GB", "memorysize": 16390376, "osfamily": "ubuntu", "swapsize": "0.95 GB", "processorcount": 4, "netmask": null, "timezone": "KST", "hardwareisa": "x86_64", "memoryfree": 12196540, "operatingsystem": "ubuntu", "kernelmajversion": "4.15", "kernelversion": "4.15.0", "macaddress": "30:9C:23:43:F8:99", "operatingsystemrelease": "16.04", "ipaddress": "127.0.1.1", "hostname": "master", "uptime_hours": "73", "fqdn": "master", "id": "root", "architecture": "x86_64", "selinux": false, "mounts": [{"available": "8165416", "used": "0", "percent": "0%", "device": "udev", "mountpoint": "/dev", "type": "devtmpfs", "size": "8165416"}, {"available": "1627856", "used": "11184", "percent": "1%", "device": "tmpfs", "mountpoint": "/run", "type": "tmpfs", "size": "1639040"}, {"available": "903763276", "used": "7750156", "percent": "1%", "device": "/dev/sda1", "mountpoint": "/", "type": "ext4", "size": "960317832"}, {"available": "8195008", "used": "180", "percent": "1%", "device": "tmpfs", "mountpoint": "/dev/shm", "type": "tmpfs", "size": "8195188"}, {"available": "5116", "used": "4", "percent": "1%", "device": "tmpfs", "mountpoint": "/run/lock", "type": "tmpfs", "size": "5120"}, {"available": "1638984", "used": "56", "percent": "1%", "device": "tmpfs", "mountpoint": "/run/user/1000", "type": "tmpfs", "size": "1639040"}, {"available": "1639040", "used": "0", "percent": "0%", "device": "tmpfs", "mountpoint": "/run/user/0", "type": "tmpfs", "size": "1639040"}], "hardwaremodel": "x86_64", "uptime_seconds": "264311", "interfaces": "enp0s31f6,lo,wlx909f330d4aff"}, "currentPingPort": 8670, "prefix": "/var/lib/ambari-agent/data", "agentVersion": "2.4.2.0", "agentEnv": {"transparentHugePage": "madvise", "hostHealth": {"agentTimeStampAtReporting": 1567411307448, "activeJavaProcs": [], "liveServices": [{"status": "Healthy", "name": "ntp", "desc": ""}]}, "reverseLookup": true, "alternatives": [], "umask": "18", "firewallName": "ufw", "stackFoldersAndFiles": [], "existingUsers": [], 
"firewallRunning": false}, "timestamp": 1567411307380, "hostname": "master", "responseId": -1, "publicHostname": "master"}\')INFO 2019-09-02 17:01:47,449 NetUtil.py:62 - Connecting to https://192.168.56.101 master master.hadoop.com:8440/connection_infoWARNING 2019-09-02 17:01:47,460 NetUtil.py:85 - GET https://192.168.56.101 master master.hadoop.com:8440/connection_info -> 400, body:INFO 2019-09-02 17:01:47,461 security.py:100 - SSL Connect being called.. connecting to the serverINFO 2019-09-02 17:01:47,461 security.py:67 - Insecure connection to https://192.168.56.101 master master.hadoop.com:8441/ failed. Reconnecting using two-way SSL authentication..INFO 2019-09-02 17:01:47,461 security.py:186 - Server certicate not exists, downloadingINFO 2019-09-02 17:01:47,461 security.py:209 - Downloading server cert from https://192.168.56.101 master master.hadoop.com:8440/cert/ca/ERROR 2019-09-02 17:01:47,471 Controller.py:212 - Unable to connect to: https://192.168.56.101 master master.hadoop.com:8441/agent/v1/register/masterTraceback (most recent call last):File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 165, in registerWithServerret = self.sendRequest(self.registerUrl, data)File "/usr/lib/python2.6/site-packages/ambari_agent/Controller.py", line 496, in sendRequestraise IOError(\'Request to {0} failed due to {1}\'.format(url, str(exception)))IOError: Request to https://192.168.56.101 master master.hadoop.com:8441/agent/v1/register/master failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)>ERROR 2019-09-02 17:01:47,471 Controller.py:213 - Error:Request to https://192.168.56.101 master master.hadoop.com:8441/agent/v1/register/master failed due to <urlopen error [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:590)>WARNING 2019-09-02 17:01:47,471 Controller.py:214 - Sleeping for 19 seconds and then trying again', None)
Connection to master closed.SSH command execution finishedhost=master, exitcode=0Command end time 2019-09-02 17:01:48
Registering with the server...Registration with the server failed.
And this is my ambari-agent.ini
# Licensed to the Apache Software Foundation (ASF) under one or more# contributor license agreements. See the NOTICE file distributed with# this work for additional information regarding copyright ownership.# The ASF licenses this file to You under the Apache License, Version 2.0# (the "License"); you may not use this file except in compliance with# the License. You may obtain a copy of the License at## http://www.apache.org/licenses/LICENSE-2.0## Unless required by applicable law or agreed to in writing, software# distributed under the License is distributed on an "AS IS" BASIS,# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.# See the License for the specific
[server]hostname=192.168.56.101 master master.hadoop.comurl_port=8440secured_url_port=8441
[agent]logdir=/var/log/ambari-agentpiddir=/var/run/ambari-agentprefix=/var/lib/ambari-agent/data;loglevel=(DEBUG/INFO)loglevel=INFOdata_cleanup_interval=86400data_cleanup_max_age=2592000data_cleanup_max_size_MB = 100ping_port=8670cache_dir=/var/lib/ambari-agent/cachetolerate_download_failures=truerun_as_user=rootparallel_execution=0alert_grace_period=5alert_kinit_timeout=14400000system_resource_overrides=/etc/resource_overrides; memory_threshold_soft_mb=400; memory_threshold_hard_mb=1000
[security]keysdir=/var/lib/ambari-agent/keysserver_crt=ca.crtpassphrase_env_var_name=AMBARI_PASSPHRASEssl_verify_cert=0
[services]pidLookupPath=/var/run/
[heartbeat]state_interval_seconds=60dirs=/etc/hadoop,/etc/hadoop/conf,/etc/hbase,/etc/hcatalog,/etc/hive,/etc/oozie,/etc/sqoop,/etc/ganglia,/var/run/hadoop,/var/run/zookeeper,/var/run/hbase,/var/run/templeton,/var/run/oozie,/var/log/hadoop,/var/log/zookeeper,/var/log/hbase,/var/run/templeton,/var/log/hive; 0 - unlimitedlog_lines_count=300idle_interval_min=1idle_interval_max=10
[logging]syslog_enabled=0
And this is my /etc/hosts:
127.0.0.1 localhost127.0.1.1 master
192.168.56.101 master master.hadoop.com192.168.56.102 slave1 slave1.hadoop.com192.168.56.103 slave2 slave2.hadoop.com192.168.56.104 slave3 slave3.hadoop.com192.168.56.105 slave4 slave4.hadoop.com192.168.56.106 slave5 slave5.hadoop.com
# The following lines are desirable for IPv6 capable hosts::1 ip6-localhost ip6-loopbackfe00::0 ip6-localnetff00::0 ip6-mcastprefixff02::1 ip6-allnodesff02::2 ip6-allrouters
root@master:~# hostnamemasterroot@master:~# hostname -fmaster
How can I fix it?
... View more
Labels:
- Labels:
-
Apache Ambari