Setting Up an Ambari Cluster

 

[root@hadoop001 ~]# visudo

[root@hadoop001 ssh]# useradd hadoop
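
The visudo step is where the new hadoop user gets sudo rights. A minimal sketch of the entry to append (assuming passwordless sudo is acceptable on a test cluster; tighten this for anything else):

# entry appended via visudo so the hadoop user can run the sudo commands below
hadoop  ALL=(ALL)       NOPASSWD: ALL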

Passwordless SSH

[hadoop@hadoop001 ~]$ ssh-keygen

[hadoop@hadoop001 ~]$ cd .ssh

[hadoop@hadoop001 .ssh]$ pwd
/home/hadoop/.ssh

[hadoop@hadoop001 .ssh]$ cat id_rsa.pub >> authorized_keys

[hadoop@hadoop001 .ssh]$ chmod 700 ~/.ssh

[hadoop@hadoop001 .ssh]$ chmod 600 ~/.ssh/authorized_keys

[hadoop@hadoop001 .ssh]$ ssh hadoop001
The authenticity of host 'hadoop001 (172.31.36.137)' can't be established.
ECDSA key fingerprint is SHA256:AAM1VixV4qWn6aVj1liWEOFzmsYKTYxqOFKokwPIPwI.
ECDSA key fingerprint is MD5:2d:1b:1d:d2:c2:32:34:ea:fe:ba:52:37:c4:a3:c8:27.
Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added 'hadoop001,172.31.36.137' (ECDSA) to the list of known hosts.
Last login: Tue Jan 21 14:55:08 2020

Welcome to Alibaba Cloud Elastic Compute Service !
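
If the cluster will also include hadoop002 and hadoop003 (they are registered with Ambari later), the same public key can be pushed to them now so the server can reach every host over SSH. A sketch, assuming both hosts resolve and already have a hadoop user:

[hadoop@hadoop001 ~]$ ssh-copy-id hadoop@hadoop002
[hadoop@hadoop001 ~]$ ssh-copy-id hadoop@hadoop003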

Installing Java

[root@hadoop001 software]# sudo tar -zxvf jdk-8u231-linux-x64.tar.gz -C /usr/local/

[root@hadoop001 software]# sudo ln -s /usr/local/jdk1.8.0_231/ /usr/local/jdk

[root@hadoop001 software]# sudo vi /etc/profile

export JAVA_HOME=/usr/local/jdk
export JRE_HOME=${JAVA_HOME}/jre
export CLASSPATH=.:${JAVA_HOME}/lib:${JRE_HOME}/lib
export PATH=${JAVA_HOME}/bin:$PATH

[root@hadoop001 software]# source /etc/profile
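
A quick check that the environment variables took effect; this should report version 1.8.0_231:

[root@hadoop001 software]# java -version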

 

[hadoop@hadoop001 software]$ wget http://public-repo-1.hortonworks.com/ambari/centos7/2.x/updates/2.7.4.0/ambari-2.7.4.0-centos7.tar.gz

The HDP, HDP-UTILS, and HDP-GPL packages are downloaded from the same repository:

http://public-repo-1.hortonworks.com/HDP/centos7/3.x/updates/3.1.4.0/HDP-3.1.4.0-centos7-rpm.tar.gz

http://public-repo-1.hortonworks.com/HDP-UTILS-1.1.0.22/repos/centos7/HDP-UTILS-1.1.0.22-centos7.tar.gz

http://public-repo-1.hortonworks.com/HDP-GPL/centos7/3.x/updates/3.1.4.0/HDP-GPL-3.1.4.0-centos7-gpl.tar.gz

Note: the HDP-GPL link http://public-repo-1.hortonworks.com/HDP-GPL/centos7/3.x/updates/3.1.4.0/HDP-GPL-3.1.4.0-centos7-gpl.tar.gz is no longer reachable.

Installing Apache (httpd)

[hadoop@hadoop001 software]$ sudo yum -y install httpd

[hadoop@hadoop001 software]$ sudo systemctl enable httpd
Created symlink from /etc/systemd/system/multi-user.target.wants/httpd.service to /usr/lib/systemd/system/httpd.service.

[hadoop@hadoop001 software]$ sudo systemctl start httpd

[hadoop@hadoop001 ~]$ sudo mv ~/software/HDP-3.1.4.0-centos7-rpm.tar.gz /var/www/html/

[hadoop@hadoop001 ~]$ sudo mv ~/software/HDP-GPL-3.1.4.0-centos7-gpl.tar.gz /var/www/html/

[hadoop@hadoop001 ~]$ sudo mv ~/software/HDP-UTILS-1.1.0.22-centos7.tar.gz /var/www/html/

[hadoop@hadoop001 ~]$ sudo mv ~/software/ambari-2.7.4.0-centos7.tar.gz /var/www/html/

[hadoop@hadoop001 software]$ cd /var/www/html/

[hadoop@hadoop001 html]$ sudo tar -zxvf ambari-2.7.4.0-centos7.tar.gz

[hadoop@hadoop001 html]$ sudo tar -zxvf HDP-3.1.4.0-centos7-rpm.tar.gz

[hadoop@hadoop001 html]$ sudo tar -zxvf HDP-GPL-3.1.4.0-centos7-gpl.tar.gz

[hadoop@hadoop001 html]$ sudo tar -zxvf HDP-UTILS-1.1.0.22-centos7.tar.gz

 

[hadoop@hadoop001 html]$ sudo chmod -R ugo+rX /var/www/html/

 

[hadoop@hadoop001 html]$ cd /etc/yum.repos.d/

[hadoop@hadoop001 yum.repos.d]$ sudo vim ambari.repo


[ambari-repo]
name=ambari
baseurl=http://hadoop001/ambari/centos7/2.7.4.0-118/
gpgcheck=0
enabled=1
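
A sanity check (optional commands, not part of the original walkthrough) that the extracted repository is served by httpd and visible to yum:

[hadoop@hadoop001 yum.repos.d]$ curl -s http://hadoop001/ambari/centos7/2.7.4.0-118/ | head
[hadoop@hadoop001 yum.repos.d]$ sudo yum clean all
[hadoop@hadoop001 yum.repos.d]$ sudo yum repolist | grep ambari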

Installing MySQL

[hadoop@hadoop001 ~]$ sudo yum -y localinstall https://dev.mysql.com/get/mysql57-community-release-el7-8.noarch.rpm

[hadoop@hadoop001 ~]$ sudo yum -y install mysql-community-server

Starting MySQL

[hadoop@hadoop001 ~]$ sudo systemctl enable mysqld
[hadoop@hadoop001 ~]$ sudo systemctl start mysqld
[hadoop@hadoop001 ~]$ sudo systemctl status mysqld
● mysqld.service - MySQL Server
   Loaded: loaded (/usr/lib/systemd/system/mysqld.service; enabled; vendor preset: disabled)
   Active: active (running) since Tue 2020-01-21 16:13:56 CST; 4s ago
     Docs: man:mysqld(8)
           http://dev.mysql.com/doc/refman/en/using-systemd.html
  Process: 12030 ExecStart=/usr/sbin/mysqld --daemonize --pid-file=/var/run/mysqld/mysqld.pid $MYSQLD_OPTS (code=exited, status=0/SUCCESS)
  Process: 11980 ExecStartPre=/usr/bin/mysqld_pre_systemd (code=exited, status=0/SUCCESS)
 Main PID: 12034 (mysqld)
   CGroup: /system.slice/mysqld.service
           └─12034 /usr/sbin/mysqld --daemonize --pid-file=/var/run/mysqld/mysqld.pid

Jan 21 16:13:51 hadoop001 systemd[1]: Starting MySQL Server...
Jan 21 16:13:56 hadoop001 systemd[1]: Started MySQL Server.

[hadoop@hadoop001 ~]$ sudo grep 'temporary password' /var/log/mysqld.log
2020-01-21T08:13:54.177137Z 1 [Note] A temporary password is generated for root@localhost: :l%1LM8iddA?

[hadoop@hadoop001 ~]$ mysql -uroot -p

Log in with the temporary password, then relax the password validation policy so that a simple password such as 123456 is accepted (fine for a test cluster, not for production):

mysql> set global validate_password_policy=0;
Query OK, 0 rows affected (0.00 sec)

mysql> set global validate_password_mixed_case_count=0;
Query OK, 0 rows affected (0.00 sec)

mysql> set global validate_password_number_count=3;
Query OK, 0 rows affected (0.00 sec)

mysql> set global validate_password_special_char_count=0;
Query OK, 0 rows affected (0.00 sec)

mysql> set global validate_password_length=3;
Query OK, 0 rows affected (0.00 sec)

mysql> ALTER USER 'root'@'localhost' IDENTIFIED BY '123456';
Query OK, 0 rows affected (0.00 sec)

[hadoop@hadoop001 ~]$ sudo mkdir -p /usr/share/java

[hadoop@hadoop001 ~]$ exit
logout

[root@hadoop001 software]# mv mysql-connector-java-8.0.18.jar /usr/share/java/

[hadoop@hadoop001 ~]$ sudo yum -y install ambari-server

Log back into MySQL as root to create the Ambari database and user:

mysql> set global validate_password_policy=0;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_mixed_case_count=0;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_number_count=3;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_special_char_count=0;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_length=3;
Query OK, 0 rows affected (0.00 sec)

mysql> create database ambari;
Query OK, 1 row affected (0.01 sec)

mysql> CREATE USER 'ambari'@'%' IDENTIFIED BY '123456';
Query OK, 0 rows affected (0.00 sec)

mysql> GRANT ALL ON ambari.* TO 'ambari'@'%';
Query OK, 0 rows affected (0.01 sec)

mysql> FLUSH PRIVILEGES;
Query OK, 0 rows affected (0.00 sec)

mysql> use ambari;
Database changed
mysql> source /var/lib/ambari-server/resources/Ambari-DDL-MySQL-CREATE.sql;
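
Optionally confirm that the Ambari schema loaded:

mysql> show tables;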

[hadoop@hadoop001 ~]$ sudo ambari-server setup
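
setup is interactive. The prompts differ slightly between Ambari releases, but for this environment the answers look roughly like this (a sketch, not a verbatim transcript):

  JDK selection                         -> choose "Custom JDK" and enter /usr/local/jdk
  Enter advanced database configuration -> y
  Database                              -> MySQL / MariaDB
  Hostname / Port / Database name       -> hadoop001 / 3306 / ambari
  Username / Password                   -> ambari / 123456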

Starting Ambari

[hadoop@hadoop001 ~]$ sudo ambari-server start
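
Once it is up, the web UI listens on port 8080 by default (first login is admin/admin). A quick check:

[hadoop@hadoop001 ~]$ sudo ambari-server status
[hadoop@hadoop001 ~]$ curl -sI http://hadoop001:8080 | head -1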

If anything goes wrong, be sure to check the log: /var/log/ambari-server/ambari-server.log

[hadoop@hadoop001 ~]$ cat ~/.ssh/id_rsa

Registering Hosts with Ambari

In the cluster install wizard, list the target hosts (hadoop001, hadoop002, hadoop003) on the Install Options page and paste the private key printed above into the Host Registration Information field so that Ambari can bootstrap the agents over SSH.

 

 

If hadoop002 and hadoop003 do not have the JDK installed, the host installation will fail; one way to prepare them is sketched below.
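
A sketch, assuming the same layout as hadoop001 is wanted on the other nodes and that their hadoop user also has passwordless sudo:

[hadoop@hadoop001 ~]$ scp -r /usr/local/jdk1.8.0_231 hadoop@hadoop002:/tmp/
[hadoop@hadoop001 ~]$ ssh hadoop@hadoop002 "sudo mv /tmp/jdk1.8.0_231 /usr/local/ && sudo ln -s /usr/local/jdk1.8.0_231 /usr/local/jdk"

Repeat the same two commands for hadoop003.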

 

 

Installing Kafka

 

Installing Storm

Deploying HBase

Deploying Hive

Hive needs a MySQL metastore database. Log back into MySQL as root and create it:
mysql> set global validate_password_policy=0;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_mixed_case_count=0;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_number_count=3;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_special_char_count=0;
Query OK, 0 rows affected (0.00 sec)

mysql>   set global validate_password_length=3;
Query OK, 0 rows affected (0.00 sec)

mysql> create database hive;
Query OK, 1 row affected (0.00 sec)

mysql> CREATE USER 'hive'@'%' IDENTIFIED BY '123456';
Query OK, 0 rows affected (0.01 sec)

mysql> GRANT ALL ON hive.* TO 'hive'@'%';
Query OK, 0 rows affected (0.02 sec)

mysql> FLUSH PRIVILEGES;
Query OK, 0 rows affected (0.01 sec)

mysql> exit
Bye

[hadoop@hadoop001 ~]$ sudo ambari-server setup --jdbc-db=mysql --jdbc-driver=/usr/share/java/mysql-connector-java-8.0.18.jar
Using python  /usr/bin/python
Setup ambari-server
Copying /usr/share/java/mysql-connector-java-8.0.18.jar to /var/lib/ambari-server/resources/mysql-connector-java-8.0.18.jar
Creating symlink /var/lib/ambari-server/resources/mysql-connector-java-8.0.18.jar to /var/lib/ambari-server/resources/mysql-connector-java.jar
If you are updating existing jdbc driver jar for mysql with mysql-connector-java-8.0.18.jar. Please remove the old driver jar, from all hosts. Restarting services that need the driver, will automatically copy the new jar to the hosts.
JDBC driver was successfully initialized.
Ambari Server 'setup' completed successfully.
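
When Hive is then deployed through the Ambari wizard, point its metastore at the database created above: choose "Existing MySQL / MariaDB Database", database name hive, user hive, password 123456. Based on those values the JDBC URL would be along these lines (an assumption, not copied from the original setup):

jdbc:mysql://hadoop001:3306/hive?createDatabaseIfNotExist=true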

Note: the Timeline Service V2.0 Reader needs enough memory available or it will fail to start.

 

 

 

 

 

 

 

 

 

 
