- Hadoop is installed under
/usr/local/Cellar/hadoop
and the five files that need to be configured can be found there (under 2.6.0/libexec/etc/hadoop).
- First, update the PATH:
sudo nano /etc/profile
- Add the following (HADOOP_HOME should point at the libexec directory, which is the distribution root):
export HADOOP_HOME=/usr/local/Cellar/hadoop/2.6.0/libexec
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
source /etc/profile
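- To confirm the variables took effect in the current shell, a quick check (assuming the Homebrew layout above):
echo $HADOOP_HOME
hadoop version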
- hadoop-env.sh
export HADOOP_OPTS="-Djava.security.krb5.realm=OX.AC.UK -Djava.security.krb5.kdc=kdc0.ox.ac.uk:kdc1.ox.ac.uk"
export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_40.jdk/Contents/Home
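- The JAVA_HOME above is machine-specific; on macOS the active JDK's home can also be resolved with the java_home utility (a sketch, assuming a JDK is installed):
export JAVA_HOME=$(/usr/libexec/java_home)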
- core-site.xml
<configuration>
  <property>
    <name>hadoop.tmp.dir</name>
    <value>/usr/local/Cellar/hadoop/hdfs/tmp</value>
    <description>A base for other temporary directories.</description>
  </property>
  <property>
    <name>fs.default.name</name>
    <value>hdfs://localhost:9000</value>
  </property>
</configuration>
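- hadoop.tmp.dir points at a directory that may not exist yet; creating it up front avoids permission surprises (a sketch, assuming the path above is writable by the current user):
mkdir -p /usr/local/Cellar/hadoop/hdfs/tmp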
- yarn-site.xml
<configuration>
  <!-- Site specific YARN configuration properties -->
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.aux-services.mapreduce.shuffle.class</name>
    <value>org.apache.hadoop.mapred.ShuffleHandler</value>
  </property>
</configuration>
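- The mapreduce_shuffle aux-service is what lets MapReduce jobs shuffle data under YARN; once the daemons are running (started below), one way to confirm the NodeManager registered is (a sketch):
yarn node -list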
- mapred-site.xml
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
</configuration>
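- Hadoop 2.6.0 ships only mapred-site.xml.template by default; if mapred-site.xml does not exist yet, copy the template before adding the property above (assuming HADOOP_HOME as set earlier):
cp $HADOOP_HOME/etc/hadoop/mapred-site.xml.template $HADOOP_HOME/etc/hadoop/mapred-site.xml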
- hdfs-site.xml
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>3</value>
  </property>
  <property>
    <name>dfs.namenode.name.dir</name>
    <value>file:/usr/local/Cellar/hadoop/hdfs/name</value>
  </property>
  <property>
    <name>dfs.datanode.data.dir</name>
    <value>file:/usr/local/Cellar/hadoop/hdfs/data</value>
  </property>
</configuration>
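- As with the tmp directory, the NameNode and DataNode directories can be created ahead of time (a sketch, assuming the paths above):
mkdir -p /usr/local/Cellar/hadoop/hdfs/name /usr/local/Cellar/hadoop/hdfs/data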
- Start Hadoop
- Format the NameNode, then bring up HDFS and YARN:
hadoop namenode -format
start-dfs.sh
start-yarn.sh
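- To shut everything down later, the matching stop scripts are on the PATH as well:
stop-yarn.sh
stop-dfs.sh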
- Check the Hadoop web UIs
The HDFS management page is at http://localhost:50070, and the YARN ResourceManager page (running applications and nodes) is at http://localhost:8088.
jps
lists all of the Hadoop processes that were started.
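- On a healthy single-node setup, jps should report roughly the following daemons (an illustrative listing, not captured output; PIDs omitted):
NameNode
DataNode
SecondaryNameNode
ResourceManager
NodeManager
Jps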
- Verify with WordCount
- Create an input directory on HDFS:
hadoop fs -mkdir -p input
- Copy README.txt from the Hadoop directory into the newly created input directory on HDFS:
hadoop fs -copyFromLocal /Users/Gao/Documents/学习/01-Hadoop/01-hadoop安装/hadoop-2.6.0/README.txt input
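- To confirm the file landed in HDFS, list the input directory:
hadoop fs -ls input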
- Run WordCount:
hadoop jar /Users/Gao/Documents/学习/01-Hadoop/01-hadoop安装/hadoop-2.6.0/share/hadoop/mapreduce/sources/hadoop-mapreduce-examples-2.6.0-sources.jar org.apache.hadoop.examples.WordCount input output
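- Equivalently, WordCount can be run from the compiled examples jar that ships with the distribution (assuming HADOOP_HOME as set earlier; the output directory must not already exist):
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-2.6.0.jar wordcount input output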
- When the job finishes, view the word-count results:
hadoop fs -cat output/*
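- The output can also be listed or copied back to the local filesystem (a sketch; part file names may vary):
hadoop fs -ls output
hadoop fs -get output ./wordcount-output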