Environment preparation
Prepare an LZO-compressed file
# check whether the lzop command is available
[hadoop@Gargantua software]$ which lzop
/bin/lzop
# if not, run the following install commands
[root@Gargantua ~]# yum install -y svn ncurses-devel
[root@Gargantua ~]# yum install -y gcc gcc-c++ make cmake
[root@Gargantua ~]# yum install -y openssl openssl-devel svn ncurses-devel zlib-devel libtool
[root@Gargantua ~]# yum install -y lzo lzo-devel lzop autoconf automake cmake
LZO compress: lzop -v filename
LZO decompress: lzop -dv filename
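A quick round trip on a sample file (wc.data is just an example name; lzop keeps the original file by default):
# compress wc.data into wc.data.lzo
lzop -v wc.data
# decompress wc.data.lzo (use -f to overwrite an existing wc.data)
lzop -dvf wc.data.lzo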
Download, build, and install LZO
wget http://www.oberhumer.com/opensource/lzo/download/lzo-2.10.tar.gz
tar -zxvf lzo-2.10.tar.gz
cd lzo-2.10
./configure --prefix=/usr/local/hadoop/lzo/
make
make install
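A quick sanity check that the headers and library landed under the prefix (paths assume the --prefix used above):
ls /usr/local/hadoop/lzo/include/lzo
ls /usr/local/hadoop/lzo/lib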
Build the hadoop-lzo source
2.1 Download the hadoop-lzo source from https://github.com/twitter/hadoop-lzo/archive/master.zip
2.2 After unpacking, edit pom.xml and set the Hadoop version
<hadoop.current.version>3.2.2</hadoop.current.version>
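The hadoop.current.version property already exists in the upstream pom.xml, so it can be checked or patched in place, for example:
# confirm or patch the Hadoop version the build compiles against
grep hadoop.current.version pom.xml
sed -i 's|<hadoop.current.version>.*</hadoop.current.version>|<hadoop.current.version>3.2.2</hadoop.current.version>|' pom.xml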
2.3 Declare two temporary environment variables so the native build can find the LZO headers and library
export C_INCLUDE_PATH=/usr/local/hadoop/lzo/include
export LIBRARY_PATH=/usr/local/hadoop/lzo/lib
2.4 Build
Enter the hadoop-lzo-master directory and run the Maven build
mvn package -Dmaven.test.skip=true
2.5 In the target directory, hadoop-lzo-0.4.21-SNAPSHOT.jar is the successfully built hadoop-lzo component
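A quick look at the build output (the native library path below is the usual hadoop-lzo layout on 64-bit Linux and may differ on other platforms):
ls target/hadoop-lzo-0.4.21-SNAPSHOT.jar
ls target/native/Linux-amd64-64/lib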
Configure the Hadoop jar dependency
Copy the built hadoop-lzo-0.4.21-SNAPSHOT.jar into $HADOOP_HOME/share/hadoop/common/
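For example (on a multi-node cluster the jar has to be present on every node; hadoop102/hadoop103 are placeholder hostnames):
cp target/hadoop-lzo-0.4.21-SNAPSHOT.jar $HADOOP_HOME/share/hadoop/common/
scp $HADOOP_HOME/share/hadoop/common/hadoop-lzo-0.4.21-SNAPSHOT.jar hadoop102:$HADOOP_HOME/share/hadoop/common/
scp $HADOOP_HOME/share/hadoop/common/hadoop-lzo-0.4.21-SNAPSHOT.jar hadoop103:$HADOOP_HOME/share/hadoop/common/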
Add the following to core-site.xml to enable LZO compression
<property>
    <name>io.compression.codecs</name>
    <value>
        org.apache.hadoop.io.compress.GzipCodec,
        org.apache.hadoop.io.compress.DefaultCodec,
        org.apache.hadoop.io.compress.BZip2Codec,
        org.apache.hadoop.io.compress.SnappyCodec,
        com.hadoop.compression.lzo.LzoCodec,
        com.hadoop.compression.lzo.LzopCodec
    </value>
</property>
<property>
    <name>io.compression.codec.lzo.class</name>
    <value>com.hadoop.compression.lzo.LzoCodec</value>
</property>
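core-site.xml must be identical on every node, and the daemons need a restart to pick up the new codecs (hadoop102/hadoop103 are placeholder hostnames; adjust to your cluster):
scp $HADOOP_HOME/etc/hadoop/core-site.xml hadoop102:$HADOOP_HOME/etc/hadoop/
scp $HADOOP_HOME/etc/hadoop/core-site.xml hadoop103:$HADOOP_HOME/etc/hadoop/
# restart HDFS and YARN so the codec settings take effect
stop-dfs.sh && start-dfs.sh
stop-yarn.sh && start-yarn.sh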
// Compress a file larger than 128 MB into wc.data.lzo with lzop and upload it to HDFS
[liqiang@Gargantua data]$ lzop wc.data
[liqiang@Gargantua data]$ hdfs dfs -put wc.data.lzo /input/wc.data.lzo
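A quick check that the uploaded .lzo file is really larger than one 128 MB block (only then can the index later yield more than one split):
hdfs dfs -ls -h /input/wc.data.lzo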
Run wordcount
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.2.jar wordcount \
  -Dmapreduce.output.fileoutputformat.compress=true \
  -Dmapreduce.output.fileoutputformat.compress.codec=com.hadoop.compression.lzo.LzopCodec \
  /input/wc.data.lzo /output/wc2
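At this point the job log should still report number of splits:1, because a plain .lzo file is not splittable without an index; the LZOP-compressed output can be inspected with:
hdfs dfs -ls /output/wc2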
Build an index for the lzo file
hadoop jar $HADOOP_HOME/share/hadoop/common/hadoop-lzo-0.4.21-SNAPSHOT.jar com.hadoop.compression.lzo.DistributedLzoIndexer /input/wc.data.lzo
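The indexer writes a wc.data.lzo.index file next to the data file, which is what allows the input to be split on LZO block boundaries:
hdfs dfs -ls /input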
hdfs dfs -rm -r /output/wc2
// Run wordcount again
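For the index to actually produce multiple splits, the job must read the file with hadoop-lzo's input format; a sketch of the re-run, passing the input format via -D (verify against your own setup):
hadoop jar $HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.2.jar wordcount \
  -Dmapreduce.job.inputformat.class=com.hadoop.mapreduce.LzoTextInputFormat \
  -Dmapreduce.output.fileoutputformat.compress=true \
  -Dmapreduce.output.fileoutputformat.compress.codec=com.hadoop.compression.lzo.LzopCodec \
  /input/wc.data.lzo /output/wc2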
The job log now shows number of splits:2