
Tailing an HDFS File

2013-12-05 18:00
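The class below approximates tail -f for a file on HDFS: each pass re-opens the file, seeks to the position reached on the previous pass, prints every complete line it can read, records in.getPos() as the new offset, then sleeps three seconds and tries again. Because the offset survives across passes, lines appended between polls are picked up on the next iteration.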
package com.xxx.hdfstail;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HDFSTailByLine_V4 {

    String src;
    Path path = null;
    Configuration conf;
    FileSystem srcFs;

    public HDFSTailByLine_V4(String src) {
        this.src = src;
        path = new Path(src);
        conf = new Configuration();
        try {
            srcFs = path.getFileSystem(conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    private void tail() throws IOException {
        if (srcFs.isDirectory(path)) {
            throw new IOException("Source must be a file.");
        }
        long fileSize = srcFs.getFileStatus(path).getLen();
        // A real tail would seek to fileSize - 1024; this version starts
        // from offset 0 and prints the whole file on the first pass.
        long offset = 0;
        while (true) {
            System.out.println(fileSize + " " + offset);
            FSDataInputStream in = srcFs.open(path);
            in.seek(offset);
            BufferedReader reader = new BufferedReader(new InputStreamReader(in));
            String tmp;
            while ((tmp = reader.readLine()) != null) {
                System.out.println(tmp);
                System.out.println("===+++++===");
            }
            // Remember how far we read, then close the reader (which also
            // closes the underlying stream); the next pass re-opens the
            // file and seeks past everything already printed.
            offset = in.getPos();
            reader.close();
            System.out.println("=========");
            try {
                Thread.sleep(3000);
            } catch (InterruptedException e) {
                break;
            }
            System.out.println("sleep 3 s !");
        }
    }

    public static void main(String[] args) throws Exception {
        // In real use, take the path from the command line: String src = args[0];
        String src = "hdfs://192.168.1.132:9000/user/hive/warehouse/log_asjh/ASJH_2013-12-05/ASJH_2013-12-05_00000";
        HDFSTailByLine_V4 t = new HDFSTailByLine_V4(src);
        t.tail();
    }
}
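One caveat in the version above: fileSize is read once before the loop, so the size it prints goes stale as the file grows, and every pass re-opens the stream even when nothing new has arrived. A minimal refinement sketch, assuming the same Hadoop FileSystem API; the class name HDFSTailIfGrown is made up here for illustration:

package com.xxx.hdfstail;

import java.io.BufferedReader;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical variant (not from the original post): only re-open the
// file when getFileStatus reports it has grown past our offset.
public class HDFSTailIfGrown {
    public static void main(String[] args) throws Exception {
        Path path = new Path(args[0]);
        FileSystem fs = path.getFileSystem(new Configuration());
        long offset = 0;
        while (true) {
            // Refresh the length each pass instead of reading it once.
            long len = fs.getFileStatus(path).getLen();
            if (len > offset) {
                FSDataInputStream in = fs.open(path);
                in.seek(offset);
                BufferedReader reader = new BufferedReader(new InputStreamReader(in));
                String line;
                while ((line = reader.readLine()) != null) {
                    System.out.println(line);
                }
                offset = in.getPos(); // remember progress before closing
                reader.close();
            }
            Thread.sleep(3000); // same poll interval as the original
        }
    }
}

Run it the same way as the original, passing the hdfs:// path of the file to tail as the first argument.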