hdfs append加载不成功
0
使用 append 方法上传文件的时候总是报"系统找不到指定的路径"，但实际上我已经在代码里指定了路径。
下面是源码，报错信息请看附件。
package baiduyun;
import java.io.*;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class baiduyun_fangfa {
    Configuration conf = null;
    FileSystem fs = null;

    /**
     * Connects to the HDFS namenode once, as user "hdfs".
     * The resulting {@code fs} is shared by all method calls on this object.
     */
    public baiduyun_fangfa() {
        conf = new Configuration();
        // Must be set BEFORE FileSystem.get(): changing conf after the
        // FileSystem instance is created has no effect on it.
        conf.setBoolean("dfs.support.append", true);
        try {
            fs = FileSystem.get(new URI("hdfs://192.168.70.10:8020"), conf, "hdfs");
        } catch (IOException | URISyntaxException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Appends the contents of a LOCAL file to an EXISTING HDFS file.
     *
     * @param src path of the source file on the LOCAL disk — it is read with
     *            FileInputStream, not from HDFS; a "system cannot find the
     *            specified path" error means this local path does not exist
     * @param dst path of the target file on HDFS (must already exist,
     *            because append cannot create it)
     */
    public void append3(String src, String dst) {
        File local = new File(src);
        if (!local.isFile()) {
            // Fail early with a clear message instead of a bare stack trace.
            System.err.println("Local source file not found: " + local.getAbsolutePath());
            return;
        }
        // try-with-resources closes both streams even when copying fails.
        try (InputStream in = new BufferedInputStream(new FileInputStream(local));
             OutputStream out = fs.append(new Path(dst), 4096)) {
            // close=false: the streams are closed by try-with-resources,
            // letting IOUtils close them too would close them twice.
            IOUtils.copyBytes(in, out, 4096, false);
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        // Do NOT close fs here: it is shared by the whole object; closing it
        // in the finally block made every later call on this object fail.
    }
}
下面是源码，报错信息请看附件。
package baiduyun;
import java.io.*;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
public class baiduyun_fangfa {
    Configuration conf = null;
    FileSystem fs = null;

    /**
     * Connects to the HDFS namenode once, as user "hdfs".
     * The resulting {@code fs} is shared by all method calls on this object.
     */
    public baiduyun_fangfa() {
        conf = new Configuration();
        // Must be set BEFORE FileSystem.get(): changing conf after the
        // FileSystem instance is created has no effect on it.
        conf.setBoolean("dfs.support.append", true);
        try {
            fs = FileSystem.get(new URI("hdfs://192.168.70.10:8020"), conf, "hdfs");
        } catch (IOException | URISyntaxException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing it.
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Appends the contents of a LOCAL file to an EXISTING HDFS file.
     *
     * @param src path of the source file on the LOCAL disk — it is read with
     *            FileInputStream, not from HDFS; a "system cannot find the
     *            specified path" error means this local path does not exist
     * @param dst path of the target file on HDFS (must already exist,
     *            because append cannot create it)
     */
    public void append3(String src, String dst) {
        File local = new File(src);
        if (!local.isFile()) {
            // Fail early with a clear message instead of a bare stack trace.
            System.err.println("Local source file not found: " + local.getAbsolutePath());
            return;
        }
        // try-with-resources closes both streams even when copying fails.
        try (InputStream in = new BufferedInputStream(new FileInputStream(local));
             OutputStream out = fs.append(new Path(dst), 4096)) {
            // close=false: the streams are closed by try-with-resources,
            // letting IOUtils close them too would close them twice.
            IOUtils.copyBytes(in, out, 4096, false);
        } catch (IllegalArgumentException | IOException e) {
            e.printStackTrace();
        }
        // Do NOT close fs here: it is shared by the whole object; closing it
        // in the finally block made every later call on this object fail.
    }
}
没有找到相关结果
重要提示：提问者不能发表回复，可以通过评论与回答者沟通；沟通后可以通过编辑功能完善问题描述，以便后续其他人能够更容易理解问题。
1 个回复
MarsJ - 大数据玩家~DS 2017-01-02 回答
赞同来自:
你要确定文件路径没问题