windows下 在eclipse中执行mapreduce 权限问题

来源:互联网 发布:linux可以用来干什么 编辑:程序博客网 时间:2024/05/16 10:38

windows下 在eclipse中执行mapreduce 权限问题

15/04/22 11:20:46 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
15/04/22 11:20:46 ERROR security.UserGroupInformation: PriviledgedActionException as:hadoop cause:java.io.IOException: Failed to set permissions of path: \tmp\hadoop-Administrator\mapred\staging\hadoop67840110\.staging to 0700
java.io.IOException: Failed to set permissions of path: \tmp\hadoop-Administrator\mapred\staging\hadoop67840110\.staging to 0700
	at org.apache.hadoop.fs.FileUtil.checkReturnValue(FileUtil.java:691)
	at org.apache.hadoop.fs.FileUtil.setPermission(FileUtil.java:664)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:514)
	at org.apache.hadoop.fs.RawLocalFileSystem.mkdirs(RawLocalFileSystem.java:349)
	at org.apache.hadoop.fs.FilterFileSystem.mkdirs(FilterFileSystem.java:193)
	at org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:126)
	at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:942)
	at org.apache.hadoop.mapred.JobClient$2.run(JobClient.java:936)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Unknown Source)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1190)
	at org.apache.hadoop.mapred.JobClient.submitJobInternal(JobClient.java:936)
	at org.apache.hadoop.mapreduce.Job.submit(Job.java:550)
	at org.apache.hadoop.mapreduce.Job.waitForCompletion(Job.java:580)
	at com.hadoop.mapred.WordCount$1.run(WordCount.java:110)
	at com.hadoop.mapred.WordCount$1.run(WordCount.java:1)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Unknown Source)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1172)
	at com.hadoop.mapred.WordCount.main(WordCount.java:85)

调整后的写法:

import java.security.PrivilegedAction;import org.apache.hadoop.conf.Configuration;import org.apache.hadoop.fs.Path;import org.apache.hadoop.io.IntWritable;import org.apache.hadoop.io.LongWritable;import org.apache.hadoop.io.Text;import org.apache.hadoop.mapreduce.Job;import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;import org.apache.hadoop.security.UserGroupInformation;public class EmpMain {    public static void main(String[] args) throws Exception {    System.setProperty("path.separator", ":");    UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hadoop");//这里    ugi.doAs(new PrivilegedAction<Void>(){//这里public Void run() {try {Configuration conf = new Configuration();conf.setStrings("fs.default.name", "hdfs://hadoop5:19000");//这里conf.set("mapred.job.tracker", "hadoop5:19001");//这里conf.set("mapred.jar", "D:\\workspace\\jobTask2\\EmpMain\\target\\EmpMain-0.0.1-SNAPSHOT.jar");Job job = new Job(conf, "empMain");job.setOutputKeyClass(LongWritable.class);job.setOutputValueClass(Employee.class);job.setJarByClass(EmpMain.class);job.setMapperClass(org.EmpMain.EmpMapper.class);job.setReducerClass(org.EmpMain.EmpReducer.class);job.setInputFormatClass(TextInputFormat.class);job.setOutputFormatClass(TextOutputFormat.class);FileInputFormat.addInputPath(job, new Path("/xiaoft/empno.txt"));FileOutputFormat.setOutputPath(job, new Path("/xiaoft/out29"));job.waitForCompletion(true);} catch (Exception e) {System.out.println(e);}return null;}    });    }}


0 0