Programming Question 2: The file windows.txt sits in the root directory of drive C on a Windows system. Use the org.apache.hadoop.fs.FileSystem class to upload windows.txt to the HDFS root directory, filling in the blanks in the code below.

    ......
    public class HDFS_CRUD {
        ......
        // Upload a file
        @Test
        public void UpLoadFileToHDFS() throws IOException {
            // Set the source path of the upload
            (1) src = new Path("C:/windows.txt");
            // Set the destination path of the upload
            Path dst = new Path((2));
            // Upload the file
            fs.(3)(src, (4));
            // Close the resource
            fs.(5)();
        }
    }
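For reference, a minimal, self-contained sketch of what the completed upload might look like. The NameNode URI hdfs://localhost:9000 and the class name UploadSketch are assumptions (the exercise does not specify the cluster address); adjust them for your environment.

    import java.io.IOException;
    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class UploadSketch {
        public static void main(String[] args) throws IOException {
            // Assumption: placeholder NameNode address, not given in the exercise
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000"), conf);
            Path src = new Path("C:/windows.txt");  // local source file
            Path dst = new Path("/");               // HDFS root directory
            fs.copyFromLocalFile(src, dst);         // upload the local file to HDFS
            fs.close();                             // release the connection
        }
    }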
Programming Question 3: Use the org.apache.hadoop.fs.FileSystem class to create, rename, and delete directories in the HDFS distributed file system, filling in the blanks in the code below.

    ......
    public class HDFS_CRUD {
        ......
        // Create, delete, and rename directories
        @Test
        public void testMkdirAndDeleteAndRename() throws Exception {
            // Create the directory /x/y/z
            fs.(1)(new Path("/x/y/z"));
            // Create the directory /a/b/c
            fs.mkdirs(new Path((2)));
            // Rename the directory /a to a_new
            fs.(3)(new Path("/a"), new Path((4)));
            // Delete the directory /x; for a non-empty directory the second argument must be true
            fs.(5)(new Path("/x"), true);
        }
    }
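For reference, a minimal, self-contained sketch of one possible completion. As above, the NameNode URI and the class name DirOpsSketch are assumptions introduced so the sketch runs on its own.

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class DirOpsSketch {
        public static void main(String[] args) throws Exception {
            // Assumption: placeholder NameNode address, not given in the exercise
            FileSystem fs = FileSystem.get(URI.create("hdfs://localhost:9000"), new Configuration());
            fs.mkdirs(new Path("/x/y/z"));                  // create /x/y/z
            fs.mkdirs(new Path("/a/b/c"));                  // create /a/b/c
            fs.rename(new Path("/a"), new Path("/a_new"));  // rename /a to /a_new
            fs.delete(new Path("/x"), true);                // recursive delete of the non-empty /x
            fs.close();
        }
    }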
Programming Question 4: Use the MapReduce parallel computing framework to implement word counting, filling in the blanks in the code below.

    ......
    // WordCountMapper class
    public class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
        @Override
        protected void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            // Convert the value to a String
            String line = value.(1)();
            // Split the line content on the comma separator
            String[] words = line.split((2));
            // Iterate over the array, tagging each word as <word, 1>
            for (String word : words) {
                // Data processed in the Map phase is sent to the Reduce phase as its input
                context.(3)(new Text(word), new IntWritable(1));
            }
        }
    }
    ......
    // WordCountReducer class
    public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
        @Override
        protected void reduce(Text key, Iterable<IntWritable> value, Context context)
                throws IOException, InterruptedException {
            // Define a counter
            int count = 0;
            // Iterate over the group of values, accumulating the word's total count
            for ((4) iw : value) {
                count += iw.get();
            }
            // Write out the data processed in the Reduce phase
            context.write(key, new IntWritable((5)));
        }
    }
    ......
    // WordCountDriver class
    ......
    public class WordCountDriver {
        public static void main(String[] args) throws IOException, ClassNotFoundException, InterruptedException {
            Configuration conf = new (6)();              // create the configuration object
            Job job = Job.getInstance(conf);             // create a Job object with that configuration
            job.setJarByClass(WordCountDriver.class);    // set the main class
            job.(7)(WordCountMapper.class);              // set WordCountMapper
            job.setReducerClass(WordCountReducer.class); // set WordCountReducer
            // Set the map-phase output types
            job.setMapOutputKeyClass((8));
            job.setMapOutputValueClass(IntWritable.class);
            // Set the reduce-phase output types
            job.(9)(Text.class);
            job.setOutputValueClass(IntWritable.class);
            FileInputFormat.setInputPaths(job, "/input");              // set the source path
            FileOutputFormat.setOutputPath(job, new Path("/output")); // set the destination path
            // Submit the job
            boolean res = (10).waitForCompletion(true);
            System.out.println(res ? "Job completed!" : "Job not completed!");
        }
    }
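For reference, a minimal, self-contained sketch of one possible completion, assuming comma-separated input lines (per the comment in the Mapper). The enclosing class name WordCount is an assumption introduced so the sketch compiles as a single file.

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class WordCount {
        public static class WordCountMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
            @Override
            protected void map(LongWritable key, Text value, Context context)
                    throws IOException, InterruptedException {
                String line = value.toString();    // convert the Text value to a String
                String[] words = line.split(",");  // assumption: comma-separated words
                for (String word : words) {
                    context.write(new Text(word), new IntWritable(1)); // emit <word, 1>
                }
            }
        }

        public static class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
            @Override
            protected void reduce(Text key, Iterable<IntWritable> values, Context context)
                    throws IOException, InterruptedException {
                int count = 0;
                for (IntWritable iw : values) {    // accumulate the counts for this word
                    count += iw.get();
                }
                context.write(key, new IntWritable(count));
            }
        }

        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf);
            job.setJarByClass(WordCount.class);
            job.setMapperClass(WordCountMapper.class);
            job.setReducerClass(WordCountReducer.class);
            job.setMapOutputKeyClass(Text.class);
            job.setMapOutputValueClass(IntWritable.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            FileInputFormat.setInputPaths(job, new Path("/input"));
            FileOutputFormat.setOutputPath(job, new Path("/output"));
            boolean res = job.waitForCompletion(true);
            System.out.println(res ? "Job completed!" : "Job not completed!");
        }
    }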
Programming Question 5: Call the ZooKeeper Java API to create, read, update, and delete nodes, filling in the blanks in the code below.

    public class ZookeeperTest {
        public static void main(String[] args) throws Exception {
            // Initialize the ZooKeeper instance (zk address list; session timeout, matching the system default; watcher)
            ZooKeeper zk = new (1)("master:2181,slave1:2181,slave2:2181", 30000, new Watcher() {
                @Override
                public void process(WatchedEvent event) {
                    System.out.println("Event type: " + event.getType());
                    System.out.println("Event path: " + event.(2)());
                    System.out.println("Notification state: " + event.getState());
                }
            });
            // Create a directory node
            zk.create("/testRootPath", "testRootData".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
            // Get the node's data
            System.out.println(new String(zk.(3)("/testRootPath", false, null)));
            // Modify the node's data
            zk.(4)("/testRootPath", "modifytestRootData".getBytes(), -1);
            // Delete the parent directory node
            zk.(5)("/testRootPath", -1);
            // Disconnect the ZooKeeper client
            zk.close();
        }
    }
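For reference, a minimal, self-contained sketch of one possible completion. The ensemble address master:2181,slave1:2181,slave2:2181 and the 30000 ms session timeout are taken from the exercise; they must match your actual cluster.

    import org.apache.zookeeper.CreateMode;
    import org.apache.zookeeper.WatchedEvent;
    import org.apache.zookeeper.Watcher;
    import org.apache.zookeeper.ZooDefs.Ids;
    import org.apache.zookeeper.ZooKeeper;

    public class ZookeeperSketch {
        public static void main(String[] args) throws Exception {
            // Ensemble address and timeout are taken from the exercise; adjust for your cluster
            ZooKeeper zk = new ZooKeeper("master:2181,slave1:2181,slave2:2181", 30000, new Watcher() {
                @Override
                public void process(WatchedEvent event) {
                    System.out.println("Event type: " + event.getType());
                    System.out.println("Event path: " + event.getPath());
                    System.out.println("Notification state: " + event.getState());
                }
            });
            // Create a persistent node with an open ACL
            zk.create("/testRootPath", "testRootData".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
            // Read the node data (no watch, no Stat)
            System.out.println(new String(zk.getData("/testRootPath", false, null)));
            // Overwrite the node data; version -1 skips the version check
            zk.setData("/testRootPath", "modifytestRootData".getBytes(), -1);
            // Delete the node; version -1 skips the version check
            zk.delete("/testRootPath", -1);
            zk.close();
        }
    }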
Programming Question 6: Use the MapReduce parallel computing framework to implement TopN, filling in the blanks in the code below.

    ......
    public class TopNMapper extends Mapper<LongWritable, Text, NullWritable, IntWritable> {
        private TreeMap<Integer, String> repToRecordMap = new TreeMap<Integer, String>();

        @Override
        protected void map(LongWritable key, Text value, Context context) {
            String line = value.(1)();
            String[] nums = line.split(" ");
            for (String num : nums) {
                repToRecordMap.put(Integer.parseInt(num), " ");
                if ((2).size() > 5) {
                    repToRecordMap.remove(repToRecordMap.(3));
                }
            }
        }

        @Override
        protected void cleanup(Context context) {
            for (Integer i : repToRecordMap.keySet()) {
                try {
                    (4).write(NullWritable.get(), new IntWritable(i));
                } catch (IOException e) {
                    e.printStackTrace();
                } catch (InterruptedException e) {
                    e.(5)();
                }
            }
        }
    }

    public class TopNReducer extends Reducer<NullWritable, IntWritable, NullWritable, IntWritable> {
        private TreeMap<Integer, String> repToRecordMap = (6)TreeMap<Integer, String>(new Comparator<Integer>() {
            public int (7)(Integer a, Integer b) {
                return b - a;
            }
        });

        public void reduce(NullWritable key, Iterable<IntWritable> values, Context context)
                throws IOException, InterruptedException {
            for ((8) value : values) {
                repToRecordMap.put(value.get(), " ");
                if (repToRecordMap.size() > 5) {
                    repToRecordMap.(9)(repToRecordMap.firstKey());
                }
            }
            for (Integer i : repToRecordMap.keySet()) {
                context.write(NullWritable.get(), (10));
            }
        }
    }

    public class TopNRunner {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf);
            job.setJarByClass((11));
            job.setMapperClass(TopNMapper.class);
            job.setReducerClass(TopNReducer.class);
            job.(12)(1);
            job.setMapOutputKeyClass(NullWritable.class);
            job.setMapOutputValueClass(IntWritable.class);
            job.setOutputKeyClass(NullWritable.class);
            job.setOutputValueClass(IntWritable.class);
            FileInputFormat.setInputPaths(job, new Path("/input"));
            (13).setOutputPath(job, new Path("/output"));
            boolean res = job.(14)(true);
            System.(15)(res ? 0 : 1);
        }
    }
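For reference, a minimal, self-contained sketch of one possible completion, assuming space-separated integers and N = 5 largest values. The enclosing class name TopN is an assumption. One deliberate deviation from the exercise text: in the reducer's descending TreeMap, firstKey() is the largest entry, so this sketch evicts lastKey() (the smallest retained value) to keep the five largest.

    import java.io.IOException;
    import java.util.Comparator;
    import java.util.TreeMap;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.NullWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

    public class TopN {
        public static class TopNMapper extends Mapper<LongWritable, Text, NullWritable, IntWritable> {
            // Ascending TreeMap: firstKey() is the smallest retained number
            private TreeMap<Integer, String> repToRecordMap = new TreeMap<Integer, String>();

            @Override
            protected void map(LongWritable key, Text value, Context context) {
                String[] nums = value.toString().split(" "); // assumption: space-separated integers
                for (String num : nums) {
                    repToRecordMap.put(Integer.parseInt(num), " ");
                    if (repToRecordMap.size() > 5) {
                        repToRecordMap.remove(repToRecordMap.firstKey()); // evict the smallest
                    }
                }
            }

            @Override
            protected void cleanup(Context context) {
                // Emit this mapper's local top 5 once all of its input has been seen
                for (Integer i : repToRecordMap.keySet()) {
                    try {
                        context.write(NullWritable.get(), new IntWritable(i));
                    } catch (IOException | InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        }

        public static class TopNReducer extends Reducer<NullWritable, IntWritable, NullWritable, IntWritable> {
            // Descending TreeMap: firstKey() is the largest entry, lastKey() the smallest
            private TreeMap<Integer, String> repToRecordMap = new TreeMap<Integer, String>(new Comparator<Integer>() {
                public int compare(Integer a, Integer b) {
                    return b - a;
                }
            });

            @Override
            protected void reduce(NullWritable key, Iterable<IntWritable> values, Context context)
                    throws IOException, InterruptedException {
                for (IntWritable value : values) {
                    repToRecordMap.put(value.get(), " ");
                    if (repToRecordMap.size() > 5) {
                        repToRecordMap.remove(repToRecordMap.lastKey()); // evict the smallest so the five largest survive
                    }
                }
                for (Integer i : repToRecordMap.keySet()) {
                    context.write(NullWritable.get(), new IntWritable(i));
                }
            }
        }

        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            Job job = Job.getInstance(conf);
            job.setJarByClass(TopN.class);
            job.setMapperClass(TopNMapper.class);
            job.setReducerClass(TopNReducer.class);
            job.setNumReduceTasks(1); // a single reducer sees every mapper's local top 5
            job.setMapOutputKeyClass(NullWritable.class);
            job.setMapOutputValueClass(IntWritable.class);
            job.setOutputKeyClass(NullWritable.class);
            job.setOutputValueClass(IntWritable.class);
            FileInputFormat.setInputPaths(job, new Path("/input"));
            FileOutputFormat.setOutputPath(job, new Path("/output"));
            System.exit(job.waitForCompletion(true) ? 0 : 1);
        }
    }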