
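Before the experiments, clean out the old logs on the master node and the two slave nodes, start HDFS and YARN, and then confirm with jps that the daemons are running on all three machines: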
rm -rf /opt/linuxsir/hadoop/logs/*.*
ssh root@192.168.31.132 rm -rf /opt/linuxsir/hadoop/logs/*.*
ssh root@192.168.31.133 rm -rf /opt/linuxsir/hadoop/logs/*.*
clear
cd /opt/linuxsir/hadoop/sbin
./start-dfs.sh
./start-yarn.sh
clear
jps
ssh root@192.168.31.132 jps
ssh root@192.168.31.133 jps
In Eclipse, proceed as follows:
New → Java Project, with any name you like, e.g. java-prj
New → Package, named com.pai.hdfs_demo
New → Class, named ReadWriteHDFSExample
package com.pai.hdfs_demo;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.*;
import java.nio.charset.StandardCharsets;
public class ReadWriteHDFSExample {
// main: create the class ReadWriteHDFSExample and write the main method as follows. It calls the other methods to create a directory, write data, append data, and then read the data back
public static void main(String[] args) throws IOException {
// ReadWriteHDFSExample.checkExists();
ReadWriteHDFSExample.createDirectory();
ReadWriteHDFSExample.writeFileToHDFS();
ReadWriteHDFSExample.appendToHDFSFile();
ReadWriteHDFSExample.readFileFromHDFS();
}
// readFileFromHDFS reads the file content and prints it as a string
public static void readFileFromHDFS() throws IOException {
Configuration configuration = new Configuration();
configuration.set("fs.defaultFS", "hdfs://192.168.31.131:9000");
FileSystem fileSystem = FileSystem.get(configuration);
// Create a path
String fileName = "read_write_hdfs_example.txt";
Path hdfsReadPath = new Path("/javareadwriteexample/" + fileName);
// initialize input stream
FSDataInputStream inputStream = fileSystem.open(hdfsReadPath);
// Classical input stream usage
String out = IOUtils.toString(inputStream, "UTF-8");
System.out.println(out);
// BufferedReader bufferedReader = new BufferedReader(
// new InputStreamReader(inputStream, StandardCharsets.UTF_8));
// String line = null;
// while ((line=bufferedReader.readLine())!=null){
// System.out.println(line);
// }
inputStream.close();
fileSystem.close();
}
// writeFileToHDFS opens a file and writes one line of text
public static void writeFileToHDFS() throws IOException {
Configuration configuration = new Configuration();
configuration.set("fs.defaultFS", "hdfs://192.168.31.131:9000");
FileSystem fileSystem = FileSystem.get(configuration);
// Create a path
String fileName = "read_write_hdfs_example.txt";
Path hdfsWritePath = new Path("/javareadwriteexample/" + fileName);
FSDataOutputStream fsDataOutputStream = fileSystem.create(hdfsWritePath, true);
BufferedWriter bufferedWriter = new BufferedWriter(
new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
bufferedWriter.write("Java API to write data in HDFS");
bufferedWriter.newLine();
bufferedWriter.close();
fileSystem.close();
}
// appendToHDFSFile opens the file and appends one line of text. Note that the Configuration object must be set up properly, otherwise the append fails
public static void appendToHDFSFile() throws IOException {
Configuration configuration = new Configuration();
configuration.set("fs.defaultFS", "hdfs://192.168.31.131:9000");
// On a small cluster, relax the replace-datanode-on-failure policy so that the append pipeline does not fail
configuration.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");
configuration.set("dfs.client.block.write.replace-datanode-on-failure.enable", "true");
FileSystem fileSystem = FileSystem.get(configuration);
// Create a path
String fileName = "read_write_hdfs_example.txt";
Path hdfsWritePath = new Path("/javareadwriteexample/" + fileName);
FSDataOutputStream fsDataOutputStream = fileSystem.append(hdfsWritePath);
BufferedWriter bufferedWriter = new BufferedWriter(
new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
bufferedWriter.write("Java API to append data in HDFS file");
bufferedWriter.newLine();
bufferedWriter.close();
fileSystem.close();
}
// createDirectory creates a directory
public static void createDirectory() throws IOException {
Configuration configuration = new Configuration();
configuration.set("fs.defaultFS", "hdfs://192.168.31.131:9000");
FileSystem fileSystem = FileSystem.get(configuration);
String directoryName = "/javareadwriteexample";
Path path = new Path(directoryName);
fileSystem.mkdirs(path);
fileSystem.close();
}
// checkExists checks whether a directory or file exists. Note that the last closing brace below ends the ReadWriteHDFSExample class
public static void checkExists() throws IOException {
Configuration configuration = new Configuration();
configuration.set("fs.defaultFS", "hdfs://192.168.31.131:9000");
FileSystem fileSystem = FileSystem.get(configuration);
String directoryName = "/javareadwriteexample";
Path path = new Path(directoryName);
if (fileSystem.exists(path)) {
System.out.println("File/Folder Exists : " + path.getName());
} else {
System.out.println("File/Folder does not Exists : " + path.getName());
}
}
}
To compile the Java code above, the jars in the following directories must be added to the Eclipse project's Build Path.
The sequence is: right-click the Java project in Eclipse → Properties → Java Build Path → Libraries → Add External JARs, then add the jars under the following paths:
D:\hadoop-2.7.3\share\hadoop\common\lib
D:\hadoop-2.7.3\share\hadoop\common
D:\hadoop-2.7.3\share\hadoop\hdfs
D:\hadoop-2.7.3\share\hadoop\hdfs\lib
D:\hadoop-2.7.3\share\hadoop\mapreduce\lib
D:\hadoop-2.7.3\share\hadoop\mapreduce
D:\hadoop-2.7.3\share\hadoop\yarn\lib
D:\hadoop-2.7.3\share\hadoop\yarn
Now the code can be run. After it finishes, the file that was written can be checked on the hd-master host with the following commands:
[root@hd-master bin]# cd /opt/linuxsir/hadoop/bin
[root@hd-master bin]# ./hdfs dfs -ls /javareadwriteexample/read_write_hdfs_example.txt
-rw-r--r-- 3 root supergroup 70 2024-10-10 04:47 /javareadwriteexample/read_write_hdfs_example.txt
[root@hd-master bin]# ./hdfs dfs -cat /javareadwriteexample/read_write_hdfs_example.txt
Java API to write data in HDFS
Java API to append data in HDFS file
To run the experiment repeatedly (or to debug the code), delete the HDFS files first and then run or debug the Java code again; otherwise, once the directory already exists, the directory-creation code will fail. The shell commands below do the cleanup (a programmatic alternative is sketched right after them):
cd /opt/linuxsir/hadoop/bin
./hdfs dfs -rm /javareadwriteexample/*
./hdfs dfs -rmdir /javareadwriteexample
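Alternatively, the cleanup can be done from Java with the same FileSystem API. Below is a minimal sketch (the class name CleanupExample is made up for illustration); the second argument of delete makes the removal recursive:
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
public class CleanupExample {
public static void main(String[] args) throws Exception {
Configuration configuration = new Configuration();
configuration.set("fs.defaultFS", "hdfs://192.168.31.131:9000");
FileSystem fileSystem = FileSystem.get(configuration);
// true = recursive: removes /javareadwriteexample together with the files inside it
fileSystem.delete(new Path("/javareadwriteexample"), true);
fileSystem.close();
}
}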
cd /opt/linuxsir/hadoop/sbin
./stop-yarn.sh
./stop-dfs.sh
jps
ssh root@192.168.31.132 jps
ssh root@192.168.31.133 jps
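The second example is the classic WordCount MapReduce job. In Eclipse, create a project with a package named mywordcount and a class named WordCount, containing the following code: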
package mywordcount;
import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.GenericOptionsParser;
public class WordCount {
// The inner class TokenizerMapper implements the map function: each word read from the input is emitted as a <word, 1> key-value pair via the Context object, and the execution engine then performs the Shuffle
public static class TokenizerMapper extends Mapper<Object, Text, Text, IntWritable> {
private final static IntWritable one = new IntWritable(1);
private Text word = new Text();
public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
StringTokenizer itr = new StringTokenizer(value.toString());
while (itr.hasMoreTokens()) {
word.set(itr.nextToken());
context.write(word, one);
}
}
}
// The inner class IntSumReducer implements the reduce function: it gathers all <word, 1> pairs that share the same key, sums up their values, and outputs the count
public static class IntSumReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
private IntWritable result = new IntWritable();
public void reduce(Text key, Iterable<IntWritable> values, Context context)
throws IOException, InterruptedException {
int sum = 0;
for (IntWritable val : values) {
sum += val.get();
}
result.set(sum);
context.write(key, result);
}
}
// The main method of WordCount configures the key parameters of the Job and launches it. The conf object carries the property fs.defaultFS set to hdfs://192.168.31.131:9000, so that WordCount knows how to access HDFS
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
if (otherArgs.length != 2) {
System.err.println("Usage: wordcount <in> <out>");
System.exit(2);
}
conf.set("fs.defaultFS", "hdfs://192.168.31.131:9000");
Job job = Job.getInstance(conf, "word count");
job.setJarByClass(WordCount.class);
job.setMapperClass(TokenizerMapper.class);
job.setCombinerClass(IntSumReducer.class);
job.setReducerClass(IntSumReducer.class);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(IntWritable.class);
FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
System.exit(job.waitForCompletion(true) ? 0 : 1);
}
}
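To run the job, export the project as a jar, copy it to hd-master, and submit it with the hadoop command, passing the HDFS input and output directories as arguments. A hypothetical invocation (the jar path /root/wordcount.jar and the input directory /input are placeholders for your own setup):
cd /opt/linuxsir/hadoop/bin
./hadoop jar /root/wordcount.jar mywordcount.WordCount /input /output1
After the job completes, inspect the output: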
[root@hd-master bin]# ./hdfs dfs -ls /output1
Found 2 items
-rw-r--r-- 3 root supergroup 0 2024-10-10 05:17 /output1/_SUCCESS
-rw-r--r-- 3 root supergroup 89 2024-10-10 05:17 /output1/part-r-00000
[root@hd-master bin]# ./hdfs dfs -cat /output1/part-r-00000
I 1
apache 1
cloudera 1
google 1
hadoop 8
hortonworks 1
ibm 1
intel 1
like 1
microsoft 1