Start the Hadoop node(s).
Turn off the firewall.
Visit http://bigdata-pro01.kfk.com:50070/explorer.html#
Upload the json0427.json file (the JSON data file).
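The upload can also be done from code instead of the NameNode web UI. Below is a minimal sketch, assuming the Hadoop client jars are on the classpath; the class name UploadJson and the local path C:/json0427.json are assumptions, so adjust both to your environment:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class UploadJson {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Connect to the NameNode as user "root", matching the rest of this section
        FileSystem fs = FileSystem.get(
                URI.create("hdfs://bigdata-pro01.kfk.com:9000"), conf, "root");
        // Local source path is an assumption -- point it at your copy of json0427.json
        fs.copyFromLocalFile(new Path("C:/json0427.json"),
                new Path("/user/qyx/json0427.json"));
        fs.close();
    }
}

HDFS_Test: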
package hadoop_hdfs_20210413_01;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import com.alibaba.fastjson.JSONArray;
public class HDFS_Test {
    public static void writeToHDFS(String file) throws Exception {
        // Step 1: create the configuration object and load the config files
        Configuration conf = new Configuration();
        // Step 2: get the FileSystem handle, connecting as user "root"
        FileSystem fs = FileSystem.get(URI.create(file), conf, "root");
        // Step 3: the path of the file to operate on
        Path path = new Path(file);
        // Step 4: open an output stream on the file
        FSDataOutputStream out = fs.create(path);
        // Step 5: write the data through the output stream
        //out.write(words.getBytes("UTF-8"));
        // Build 7 rows of 7 random integers in [1, 100]; each row is one data set for the chart
        List<Object> data2 = new ArrayList<>();
        for (int i = 0; i < 7; i++) {
            List<Integer> data = new ArrayList<>();
            for (int j = 0; j < 7; j++) {
                data.add((int) (Math.random() * 100 + 1));
            }
            data2.add(data);
        }
        JSONArray ja = new JSONArray(data2);
        System.out.println(ja.toJSONString());
        out.write(ja.toJSONString().getBytes("UTF-8"));
        // Step 6: close the stream
        out.close();
    }
    public static byte[] readFromHDFS(String file) throws Exception {
        // Step 1: create the configuration object and load the config files
        Configuration conf = new Configuration();
        // Step 2: get the FileSystem handle
        FileSystem fs = FileSystem.get(URI.create(file), conf, "root");
        // Step 3: the path of the file to operate on
        Path path = new Path(file);
        // Step 4: open the file and get an input stream
        FSDataInputStream in = fs.open(path);
        // Step 5: copy the content to stdout; pass false so the stream stays open
        // (true would close it and break the positioned read below)
        IOUtils.copyBytes(in, System.out, 4096, false);
        FileStatus stat = fs.getFileStatus(path);
        // Allocate a buffer the size of the file and read it all in from offset 0
        byte[] buffer = new byte[(int) stat.getLen()];
        in.readFully(0, buffer);
        in.close();
        fs.close();
        return buffer;
    }
    public static void deleteHDFSFile(String file) throws Exception {
        // Step 1: create the configuration object and load the config files
        Configuration conf = new Configuration();
        // Step 2: get the FileSystem handle
        FileSystem fs = FileSystem.get(URI.create(file), conf, "root");
        // Step 3: the path of the file to operate on
        Path path = new Path(file);
        // Step 4: delete the file (recursively, in case the path is a directory)
        fs.delete(path, true);
        fs.close();
    }
    public static void main(String[] args) throws Exception {
        String file = "hdfs://bigdata-pro01.kfk.com:9000/user/qyx/json0427.json";
        //String words = " this words is to write into file !!";
        writeToHDFS(file);
        readFromHDFS(file);
        //deleteHDFSFile(file);
    }
    /*
     * Exercises:
     * 1. Create a demo directory under /user/kfk/ from code. (done)
     * 2. Create a demo directory on the C: drive with two subdirectories, s1 and s2,
     *    containing Test1.java and Hello1.java respectively.
     * 3. Copy all directories and files under C:\demo into the Hadoop demo directory
     *    (see the sketch after this class).
     */
}
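Exercise 3 asks for the whole local C:\demo tree to be mirrored into HDFS. A minimal sketch follows. Note that FileSystem.copyFromLocalFile can already copy a directory recursively on its own; the manual walk below just makes the structure explicit. The class name CopyDemoTree and the target directory /user/kfk/demo (from exercise 1) are assumptions:

package hadoop_hdfs_20210413_01;
import java.io.File;
import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyDemoTree {
    // Recursively mirror a local directory into HDFS, one entry at a time
    static void copyTree(FileSystem fs, File localDir, Path hdfsDir) throws Exception {
        fs.mkdirs(hdfsDir);
        for (File f : localDir.listFiles()) {
            if (f.isDirectory()) {
                copyTree(fs, f, new Path(hdfsDir, f.getName()));
            } else {
                fs.copyFromLocalFile(new Path(f.getAbsolutePath()),
                        new Path(hdfsDir, f.getName()));
            }
        }
    }
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(
                URI.create("hdfs://bigdata-pro01.kfk.com:9000"), conf, "root");
        // After this, s1/Test1.java and s2/Hello1.java both sit under /user/kfk/demo
        copyTree(fs, new File("C:/demo"), new Path("/user/kfk/demo"));
        fs.close();
    }
}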
The GetData servlet:
package com.hadoop;
import java.io.IOException;
import java.net.URI;
import java.util.List;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import com.alibaba.fastjson.JSONArray;
/**
 * Servlet implementation class GetData
 */
@WebServlet("/GetData")
public class GetData extends HttpServlet {
    private static final long serialVersionUID = 1L;

    public GetData() {
        super();
    }
    /**
     * @see HttpServlet#doGet(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        String file = "hdfs://bigdata-pro01.kfk.com:9000/user/qyx/json0427.json";
        List<String> list = null;
        try {
            // Step 1: create the configuration object and load the config files
            Configuration conf = new Configuration();
            // Step 2: get the FileSystem handle, connecting as user "root"
            FileSystem fs = FileSystem.get(URI.create(file), conf, "root");
            // Step 3: the path of the file to operate on
            Path path = new Path(file);
            // Step 4: open the file and get an input stream
            FSDataInputStream in = fs.open(path);
            // Step 5: read the file as lines of text (the JSON array is on one line)
            list = IOUtils.readLines(in, "UTF-8");
            System.out.println(list);
            in.close();
            fs.close();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
        if (list == null || list.isEmpty()) {
            response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                    "could not read data from HDFS");
            return;
        }
        // Parse the line back into a JSON array and send it to the front end
        JSONArray ja = JSONArray.parseArray(list.get(0));
        System.out.println(ja);
        response.setContentType("application/json;charset=UTF-8");
        response.getWriter().println(ja);
    }
    /**
     * @see HttpServlet#doPost(HttpServletRequest request, HttpServletResponse response)
     */
    protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
        doGet(request, response);
    }
}
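To sanity-check the servlet before wiring up the page, you can fetch the endpoint directly from code. A minimal sketch, assuming the app is deployed at the hadoop_0425web context on localhost:8080 (the same URL the page below uses); the class name GetDataClient is an assumption:

package com.hadoop;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import org.apache.commons.io.IOUtils;
import com.alibaba.fastjson.JSONArray;

public class GetDataClient {
    public static void main(String[] args) throws Exception {
        URL url = new URL("http://localhost:8080/hadoop_0425web/GetData");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (InputStream in = conn.getInputStream()) {
            String body = IOUtils.toString(in, "UTF-8");
            // Expect a 7x7 array of arrays, e.g. [[12,34,...],[...],...]
            JSONArray rows = JSONArray.parseArray(body.trim());
            System.out.println("rows: " + rows.size());
            System.out.println("first row: " + rows.getJSONArray(0));
        }
    }
}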
The HTML file:
<!--
THIS EXAMPLE WAS DOWNLOADED FROM https://echarts.apache.org/examples/en/editor.html?c=bar-background
-->
<!DOCTYPE html>
<html style="height: 100%">
<head>
<meta charset="utf-8">
</head>
<body style="height: 100%; margin: 0">
<button onclick="getData()">Switch</button>
<div id="container" style="height: 100%"></div>
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/echarts@5/dist/echarts.min.js"></script>
<!-- Uncomment this line if you want to use the dataTool extension
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/echarts@5/dist/extension/dataTool.min.js"></script>
-->
<!-- Uncomment this line if you want to use gl extension
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/echarts-gl@2/dist/echarts-gl.min.js"></script>
-->
<!-- Uncomment this line if you want to echarts-stat extension
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/echarts-stat@latest/dist/ecStat.min.js"></script>
-->
<!-- Uncomment this line if you want to use map
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/echarts@5/map/js/china.js"></script>
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/echarts@5/map/js/world.js"></script>
-->
<!-- Uncomment these two lines if you want to use bmap extension
<script type="text/javascript" src="https://api.map.baidu.com/api?v=2.0&ak=<Your Key Here>"></script>
<script type="text/javascript" src="https://cdn.jsdelivr.net/npm/echarts@5/dist/extension/bmap.min.js"></script>
-->
<script src="./js/jquery3.js"></script>
<script type="text/javascript">
var mydata;
var index=1;
var allData;
$.ajax({
"url":"http://localhost:8080/hadoop_0425web/GetData",
"data":'',
"type":"post",
"dataType":"json",
"success":function(obj) {
allData=obj;
mydata = obj[0];
alert(mydata);
dispEchart();
},
"error":function(obj) {
}
});
    // Cycle to the next data set and redraw the chart
    function getData() {
        mydata = allData[index];
        dispEchart();
        index++;
        if (index >= allData.length) {
            index = 0;
        }
    }
    // Render mydata as a bar chart in the container div
    function dispEchart() {
        var dom = document.getElementById("container");
        var myChart = echarts.init(dom);
        var option = {
            xAxis: {
                type: 'category',
                data: ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
            },
            yAxis: {
                type: 'value'
            },
            series: [{
                //data: [120, 200, 150, 80, 70, 110, 130],
                data: mydata,
                type: 'bar',
                showBackground: true,
                backgroundStyle: {
                    color: 'rgba(180, 180, 180, 0.2)'
                }
            }]
        };
        myChart.setOption(option);
    }
</script>
</body>
</html>
Result: