Following the previous post, which covered stage one: cleaning the data and loading it into Hive.

This post covers the remaining two stages.

2. Data processing:

· Count the Top 10 most-visited videos/articles by number of accesses (video/article)

· Count the Top 10 most popular courses by city (ip)

· Count the Top 10 most popular courses by traffic (traffic)

3. Data visualization: load the statistics into a MySQL database and present them graphically.

2. Data processing

· Count the Top 10 most-visited videos/articles by number of accesses (video/article)

package mapreduce;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class GetVideoResult {

    public static void main(String[] args) {
        try {
            // Job 1: count the number of visits per video/article
            Job job = Job.getInstance();
            job.setJobName("GetVideoResult");
            job.setJarByClass(GetVideoResult.class);
            job.setMapperClass(doMapper.class);
            job.setReducerClass(doReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            job.setInputFormatClass(TextInputFormat.class);
            job.setOutputFormatClass(TextOutputFormat.class);
            Path in = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out1/part-r-00000");
            Path out = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out1.2");
            FileInputFormat.addInputPath(job, in);
            FileOutputFormat.setOutputPath(job, out);
            //System.exit(job.waitForCompletion(true) ? 0:1);
            if (job.waitForCompletion(true)) {
                // Job 2: sort the counts in descending order and keep the Top 10
                Job job1 = Job.getInstance();
                job1.setJobName("Sort");
                job1.setJarByClass(GetVideoResult.class);
                job1.setMapperClass(doMapper1.class);
                job1.setReducerClass(doReducer1.class);
                job1.setOutputKeyClass(IntWritable.class);
                job1.setOutputValueClass(Text.class);
                job1.setSortComparatorClass(IntWritableDecreasingComparator.class);
                job1.setInputFormatClass(TextInputFormat.class);
                job1.setOutputFormatClass(TextOutputFormat.class);
                Path in1 = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out1.2/part-r-00000");
                Path out1 = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out1.3");
                FileInputFormat.addInputPath(job1, in1);
                FileOutputFormat.setOutputPath(job1, out1);
                System.exit(job1.waitForCompletion(true) ? 0 : 1);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Job 1 mapper: emit (video/article id, 1) for every record
    public static class doMapper extends Mapper<Object, Text, Text, IntWritable> {
        public static Text word = new Text();
        public static final IntWritable id = new IntWritable(1);

        @Override
        protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String[] data = value.toString().split("\t");
            word.set(data[5]);
            //id.set(Integer.parseInt(data[5]));
            context.write(word, id);
        }
    }

    // Job 1 reducer: sum the counts and emit (count, id) so the count becomes the key of job 2
    public static class doReducer extends Reducer<Text, IntWritable, IntWritable, Text> {
        private static IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            result.set(sum);
            context.write(result, key);
        }
    }

    // Job 2 mapper: parse "count \t id" and emit (count, id) so the framework sorts by count
    public static class doMapper1 extends Mapper<Object, Text, IntWritable, Text> {
        private static Text goods = new Text();
        private static IntWritable num = new IntWritable();

        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String arr[] = line.split("\t");
            num.set(Integer.parseInt(arr[0]));
            goods.set(arr[1]);
            context.write(num, goods);
        }
    }

    // Job 2 reducer: keys arrive in descending order, so only the first 10 records overall are written
    public static class doReducer1 extends Reducer<IntWritable, Text, IntWritable, Text> {
        int i = 0;

        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            for (Text value : values) {
                if (i < 10) {
                    context.write(key, value);
                    i++;
                }
            }
        }
    }

    // Invert the default ascending order of IntWritable keys
    private static class IntWritableDecreasingComparator extends IntWritable.Comparator {
        public int compare(WritableComparable a, WritableComparable b) {
            return -super.compare(a, b);
        }

        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return -super.compare(b1, s1, l1, b2, s2, l2);
        }
    }
}

I originally implemented this with two separate classes, first summing and then sorting. After looking it up online I found that two jobs can be chained in a single class, so that is how it is done here. Also, MapReduce sorts keys in ascending order by default, while we need descending order, so a decreasing comparator (IntWritableDecreasingComparator) is plugged in as the sort comparator of the second job.
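For comparison, a common alternative that avoids the second sort job altogether is to keep a bounded TreeMap inside the reducer and emit it in cleanup(). The sketch below is only illustrative and is not the approach used in this project: the class name is made up, it assumes a single reducer, and ties on the count overwrite each other in the TreeMap.

package mapreduce;

import java.io.IOException;
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

// Hypothetical single-job Top 10 reducer: sums the counts per key, keeps only the
// ten largest sums, and emits them in descending order when the reducer finishes.
public class TopTenReducer extends Reducer<Text, IntWritable, IntWritable, Text> {

    // Sorted by count; only the 10 largest entries are retained.
    private final TreeMap<Integer, String> top = new TreeMap<Integer, String>();

    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        top.put(sum, key.toString());
        if (top.size() > 10) {
            // Drop the smallest entry so the map never grows beyond ten elements.
            top.remove(top.firstKey());
        }
    }

    @Override
    protected void cleanup(Context context) throws IOException, InterruptedException {
        // Emit in descending order of count.
        for (Map.Entry<Integer, String> e : top.descendingMap().entrySet()) {
            context.write(new IntWritable(e.getKey()), new Text(e.getValue()));
        }
    }
}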

· Count the Top 10 most popular courses by city (ip)

package mapreduce;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class GetVideoResultip {

    public static void main(String[] args) {
        try {
            // Job 1: count visits per (ip, course) pair
            Job job = Job.getInstance();
            job.setJobName("GetVideoResult");
            job.setJarByClass(GetVideoResultip.class);
            job.setMapperClass(doMapper.class);
            job.setReducerClass(doReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            job.setInputFormatClass(TextInputFormat.class);
            job.setOutputFormatClass(TextOutputFormat.class);
            Path in = new Path("hdfs://192.168.137.67:9000/mymapreducel/in/result.txt");
            Path out = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out2.1");
            FileInputFormat.addInputPath(job, in);
            FileOutputFormat.setOutputPath(job, out);
            //System.exit(job.waitForCompletion(true) ? 0:1);
            if (job.waitForCompletion(true)) {
                // Job 2: sort the counts in descending order and keep the Top 10
                Job job1 = Job.getInstance();
                job1.setJobName("Sort");
                job1.setJarByClass(GetVideoResultip.class);
                job1.setMapperClass(doMapper1.class);
                job1.setReducerClass(doReducer1.class);
                job1.setOutputKeyClass(IntWritable.class);
                job1.setOutputValueClass(Text.class);
                job1.setSortComparatorClass(IntWritableDecreasingComparator.class);
                job1.setInputFormatClass(TextInputFormat.class);
                job1.setOutputFormatClass(TextOutputFormat.class);
                Path in1 = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out2.1/part-r-00000");
                Path out1 = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out2.2");
                FileInputFormat.addInputPath(job1, in1);
                FileOutputFormat.setOutputPath(job1, out1);
                System.exit(job1.waitForCompletion(true) ? 0 : 1);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Job 1 mapper: the raw records are comma separated; emit ("ip \t course", 1)
    public static class doMapper extends Mapper<Object, Text, Text, IntWritable> {
        public static Text word = new Text();
        public static final IntWritable id = new IntWritable(1);

        @Override
        protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String[] data = value.toString().split(",");
            String str = data[0] + "\t" + data[5];
            System.out.println(str);
            word.set(str);
            //id.set(Integer.parseInt(data[5]));
            context.write(word, id);
        }
    }

    // Job 1 reducer: sum the counts and emit (count, "ip \t course")
    public static class doReducer extends Reducer<Text, IntWritable, IntWritable, Text> {
        private static IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            result.set(sum);
            context.write(result, key);
        }
    }

    // Job 2 mapper: parse "count \t ip \t course" and emit (count, "ip \t course")
    public static class doMapper1 extends Mapper<Object, Text, IntWritable, Text> {
        private static Text goods = new Text();
        private static IntWritable num = new IntWritable();

        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String arr[] = line.split("\t");
            String str = arr[1] + "\t" + arr[2];
            num.set(Integer.parseInt(arr[0]));
            goods.set(str);
            context.write(num, goods);
        }
    }

    // Job 2 reducer: keys arrive in descending order, so only the first 10 records overall are written
    public static class doReducer1 extends Reducer<IntWritable, Text, IntWritable, Text> {
        int i = 0;

        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            for (Text value : values) {
                if (i < 10) {
                    context.write(key, value);
                    i++;
                }
            }
        }
    }

    // Invert the default ascending order of IntWritable keys
    private static class IntWritableDecreasingComparator extends IntWritable.Comparator {
        public int compare(WritableComparable a, WritableComparable b) {
            return -super.compare(a, b);
        }

        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return -super.compare(b1, s1, l1, b2, s2, l2);
        }
    }
}

· Count the Top 10 most popular courses by traffic (traffic)

package mapreduce;

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class GetVideoResulttraffic {

    public static void main(String[] args) {
        try {
            // Job 1: sum the traffic per course/video
            Job job = Job.getInstance();
            job.setJobName("GetVideoResult");
            job.setJarByClass(GetVideoResulttraffic.class);
            job.setMapperClass(doMapper.class);
            job.setReducerClass(doReducer.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);
            job.setInputFormatClass(TextInputFormat.class);
            job.setOutputFormatClass(TextOutputFormat.class);
            Path in = new Path("hdfs://192.168.137.67:9000/mymapreducel/in/result.txt");
            Path out = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out3.1");
            FileInputFormat.addInputPath(job, in);
            FileOutputFormat.setOutputPath(job, out);
            //System.exit(job.waitForCompletion(true) ? 0:1);
            if (job.waitForCompletion(true)) {
                // Job 2: sort the traffic totals in descending order and keep the Top 10
                Job job1 = Job.getInstance();
                job1.setJobName("Sort");
                job1.setJarByClass(GetVideoResulttraffic.class);
                job1.setMapperClass(doMapper1.class);
                job1.setReducerClass(doReducer1.class);
                job1.setOutputKeyClass(IntWritable.class);
                job1.setOutputValueClass(Text.class);
                job1.setSortComparatorClass(IntWritableDecreasingComparator.class);
                job1.setInputFormatClass(TextInputFormat.class);
                job1.setOutputFormatClass(TextOutputFormat.class);
                Path in1 = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out3.1/part-r-00000");
                Path out1 = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out3.2");
                FileInputFormat.addInputPath(job1, in1);
                FileOutputFormat.setOutputPath(job1, out1);
                System.exit(job1.waitForCompletion(true) ? 0 : 1);
            }
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    // Job 1 mapper: emit (course/video id, traffic) for every record
    public static class doMapper extends Mapper<Object, Text, Text, IntWritable> {
        public static Text word = new Text();
        public static final IntWritable id = new IntWritable();

        @Override
        protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String[] data = value.toString().split(",");
            //String str=data[0]+"\t"+data[5];
            // Drop the trailing character so the traffic field parses as an integer
            data[3] = data[3].substring(0, data[3].length() - 1);
            word.set(data[5]);
            id.set(Integer.parseInt(data[3]));
            context.write(word, id);
        }
    }

    // Job 1 reducer: sum the traffic and emit (traffic, id)
    public static class doReducer extends Reducer<Text, IntWritable, IntWritable, Text> {
        private static IntWritable result = new IntWritable();

        public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int sum = 0;
            for (IntWritable value : values) {
                sum += value.get();
            }
            result.set(sum);
            context.write(result, key);
        }
    }

    // Job 2 mapper: parse "traffic \t id" and emit (traffic, id)
    public static class doMapper1 extends Mapper<Object, Text, IntWritable, Text> {
        private static Text goods = new Text();
        private static IntWritable num = new IntWritable();

        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String arr[] = line.split("\t");
            num.set(Integer.parseInt(arr[0]));
            goods.set(arr[1]);
            context.write(num, goods);
        }
    }

    // Job 2 reducer: keys arrive in descending order, so only the first 10 records overall are written
    public static class doReducer1 extends Reducer<IntWritable, Text, IntWritable, Text> {
        int i = 0;

        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            for (Text value : values) {
                if (i < 10) {
                    context.write(key, value);
                    i++;
                }
            }
        }
    }

    // Invert the default ascending order of IntWritable keys
    private static class IntWritableDecreasingComparator extends IntWritable.Comparator {
        public int compare(WritableComparable a, WritableComparable b) {
            return -super.compare(a, b);
        }

        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return -super.compare(b1, s1, l1, b2, s2, l2);
        }
    }
}

3. The statistics were not loaded into MySQL in the end; instead, the MapReduce results are visualized with ECharts.
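If the results were to be loaded into MySQL as stage 3 originally planned, a small JDBC loader would be enough. The sketch below is a hypothetical example rather than project code: the JDBC URL, credentials, table name top10_video, and local file path are all assumptions, and it expects the MySQL JDBC driver on the classpath.

package mapreduce;

import java.io.BufferedReader;
import java.io.FileReader;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;

// Hypothetical loader: reads a local copy of part-r-00000 ("count \t name" per line)
// and inserts each row into a MySQL table.
public class LoadTop10ToMysql {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:mysql://localhost:3306/result_db?useSSL=false&characterEncoding=utf8";
        try (Connection conn = DriverManager.getConnection(url, "root", "password");
             PreparedStatement ps = conn.prepareStatement(
                     "INSERT INTO top10_video (visits, name) VALUES (?, ?)");
             BufferedReader reader = new BufferedReader(new FileReader("part-r-00000"))) {
            String line;
            while ((line = reader.readLine()) != null) {
                String[] fields = line.split("\t");   // count \t video/article id
                ps.setInt(1, Integer.parseInt(fields[0]));
                ps.setString(2, fields[1]);
                ps.executeUpdate();
            }
        }
    }
}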

The data is first processed with MapReduce, and the result is then visualized in a JSP page.

package mapreduce3;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.Reducer.Context;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class Pai {

    // Filled by run() and read from the JSP page
    public static List<String> Names = new ArrayList<String>();
    public static List<String> Values = new ArrayList<String>();

    // Comparator that sorts IntWritable keys in descending order
    public static class Sort extends WritableComparator {
        public Sort() {
            super(IntWritable.class, true);
        }

        @Override
        public int compare(WritableComparable a, WritableComparable b) {
            return -a.compareTo(b);
        }
    }

    // Parse "count \t name" lines and emit (count, name)
    public static class Map extends Mapper<Object, Text, IntWritable, Text> {
        private static Text Name = new Text();
        private static IntWritable num = new IntWritable();

        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            String line = value.toString();
            String arr[] = line.split("\t");
            if (!arr[0].startsWith(" ")) {
                num.set(Integer.parseInt(arr[0]));
                Name.set(arr[1]);
                context.write(num, Name);
            }
        }
    }

    // Keys arrive in descending order; cache the Top 10 for the JSP and write everything out
    public static class Reduce extends Reducer<IntWritable, Text, IntWritable, Text> {
        int i = 0;

        public void reduce(IntWritable key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
            for (Text val : values) {
                if (i < 10) {
                    i = i + 1;
                    Names.add(val.toString());
                    Values.add(key.toString());
                }
                context.write(key, val);
            }
        }
    }

    public static int run() throws IOException, ClassNotFoundException, InterruptedException {
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://192.168.137.67:9000");
        FileSystem fs = FileSystem.get(conf);
        Job job = Job.getInstance(conf, "OneSort");
        job.setJarByClass(Pai.class);
        job.setMapperClass(Map.class);
        job.setReducerClass(Reduce.class);
        job.setSortComparatorClass(Sort.class);
        job.setOutputKeyClass(IntWritable.class);
        job.setOutputValueClass(Text.class);
        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        Path in = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out1.2/part-r-00000");
        Path out = new Path("hdfs://192.168.137.67:9000/mymapreducelShiYan/out1.4");
        FileInputFormat.addInputPath(job, in);
        // Remove a previous output directory so the job can be re-run
        fs.delete(out, true);
        FileOutputFormat.setOutputPath(job, out);
        return (job.waitForCompletion(true) ? 0 : 1);
    }
}

zhu.jsp

<%@page import="mapreduce3.Pai"%>
<%@page import="mapreduce3.GetVideoResult"%>
<%@ page language="java" import="java.util.*" contentType="text/html; charset=UTF-8"
pageEncoding="UTF-8"%>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Insert title here</title>
<script src="${pageContext.request.contextPath}/resource/echarts.js"></script>
</head>
<body>
<%
    Pai ss = new Pai();
    ss.run();
    String[] a = new String[11];
    String[] b = new String[11];
    int i = 0;
    for (i = 0; i < 10; i++) {
        a[i] = ss.Values.get(i);
        b[i] = ss.Names.get(i);
    }
%>
<div id="main" style="width: 600px;height:400px;"></div>
<script type="text/javascript">
// Initialize the ECharts instance on the prepared DOM node
var myChart = echarts.init(document.getElementById('main'));
// Specify the chart options and data
var option = {
    title: {
        text: 'Top 10 most popular videos/articles'
    },
    tooltip: {},
    legend: {
        data: ['Count']
    },
    xAxis: {
        data: [
            <% for (i = 0; i < 10; i++) { %>'<%=b[i]%>',<% } %>
        ]
    },
    yAxis: {},
    series: [{
        name: 'Most popular articles',
        type: 'bar',
        data: [
            <% for (i = 0; i < 10; i++) { %><%=a[i]%>,<% } %>
        ]
    }]
};
// Render the chart with the options and data just defined
myChart.setOption(option);
</script>
</body>
</html>

Since the data-processing code for the other statistics has already been shown above, it is not repeated here; only the JSP files are listed. To change the type of chart, copy an example straight from the ECharts official site into the JSP and adapt it.

zhe.jsp

<%@page import="mapreduce3.Pai1"%>
<%@page import="mapreduce3.GetVideoResult"%>
<%@ page language="java" import="java.util.*" contentType="text/html; charset=UTF-8"
pageEncoding="UTF-8"%>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Insert title here</title>
<script src="${pageContext.request.contextPath}/resource/echarts.js"></script>
</head>
<body>
<%
    Pai1 ss = new Pai1();
    ss.run();
    String[] a = new String[11];
    String[] b = new String[11];
    int i = 0;
    for (i = 0; i < 10; i++) {
        a[i] = ss.Values.get(i);
        b[i] = ss.Names.get(i);
    }
%>
<div id="main" style="width: 600px;height:400px;"></div>
<script type="text/javascript">
// Initialize the ECharts instance on the prepared DOM node
var myChart = echarts.init(document.getElementById('main'));
// Specify the chart options and data
var option = {
    title: {
        text: 'Most popular courses by city'
    },
    tooltip: {},
    legend: {
        data: ['Count']
    },
    xAxis: {
        data: [
            <% for (i = 0; i < 10; i++) { %>'<%=b[i]%>',<% } %>
        ]
    },
    yAxis: {},
    series: [{
        name: 'Most popular articles',
        type: 'line',
        data: [
            <% for (i = 0; i < 10; i++) { %><%=a[i]%>,<% } %>
        ]
    }]
};
// Render the chart with the options and data just defined
myChart.setOption(option);
</script>
</body>
</html>

tu.jsp

<%@page import="mapreduce3.Pai2"%>
<%@page import="mapreduce3.GetVideoResult"%>
<%@ page language="java" import="java.util.*" contentType="text/html; charset=UTF-8"
pageEncoding="UTF-8"%>
<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>Insert title here</title>
<script src="${pageContext.request.contextPath}/resource/echarts.js"></script>
</head>
<body>
<%
    Pai2 ss = new Pai2();
    ss.run();
    String[] a = new String[11];
    String[] b = new String[11];
    int i = 0;
    for (i = 0; i < 10; i++) {
        a[i] = ss.Values.get(i);
        b[i] = ss.Names.get(i);
    }
%>
<div id="main" style="width: 600px;height:400px;"></div>
<script type="text/javascript">
// Initialize the ECharts instance on the prepared DOM node
var myChart = echarts.init(document.getElementById('main'));
// Specify the chart options and data
var option = {
    title: {
        text: 'Most popular courses by traffic',
        x: 'center'
    },
    tooltip: {
        trigger: 'item',
        formatter: "{a} <br/>{b} : {c} ({d}%)"
    },
    legend: {
        orient: 'vertical',
        left: 'left',
        data: [
            <% for (i = 0; i < 10; i++) { %>'<%=b[i]%>',<% } %>
        ]
    },
    series: [
        {
            name: 'Access source',
            type: 'pie',
            radius: '55%',
            center: ['50%', '60%'],
            data: [
                <% for (i = 0; i < 10; i++) { %>{value: <%=a[i]%>, name: '<%=b[i]%>'},<% } %>
            ],
            itemStyle: {
                emphasis: {
                    shadowBlur: 10,
                    shadowOffsetX: 0,
                    shadowColor: 'rgba(0, 0, 0, 0.5)'
                }
            }
        }
    ]
};
// Render the chart with the options and data just defined
myChart.setOption(option);
</script>
</body>
</html>
