hadoop —— django 简单操作 HDFS 语句
from django.shortcuts import render
# Create your views here.
from django.views import View
from hdfs.client import Client
import os #
# # 关于python操作hdfs的API可以查看官网:
# # https://hdfscli.readthedocs.io/en/latest/api.html
#
# # 读取hdfs文件内容,将每行存入数组返回
# def read_hdfs_file(client, filename):
# lines = []
# with client.read(filename, encoding='utf-8', delimiter='\n') as reader:
# for line in reader:
# # pass
# # print line.strip()
# lines.append(line.strip())
# return lines
#
#
# # 创建目录
# def mkdirs(client, hdfs_path):
# client.makedirs(hdfs_path)
#
#
# # 删除hdfs文件
# def delete_hdfs_file(client, hdfs_path):
# client.delete(hdfs_path)
#
#
# # 上传文件到hdfs
def put_to_hdfs(client, local_path, hdfs_path):
    """Upload a local file into HDFS at ``hdfs_path``.

    ``cleanup=True`` makes hdfscli delete any partially uploaded remote
    data if the transfer fails, so a failed upload leaves no debris.
    """
    client.upload(hdfs_path, local_path, cleanup=True)
# # 从hdfs获取文件到本地
# def get_from_hdfs(client, hdfs_path, local_path):
# download(hdfs_path, local_path, overwrite=False)
#
#
# # 追加数据到hdfs文件
# def append_to_hdfs(client, hdfs_path, data):
# client.write(hdfs_path, data, overwrite=False, append=True)
#
#
# # 覆盖数据写到hdfs文件
def write_to_hdfs(client, hdfs_path, data):
    """Overwrite ``hdfs_path`` with ``data`` (created if absent).

    overwrite=True / append=False makes this a full replace, in contrast
    to the commented-out append_to_hdfs variant above.
    """
    client.write(hdfs_path, data, overwrite=True, append=False)
# # 移动或者修改文件
def move_or_rename(client, hdfs_src_path, hdfs_dst_path):
    """Move (or rename) a file/directory within HDFS."""
    client.rename(hdfs_src_path, hdfs_dst_path)
#
#
# # 返回目录下的文件
# def list(client, hdfs_path):
# return client.list(hdfs_path, status=False)
#
#
# # root:连接的跟目录
# client = Client("http://192.168.88.129:50070",
# root="/", timeout=5 * 1000, session=False)
# # put_to_hdfs(client,'a.csv','/user/root/a.csv')
# # append_to_hdfs(client,'/b.txt','111111111111111'+'\n')
# # write_to_hdfs(client,'/b.txt','222222222222'+'\n')
# # move_or_rename(client,'/b.txt', '/user/b.txt')
# mkdirs(client, '/input1/python1')
# print(list(client,'/input'))
# read_hdfs_file(client,'/')
# client.list("/")
def mkdirs(client, hdfs_path):
    """Create ``hdfs_path`` on HDFS, including intermediate directories."""
    client.makedirs(hdfs_path)


def get_IS_File(client, hdfs_paht):
    """Return True when the HDFS path is a plain file (not a directory).

    NOTE(review): parameter name ``hdfs_paht`` is a typo in the original;
    kept as-is so keyword callers are not broken — consider renaming.
    """
    return client.status(hdfs_paht)['type'] == 'FILE'
from hadoop_hdfs.settings import UPLOAD_ROOT
import os


# NOTE(review): this re-definition duplicates put_to_hdfs defined earlier
# in the file; at import time this later binding wins. Kept for fidelity —
# consider removing one copy.
def put_to_hdfs(client, local_path, hdfs_path):
    """Upload a local file into HDFS at ``hdfs_path``.

    ``cleanup=True`` deletes any partially uploaded remote data if the
    transfer fails.
    """
    client.upload(hdfs_path, local_path, cleanup=True)
# Module-level WebHDFS client shared by the views below; root="/" makes
# all HDFS paths resolve from the filesystem root.
# NOTE(review): timeout=5 * 1000 evaluates to 5000 — hdfscli forwards
# this to requests in seconds, so this looks like a ms/s mix-up; confirm.
client = Client("http://192.168.88.129:50070",
root="/", timeout=5 * 1000, session=False)
class Index(View):
    """Upload page: GET renders the form, POST saves the uploaded file
    locally and inspects the HDFS root directory.

    NOTE(review): this block was line-mangled in the source (several
    statements fused onto single lines); the statement grouping below is
    a best-effort reconstruction — verify against the original file.
    """

    def get(self, request):
        # Plain form page.
        return render(request, "index.html")

    def post(self, request):
        def uploadfile(img):
            # Stream the uploaded file into UPLOAD_ROOT chunk by chunk;
            # ``with`` closes the handle even if a write fails.
            with open(os.path.join(UPLOAD_ROOT, '', img.name), 'wb') as f:
                for chunk in img.chunks():
                    f.write(chunk)

        def read_hdfs_file(hdfs_client, filename):
            # Read an HDFS text file and return its lines, stripped.
            lines = []
            with hdfs_client.read(filename, encoding='utf-8',
                                  delimiter='\n') as reader:
                for line in reader:
                    lines.append(line.strip())
            return lines

        # Save the posted file to local disk first.
        file = request.FILES.get("file")
        uploadfile(file)

        # Report, for every entry under HDFS "/", whether it is a file.
        all_file = client.list("/")
        for i in all_file:
            file_true = get_IS_File(client, "/{}".format(i)), i
            print(file_true)

        # Create an HDFS directory named after the uploaded file.
        # NOTE(review): the mangled source also contained an extra
        # ``return render(...)`` before this call, amid commented-out
        # experiments; it is treated as debug leftover here — confirm.
        mkdirs(client, file.name)
        return render(request, "index.html", locals())
Views.py
# 导入必要模块
import pandas as pd
from sqlalchemy import create_engine
from matplotlib import pylab as plt
from django.views import View
from django.shortcuts import render
import os
from hadoop_hdfs.settings import UPLOAD_ROOT
from web.models import *
def uploadfile(img):
    """Save a Django uploaded file under UPLOAD_ROOT using its own name.

    ``img`` must expose ``name`` and ``chunks()`` (a Django
    ``UploadedFile``). Writing chunk by chunk keeps memory use bounded
    for large uploads.
    """
    # ``with`` guarantees the handle is closed even if a write raises
    # (the original closed it manually and leaked the handle on error).
    with open(os.path.join(UPLOAD_ROOT, '', img.name), 'wb') as f:
        for chunk in img.chunks():
            f.write(chunk)
class Upload(View):
    """Image upload page: GET shows the form, POST stores the image on
    disk and records its served path in the database."""

    def get(self, request):
        # show = Image.objects.all()  # (left disabled in the original)
        return render(request, "haha.html", locals())

    def post(self, request):
        imgs = request.FILES.get('img')
        uploadfile(imgs)
        # NOTE(review): the original named this local ``all``, shadowing
        # the builtin — renamed.
        record = Image(img="/upload/" + imgs.name)
        record.save()
        return render(request, "haha.html")
# def post(self,request):
# # 初始化数据库连接,使用pymysql模块
# # MySQL的用户:root, 密码:147369, 端口:3306,数据库:mydb
# engine = create_engine('mysql+pymysql://root:@127.0.0.1/dj')
#
# # 查询语句,选出employee表中的所有数据
# sql = '''select * from haha;'''
#
# # read_sql_query的两个参数: sql语句, 数据库连接
# df = pd.read_sql_query(sql, engine)
#
# # 输出employee表的查询结果
# print(df)
#
# # 新建pandas中的DataFrame, 只有id,num两列
# # df = pd.DataFrame({'id':[1,2],'name': ['111','222'],'image_url':['http://111','http://222']})
# # df = pd.DataFrame({"id":df['sentiment'],"text":df['text'],})
# df.groupby(by='sentiment').count()['text'].plot.pie(autopct="%0.4f%%", subplots=True)
# plt.savefig("../upload/1.jpg")
# plt.show()
# # 将新建的DataFrame储存为MySQL中的数据表,不储存index列
# # df.to_sql(name='lallala', con=engine, index=False)
#
# print('Read from and write to Mysql table successfully!')
# return render(request,"haha.html",locals())


class Uploads(View):
    """Listing page: renders haha.html with every stored Image row.

    NOTE(review): in the mangled source this class header was fused onto
    the trailing commented-out line above, which made the definition
    unparsable; restored here as a top-level class.
    """

    def get(self, request):
        show = Image.objects.all()
        # locals() exposes ``show`` (and request) to the template.
        return render(request, "haha.html", locals())
haha.py
hadoop 》》 django 简单操作hdfs 语句的更多相关文章
- Java 简单操作hdfs API
注:图片如果损坏,点击文章链接:https://www.toutiao.com/i6632047118376780295/ 启动Hadoop出现问题:datanode的clusterID 和 name ...
- Hadoop Java API操作HDFS文件系统(Mac)
1.下载Hadoop的压缩包 tar.gz https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/stable/ 2.关联jar包 在 ...
- java 简单操作HDFS
创建java 项目 package com.yw.hadoop273; import org.apache.hadoop.conf.Configuration; import org.apache.h ...
- Django简单操作
一.静态文件配置 静态文件配置 STATIC_URL = '/static/' STATICFILES_DIRS = [ os.path.join(BASE_DIR,'static') ] # 暴露给 ...
- hadoop 使用java操作hdfs
1.创建目录 import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.ha ...
- mysql 首次安装后 简单操作与语句 新手入门
首先cd到安装目录中bin路径:这是我的安装路径以管理员身份打开cmd(防止权限不足)cd E:\>cd E:\mysql\mysql-5.5.40-winx64\bin 首次安装需要输入 my ...
- python django简单操作
准备: pip3 install django==1.10.3 cmd django-admin startproject guest 创建一个guest的项目 cd guest manage. ...
- HDFS介绍及简单操作
目录 1.HDFS是什么? 2.HDFS设计基础与目标 3.HDFS体系结构 3.1 NameNode(NN)3.2 DataNode(DN)3.3 SecondaryNameNode(SNN)3.4 ...
- Django简单的数据库操作
当然,本篇的前提是你已经配置好了相关的环境,这里就不详细介绍. 一. 在settings.py文件中设置数据库属性. 如下: DATABASES = { 'default': { 'ENGINE': ...
随机推荐
- 7月清北学堂培训 Day 6
今天是钟皓曦老师的讲授~ 合并石子拓展: 合并任意两堆石子,每次合并的代价是这两堆石子的重量的异或值,求合并成一堆的最小异或和. 状态设置:f [ s ] 把 s 所对应的石子合并的最小代价: 那么答 ...
- 面试题_Spring高级篇
Spring高级篇 1.什么是 Spring 框架? Spring 框架有哪些主要模块? Spring 框架是一个为 Java 应用程序的开发提供了综合.广泛的基础性支持的 Java 平台. Spr ...
- UVALive 4976 Defense Lines ——(LIS变形)
题意:给出序列,能够从这序列中删去连续的一段,问剩下的序列中的最长的严格上升子串的长度是多少. 这题颇有点LIS的味道.因为具体做法就是维护一个单调的集合,然后xjbg一下即可.具体的见代码吧: #i ...
- 清空echarts的option
将相应的echarts的option治为空 $("#tt5sbmc").html("");
- 【Python】使用Beautiful Soup等三种方式定制Jmeter测试脚本
背景介绍 我们在做性能调优时,时常需要根据实际压测的情况,调整线程组的参数,比如循环次数,线程数,所有线程启动的时间等. 如果是在一台Linux机器上,就免不了在本机打开图形页面修改,然后最后传递到压 ...
- Leetcode题目292.Nim游戏(脑筋急转弯)
题目描述: 你和你的朋友,两个人一起玩 Nim 游戏:桌子上有一堆石头,每次你们轮流拿掉 1 - 3 块石头. 拿掉最后一块石头的人就是获胜者.你作为先手. 你们是聪明人,每一步都是最优解. 编写一个 ...
- docker 命令 记录
获取指定容器的ip docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' container_nam ...
- Python——装饰器(Decorator)
1.什么是装饰器? 装饰器放在一个函数开始定义的地方,它就像一顶帽子一样戴在这个函数的头上.和这个函数绑定在一起.在我们调用这个函数的时候,第一件事并不是执行这个函数,而是将这个函数做为参数传入它头顶 ...
- websocket原理、为何能实现持久连接?
WebSocket 是 HTML5 一种新的协议.它实现了浏览器与服务器全双工通信,能更好的节省服务器资源和带宽并达到实时通讯,它建立在 TCP 之上,同 HTTP 一样通过 TCP 来传输数据,但是 ...
- shell 變數
echo $? 上个命令的退出状态,或函数的返回值. ref: http://c.biancheng.net/cpp/view/2739.html