hadoop » django 简单操作 HDFS 语句

》》
from django.shortcuts import render
# Create your views here. from hdfs.client import Client
from django.views import View
from hdfs.client import Client
import os #
# # 关于python操作hdfs的API可以查看官网:
# # https://hdfscli.readthedocs.io/en/latest/api.html
#
# # 读取hdfs文件内容,将每行存入数组返回
# def read_hdfs_file(client, filename):
# lines = []
# with client.read(filename, encoding='utf-8', delimiter='\n') as reader:
# for line in reader:
# # pass
# # print line.strip()
# lines.append(line.strip())
# return lines
#
#
# # 创建目录
# def mkdirs(client, hdfs_path):
# client.makedirs(hdfs_path)
#
#
# # 删除hdfs文件
# def delete_hdfs_file(client, hdfs_path):
# client.delete(hdfs_path)
#
#
# # 上传文件到hdfs
def put_to_hdfs(client, local_path, hdfs_path):
    """Upload a local file to HDFS.

    Args:
        client: hdfs.client.Client (WebHDFS) instance.
        local_path: path of the local file to upload.
        hdfs_path: destination path on HDFS.
    """
    # cleanup=True removes partially uploaded files if the transfer fails.
    client.upload(hdfs_path, local_path, cleanup=True)
# # 从hdfs获取文件到本地
# def get_from_hdfs(client, hdfs_path, local_path):
# download(hdfs_path, local_path, overwrite=False)
#
#
# # 追加数据到hdfs文件
# def append_to_hdfs(client, hdfs_path, data):
# client.write(hdfs_path, data, overwrite=False, append=True)
#
#
# # 覆盖数据写到hdfs文件
def write_to_hdfs(client, hdfs_path, data):
    """Overwrite an HDFS file with *data* (creates it if missing).

    Args:
        client: hdfs.client.Client instance.
        hdfs_path: target file path on HDFS.
        data: string/bytes content to write.
    """
    # overwrite=True replaces existing content; append=False starts fresh.
    client.write(hdfs_path, data, overwrite=True, append=False)
# # 移动或者修改文件
def move_or_rename(client, hdfs_src_path, hdfs_dst_path):
    """Move or rename a file/directory on HDFS.

    Args:
        client: hdfs.client.Client instance.
        hdfs_src_path: existing path on HDFS.
        hdfs_dst_path: new path on HDFS.
    """
    client.rename(hdfs_src_path, hdfs_dst_path)
#
#
# # 返回目录下的文件
# def list(client, hdfs_path):
# return client.list(hdfs_path, status=False)
#
#
# # root:连接的跟目录
# client = Client("http://192.168.88.129:50070",
# root="/", timeout=5 * 1000, session=False)
# # put_to_hdfs(client,'a.csv','/user/root/a.csv')
# # append_to_hdfs(client,'/b.txt','111111111111111'+'\n')
# # write_to_hdfs(client,'/b.txt','222222222222'+'\n')
# # move_or_rename(client,'/b.txt', '/user/b.txt')
# mkdirs(client, '/input1/python1')
# print(list(client,'/input'))
# read_hdfs_file(client,'/')
# client.list("/")
def mkdirs(client, hdfs_path):
    """Create a directory (and any missing parents) on HDFS.

    Args:
        client: hdfs.client.Client instance.
        hdfs_path: directory path to create.
    """
    client.makedirs(hdfs_path)


def get_IS_File(client, hdfs_path):
    """Return True when *hdfs_path* is a regular file on HDFS.

    Args:
        client: hdfs.client.Client instance.
        hdfs_path: path to inspect.

    Returns:
        bool: True if the path's status type is 'FILE', False otherwise
        (e.g. for directories).
    """
    # status() returns a FileStatus dict; 'type' is 'FILE' or 'DIRECTORY'.
    # (Fixed parameter-name typo: was ``hdfs_paht``.)
    return client.status(hdfs_path)['type'] == 'FILE'
from hadoop_hdfs.settings import UPLOAD_ROOT
import os


def put_to_hdfs(client, local_path, hdfs_path):
    """Upload a local file to HDFS.

    NOTE(review): duplicate of the ``put_to_hdfs`` defined earlier in this
    file — consider removing one of the two definitions.
    """
    # cleanup=True removes partially uploaded files if the transfer fails.
    client.upload(hdfs_path, local_path, cleanup=True)


# Module-level WebHDFS client; root="/" makes all paths absolute.
# NOTE(review): hard-coded NameNode address/port — consider moving it into
# Django settings.
client = Client("http://192.168.88.129:50070",
                root="/", timeout=5 * 1000, session=False)
class Index(View):
    """Upload page.

    GET renders the form; POST saves the uploaded file under UPLOAD_ROOT,
    then lists the HDFS root directory and renders the page again.
    """

    def get(self, request):
        return render(request, "index.html")

    def post(self, request):
        def uploadfile(img):
            # Save the uploaded file into UPLOAD_ROOT chunk by chunk.
            # ``with`` guarantees the handle is closed even on write errors.
            with open(os.path.join(UPLOAD_ROOT, '', img.name), 'wb') as f:
                for chunk in img.chunks():
                    f.write(chunk)

        def read_hdfs_file(client, filename):
            # Read an HDFS text file and return its lines, stripped.
            lines = []
            with client.read(filename, encoding='utf-8', delimiter='\n') as reader:
                for line in reader:
                    lines.append(line.strip())
            return lines

        file = request.FILES.get("file")
        uploadfile(file)
        all_file = client.list("/")
        for i in all_file:
            # (is_file, name) tuple for each entry under the HDFS root.
            file_true = get_IS_File(client, "/{}".format(i)), i
            print(file_true)
            # NOTE(review): returning inside the loop means only the first
            # entry is processed and the mkdirs() call below is unreachable
            # whenever the listing is non-empty — confirm intended flow.
            return render(request, "index.html", locals())
        mkdirs(client, file.name)
        return render(request, "index.html", locals())
Views.py
# 导入必要模块
import pandas as pd
from sqlalchemy import create_engine
from matplotlib import pylab as plt
from django.views import View
from django.shortcuts import render
import os
from hadoop_hdfs.settings import UPLOAD_ROOT
from web.models import *
def uploadfile(img):
    """Save an uploaded file object under UPLOAD_ROOT, chunk by chunk.

    Args:
        img: Django UploadedFile-like object exposing ``.name`` and
            ``.chunks()``.
    """
    # ``with`` replaces the manual open/close pair so the handle is closed
    # even if writing a chunk raises.
    with open(os.path.join(UPLOAD_ROOT, '', img.name), 'wb') as f:
        for chunk in img.chunks():
            f.write(chunk)
class Upload(View):
    """Image-upload page: GET renders the form, POST stores the file and
    records it in the Image model."""

    def get(self, request):
        # show = Image.objects.all()
        return render(request, "haha.html", locals())

    def post(self, request):
        uploaded = request.FILES.get('img')
        # Write the file to disk, then persist its relative URL.
        uploadfile(uploaded)
        record = Image(img="/upload/" + uploaded.name)
        record.save()
        return render(request, "haha.html")
# def post(self,request):
# # 初始化数据库连接,使用pymysql模块
# # MySQL的用户:root, 密码:147369, 端口:3306,数据库:mydb
# engine = create_engine('mysql+pymysql://root:@127.0.0.1/dj')
#
# # 查询语句,选出employee表中的所有数据
# sql = '''select * from haha;'''
#
# # read_sql_query的两个参数: sql语句, 数据库连接
# df = pd.read_sql_query(sql, engine)
#
# # 输出employee表的查询结果
# print(df)
#
# # 新建pandas中的DataFrame, 只有id,num两列
# # df = pd.DataFrame({'id':[1,2],'name': ['111','222'],'image_url':['http://111','http://222']})
# # df = pd.DataFrame({"id":df['sentiment'],"text":df['text'],})
# df.groupby(by='sentiment').count()['text'].plot.pie(autopct="%0.4f%%", subplots=True)
# plt.savefig("../upload/1.jpg")
# plt.show()
# # 将新建的DataFrame储存为MySQL中的数据表,不储存index列
# # df.to_sql(name='lallala', con=engine, index=False)
#
# print('Read from and write to Mysql table successfully!')
# return render(request,"haha.html",locals())


class Uploads(View):
    """Render haha.html with every stored Image record."""

    def get(self, request):
        # ``locals()`` passes ``show`` into the template context.
        show = Image.objects.all()
        return render(request, "haha.html", locals())
haha.py
hadoop » django 简单操作 HDFS 语句的更多相关文章
- Java 简单操作hdfs API
注:图片如果损坏,点击文章链接:https://www.toutiao.com/i6632047118376780295/ 启动Hadoop出现问题:datanode的clusterID 和 name ...
- Hadoop Java API操作HDFS文件系统(Mac)
1.下载Hadoop的压缩包 tar.gz https://mirrors.tuna.tsinghua.edu.cn/apache/hadoop/common/stable/ 2.关联jar包 在 ...
- java 简单操作HDFS
创建java 项目 package com.yw.hadoop273; import org.apache.hadoop.conf.Configuration; import org.apache.h ...
- Django简单操作
一.静态文件配置 静态文件配置 STATIC_URL = '/static/' STATICFILES_DIRS = [ os.path.join(BASE_DIR,'static') ] # 暴露给 ...
- hadoop 使用java操作hdfs
1.创建目录 import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.ha ...
- mysql 首次安装后 简单操作与语句 新手入门
首先cd到安装目录中bin路径:这是我的安装路径以管理员身份打开cmd(防止权限不足)cd E:\>cd E:\mysql\mysql-5.5.40-winx64\bin 首次安装需要输入 my ...
- python django简单操作
准备: pip3 install django==1.10.3 cmd django-admin startproject guest 创建一个guest的项目 cd guest manage. ...
- HDFS介绍及简单操作
目录 1.HDFS是什么? 2.HDFS设计基础与目标 3.HDFS体系结构 3.1 NameNode(NN)3.2 DataNode(DN)3.3 SecondaryNameNode(SNN)3.4 ...
- Django简单的数据库操作
当然,本篇的前提是你已经配置好了相关的环境,这里就不详细介绍. 一. 在settings.py文件中设置数据库属性. 如下: DATABASES = { 'default': { 'ENGINE': ...
随机推荐
- laotech老师唠科mac 深入浅出MAC OS X ceshi ruguokeyi
laotech老师唠科mac 深入浅出MAC OS X http://study.163.com/plan/planLearn.htm?id=1637004#/learn/resVideo?lesso ...
- Js中Array常用方法小结
说起Array的方法,不免让人皱一下眉头,下面我们从增删改查角度依次来总结. 1.增 push: 将传入的参数 ,插入数组的尾部,并返回新数组的长度.不管传入参数为一个值还是一个数组,都作为插入数组的 ...
- 【C/C++】Linux的gcc和g++的区别
Windows中我们常用vs来编译编写好的C和C++代码:vs把编辑器,编译器和调试器等工具都集成在这一款工具中,在Linux下我们能用什么工具来编译所编写好的代码呢,其实Linux下这样的工具有很多 ...
- 安装Mysql-5.7.13脚本
安装Mysql-5.7.13,此脚本最后会查找到临时密码,后面登进数据库中更改密码 [root@ZHONG-LONG javascripts]# vim -mysql.sh #!/bin/bash # ...
- JAVA-Thread 线程的几种状态
Oracle JDK 定义中,线程一共有六种状态 https://docs.oracle.com/javase/8/docs/api/java/lang/Thread.State.html NEW:未 ...
- Flutter移动电商实战 --(18)首页_火爆专区商品接口制作
1.获取接口的方法 在service/service_method.dart里制作方法.我们先不接收参数,先把接口调通. Future getHomePageBeloConten() async{ t ...
- Linux学习:Makefile简介及模板
一.基本概念介绍: Makefile 文件就是告诉make命令需要怎么样的去编译和链接程序. 编写Makefile的基本规则: 1.如果这个工程没有编译过,那么我们的所有C文件都要编译并被链接. 2. ...
- Oracle查询表和字段
查看表字段.类型.注释 SELECT A.COLUMN_NAME,B.comments,A.DATA_TYPE FROM USER_TAB_COLUMNS A LEFT JOIN user_col_c ...
- python 时间对比
import datetimed1 = datetime.datetime.strptime('2015-03-05 17:41:20', '%Y-%m-%d %H:%M:%S')d2 = date ...
- JAVA 基础编程练习题25 【程序 25 求回文数】
25 [程序 25 求回文数] 题目:一个 5 位数,判断它是不是回文数.即 12321 是回文数,个位与万位相同,十位与千位相同. package cskaoyan; public class cs ...