Parsing JSON and writing it into MySQL

The script below pulls several JRJ stock-news feeds (JS/JSONP endpoints), parses the JSON payloads, and inserts the current day's records into a MySQL table.
import json
import requests
import pymysql
from pprint import pprint
from datetime import datetime

# Midnight of the current day; records older than this are skipped below
dt = datetime.now()
todayy = datetime(dt.year, dt.month, dt.day, 0, 0, 0)

# MySQL connection settings
mysql_110 = {
    "host": "172.18.28.110",
    "port": 3306,
    "user": "rduser",
    "password": "*****8",
    "db": "test",
    "charset": "utf8",
}

# Connect to MySQL
mysql_conn = pymysql.connect(**mysql_110)
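# (pymysql does not autocommit by default, so the mysql_conn.commit() calls after each
#  batch below are what actually persist the DELETE/INSERT statements.)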
sql0 = """
CREATE TABLE `jrj_news_original` (
  `seq` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT 'sequence number, surrogate primary key',
  `news_id` int(8) NOT NULL COMMENT 'news id',
  `title` varchar(300) NOT NULL COMMENT 'news title',
  `detail` mediumtext DEFAULT NULL COMMENT 'abstract',
  `list_date` datetime NOT NULL COMMENT 'publish time',
  `key_word` varchar(100) DEFAULT NULL COMMENT 'keywords',
  `stk_code` varchar(10) DEFAULT NULL COMMENT 'stock code',
  `stk_sname` varchar(30) DEFAULT NULL COMMENT 'stock short name',
  `img_url` mediumtext DEFAULT NULL COMMENT 'image URL',
  `info_url` varchar(100) DEFAULT NULL COMMENT 'full-text URL',
  `content` text DEFAULT NULL COMMENT 'content',
  `infocls` varchar(6) DEFAULT NULL COMMENT 'column ID',
  `channum` varchar(3) DEFAULT NULL COMMENT 'channel ID',
  `cls` int(6) DEFAULT NULL COMMENT 'category ID',
  `from_type` int(3) NOT NULL COMMENT 'source category',
  PRIMARY KEY (`seq`),
  KEY `list_date` (`list_date`,`news_id`,`stk_code`)
) ENGINE=InnoDB AUTO_INCREMENT=1341 DEFAULT CHARSET=utf8 COMMENT='news source table\r\ndata source: JS-format links provided by the Beijing side\r\nrefresh frequency: updated every 3 minutes\r\n';
"""
# Create a cursor
cursor = mysql_conn.cursor()

# Delete today's rows first, so the script can be re-run without duplicating data
deleSql = '''
DELETE FROM JRJ_NEWS_ORIGINAL
WHERE list_date >= CURDATE()
'''
cursor.execute(deleSql)
# Fetch all query results (not needed for a DELETE)
# datas = cursor.fetchall()
# Listed-company feed; CLS = 100003, FROM_TYPE = 4
companyUrl = 'http://stock.jrj.com.cn/share/news/app/company/' + todayy.strftime('%Y-%m-%d') + '.js'
hml1 = requests.get(companyUrl)
# The response is JS/JSONP-wrapped; strip the wrapper to get the JSON array
# print(hml1.text[27:][:-6])
tex = json.loads(hml1.text[27:][:-6])
print('Listed-company sample record:\n', tex[0])
len1 = len(tex)
print(len1)
for i in range(len1):
    # Records are ordered newest first; stop once we reach yesterday's news
    if datetime.strptime(tex[i][0]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    print("----- inserting company record %d -----" % (i + 1))
    # print(tex[i][0]['stockcode'].split(','))
    if not tex[i][0]['stockcode']:
        # No stock code attached to this item
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,IMG_URL,INFO_URL,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,100003,4);'''
        args1 = [tex[i][0]['iiid'], tex[i][0]['title'].replace('"', "'"), tex[i][0]['detail'].replace('"', "'"),
                 tex[i][0]['makedate'], tex[i][0]['imgurl'], tex[i][0]['infourl']]
        cursor.execute(sql1, args1)
    elif len(tex[i][0]['stockcode'].split(',')) == 1:
        # Exactly one stock code
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100003,4);'''
        args2 = [tex[i][0]['iiid'], tex[i][0]['title'].replace('"', "'"), tex[i][0]['detail'].replace('"', "'"),
                 tex[i][0]['makedate'], tex[i][0]['stockcode'], tex[i][0]['stockname'],
                 tex[i][0]['imgurl'], tex[i][0]['infourl']]
        cursor.execute(sql2, args2)
    else:
        # Several comma-separated stock codes: insert one row per code
        for x in range(len(tex[i][0]['stockcode'].split(','))):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,CLS,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100003,4);'''
            args3 = [tex[i][0]['iiid'], tex[i][0]['title'], tex[i][0]['detail'], tex[i][0]['makedate'],
                     tex[i][0]['stockcode'].split(',')[x], tex[i][0]['stockname'].split(',')[x],
                     tex[i][0]['imgurl'], tex[i][0]['infourl']]
            cursor.execute(sql3, args3)
# Commit the company-feed batch
mysql_conn.commit()
# "Chance Early Know" feed; CLS = 100001, FROM_TYPE = 2
chanceUrl = 'http://stock.jrj.com.cn/share/news/app/qingbao/' + todayy.strftime('%Y-%m-%d') + '.js'
html2 = requests.get(chanceUrl)
# Strip the JS wrapper to get the JSON array
# print(html2.text[26:][:-2])
txt = json.loads(html2.text[26:][:-2])
print('Chance Early Know sample record:\n', txt[0])
len1 = len(txt)
print(len1)
for i in range(len1):
    # print(txt[i]['stockcode'].split(','))
    if datetime.strptime(txt[i]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    print("----- inserting Chance Early Know record %d -----" % (i + 1))
    if not txt[i]['stockcode']:
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100001,2);'''
        args1 = [txt[i]['iiid'], txt[i]['title'], txt[i]['detail'], txt[i]['makedate'],
                 txt[i]['keyword'], txt[i]['imgurl'], txt[i]['infourl'], txt[i]['content']]
        cursor.execute(sql1, args1)
    elif len(txt[i]['stockcode'].split(',')) == 1:
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100001,2);'''
        args2 = [txt[i]['iiid'], txt[i]['title'], txt[i]['detail'], txt[i]['makedate'], txt[i]['keyword'],
                 txt[i]['stockcode'], txt[i]['imgurl'], txt[i]['infourl'], txt[i]['content']]
        cursor.execute(sql2, args2)
    else:
        # One row per comma-separated stock code
        for x in range(len(txt[i]['stockcode'].split(','))):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100001,2);'''
            args3 = [txt[i]['iiid'], txt[i]['title'], txt[i]['detail'], txt[i]['makedate'], txt[i]['keyword'],
                     txt[i]['stockcode'].split(',')[x], txt[i]['imgurl'], txt[i]['infourl'], txt[i]['content']]
            cursor.execute(sql3, args3)
# Commit this batch
mysql_conn.commit()
# Limit-up/limit-down revealed feed; CLS = 100002, FROM_TYPE = 3
secretUrl = 'http://stock.jrj.com.cn/share/news/app/zhangting/' + todayy.strftime('%Y-%m-%d') + '.js'
html3 = requests.get(secretUrl)
txt2 = json.loads(html3.text[26:][:-2])
len2 = len(txt2)
for i in range(len2):
    # print(txt2[i]['stockcode'].split(','))
    if datetime.strptime(txt2[i]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    print("----- inserting limit-up/limit-down record %d -----" % (i + 1))
    if not txt2[i]['stockcode']:
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100002,3);'''
        args1 = [txt2[i]['iiid'], txt2[i]['title'], txt2[i]['detail'], txt2[i]['makedate'],
                 txt2[i]['keyword'], txt2[i]['imgurl'], txt2[i]['infourl'], txt2[i]['content']]
        cursor.execute(sql1, args1)
    elif len(txt2[i]['stockcode'].split(',')) == 1:
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100002,3);'''
        args2 = [txt2[i]['iiid'], txt2[i]['title'], txt2[i]['detail'], txt2[i]['makedate'], txt2[i]['keyword'],
                 txt2[i]['stockcode'], txt2[i]['imgurl'], txt2[i]['infourl'], txt2[i]['content']]
        cursor.execute(sql2, args2)
    else:
        # One row per comma-separated stock code
        for x in range(len(txt2[i]['stockcode'].split(','))):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100002,3);'''
            args3 = [txt2[i]['iiid'], txt2[i]['title'], txt2[i]['detail'], txt2[i]['makedate'], txt2[i]['keyword'],
                     txt2[i]['stockcode'].split(',')[x], txt2[i]['imgurl'], txt2[i]['infourl'], txt2[i]['content']]
            cursor.execute(sql3, args3)
# Commit this batch
mysql_conn.commit()
import random

# 乌兰木 data feed; no CLS, FROM_TYPE = 1
# The random digits are appended only as a cache-busting query string
noTypeUrl = 'http://mapp.jrj.com.cn/co/zk/1.js?' + str(random.randint(1, 9)) + str(random.randint(1, 9))
print(noTypeUrl)
hml1 = requests.get(noTypeUrl)
tx = hml1.json()
print(len(tx['data']))
len4 = len(tx['data'])
data1 = tx['data']
for i in range(len4):
    print("----- inserting record %d -----" % (i + 1))
    if datetime.strptime(data1[i]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    # Note: the AUTHOR column used below is not part of the sql0 DDL above,
    # so it must already exist in the live table
    if not data1[i]['infostocks']:
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,IMG_URL,INFO_URL,INFOCLS,CHANNUM,AUTHOR,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,1);'''
        args1 = [data1[i]['iiid'], data1[i]['title'], data1[i]['detail'], data1[i]['makedate'], data1[i]['keyword'],
                 data1[i]['imgurl'], data1[i]['infourl'], data1[i]['infocls'], data1[i]['channum'], data1[i]['author']]
        cursor.execute(sql1, args1)
    elif len(data1[i]['infostocks']) == 1:
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,INFOCLS,CHANNUM,AUTHOR,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,1);'''
        args2 = [data1[i]['iiid'], data1[i]['title'], data1[i]['detail'], data1[i]['makedate'], data1[i]['keyword'],
                 data1[i]['infostocks'][0]['stockcode'], data1[i]['infostocks'][0]['stockname'], data1[i]['imgurl'],
                 data1[i]['infourl'], data1[i]['infocls'], data1[i]['channum'], data1[i]['author']]
        cursor.execute(sql2, args2)
    else:
        # One row per attached stock
        for x in range(len(data1[i]['infostocks'])):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,INFOCLS,CHANNUM,AUTHOR,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,1);'''
            args3 = [data1[i]['iiid'], data1[i]['title'], data1[i]['detail'], data1[i]['makedate'], data1[i]['keyword'],
                     data1[i]['infostocks'][x]['stockcode'], data1[i]['infostocks'][x]['stockname'], data1[i]['imgurl'],
                     data1[i]['infourl'], data1[i]['infocls'], data1[i]['channum'], data1[i]['author']]
            cursor.execute(sql3, args3)
# Commit the final batch and clean up
mysql_conn.commit()
cursor.close()
mysql_conn.close()
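The four feeds above repeat nearly the same branching on the number of attached stock codes. As a hedged cleanup sketch (not part of the original script; the helper name insert_news_rows and its signature are made up for illustration), the INSERT could be built dynamically and executemany used to write one row per stock code:

def insert_news_rows(cursor, columns, values, stockcodes='', stocknames=''):
    """Insert one row per stock code, or a single row if no code is attached.

    columns / values hold the feed-specific fields; stockcodes / stocknames are the
    comma-separated strings from the JSON record (either may be empty).
    """
    codes = [c for c in stockcodes.split(',') if c] if stockcodes else []
    names = stocknames.split(',') if stocknames else []
    if not codes:
        cols = columns
        rows = [tuple(values)]
    else:
        cols = columns + ['STK_CODE', 'STK_SNAME']
        rows = [tuple(values) + (code, names[k] if k < len(names) else None)
                for k, code in enumerate(codes)]
    placeholders = ','.join(['%s'] * len(cols))
    sql = 'INSERT INTO JRJ_NEWS_ORIGINAL(%s) VALUES(%s)' % (','.join(cols), placeholders)
    cursor.executemany(sql, rows)

The listed-company branch, for example, could then shrink to a single call per record:

item = tex[i][0]
insert_news_rows(
    cursor,
    ['NEWS_ID', 'TITLE', 'DETAIL', 'LIST_DATE', 'IMG_URL', 'INFO_URL', 'CLS', 'FROM_TYPE'],
    [item['iiid'], item['title'], item['detail'], item['makedate'],
     item['imgurl'], item['infourl'], 100003, 4],
    stockcodes=item['stockcode'], stocknames=item['stockname'])

Each feed would then differ only in its column list and in how it maps fields out of its JSON record.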