import json
import requests
import pymysql

from pprint import pprint
from datetime import datetime

dt = datetime.now()
# Midnight of the current day; only records published today are loaded.
todayy = datetime(dt.year, dt.month, dt.day, 0, 0, 0)

mysql_110 = {
    "host": "172.18.28.110",
    "port": 3306,
    "user": "rduser",
    "password": "*****8",
    "db": "test",
    "charset": "utf8"
}

# Connect to MySQL
mysql_conn = pymysql.connect(**mysql_110)

# DDL of the target table, kept here for reference; the script never executes it,
# so the table is assumed to already exist.
sql0 = """
CREATE TABLE `jrj_news_original` (
  `seq` int(10) unsigned NOT NULL AUTO_INCREMENT COMMENT 'Sequence number, surrogate key',
  `news_id` int(8) NOT NULL COMMENT 'News ID',
  `title` varchar(300) NOT NULL COMMENT 'News title',
  `detail` mediumtext DEFAULT NULL COMMENT 'Summary',
  `list_date` datetime NOT NULL COMMENT 'Publish time',
  `key_word` varchar(100) DEFAULT NULL COMMENT 'Keywords',
  `stk_code` varchar(10) DEFAULT NULL COMMENT 'Stock code',
  `stk_sname` varchar(30) DEFAULT NULL COMMENT 'Stock short name',
  `img_url` mediumtext DEFAULT NULL COMMENT 'Image URL',
  `info_url` varchar(100) DEFAULT NULL COMMENT 'Full-text URL',
  `content` text DEFAULT NULL COMMENT 'Content',
  `infocls` varchar(6) DEFAULT NULL COMMENT 'Column ID',
  `channum` varchar(3) DEFAULT NULL COMMENT 'Channel ID',
  `cls` int(6) DEFAULT NULL COMMENT 'Category ID',
  `from_type` int(3) NOT NULL COMMENT 'Source type',
  PRIMARY KEY (`seq`),
  KEY `list_date` (`list_date`,`news_id`,`stk_code`)
) ENGINE=InnoDB AUTO_INCREMENT=1341 DEFAULT CHARSET=utf8 COMMENT='Raw news table\r\nData source: JS-format feed provided by Beijing\r\nUpdate frequency: every 3 minutes\r\n';
"""

# Create a cursor
cursor = mysql_conn.cursor()

# Delete today's rows so the run can be repeated without duplicating data
deleSql = '''
delete from JRJ_NEWS_ORIGINAL
where list_date >= curdate()
'''
cursor.execute(deleSql)

# Fetch all query results
# datas = cursor.fetchall()
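The DELETE above relies on the MySQL server's curdate(), while the rest of the script filters with the Python-side todayy; if the server clock or time zone ever differs from the machine running the script, the two notions of "today" drift apart. A minimal alternative sketch that passes the Python cutoff as a parameter instead:

# Sketch: use the same cutoff as the Python-side filtering instead of curdate()
cursor.execute("delete from JRJ_NEWS_ORIGINAL where list_date >= %s", [todayy])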

# Listed-company feed: CLS=100003, FROM_TYPE=4
companyUrl = 'http://stock.jrj.com.cn/share/news/app/company/' + todayy.strftime('%Y-%m-%d') + '.js'
hml1 = requests.get(companyUrl)
# Full payload (JSONP-style response; strip the fixed-length callback wrapper)
# print(hml1.text[27:][:-6])
tex = json.loads(hml1.text[27:][:-6])
print('Listed-company data sample:\n', tex[0])
len1 = len(tex)
print(len1)
for i in range(len1):
    # Entries are newest first; stop at the first record from before today
    if datetime.strptime(tex[i][0]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    print("----- has inserted ( %d ) company rows -----" % (i + 1))
    # print(tex[i][0]['stockcode'].split(','))
    # str.split(',') never returns an empty list, so test the raw field for "no stock"
    if not tex[i][0]['stockcode']:
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,IMG_URL,INFO_URL,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,100003,4);'''
        args1 = [tex[i][0]['iiid'], tex[i][0]['title'].replace('"', "'"), tex[i][0]['detail'].replace('"', "'"),
                 tex[i][0]['makedate'], tex[i][0]['imgurl'], tex[i][0]['infourl']]
        cursor.execute(sql1, args1)
    elif len(tex[i][0]['stockcode'].split(',')) == 1:
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100003,4);'''
        args2 = [tex[i][0]['iiid'], tex[i][0]['title'].replace('"', "'"), tex[i][0]['detail'].replace('"', "'"),
                 tex[i][0]['makedate'], tex[i][0]['stockcode'], tex[i][0]['stockname'],
                 tex[i][0]['imgurl'], tex[i][0]['infourl']]
        cursor.execute(sql2, args2)
    else:
        # One row per stock mentioned by the article
        for x in range(len(tex[i][0]['stockcode'].split(','))):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,CLS,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100003,4);'''
            args3 = [tex[i][0]['iiid'], tex[i][0]['title'], tex[i][0]['detail'], tex[i][0]['makedate'],
                     tex[i][0]['stockcode'].split(',')[x], tex[i][0]['stockname'].split(',')[x],
                     tex[i][0]['imgurl'], tex[i][0]['infourl']]
            cursor.execute(sql3, args3)
# Commit the listed-company batch
mysql_conn.commit()
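The three stock.jrj.com.cn feeds return JSONP-style text, which is why the payload is unwrapped with fixed slices ([27:][:-6] above, [26:][:-2] for the two feeds below); those offsets break if the callback prefix ever changes length. A more tolerant sketch, assuming the payload is wrapped as callback( ... ); — unwrap_jsonp is an illustrative helper name, not something provided by the feed:

import re

def unwrap_jsonp(text):
    # Extract the JSON between the outermost parentheses of a JSONP response;
    # fall back to the raw text if no wrapper is present.
    m = re.search(r'\((.*)\)\s*;?\s*$', text, re.S)
    return json.loads(m.group(1) if m else text)

# e.g. tex = unwrap_jsonp(hml1.text)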

# 'Chance Early Know' feed: CLS=100001, FROM_TYPE=2
chanceUrl = 'http://stock.jrj.com.cn/share/news/app/qingbao/' + todayy.strftime('%Y-%m-%d') + '.js'
html2 = requests.get(chanceUrl)
# Full payload
# print(html2.text[26:][:-2])
txt = json.loads(html2.text[26:][:-2])
print('Chance Early Know data sample:\n', txt[0])
len1 = len(txt)
print(len1)
for i in range(len1):
    # print(txt[i]['stockcode'].split(','))
    if datetime.strptime(txt[i]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    print("----- has inserted ( %d ) Chance Early Know rows -----" % (i + 1))
    if not txt[i]['stockcode']:
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100001,2);'''
        args1 = [txt[i]['iiid'], txt[i]['title'], txt[i]['detail'], txt[i]['makedate'],
                 txt[i]['keyword'], txt[i]['imgurl'], txt[i]['infourl'], txt[i]['content']]
        cursor.execute(sql1, args1)
    elif len(txt[i]['stockcode'].split(',')) == 1:
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100001,2);'''
        args2 = [txt[i]['iiid'], txt[i]['title'], txt[i]['detail'], txt[i]['makedate'], txt[i]['keyword'],
                 txt[i]['stockcode'], txt[i]['imgurl'], txt[i]['infourl'], txt[i]['content']]
        cursor.execute(sql2, args2)
    else:
        # One row per stock mentioned by the article
        for x in range(len(txt[i]['stockcode'].split(','))):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100001,2);'''
            args3 = [txt[i]['iiid'], txt[i]['title'], txt[i]['detail'], txt[i]['makedate'], txt[i]['keyword'],
                     txt[i]['stockcode'].split(',')[x], txt[i]['imgurl'], txt[i]['infourl'], txt[i]['content']]
            cursor.execute(sql3, args3)
# Commit the Chance Early Know batch
mysql_conn.commit()

# Limit-up/limit-down ('Secret Up and Down') feed: CLS=100002, FROM_TYPE=3
secretUrl = 'http://stock.jrj.com.cn/share/news/app/zhangting/' + todayy.strftime('%Y-%m-%d') + '.js'
html3 = requests.get(secretUrl)
txt2 = json.loads(html3.text[26:][:-2])
len2 = len(txt2)
for i in range(len2):
    # print(txt2[i]['stockcode'].split(','))
    if datetime.strptime(txt2[i]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    print("----- has inserted ( %d ) Secret Up and Down rows -----" % (i + 1))
    if not txt2[i]['stockcode']:
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,100002,3);'''
        args1 = [txt2[i]['iiid'], txt2[i]['title'], txt2[i]['detail'], txt2[i]['makedate'],
                 txt2[i]['keyword'], txt2[i]['imgurl'], txt2[i]['infourl'], txt2[i]['content']]
        cursor.execute(sql1, args1)
    elif len(txt2[i]['stockcode'].split(',')) == 1:
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100002,3);'''
        args2 = [txt2[i]['iiid'], txt2[i]['title'], txt2[i]['detail'], txt2[i]['makedate'], txt2[i]['keyword'],
                 txt2[i]['stockcode'], txt2[i]['imgurl'], txt2[i]['infourl'], txt2[i]['content']]
        cursor.execute(sql2, args2)
    else:
        # One row per stock mentioned by the article
        for x in range(len(txt2[i]['stockcode'].split(','))):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100002,3);'''
            args3 = [txt2[i]['iiid'], txt2[i]['title'], txt2[i]['detail'], txt2[i]['makedate'], txt2[i]['keyword'],
                     txt2[i]['stockcode'].split(',')[x], txt2[i]['imgurl'], txt2[i]['infourl'], txt2[i]['content']]
            cursor.execute(sql3, args3)
# Commit the Secret Up and Down batch
mysql_conn.commit()
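An article that mentions several stocks currently costs one execute() per stock. Assuming the same table and columns as above, a hedged alternative is to collect the parameter tuples for a whole feed and send them in one round trip with cursor.executemany(); the sketch below (insert_zhangting_batch is an illustrative name) does this for the limit-up/limit-down feed, filtering by date instead of breaking at the first older record and skipping entries without a stock code:

def insert_zhangting_batch(cursor, txt2):
    # One parameter tuple per (article, stock) pair, sent in a single executemany call
    sqlBatch = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,IMG_URL,INFO_URL,CONTENT,CLS,FROM_TYPE)
    VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,100002,3);'''
    rows = [(r['iiid'], r['title'], r['detail'], r['makedate'], r['keyword'],
             code, r['imgurl'], r['infourl'], r['content'])
            for r in txt2
            if datetime.strptime(r['makedate'], '%Y-%m-%d %H:%M:%S') >= todayy
            for code in r['stockcode'].split(',') if code]
    cursor.executemany(sqlBatch, rows)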

import random

# Wulanmu feed: CLS is null, FROM_TYPE=1
# The two random digits are only a cache-busting query string
noTypeUrl = 'http://mapp.jrj.com.cn/co/zk/1.js?' + str(random.randint(1, 9)) + str(random.randint(1, 9))
print(noTypeUrl)
hml1 = requests.get(noTypeUrl)
tx = hml1.json()
print(len(tx['data']))
len4 = len(tx['data'])
data1 = tx['data']
for i in range(len4):
    print("----- has inserted ( %d ) rows -----" % i)
    if datetime.strptime(data1[i]['makedate'], '%Y-%m-%d %H:%M:%S') < todayy:
        break
    # infostocks is a list of {stockcode, stockname} entries for this feed
    if len(data1[i]['infostocks']) == 0:
        sql1 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,IMG_URL,INFO_URL,INFOCLS,CHANNUM,AUTHOR,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,1);'''
        args1 = [data1[i]['iiid'], data1[i]['title'], data1[i]['detail'], data1[i]['makedate'], data1[i]['keyword'],
                 data1[i]['imgurl'], data1[i]['infourl'], data1[i]['infocls'], data1[i]['channum'], data1[i]['author']]
        cursor.execute(sql1, args1)
    elif len(data1[i]['infostocks']) == 1:
        sql2 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,INFOCLS,CHANNUM,AUTHOR,FROM_TYPE)
        VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,1);'''
        args2 = [data1[i]['iiid'], data1[i]['title'], data1[i]['detail'], data1[i]['makedate'], data1[i]['keyword'],
                 data1[i]['infostocks'][0]['stockcode'], data1[i]['infostocks'][0]['stockname'],
                 data1[i]['imgurl'], data1[i]['infourl'], data1[i]['infocls'], data1[i]['channum'], data1[i]['author']]
        cursor.execute(sql2, args2)
    else:
        # One row per associated stock
        for x in range(len(data1[i]['infostocks'])):
            sql3 = '''INSERT INTO JRJ_NEWS_ORIGINAL(NEWS_ID,TITLE,DETAIL,LIST_DATE,KEY_WORD,STK_CODE,STK_SNAME,IMG_URL,INFO_URL,INFOCLS,CHANNUM,AUTHOR,FROM_TYPE)
            VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,1);'''
            args3 = [data1[i]['iiid'], data1[i]['title'], data1[i]['detail'], data1[i]['makedate'], data1[i]['keyword'],
                     data1[i]['infostocks'][x]['stockcode'], data1[i]['infostocks'][x]['stockname'],
                     data1[i]['imgurl'], data1[i]['infourl'], data1[i]['infocls'], data1[i]['channum'], data1[i]['author']]
            cursor.execute(sql3, args3)
# Commit the Wulanmu batch
mysql_conn.commit()

cursor.close()
mysql_conn.close()
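If any request or INSERT above raises, the script exits with the connection still open and a partly loaded, uncommitted batch. A minimal sketch of a safer structure — run_import and load_all are illustrative names, and it assumes the same mysql_110 settings — commits once at the end and rolls everything back (including the initial delete) on failure:

import pymysql

def run_import(load_all):
    conn = pymysql.connect(**mysql_110)
    try:
        with conn.cursor() as cur:   # pymysql cursors support the context-manager protocol
            load_all(cur)            # the delete plus the four feed loaders would go here
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()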
