# -*- coding: utf-8 -*-
"""Crawl the order hall of a game-boosting PC client and store new orders
in the local MySQL database ``zyc`` (table ``DUMPLINGS``).

Reconstructed from a blog tutorial: the endpoints below were captured with
HttpAnalyzerStdV7 (similar to Chrome's F12 network tab).  Flow of one
iteration: fetch the hall page listing open orders, extract every
``order_no``, fetch each order's detail page, regex out its fields, and
insert one row per order.  A UNIQUE key on ``order_no`` makes re-inserting
an already-seen order fail, so across iterations only *new* orders
accumulate.  The script polls every 10 seconds.
"""
import re
import time

import pymysql   # Python 3 MySQL driver (Python 2 used MySQLdb)
import requests

# Endpoints captured from the client.  NOTE(review): the original paste
# included the raw request-line tail " HTTP/1.1" inside the URL string --
# that is a capture artifact, not part of the URL, and is dropped here.
# The access_token / sign / timestamp values are from the 2017 capture and
# are almost certainly expired -- refresh them before running.
HALL_URL = (
    'https://www.dianjingbaozi.com/api/dailian/soldier/hall'
    '?access_token=3ef3abbea1f6cf16b2420eb962cf1c9a&dan_end=&dan_start='
    '&game_id=2&kw=&order=price_desc&page=%d&pagesize=30'
    '&price_end=0&price_start=0&server_code=000200000000'
    '&sign=ca19072ea0acb55a2ed2486d6ff6c5256c7a0773'
    '&timestamp=1511235791&type=public&type_id='
)
DETAIL_URL = (
    'http://www.lpergame.com/api/dailian/order/detail'
    '?access_token=eb547a14bad97e1ee5d835b32cb83ff1&order_no=%s'
    '&sign=c9b503c0e4e8786c2945dc0dca0fabfa1ca4a870&timestamp=1511146154'
)

# Proxy IP from the original tutorial -- TODO confirm it is still alive,
# or drop `proxies=` entirely to connect directly.
PROXY = {'HTTP': '61.135.155.82:443'}

# One regex per field of the detail payload (the payload is JSON-ish text
# embedded in the response; the tutorial parses it with regexes).
# NOTE(review): exact quoting of each field was mangled in the source
# paste and is reconstructed here -- verify against a live payload.
_FIELD_PATTERNS = [
    r'"id":(.*?),',
    r'"order_no":"(.*?)",',
    r'"order_title":"(.*?)",',
    r'"publish_desc":"(.*?)",',
    # Index 4: game/area/district packed as "name\/area\/district"
    # (JSON-escaped slashes) -> three capture groups per match.
    r'"game_area":"(.*?)\\/(.*?)\\/(.*?)",',
    r'"order_current":"(.*?)",',
    r'"order_content":"(.*?)",',
    r'"order_hours":(.*?),',
    r'"order_price":(.*?),',
    r'"add_price":(.*?),',
    r'"safe_money":(.*?),',
    r'"speed_money":(.*?),',
    r'"order_status_desc":"(.*?)",',
    r'"order_lock_desc":"(.*?)",',
    r'"cancel_type_desc":"(.*?)",',
    r'"kf_status_desc":"(.*?)",',
    r'"is_show_pwd":(.*?),',
    r'"game_pwd":"(.*?)",',
    r'"game_account":"(.*?)",',
    r'"game_actor":"(.*?)",',
    r'"left_hours":"(.*?)",',
    r'"created_at":"(.*?)",',
    r'"account_id":(.*?),',
    r'"mobile":"(.*?)",',
    r'"contact":"(.*?)",',
    r'"qq":"(.*?)"},',
]

# Connection settings.  NOTE(review): the host value was lost in the
# source paste; 'localhost' is assumed -- confirm.
_MYSQL_CONF = dict(
    host='localhost',
    port=3306,
    user='zyc',
    password='',
    db='zyc',
    charset='utf8',  # utf8 so Chinese text round-trips without mojibake
)

# Column order must match the flattened output of _parse_order_fields.
# mobile2/contact2 exist because some payloads repeat those fields --
# TODO confirm against a live detail page.
_INSERT_COLUMNS = (
    'id', 'order_no', 'order_title', 'publish_desc', 'game_name',
    'game_area', 'game_area_distinct', 'order_current', 'order_content',
    'order_hours', 'order_price', 'add_price', 'safe_money', 'speed_money',
    'order_status_desc', 'order_lock_desc', 'cancel_type_desc',
    'kf_status_desc', 'is_show_pwd', 'game_pwd', 'game_account',
    'game_actor', 'left_hours', 'created_at', 'account_id',
    'mobile', 'mobile2', 'contact', 'contact2', 'qq',
)


def _parse_order_fields(html_order, patterns=None):
    """Apply every field regex to one detail page.

    Returns a flat list of strings -- one prospective DB row.  The
    ``game_area`` pattern (index 4) yields 3-tuples, which are flattened
    into three consecutive values; every other pattern contributes its
    matches directly.
    """
    if patterns is None:
        patterns = _FIELD_PATTERNS
    row = []
    for i, pattern in enumerate(patterns):
        matches = re.findall(pattern, html_order)
        if i == 4:  # game_area: flatten each (game, area, district) tuple
            for groups in matches:
                row.extend(groups)
        else:
            row.extend(matches)
    return row


def _connect():
    """Open a connection to the zyc database (shared by create/insert)."""
    return pymysql.connect(**_MYSQL_CONF)


def GetResults():
    """Scrape the hall page(s) and every listed order's detail page.

    Returns a list of rows (flat lists of strings), one per order.  On a
    network/decode error it prints a notice, backs off 5 seconds, and
    returns whatever was collected so far.  (The original bare ``except``
    implicitly returned None, which crashed the caller.)
    """
    requests.adapters.DEFAULT_RETRIES = 5  # retry flaky connections
    results = []
    try:
        for page in range(1, 2):  # only page 1; widen range to crawl more
            resp = requests.get(HALL_URL % page, proxies=PROXY, timeout=10)
            hall_html = resp.content.decode('utf-8')
            # Order numbers key the detail-page URLs.
            order_nos = re.findall(r'"order_no":"(.*?)","game_area"', hall_html)
            for order_no in order_nos:
                resp = requests.get(DETAIL_URL % order_no,
                                    proxies=PROXY, timeout=10)
                detail_html = resp.content.decode('utf-8')
                results.append(_parse_order_fields(detail_html))
    except (requests.RequestException, UnicodeDecodeError) as exc:
        print('失败', exc)
        time.sleep(5)  # back off: too-frequent requests trigger errors
    return results


def mysql_create():
    """Drop and recreate table DUMPLINGS.

    Destructive: any existing rows are lost.  The UNIQUE key on
    ``order_no`` is what de-duplicates repeated crawls.  (The original
    also built a second unique index on ``id``, redundant with the
    primary key, and set AUTO_INCREMENT on a table with no
    auto-increment column; both dropped.)
    """
    sql_create = (
        'CREATE TABLE DUMPLINGS ('
        'id CHAR(10),order_no CHAR(50),order_title VARCHAR(265),'
        'publish_desc VARCHAR(265),game_name VARCHAR(265),'
        'game_area VARCHAR(265),game_area_distinct VARCHAR(265),'
        'order_current VARCHAR(3908),order_content VARCHAR(3908),'
        'order_hours CHAR(10),order_price FLOAT(10),add_price FLOAT(10),'
        'safe_money FLOAT(10),speed_money FLOAT(10),'
        'order_status_desc VARCHAR(265),order_lock_desc VARCHAR(265),'
        'cancel_type_desc VARCHAR(265),kf_status_desc VARCHAR(265),'
        'is_show_pwd TINYINT,game_pwd CHAR(50),game_account VARCHAR(265),'
        'game_actor VARCHAR(265),left_hours VARCHAR(265),'
        'created_at VARCHAR(265),account_id CHAR(50),mobile VARCHAR(265),'
        'mobile2 VARCHAR(265),contact VARCHAR(265),contact2 VARCHAR(265),'
        'qq VARCHAR(265),PRIMARY KEY (id),UNIQUE KEY no (order_no)'
        ') ENGINE=InnoDB DEFAULT CHARSET=utf8'
    )
    db = _connect()
    try:
        cursor = db.cursor()
        cursor.execute('DROP TABLE IF EXISTS DUMPLINGS')
        cursor.execute(sql_create)
    finally:
        db.close()  # always release the connection


def IntoMysql(results):
    """Insert each scraped row into DUMPLINGS.

    Uses a parameterized query (the original concatenated raw scraped
    strings straight into the SQL -- a quoting/injection hazard).  A row
    that violates the UNIQUE order_no key, or whose field count doesn't
    match the column list (a regex missed), raises and is skipped --
    the same net effect as the original's silent ``except: pass``, but
    scoped to database errors only.
    """
    if not results:
        return
    db = _connect()
    try:
        cursor = db.cursor()
        columns = ','.join(_INSERT_COLUMNS)
        for row in results:
            placeholders = ','.join(['%s'] * len(row))
            sql = 'INSERT INTO DUMPLINGS (%s) VALUES (%s)' % (
                columns, placeholders)
            try:
                cursor.execute(sql, row)
                db.commit()
            except pymysql.MySQLError:
                db.rollback()  # duplicate order or malformed row: skip
    finally:
        db.close()


def main():
    """Recreate the table, then poll the hall every 10 seconds forever."""
    mysql_create()
    crawl_count = 0
    while True:
        IntoMysql(GetResults())
        crawl_count += 1
        print('爬虫次数:', crawl_count)
        time.sleep(10)


# Guarded so importing this module does not start the infinite crawl loop
# (the original ran it at module level).
if __name__ == '__main__':
    main()