from bs4 import BeautifulSoup
import re
import urllib.request, urllib.error
import xlwt
import sqlite3
import random
import requests
import urllib
import socket
import MySQLdb
def main():
    """Entry point: set up shared crawl state, then scrape and store pages.

    getData() reads `findName` (compiled regex) and `db` (MySQLdb
    connection) as module-level globals, so both are declared global
    here and created *before* getData() runs.  The original called
    getData() first — which would raise NameError — and kept both
    names local to main(), invisible to getData().
    """
    global findName, db
    baseurl = "https://www.123.top/?type=productinfo&id="
    # Extracts the product name from its <h2 class="c_38485a f18"> heading.
    findName = re.compile(r'<h2 class="c_38485a f18">(.*)</h2>')
    # NOTE(review): connection parameters look redacted/garbled in the
    # source — kept byte-identical; verify host/user/password/schema
    # before running.
    db = MySQLdb.connect("47. 3", " h", "1 23", "hi h", charset='utf8')
    try:
        datalist = getData(baseurl)
    finally:
        db.close()  # the original never closed the connection
def get_ua():
    """Return a randomly chosen browser User-Agent string.

    Used by askURL() to vary the request fingerprint between fetches.

    FIX: the original list was missing commas after the first two of
    its last three entries, so Python's implicit string concatenation
    merged three User-Agents into one malformed ~360-character string
    (15 list entries instead of 17).
    """
    user_agents = [
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.95 Safari/537.36 OPR/26.0.1656.60',
        'Opera/8.0 (Windows NT 5.1; U; en)',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; en) Opera 9.50',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:34.0) Gecko/20100101 Firefox/34.0',
        'Mozilla/5.0 (X11; U; Linux x86_64; zh-CN; rv:1.9.2.10) Gecko/20100922 Ubuntu/10.10 (maverick) Firefox/3.6.10',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.57.2 (KHTML, like Gecko) Version/5.1.7 Safari/534.57.2 ',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.71 Safari/537.36',
        'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
        'Mozilla/5.0 (Windows; U; Windows NT 6.1; en-US) AppleWebKit/534.16 (KHTML, like Gecko) Chrome/10.0.648.133 Safari/534.16',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/30.0.1599.101 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/2.0 Safari/536.11',
        'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.71 Safari/537.1 LBBROWSER',
        'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; QQDownload 732; .NET4.0C; .NET4.0E)',
        'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.84 Safari/535.11 SE 2.X MetaSr 1.0',
        'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SV1; QQDownload 732; .NET4.0C; .NET4.0E; SE 2.X MetaSr 1.0) ',
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.108 Safari/537.36",
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:81.0) Gecko/20100101 Firefox/81.0",
    ]
    return random.choice(user_agents)
def getData(baseurl):
    """Crawl product pages with ids 100..1429 under *baseurl*, parse each
    page and insert it into the MySQL table `newgood`; ids that fail are
    recorded in table `bad`.

    Relies on module-level names: askURL() (fetcher), `findName`
    (compiled name regex) and `db` (open MySQLdb connection).

    Returns the scraped records as (cid, face, name, type, info) tuples
    (the original built `datalist` but never filled or returned it).
    """
    datalist = []
    for i in range(100, 1430):
        try:
            url = baseurl + str(i)
            html = askURL(url)
            bs = BeautifulSoup(html, 'html.parser')
            # Product image: first .img-responsive node's src, made absolute.
            img = bs.select(".img-responsive")
            src = 'https://www.123.top/' + img[0].get('src')
            # Product name: regex over the first .c_38485a heading.
            text = bs.select(".c_38485a")
            name = re.findall(findName, str(text[0]))[0]
            # Category links; renamed from `type`, which shadowed the builtin.
            title = bs.select(".course-info>div>a")
            categories = [item.get_text() for item in title]
            info = bs.select(".main")
            print('id为{0}的数据内容爬取中'.format(i))
            str1 = ','.join(categories)
            # Boilerplate blocks to strip out of the description HTML.
            delpl = bs.select(".shoplist")[0]
            delpl2 = bs.select(".wrapbox")[0]
            delinfo = str(info[0]).replace(str(delpl), '')
            delinfo = delinfo.replace('本商品可参与分享赚佣金计划 【佣金0.5元】', '')
            delinfo = delinfo.replace('点击参与', '')
            delinfo = delinfo.replace(str(delpl2), '')
            # Quote normalisation kept byte-for-byte so stored data stays
            # compatible with rows written by the old code.
            str3 = delinfo.replace("'", '').replace('"', '^')
            cursor = db.cursor()
            try:
                # Parameterized query — the original interpolated page
                # content straight into the SQL string (injection risk).
                sql = ("INSERT INTO newgood(cid,face,name,type,info) "
                       "VALUES(%s,%s,%s,%s,%s)")
                cursor.execute(sql, (i, src, name, str1, str3))
                db.commit()
            finally:
                cursor.close()
            datalist.append((i, src, name, str1, str3))
        except Exception:
            # Narrowed from a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit); log the failed id.
            print('id为{0}的数据爬取失败'.format(i))
            cursor = db.cursor()
            try:
                cursor.execute("INSERT INTO bad(cid)VALUES (%s)", (i,))
                db.commit()
            finally:
                cursor.close()
    return datalist
def askURL(url):
    """Fetch *url* with a randomly chosen User-Agent and return the body
    decoded as UTF-8; returns "" if the request fails.

    Network errors are printed (HTTP status via e.code when present,
    otherwise e.reason) rather than raised.
    """
    head = {
        'User-Agent': get_ua()
    }
    request = urllib.request.Request(url, headers=head)
    html = ""
    try:
        # Randomised timeout kept from the original (crude pacing).
        # `with` closes the response — the original leaked the socket.
        with urllib.request.urlopen(request, timeout=random.randint(1, 60)) as response:
            html = response.read().decode("utf-8")
    except urllib.error.URLError as e:
        if hasattr(e, 'code'):
            print(e.code)
        if hasattr(e, 'reason'):
            print(e.reason)
    except socket.timeout as e:
        # response.read() can raise socket.timeout directly, which the
        # original URLError-only clause did not catch.
        print(e)
    return html
def saveData(savepath):
    """Stub: persisting scraped data to *savepath* is not implemented yet;
    only announces the save step."""
    print('save...')
# Standard script guard: run the crawler only when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()
# 转载请注明原文地址 (reprint notice — cite the original source): https://blackberry.8miu.com/read-30063.html