-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathex37.py
89 lines (80 loc) · 3.63 KB
/
ex37.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
#coding:utf-8
# requests 模块的运用
# Dependency: install first with `sudo pip install requests`
# (was a bare shell command at module level, which is a Python SyntaxError)
# from 公众号[智能制造专栏]URL:https://mp.weixin.qq.com/s/8wAHBPnQMbcrP9La7WZiJA
# 程序目的为爬取薪酬数据,基本逻辑:获取页面、获取页面的模块、获取行业领域,公司规模等具体信息
import random
import time
import requests
from openpyxl import Workbook
import pymysql.cursors
# 建立页面数据的连接
def get_conn():
    """Open and return a connection to the local MySQL `python` database.

    Uses a DictCursor so query results come back as dicts.
    Credentials are hard-coded for a local development setup.
    """
    params = dict(
        host='localhost',
        user='root',
        password='root',
        db='python',
        charset='utf8mb4',
        cursorclass=pymysql.cursors.DictCursor,
    )
    return pymysql.connect(**params)
# 插入数据库
def insert(conn, info):
    """Write one scraped job record into the `python` table.

    `info` is a 7-tuple: (shortname, fullname, industryfield,
    companySize, salary, city, education). Uses a parameterized
    query, then commits immediately.
    """
    statement = (
        "INSERT INTO `python` (`shortname`, `fullname`, `industryfield`, "
        "`companySize`, `salary`, `city`, `education`) "
        "VALUES (%s, %s, %s, %s, %s, %s, %s)"
    )
    with conn.cursor() as cursor:
        cursor.execute(statement, info)
        conn.commit()
# 主体爬取部分,没看懂
def get_json(url, page, lang_name):
    """Fetch one page of positions from the Lagou position-search API.

    Parameters:
        url:       API endpoint (already carries the city query parameter).
        page:      1-based page number, sent as the `pn` form field.
        lang_name: search keyword (e.g. 'python'), sent as `kd`.

    Returns:
        A list of rows, one per position:
        [companyShortName, companyFullName, industryField, companySize,
         salary, city, education] — any missing field defaults to '无'.

    Raises:
        requests.RequestException on network failure / timeout,
        KeyError if the response JSON lacks the expected structure.
    """
    headers = {
        'Host': 'www.lagou.com',
        'Connection': 'keep-alive',
        'Content-Length': '23',
        'Origin': 'https://www.lagou.com',
        'X-Anit-Forge-Code': '0',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:61.0) Gecko/20100101 Firefox/61.0',
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8',
        'Accept': 'application/json, text/javascript, */*; q=0.01',
        'X-Requested-With': 'XMLHttpRequest',
        'X-Anit-Forge-Token': 'None',
        'Referer': 'https://www.lagou.com/jobs/list_python?city=%E5%85%A8%E5%9B%BD&cl=false&fromSearch=true&labelWords=&suginput=',
        'Accept-Encoding': 'gzip, deflate, br',
        'Accept-Language': 'en-US,en;q=0.9,zh-CN;q=0.8,zh;q=0.7'
    }
    form = {'first': 'false', 'pn': page, 'kd': lang_name}
    # timeout keeps a stalled request from hanging the scraper forever
    # (original had no timeout; renamed local `json` to avoid confusion
    # with the stdlib module name)
    payload = requests.post(url, data=form, headers=headers, timeout=30).json()
    positions = payload['content']['positionResult']['result']
    # the seven fields we persist, in table/sheet column order
    fields = ('companyShortName', 'companyFullName', 'industryField',
              'companySize', 'salary', 'city', 'education')
    return [[pos.get(field, '无') for field in fields] for pos in positions]
def main():
    """Crawl python job postings for five cities into an Excel workbook.

    Each row is also inserted into MySQL; comment out the get_conn /
    insert / conn.close lines to run without a database. Pages 1–30
    are fetched per city, with a random 10–20 s pause between pages
    to throttle requests.
    """
    keyword = 'python'
    workbook = Workbook()  # target Excel workbook
    sheet = workbook.active
    sheet.title = keyword
    conn = get_conn()  # open DB connection; comment out to skip the database
    endpoint = 'https://www.lagou.com/jobs/positionAjax.json?city={}&needAddtionalResult=false'
    for city in ('北京', '上海', '广州', '深圳', '杭州'):  # five cities
        url = endpoint.format(city)
        for page in range(1, 31):  # 30 pages per city
            rows = get_json(url, page, keyword)
            time.sleep(random.randint(10, 20))  # polite crawl delay
            for row in rows:
                insert(conn, tuple(row))  # comment out to skip the database
                sheet.append(row)
    conn.close()  # comment out to skip the database
    workbook.save('{}职位信息.xlsx'.format(keyword))
if __name__ == '__main__':
    main()