db_input_crawling.py
import requests
from bs4 import BeautifulSoup

from db_setting import con  # shared DB connection opened in db_setting.py


def mainCrawler(proNum, proName, cate, rate):
    """Insert one crawled problem into the app_algoreader table."""
    try:
        with con.cursor() as cursor:
            # Create a new record
            sql = ("INSERT INTO `app_algoreader` "
                   "(`problemNum`, `problemName`, `category`, `answerRate`) "
                   "VALUES (%s, %s, %s, %s)")
            cursor.execute(sql, (proNum, proName, cate, rate))
        con.commit()
        print('DB SAVE SUCCESS')
    except Exception:
        con.rollback()
        print('DB SAVE FAIL')
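
# NOTE: the INSERT above assumes roughly the following table (hypothetical
# schema, inferred only from the four columns this script writes):
#
#   CREATE TABLE `app_algoreader` (
#       `id`          INT AUTO_INCREMENT PRIMARY KEY,
#       `problemNum`  INT,
#       `problemName` VARCHAR(255),
#       `category`    VARCHAR(255),
#       `answerRate`  INT
#   );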

# Collect every algorithm tag name from the tag index page.
problem_category = []
res = requests.get('https://www.acmicpc.net/problem/tags')
soup = BeautifulSoup(res.content, 'html.parser')
tags = soup.find_all('tr')
tags.pop(0)  # drop the table header row
for c in tags:
    name = c.select('td')[0].getText()
    problem_category.append(name)

# For each tag, walk every page of its problem list and save each problem.
for c in problem_category:
    url_cate = requests.get('https://www.acmicpc.net/problem/tag/' + c)
    soup = BeautifulSoup(url_cate.content, 'html.parser')
    print("cate - " + c)
    # Number of entries in the pagination bar = number of pages for this tag.
    cate_length = len(soup.select('.pagination li'))
    print(cate_length)
    for page in range(cate_length):
        url_cate = requests.get('https://www.acmicpc.net/problem/tag/' + c + '/' + str(page + 1))
        soup = BeautifulSoup(url_cate.content, 'html.parser')
        rows = soup.select('tr')
        if rows:
            rows.pop(0)  # drop the table header row
        for r in rows:
            proNum = int(r.select('td')[0].getText())
            proName = r.select('td')[1].getText()
            cate = c
            # Answer rate: strip the trailing '%' and store as an int
            # scaled by 100 (e.g. '45.67%' -> 4567).
            rate = int(float(r.select('td')[5].getText()[:-1]) * 100)
            print(proNum, proName, cate, rate)
            mainCrawler(proNum, proName, cate, rate)

con.close()
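
The script imports a live connection object `con` from a sibling `db_setting.py`, which is not shown here. Below is a minimal sketch of what that module might look like, assuming pymysql (the `%s` placeholders and the `with con.cursor()` usage match its API); the host, credentials, and database name are placeholders, not values from this repository:

import pymysql

# Hypothetical db_setting.py: open one shared connection for the crawler.
con = pymysql.connect(
    host='localhost',      # placeholder host
    user='user',           # placeholder credentials
    password='password',
    db='algoreader',       # placeholder database name
    charset='utf8mb4',
)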