#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
   File Name:    utilFunction.py
   Description : utility functions
   Author :      JHao
   date:         2016/11/25
-------------------------------------------------
   Change Activity:
                 2016/11/25: added robustCrawl, verifyProxy, getHtmlTree
-------------------------------------------------
"""
import requests
import time
from lxml import etree

from Util.LogHandler import LogHandler
from Util.WebRequest import WebRequest

logger = LogHandler(__name__, stream=False)


# noinspection PyPep8Naming
def robustCrawl(func):
    """decorator: log any exception raised by the wrapped crawl function instead of propagating it"""
    def decorate(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            logger.info(u"sorry, the crawl failed. reason:")
            logger.info(e)

    return decorate
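

# Illustrative usage sketch for robustCrawl (`fetchPage` is a hypothetical
# crawler function used only as an example):
#
#     @robustCrawl
#     def fetchPage(url):
#         return getHtmlTree(url)
#
# Any exception raised inside fetchPage is logged via `logger`; note that the
# wrapper then returns None instead of re-raising.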


# noinspection PyPep8Naming
def verifyProxyFormat(proxy):
    """
    check that the proxy string looks like ip:port
    :param proxy: proxy string, e.g. '1.2.3.4:8080'
    :return: True if an ip:port pattern is found, else False
    """
    import re
    verify_regex = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:\d{1,5}"
    return True if re.findall(verify_regex, proxy) else False
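

# Illustrative examples (both values are hypothetical):
#     verifyProxyFormat('118.24.52.95:8080')  # -> True, contains an ip:port pattern
#     verifyProxyFormat('not-a-proxy')        # -> False, no ip:port pattern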


# noinspection PyPep8Naming
def getHtmlTree(url, **kwargs):
    """
    fetch the page and return it as an lxml HTML tree
    :param url: target url
    :param kwargs: reserved for future options
    :return: lxml element tree of the page
    """
    header = {'Connection': 'keep-alive',
              'Cache-Control': 'max-age=0',
              'Upgrade-Insecure-Requests': '1',
              'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_3) AppleWebKit/537.36 (KHTML, like Gecko)',
              'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
              'Accept-Encoding': 'gzip, deflate, sdch',
              'Accept-Language': 'zh-CN,zh;q=0.8',
              }
    # TODO: fetch a proxy and make the request through it
    wr = WebRequest()
    # delay 2s per request
    time.sleep(2)
    html = wr.get(url=url, header=header).content
    return etree.HTML(html)
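

# Illustrative usage sketch (the URL and XPath below are placeholders, adjust to the target page):
#     tree = getHtmlTree('http://www.example.com/proxylist')
#     rows = tree.xpath('//table//tr')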


def tcpConnect(proxy):
    """
    check whether a TCP three-way handshake with the proxy succeeds
    :param proxy: proxy string 'ip:port'
    :return: True if the connection is established, else False
    """
    from socket import socket, AF_INET, SOCK_STREAM
    s = socket(AF_INET, SOCK_STREAM)
    ip, port = proxy.split(':')
    result = s.connect_ex((ip, int(port)))
    s.close()
    return True if result == 0 else False
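

# Illustrative example (hypothetical address):
#     tcpConnect('118.24.52.95:8080')  # True only if the port accepts a TCP connection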


# noinspection PyPep8Naming
def validUsefulProxy(proxy):
    """
    check whether the proxy actually works by requesting httpbin through it
    :param proxy: proxy string 'ip:port'
    :return: True if the request returns HTTP 200, else False
    """
    if isinstance(proxy, bytes):
        proxy = proxy.decode('utf8')
    proxies = {"http": "http://{proxy}".format(proxy=proxy)}
    try:
        # drop proxies that take more than 20 seconds to respond
        r = requests.get('http://httpbin.org/ip', proxies=proxies, timeout=20, verify=False)
        if r.status_code == 200:
            logger.info('%s is ok' % proxy)
            return True
    except Exception as e:
        logger.debug(e)
    return False
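

# Minimal self-test sketch, assuming the placeholder below is swapped for a real proxy (ip:port):
if __name__ == '__main__':
    sample_proxy = '127.0.0.1:8080'  # placeholder, replace with a real proxy address
    if verifyProxyFormat(sample_proxy):
        print('reachable: %s, usable: %s' % (tcpConnect(sample_proxy), validUsefulProxy(sample_proxy)))
    else:
        print('invalid proxy format: %s' % sample_proxy)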