Python3 Cookie
来源:互联网 发布:刑不上大夫知乎 编辑:程序博客网 时间:2024/05/17 02:57
# -*- coding: utf-8 -*-
"""eBay search-result spider.

Fetches eBay search result pages (100 items per page), primes the session
with a US shipping-country cookie via the ``getrates`` endpoint, and collects
the ``iid`` item identifiers embedded in each listing's picture ``<div>``.

Reconstructed from a whitespace-mangled web scrape of the original script.
"""
import random
from http.cookiejar import CookieJar
from urllib.parse import quote

import requests
from bs4 import BeautifulSoup


class EbaySpider(object):
    """Scrapes eBay search results, keeping cookies across all requests."""

    def __init__(self):
        # A single Session so cookies set by getRates() (shipping country)
        # persist across every subsequent page request.
        self.SESSION = requests.session()
        self.SESSION.cookies = CookieJar()
        self.HEAD = self.randHeader()

    def randHeader(self):
        """Build request headers with a randomly chosen User-Agent.

        Returns:
            dict: headers suitable for ``requests.Session.get``.
        """
        head_connection = ['Keep-Alive', 'close']
        head_accept = ['text/html, application/xhtml+xml, */*']
        head_accept_language = ['zh-CN,fr-FR;q=0.5', 'en-US,en;q=0.8,zh-Hans-CN;q=0.5,zh-Hans;q=0.3']
        head_user_agent = [
            'Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko',
            'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/28.0.1500.95 Safari/537.36',
            'Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; rv:11.0) like Gecko)',
            'Mozilla/5.0 (Windows; U; Windows NT 5.2) Gecko/2008070208 Firefox/3.0.1',
            'Mozilla/5.0 (Windows; U; Windows NT 5.1) Gecko/20070309 Firefox/2.0.0.3',
            'Mozilla/5.0 (Windows; U; Windows NT 5.1) Gecko/20070803 Firefox/1.5.0.12',
            'Opera/9.27 (Windows NT 5.2; U; zh-cn)',
            'Mozilla/5.0 (Macintosh; PPC Mac OS X; U; en) Opera 8.0',
            'Opera/8.0 (Macintosh; PPC Mac OS X; U; en)',
            'Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.12) Gecko/20080219 Firefox/2.0.0.12 Navigator/9.0.0.6',
            'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Win64; x64; Trident/4.0)',
            'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 6.1; Trident/4.0)',
            'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET4.0C; .NET4.0E)',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Maxthon/4.0.6.2000 Chrome/26.0.1410.43 Safari/537.1 ',
            'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; InfoPath.2; .NET4.0C; .NET4.0E; QQBrowser/7.3.9825.400)',
            'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:21.0) Gecko/20100101 Firefox/21.0 ',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/21.0.1180.92 Safari/537.1 LBBROWSER',
            'Mozilla/5.0 (compatible; MSIE 10.0; Windows NT 6.1; WOW64; Trident/6.0; BIDUBrowser 2.x)',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.11 TaoBrowser/3.0 Safari/536.11',
        ]
        header = {
            'Connection': head_connection[0],
            'Accept': head_accept[0],
            'Accept-Language': head_accept_language[1],
            # random.choice replaces the equivalent randrange/index dance.
            'User-Agent': random.choice(head_user_agent),
        }
        return header

    @staticmethod
    def _extract_iid(markup):
        """Pull the item id out of a listing div's markup string.

        The target pattern (per the original offsets) is ``iid="<value>">``:
        ``find("iid=") + 5`` skips past ``iid="`` and the value runs up to the
        next ``">``.  Returns the id string, or ``None`` when ``iid=`` is
        absent (e.g. ``markup`` is ``"None"`` because the div was not found).
        """
        start = markup.find("iid=")
        if start == -1:
            return None
        # Search for the closing quote AFTER the value starts; the original's
        # global find('">') could match earlier markup and slice garbage.
        end = markup.find("\">", start + 5)
        return str(markup[start + 5:end])

    def getBeautifulSoup(self, query_rl):
        """GET *query_rl* with the session and parse it into a soup."""
        response = self.SESSION.get(url=query_rl, headers=self.HEAD)
        return BeautifulSoup(response.text, 'html.parser')

    def getRates(self, query_url):
        """Prime the session with a US shipping-country cookie.

        Reads the result count, grabs the first listing's item id, and hits
        eBay's ``getrates`` endpoint so that later page requests are served
        with US rates.  The response body itself is not used.
        """
        response = self.SESSION.get(url=query_url, headers=self.HEAD)
        soup = BeautifulSoup(response.text, 'html.parser')
        content = soup.find("span", "rcnt")
        # BUG FIX: counts >= 1,000 carry a thousands separator which broke
        # int() here; search() already stripped it — now both agree.
        itemSize = int(content.string.replace(",", ""))
        print("初次查询:" + str(itemSize) + "项")
        first_pic = str(soup.find("div", "lvpic pic img left"))
        # Fall back to "" (original sliced garbage when "iid=" was missing).
        itm = self._extract_iid(first_pic) or ""
        print("设置美国Code")
        getrates_url = "http://www.ebay.com/itm/getrates?item=" + itm + "&quantity=1&country=1&zipCode=&co=0&cb=jQuery1705300436743778978_1501488303970"
        # Fired only for its cookie side effect; parsing the body was dead code.
        self.SESSION.get(url=getrates_url, headers=self.HEAD)

    def search(self, serchWords):
        """Search eBay for *serchWords* and return all item ids found.

        Walks every result page (100 items per page, ``_ipg=100``) and
        collects the ``iid`` of each listing.

        Returns:
            list[str]: item ids in page order.
        """
        # Percent-encode the whole query; the original only handled spaces,
        # so any other reserved character produced a malformed URL.
        serchWords = quote(str(serchWords))
        base_url = ("https://www.ebay.com/sch/i.html?_from=R40&_sacat=0"
                    "&_ipg=100&rt=nc&_nkw=" + serchWords)
        query_rl = base_url + "&_pgn=1&_skc=0"
        self.getRates(query_url=query_rl)
        soup = self.getBeautifulSoup(query_rl)
        content = soup.find("span", "rcnt")
        itemSize = int(content.string.replace(",", ""))
        # Ceiling division: a partial last page still counts as a page.
        pageSize = itemSize // 100
        if itemSize % 100 != 0:
            pageSize += 1
        print("总计" + str(itemSize) + "项,共" + str(pageSize) + "页(每页100条)")
        result = []
        for _pgn in range(1, pageSize + 1):
            print("第" + str(_pgn) + "页....")
            if _pgn == 1:
                # Page 1 was already fetched above for the result count.
                page_soup = soup
            else:
                page_url = (base_url + "&_pgn=" + str(_pgn) +
                            "&_skc=" + str((_pgn - 1) * 100))
                page_soup = self.getBeautifulSoup(page_url)
            # One shared extraction helper replaces three hand-rolled copies.
            for div in page_soup.find_all("div", "lvpic pic img left"):
                iid = self._extract_iid(str(div))
                if iid is not None:
                    result.append(iid)
        return result


if __name__ == '__main__':
    ebay = EbaySpider()
    result = ebay.search("2016 nerf bar Silverado 2500")
    for i, iid in enumerate(result):
        print(str(i), end="\t")
        print(iid)
阅读全文
0 0
- Python3 Cookie
- python3使用cookie
- python3爬虫 - cookie登录实战
- Python3 使用cookiejar管理cookie
- Python3网络爬虫(3):Python3使用Cookie-模拟登陆
- python3实现带cookie的上传文件
- 手动实现cookie 的python3代码
- python3爬虫 - 利用浏览器cookie登录
- 关于python3实现cookie登录问题
- Python3发送post请求,自动记住cookie
- Python3爬虫代理服务器与cookie的使用
- Python3网络爬虫:使用Cookie-模拟登陆
- Python3之cookie与session会话技术
- Python3 Post登录并且保存cookie登录其他页面
- python3 模拟实现登录HDU并获取Cookie
- python3.4 获取cookie后继续访问其他页面
- Python3网络爬虫(六):Python3使用Cookie-模拟登陆获取妹子联系方式
- Python3网络爬虫(三):Python3使用Cookie-模拟登陆获取妹子联系方式
- Python初入门(八)(Head First Python 第八章 移动设备)
- 油田信息化:通往智慧之路(1.3-智慧油田在全球的实践)
- c++连接MySql数据库
- 传智播客-Java学习笔记day19
- Codeforces Round #418 (Div. 2)
- Python3 Cookie
- 119. Pascal's Triangle II
- HDU-A+B for Input-Output Practice (V)
- iOS 10.3之后Label设置删除线无效
- maven在eclipse中配置过程并导入maven工程,以及关于工程中jdk和git的设置,
- IE浏览器自身读缓存问题
- SQLAlchemy ORM 快速入门
- FaceNet论文笔记
- html中input设置为readonly与disabled的区别