worldkun 2020-04-21
A number of recent projects needed this functionality, and since it is fairly simple to implement in Python, I did it this way. The code is pasted below; if you find it useful, please give it a like~
# coding: utf-8
import os
import time
import requests
import urllib.parse
from bs4 import BeautifulSoup
from urllib.parse import urlparse
from fake_useragent import UserAgent
from multiprocessing.pool import ThreadPool

LOCATIONS = {}        # resolved URL per domain, keyed by netloc so each domain appears once
GLOBAL_THREAD = 500   # size of the thread pool
GLOBAL_TIMEOUT = 50   # per-request timeout in seconds

def get_links(keyword, generator, pages):
    # Build the Baidu result-page URLs for every keyword suffix in the
    # "generator" range and every result page in the "pages" range (both "a-b" strings).
    links = []
    gen_start, gen_end = map(int, generator.split("-"))
    page_start, page_end = map(int, pages.split("-"))
    for page in range(page_start, page_end + 1):
        for genera in range(gen_start, gen_end + 1):
            links.append(
                "http://www.baidu.com.cn/s?wd=" + urllib.parse.quote(keyword + str(genera))
                + "&pn=" + str(page * 10))
    return links

def get_page(url):
    # Fetch one result page and resolve each result's redirect link to its real
    # target by reading the Location header instead of following the redirect.
    headers = {"user-agent": UserAgent().chrome}
    try:
        req = requests.get(url, headers=headers, timeout=GLOBAL_TIMEOUT)
        req.encoding = "utf-8"
        soup = BeautifulSoup(req.text, "lxml")
        for link in soup.select("div.result > h3.t > a"):
            req = requests.get(link.get("href"), headers=headers,
                               allow_redirects=False, timeout=GLOBAL_TIMEOUT)
            location = req.headers.get("location", "")
            if "=" in location:
                LOCATIONS[urlparse(location).netloc] = location
    except requests.RequestException:
        # Skip pages or links that time out or fail to load.
        pass

def baidu_search():
    try:
        os.system("cls")  # clear the console (Windows only)
        print("-" * 56 + "\n")
        print("| BaiduSearch Engine By 美图博客[https://www.meitubk.com/] |\n")
        print("-" * 56 + "\n")
        keyword = input("Keyword: ")
        generator = input("Generator(1-10): ")
        pages = input("Pages(0-10): ")
        start = time.time()
        # Crawl all result pages concurrently.
        pool = ThreadPool(processes=GLOBAL_THREAD)
        pool.map(get_page, get_links(keyword, generator, pages))
        pool.close()
        pool.join()
        end = time.time()
        path = r"D:\Desktop\result.txt"
        save_result(path)
        print("\nSaved in %s" % path)
        print("Result count: %d" % len(LOCATIONS))
        print("Running time: %ds" % (end - start))
    except ValueError:
        # The generator/page ranges were not valid "a-b" numbers.
        print("\nInput Error!")
        exit(0)

def save_result(path):
    # Write one resolved URL per line.
    with open(path, "w", encoding="utf-8") as file:
        for url in LOCATIONS.values():
            file.write(url + "\n")
baidu_search()
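
For readers unpacking how get_page() works: each result link on a Baidu results page is a redirect URL, and requesting it with allow_redirects=False exposes the real destination in the Location response header. Below is a minimal standalone sketch of just that step; the href passed in is a placeholder for a link scraped from a results page, not a real URL.

import requests
from urllib.parse import urlparse

def resolve_baidu_link(href, timeout=10):
    # Request the redirect link but do not follow it; the real target
    # is returned in the Location header (empty if Baidu did not redirect).
    resp = requests.get(href, allow_redirects=False, timeout=timeout)
    real_url = resp.headers.get("location", "")
    return urlparse(real_url).netloc, real_url

# Usage (placeholder link):
# domain, url = resolve_baidu_link("http://www.baidu.com/link?url=...")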
Based on what people in different regions like, we can target marketing more effectively, and the terms people search for also vary by region. So how do we see what people in a given region are hot-searching? Let me explain how to check. In the search category bar there is an item labelled "更多" (More); click it to open More.