hilary0 2020-07-05
# Testing the browser pop-up (HTTP basic auth) verification:
import logging
from urllib import parse

import requests

logging.basicConfig(level=logging.INFO)


class ScrapyManager(object):
    def __init__(self, url, project_name, spider=None, username=None, pwd=None):
        self.url = url
        self.project_name = project_name
        self.spider = spider
        # Credentials for the basic-auth prompt the browser would otherwise show
        self.auth = (username, pwd)

    def start_project(self):
        """
        Start the spider.
        :return:
        """
        if not self.spider:
            raise Exception('No spider name was provided!')
        data = dict(
            project=self.project_name,
            spider=self.spider,
        )
        start_url = parse.urljoin(self.url, 'schedule.json')
        res = requests.post(url=start_url, data=data, auth=self.auth)
        logging.info(res.text)

    def del_project(self):
        """
        Delete the project.
        :return:
        """
        data = dict(
            project=self.project_name,
            spider=self.spider,
        )
        start_url = parse.urljoin(self.url, 'delproject.json')
        res = requests.post(url=start_url, data=data, auth=self.auth)
        logging.info(res.text)

    def stop_job(self, job_id):
        """
        Cancel a running job.
        :param job_id: id of the job to cancel
        :return:
        """
        data = dict(
            project=self.project_name,
            job=job_id,
        )
        start_url = parse.urljoin(self.url, 'cancel.json')
        res = requests.post(url=start_url, data=data, auth=self.auth)
        logging.info(res.text)
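The "browser pop-up" here is the HTTP basic-auth prompt a browser shows when the server (here, a Scrapyd instance behind basic auth) answers with 401; with `requests`, the same credentials are passed through the `auth=(username, password)` tuple instead. Below is a minimal usage sketch of the class above; the server URL, project name, spider name, and credentials are placeholder assumptions, not values from the original post.

```python
# Hypothetical values for illustration only.
if __name__ == '__main__':
    manager = ScrapyManager(
        url='http://127.0.0.1:6800/',   # Scrapyd server protected by basic auth (assumed address)
        project_name='demo_project',
        spider='demo_spider',
        username='admin',
        pwd='secret',
    )
    # POSTs to schedule.json with auth=(username, pwd), so no interactive prompt is needed
    manager.start_project()
```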
class Singleton:
    def __new__(cls, *args, **kwargs):
        # The key point: every instantiation returns this same instance object
        if not hasattr(cls, 'instance'):
            cls.instance = super().__new__(cls)
        return cls.instance
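A quick check of the behaviour the comment describes: no matter how many times the class is instantiated, the same object comes back, because it is cached on the class itself. This small demo only assumes the `Singleton` class defined above.

```python
a = Singleton()
b = Singleton()
print(a is b)  # True: both names refer to the single instance stored on the class
```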