0
点赞
收藏
分享

微信扫一扫

python装饰器与try-catch的结合

朱悟能_9ad4 2022-02-25 阅读 76

python语法糖之装饰器try-catch

def exception_warpper(func):
    """Decorator that runs *func*, swallowing any exception it raises.

    On failure the exception is logged and an alarm message is sent; the
    wrapper then returns None instead of propagating the error.

    Returns:
        The wrapped function's return value, or None if it raised.
    """
    # BUGFIX: the original called functools.wraps(func) without applying the
    # returned decorator to `inner`, so wrapped functions lost their
    # __name__/__doc__. Decorator form actually applies it.
    @functools.wraps(func)
    def inner(*args, **kwargs):
        func_name = ''
        ret = None
        try:
            # Resolved inside the try so an odd callable without __name__
            # is handled by the same except path.
            func_name = func.__name__
            ret = func(*args, **kwargs)
        except Exception as e:
            logger.exception(e)
            Alarm.msg('exception, {}'.format(func_name))
        return ret
    return inner

alarm报警模块

class Alarm(object):
    """Best-effort alarm notifications via the in-house message gateway.

    Both entry points return True on success and False on any failure;
    failures are logged but never raised to the caller.
    """

    # Gateway endpoint and recipient list shared by msg() and call().
    _API_URL = "http://prom.thor.today:8000/api/home/message/custom/send"
    _RECEIVERS = ["lijinze1"]

    @classmethod
    def _send(cls, payload):
        """POST *payload* to the gateway; best-effort, never raises.

        Returns:
            bool: True when the gateway accepted the request, else False.
        """
        ret = False
        try:
            resp = requests.request("POST", cls._API_URL, json=payload, timeout=10)
            # BUGFIX: the original ignored the HTTP status, so a 4xx/5xx
            # reply was reported as success. Treat it as a failure instead.
            resp.raise_for_status()
            ret = True
        except Exception as e:
            logger.exception(e)
        return ret

    @classmethod
    def msg(cls, txt):
        """Send *txt* as a text alarm message. Returns True on success."""
        return cls._send({
            "tousers": cls._RECEIVERS,
            "msgtype": "text",
            "text": {
                "content": txt
            }
        })

    @classmethod
    def call(cls, txt):
        """Send *txt* as a voice alarm (phone call). Returns True on success."""
        return cls._send({
            "tousers": cls._RECEIVERS,
            "msgtype": "voice",
            "voice": {
                "message": txt
            }
        })

当python多线程遇到异常处理

spider_dir = os.path.dirname(os.path.realpath(__file__))
class Spider(object):
    """Discovers crawler_*.py plugins and runs their CrawlerImpl.crawl in a thread pool."""

    def _load_class(self, cls_path, cls_pkg, cls_name):
        """Import the module at *cls_path* (relative to package *cls_pkg*)
        and return its attribute named *cls_name*."""
        module_name = os.path.splitext(os.path.basename(cls_path))[0]
        module = importlib.import_module('.' + module_name, package=cls_pkg)
        return getattr(module, cls_name)

    def _crawl(self):
        """Submit every discovered crawler to the pool, then collect results
        as they complete into self.crawler_result_list."""
        pattern = os.path.join(spider_dir, 'crawler', 'crawler_*.py')
        with ThreadPoolExecutor(max_workers=self.max_worker) as pool:
            pending = set()
            for path in glob.glob(pattern):
                crawler_cls = self._load_class(
                    path,
                    'spider.crawler',
                    'CrawlerImpl'
                )
                if crawler_cls is None:
                    logger.error('spider:{} load fail!', path)
                    continue
                # exception_warpper makes a failing crawl yield None
                # instead of poisoning the pool with an exception.
                job = pool.submit(exception_warpper(crawler_cls().crawl))
                pending.add(job)
            for done in as_completed(pending):
                result = done.result()
                if not result:
                    logger.warning('crawl_result is null')
                    continue
                self.crawler_result_list.append(result)
                logger.info('crawl:{}, {}', result.name, result.url)
举报

相关推荐

0 条评论