| | |
| | | import requests |
| | | from scrapy import signals |
| | | |
| | | from aijuke_spider.config.db_config import matrix_session |
| | |
| | | spider.logger.error(f"Error while logging task data: {e}") |
| | | self.session.rollback() |
| | | finally: |
| | | self.session.close() |
| | | self.session.close() |
| | | |
class TaskCallBackExtension:
    """Scrapy extension that notifies the matrix engine when a spider finishes.

    On ``spider_closed`` it issues a GET request to the engine's task
    callback endpoint, identifying the task via the ``UUID`` / ``SEC_ID``
    settings and authenticating with a tenant header.
    """

    def __init__(self, settings):
        # Crawler settings; expected to carry UUID, SEC_ID and TENANT_ID.
        self.settings = settings
        self.base_url = 'http://matrix.uj345.cc'

    @classmethod
    def from_crawler(cls, crawler):
        """Instantiate the extension and hook it to the spider_closed signal."""
        extension = cls(crawler.settings)
        crawler.signals.connect(extension.spider_closed, signal=signals.spider_closed)
        return extension

    def spider_closed(self, spider):
        """Report task completion to the engine callback endpoint.

        Never raises: network failures are logged so they cannot break
        the spider shutdown sequence.
        """
        params = {
            'uuid': self.settings.get('UUID'),
            'sec_id': self.settings.get('SEC_ID'),
        }
        headers = {
            # NOTE(review): hardcoded shared token — consider moving to settings.
            'token': 'jwjk2024@!',
            'tenant_id': self.settings.get('TENANT_ID'),
        }
        try:
            # timeout prevents a stalled endpoint from hanging spider shutdown
            response = requests.get(
                url=f'{self.base_url}/engine/api/task/callback',
                params=params,
                headers=headers,
                timeout=10,
            )
        except requests.exceptions.RequestException as e:
            spider.logger.error(f"回调请求失败: {e}")
            return
        if response.status_code == 200:
            spider.logger.info("回调成功")
        else:
            spider.logger.info("回调失败")