

Monitoring Salt job status with Python and pushing task data to Redis

2020-02-16

After Salt dispatches a job, the returns of completed tasks are actively pushed into Redis. A Redis list is used as a producer/consumer message queue: the Salt event listener produces messages, and a queue daemon consumes them and forwards the data to the backend.
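Before the full implementation, here is a minimal sketch of the Redis list-as-queue pattern it relies on. This is only an illustration: it assumes a locally reachable Redis instance, the redis-py client, and a placeholder job id; the queue name task:prod:queue matches the one used in the code below.

# Minimal sketch of the list-as-queue (producer/consumer) pattern,
# assuming a local Redis and the redis-py client.
import json
import redis

r = redis.StrictRedis(host='127.0.0.1', port=6379, db=0)

# Producer side: push a JSON-encoded message onto the queue.
r.lpush('task:prod:queue',
        json.dumps({'type': 'pushTaskData', 'jid': '20200216003826'}))  # placeholder jid

# Consumer side: block until a message arrives, then decode it.
_, raw = r.blpop('task:prod:queue', 0)
message = json.loads(raw)
print(message['type'], message['jid'])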

# coding=utf-8
import fnmatch, json, logging
import salt.config
import salt.utils.event
from salt.utils.redis import RedisPool
import sys, os, datetime, random
import multiprocessing, threading
from joi.utils.gobsAPI import PostWeb
# NOTE: salt_job_listener() below also relies on a project-level `settings`
# object exposing SALT_MASTER_OPTS['project_id']; import it from your project
# (for example Django's `from django.conf import settings`).

logger = logging.getLogger(__name__)
opts = salt.config.client_config('/data/salt/saltstack/etc/salt/master')
r_conn = RedisPool(opts.get('redis_db')).getConn()
lock = threading.Lock()


class RedisQueueDaemon(object):
    '''
    Redis queue listener
    '''
    def __init__(self, r_conn):
        self.r_conn = r_conn                 # redis connection instance
        self.task_queue = 'task:prod:queue'  # task message queue

    def listen_task(self):
        '''
        Main listening loop
        '''
        while True:
            queue_item = self.r_conn.blpop(self.task_queue, 0)[1]
            print('queue get', queue_item)
            # self.run_task(queue_item)
            t = threading.Thread(target=self.run_task, args=(queue_item,))
            t.start()

    def run_task(self, info):
        '''
        Handle one queued message
        '''
        lock.acquire()
        info = json.loads(info)
        if info['type'] == 'pushTaskData':
            task_data = self.getTaskData(info['jid'])
            task_data = json.loads(task_data) if task_data else []
            logger.info('fetched cached data: %s' % task_data)
            if task_data:
                if self.sendTaskData2bs(task_data):
                    task_data = []
            self.setTaskData(info['jid'], task_data)
        elif info['type'] == 'setTaskState':
            self.setTaskState(info['jid'], info['state'], info['message'])
        elif info['type'] == 'setTaskData':
            self.setTaskData(info['jid'], info['data'])
        lock.release()

    def getTaskData(self, jid):
        return self.r_conn.hget('task:' + jid, 'data')

    def setTaskData(self, jid, data):
        self.r_conn.hset('task:' + jid, 'data', json.dumps(data))

    def sendTaskData2bs(self, task_data):
        logger.info('sending task data to the backend...')
        logger.info(task_data)
        if task_data:
            p = PostWeb('/jgapi/verify', task_data, 'pushFlowTaskData')
            result = p.postRes()
            print(result)
            if result['code']:
                logger.info('send succeeded!')
                return True
            else:
                logger.error('send failed!')
                return False
        else:
            return True

    def setTaskState(self, jid, state, message=''):
        logger.info('setting state of task [%s] on the backend' % str(jid))
        p = PostWeb('/jgapi/verify', {'code': jid, 'state': state, 'message': message}, 'setTaskState')
        result = p.postRes()
        if result['code']:
            logger.info('set state of task [%s] successfully!' % str(jid))
            return True, result
        else:
            logger.error('failed to set state of task [%s]!' % str(jid))
            return result


def salt_job_listener():
    '''
    Salt job listener
    '''
    sevent = salt.utils.event.get_event(
        'master',
        sock_dir=opts['sock_dir'],
        transport=opts['transport'],
        opts=opts)

    while True:
        ret = sevent.get_event(full=True)
        if ret is None:
            continue
        if fnmatch.fnmatch(ret['tag'], 'salt/job/*/ret/*'):
            task_key = 'task:' + ret['data']['jid']
            task_state = r_conn.hget(task_key, 'state')
            task_data = r_conn.hget(task_key, 'data')
            if task_state:
                jid_data = {
                    'code': ret['data']['jid'],
                    'project_id': settings.SALT_MASTER_OPTS['project_id'],
                    'serverip': ret['data']['id'],
                    'returns': ret['data']['return'],
                    'name': ret['data']['id'],
                    'state': 'success' if ret['data']['success'] else 'failed',
                }
                task_data = json.loads(task_data) if task_data else []
                task_data.append(jid_data)
                logger.info('appended data: %s' % json.dumps(task_data))
                r_conn.lpush('task:prod:queue', json.dumps({'type': 'setTaskData', 'jid': ret['data']['jid'], 'data': task_data}))
                # r_conn.hset(task_key, 'data', json.dumps(task_data))

                if task_state == 'running':
                    if len(task_data) >= 1:
                        logger.info('queued message: pushTaskData')
                        r_conn.lpush('task:prod:queue', json.dumps({'jid': ret['data']['jid'], 'type': 'pushTaskData'}))
                else:
                    logger.info('task {0} finished, sending the remaining data to the backend...'.format(task_key))
                    logger.info('queued message: pushTaskData')
                    r_conn.lpush('task:prod:queue', json.dumps({'jid': ret['data']['jid'], 'type': 'pushTaskData'}))

            print(datetime.datetime.now())


def run():
    print('start redis producer queue listener...')
    logger.info('start redis producer queue listener...')
    multiprocessing.Process(target=RedisQueueDaemon(r_conn).listen_task, args=()).start()
    print('start salt job listener...')
    logger.info('start salt job listener...')
    multiprocessing.Process(target=salt_job_listener, args=()).start()
    '''
    p = multiprocessing.Pool(2)
    print('start redis producer queue listener...')
    p.apply_async(redis_queue_listenr, ())
    print('start salt job listener...')
    p.apply_async(salt_job_listener, ())
    p.close()
    p.join()
    '''
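A possible way to start the listeners and feed the queue, assuming the code above is saved as a module named salt_job_monitor (a hypothetical name) and that Redis and the Salt master config path are reachable. The enqueued dictionary follows the message format that run_task() expects; the jid and message values are placeholders.

# Hypothetical usage sketch; the module name salt_job_monitor is an assumption.
import json
import salt_job_monitor as monitor

if __name__ == '__main__':
    # Start both the Redis queue consumer and the Salt event listener processes.
    monitor.run()

    # Any producer can now enqueue a message in the format run_task() expects,
    # e.g. asking the daemon to mark a job as finished on the backend.
    monitor.r_conn.lpush('task:prod:queue', json.dumps({
        'type': 'setTaskState',
        'jid': '20200216003826',          # placeholder jid
        'state': 'success',
        'message': 'all minions returned',  # placeholder message
    }))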