国产探花免费观看_亚洲丰满少妇自慰呻吟_97日韩有码在线_资源在线日韩欧美_一区二区精品毛片,辰东完美世界有声小说,欢乐颂第一季,yy玄幻小说排行榜完本


Two code examples of multithreaded scraping in Python

2019-11-25 18:20:29
Source: reprinted · Contributed by: a reader

Code example 1:

#!/usr/bin/python
# -*- coding: utf-8 -*-

import threading
import Queue
import urllib2
import re
import MySQLdb

#
# Database settings
#
DB_HOST = '127.0.0.1'
DB_USER = "XXXX"
DB_PASSWD = "XXXXXXXX"
DB_NAME = "xxxx"

#
# Crawler settings
#
THREAD_LIMIT = 3                 # number of worker threads
jobs = Queue.Queue(5)            # job queue of (id, url) pairs, max length 5
singlelock = threading.Lock()    # lock so threads do not interleave their prints
info = Queue.Queue()             # result queue of (id, title) pairs

def workerbee(inputlist):
    # Start the worker threads.
    for x in xrange(THREAD_LIMIT):
        print 'Thread {0} started.'.format(x)
        t = spider()
        t.start()
    # Feed the jobs into the bounded queue.
    for i in inputlist:
        try:
            jobs.put(i, block=True, timeout=5)
        except Queue.Full:
            singlelock.acquire()
            print "The queue is full!"
            singlelock.release()

    # Wait for the threads to finish.
    singlelock.acquire()         # Acquire the lock so we can print
    print "Waiting for threads to finish."
    singlelock.release()         # Release the lock
    jobs.join()                  # Block until every job has been marked done.

def getTitle(url, time=10):
    # Fetch the page and extract the contents of the <title> tag.
    response = urllib2.urlopen(url, timeout=time)
    html = response.read()
    response.close()
    reg = r'<title>(.*?)</title>'
    title = re.compile(reg).findall(html)
    # title = title[0].decode('gb2312', 'replace').encode('utf-8')
    title = title[0]
    return title

class spider(threading.Thread):
    def run(self):
        while 1:
            try:
                job = jobs.get(True, 1)   # (id, url); give up after 1 second of idleness
            except Queue.Empty:
                break
            try:
                title = getTitle(job[1])
                info.put([job[0], title], block=True, timeout=5)
                # print 'This {0} is {1}'.format(job[1], title)
            except Exception:
                pass                      # skip pages that fail to download or parse
            finally:
                jobs.task_done()          # always mark the job done so jobs.join() can return

if __name__ == '__main__':
    con = None
    urls = []
    try:
        con = MySQLdb.connect(DB_HOST, DB_USER, DB_PASSWD, DB_NAME)
        cur = con.cursor()
        cur.execute('SELECT id,url FROM `table_name` WHERE `status`=0 LIMIT 10')
        rows = cur.fetchall()
        for row in rows:
            urls.append([row[0], row[1]])
        workerbee(urls)
        while not info.empty():
            print info.get()
    finally:
        if con:
            con.close()
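The script above targets Python 2 only: Queue, urllib2, MySQLdb and the print statement do not exist under those names in Python 3. As a point of comparison, here is a minimal sketch of the same bounded-queue / worker-thread pattern for Python 3; the thread count and example URLs are placeholders, not part of the original script.

#!/usr/bin/env python3
# A minimal Python 3 sketch of the same pattern: a bounded job queue,
# a fixed pool of worker threads, and a result queue.
# The example URLs and thread count below are placeholders.
import queue
import re
import threading
import urllib.request

THREAD_LIMIT = 3
jobs = queue.Queue(maxsize=5)
results = queue.Queue()

def get_title(url, timeout=10):
    # Fetch a page and pull out the <title> text.
    with urllib.request.urlopen(url, timeout=timeout) as resp:
        html = resp.read().decode('utf-8', 'replace')
    match = re.search(r'<title>(.*?)</title>', html, re.S)
    return match.group(1) if match else ''

def worker():
    while True:
        try:
            url = jobs.get(timeout=1)          # exit once the queue stays empty
        except queue.Empty:
            break
        try:
            results.put((url, get_title(url)))
        except Exception as exc:
            results.put((url, 'ERROR: %s' % exc))
        finally:
            jobs.task_done()                   # keep jobs.join() from hanging

if __name__ == '__main__':
    for _ in range(THREAD_LIMIT):
        threading.Thread(target=worker, daemon=True).start()
    for url in ['https://example.com/', 'https://example.org/']:
        jobs.put(url)
    jobs.join()                                # wait until every job is marked done
    while not results.empty():
        print(results.get())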

Code example 2:

#!/usr/bin/python
# -*- coding: utf-8 -*-
# Filename: robot.py

import threading
import Queue
import urllib2
import re

#
# Settings
#
THREAD_LIMIT = 3                 # number of worker threads
jobs = Queue.Queue(5)            # job queue, max length 5
singlelock = threading.Lock()    # lock so threads do not interleave their prints

urls = ['http://games.sina.com.cn/w/n/2013-04-28/1634703505.shtml',
        'http://games.sina.com.cn/w/n/2013-04-28/1246703487.shtml',
        'http://games.sina.com.cn/w/n/2013-04-28/1028703471.shtml',
        'http://games.sina.com.cn/w/n/2013-04-27/1015703426.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1554703373.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1512703346.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1453703334.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1451703333.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1445703329.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1434703322.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1433703321.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1433703320.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1429703318.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1429703317.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1409703297.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1406703296.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1402703292.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1353703286.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1348703284.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1327703275.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1239703265.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1238703264.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1231703262.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1229703261.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1228703260.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1223703259.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1218703258.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1202703254.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1159703251.shtml',
        'http://games.sina.com.cn/w/n/2013-04-26/1139703233.shtml']

def workerbee(inputlist):
    # Start the worker threads.
    for x in xrange(THREAD_LIMIT):
        print 'Thread {0} started.'.format(x)
        t = spider()
        t.start()
    # Feed the URLs into the bounded queue.
    for i in inputlist:
        try:
            jobs.put(i, block=True, timeout=5)
        except Queue.Full:
            singlelock.acquire()
            print "The queue is full!"
            singlelock.release()

    # Wait for the threads to finish.
    singlelock.acquire()         # Acquire the lock so we can print
    print "Waiting for threads to finish."
    singlelock.release()         # Release the lock
    jobs.join()                  # Block until every job has been marked done.

def getTitle(url, time=10):
    # Fetch the page and extract the contents of the <title> tag.
    response = urllib2.urlopen(url, timeout=time)
    html = response.read()
    response.close()
    reg = r'<title>(.*?)</title>'
    title = re.compile(reg).findall(html)
    # The Sina pages are GB2312-encoded; convert the title to UTF-8.
    title = title[0].decode('gb2312', 'replace').encode('utf-8')
    return title

class spider(threading.Thread):
    def run(self):
        while 1:
            try:
                job = jobs.get(True, 1)   # a URL string; give up after 1 second of idleness
            except Queue.Empty:
                break
            try:
                title = getTitle(job)
                singlelock.acquire()
                print 'This {0} is {1}'.format(job, title)
                singlelock.release()
            except Exception:
                pass                      # skip pages that fail to download or parse
            finally:
                jobs.task_done()          # always mark the job done so jobs.join() can return

if __name__ == '__main__':
    workerbee(urls)
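On Python 3 the same fan-out can be written without any hand-rolled queue or thread bookkeeping by using the standard library's concurrent.futures. A minimal, self-contained sketch; the URLs are placeholders and get_title is the same kind of helper used above, not code from the original script:

# A sketch of the same crawl with concurrent.futures (Python 3 standard library).
# The URLs below are placeholders, not the Sina links from the original script.
from concurrent.futures import ThreadPoolExecutor
import re
import urllib.request

def get_title(url, timeout=10):
    # Fetch a page and pull out the <title> text.
    with urllib.request.urlopen(url, timeout=timeout) as resp:
        html = resp.read().decode('utf-8', 'replace')
    match = re.search(r'<title>(.*?)</title>', html, re.S)
    return match.group(1) if match else ''

urls = ['https://example.com/', 'https://example.org/']

with ThreadPoolExecutor(max_workers=3) as pool:
    # pool.map preserves input order; an exception raised inside a worker
    # is re-raised here when its result is iterated.
    for url, title in zip(urls, pool.map(get_title, urls)):
        print(url, title)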
