
Downloading Files with Multiple Threads in Python

This article walks through an example of downloading files with multiple threads in Python, shared here for your reference. The code targets Python 2 (it uses httplib, urllib2, and Queue). The implementation is as follows:

import httplib
import urllib2
from threading import Thread
from Queue import Queue
from time import sleep

# Route every urllib2 request through the HTTP proxy.
proxy = 'your proxy'  # fill in your proxy host:port
opener = urllib2.build_opener(urllib2.ProxyHandler({'http': proxy}))
urllib2.install_opener(opener)

# Software ids harvested from the list pages (dict used as a set).
ids = {}
# Crawl the list pages and collect ids from links of the form
# http://www.someweb.net/soft/<id>.<ext>
for i in range(1, 110):
    try:
        listUrl = "http://www.someweb.net/sort/list_8_%d.shtml" % (i)
        print listUrl
        page = urllib2.urlopen(listUrl).read()
        speUrl = "http://www.someweb.net/soft/"
        speUrlLen = len(speUrl)
        idx = page.find(speUrl, 0)
        while idx != -1:
            dotIdx = page.find(".", idx + speUrlLen)
            if dotIdx != -1:
                id = page[idx + speUrlLen:dotIdx]
                ids[id] = 1
            idx = page.find(speUrl, idx + speUrlLen)
    except:
        pass  # ignore list pages that fail to load or parse

q = Queue()     # work queue shared by the download threads
NUM = 5         # number of worker threads
failedId = []   # ids whose download failed
def do_something_using(id):
    try:
        # The download page redirects to the actual file. The query-string
        # format below is an assumption -- adjust it to match the real site.
        url = "http://www.someweb.net/download.php?id=%s" % (id)
        # Send a HEAD request through the proxy to read the redirect target
        # without fetching the body twice.
        h2 = httplib.HTTPConnection("your proxy", "your port")  # fill in proxy host and port
        h2.request("HEAD", url)
        resp = h2.getresponse()
        location = resp.getheader("location")
        # Download the file from the redirect target and save it to disk.
        sContent = urllib2.urlopen(location).read()
        savePath = "C:\\someweb\\%s.rar" % (id)
        f = open(savePath, 'wb')
        f.write(sContent)
        f.close()
        print savePath + " saved"
    except:
        failedId.append(id)  # remember ids whose download failed
def working():
    # Each worker pulls an id from the queue, downloads it, and marks the task done.
    while True:
        arguments = q.get()
        do_something_using(arguments)
        sleep(1)  # pause briefly between downloads
        q.task_done()

# Start NUM daemon worker threads.
for i in range(NUM):
    t = Thread(target=working)
    t.setDaemon(True)
    t.start()

# Enqueue every id and block until all of them have been processed.
for id in ids:
    q.put(id)
q.join()
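
The same queue-plus-daemon-threads pattern carries over to Python 3 with only module renames (httplib becomes http.client, urllib2 becomes urllib.request, and Queue becomes queue). Below is a minimal Python 3 sketch of the worker-pool part; the URL list and save directory are placeholder assumptions, not taken from the article:

import os
from queue import Queue
from threading import Thread
from urllib.request import urlopen

NUM_WORKERS = 5
SAVE_DIR = "downloads"   # assumed output directory
URLS = [                 # assumed example URLs
    "http://www.someweb.net/soft/1.rar",
    "http://www.someweb.net/soft/2.rar",
]

q = Queue()

def download(url):
    try:
        data = urlopen(url, timeout=30).read()
        name = os.path.basename(url) or "index.bin"
        path = os.path.join(SAVE_DIR, name)
        with open(path, "wb") as f:
            f.write(data)
        print(path, "saved")
    except Exception as e:
        print("failed:", url, e)

def worker():
    # Pull URLs from the shared queue until the program exits.
    while True:
        url = q.get()
        download(url)
        q.task_done()

os.makedirs(SAVE_DIR, exist_ok=True)
for _ in range(NUM_WORKERS):
    Thread(target=worker, daemon=True).start()
for url in URLS:
    q.put(url)
q.join()  # block until task_done() has been called once per queued URL

The key design point in both versions is that the workers are daemon threads looping forever over a shared Queue: q.join() in the main thread only returns once every queued item has been marked done, and because the threads are daemons the process can still exit cleanly afterwards.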

I hope this article proves helpful for your Python programming.