繁体   English   中英

python:使用线程,应用线程超时

[英]python: using threading , apply thread timeout

我在多线程脚本中使用 threading 库,想给线程加上超时:如果某个线程在指定时间后仍未调用 task_done,就应当让它退出当前函数并标记 task_done,以免阻塞整个队列。

这是我的代码:

def create_workers():
    """Spawn NUMBER_OF_THREADS daemon threads that each run the work() loop."""
    for _ in range(NUMBER_OF_THREADS):
        worker = threading.Thread(target=work)
        worker.daemon = True  # daemon: dies with the main thread
        worker.start()


def create_jobs():
    """Enqueue every pending date, block until all are marked done, re-scan."""
    for pending_date in Date_set:
        queue.put(pending_date)
    queue.join()  # waits for one task_done() per put()
    scrape()


def scrape_page(thread_name, page_url):
    """Announce which worker took the page, then run the real scrape for it."""
    banner = thread_name + ' now working on ' + page_url
    print(banner)
    get_active_urls_perDay(
        session=s, Date=page_url, County=Cty, courtSystem=CS, PT=P)


def work():
    """Worker loop: pull dates off the queue and scrape them forever.

    Fixes two defects of the original:
    * ``queue.task_done()`` now runs in a ``finally`` block, so an
      exception inside the scrape can no longer leave ``queue.join()``
      blocked forever (the second snippet in this file applies the same
      fix).
    * Python 2 ``print`` statements are replaced with ``print()`` calls,
      matching the rest of the file.
    """
    while True:
        url = queue.get()
        try:
            scrape_page(threading.current_thread().name, url)
            Date_set.remove(url)
            print(str(len(Date_set)) + " days more to go!")
            print("Number of threads active " + str(threading.activeCount()))
        finally:
            queue.task_done()  # always ack, even on failure


def scrape():
    """If any dates remain unprocessed, report the backlog and requeue them."""
    remaining = len(Date_set)
    if remaining:
        print(str(remaining) + ' days in the queue')
        create_jobs()

我想在 work 函数中为线程实现超时。除此之外代码运行正常,但尚未调用 task_done 的线程会使代码停住,一直等待它们返回。

def create_jobs():
    """Load every date from Date_set into the work queue, then run scrape()."""
    for entry in Date_set:
        queue.put(entry)
    scrape()

def create_workers():
    """Start NUMBER_OF_THREADS daemon workers and return them for joining."""
    workers = []
    for _ in range(NUMBER_OF_THREADS):
        worker = threading.Thread(target=work)
        workers.append(worker)
        worker.daemon = True  # daemon: killed when the main thread exits
        worker.start()
    return workers

def join_all(thread_list):
    """Wait up to 5 seconds for each thread in *thread_list* to finish.

    The original built a throwaway list via a comprehension executed only
    for its side effects; a plain ``for`` loop states the intent directly.

    Note: the 5-second timeout is per thread, so the worst case is
    ``5 * len(thread_list)`` seconds, and ``Thread.join(timeout)`` may
    return while the thread is still alive — check ``t.is_alive()`` if
    completion matters.
    """
    for t in thread_list:
        t.join(5)



def scrape_page(thread_name, page_url):
    """Print a progress line for this worker, then scrape the given page."""
    print(thread_name + ' now working on ' + page_url)
    get_active_urls_perDay(session=s,
                           Date=page_url,
                           County=Cty,
                           courtSystem=CS,
                           PT=P)


def work():
    """Worker loop: consume dates from the queue until the process exits.

    ``queue.task_done()`` already runs in ``finally`` here, so a failing
    scrape cannot stall ``queue.join()``.  This version also replaces the
    Python 2 ``print`` statements with ``print()`` calls, consistent with
    the rest of the file.
    """
    while True:
        url = queue.get()
        try:
            scrape_page(threading.current_thread().name, url)
            Date_set.remove(url)
            print(str(len(Date_set)) + " days more to go!")
            print("Number of threads active " + str(threading.activeCount()))
        finally:
            queue.task_done()

def scrape():
    """Requeue the remaining dates whenever the previous pass left work behind."""
    if Date_set:
        print(str(len(Date_set)) + ' days in the queue')
        create_jobs()


# NOTE(review): this run of statements ends in a bare `return`, so it is
# almost certainly the tail of a function/method whose `def` line lies
# outside this excerpt — session/County/courtSystem/PT/case_urls presumably
# come from that enclosing scope; confirm against the full file.
s=session
Cty= County
CS= courtSystem
P= PT
# Build the set of dates to scrape, start the workers, give each thread
# up to 5 s to finish (join_all), then scrape() whatever is left.
Date_set = create_dates_set(start_filingDate, end_filingDate)
t_list= create_workers()
join_all(t_list)
scrape()
return case_urls
import threading
import Queue
import time

# Serialises print output so lines from different workers don't interleave.
lock = threading.Lock()

# Demo stand-ins for the real scraper state: fake URLs instead of dates.
Date_set = ['127.0.0.1/test1', '127.0.0.1/test3', '127.0.0.1/test3', '127.0.0.1/test4']
queue = Queue.Queue()  # Python 2 `Queue` module (renamed `queue` in Python 3)
NUMBER_OF_THREADS = 3


def create_jobs():
    """Fill the queue with every entry from Date_set."""
    for entry in Date_set:
        queue.put(entry)
    # scrape() intentionally disabled in this demo

thread_list = []  # populated by create_workers(); consumed by join_all()

def create_workers():
    """Start NUMBER_OF_THREADS daemon threads and record them in thread_list."""
    for _ in range(NUMBER_OF_THREADS):
        worker = threading.Thread(target=work)
        thread_list.append(worker)
        worker.daemon = True  # daemon: main-thread exit terminates stragglers
        worker.start()


def join_all():
    """Give every worker in the module-level thread_list up to 5 s to finish.

    A plain loop replaces the original list comprehension, which was
    executed only for its side effects.  The timeout is per thread
    (worst case 5 * len(thread_list) seconds); workers still alive
    afterwards are daemons and die with the main thread.
    """
    for worker in thread_list:
        worker.join(5)


def scrape_page(thread_name, page_url):
    """Simulate scraping: sleep, then print progress under the shared lock.

    ``with lock:`` replaces the manual acquire()/release() pair — the
    original would never release the lock if a print raised, deadlocking
    the other workers.  The sleep stands in for the real
    get_active_urls_perDay() network call; prints use the print()
    function for Python 2/3 consistency with the rest of the file.
    """
    time.sleep(1)
    with lock:
        print(thread_name + ' now working on ' + page_url)
        print(page_url + ' done')
    # get_active_urls_perDay(session=s,Date=page_url,County=Cty, courtSystem=CS, PT=P)


def work():
    """Worker loop: drain the queue, printing progress under the lock.

    Fixes a check-then-act race in the original: it tested
    ``queue.empty()`` and then called the *blocking* ``queue.get()``.
    With several consumers, another thread can drain the queue between
    those two calls, leaving this worker blocked forever.
    ``get_nowait()`` makes the check-and-take atomic — an empty queue
    raises ``Queue.Empty`` and the worker exits cleanly.  Also replaces
    the non-idiomatic ``is True`` test, the manual lock handling, and
    the Python 2 print statements.
    """
    while True:
        try:
            url = queue.get_nowait()  # atomic: no empty()/get() race
        except Queue.Empty:
            break
        try:
            scrape_page(threading.current_thread().name, url)
            # Date_set.remove(url)
            with lock:
                print(str(len(Date_set)) + " days more to go!")
                print("Number of threads active " + str(threading.activeCount()))
        finally:
            queue.task_done()  # always ack, even if the scrape failed


def scrape():
    """Announce how many dates remain and push them back onto the queue."""
    pending = len(Date_set)
    if pending > 0:
        print(str(pending) + ' days in the queue')
        create_jobs()


# s=session
# Cty= County
# CS= courtSystem
# P= PT
# Date_set = create_dates_set(start_filingDate, end_filingDate)
# Demo driver: enqueue the fake URLs, start the daemon workers, then wait
# at most 5 s per worker (join_all).  Because the workers are daemons, the
# main thread's exit kills any worker still stuck in a long scrape.
# The Python 2 `print` statement is replaced by a single-argument print()
# call, which behaves identically on Python 2 and 3 and matches the
# print() usage elsewhere in this file.
create_jobs()
create_workers()
join_all()
print('main thread quit and all worker thread quit even if it is not finished')
# scrape()
# return case_urls

这个例子可以工作:我用 sleep(200) 模拟 get_active_urls_perDay,大约 15 秒后脚本就会停止——因为主线程对 3 个线程依次 join(5),即 3 × 5 秒;worker 都是守护线程,主线程退出时它们随之终止。如果把 sleep(200) 换成 sleep(1),所有线程都会正常完成,主线程随后退出。

暂无
暂无

声明:本站的技术帖子网页,遵循CC BY-SA 4.0协议,如果您需要转载,请注明本站网址或者原文地址。任何问题请咨询:yoyou2525@163.com.

 
粤ICP备18138465号  © 2020-2024 STACKOOM.COM