
libcurl asynchronous requests and connection reuse

I want to perform 20 concurrent requests against a server. The example code is here.

#include <stdio.h>
#include <pthread.h>
#include <curl/curl.h>

#define URL "http://localhost/test"

/* number of threads to fire up in parallel */
#define NUM_THREADS 20

/* how many times each URL is transferred per thread */
#define URL_ITERATIONS 1000000

#define NUM_LOCKS CURL_LOCK_DATA_LAST
static pthread_mutex_t lockarray[NUM_LOCKS];

static size_t write_db(void *ptr, size_t size, size_t nmemb, void *data)
{
 /* not interested in the downloaded bytes, return the size */
 (void)ptr;  /* unused */
 (void)data; /* unused */
 return (size_t)(size * nmemb);
}

static void lock_cb(CURL *handle, curl_lock_data data,
                   curl_lock_access access, void *userptr)
{
 (void)access;
 (void)userptr;
 (void)handle;
 pthread_mutex_lock(&lockarray[data]);
}

static void unlock_cb(CURL *handle, curl_lock_data data,
                     void *userptr)
{
 (void)userptr;
 (void)handle;
 pthread_mutex_unlock(&lockarray[data]);
}

static void init_locks(void)
{
 int i;

 for(i = 0; i < NUM_LOCKS; i++)
   pthread_mutex_init(&lockarray[i], NULL);
}

static void kill_locks(void)
{
 int i;

 for(i = 0; i < NUM_LOCKS; i++)
   pthread_mutex_destroy(&(lockarray[i]));
}

struct initurl {
 const char *url;
 CURLSH *share;
 int threadno;
};

static void *run_thread(void *ptr)
{
 struct initurl *u = (struct initurl *)ptr;
 int i;

 for(i = 0; i < URL_ITERATIONS; i++) {
   CURL *curl = curl_easy_init();
   curl_easy_setopt(curl, CURLOPT_URL, u->url);
   curl_easy_setopt(curl, CURLOPT_SHARE, u->share);
   curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, write_db);
   curl_easy_perform(curl); /* ignores error */
   curl_easy_cleanup(curl);
   fprintf(stderr, "Tread %d completed one\n", u->threadno);
 }

 return NULL;
}

int main(void)
{
 pthread_t tid[NUM_THREADS];
 int i;
 int error;
 CURLSH *share;
 struct initurl url[NUM_THREADS];

 /* Must initialize libcurl before any threads are started */
 curl_global_init(CURL_GLOBAL_ALL);

 share = curl_share_init();
 curl_share_setopt(share, CURLSHOPT_LOCKFUNC, lock_cb);
 curl_share_setopt(share, CURLSHOPT_UNLOCKFUNC, unlock_cb);
 curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_CONNECT);

 init_locks();

 for(i = 0; i < NUM_THREADS; i++) {
   url[i].url = URL;
   url[i].share = share;
   url[i].threadno = i;
   error = pthread_create(&tid[i], NULL, run_thread, &url[i]);
   if(0 != error)
     fprintf(stderr, "Couldn't run thread number %d, errno %d\n", i, error);
   else
     fprintf(stderr, "Thread %d, gets %s\n", i, URL);
 }

 /* now wait for all threads to terminate */
 for(i = 0; i < NUM_THREADS; i++) {
   error = pthread_join(tid[i], NULL);
   fprintf(stderr, "Thread %d terminated\n", i);
 }

 kill_locks();

 curl_share_cleanup(share);
 curl_global_cleanup();
 return 0;
}

Problem: curl_easy_perform blocks, and the requests per second are not completed in the required time. Does curl_easy_perform block all threads or only the calling thread? I am not sure, but I suspect it blocks all threads. How can I make a fixed number of requests to the server, i.e. 20 requests/sec, using the curl library?

You'll probably find that using the multi interface is much more convenient for doing multiple parallel transfers. It also handles the connection cache for you in one central place.
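
For illustration, here is a minimal single-threaded sketch of the multi interface driving 20 parallel transfers. The URL and transfer count are taken from the question's code; error handling is kept minimal, so treat this as a sketch rather than a drop-in replacement:

#include <stdio.h>
#include <curl/curl.h>

#define URL "http://localhost/test"
#define NUM_TRANSFERS 20

/* discard the downloaded bytes, like write_db in the question */
static size_t discard(void *ptr, size_t size, size_t nmemb, void *data)
{
 (void)ptr;
 (void)data;
 return size * nmemb;
}

int main(void)
{
 CURLM *multi;
 CURL *easy[NUM_TRANSFERS];
 int still_running = 0;
 int i;

 curl_global_init(CURL_GLOBAL_ALL);
 multi = curl_multi_init();

 /* add all transfers to the one multi handle; it keeps a single
    shared connection cache for them, no share object needed */
 for(i = 0; i < NUM_TRANSFERS; i++) {
   easy[i] = curl_easy_init();
   curl_easy_setopt(easy[i], CURLOPT_URL, URL);
   curl_easy_setopt(easy[i], CURLOPT_WRITEFUNCTION, discard);
   curl_multi_add_handle(multi, easy[i]);
 }

 /* drive all transfers from this single thread without blocking
    on any one of them */
 do {
   CURLMcode mc = curl_multi_perform(multi, &still_running);
   if(mc == CURLM_OK && still_running)
     mc = curl_multi_wait(multi, NULL, 0, 1000, NULL);
   if(mc != CURLM_OK)
     break;
 } while(still_running);

 for(i = 0; i < NUM_TRANSFERS; i++) {
   curl_multi_remove_handle(multi, easy[i]);
   curl_easy_cleanup(easy[i]);
 }
 curl_multi_cleanup(multi);
 curl_global_cleanup();
 return 0;
}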

Secondly: the documentation for the CURL_LOCK_DATA_CONNECT option has this very important note that you seem to have ignored:

due to a known bug, it is not safe to share connections this way between multiple concurrent threads.
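
If you keep the threaded design instead, one way to stay within documented behaviour is to share only the DNS cache through the share object; the share interface supports that across threads when lock/unlock callbacks are installed, as they are in the code above. A one-line sketch of that change follows, though whether DNS sharing alone is enough depends on your use case:

/* share only DNS data between threads; the lock_cb/unlock_cb
   callbacks already set on the share handle make this safe */
curl_share_setopt(share, CURLSHOPT_SHARE, CURL_LOCK_DATA_DNS);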
