设置全局PHP-CURL上传&下载速度限制?

时间:2018-05-25 13:47:37

标签: php ubuntu curl php-curl

我在几个单独的PHP脚本中使用CURL来下载/上传文件,有没有办法设置GLOBAL(非per-curl-handle)UL / DL速率限制?

不幸的是,您只能在CURL设置单个会话的速度限制,但这不是动态的。

作为服务器操作系统使用Ubuntu,有没有另外一种方法来限制CURL进程?

谢谢

1 个答案:

答案 0 :(得分:0)

curl / libcurl没有任何功能可以在curl_easy句柄之间共享带宽限制,更不用说在不同进程之间。我建议使用一个curl守护进程来统一强制执行带宽限制。客户端代码看起来像

// Value object returned by curl_daemon(): the captured output streams of one
// transfer executed by the daemon on the caller's behalf.
class curl_daemon_response{
    public $stdout; // transfer body captured by the daemon via CURLOPT_FILE
    public $stderr; // curl's verbose log captured via CURLOPT_STDERR
}
/**
 * Executes a curl transfer through the local curl daemon (which enforces the
 * global bandwidth limit) and returns the transfer's captured stdout/stderr.
 *
 * Wire format, both directions: 8-byte big-endian unsigned length prefix
 * followed by the payload (serialized options out; stdout then stderr back).
 *
 * @param array $curl_options CURLOPT_* constant => value pairs for the transfer
 * @return curl_daemon_response captured stdout and stderr of the transfer
 * @throws \RuntimeException if the daemon socket is unreachable or the
 *                           connection closes before a full response arrives
 */
function curl_daemon(array $curl_options):curl_daemon_response{
    $from_big_uint64_t=function(string $i): int {
        // 'J' = unsigned 64-bit, big-endian (network byte order).
        $arr = unpack ( 'Juint64_t', $i );
        return $arr ['uint64_t'];
    };
    $to_big_uint64_t=function(int $i): string {
        return pack ( 'J', $i );
    };
    // fread() on a socket may return fewer bytes than requested even in
    // blocking mode; loop until exactly $len bytes have been received.
    // (The original single fread() silently truncated large responses.)
    $read_exact=function($conn,int $len): string {
        $buf='';
        while(strlen($buf)<$len){
            $chunk=fread($conn,$len-strlen($buf));
            if($chunk===false||$chunk===''){
                throw new \RuntimeException("daemon connection closed mid-response");
            }
            $buf.=$chunk;
        }
        return $buf;
    };
    $conn = stream_socket_client("unix:///var/run/curl_daemon", $errno, $errstr, 3);
    if (!$conn) {
        // Fix: \RuntimeError does not exist in PHP; the SPL class is \RuntimeException.
        throw new \RuntimeException("failed to connect to /var/run/curl_daemon! $errstr ($errno)");
    }
    stream_set_blocking($conn,true);
    $curl_options=serialize($curl_options);
    fwrite($conn,$to_big_uint64_t(strlen($curl_options)).$curl_options);
    $stdout=$read_exact($conn,$from_big_uint64_t($read_exact($conn,8)));
    $stderr=$read_exact($conn,$from_big_uint64_t($read_exact($conn,8)));
    fclose($conn);
    $ret=new curl_daemon_response();
    $ret->stdout=$stdout;
    $ret->stderr=$stderr;
    return $ret;
}

守护进程看起来像

<?php
declare(strict_types=1);
// Global bandwidth budget shared across ALL concurrent transfers, bytes/sec.
const MAX_DOWNLOAD_SPEED=1000*1024; // 1000 KiB/s total across all handles
// Floor applied per handle so a flood of clients cannot starve each transfer.
const MINIMUM_DOWNLOAD_SPEED=100; // 100 bytes per second,
// Per-request bookkeeping: one Client per connected unix-socket peer.
class Client{
    public $id;        // monotonically increasing request number
    public $socket;    // unix-socket stream back to the requesting process
    public $curl;      // curl easy handle driving this client's transfer
    public $arguments; // unserialized CURLOPT_* array received from the client
    public $stdout;    // tmpfile() stream capturing the transfer body
    public $stderr;    // tmpfile() stream capturing curl's verbose log
}
$clients=[];
$mh=curl_multi_init();
// Remove a stale socket file left by a previous run, otherwise binding fails
// on daemon restart. (@: unlink warns if the file does not exist yet.)
@unlink("/var/run/curl_daemon");
$srv = stream_socket_server("unix:///var/run/curl_daemon", $errno, $errstr);
if (!$srv) {
  // Fix: \RuntimeError does not exist in PHP; the SPL class is \RuntimeException.
  throw new \RuntimeException("failed to create unix socket /var/run/curl_daemon! $errstr ($errno)");
}
stream_set_blocking($srv,false);
// Main event loop: accept new requests, drive the shared curl_multi handle,
// and ship captured output back to each client as its transfer completes.
while(true){
    getNewClients();
    $cc=count($clients);
    if(!$cc){
        sleep(1); // nothing to do.
        continue;
    }
    curl_multi_exec($mh, $running);
    if($running!==$cc){
        // at least 1 of the curls finished!
        while(false!==($info=curl_multi_info_read($mh))){
            // CURLOPT_PRIVATE stored the request id; use it to find the Client.
            $key=curl_getinfo($info['handle'],CURLINFO_PRIVATE);
            curl_multi_remove_handle($mh,$clients[$key]->curl);
            curl_close($clients[$key]->curl);
            // Re-read the tmpfiles by path instead of rewinding the handles:
            // works around https://bugs.php.net/bug.php?id=76268
            $stdout=file_get_contents(stream_get_meta_data($clients[$key]->stdout)['uri']);
            fclose($clients[$key]->stdout);
            $stderr=file_get_contents(stream_get_meta_data($clients[$key]->stderr)['uri']);
            fclose($clients[$key]->stderr);
            $sock=$clients[$key]->socket;
            // Length-prefixed reply frames: stdout then stderr.
            fwrite($sock,to_big_uint64_t(strlen($stdout)).$stdout.to_big_uint64_t(strlen($stderr)).$stderr);
            fclose($sock);
            echo "finished request #{$key}!\n";
            unset($clients[$key],$key,$stdout,$stderr,$sock);
        }
        // Fewer clients now => each survivor can get a bigger slice.
        updateSpeed();
    }
    // Block until there is socket activity on the multi handle. Fix: guard the
    // documented -1 failure return, which would otherwise busy-spin this loop.
    if(curl_multi_select($mh)===-1){
        usleep(100000);
    }
}

/**
 * Re-divides the global download budget evenly among all active transfers and
 * applies the per-handle cap via CURLOPT_MAX_RECV_SPEED_LARGE.
 * No-op when there are no clients or the per-handle speed is unchanged.
 */
function updateSpeed(){
    global $clients;
    static $old_speed=-1;
    if(empty($clients)){
        return;
    }
    $clientsn=count($clients);
    // Even split, clamped to the per-handle floor. Cast to int: the division
    // may yield a float, and the strict === below would then never match the
    // int cached in $old_speed (curl expects an integer byte rate anyway).
    $per_handle_speed=(int)max(MINIMUM_DOWNLOAD_SPEED,MAX_DOWNLOAD_SPEED/$clientsn);
    if($per_handle_speed===$old_speed){
        return;
    }
    $old_speed=$per_handle_speed;
    echo "new per handle speed: {$per_handle_speed} - clients: {$clientsn}\n";
    foreach($clients as $client){
        /** @var Client $client */
        // Fix: the original passed the misspelled, undefined $per_hande_speed,
        // so the computed limit was never actually applied to any handle.
        curl_setopt($client->curl,CURLOPT_MAX_RECV_SPEED_LARGE,$per_handle_speed);
    }
}


/**
 * Polls the daemon's unix socket for newly-connected clients, reads each
 * client's serialized CURLOPT_* array, registers a curl easy handle for it on
 * the shared multi handle, and rebalances per-handle speed limits if anyone
 * joined.
 */
function getNewClients(){
    global $clients,$srv,$mh;
    static $counter=-1;
    $newClients=false;
    // Timeout 0 = poll. Fix: suppress the warning stream_socket_accept() emits
    // on every pass in which no client happens to be waiting.
    while(false!==($new=@stream_socket_accept($srv,0))){
        ++$counter;
        $newClients=true;
        echo "new client! request #{$counter}\n";
        stream_set_blocking($new,true);
        $tmp=new Client();
        $tmp->id=$counter;
        $tmp->socket=$new;
        $tmp->curl=curl_init();
        $tmp->stdout=tmpfile();
        $tmp->stderr=tmpfile();
        $size=from_big_uint64_t(fread($new,8));
        // Fix: a single fread() may return less than $size bytes on a socket;
        // loop until the whole length-prefixed payload has arrived.
        $arguments='';
        while(strlen($arguments)<$size){
            $chunk=fread($new,$size-strlen($arguments));
            if($chunk===false||$chunk===''){
                break;
            }
            $arguments.=$chunk;
        }
        // SECURITY NOTE: unserialize() of socket data permits PHP object
        // injection — only expose this socket to trusted local users.
        $arguments=unserialize($arguments);
        assert(is_array($arguments));
        $tmp->arguments=$arguments;
        curl_setopt_array($tmp->curl,$arguments);
        // Applied after the client's options so these cannot be overridden.
        curl_setopt_array($tmp->curl,array(
            CURLOPT_FILE=>$tmp->stdout,
            CURLOPT_STDERR=>$tmp->stderr,
            CURLOPT_VERBOSE=>1,
            CURLOPT_PRIVATE=>$counter
        ));
        // Fix: the original never stored the new client, so $clients stayed
        // empty — updateSpeed() saw no handles and the completion handler in
        // the main loop crashed looking up $clients[$key].
        $clients[$counter]=$tmp;
        curl_multi_add_handle($mh,$tmp->curl);
    }
    if($newClients){
        updateSpeed();
    }
}

// Decodes an 8-byte big-endian unsigned integer ('J' format) into a PHP int.
function from_big_uint64_t(string $i): int {
    return unpack('Juint64_t', $i)['uint64_t'];
}
// Encodes a PHP int as an 8-byte big-endian unsigned integer ('J' format).
function to_big_uint64_t(int $i): string {
    return pack('J', $i);
}

注意:这是完全未经测试的代码,因为我的开发环境几个小时前就已经死了,我用notepad ++写了所有这些。 (我的开发环境根本没有启动,它是一个虚拟机,不确定wtf是否已发生,但尚未修复它)

另外,这段代码完全没有针对大文件传输做优化。如果你需要用这种方式支持大文件传输(大到你不希望整个放进内存的文件,比如千兆字节以上),可以把守护进程改成返回文件路径,而不是把全部数据通过unix套接字写回。