I know cURL itself is fast, but since the files I am streaming live on another server, the whole run takes 4-5 minutes to finish.
Is there any way to complete 100 requests in 10-15 seconds? I tried cURL's multi interface, and even with file_get_contents() the result is the same.
Thanks
<?php
require_once('FileMaker.php');
// Composer's autoloader already registers FPDF, FPDI and the PDF parser,
// so the individual require_once calls for those files are redundant.
require_once('FPDi/vendor/autoload.php');

use setasign\Fpdi\Fpdi;
use setasign\Fpdi\PdfParser\StreamReader;

$pdf = new Fpdi();
$fm = new FileMaker('database sample', 'location sample', 'username sample',
    'password sample');
$connected = $fm->getLayout('Accessioning | Desktop | Form');
if (FileMaker::isError($connected)) {
    // Stop early instead of silently continuing with a dead connection.
    die('Could not connect to FileMaker: ' . $connected->getMessage());
}
$record_gathered = $fm->getRecordByID("Accessioning | Desktop | Form", "18226");
$related_records = $record_gathered->getRelatedSet('Library | Accession Catalog Items');
$result_gathered_information = array();
$result = array();

if (!FileMaker::isError($related_records)) {
    $counter = 0; // initialize instead of suppressing the notice with @
    foreach ($related_records as $related_record) {
        $counter++;
        // Only fetch the first three related records.
        if ($counter <= 3) {
            $fetchURL = 'http://sampleURL/SuperContainer/RawData/Library/Catalog/Items/'
                . $related_record->getField('Library | Accession Catalog Items::recno') . '/?a=1';
            $result_gathered_information[] = $fetchURL;
        }
    }
}
// Sequential download: each request blocks until the previous one finishes,
// which is why the total runtime grows linearly with the number of files.
foreach ($result_gathered_information as $detectedinfo) {
    $ch = curl_init();
    curl_setopt($ch, CURLOPT_URL, $detectedinfo);
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
    $result[] = curl_exec($ch);
    curl_close($ch);
}
foreach ($result as $file) {
    // Feed the downloaded PDF to FPDI from memory instead of a temp file.
    $pageCount = $pdf->setSourceFile(StreamReader::createByString($file));
    for ($pageNo = 1; $pageNo <= $pageCount; $pageNo++) {
        // Import the page, then add a page with the same size and orientation.
        $templateId = $pdf->importPage($pageNo);
        $size = $pdf->getTemplateSize($templateId);
        $pdf->AddPage($size['orientation'], $size);
        $pdf->useTemplate($templateId);
        $pdf->SetFont('Helvetica');
        $pdf->SetXY(5, 5);
    }
}
$pdf->Output();
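
For reference, here is a minimal sketch of how the download loop could run in parallel with PHP's curl_multi interface, so all requests are in flight at the same time instead of one after another. The helper name fetchAllParallel is hypothetical; it assumes the $result_gathered_information URL array built above and applies no per-host concurrency limit.

<?php
// Hypothetical helper: download a list of URLs concurrently with curl_multi.
function fetchAllParallel(array $urls): array
{
    $mh = curl_multi_init();
    $handles = array();

    // Register one easy handle per URL on the multi handle.
    foreach ($urls as $i => $url) {
        $ch = curl_init();
        curl_setopt($ch, CURLOPT_URL, $url);
        curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
        curl_multi_add_handle($mh, $ch);
        $handles[$i] = $ch;
    }

    // Drive all transfers until every handle has finished.
    do {
        $status = curl_multi_exec($mh, $running);
        if ($running) {
            curl_multi_select($mh); // wait for socket activity instead of busy-looping
        }
    } while ($running && $status === CURLM_OK);

    // Collect the response bodies in the original order and clean up.
    $results = array();
    foreach ($handles as $i => $ch) {
        $results[$i] = curl_multi_getcontent($ch);
        curl_multi_remove_handle($mh, $ch);
        curl_close($ch);
    }
    curl_multi_close($mh);

    return $results;
}

// Usage: replaces the sequential foreach/curl_exec loop above.
// $result = fetchAllParallel($result_gathered_information);

With this approach the total time is roughly the duration of the slowest single download rather than the sum of all of them; whether 100 requests actually finish in 10-15 seconds then depends on the remote server's bandwidth and any concurrency limits it enforces.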