
/var/www/html/sub/images/sym/root/proc/self/root/opt/php-7.0.1/Zend/tests/concat_003.phpt
--TEST--
Concatenating many small strings should not slow down allocations
--SKIPIF--
<?php if (PHP_DEBUG) { die ("skip debug version is slow"); } ?>
--INI--
memory_limit=256m
--FILE--
<?php
/* Note that memory usage can vary depending on whether opcache is enabled.
   The measurement that actually matters here is the timing. */
$time = microtime(TRUE);
/* The timings may differ between Linux and Windows, so use worst-case thresholds and allow for slow machines. */
$t0_max = 0.1;
$t1_max = 0.4;
$datas = [];
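/* Each row repeats the multi-byte literal '保留地址' ("reserved address");
   the strings are kept verbatim because their byte length is part of the workload. */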
for ($i = 0; $i < 220000; $i++)
{
    $datas[] = [
        '000.000.000.000',
        '000.255.255.255',
        '保留地址',
        '保留地址',
        '保留地址',
        '保留地址',
        '保留地址',
        '保留地址',
    ];
}
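/* $t0 covers only the array construction above and serves as a baseline check
   that the machine itself is not too slow. */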
$t0 = microtime(TRUE) - $time;
var_dump($t0 < $t0_max);
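/* The loop below is what the test exercises: appending many small strings to one
   growing string. This should stay fast as long as the engine can extend the
   destination string cheaply instead of copying it on every append. */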
$texts = '';
foreach ($datas as $data)
{
    $texts .= implode("\t", $data) . "\r\n";
}
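/* $t1 includes both the array construction and the concatenation phase and must
   stay below $t1_max for the test to pass. */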
$t1 = microtime(TRUE) - $time;
var_dump($t1 < $t1_max);
?>
+++DONE+++
--EXPECT--
bool(true)
bool(true)
+++DONE+++