docker run --rm zchencow/innozverse-php:latest php -r '
<?php
// ── Generate large CSV (10,000 rows) in streaming fashion ─────────────────────
echo "=== Streaming CSV (10,000 rows) ===" . PHP_EOL;
$csvPath = "/tmp/large_orders_" . getmypid() . ".csv";
$gzipPath = $csvPath . ".gz";
$regions = ["North", "South", "East", "West"];
$products = ["Surface Pro", "Surface Pen", "Office 365", "USB-C Hub", "Surface Book"];
$prices = [864.00, 49.99, 99.99, 29.99, 1299.00];
$statuses = ["pending", "confirmed", "shipped", "delivered"];
// Stream-write: never holds entire dataset in memory
$startMem = memory_get_usage();
$fh = fopen($csvPath, "w");
fputcsv($fh, ["id", "product", "qty", "unit_price", "total", "region", "status", "created_at"]);
// BUG FIX: Random\Engine implementations such as Xoshiro256StarStar only expose
// generate() (raw bytes) — they have no nextInt() method, so the original code
// fataled. Wrap the seeded engine in Random\Randomizer and draw bounded ints
// with getInt(), which is also uniform (no modulo bias) and never negative.
$rng = new Random\Randomizer(new Random\Xoshiro256StarStar(42));
for ($i = 1; $i <= 10_000; $i++) {
    $pidx = $rng->getInt(0, 4);   // product index into $products/$prices
    $qty = $rng->getInt(1, 10);   // 1..10 units per order
    $total = round($prices[$pidx] * $qty, 2);
    fputcsv($fh, [
        $i,
        $products[$pidx],
        $qty,
        $prices[$pidx],
        $total,
        $regions[$rng->getInt(0, 3)],
        $statuses[$rng->getInt(0, 3)],
        // % binds tighter than +, so dates cycle inside one 365-day window
        // starting 2026-01-01 (intended wrap-around, not a precedence bug).
        date("Y-m-d", mktime(0,0,0,1,1,2026) + $i * 86400 % (365*86400)),
    ]);
}
fclose($fh);
// NOTE(review): peak usage may include allocations made before $startMem was
// sampled, so this is an approximation of the loop's footprint.
$peakMem = memory_get_peak_usage() - $startMem;
$fileSize = filesize($csvPath);
echo "File size: " . number_format($fileSize) . " bytes" . PHP_EOL;
echo "Peak memory: " . number_format($peakMem) . " bytes" . PHP_EOL;
// GZIP-compress the CSV through the zlib stream wrapper — constant memory,
// no shelling out to an external gzip binary.
$src = fopen($csvPath, "r");
$dst = fopen("compress.zlib://{$gzipPath}", "w");
stream_copy_to_stream($src, $dst);
fclose($src);
fclose($dst);
$gzipSize = filesize($gzipPath);
printf(
    "Compressed: %s bytes (%.0f%% of original)%s",
    number_format($gzipSize),
    $gzipSize * 100 / $fileSize,
    PHP_EOL
);
// Stream-read and aggregate: one row in memory at a time, running totals only.
$reader = fopen($csvPath, "r");
fgetcsv($reader); // discard the header row
$revenue = 0.0;
$byProd = [];
$byRegion = [];
$rowCount = 0;
while (($record = fgetcsv($reader)) !== false) {
    // Columns: 0=id, 1=product, 2=qty, 3=unit_price, 4=total, 5=region
    $product = $record[1];
    $qty = $record[2];
    $amount = (float)$record[4];
    $region = $record[5];
    $revenue += $amount;
    $byProd[$product] = ($byProd[$product] ?? 0.0) + $amount;
    $byRegion[$region] = ($byRegion[$region] ?? 0.0) + $amount;
    $rowCount++;
}
fclose($reader);
echo PHP_EOL . "Streamed " . number_format($rowCount) . " rows" . PHP_EOL;
printf("Total revenue: \$%s%s", number_format($revenue, 2), PHP_EOL);
echo PHP_EOL . "By product:" . PHP_EOL;
arsort($byProd);
foreach ($byProd as $name => $rev) {
    printf(" %-15s \$%s%s", $name, number_format($rev, 2), PHP_EOL);
}
echo PHP_EOL . "By region:" . PHP_EOL;
arsort($byRegion);
foreach ($byRegion as $name => $rev) {
    printf(" %-8s \$%s%s", $name, number_format($rev, 2), PHP_EOL);
}
// ── File locking (flock) ──────────────────────────────────────────────────────
echo PHP_EOL . "=== File Locking ===" . PHP_EOL;
$lockFile = "/tmp/counter_" . getmypid() . ".txt";
file_put_contents($lockFile, "0");
// Simulate concurrent writes with fork → each child increments the counter 100
// times. flock(LOCK_EX) serialises the read-modify-write cycle so no increment
// is lost to interleaving.
$pids = [];
for ($i = 0; $i < 3; $i++) {
    $pid = pcntl_fork();
    if ($pid === -1) {
        // BUG FIX: the original never checked for fork failure; -1 would have
        // been pushed into $pids and pcntl_waitpid(-1, …) waits on ANY child.
        fwrite(STDERR, "pcntl_fork() failed" . PHP_EOL);
        exit(1);
    }
    if ($pid === 0) { // child process
        for ($j = 0; $j < 100; $j++) {
            $fh = fopen($lockFile, "r+");
            flock($fh, LOCK_EX);              // exclusive lock; blocks until held
            $val = (int)fread($fh, 64);       // current counter value
            ftruncate($fh, 0); rewind($fh);   // clear file before rewriting
            fwrite($fh, (string)($val + 1));  // fwrite expects a string payload
            flock($fh, LOCK_UN);              // release lock
            fclose($fh);
        }
        exit(0);
    }
    $pids[] = $pid; // parent: remember child PID to reap it below
}
foreach ($pids as $pid) pcntl_waitpid($pid, $status);
$final = (int)file_get_contents($lockFile);
unlink($lockFile); unlink($csvPath); unlink($gzipPath);
// FIX: repaired mojibake in the output strings (Γ → ×, β → ✓/✗).
echo "3 processes × 100 increments = " . $final . " (expected 300)" . PHP_EOL;
echo "File locking: " . ($final === 300 ? "✓ race-condition-free" : "✗ data corruption!") . PHP_EOL;
'