I do not trust many of the benchmarks out there, because they do not actually exercise I/O such as database connections — they just send "Hello World" to the user and report enormous throughput. I am not building anything that needs to scale at the moment, but I plan to soon, so I want better results that are as realistic as possible.
I execute the following query in both scripts: "SELECT * FROM table", which returns 200 rows. The sample results are:
(The MySQL, NodeJS and Apache servers are all restarted before each test)
1000 requests, 100 concurrency: Apache: 564.20 req/s NodeJS: 102.61 req/s
1000 requests, 200 concurrency: Apache: 393.75 req/s NodeJS: 105.12 req/s
1000 requests, 400 concurrency: Apache: 42.75 req/s NodeJS: 105.07 req/s
1000 requests, 1000 concurrency: Apache: 16.5 req/s NodeJS: 119.53 req/s
However, I do not trust these results, because the query is always the same, which is unlikely in real-world applications. How can I test both suites myself? Would changing the SQL query to a random insertion or a random selection help? Caching in the PHP-MySQL case helps if I do not restart the servers.
The code I used to benchmark: NodeJS:
// Benchmark HTTP server: serves all rows of the `sample` table as JSON.
// Clustered with two workers per CPU core. The master process ONLY forks
// workers; the MySQL pool and the Express server are created inside the
// worker branch — in the original, the master also opened a 40-connection
// pool that never served a single request.
var os = require('os');
var cluster = require("cluster");

if (cluster.isMaster) {
  // Two workers per core keeps every core busy even while some workers
  // are blocked waiting on MySQL.
  for (var i = 0; i < os.cpus().length * 2; i++) {
    cluster.fork();
  }
} else {
  var mysql = require("mysql");
  var MySQLPool = require("mysql-pool").MySQLPool;
  var client = new MySQLPool({
    poolSize: 40,
    user: 'root',
    password: '123',
    database: 'test'
  });

  var express = require('express'), routes = require('./routes');
  var app = module.exports = express.createServer();

  app.get('/tables', function (req, res) {
    client.query(
      'SELECT * FROM sample s',
      function selectCb(err, results, fields) {
        if (err) {
          // Do NOT `throw` here: an exception inside an async callback
          // crashes the whole worker, which silently distorts the
          // benchmark numbers. Report a 500 and keep serving.
          console.error(err);
          res.send({ error: 'query failed' }, 500);
          return;
        }
        res.send(results);
      }
    );
  });

  app.listen(3000);
}
PHP
$host = "localhost";
$username = "root";
$password = "123";
$db_name = "test";

// NOTE(review): the mysql_* extension is deprecated (removed in PHP 7);
// for a fair comparison with the pooled Node client, consider mysqli or
// PDO with persistent connections.
$db = mysql_connect($host, $username, $password) or die('Could not connect');
mysql_select_db($db_name, $db) or die('no db');

$sth = mysql_query("SELECT * FROM sample s");
if ($sth === false) {
    // Unchecked, a failed query would make mysql_fetch_assoc() warn on
    // `false` and the script would silently emit "[]".
    die('query failed: ' . mysql_error($db));
}

// Collect every row so the response body matches the Node endpoint.
$rows = array();
while ($r = mysql_fetch_assoc($sth)) {
    $rows[] = $r;
}

// Declare JSON explicitly — the Node endpoint (res.send of an array)
// also serves application/json.
header('Content-Type: application/json');
echo json_encode($rows);
?>
I know there are other approaches like Redis and Memcached to improve the performance, but I am interested in raw MySQL performance.