Experiment setup: implement three HTTP endpoints using the HTTP and MySQL libraries provided by each stack:

  • /fast, returns a response immediately
  • /slow, runs SELECT SLEEP(3) against MySQL and returns the response once the query result comes back
  • /inf-loop, an infinite loop

Each stack uses a MySQL connection pool with the same configuration (max-connections=10). The experiment proceeds as follows:

  1. Run ab -n 10 -c 10 /slow and, while those slow requests are still in flight, run ab -n 10 -c 10 /fast (see the command sketch below)
  2. Run curl /inf-loop and then curl /fast (from a second terminal, since the first curl may never return)
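
For concreteness, the invocations look roughly like the following (host and ports are assumptions: in the code below Node listens on 8080 and Go on 8090, and Spring Boot defaults to 8080):

# experiment 1: launch the fast run while the slow run is still in flight
ab -n 10 -c 10 http://localhost:8080/slow &
ab -n 10 -c 10 http://localhost:8080/fast

# experiment 2: /inf-loop may never return, so /fast goes in a second terminal
curl http://localhost:8080/inf-loop
curl http://localhost:8080/fast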

Java

A classic Spring Boot application, with the Tomcat worker thread pool set to 10 (matching the connection pool size).
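
The exact configuration is not shown here; a minimal application.properties sketch matching that description (property names assume Spring Boot 2.x with the default HikariCP pool) would be:

# Tomcat worker thread pool (server.tomcat.max-threads on older Spring Boot versions)
server.tomcat.threads.max=10
# MySQL connection pool, same size as the other stacks
spring.datasource.hikari.maximum-pool-size=10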

<!-- MyBatis mapper: SELECT SLEEP(3) simulates a query that takes 3 seconds -->
<select id="sleep" resultType="int">
    SELECT SLEEP(3)
</select>

@Repository
public interface SleepDao {
    int sleep();
}

@RestController
@RequestMapping
public class JavaIOController {

    @Autowired
    private SleepDao sleepDao;

    @GetMapping("/fast")
    public ApiResult fast(){
        System.out.println(Thread.currentThread() + "\tfast");
        return ApiResult.success("fast");
    }

    @GetMapping("/slow")
    public ApiResult slow(){
        System.out.println(Thread.currentThread() + "\tslow");
        sleepDao.sleep();
        return ApiResult.success("slow");
    }

    @GetMapping("/inf-loop")
    public ApiResult infLoop(){
        while(true);
    }

}
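
ApiResult is referenced but not shown; a minimal, hypothetical sketch that would let the controller compile could look like this:

// Hypothetical helper, assumed for illustration only; the original class is not shown.
public class ApiResult {

    private final Object data;

    private ApiResult(Object data) {
        this.data = data;
    }

    public static ApiResult success(Object data) {
        return new ApiResult(data);
    }

    public Object getData() {
        return data;
    }
}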

Experiment 1 results:

/slow:
Percentage of the requests served within a certain time (ms)
  50%   3019
  66%   3019
  75%   3020
  80%   3020
  90%   3020
  95%   3020
  98%   3020
  99%   3020
 100%   3020 (longest request)

/fast:

 Percentage of the requests served within a certain time (ms)
  50%   2007
  66%   2008
  75%   2008
  80%   2008
  90%   2008
  95%   2008
  98%   2008
  99%   2008
 100%   2008 (longest request)

Experiment 2 results:

Conclusion

IO model: synchronous blocking IO on top of kernel threads. Each request occupies one Tomcat worker thread, and that thread stays blocked inside the JDBC call until MySQL returns; with only 10 worker threads, the /fast requests issued while /slow was still running had to wait for threads to be freed, which is why they took roughly 2 seconds instead of a few milliseconds.

Node

const http = require('http');
const util = require('util');
const mysql = require('mysql');
const HttpDispatcher = require('httpdispatcher');
const dispatcher = new HttpDispatcher();


var pool = mysql.createPool({
    connectionLimit: 10 // same pool size as the other stacks; connection details omitted here
});

// promisified variant of pool.query so that /slow can await it
const query = util.promisify(pool.query).bind(pool);

function handleRequest(request, response) {
    try {
        dispatcher.dispatch(request, response);
    } catch (err) {
        console.log(err);
    }
}

const server = http.createServer(handleRequest);

dispatcher.onGet("/fast", function (req, res) {
    res.writeHead(200, { 'Content-Type': 'text/html' });
    res.end('fast');
});

dispatcher.onGet("/slow", async function (req, res) {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    pool.query('SELECT 1 + 1 AS solution', function (error, results, fields) {
        if (error) throw error;
        console.log('The solution is: ', results[0].solution);
    });
    const data = await query("SELECT SLEEP(3), 'slow query in mysql' as result")
    res.end(data[0].result);
});

dispatcher.onGet("/inf-loop", async function (req, res) {
    while (true);
});

server.listen(8080);

Experiment 1 results:

/slow:
Percentage of the requests served within a certain time (ms)
  50%   3026
  66%   3027
  75%   3029
  80%   3030
  90%   3039
  95%   3039
  98%   3039
  99%   3039
 100%   3039 (longest request)

/fast:

Percentage of the requests served within a certain time (ms)
  50%      5
  66%      5
  75%      5
  80%      5
  90%      5
  95%      5
  98%      5
  99%      5
 100%      5 (longest request)

Experiment 2 results:

curl /inf-loop => hang
curl /fast => hang

Conclusion

IO model: non-blocking IO on a single-threaded event loop. The MySQL query is dispatched asynchronously, so the event loop stays free during SELECT SLEEP(3) and /fast is served in about 5 ms even while /slow is running; but the synchronous while (true) in /inf-loop never yields, so the one event-loop thread is stuck and even /fast hangs.
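
The same effect can be reproduced outside HTTP; a minimal sketch (not from the original experiment):

// A timer callback is queued, but the synchronous loop below never yields,
// so the single event-loop thread can never run it -- the same reason
// /fast hangs once /inf-loop has been requested.
setTimeout(() => console.log('never printed'), 0);
while (true) {}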

Golang

package main

import (
	"database/sql"
	"fmt"
	"net/http"
	"time"

	_ "github.com/go-sql-driver/mysql"
)

var db = newDB()

func newDB() *sql.DB {
	db, err := sql.Open("mysql", "")
	if err != nil {
		panic(err)
	}
	db.SetConnMaxLifetime(time.Minute * 3)
	db.SetMaxOpenConns(10)
	db.SetMaxIdleConns(10)
	return db
}

func fast(w http.ResponseWriter, req *http.Request) {
	fmt.Fprintf(w, "fast\n")
}

func slow(w http.ResponseWriter, req *http.Request) {
	rows, err := db.Query("SELECT SLEEP(3), 'slow query in mysql' as result")
	if err != nil {
		panic(err.Error())
	}
	defer rows.Close()

	columns, err := rows.Columns()
	if err != nil {
		panic(err.Error())
	}

	values := make([]sql.RawBytes, len(columns))
	scanArgs := make([]interface{}, len(values))
	for i := range values {
		scanArgs[i] = &values[i]
	}

	for rows.Next() {
		if err := rows.Scan(scanArgs...); err != nil {
			panic(err.Error())
		}
		fmt.Fprintf(w, string(values[1])+"\n")
	}
	if err := rows.Err(); err != nil {
		panic(err.Error())
	}
}

func infLoop(w http.ResponseWriter, req *http.Request) {
	for { // spin forever without ever blocking on IO
	}
}

func main() {
	http.HandleFunc("/fast", fast)
	http.HandleFunc("/slow", slow)
	http.HandleFunc("/inf-loop", infLoop)

	http.ListenAndServe(":8090", nil)
}

Experiment 1 results:

/slow:
Percentage of the requests served within a certain time (ms)
  50%   3005
  66%   3005
  75%   3005
  80%   3005
  90%   3005
  95%   3005
  98%   3005
  99%   3005
 100%   3005 (longest request)

/fast:

Percentage of the requests served within a certain time (ms)
  50%      0
  66%      0
  75%      0
  80%      0
  90%      1
  95%      1
  98%      1
  99%      1
 100%      1 (longest request)

Experiment 2 results:

Conclusion

IO model: non-blocking IO built on user-space threads (goroutines). net/http handles each incoming connection in its own goroutine; the seemingly blocking db.Query call parks only that goroutine while the runtime keeps scheduling the others, which is why /fast was served in under a millisecond even while the /slow requests were waiting on MySQL.
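
The same property in isolation; a minimal sketch (not from the original experiment):

package main

import (
	"fmt"
	"time"
)

func main() {
	// One goroutine parks for 3 seconds, comparable to /slow waiting on MySQL...
	go func() {
		time.Sleep(3 * time.Second)
		fmt.Println("slow goroutine done")
	}()

	// ...while the main goroutine keeps doing work immediately,
	// just as /fast kept responding during the /slow run.
	for i := 0; i < 3; i++ {
		fmt.Println("fast work", i)
	}

	time.Sleep(4 * time.Second) // give the parked goroutine time to finish
}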