Mirror of https://github.com/fluencelabs/redis, synced 2025-04-03 08:11:03 +00:00.
Merge pull request #5315 from soloestoy/optimize-parsing-large-bulk
networking: optimize parsing large bulk greater than 32k
Commit: d60c17cbb3
src/networking.c
@@ -1294,19 +1294,22 @@ int processMultibulkBuffer(client *c) {
             c->qb_pos = newline-c->querybuf+2;
             if (ll >= PROTO_MBULK_BIG_ARG) {
-                size_t qblen;
-
                 /* If we are going to read a large object from network
                  * try to make it likely that it will start at c->querybuf
                  * boundary so that we can optimize object creation
-                 * avoiding a large copy of data. */
-                sdsrange(c->querybuf,c->qb_pos,-1);
-                c->qb_pos = 0;
-                qblen = sdslen(c->querybuf);
-                /* Hint the sds library about the amount of bytes this string is
-                 * going to contain. */
-                if (qblen < (size_t)ll+2)
-                    c->querybuf = sdsMakeRoomFor(c->querybuf,ll+2-qblen);
+                 * avoiding a large copy of data.
+                 *
+                 * But only when the data we have not parsed is less than
+                 * or equal to ll+2. If the data length is greater than
+                 * ll+2, trimming querybuf is just a waste of time, because
+                 * at this time the querybuf contains not only our bulk. */
+                if (sdslen(c->querybuf)-c->qb_pos <= (size_t)ll+2) {
+                    sdsrange(c->querybuf,c->qb_pos,-1);
+                    c->qb_pos = 0;
+                    /* Hint the sds library about the amount of bytes this string is
+                     * going to contain. */
+                    c->querybuf = sdsMakeRoomFor(c->querybuf,ll+2);
+                }
             }
             c->bulklen = ll;
         }
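The heart of the change is the new guard around the buffer trim: the old code always moved the unparsed tail of c->querybuf to the front when a big bulk (>= PROTO_MBULK_BIG_ARG, 32k) was announced, while the patched code only does so when the unparsed bytes are at most ll+2, i.e. when the buffer ends with this bulk and nothing else. Below is a minimal, self-contained sketch of that decision, not the actual Redis code: a plain char buffer and memmove() stand in for the sds string, sdsrange() and the sdsMakeRoomFor() size hint, and the function and parameter names (maybe_trim_query_buffer, buf, buf_len, parse_pos, bulk_len) are hypothetical stand-ins for c->querybuf, sdslen(c->querybuf), c->qb_pos and ll.

    #include <stddef.h>
    #include <string.h>

    /* Sketch of the trimming decision this commit introduces.
     * Returns the new parse offset into the buffer. */
    static size_t maybe_trim_query_buffer(char *buf, size_t *buf_len,
                                          size_t parse_pos, long long bulk_len) {
        size_t unparsed = *buf_len - parse_pos;
        /* Only compact when the unparsed bytes are at most bulk_len+2 (the
         * bulk payload plus its trailing CRLF). If more data is pending,
         * the buffer also holds later commands, so moving it around before
         * reading the big bulk would be wasted work. */
        if (unparsed <= (size_t)bulk_len + 2) {
            memmove(buf, buf + parse_pos, unparsed);
            *buf_len = unparsed;
            return 0;        /* the big bulk now starts at offset 0 */
        }
        return parse_pos;    /* leave the buffer untouched */
    }

With this guard, a pipeline of many commands that happens to contain one large bulk no longer pays for repeatedly shifting all the queued data, which is the "waste of time" the new comment in the diff refers to.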
|