Mirror of https://github.com/fluencelabs/redis
Don't over-allocate the sds string for large bulk requests.
The call to sdsMakeRoomFor() did not account for the amount of data already present in the query buffer, resulting in over-allocation.
commit ff9d66c4a9
parent e21803348a
@@ -989,13 +989,13 @@ int processMultibulkBuffer(redisClient *c) {
             if (ll >= REDIS_MBULK_BIG_ARG) {
                 /* If we are going to read a large object from network
                  * try to make it likely that it will start at c->querybuf
-                 * boundary so that we can optimized object creation
+                 * boundary so that we can optimize object creation
                  * avoiding a large copy of data. */
                 sdsrange(c->querybuf,pos,-1);
                 pos = 0;
                 /* Hint the sds library about the amount of bytes this string is
                  * going to contain. */
-                c->querybuf = sdsMakeRoomFor(c->querybuf,ll+2);
+                c->querybuf = sdsMakeRoomFor(c->querybuf,ll+2-sdslen(c->querybuf));
             }
             c->bulklen = ll;
         }
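For context: sdsMakeRoomFor(s, addlen) enlarges the free space of an sds string so that addlen more bytes can be appended after its current contents; it is not a request for a total size. When part of the bulk payload is already sitting in c->querybuf, asking for ll+2 extra bytes therefore reserves roughly sdslen(c->querybuf) bytes more than needed. The following standalone sketch (hypothetical sizes; it assumes only Redis's public sds API and must be compiled together with sds.c) illustrates the difference between the two calls:

/* overalloc_demo.c -- illustrative only, not part of the commit.
 * Shows why the old sdsMakeRoomFor() call over-allocated: addlen is the
 * amount of *additional* free space requested beyond the current length. */
#include <stdio.h>
#include "sds.h"

int main(void) {
    long long ll = 512;  /* hypothetical bulk length announced as "$512\r\n" */
    sds buf = sdsnew("some bytes of the bulk already read");

    /* Old call: ll+2 extra bytes on top of what the buffer already holds,
     * i.e. a total capacity of at least sdslen(buf) + ll + 2. */
    buf = sdsMakeRoomFor(buf, ll + 2);
    printf("old request:   len=%zu avail=%zu\n", sdslen(buf), sdsavail(buf));
    sdsfree(buf);

    /* Fixed call: request only the bytes still missing, so the capacity
     * stays close to ll+2, the full bulk plus its trailing CRLF (sds may
     * still preallocate somewhat more than requested). */
    buf = sdsnew("some bytes of the bulk already read");
    buf = sdsMakeRoomFor(buf, ll + 2 - sdslen(buf));
    printf("fixed request: len=%zu avail=%zu\n", sdslen(buf), sdsavail(buf));
    sdsfree(buf);
    return 0;
}

With the fixed call the requested free space shrinks as more of the bulk accumulates in the query buffer, so the final allocation stays close to the ll+2 bytes actually needed for the payload and its terminating CRLF.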