mirror of
https://github.com/fluencelabs/redis
synced 2025-03-18 16:40:50 +00:00
RDB hashes loading: fixed another bug in the loading of HT-encoded hashes. When a hash entry is too big for the ziplist, add the field first, then convert. The code used to break before the new entry was inserted, resulting in missing fields in the loaded Hash object.
This commit is contained in:
parent
c3c856228d
commit
c3e7441dad
@ -856,6 +856,10 @@ robj *rdbLoadObject(int rdbtype, rio *rdb) {
|
||||
if (value == NULL) return NULL;
|
||||
redisAssert(field->encoding == REDIS_ENCODING_RAW);
|
||||
|
||||
/* Add pair to ziplist */
|
||||
o->ptr = ziplistPush(o->ptr, field->ptr, sdslen(field->ptr), ZIPLIST_TAIL);
|
||||
o->ptr = ziplistPush(o->ptr, value->ptr, sdslen(value->ptr), ZIPLIST_TAIL);
|
||||
|
||||
/* Convert to hash table if size threshold is exceeded */
|
||||
if (sdslen(field->ptr) > server.hash_max_ziplist_value ||
|
||||
sdslen(value->ptr) > server.hash_max_ziplist_value)
|
||||
@ -863,10 +867,6 @@ robj *rdbLoadObject(int rdbtype, rio *rdb) {
|
||||
hashTypeConvert(o, REDIS_ENCODING_HT);
|
||||
break;
|
||||
}
|
||||
|
||||
/* Add pair to ziplist */
|
||||
o->ptr = ziplistPush(o->ptr, field->ptr, sdslen(field->ptr), ZIPLIST_TAIL);
|
||||
o->ptr = ziplistPush(o->ptr, value->ptr, sdslen(value->ptr), ZIPLIST_TAIL);
|
||||
}
|
||||
|
||||
/* Load remaining fields and values into the hash table */
|
||||
|
Loading…
x
Reference in New Issue
Block a user