How to set/get Pandas dataframes into Redis using pyarrow
Given the following dataframe:
dd = {'ID': ['H576','H577','H578','H600', 'H700'],
      'CD': ['AAAAAAA', 'BBBBB', 'CCCCCC','DDDDDD', 'EEEEEEE']}
df = pd.DataFrame(dd)
Before pandas 0.25, the following worked:
set: redisConn.set("key", df.to_msgpack(compress='zlib'))
get: pd.read_msgpack(redisConn.get("key"))
Now there are deprecation warnings:
FutureWarning: to_msgpack is deprecated and will be removed in a future version.
It is recommended to use pyarrow for on-the-wire transmission of pandas objects.
The read_msgpack is deprecated and will be removed in a future version.
It is recommended to use pyarrow for on-the-wire transmission of pandas objects.
How does pyarrow work? And how do I get pyarrow objects into and out of Redis?
Here is a full example using pyarrow to serialize a pandas dataframe for storage in redis:
apt-get install python3 python3-pip redis-server
pip3 install pandas pyarrow redis
then in python:
import pandas as pd
import pyarrow as pa
import redis
df=pd.DataFrame({'A':[1,2,3]})
r = redis.Redis(host='localhost', port=6379, db=0)
context = pa.default_serialization_context()
r.set("key", context.serialize(df).to_buffer().to_pybytes())
context.deserialize(r.get("key"))
A
0 1
1 2
2 3
I have just submitted PR 28494 to pandas to include this pyarrow example in the docs.
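Note that default_serialization_context raises a DeprecationWarning on pyarrow 2.0+ and has since been removed. On recent pyarrow versions, the documented replacement for this kind of ad-hoc serialization is the Arrow IPC stream format; here is a minimal sketch of the same Redis round trip using it (the variable names are my own):

import pandas as pd
import pyarrow as pa
import redis

df = pd.DataFrame({'A': [1, 2, 3]})
r = redis.Redis(host='localhost', port=6379, db=0)

# serialize the dataframe to Arrow IPC stream bytes
table = pa.Table.from_pandas(df)
sink = pa.BufferOutputStream()
with pa.ipc.new_stream(sink, table.schema) as writer:
    writer.write_table(table)
r.set("key", sink.getvalue().to_pybytes())

# read the bytes back into a dataframe
reader = pa.ipc.open_stream(r.get("key"))
df_restored = reader.read_pandas()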
Here is how I do it now, since default_serialization_context has been deprecated and things are a bit simpler:
import pyarrow as pa
import redis
pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
r = redis.Redis(connection_pool=pool)
def storeInRedis(alias, df):
    df_compressed = pa.serialize(df).to_buffer().to_pybytes()
    res = r.set(alias, df_compressed)
    if res:
        print(f'{alias} cached')

def loadFromRedis(alias):
    data = r.get(alias)
    if data is None:  # key not found in Redis
        print("No data")
        return None
    return pa.deserialize(data)

storeInRedis('locations', locdf)
loadFromRedis('locations')
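If the cache should expire on its own, redis-py's set accepts an ex argument (TTL in seconds); a small variation on the helper above, assuming the same r connection (storeInRedisWithTTL is a name of my own):

def storeInRedisWithTTL(alias, df, ttl_seconds=3600):
    # ex gives the key a TTL, so stale frames drop out of Redis automatically
    payload = pa.serialize(df).to_buffer().to_pybytes()
    if r.set(alias, payload, ex=ttl_seconds):
        print(f'{alias} cached for {ttl_seconds}s')

Be aware that pa.serialize/pa.deserialize were themselves deprecated in pyarrow 2.0, so this approach only works on older versions; the IPC or parquet approaches elsewhere in this thread avoid that.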
If you want to compress the data in Redis, you can use the built-in support for parquet and gzip:
import io

import pandas as pd
import redis

# REDIS_HOST and REDIS_PORT are assumed to be defined in your config
def openRedisCon():
    pool = redis.ConnectionPool(host=REDIS_HOST, port=REDIS_PORT, db=0)
    r = redis.Redis(connection_pool=pool)
    return r

def storeDFInRedis(alias, df):
    """Store the dataframe object in Redis
    """
    buffer = io.BytesIO()
    df.to_parquet(buffer, compression='gzip')
    buffer.seek(0)  # rewind to the beginning after writing
    r = openRedisCon()
    res = r.set(alias, buffer.read())

def loadDFFromRedis(alias, useStale: bool = False):
    """Load the named key from Redis into a DataFrame and return the DF object
    """
    r = openRedisCon()
    data = r.get(alias)
    if data is None:  # key not found in Redis
        return None
    buffer = io.BytesIO(data)
    df = pd.read_parquet(buffer)
    return df
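A quick round trip with these helpers, assuming a local Redis instance and pyarrow (or fastparquet) installed as the parquet engine; the REDIS_HOST/REDIS_PORT values here are stand-ins for your own config:

import pandas as pd

REDIS_HOST, REDIS_PORT = 'localhost', 6379  # assumed local defaults

df = pd.DataFrame({'ID': ['H576', 'H577'], 'CD': ['AAAAAAA', 'BBBBB']})
storeDFInRedis('frame', df)
print(loadDFFromRedis('frame'))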
Pickle and zlib can be an alternative to pyarrow:
import pandas as pd
import redis
import zlib
import pickle
df=pd.DataFrame({'A':[1,2,3]})
r = redis.Redis(host='localhost', port=6379, db=0)
r.set("key", zlib.compress(pickle.dumps(df)))
df=pickle.loads(zlib.decompress(r.get("key")))
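If size matters, zlib.compress takes a compression level (0-9) to trade CPU for bytes, and pickle.HIGHEST_PROTOCOL usually shrinks the payload further; a small sketch comparing the options:

import pickle
import zlib

import pandas as pd

df = pd.DataFrame({'A': range(100000)})
raw = pickle.dumps(df, protocol=pickle.HIGHEST_PROTOCOL)
fast = zlib.compress(raw, level=1)   # faster, larger payload
small = zlib.compress(raw, level=9)  # slower, smaller payload
print(len(raw), len(fast), len(small))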