Basic code
from redis import StrictRedis

if __name__ == '__main__':
    # Connect to the local Redis server, database 0
    sr = StrictRedis(host='localhost', port=6379, db=0)
    # Store the string 'python' under the key 'name'
    result = sr.set('name', 'python')
    print(result)
    # Read the value back
    result1 = sr.get('name')
    print(result1)
Output:
True
b'python'
We stored a string, but what comes back is bytes. This is how the Python 3 Redis client returns responses; under Python 2 the value comes back as a str.
To get strings back you could call decode() on every value you read, but that quickly becomes tedious. Instead, configure the connection like this:
sr = StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
That is, simply pass decode_responses=True when creating the connection.
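For reference, here is a minimal sketch of the same round-trip with decode_responses=True enabled, assuming a local Redis on the default port; get() now returns a str instead of bytes:

from redis import StrictRedis

if __name__ == '__main__':
    # decode_responses=True makes the client decode replies to str (UTF-8 by default)
    sr = StrictRedis(host='localhost', port=6379, db=0, decode_responses=True)
    sr.set('name', 'python')
    print(sr.get('name'))  # 'python' (a str), no longer b'python'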
Bonus: reading files and writing them into Redis with a pipeline
In day-to-day development we often need to load files into the cache. Python is a fast way to read those files, and it also offers convenient APIs for talking to Redis. The example in this section uses the rediscluster package to connect to a Redis cluster and inserts the file contents through a pipeline.
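Before the full cluster script below, here is a minimal sketch of the pipeline pattern on a single node (key names and batch size are illustrative, not from the original article): commands are queued on the pipe object locally and only sent to the server when execute() is called, which cuts down the number of network round trips.

from redis import StrictRedis

sr = StrictRedis(host='localhost', port=6379, db=0)
pipe = sr.pipeline()
count = 0
for i in range(20000):
    pipe.lpush('demo:key', 'value-%d' % i)  # queued locally, not sent yet
    count += 1
    if count >= 5000:
        pipe.execute()                      # flush the batch in one round trip
        count = 0
pipe.execute()                              # flush whatever is left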
# encoding: utf-8
from rediscluster import StrictRedisCluster
import sys
import os
import datetime


def redis_cluster():
    # Startup nodes of the Redis cluster
    redis_nodes = [{"host": "10.80.23.175", "port": 7000},
                   {"host": "10.80.23.175", "port": 7001},
                   {"host": "10.80.24.175", "port": 7000},
                   {"host": "10.80.24.175", "port": 7001},
                   {"host": "10.80.25.175", "port": 7000},
                   {"host": "10.80.25.175", "port": 7001}]
    try:
        redisconn = StrictRedisCluster(startup_nodes=redis_nodes,
                                       skip_full_coverage_check=True)
        return redisconn
    except Exception as e:
        print("Connect Error!", e)
        sys.exit(1)


def to_redis(redis_conn1, file_name):
    # e.g. file_name = "D:\data\logs\hippo.log"
    pipe = redis_conn1.pipeline()
    index = 0
    count = 0
    with open(file_name, 'r') as file_to_read:
        while True:
            lines = file_to_read.readline()
            lines = lines.replace("\n", "")
            if not lines:
                break
            # Each line is "key<TAB>value"
            s = lines.split("\t")
            key = s[0]
            value = s[1]
            pipe.lpush(key, value)
            index = index + 1
            # Flush the pipeline every 5000 commands
            if index > 5000:
                pipe.execute()
                index = 0
                count = count + 1
                print("execute insert! count is %d" % count)
    # Flush whatever is still queued in the pipeline
    pipe.execute()


def read_file(path):
    if os.path.isfile(path):
        print("start execute file %s" % path)
        to_redis(redis_conn, path)
    else:
        # Walk the directory and import every file under it
        for root, dirs, files in os.walk(path):
            print('files:', files)  # all non-directory files under the current path
            for fileName in files:
                all_name = root + "/" + fileName
                print("start execute file %s" % all_name)
                to_redis(redis_conn, all_name)


start_time = datetime.datetime.now()
redis_conn = redis_cluster()
file_paths = sys.argv[1]  # sys.argv[0] is the script itself, so take the first real argument
read_file(file_paths)
end_time = datetime.datetime.now()
print("use times is %d " % (end_time - start_time).seconds)
When running the script, pass the file (or directory) to import as a command-line argument.
For example, to load /data/a.log into Redis:
python RedisFIleToRedis.py /data/a.log
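Assuming the log lines are tab-separated key/value pairs as the script above expects, you can spot-check the result afterwards; this is a rough sketch, and 'some_key' is a placeholder for one of the keys that actually appeared in the imported file:

from rediscluster import StrictRedisCluster

conn = StrictRedisCluster(startup_nodes=[{"host": "10.80.23.175", "port": 7000}],
                          skip_full_coverage_check=True)
print(conn.llen('some_key'))          # how many values were pushed onto that list
print(conn.lrange('some_key', 0, 4))  # a sample of the imported values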
That is everything in this article on fixing the bytes type returned when reading Redis data from Python. I hope it serves as a useful reference, and please continue to support 服务器之家.
原文链接:https://blog.csdn.net/weixin_40612082/article/details/83958864