##// END OF EJS Templates
pickleshare compression
vivainio -
Show More
@@ -45,6 +45,8 b' from sets import Set as set'
def gethashfile(key):
    """ Map 'key' to one of 256 two-hex-digit bucket file names. """
    bucket = abs(hash(key) % 256)
    return ("%02x" % bucket)[-2:]
48 _sentinel = object()
49
48 50 class PickleShareDB(UserDict.DictMixin):
49 51 """ The main 'connection' object for PickleShare database """
50 52 def __init__(self,root):
@@ -89,6 +91,7 b' class PickleShareDB(UserDict.DictMixin):'
89 91 raise
90 92
91 93 def hset(self, hashroot, key, value):
94 """ hashed set """
92 95 hroot = self.root / hashroot
93 96 if not hroot.isdir():
94 97 hroot.makedirs()
@@ -97,19 +100,37 b' class PickleShareDB(UserDict.DictMixin):'
97 100 d.update( {key : value})
98 101 self[hfile] = d
99 102
100 def hget(self, hashroot, key, default = None):
103
104
def hget(self, hashroot, key, default = _sentinel, fast_only = True):
    """ hashed get

    Look up 'key' inside hashed category 'hashroot'.

    If the bucket file for 'key' is missing and fast_only is False, fall
    back to scanning every bucket via hdict() -- this also finds items
    that were moved into the combined 'xx' file by hcompress().

    Raises KeyError when the key is absent and no default was supplied.
    """
    hroot = self.root / hashroot
    hfile = hroot / gethashfile(key)

    d = self.get(hfile, _sentinel)
    #print "got dict",d,"from",hfile
    if d is _sentinel:
        if fast_only:
            if default is _sentinel:
                raise KeyError(key)

            return default

        # slow mode ok, works even after hcompress()
        d = self.hdict(hashroot)

    if key not in d:
        # don't leak the internal _sentinel object to callers; mirror
        # the fast-path contract: no default supplied -> KeyError
        if default is _sentinel:
            raise KeyError(key)
        return default
    return d[key]
108 123
109 124 def hdict(self, hashroot):
110 buckets = self.keys(hashroot + "/*")
111 hfiles = [f for f in buckets]
125 """ Get all data contained in hashed category 'hashroot' as dict """
126 hfiles = self.keys(hashroot + "/*")
127 last = len(hfiles) and hfiles[-1] or ''
128 if last.endswith('xx'):
129 print "using xx"
130 hfiles = [last] + hfiles[:-1]
131
112 132 all = {}
133
113 134 for f in hfiles:
114 135 # print "using",f
115 136 all.update(self[f])
@@ -117,6 +138,29 b' class PickleShareDB(UserDict.DictMixin):'
117 138
118 139 return all
119 140
def hcompress(self, hashroot):
    """ Compress category 'hashroot', so hset is fast again

    hget will fail if fast_only is True for compressed items (that were
    hset before hcompress).

    """
    bucket_files = self.keys(hashroot + "/*")

    # merge every bucket into one combined dict, dropping cached copies
    combined = {}
    for bucket in bucket_files:
        combined.update(self[bucket])
        self.uncache(bucket)

    # everything now lives in the single 'xx' file
    self[hashroot + '/xx'] = combined

    # remove the now-redundant per-bucket files (keep 'xx' itself)
    for bucket in bucket_files:
        bucket_path = self.root / bucket
        if bucket_path.basename() == 'xx':
            continue
        bucket_path.remove()
163
120 164 def __delitem__(self,key):
121 165 """ del db["key"] """
122 166 fil = self.root / key
@@ -164,8 +164,8 b' class ShadowHist:'
164 164 self.db = db
165 165
def inc_idx(self):
    """ Return the current shadow-history index and bump the stored counter.

    The counter starts at 1 when 'shadowhist_idx' is not yet in the db.
    """
    current = self.db.get('shadowhist_idx', 1)
    self.db['shadowhist_idx'] = current + 1
    return current
170 170
171 171 def add(self, ent):
@@ -6,7 +6,7 b' Requires Python 2.3 or newer.'
6 6
7 7 This file contains all the classes and helper functions specific to IPython.
8 8
9 $Id: iplib.py 2440 2007-06-14 19:31:36Z vivainio $
9 $Id: iplib.py 2442 2007-06-14 21:20:10Z vivainio $
10 10 """
11 11
12 12 #*****************************************************************************
@@ -2016,8 +2016,10 b' want to merge them back into the new files.""" % locals()'
2016 2016 pass # re{move,place}_history_item are new in 2.4.
2017 2017 else:
2018 2018 self.input_hist_raw.append('%s\n' % line)
2019
2020 self.shadowhist.add(line)
2019
2020 if line.lstrip() == line:
2021 self.shadowhist.add(line.strip())
2022
2021 2023 try:
2022 2024 lineout = self.prefilter(line,continue_prompt)
2023 2025 except:
General Comments 0
You need to be logged in to leave comments. Login now