1
2
3
4
5
6
7 import os
8 import md5
9 import time
10 import cPickle
11 from pygccxml import utils
12 import config as cxx_parsers_cfg
def file_signature( filename ):
    """ Return the md5 hex digest of *filename*'s contents.

    @param filename: path to the file to fingerprint
    @return: hex digest string, or None when *filename* is not an existing
             regular file (directories and dangling paths return None).
    """
    # os.path.isfile() is already False for paths that do not exist, so the
    # original extra os.path.exists() check was dead code and is dropped.
    if not os.path.isfile( filename ):
        return None
    # md5 module is deprecated since Python 2.5; hashlib.md5() produces an
    # identical digest and keeps this function working on modern interpreters
    import hashlib
    sig = hashlib.md5()
    # binary mode: the digest must not depend on platform newline translation;
    # try/finally guarantees the handle is closed even if read() raises
    f = open( filename, 'rb' )
    try:
        sig.update( f.read() )
    finally:
        f.close()
    return sig.hexdigest()
27
48
50 logger = utils.loggers.declarations_cache
51
54
56 """ Flush (write out) the cache to disk if needed. """
57 raise NotImplementedError()
58
def update(self, source_file, configuration, declarations, included_files):
    """ Update cache entry.

    @param source_file: path to the C++ source file being parsed
    @param configuration: configuration used in parsing (config_t)
    @param declarations: declaration tree found when parsing
    @param included_files: files included by parsing.

    Abstract: concrete cache implementations must override this; the base
    implementation only raises.
    """
    raise NotImplementedError()
67
69 """ Return declarations we have cached for the source_file and configuration
70 given.
71 @param source_file: path to the C++ source file being parsed.
72 @param configuration: configuration to use for parsing (config_t)
73 """
74 raise NotImplementedError()
75
123
125 """ Cache implementation to store data in a pickled form in a file.
126 This class contains some cache logic that keeps track of which entries
127 have been 'hit' in the cache and if an entry has not been hit then
128 it is deleted at the time of the flush(). This keeps the cache from
129 growing larger when files change and are not used again.
130 """
131
133 """
134 @param name: name of the cache file.
135 """
136 cache_base_t.__init__( self )
137 self.__name = name
138 self.__cache = self.__load( self.__name )
139 self.__needs_flushed = not bool( self.__cache )
140 for entry in self.__cache.itervalues():
141 entry.was_hit = False
142
143 @staticmethod
145 " Load pickled cache from file and return the object. "
146 cache = None
147 if os.path.exists( file_name ) and not os.path.isfile( file_name ):
148 raise RuntimeError( 'Cache should be initialized with valid full file name' )
149 if not os.path.exists( file_name ):
150 file( file_name, 'w+b' ).close()
151 return {}
152 cache_file_obj = file( file_name, 'rb' )
153 try:
154 file_cache_t.logger.info( 'Loading cache file "%s".' % file_name )
155 start_time = time.clock()
156 cache = cPickle.load( cache_file_obj )
157 file_cache_t.logger.debug( "Cache file has been loaded in %.1f secs"%( time.clock() - start_time ) )
158 file_cache_t.logger.debug( "Found cache in file: [%s] entries: %s"
159 % ( file_name, len( cache.keys() ) ) )
160 except Exception, error:
161 file_cache_t.logger.exception( "Error occured while reading cache file: %s", error )
162 cache_file_obj.close()
163 file_cache_t.logger.info( "Invalid cache file: [%s] Regenerating." % file_name )
164 file(file_name, 'w+b').close()
165 cache = {}
166 return cache
167
169
def flush(self):
    """ Write the cache out to disk if it changed since loading.

    Before writing, drops every entry that was never hit, so entries for
    files that changed (or are no longer parsed) do not accumulate.
    """
    if not self.__needs_flushed:
        self.logger.debug("Cache did not change, ignoring flush.")
        return

    # remove entries that did not receive a cache hit; keys() returns a
    # snapshot list, so deleting while iterating over it is safe
    num_removed = 0
    for key in self.__cache.keys():
        if not self.__cache[key].was_hit:
            num_removed += 1
            del self.__cache[key]
    if num_removed > 0:
        self.logger.debug( "There are %s removed entries from cache." % num_removed )

    # pickle the whole cache; finally guarantees the handle is closed even
    # when dump() raises (the original leaked it in that case)
    cache_file = file( self.__name, 'w+b' )
    try:
        cPickle.dump( self.__cache, cache_file, cPickle.HIGHEST_PROTOCOL )
    finally:
        cache_file.close()
186
187 - def update(self, source_file, configuration, declarations, included_files):
200
202 """ Attempt to lookup the cached decls for the given file and configuration.
203 If not found or signature check fails, returns None.
204 """
205 key = record_t.create_key(source_file, configuration)
206 if not self.__cache.has_key( key ):
207 return None
208 record = self.__cache[key]
209 if self.__is_valid_signature( record ):
210 record.was_hit = True
211 return record.declarations
212 else:
213 del self.__cache[key]
214 return None
215
224
229
232
233 - def update(self, source_file, configuration, declarations, included_files):
235
238