else:
datalist[id] = {"id": id, "num": num, "title": title,
"res": res, "lineCount": BOARD_DATA_INVALID_VALUE,
- "lastModified": "", "average": average}
+ "lastModified": 0, "average": average, "oldRes": 0}
def merge_local_subjecttxt(self, datalist):
- iterable = self._load_subjecttxt()
-
- for id, title, res, num, lastmod in iterable:
- self._merge_new_thread(datalist, id, title, res, num, lastmod)
-
- status = "Complete subject file."
- gobject.idle_add(self.set_status, status)
+ # Merge every thread record parsed from the local subject.txt into datalist.
+ try:
+ for id, title, res, num, lastmod in self._load_subjecttxt():
+ self._merge_new_thread(datalist, id, title, res, num, lastmod)
+ except IOError:
+ # No readable local subject.txt -- nothing to merge, not an error.
+ pass
+ except:
+ # FIX: was "tracebakc.print_exc()" (typo -> NameError when this
+ # handler fired); log the unexpected error instead of crashing.
+ traceback.print_exc()
+ else:
+ # Only report completion when the merge ran to the end.
+ status = "Complete subject file."
+ gobject.idle_add(self.set_status, status)
def merge_remote_subjecttxt(self, datalist):
- iterable = self._get_subjecttxt()
-
- for id, title, res, num, lastmod in iterable:
- self._merge_new_thread(datalist, id, title, res, num, lastmod)
+ # Merge thread records from the freshly fetched subject.txt into datalist.
+ try:
+ for id, title, res, num, lastmod in self._get_subjecttxt():
+ self._merge_new_thread(datalist, id, title, res, num, lastmod)
+ except IOError:
+ # Fetch/read failure is treated as best-effort: keep existing data.
+ pass
+ except:
+ # Unexpected errors are printed rather than propagated.
+ traceback.print_exc()
def _init_extra_data(self, dic):
+ # Seed the derived fields that are not stored in the raw index data.
dic["num"] = 0
dic["res"] = 0
dic["average"] = 0
+ # NOTE(review): "oldRes" presumably remembers the previous response
+ # count so new posts can be detected -- confirm against callers.
+ dic["oldRes"] = 0
return dic
def _progressing(self, iterable):
+ # Pass fractions through, updating the progress bar only when the value
+ # crosses a 10% step; GTK access is bracketed by the GDK thread lock
+ # (presumably because this generator runs off the main loop -- confirm).
for before, fraction in follow(iterable):
if int(before*10) != int(fraction*10):
- gtk.threads_enter()
+ gtk.gdk.threads_enter()
try:
self.set_fraction(fraction)
finally:
+ # Always release the lock, even if set_fraction raises.
- gtk.threads_leave()
+ gtk.gdk.threads_leave()
yield fraction
+ def _modify_dict(self, item_dict):
+ """Normalize item_dict["lastModified"] from an HTTP-date string to
+ epoch seconds; unparsable values become 0. Mutates and returns the
+ same dict."""
+ # lastModified, httpdate to second
+ httpdate = item_dict["lastModified"]
+ try:
+ secs = misc.httpdate_to_secs(httpdate)
+ except ValueError:
+ item_dict["lastModified"] = 0
+ else:
+ item_dict["lastModified"] = secs
+ return item_dict
+
def load_idxfiles(self):
+ # Load the cached board index, refresh it from the on-disk idx files,
+ # persist it again, then normalize each record's lastModified field.
print "load_cache"
- datalist = self._load_cache()
+ try:
+ datalist = self._load_cache()
+ except IOError:
+ # First run / missing cache: start from an empty board index.
+ datalist = {}
print "load_idx"
self._load_modified_idxfiles(datalist)
print "save_cache"
- cachefile.save_cache(self.bbs_type, datalist)
+ try:
+ self._save_cache(datalist)
+ except IOError:
+ # Saving the cache is best-effort; report and continue.
+ traceback.print_exc()
+
+ # adjustment after cache save, before load subject.txt
+ # FIX: was a lazy itertools.imap drained by "for i in iterable: -1"
+ # (a no-op expression); iterate directly -- _modify_dict mutates
+ # each record in place, so the loop body needs nothing else.
+ for item_dict in datalist.itervalues():
+ self._modify_dict(item_dict)
status = "Complete index files."
gobject.idle_add(self.set_status, status)
except OSError:
total = -1
- iterable = cachefile.load_cache(self.bbs_type)
+ iterable = file(misc.get_board_cache_path(self.bbs_type))
# split
iterable_dic, iterable_line = itertools.tee(iterable)
- iterable_dic = itertools.starmap(lambda x, y: x, iterable_dic)
- iterable_line = itertools.starmap(lambda x, y: y, iterable_line)
+
+ iterable_dic = itertools.imap(lambda l: l.rstrip(), iterable_dic)
+ iterable_dic = cachefile.formatted_to_dict(iterable_dic)
iterable_line = itertools.imap(lambda x :len(x), iterable_line)
iterable_line = accumulate(iterable_line)
def id_and_lastmod(file_path):
+ # Map an idx file path to (thread_id, mtime); returns None implicitly
+ # when the file disappeared between listing and stat (OSError swallowed).
thread_id = os.path.basename(file_path)[:len(ext)*-1]
try:
+ # int() truncates the float mtime -- presumably to match the integer
+ # lastModified seconds stored in the cache; confirm.
- idxlastModified = os.path.getmtime(file_path)
+ idxlastModified = int(os.path.getmtime(file_path))
return thread_id, idxlastModified
except OSError:
pass
del datalist[key]
print "del", key
+ def _save_cache(self, datalist):
+ """Format datalist with cachefile.dict_to_formatted and write it to
+ this board's cache file.
+
+ NOTE(review): the file object is never explicitly closed -- relies on
+ misc.FileWrap closing on destruction; confirm its semantics."""
+ iterable = datalist.iteritems()
+ iterable = cachefile.dict_to_formatted(iterable)
+ c_file = misc.FileWrap(misc.get_board_cache_path(self.bbs_type), "w")
+ c_file.writelines(iterable)
+
def _split_record(self, line_encoded):
line = line_encoded.decode(self.bbs_type.encoding, "replace")
m = self.bbs_type.subject_reg.match(line)
iterable_len = itertools.imap(lambda l: len(l), iterable_len)
iterable_len = accumulate(iterable_len)
iterable_len = itertools.imap(
- lambda value: float(value) / total / 2 + 0.5, iterable_len)
+ lambda value: float(value) / total, iterable_len)
iterable_len = self._progressing(iterable_len)
# union