+ # Tail of the per-item loop over the syncitems list (loop header is above
+ # this chunk): fetch the full body of one changed journal entry via the
+ # XML-RPC getevents call in "one" mode and dump it to disk.
+ # item['item'] is of the form "<type>-<num>" (e.g. "L-1234" per the
+ # commented-out code below), so [2:] strips the two-character type
+ # prefix to leave the numeric itemid -- TODO confirm against the loop header.
+ print "Fetching journal entry %s (%s)" % (item['item'], item['action'])
+ try:
+ e = server.LJ.XMLRPC.getevents(dochallenge({
+ 'username': Username,
+ 'ver': 1,
+ 'selecttype': "one",
+ 'itemid': item['item'][2:],
+ }, Password))
+ writedump("%s/%s" % (Username, item['item']), e['events'][0])
+ newentries += 1
+ except xmlrpclib.Fault, x:
+ # A server-side fault (e.g. an entry we may not read) is reported and
+ # counted, but does not abort the backup run.
+ print "Error getting item: %s" % item['item']
+ pprint.pprint(x)
+ errors += 1
+ # Advance the sync cursor even on failure, so this item is not retried
+ # forever on subsequent runs.
+ lastsync = item['time']
+
+# The following code doesn't work because the server rejects our repeated calls.
+# http://www.livejournal.com/doc/server/ljp.csp.xml-rpc.getevents.html
+# contains the statement "You should use the syncitems selecttype in
+# conjuntions [sic] with the syncitems protocol mode", but provides
+# no other explanation about how these two function calls should
+# interact. Therefore we just do the above slow one-at-a-time method.
+
+#while True:
+# r = server.LJ.XMLRPC.getevents(dochallenge({
+# 'username': Username,
+# 'ver': 1,
+# 'selecttype': "syncitems",
+# 'lastsync': lastsync,
+# }, Password))
+# pprint.pprint(r)
+# if len(r['events']) == 0:
+# break
+# for item in r['events']:
+# writedump("%s/L-%d" % (Username, item['itemid']), item)
+# newentries += 1
+# lastsync = item['eventtime']
+
+print "Fetching journal comments for: %s" % Username
+
+# Load the comment-metadata cache (comment id -> posterid/state) saved by a
+# previous run; start with an empty cache if the file is missing.
+# NOTE(review): the bare except also swallows corrupt pickles and permission
+# errors -- catching IOError explicitly would be safer.
+try:
+ f = open("%s/comment.meta" % Username)
+ metacache = pickle.load(f)
+ f.close()
+except:
+ metacache = {}
+
+# Likewise load the posterid -> username map from the previous run.
+try:
+ f = open("%s/user.map" % Username)
+ usermap = pickle.load(f)
+ f.close()
+except:
+ usermap = {}
+
+
+# Page through the comment-metadata export, starting just past the highest
+# comment id seen on the last run (lastmaxid), updating metacache and usermap
+# as we go.  Authentication is via the ljsession cookie obtained earlier.
+maxid = lastmaxid
+while True:
+ r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_meta&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
+ meta = xml.dom.minidom.parse(r)
+ r.close()
+ for c in meta.getElementsByTagName("comment"):
+ # NOTE(review): "id" shadows the builtin; kept as-is since this is a
+ # script-level variable also used by the body-fetch loop below.
+ id = int(c.getAttribute("id"))
+ metacache[id] = {
+ 'posterid': c.getAttribute("posterid"),
+ 'state': c.getAttribute("state"),
+ }
+ if id > maxid:
+ maxid = id
+ for u in meta.getElementsByTagName("usermap"):
+ usermap[u.getAttribute("id")] = u.getAttribute("user")
+ # The server reports the journal's current highest comment id in <maxid>;
+ # stop once our paging cursor has reached it.
+ if maxid >= int(meta.getElementsByTagName("maxid")[0].firstChild.nodeValue):
+ break
+
+# Persist both caches for the next incremental run.
+# NOTE(review): files are opened in text mode; fine for pickle protocol 0 on
+# Python 2, but "wb" would be more robust.
+f = open("%s/comment.meta" % Username, "w")
+pickle.dump(metacache, f)
+f.close()
+
+f = open("%s/user.map" % Username, "w")
+pickle.dump(usermap, f)
+f.close()
+
+
+# Write an index of the user's icons (keyword -> URL) to userpics.xml and
+# download each image alongside it, naming the file after the keyword with
+# an extension derived from the response's Content-Type via MimeExtensions.
+# NOTE(review): a keyword containing "/" or other filesystem-special
+# characters would produce a bad path here -- verify upstream sanitization.
+print "Fetching userpics for: %s" % Username
+f = open("%s/userpics.xml" % Username, "w")
+print >>f, """<?xml version="1.0"?>"""
+print >>f, "<userpics>"
+for p in userpics:
+ print >>f, """<userpic keyword="%s" url="%s" />""" % (p, userpics[p])
+ pic = urllib2.urlopen(userpics[p])
+ # Unknown content types fall back to no extension at all.
+ ext = MimeExtensions.get(pic.info()["Content-Type"], "")
+ picf = open("%s/%s%s" % (Username, codecs.utf_8_decode(p)[0], ext), "wb")
+ shutil.copyfileobj(pic, picf)
+ pic.close()
+ picf.close()
+print >>f, "</userpics>"
+f.close()
+
+
+# Second pass: fetch the comment bodies for the same id range the metadata
+# pass just covered.  newmaxid remembers where the metadata pass stopped;
+# maxid is rewound to the previous run's high-water mark so both passes walk
+# the identical window of new comments.
+newmaxid = maxid
+maxid = lastmaxid
+while True:
+ r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_body&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
+ meta = xml.dom.minidom.parse(r)
+ r.close()
+ for c in meta.getElementsByTagName("comment"):
+ id = int(c.getAttribute("id"))
+ jitemid = c.getAttribute("jitemid")
+ # Flatten the XML element into a plain dict for createxml() below.
+ comment = {
+ 'id': str(id),
+ 'parentid': c.getAttribute("parentid"),
+ 'subject': gettext(c.getElementsByTagName("subject")),
+ 'date': gettext(c.getElementsByTagName("date")),
+ 'body': gettext(c.getElementsByTagName("body")),
+ # NOTE(review): raises KeyError if the metadata pass somehow
+ # missed this id (metacache is only ever extended above).
+ 'state': metacache[id]['state'],
+ }
+ # Anonymous comments have no posterid entry in usermap; only named
+ # posters get a "user" field.
+ if usermap.has_key(c.getAttribute("posterid")):
+ comment["user"] = usermap[c.getAttribute("posterid")]
+ # Comments are grouped into one XML file per journal entry ("C-<jitemid>");
+ # load the existing file or start a fresh <comments> document.
+ try:
+ entry = xml.dom.minidom.parse("%s/C-%s" % (Username, jitemid))
+ except:
+ entry = xml.dom.minidom.getDOMImplementation().createDocument(None, "comments", None)
+ # Skip comments already present from a previous run (incremental runs
+ # can re-download the boundary comment).
+ found = False
+ for d in entry.getElementsByTagName("comment"):
+ if int(d.getElementsByTagName("id")[0].firstChild.nodeValue) == id:
+ found = True
+ break
+ if found:
+ print "Warning: downloaded duplicate comment id %d in jitemid %s" % (id, jitemid)
+ else:
+ entry.documentElement.appendChild(createxml(entry, "comment", comment))
+ # NOTE(review): the per-entry file is re-parsed and fully rewritten for
+ # every single comment -- O(n^2) in comments per entry, but simple.
+ f = codecs.open("%s/C-%s" % (Username, jitemid), "w", "UTF-8")
+ entry.writexml(f)
+ f.close()
+ newcomments += 1
+ if id > maxid:
+ maxid = id
+ # Stop once we have caught up with where the metadata pass ended.
+ if maxid >= newmaxid:
+ break
+
+
+lastmaxid = maxid
+
+# Persist the incremental-sync state for the next run: the entry-sync
+# timestamp on line one, the highest comment id on line two.
+f = open("%s/.last" % Username, "w")
+f.write("%s\n" % lastsync)
+f.write("%s\n" % lastmaxid)
+f.close()
+
+# Final summary.  origlastsync is the timestamp loaded at startup; it is
+# empty/falsy on a first-ever run, hence the two message variants.
+if origlastsync:
+ print "%d new entries, %d new comments (since %s)" % (newentries, newcomments, origlastsync)
+else:
+ print "%d new entries, %d new comments" % (newentries, newcomments)
+if errors > 0:
+ print "%d errors" % errors