#
# ljdump.py - livejournal archiver
# Greg Hewgill <greg@hewgill.com> http://hewgill.com
-# Version 1.1
+# Version 1.2
#
# $Id$
#
# This program may be run as often as needed to bring the backup copy up
# to date. Both new and updated items are downloaded.
#
+# The community http://ljdump.livejournal.com has been set up for questions
+# or comments.
+#
# LICENSE
#
# This software is provided 'as-is', without any express or implied
#
# Copyright (c) 2005-2006 Greg Hewgill
import codecs, hashlib, md5, os, pickle, pprint, re, shutil, sys, urllib2, xml.dom.minidom, xmlrpclib
from xml.sax import saxutils
# Map the Content-Type reported for a downloaded userpic to the file
# suffix used when saving it locally (unknown types get no suffix).
MimeExtensions = dict([
    ("image/gif", ".gif"),
    ("image/jpeg", ".jpg"),
    ("image/png", ".png"),
])
+
def calcchallenge(challenge, password):
    """Return the response hash for LiveJournal challenge-response auth.

    The protocol defines the response as md5_hex(challenge + md5_hex(password)).
    Uses hashlib instead of the md5 module, which is deprecated since
    Python 2.5 and removed in Python 3 (the digests are identical).
    """
    def md5hex(s):
        # hashlib (unlike the old md5 module) requires bytes on Python 3.
        if not isinstance(s, bytes):
            s = s.encode("utf-8")
        return hashlib.md5(s).hexdigest()
    return md5hex(challenge + md5hex(password))
return r
def getljsession(username, password):
    """Log in through the flat interface and return an ljsession cookie value.

    Performs challenge-response authentication: fetches a challenge from the
    server, answers it with calcchallenge(), and asks for a generated session.
    """
    conn = urllib2.urlopen(Server+"/interface/flat", "mode=getchallenge")
    reply = flatresponse(conn)
    conn.close()
    challenge = reply['challenge']
    answer = calcchallenge(challenge, password)
    conn = urllib2.urlopen(Server+"/interface/flat", "mode=sessiongenerate&user=%s&auth_method=challenge&auth_challenge=%s&auth_response=%s" % (username, challenge, answer))
    reply = flatresponse(conn)
    conn.close()
    return reply['ljsession']
dumpelement(f, "event", event)
f.close()
def writelast():
    """Record the current sync position (lastsync timestamp on the first
    line, lastmaxid comment id on the second) in <Username>/.last so a
    later run can resume where this one stopped."""
    state = open("%s/.last" % Username, "w")
    state.writelines(["%s\n" % lastsync, "%s\n" % lastmaxid])
    state.close()
+
def createxml(doc, name, map):
e = doc.createElement(name)
for k in map.keys():
Username = config.documentElement.getElementsByTagName("username")[0].childNodes[0].data
Password = config.documentElement.getElementsByTagName("password")[0].childNodes[0].data
# Older configs gave the full XML-RPC endpoint as the server URL; strip
# the "/interface/xmlrpc" suffix so Server is always just the base URL.
endpoint_match = re.search("(.*)/interface/xmlrpc", Server)
if endpoint_match is not None:
    Server = endpoint_match.group(1)
+
print "Fetching journal entries for: %s" % Username
try:
os.mkdir(Username)
ljsession = getljsession(Username, Password)
-server = xmlrpclib.ServerProxy(Server)
+server = xmlrpclib.ServerProxy(Server+"/interface/xmlrpc")
newentries = 0
newcomments = 0
pass
origlastsync = lastsync
# Log in once up front so the server tells us the userpic keywords and
# their URLs; '*' is used as the key for the default picture.
r = server.LJ.XMLRPC.login(dochallenge(dict(
    username=Username,
    ver=1,
    getpickws=1,
    getpickwurls=1,
), Password))
userpics = {}
for keyword, url in zip(r['pickws'], r['pickwurls']):
    userpics[str(keyword)] = url
userpics['*'] = r['defaultpicurl']
+
while True:
r = server.LJ.XMLRPC.syncitems(dochallenge({
'username': Username,
'selecttype': "one",
'itemid': item['item'][2:],
}, Password))
- writedump("%s/%s" % (Username, item['item']), e['events'][0])
- newentries += 1
+ if e['events']:
+ writedump("%s/%s" % (Username, item['item']), e['events'][0])
+ newentries += 1
+ else:
+ print "Unexpected empty item: %s" % item['item']
+ errors += 1
except xmlrpclib.Fault, x:
print "Error getting item: %s" % item['item']
pprint.pprint(x)
errors += 1
lastsync = item['time']
+ writelast()
# The following code doesn't work because the server rejects our repeated calls.
# http://www.livejournal.com/doc/server/ljp.csp.xml-rpc.getevents.html
maxid = lastmaxid
while True:
- r = urllib2.urlopen(urllib2.Request("http://livejournal.com/export_comments.bml?get=comment_meta&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
+ r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_meta&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
meta = xml.dom.minidom.parse(r)
r.close()
for c in meta.getElementsByTagName("comment"):
pickle.dump(usermap, f)
f.close()
+print "Fetching userpics for: %s" % Username
+f = open("%s/userpics.xml" % Username, "w")
+print >>f, """<?xml version="1.0"?>"""
+print >>f, "<userpics>"
+for p in userpics:
+ print >>f, """<userpic keyword="%s" url="%s" />""" % (p, userpics[p])
+ pic = urllib2.urlopen(userpics[p])
+ ext = MimeExtensions.get(pic.info()["Content-Type"], "")
+ picf = open("%s/%s%s" % (Username, codecs.utf_8_decode(p)[0], ext), "wb")
+ shutil.copyfileobj(pic, picf)
+ pic.close()
+ picf.close()
+print >>f, "</userpics>"
+f.close()
+
newmaxid = maxid
maxid = lastmaxid
while True:
- r = urllib2.urlopen(urllib2.Request("http://livejournal.com/export_comments.bml?get=comment_body&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
+ r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_body&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
meta = xml.dom.minidom.parse(r)
r.close()
for c in meta.getElementsByTagName("comment"):
lastmaxid = maxid
-f = open("%s/.last" % Username, "w")
-f.write("%s\n" % lastsync)
-f.write("%s\n" % lastmaxid)
-f.close()
+writelast()
if origlastsync:
print "%d new entries, %d new comments (since %s)" % (newentries, newcomments, origlastsync)