#
# ljdump.py - livejournal archiver
# Greg Hewgill <greg@hewgill.com> http://hewgill.com
-# Version 1.1
+# Version 1.2
#
# $Id$
#
#
# Copyright (c) 2005-2006 Greg Hewgill
-import codecs, md5, os, pickle, pprint, sys, urllib2, xml.dom.minidom, xmlrpclib
+import codecs, md5, os, pickle, pprint, re, shutil, sys, urllib2, xml.dom.minidom, xmlrpclib
from xml.sax import saxutils
# New in 1.2: map a downloaded userpic's HTTP Content-Type header to a
# file extension (lookup uses .get(..., ""), so unknown types get none).
+MimeExtensions = {
+ "image/gif": ".gif",
+ "image/jpeg": ".jpg",
+ "image/png": ".png",
+}
+
def calcchallenge(challenge, password):
    # LiveJournal challenge-response auth: the response is the md5 hex digest
    # of the server challenge concatenated with the md5 hex of the password.
    pwhex = md5.new(password).hexdigest()
    return md5.new(challenge + pwhex).hexdigest()
return r
def getljsession(username, password):
    # Obtain an "ljsession" cookie via the flat protocol: fetch a challenge,
    # answer it with calcchallenge(), then call mode=sessiongenerate.
    # NOTE(review): "-" lines are the pre-patch hard-coded livejournal.com
    # host; "+" lines switch to the configurable Server base URL.
- r = urllib2.urlopen("http://livejournal.com/interface/flat", "mode=getchallenge")
+ r = urllib2.urlopen(Server+"/interface/flat", "mode=getchallenge")
response = flatresponse(r)
r.close()
- r = urllib2.urlopen("http://livejournal.com/interface/flat", "mode=sessiongenerate&user=%s&auth_method=challenge&auth_challenge=%s&auth_response=%s" % (username, response['challenge'], calcchallenge(response['challenge'], password)))
+ r = urllib2.urlopen(Server+"/interface/flat", "mode=sessiongenerate&user=%s&auth_method=challenge&auth_challenge=%s&auth_response=%s" % (username, response['challenge'], calcchallenge(response['challenge'], password)))
response = flatresponse(r)
r.close()
return response['ljsession']
# --- script fragments from elided diff hunks; config/lastsync are defined
# --- in lines not shown here, so their shape can't be verified from this view.
Username = config.documentElement.getElementsByTagName("username")[0].childNodes[0].data
Password = config.documentElement.getElementsByTagName("password")[0].childNodes[0].data
# New in 1.2: normalize the configured Server value — strip a trailing
# "/interface/xmlrpc" so Server always holds just the base URL, and the
# endpoint path is re-appended explicitly where needed.
+m = re.search("(.*)/interface/xmlrpc", Server)
+if m:
+ Server = m.group(1)
+
print "Fetching journal entries for: %s" % Username
try:
os.mkdir(Username)
ljsession = getljsession(Username, Password)
# Pre-patch the proxy used Server verbatim; post-patch it appends the
# XML-RPC endpoint to the normalized base URL.
-server = xmlrpclib.ServerProxy(Server)
+server = xmlrpclib.ServerProxy(Server+"/interface/xmlrpc")
newentries = 0
newcomments = 0
pass
origlastsync = lastsync
# New in 1.2: log in via challenge-response and ask the server for the
# account's userpic keywords and URLs so they can be archived too.
+r = server.LJ.XMLRPC.login(dochallenge({
+ 'username': Username,
+ 'ver': 1,
+ 'getpickws': 1,
+ 'getpickwurls': 1,
+}, Password))
# keyword -> URL map; the '*' key holds the default userpic URL.
+userpics = dict(zip(map(str, r['pickws']), r['pickwurls']))
+userpics['*'] = r['defaultpicurl']
+
while True:
# Incremental sync of journal items (loop body elided by the diff).
r = server.LJ.XMLRPC.syncitems(dochallenge({
'username': Username,
maxid = lastmaxid
while True:
# Page through comment metadata starting after the highest id seen so far;
# post-patch uses the configurable Server instead of a hard-coded host.
# The ljsession cookie authenticates the export_comments.bml request.
- r = urllib2.urlopen(urllib2.Request("http://livejournal.com/export_comments.bml?get=comment_meta&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
+ r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_meta&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
meta = xml.dom.minidom.parse(r)
r.close()
for c in meta.getElementsByTagName("comment"):
pickle.dump(usermap, f)
f.close()
# New in 1.2: download each userpic into the user's archive directory and
# write an index (userpics.xml) mapping keyword -> source URL.
+print "Fetching userpics for: %s" % Username
+f = open("%s/userpics.xml" % Username, "w")
+print >>f, """<?xml version="1.0"?>"""
+print >>f, "<userpics>"
+for p in userpics:
+ print >>f, """<userpic keyword="%s" url="%s" />""" % (p, userpics[p])
+ pic = urllib2.urlopen(userpics[p])
# Choose a file extension from the Content-Type; unknown types get "".
+ ext = MimeExtensions.get(pic.info()["Content-Type"], "")
# NOTE(review): the keyword is used verbatim as the filename — characters
# such as '/' or the '*' default key may be illegal on some filesystems;
# confirm this is acceptable before relying on it.
+ picf = open("%s/%s%s" % (Username, codecs.utf_8_decode(p)[0], ext), "wb")
+ shutil.copyfileobj(pic, picf)
+ pic.close()
+ picf.close()
+print >>f, "</userpics>"
+f.close()
+
newmaxid = maxid
maxid = lastmaxid
while True:
# Second pass over comments: fetch full bodies with the same pagination
# as the metadata pass; post-patch uses the configurable Server host.
- r = urllib2.urlopen(urllib2.Request("http://livejournal.com/export_comments.bml?get=comment_body&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
+ r = urllib2.urlopen(urllib2.Request(Server+"/export_comments.bml?get=comment_body&startid=%d" % (maxid+1), headers = {'Cookie': "ljsession="+ljsession}))
meta = xml.dom.minidom.parse(r)
r.close()
for c in meta.getElementsByTagName("comment"):