Implemented iter_revisions. Various minor fixes.
The method iter_revisions(page) is now implemented, and yields the last ten revisions of the page, as 'Revision' objects. Various fixes, like removing unused imports.
This commit is contained in:
parent
200ab648bb
commit
5d24cec733
2 changed files with 33 additions and 7 deletions
|
|
@ -50,8 +50,9 @@ class MediawikiBackend(BaseBackend, ICapContent):
|
|||
content.content = data
|
||||
return content
|
||||
|
||||
def log_content(self, id):
    '''Content change log is not supported by this backend.'''
    raise NotImplementedError()
|
||||
def iter_revisions(self, _id):
    '''Iterate over the revisions of the page identified by _id.

    Delegates to the browser and yields each Revision object it produces.
    '''
    revisions = self.browser.iter_wiki_revisions(_id)
    for revision in revisions:
        yield revision
|
||||
|
||||
|
||||
def push_content(self, content, message=None, minor=False):
|
||||
|
|
@ -59,6 +60,3 @@ class MediawikiBackend(BaseBackend, ICapContent):
|
|||
|
||||
def get_content_preview(self, content):
    '''Return the wiki-rendered preview of the given content.

    The actual rendering is performed by the browser's get_wiki_preview.
    '''
    preview = self.browser.get_wiki_preview(content)
    return preview
|
||||
|
||||
|
||||
|
||||
|
|
|
|||
|
|
@ -17,9 +17,11 @@
|
|||
|
||||
from urlparse import urlsplit
|
||||
import urllib
|
||||
import lxml.html
|
||||
import datetime
|
||||
|
||||
|
||||
from weboob.tools.browser import BaseBrowser
|
||||
from weboob.capabilities.content import Revision
|
||||
|
||||
try:
|
||||
import simplejson
|
||||
|
|
@ -67,6 +69,7 @@ class MediawikiBrowser(BaseBrowser):
|
|||
return result['query']['pages'][str(pageid)]['revisions'][0]['*']
|
||||
|
||||
def get_token(self, page, _type):
|
||||
''' _type can be edit, delete, protect, move, block, unblock, email or import'''
|
||||
if not self.is_logged():
|
||||
self.login()
|
||||
|
||||
|
|
@ -99,7 +102,6 @@ class MediawikiBrowser(BaseBrowser):
|
|||
data['minor'] = 'true'
|
||||
|
||||
result = self.API_post(data)
|
||||
print result
|
||||
|
||||
def get_wiki_preview(self, content, message=None):
|
||||
data = {'action': 'parse',
|
||||
|
|
@ -136,6 +138,32 @@ class MediawikiBrowser(BaseBrowser):
|
|||
data['lgtoken'] = result['login']['token']
|
||||
result2 = self.API_post(data)
|
||||
|
||||
def iter_wiki_revisions(self, page):
    '''Yield 'Revision' objects for the last 10 revisions of the specified page.

    Queries the MediaWiki JSON API (action=query, prop=revisions) and maps
    each returned revision to a Revision object with its id, comment,
    author, timestamp and minor flag.
    '''
    data = {'action': 'query',
            'titles': page,
            'prop': 'revisions',
            'rvprop': 'ids|timestamp|comment|user|flags',
            'rvlimit': '10',
           }

    result = self.API_get(data)
    # The API keys its answer on an opaque page id; we queried a single
    # title, so take the only key.  list(...) keeps this working on both
    # Python 2 and 3 (dict.keys() is not subscriptable on Python 3).
    pageid = str(list(result['query']['pages'].keys())[0])

    for rev in result['query']['pages'][pageid]['revisions']:
        rev_content = Revision(str(rev['revid']))
        rev_content.comment = rev['comment']
        rev_content.revision = str(rev['revid'])
        rev_content.author = rev['user']
        # Timestamps come back in MediaWiki's ISO-8601 UTC format.
        rev_content.timestamp = datetime.datetime.strptime(rev['timestamp'], '%Y-%m-%dT%H:%M:%SZ')
        # The 'minor' key is only present when the revision is minor;
        # 'in' replaces the deprecated dict.has_key().
        rev_content.minor = 'minor' in rev
        yield rev_content
|
||||
|
||||
|
||||
|
||||
def home(self):
    '''Intentional no-op: the JSON API needs no location change.'''
    pass
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue