Strip out our API and use simplemediawiki
author: Ian Weller <ian@ianweller.org>
Mon, 31 Jan 2011 21:20:39 +0000 (14:20 -0700)
committer: Ian Weller <ian@ianweller.org>
Mon, 31 Jan 2011 21:20:39 +0000 (14:20 -0700)
src/mw/api.py [deleted file]
src/mw/clicommands.py
src/mw/metadir.py

diff --git a/src/mw/api.py b/src/mw/api.py
deleted file mode 100644 (file)
index 6aec91c..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-###
-# mw - VCS-like nonsense for MediaWiki websites
-# Copyright (C) 2010  Ian Weller <ian@ianweller.org>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program.  If not, see <http://www.gnu.org/licenses/>.
-###
-
-import cookielib
-import gzip
-import json
-import mw
-import mw.metadir
-import os
-from StringIO import StringIO
-import urllib
-import urllib2
-
-
-class API(object):
-
-    def __init__(self, api_url, metadir):
-        self.api_url = api_url
-        self.metadir = metadir
-        self.cookiejar = cookielib.MozillaCookieJar(os.path.join(
-                self.metadir.location, 'cookies'))
-        try:
-            self.cookiejar.load()
-        except IOError:
-            self.cookiejar.save()
-            self.cookiejar.load()
-        self.opener = urllib2.build_opener(
-                urllib2.HTTPCookieProcessor(self.cookiejar))
-        self._high_limits = None
-
-    def call(self, data):
-        data['format'] = 'json'
-        user_agent = 'mw/%s +http://github.com/ianweller/mw' % mw.version
-        request = urllib2.Request(self.api_url, urllib.urlencode(data),
-                                  {'User-Agent': user_agent})
-        request.add_header('Accept-encoding', 'gzip')
-        response = self.opener.open(request)
-        self.cookiejar.save()
-        if response.headers.get('Content-Encoding') == 'gzip':
-            compressed = StringIO(response.read())
-            gzipper = gzip.GzipFile(fileobj=compressed)
-            data = gzipper.read()
-        else:
-            data = response.read()
-        the_data = json.loads(data)
-        if 'error' in the_data.keys():
-            raise APIError(the_data['error']['info'])
-        return the_data
-
-    def limits(self, low, high):
-        if self._high_limits == None:
-            result = self.call({'action': 'query',
-                                'meta': 'userinfo',
-                                'uiprop': 'rights'})
-            self._high_limits = 'apihighlimits' in \
-                    result['query']['userinfo']['rights']
-        if self._high_limits:
-            return high
-        else:
-            return low
-
-
-class APIError(Exception):
-
-    def __init__(self, info):
-        self.info = info
-
-    def __str__(self):
-        return self.info
-
-
-def pagename_to_filename(name):
-    name = name.replace(' ', '_')
-    name = name.replace('/', '!')
-    return name
-
-
-def filename_to_pagename(name):
-    name = name.replace('!', '/')
-    name = name.replace('_', ' ')
-    return name
diff --git a/src/mw/clicommands.py b/src/mw/clicommands.py
index 4f5fc48ab7c2ea060701e00a6fb57bff89ab2c7e..6e5075b528cea34240e41deba86c14251bf96488 100644 (file)
 ###
 
 import codecs
+import cookielib
 import getpass
 import hashlib
-import mw.api
 import mw.metadir
 from optparse import OptionParser, OptionGroup
 import os
+import simplemediawiki
 import sys
 
 
@@ -76,8 +77,11 @@ class CommandBase(object):
             sys.exit(1)
 
     def _api_setup(self):
+        cookie_file = os.path.join(self.metadir.location, 'cookies')
+        print cookie_file
         self.api_url = self.metadir.config.get('remote', 'api_url')
-        self.api = mw.api.API(self.api_url, self.metadir)
+        self.api = simplemediawiki.MediaWiki(self.api_url,
+                                             cookie_file=cookie_file)
 
 
 class InitCommand(CommandBase):
@@ -148,7 +152,7 @@ class PullCommand(CommandBase):
                 self.metadir.pagedict_add(pagename, pageid, revids[-1])
                 self.metadir.pages_add_rv(int(pageid),
                                           response[pageid]['revisions'][0])
-                filename = mw.api.pagename_to_filename(pagename)
+                filename = mw.metadir.pagename_to_filename(pagename)
                 with file(os.path.join(self.metadir.root, filename + '.wiki'),
                           'w') as fd:
                     data = response[pageid]['revisions'][0]['*']
@@ -180,7 +184,7 @@ class DiffCommand(CommandBase):
         for file in status:
             if status[file] == 'U':
                 print self.metadir.diff_rv_to_working(
-                        mw.api.filename_to_pagename(file[:-5])),
+                        mw.metadir.filename_to_pagename(file[:-5])),
 
 
 class CommitCommand(CommandBase):
@@ -221,7 +225,7 @@ class CommitCommand(CommandBase):
                         'action': 'query',
                         'prop': 'info|revisions',
                         'intoken': 'edit',
-                        'titles': mw.api.filename_to_pagename(file[:-5]),
+                        'titles': mw.metadir.filename_to_pagename(file[:-5]),
                 }
                 response = self.api.call(data)
                 pageid = response['query']['pages'].keys()[0]
@@ -244,7 +248,7 @@ class CommitCommand(CommandBase):
                 textmd5 = md5.hexdigest()
                 data = {
                         'action': 'edit',
-                        'title': mw.api.filename_to_pagename(file[:-5]),
+                        'title': mw.metadir.filename_to_pagename(file[:-5]),
                         'token': edittoken,
                         'text': text,
                         'md5': textmd5,
diff --git a/src/mw/metadir.py b/src/mw/metadir.py
index 05dcdb85abda5f4f45fb0021b841550ae7b652eb..07043c6ebd005c4310259577a49f7b6fae106e5c 100644 (file)
@@ -20,7 +20,6 @@ import bzrlib.diff
 import codecs
 import ConfigParser
 import json
-import mw.api
 import os
 from StringIO import StringIO
 import sys
@@ -79,7 +78,7 @@ class Metadir(object):
         os.mkdir(os.path.join(self.location, 'cache', 'pages'), 0755)
 
     def clean_page(self, pagename):
-        filename = mw.api.pagename_to_filename(pagename) + '.wiki'
+        filename = pagename_to_filename(pagename) + '.wiki'
         cur_content = codecs.open(filename, 'r', 'utf-8').read()
         if len(cur_content) != 0 and cur_content[-1] == '\n':
             cur_content = cur_content[:-1]
@@ -157,7 +156,7 @@ class Metadir(object):
         for full in check:
             name = os.path.split(full)[1]
             if name[-5:] == '.wiki':
-                pagename = mw.api.filename_to_pagename(name[:-5])
+                pagename = filename_to_pagename(name[:-5])
                 pageid = self.get_pageid_from_pagename(pagename)
                 if not pageid:
                     status[os.path.relpath(full, self.root)] = '?'
@@ -174,7 +173,7 @@ class Metadir(object):
     def diff_rv_to_working(self, pagename, oldrvid=0, newrvid=0):
         # oldrvid=0 means latest fetched revision
         # newrvid=0 means working copy
-        filename = mw.api.pagename_to_filename(pagename) + '.wiki'
+        filename = pagename_to_filename(pagename) + '.wiki'
         filename = filename.decode('utf-8')
         pageid = self.get_pageid_from_pagename(pagename)
         if not pageid:
@@ -203,3 +202,15 @@ class Metadir(object):
             if diff[-1] == '\n':
                 diff = diff[:-1]
             return diff
+
+
+def pagename_to_filename(name):
+    name = name.replace(' ', '_')
+    name = name.replace('/', '!')
+    return name
+
+
+def filename_to_pagename(name):
+    name = name.replace('!', '/')
+    name = name.replace('_', ' ')
+    return name

Benjamin Mako Hill || Want to submit a patch?