projects.mako.cc - mw/commitdiff
Merge git://github.com/esby/mw
author    Ian Weller <ian@ianweller.org>
Mon, 31 Jan 2011 23:04:29 +0000 (16:04 -0700)
committer Ian Weller <ian@ianweller.org>
Mon, 31 Jan 2011 23:04:29 +0000 (16:04 -0700)
Conflicts:
src/mw/clicommands.py

HACKING
README
src/mw/api.py [deleted file]
src/mw/clicommands.py
src/mw/metadir.py

diff --git a/HACKING b/HACKING
index 93100834f33a70918bebc28d7ee63e12a2c46936..cdb6c6b973ac2ee9543c744905f1cf5414e9e710 100644 (file)
--- a/HACKING
+++ b/HACKING
@@ -1,6 +1,18 @@
-Run this awesome whatnot with:
-  PYTHONPATH=$PWD/src bin/mw
+ _____________________________
+< Patches are always welcome! >
+ -----------------------------
+       \   ,__,
+        \  (oo)____
+           (__)    )\
+              ||--|| *
+
+The preferred method of submitting patches is by forking the repository,
+committing changes, and then making the repository accessible. This is most
+easily done on GitHub, but you can put it anywhere I can get it.
 
 Changing how something already works in the .mw metadir requires a damn
-good reason and we don't want to introduce incompatibilities at all in
-the tree. On the other hand, patches are greatly welcomed!
+good reason since we don't want to introduce incompatibilities at all in
+the tree.
+
+Code submitted should follow PEP 8. If it doesn't, I'll modify your changes (in
+later commits) until they are in line with that style.
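
For reference, PEP 8 here means the standard Python style conventions:
four-space indentation, snake_case names, spaces around operators, and two
blank lines between top-level definitions. A minimal sketch (the function is
purely illustrative, not part of mw):

  # Illustrative only: a function laid out per PEP 8, with four-space
  # indents, a snake_case name, and spaces around operators.
  def normalize_title(title):
      words = title.strip().split()
      return ' '.join(words)
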
diff --git a/README b/README
index 71dddb8376525b77cac4b23100bec456471e72e5..1887524e83db88a36625eba702d814791b547311 100644 (file)
--- a/README
+++ b/README
@@ -2,14 +2,16 @@ mw - VCS-like nonsense for MediaWiki websites
 Copyright (C) 2010  Ian Weller <ian@ianweller.org>
 
 == Basic workflow ==
-See HACKING on how to run this; a nice setup.py isn't done yet.
+We don't have a nice installation process yet, so set the following alias:
+  alias mw="PYTHONPATH=PATH_TO/mw/src PATH_TO/mw/bin/mw"
+where PATH_TO is the path to your local mw repository.
 
 mw init http://example.com/w/api.php
 mw login # if you need/want to
 mw pull 'Main Page'
 $EDITOR Main_Page.wiki
-mw fetch # check for newer revisions
-mw update # apply newer revisions
+mw fetch # check for newer revisions; this command doesn't exist yet
+mw update # apply newer revisions; this command doesn't exist yet
 mw commit
 
 == License ==
diff --git a/src/mw/api.py b/src/mw/api.py
deleted file mode 100644 (file)
index 6aec91c..0000000
+++ /dev/null
@@ -1,96 +0,0 @@
-###
-# mw - VCS-like nonsense for MediaWiki websites
-# Copyright (C) 2010  Ian Weller <ian@ianweller.org>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation; either version 2 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program.  If not, see <http://www.gnu.org/licenses/>.
-###
-
-import cookielib
-import gzip
-import json
-import mw
-import mw.metadir
-import os
-from StringIO import StringIO
-import urllib
-import urllib2
-
-
-class API(object):
-
-    def __init__(self, api_url, metadir):
-        self.api_url = api_url
-        self.metadir = metadir
-        self.cookiejar = cookielib.MozillaCookieJar(os.path.join(
-                self.metadir.location, 'cookies'))
-        try:
-            self.cookiejar.load()
-        except IOError:
-            self.cookiejar.save()
-            self.cookiejar.load()
-        self.opener = urllib2.build_opener(
-                urllib2.HTTPCookieProcessor(self.cookiejar))
-        self._high_limits = None
-
-    def call(self, data):
-        data['format'] = 'json'
-        user_agent = 'mw/%s +http://github.com/ianweller/mw' % mw.version
-        request = urllib2.Request(self.api_url, urllib.urlencode(data),
-                                  {'User-Agent': user_agent})
-        request.add_header('Accept-encoding', 'gzip')
-        response = self.opener.open(request)
-        self.cookiejar.save()
-        if response.headers.get('Content-Encoding') == 'gzip':
-            compressed = StringIO(response.read())
-            gzipper = gzip.GzipFile(fileobj=compressed)
-            data = gzipper.read()
-        else:
-            data = response.read()
-        the_data = json.loads(data)
-        if 'error' in the_data.keys():
-            raise APIError(the_data['error']['info'])
-        return the_data
-
-    def limits(self, low, high):
-        if self._high_limits == None:
-            result = self.call({'action': 'query',
-                                'meta': 'userinfo',
-                                'uiprop': 'rights'})
-            self._high_limits = 'apihighlimits' in \
-                    result['query']['userinfo']['rights']
-        if self._high_limits:
-            return high
-        else:
-            return low
-
-
-class APIError(Exception):
-
-    def __init__(self, info):
-        self.info = info
-
-    def __str__(self):
-        return self.info
-
-
-def pagename_to_filename(name):
-    name = name.replace(' ', '_')
-    name = name.replace('/', '!')
-    return name
-
-
-def filename_to_pagename(name):
-    name = name.replace('!', '/')
-    name = name.replace('_', ' ')
-    return name
diff --git a/src/mw/clicommands.py b/src/mw/clicommands.py
index eaadb7ce2960ff9c2e77bf94817426215e5d9508..9efffd4a2883a68476d1723befe4f446170b81b3 100644 (file)
--- a/src/mw/clicommands.py
+++ b/src/mw/clicommands.py
 ###
 
 import codecs
+import cookielib
 import getpass
 import hashlib
-import mw.api
 import mw.metadir
 from optparse import OptionParser, OptionGroup
 import os
+import simplemediawiki
 import sys
 import time
 
@@ -77,8 +78,10 @@ class CommandBase(object):
             sys.exit(1)
 
     def _api_setup(self):
+        cookie_file = os.path.join(self.metadir.location, 'cookies')
         self.api_url = self.metadir.config.get('remote', 'api_url')
-        self.api = mw.api.API(self.api_url, self.metadir)
+        self.api = simplemediawiki.MediaWiki(self.api_url,
+                                             cookie_file=cookie_file)
 
 
 class InitCommand(CommandBase):
@@ -176,7 +179,7 @@ class PullCommand(CommandBase):
                 self.metadir.pagedict_add(pagename, pageid, revids[-1])
                 self.metadir.pages_add_rv(int(pageid),
                                           response[pageid]['revisions'][0])
-                filename = mw.api.pagename_to_filename(pagename)
+                filename = mw.metadir.pagename_to_filename(pagename)
                 with file(os.path.join(self.metadir.root, filename + '.wiki'),
                           'w') as fd:
                     data = response[pageid]['revisions'][0]['*']
@@ -208,7 +211,7 @@ class DiffCommand(CommandBase):
         for file in status:
             if status[file] == 'U':
                 print self.metadir.diff_rv_to_working(
-                        mw.api.filename_to_pagename(file[:-5])),
+                        mw.metadir.filename_to_pagename(file[:-5])),
 
 
 class CommitCommand(CommandBase):
@@ -249,7 +252,7 @@ class CommitCommand(CommandBase):
                         'action': 'query',
                         'prop': 'info|revisions',
                         'intoken': 'edit',
-                        'titles': mw.api.filename_to_pagename(file[:-5]),
+                        'titles': mw.metadir.filename_to_pagename(file[:-5]),
                 }
                 response = self.api.call(data)
                 pageid = response['query']['pages'].keys()[0]
@@ -272,7 +275,7 @@ class CommitCommand(CommandBase):
                 textmd5 = md5.hexdigest()
                 data = {
                         'action': 'edit',
-                        'title': mw.api.filename_to_pagename(file[:-5]),
+                        'title': mw.metadir.filename_to_pagename(file[:-5]),
                         'token': edittoken,
                         'text': text,
                         'md5': textmd5,
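
With these changes mw delegates cookie handling, gzip decompression, and JSON
decoding to python-simplemediawiki instead of the deleted mw.api.API class,
while the call() interface stays the same. A minimal sketch of the new call
path (the api.php URL and cookie path are placeholders):

  import simplemediawiki

  # simplemediawiki.MediaWiki manages the cookie jar and decodes the
  # JSON response internally, as mw.api.API.call() used to.
  wiki = simplemediawiki.MediaWiki('http://example.com/w/api.php',
                                   cookie_file='/tmp/mw-cookies')
  response = wiki.call({'action': 'query', 'meta': 'userinfo'})
  print response['query']['userinfo']
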
diff --git a/src/mw/metadir.py b/src/mw/metadir.py
index 3b7828dfb5194e4e7e0a79c779549231ec48d006..07043c6ebd005c4310259577a49f7b6fae106e5c 100644 (file)
--- a/src/mw/metadir.py
+++ b/src/mw/metadir.py
@@ -20,7 +20,6 @@ import bzrlib.diff
 import codecs
 import ConfigParser
 import json
-import mw.api
 import os
 from StringIO import StringIO
 import sys
@@ -78,16 +77,13 @@ class Metadir(object):
         # create cache/pages/
         os.mkdir(os.path.join(self.location, 'cache', 'pages'), 0755)
 
-
-
     def clean_page(self, pagename):
-        filename = mw.api.pagename_to_filename(pagename) + '.wiki'
+        filename = pagename_to_filename(pagename) + '.wiki'
         cur_content = codecs.open(filename, 'r', 'utf-8').read()
-        if ( (len(cur_content) != 0) and (cur_content[-1] == '\n') ):
-           cur_content = cur_content[:-1]
-
+        if len(cur_content) != 0 and cur_content[-1] == '\n':
+            cur_content = cur_content[:-1]
         fd = file(filename, 'w')
-        fd.write(cur_content.encode('utf-8'))   
+        fd.write(cur_content.encode('utf-8'))
         fd.close()
 
     def pagedict_add(self, pagename, pageid, currentrv):
@@ -160,7 +156,7 @@ class Metadir(object):
         for full in check:
             name = os.path.split(full)[1]
             if name[-5:] == '.wiki':
-                pagename = mw.api.filename_to_pagename(name[:-5])
+                pagename = filename_to_pagename(name[:-5])
                 pageid = self.get_pageid_from_pagename(pagename)
                 if not pageid:
                     status[os.path.relpath(full, self.root)] = '?'
@@ -177,7 +173,7 @@ class Metadir(object):
     def diff_rv_to_working(self, pagename, oldrvid=0, newrvid=0):
         # oldrvid=0 means latest fetched revision
         # newrvid=0 means working copy
-        filename = mw.api.pagename_to_filename(pagename) + '.wiki'
+        filename = pagename_to_filename(pagename) + '.wiki'
         filename = filename.decode('utf-8')
         pageid = self.get_pageid_from_pagename(pagename)
         if not pageid:
@@ -187,9 +183,11 @@ class Metadir(object):
                 oldrvid = self.pages_get_rv_list(pageid)[-1]
             oldrv = self.pages_get_rv(pageid, oldrvid)
             oldname = 'a/%s (revision %i)' % (filename, oldrvid)
-            old = [i + '\n' for i in oldrv['content'].encode('utf-8').split('\n')]
+            old = [i + '\n' for i in \
+                   oldrv['content'].encode('utf-8').split('\n')]
             if newrvid == 0:
-                cur_content = codecs.open(filename, 'r', 'utf-8').read().encode('utf-8')
+                cur_content = codecs.open(filename, 'r', 'utf-8').read()
+                cur_content = cur_content.encode('utf-8')
                 if (len(cur_content) != 0) and (cur_content[-1] == '\n'):
                     cur_content = cur_content[:-1]
                 newname = 'b/%s (working copy)' % filename
@@ -204,3 +202,15 @@ class Metadir(object):
             if diff[-1] == '\n':
                 diff = diff[:-1]
             return diff
+
+
+def pagename_to_filename(name):
+    name = name.replace(' ', '_')
+    name = name.replace('/', '!')
+    return name
+
+
+def filename_to_pagename(name):
+    name = name.replace('!', '/')
+    name = name.replace('_', ' ')
+    return name
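
pagename_to_filename() and filename_to_pagename() move here verbatim from the
deleted mw.api module. A quick sketch of the mapping, assuming mw/src is on
PYTHONPATH; note that because '_' stands in for ' ' and '!' for '/', the round
trip is lossy for titles containing a literal '_' or '!':

  from mw.metadir import pagename_to_filename, filename_to_pagename

  # Spaces become underscores and slashes become '!' so that subpage
  # titles map to flat filenames in the working directory.
  print pagename_to_filename('Talk:Main Page/Archive 1')
  # -> Talk:Main_Page!Archive_1
  print filename_to_pagename('Talk:Main_Page!Archive_1')
  # -> Talk:Main Page/Archive 1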
