# mw - VCS-like nonsense for MediaWiki websites
# Copyright (C) 2011 Ian Weller <ian@ianweller.org> and others
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program.  If not, see <http://www.gnu.org/licenses/>.
import codecs
import getpass
import hashlib
import mw.metadir
from optparse import OptionParser, OptionGroup
import os
import simplemediawiki
import sys
import time
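
# Each CLI subcommand below is a CommandBase subclass; the mw entry point is
# assumed to look up the class for the requested command (or one of the
# shortcuts registered below) and call its main().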


class CommandBase(object):

    def __init__(self, name, description, usage=None):
        self.me = os.path.basename(sys.argv[0])
        self.description = description
        if usage is None:
            usage = '%prog ' + name
        else:
            usage = '%%prog %s %s' % (name, usage)
        self.parser = OptionParser(usage=usage, description=description)
        self.name = name
        self.metadir = mw.metadir.Metadir()
        self.shortcuts = []

    def main(self):
        (self.options, self.args) = self.parser.parse_args()
        self.args = self.args[1:]  # drop the subcommand name itself
        self._do_command()

    def _do_command(self):
        pass

    def _login(self):
        user = raw_input('Username: ')
        passwd = getpass.getpass()
        result = self.api.call({'action': 'login',
                                'lgname': user,
                                'lgpassword': passwd})
        if result['login']['result'] == 'Success':
            # cookies are saved to a file
            print 'Login successful! (yay)'
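        # Older MediaWiki APIs (pre-1.27 action=login) usually answer
        # 'NeedToken' on the first attempt; retry once with the returned token.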
        elif result['login']['result'] == 'NeedToken':
            print 'Login with token'
            result = self.api.call({'action': 'login',
                                    'lgname': user,
                                    'lgpassword': passwd,
                                    'lgtoken': result['login']['token']})
            if result['login']['result'] == 'Success':
                print 'Login successful! (yay)'
            else:
                print 'Login failed: %s' % result['login']['result']
        else:
            print 'Login failed: %s' % result['login']['result']

    def _die_if_no_init(self):
        if self.metadir.config is None:
            print '%s: not a mw repo' % self.me
            sys.exit(1)

    def _api_setup(self):
        cookie_filename = os.path.join(self.metadir.location, 'cookies')
        self.api_url = self.metadir.config.get('remote', 'api_url')
        self.api = simplemediawiki.MediaWiki(self.api_url,
                                             cookie_file=cookie_filename)


class InitCommand(CommandBase):

    def __init__(self):
        usage = 'API_URL'
        CommandBase.__init__(self, 'init', 'start a mw repo', usage)

    def _do_command(self):
        if len(self.args) < 1:
            self.parser.error('must have URL to remote api.php')
        elif len(self.args) > 1:
            self.parser.error('too many arguments')
        self.metadir.create(self.args[0])


class LoginCommand(CommandBase):

    def __init__(self):
        CommandBase.__init__(self, 'login', 'authenticate with wiki')

    def _do_command(self):
        self._die_if_no_init()
        self._api_setup()
        self._login()


class LogoutCommand(CommandBase):

    def __init__(self):
        CommandBase.__init__(self, 'logout', 'forget authentication')

    def _do_command(self):
        self._die_if_no_init()
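        # 'logging out' only deletes the local cookie jar; the wiki-side
        # session is simply left to expire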
        try:
            os.unlink(os.path.join(self.metadir.location, 'cookies'))
        except OSError:
            pass


class PullCategoryMembersCommand(CommandBase):

    def __init__(self):
        usage = '[options] PAGENAME ...'
        CommandBase.__init__(self, 'pullcat', 'add remote pages to repo '
                             'belonging to the given category', usage)

    def _do_command(self):
        self._die_if_no_init()
        self._api_setup()
        pages = []
        pages += self.args
        for these_pages in [pages[i:i + 25] for i in range(0, len(pages), 25)]:
            data = {
                'action': 'query',
                'gcmtitle': '|'.join(these_pages),
                'generator': 'categorymembers',
                'gcmlimit': 500,
            }
            response = self.api.call(data)['query']['pages']
            for pageid in response.keys():
                pagename = response[pageid]['title']
                print pagename
                pullc = PullCommand()
                pullc.args = [pagename.encode('utf-8')]
                pullc._do_command()


class PullCommand(CommandBase):

    def __init__(self):
        usage = '[options] PAGENAME ...'
        CommandBase.__init__(self, 'pull', 'add remote pages to repo', usage)

    def _do_command(self):
        self._die_if_no_init()
        self._api_setup()
        pages = []
        pages += self.args

        # Pull accepts pagenames or .wiki filenames; with no arguments it
        # pulls everything already in the working directory.
        converted_pages = []
        if pages == []:
            pages = self.metadir.working_dir_status().keys()
        for pagename in pages:
            if pagename.endswith('.wiki'):
                converted_pages.append(
                    mw.metadir.filename_to_pagename(pagename[:-5]))
            else:
                converted_pages.append(pagename)
        pages = converted_pages
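
        # Query pages in batches; 25 titles per request stays comfortably
        # under the API's usual 50-title limit for non-bot accounts.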
        for these_pages in [pages[i:i + 25] for i in range(0, len(pages), 25)]:
            data = {
                'action': 'query',
                'titles': '|'.join(these_pages),
                'prop': 'info|revisions',
                'rvprop': 'ids|flags|timestamp|user|comment|content',
            }
            response = self.api.call(data)['query']['pages']
            for pageid in response.keys():
                pagename = response[pageid]['title']

                # if the page already exists locally with uncommitted
                # modifications, warn instead of overwriting it
                status = self.metadir.working_dir_status()
                filename = mw.metadir.pagename_to_filename(pagename)
                if filename + '.wiki' in status and \
                        status[filename + '.wiki'] in ['M']:
                    print('%s: "%s" has uncommitted modifications '
                          '-- skipping!' % (self.me, pagename))
                    continue

                if 'missing' in response[pageid]:
                    print '%s: %s: page does not exist, file not created' % \
                            (self.me, pagename)
                    continue

                revids = [x['revid'] for x in response[pageid]['revisions']]
                revids.sort()
                self.metadir.pagedict_add(pagename, pageid, revids[-1])
                self.metadir.pages_add_rv(int(pageid),
                                          response[pageid]['revisions'][0])
                with open(os.path.join(self.metadir.root, filename + '.wiki'),
                          'w') as fd:
                    data = response[pageid]['revisions'][0]['*']
                    data = data.encode('utf-8')
                    fd.write(data)


class StatusCommand(CommandBase):

    def __init__(self):
        CommandBase.__init__(self, 'status', 'check repo status')
        self.shortcuts.append('st')

    def _do_command(self):
        self._die_if_no_init()
        status = self.metadir.working_dir_status()
        for filename in status:
            print '%s %s' % (status[filename], filename)


class DiffCommand(CommandBase):

    def __init__(self):
        CommandBase.__init__(self, 'diff', 'diff wiki to working directory')

    def _do_command(self):
        self._die_if_no_init()
        status = self.metadir.working_dir_status()
        for filename in status:
            if status[filename] == 'M':
                print self.metadir.diff_rv_to_working(
                        mw.metadir.filename_to_pagename(filename[:-5])),


class CommitCommand(CommandBase):

    def __init__(self):
        usage = '[FILES]'
        CommandBase.__init__(self, 'commit', 'commit changes to wiki', usage)
        self.shortcuts.append('ci')
        self.parser.add_option('-m', '--message', dest='edit_summary',
                               help='don\'t prompt for edit summary and '
                               'use this instead')
        self.parser.add_option('-b', '--bot', dest='bot', action='store_true',
                               help='mark actions as a bot (won\'t affect '
                               'anything if you don\'t have the bot right)',
                               default=False)

    def _do_command(self):
        self._die_if_no_init()
        self._api_setup()
        status = self.metadir.working_dir_status(files=self.args)
        nothing_to_commit = True
        for filename in status:
            print '%s %s' % (status[filename], filename)
            if status[filename] in ['M']:
                nothing_to_commit = False
        if nothing_to_commit:
            print 'nothing to commit'
            sys.exit()
        if self.options.edit_summary is None:
            print 'Edit summary:',
            edit_summary = raw_input()
        else:
            edit_summary = self.options.edit_summary
        for file_num, filename in enumerate(status):
            if status[filename] in ['M']:
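                # get an edit token plus the page's current revision
                # (intoken=edit is the older interface that predates
                # the meta=tokens module)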
                data = {
                    'action': 'query',
                    'prop': 'info|revisions',
                    'intoken': 'edit',
                    'titles': mw.metadir.filename_to_pagename(filename[:-5]),
                }
                response = self.api.call(data)
                pages = response['query']['pages']
                pageid = pages.keys()[0]
                revid = pages[pageid]['revisions'][0]['revid']
                awaitedrevid = \
                    self.metadir.pages_get_rv_list({'id': pageid})[0]
                if revid != awaitedrevid:
                    print 'warning: edit conflict detected on %s (%s -> %s) ' \
                            '-- skipping!' % (filename, awaitedrevid, revid)
                    continue
                edittoken = pages[pageid]['edittoken']
                # read the text from disk, keeping `filename` repo-relative so
                # it can still be mapped back to a page title below
                file_path = os.path.join(self.metadir.root, filename)
                text = codecs.open(file_path, 'r', 'utf-8').read()
                text = text.encode('utf-8')
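                # strip a single trailing newline (text editors usually add
                # one) and send an md5 of the text so the server can detect
                # a corrupted upload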
                if (len(text) != 0) and (text[-1] == '\n'):
                    text = text[:-1]
                md5 = hashlib.md5()
                md5.update(text)
                textmd5 = md5.hexdigest()
                data = {
                    'action': 'edit',
                    'title': mw.metadir.filename_to_pagename(filename[:-5]),
                    'token': edittoken,
                    'text': text,
                    'md5': textmd5,
                    'summary': edit_summary,
                }
                if self.options.bot:
                    data['bot'] = 'bot'
                response = self.api.call(data)
                if response['edit']['result'] == 'Success':
                    if 'nochange' in response['edit']:
                        print 'warning: no changes detected in %s - ' \
                                'skipping and removing ending LF' % filename
                        self.metadir.clean_page(filename[:-5])
                        continue
                    if response['edit']['oldrevid'] != revid:
                        print 'warning: edit conflict detected on %s -- ' \
                                'skipping!' % filename
                        continue
                    data = {
                        'action': 'query',
                        'revids': response['edit']['newrevid'],
                        'prop': 'info|revisions',
                        'rvprop': 'ids|flags|timestamp|user|comment|content',
                    }
                    response = self.api.call(data)['query']['pages']
                    self.metadir.pages_add_rv(int(pageid),
                                              response[pageid]['revisions'][0])
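                    # pause briefly between consecutive edits so we do not
                    # hammer the wiki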
                    if file_num != len(status) - 1:
                        print 'waiting 3s before processing the next file'
                        time.sleep(3)
                else:
                    print 'error: committing %s failed: %s' % \
                            (filename, response['edit']['result'])