Handle revisions that lack content (no '*' key in the revision data)
[mw] / src / mw / metadir.py
index 22b9c5cf0049144092ecc6c0b2d1aee4e7365020..f02422330b0edd816687b354de146ec1ebd3eb78 100644 (file)
@@ -20,9 +20,10 @@ import ConfigParser
 import json
 import os
 import sys
-import time
+
 
 class Metadir(object):
+
     def __init__(self):
         self.me = os.path.basename(sys.argv[0])
         root = os.getcwd()
@@ -30,7 +31,7 @@ class Metadir(object):
             if '.mw' in os.listdir(root):
                 self.root = root
                 break
-            (head, tail) = os.path.split(root)
+            head = os.path.split(root)[0]
             if head == root:
                 self.root = os.getcwd()
                 break
@@ -46,45 +47,78 @@ class Metadir(object):
 
     def create(self, api_url):
         # create the directory
-        try:
-            os.mkdir(self.location, 0755)
-        except OSError, e:
+        if os.path.isdir(self.location):
             print '%s: you are already in a mw repo' % self.me
             sys.exit(1)
+        else:
+            os.mkdir(self.location, 0755)
+        # metadir versioning
+        fd = file(os.path.join(self.location, 'version'), 'w')
+        fd.write('1') # XXX THIS API VERSION NOT LOCKED IN YET
+        fd.close()
         # create config
         self.config = ConfigParser.RawConfigParser()
         self.config.add_section('remote')
         self.config.set('remote', 'api_url', api_url)
         with open(self.config_loc, 'wb') as config_file:
             self.config.write(config_file)
-        # create cache
+        # create cache/
         os.mkdir(os.path.join(self.location, 'cache'))
-        # create cache/page
-        fd = file(os.path.join(self.location, 'cache', 'page'), 'w')
-        fd.write(json.dumps({}))
-        # create cache/rv
-        fd = file(os.path.join(self.location, 'cache', 'rv'), 'w')
+        # create cache/pagedict
+        fd = file(os.path.join(self.location, 'cache', 'pagedict'), 'w')
         fd.write(json.dumps({}))
+        fd.close()
+        # create cache/pages/
+        os.mkdir(os.path.join(self.location, 'cache', 'pages'), 0755)
 
-    def add_page_info(self, pageid, pagename, rvids):
-        lulz = file(os.path.join(self.location, 'cache', 'page'), 'r')
-        conf = json.loads(lulz.read())
-        conf[pageid] = {'name': pagename, 'rv': rvids}
-        fd = file(os.path.join(self.location, 'cache', 'page'), 'w')
-        fd.write(json.dumps(conf))
+    def pagedict_add(self, pagename, pageid, currentrv):
+        fd = file(os.path.join(self.location, 'cache', 'pagedict'), 'r+')
+        pagedict = json.loads(fd.read())
+        pagedict[pagename] = {'id': int(pageid), 'currentrv': int(currentrv)}
+        fd.seek(0)
+        fd.write(json.dumps(pagedict))
+        fd.truncate()
+        fd.close()
 
-    def add_rv_info(self, rv):
-        lulz = file(os.path.join(self.location, 'cache', 'rv'), 'r')
-        conf = json.loads(lulz.read())
+    def get_pageid_from_pagename(self, pagename):
+        fd = file(os.path.join(self.location, 'cache', 'pagedict'), 'r')
+        pagedict = json.loads(fd.read())
+        if pagename in pagedict.keys():
+            return pagedict[pagename]
+        else:
+            return None
+
+    def pages_add_rv(self, pageid, rv):
+        pagefile = os.path.join(self.location, 'cache', 'pages', str(pageid))
+        fd = file(pagefile, 'w+')
+        pagedata_raw = fd.read()
+        if pagedata_raw == '':
+            pagedata = {}
+        else:
+            pagedata = json.loads(pagedata_raw)
         rvid = int(rv['revid'])
-        conf[rvid] = {
-                'user': rv['user'], 'timestamp': rv['timestamp'],
-                'content': rv['*']
+        pagedata[rvid] = {
+                'user': rv['user'], 'timestamp': rv['timestamp']
         }
-        conf[rvid]['minor'] = 'minor' in rv
-        if 'comment' in rv:
-            conf[rvid]['comment'] = rv['comment']
-        else:
-            conf[rvid]['comment'] = None
-        fd = file(os.path.join(self.location, 'cache', 'rv'), 'w')
-        fd.write(json.dumps(conf))
+        if '*' in rv.keys():
+            pagedata[rvid]['content'] = rv['*']
+        fd.seek(0)
+        fd.write(json.dumps(pagedata))
+        fd.truncate()
+        fd.close()
+
+    def pages_get_rv_list(self, pageid):
+        pagefile = os.path.join(self.location, 'cache', 'pages',
+                                str(pageid['id']))
+        fd = file(pagefile, 'r')
+        pagedata = json.loads(fd.read())
+        rvs = [int(x) for x in pagedata.keys()]
+        rvs.sort()
+        return rvs
+
+    def pages_get_rv(self, pageid, rvid):
+        pagefile = os.path.join(self.location, 'cache', 'pages',
+                                str(pageid['id']))
+        fd = file(pagefile, 'r')
+        pagedata = json.loads(fd.read())
+        return pagedata[str(rvid)]

Benjamin Mako Hill || Want to submit a patch?