--- /dev/null
+This script downloads the latest version of the mapfeed from
+SWOHOA[1] and then converts it into an SQLite database.
+
+1. Create the SQLite3 database
+
+Use a command like:
+
+$ sqlite3 ./swohoa.db
+
+And then, once in SQLite, simply exit like:
+
+sqlite> .exit
+
+2. Export from SQLite3
+
+sqlite3 ./swohoa.db .dump > swohoa.sql
+
+[1] http://shewentofherownaccord.com/mapfeed
+
--- /dev/null
+#!/usr/bin/env python
+
+import re
+import urllib2
+import json
+import sqlite3 as lite
+import sys
+
+from pprint import pprint
+
+# download the raw data from the website
+raw_data = urllib2.urlopen("http://shewentofherownaccord.com/mapfeed").read()
+
+# fix some errors in the json
+raw_data = raw_data.replace('\r\n', '')
+raw_data = raw_data.replace('""', '"')
+raw_data = re.sub(';$', '', raw_data)
+
+print raw_data
+sys.exit()
+
+data = json.loads(raw_data)
+
+# interate through the data from the json website and create a list of
+# data we want to store
+places = []
+entries = data['places']
+# interate through the list of things and import it
+for entry in entries:
+ joke_id = entry['entries'][0]['id']
+ joke = entry['entries'][0]['text']
+ (coord_lat, coord_long) = entry['location']['point']
+ location = entry['location']['name']
+ url = "http://shewentofherownaccord.com/joke/%s" % joke_id
+
+ place_values = (joke_id, joke, coord_lat, coord_long, location, url)
+ places.append(place_values)
+
+
+# try to connect to the database and make sure that things work
+con = None
+try:
+ con = lite.connect('swohoa.db')
+
+ cur = con.cursor()
+ cur.execute('SELECT SQLITE_VERSION()')
+ data = cur.fetchone()
+ print "SQLite version: %s" % data
+
+except lite.Error, e:
+ print "Error %s:" % e.args[0]
+ sys.exit(1)
+
+cur = con.cursor()
+cur.execute("DROP TABLE IF EXISTS jokes")
+cur.execute("CREATE TABLE jokes(id INT, joke TEXT, lat FLOAT, long FLOAT, location TEXT, url TEXT)")
+
+cur.executemany("INSERT INTO jokes VALUES(?, ?, ?, ?, ?, ?)", places)
+con.commit()
+
+if con:
+ con.close()
+