#!/usr/bin/env python
# coding=utf-8
import encoding_fix
import requests
# get_article_revisions is a function that takes an article title in
# create a base url for the api and then a normal url which is initially
# just a copy of it
# The following line is what the requests call is doing, basically.
- # "http://en.wikipedia.org/w/api.php/?action=query&titles={0}&prop=revisions&rvprop=flags|timestamp|user|size|ids&rvlimit=500&format=json".format(title)
+ # "http://en.wikipedia.org/w/api.php/?action=query&titles={0}&prop=revisions&rvprop=flags|timestamp|user|size|ids&rvlimit=500&format=json&continue=".format(title)
wp_api_url = "http://en.wikipedia.org/w/api.php/"
parameters = {'action' : 'query',
# for every revision, first we do some cleaning up
for rev in query_revisions:
+ #print(rev)
# let's continue/skip this revision if the user is hidden
if "userhidden" in rev:
continue
#
# The following requests call basically does the same thing as this string:
# "http://tools.wmflabs.org/catscan2/catscan2.php?depth=10&categories={0}&doit=1&format=json".format(category)
-url_catscan = "http://tools.wmflabs.org/catscan2/catscan2.php"
+url_catscan = "http://tools.wmflabs.org/catscan3/catscan2.php"
parameters = {'depth' : 10,
'categories' : category,
'format' : 'json',
'doit' : 1}
+# r = requests.get("http://tools.wmflabs.org/catscan2/catscan2.php?depth=10&categories=Harry Potter&doit=1&format=json"
+
r = requests.get(url_catscan, params=parameters)
articles_json = r.json()
-articles = articles_json["*"][0]["a"]["*"]
+articles = articles_json["*"][0]["*"]
# open a file to write all the output
output = open("hp_wiki.tsv", "w", encoding="utf-8")
# first grab the article's title
title = article["a"]["title"]
+ print(title)
# get the list of revisions from our function and then iterate through it,
# printing it to our output file