User:Tim1357/Source/Googlemaps.py
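# Googlemaps.py: adds {{Coord|...|display=title}} to articles that carry a
# "coordinates missing" maintenance template, using the Google Maps Geocoding
# API to look up each page title (optionally narrowed with -location:).
#
# Example invocation (a sketch only; page selection uses the standard
# pywikipedia generator arguments handled by GeneratorFactory, e.g. -cat:,
# -start:, -file:; the category name below is just a placeholder):
#   python googlemaps.py -cat:"Example category" -location:"France"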

from wikipedia import Page, getSite,handleArgs,showDiff,inputChoice
site = getSite()
del getSite
from urllib import quote
from urllib2 import urlopen
from time import sleep
from pagegenerators import GeneratorFactory,PreloadingGenerator
from re import sub,search
from cosmetic_changes import CosmeticChangesToolkit
#from reflinks import DuplicateReferences


# cosmetic_changes' standard cleanup, applied to the new text just before saving
general_fixes = CosmeticChangesToolkit(site).change
del CosmeticChangesToolkit


try:
	from json import loads
except ImportError:
	try:
		from simplejson import loads
	except ImportError:
		print 'Danger, no JSON decoder available, using eval()'
		loads = eval


def getcoord(page, location=None):
	"""Geocode the page title (optionally qualified by a location hint) with
	the Google Maps Geocoding API.  Returns (lat, lng) as strings, or False
	on a partial match or when no result is found."""
	sleep(.5)  # throttle requests a little
	print 'Getting results for [[%s]]'%page.title()
	if location:
		print '\t in %s'%location
	url = 'http://maps.google.com/maps/api/geocode/json?address=%s%s&sensor=false' % (
		quote(page.title().encode('utf8')), quote(' '+location) if location else '')
	results = urlopen(url).read()
	if "OVER_QUERY_LIMIT" in results:
		print "Over Query Limit"
		raise SystemExit
	matches = loads(results)['results']
	if not matches:
		print '\tNo results...'
		return False
	match = matches[0]
	if 'partial_match' in match:
		print '\tPartial match...'
		return False
	coords = match['geometry']['location']
	return str(coords['lat']), str(coords['lng'])


class Bot:
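	"""Walk the supplied page generator and, for each page, look up coordinates
	and replace the "coordinates missing" template with a {{Coord}} template."""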
	def __init__(self, gen, debug=False):
		self.gen = gen
		self.all = False    # set to True once the user answers 'All' at the prompt
		self.debug = debug  # if True, show the diff but never save
	def run(self, location):
		for page in self.gen:
			self.treat(page,location)
	def treat(self, page, location):
		try:
			text = page.get()
		except Exception, e:
			print 'Error getting [[%s]]: %s' % (page.title(), str(e))
			return
		if not location:
			# use the region named in {{Coord missing|<region>}} as a location hint
			r = search('{{\s?[Cc]oord[_ ][Mm]issing\|(.*?)}}', text)
			if r:
				location = r.group(1).strip()
		try:
			l = getcoord(page,location)
		except Exception, e:
			print '\tError: %s'%str(e)
			return
		if not l:
			return
		lat, lng = l
		
		text = sub('{{([cC]oords missing|[mM]issing coords?|[nN]o geolocation|[cC]oord missing).*?}}','',text)
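		# Build a {{Coord}} with display=title and place it just before the first
		# category or interwiki link, or at the end of the page if none is found.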
		template = '{{Coord|%s|%s|display=title}}' % (lat, lng)
		try:
			g = search('(\[\[Category\:.+|\[\[[a-z][a-z]\:.+)', text).start()
			text = text[:g] + template + '\n' + text[g:]
		except AttributeError:
			text = text + '\n' + template
		comment = 'Adding Coordinates.'
		showDiff(page.get(),text)
		if not self.all:
			choice = inputChoice(u'Do you want to accept these changes?', ['Yes', 'No','All'], ['y', 'n','a'], 'n')
			if choice == 'a':
				self.all=True
			if choice not in ('a','y'):
				return
		
		if self.debug:
			print 'Debug mode: not saving [[%s]]' % page.title()
			return
		page.put(general_fixes(text), comment=comment)


def main():
    # This factory is responsible for processing command line arguments
    # that are also used by other scripts and that determine on which pages
    # to work on.
    genFactory = GeneratorFactory()
    # The generator gives the pages that should be worked upon.
    gen = None
    # This temporary array is used to read the page title if one single
    # page to work on is specified by the arguments.
    pageTitleParts = []
    # If debug is True, doesn't do any real changes, but only show
    # what would have been changed.
    debug = False
    location = None
    # Parse command line arguments
    for arg in handleArgs():
        if arg.startswith("-debug"):
            debug = True
        elif arg.startswith('-location:'):
            location = arg[len('-location:'):]
        else:
            # check if a standard argument like
            # -start:XYZ or -ref:Asdf was given.
            if not genFactory.handleArg(arg):
                pageTitleParts.append(arg)


    if pageTitleParts:
        # We will only work on a single page.
        pageTitle = ' '.join(pageTitleParts)
        page = Page(site, pageTitle)
        gen = iter([page])


    if not gen:
        gen = genFactory.getCombinedGenerator()
    if gen:
        # The preloading generator is responsible for downloading multiple
        # pages from the wiki simultaneously.
        gen = PreloadingGenerator(gen)
        bot = Bot(gen, debug)
        bot.run(location)


if __name__ == '__main__':
    main()