User:AllyUnion/VFD bot code

This is a copy of all the files that are run for User:VFD Bot. This page and its contents are copyrighted under the Creative Commons Attribution ShareAlike 1.0 License: http://creativecommons.org/licenses/by-sa/1.0/. The information here is not licensed under the GNU Free Documentation License. The purpose of hosting this code is to allow public viewing of the code being run on Wikipedia. -- AllyUnion (talk) 23:06, 7 Apr 2005 (UTC)
 * Should I happen to leave Wikipedia, or become unable to continue running User:VFD Bot, I politely ask that you email me before using my code. -- AllyUnion (talk) 23:08, 7 Apr 2005 (UTC)

en-wp-vfd-list.py

 * 1) !/usr/bin/python2.3
 * 2) Author: Jason Y. Lee (AllyUnion)
 * 3) Purpose: Automatically update a VFD List on User:AllyUnion/VFD List
 * 4) 	  every hour.  Keeps 7 days, presumes for exactly 7 sections
 * 5) 	  on specified page.  (Seven days, seven sections)
 * 6) 	   To be run by a cron job.
 * 7) 	  Also removes the top section once the next UTC day comes


 * 1) Revision 3.07

import wikipedia, config import os import commands import sys import datetime

if __name__ == "__main__": utc = datetime.datetime.utcnow # Get the dates
 * 1) 	yyyy = int(datetime.datetime.utcnow.strftime('%Y'))
 * 2) 	mm = int(datetime.datetime.utcnow.strftime('%m'))
 * 3) 	dd = int(datetime.datetime.utcnow.strftime('%d'))

# Today's date, exactly at 0000 hours today = utc.replace(hour=0,minute=0,second=0,microsecond=0)
 * 1) 	today = datetime.datetime(yyyy, mm, dd, 0, 0, 0, 0)

# Today's date, exactly at 0100 hours onehour = utc.replace(hour=1,minute=0,second=0,microsecond=0)
 * 1) 	onehour = datetime.datetime(yyyy, mm, dd, 1, 0, 0, 0)

# Tomorrow's date, exactly at 0000 hours tomorrow = today + datetime.timedelta(1)

# Yesterday's date, exactly at 0000 hours yesterday = today - datetime.timedelta(1)

# Seven days prior to today's date at 0000 hours sevendaysago = today - datetime.timedelta(7)

# Check the time now utctime = datetime.datetime.utcnow# - datetime.timedelta(0, 14400)

# Wikipedia Variable Setup # VFD Page log name vfdlog = "Wikipedia:Votes_for_deletion/Log/" # Which site, as specified in user-config.py	mysite = wikipedia.getSite

# Page: User:AllyUnion/VFD List and sections oldvpage = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List', True, True, False) section1 = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List&section=1', True, True, False) section2 = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List&section=2', True, True, False) section3 = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List&section=3', True, True, False) section4 = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List&section=4', True, True, False) section5 = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List&section=5', True, True, False) section6 = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List&section=6', True, True, False) section7 = wikipedia.getPage(mysite, 'User:AllyUnion/VFD List&section=7', True, True, False)

# Top heading notice = str('This is a list of VFD discussions, updated hourly. \n\n').encode('iso-8859-1')

comment = 'Hourly Automatic Update of VFD List: '

# Newline newline = '\n'

# Temporary Log File logfile = 'tmp/vfd.log'

# Temporary Old Log File difffile = 'tmp/diff-vfd.log'

# Temporary Parse File parsefile = 'tmp/vfd-parse.log'

# Temporary Yesterday Parse File yparsefile = 'tmp/vfd-yparse.log'

# Grep command grepcmd = ' | grep -v \'\''

# Perl command to parse file perlcmd = ' | perl -pi -e \'s//]]/g\''

# Diff command diffcmd = 'diff -u ' + difffile + ' ' + logfile + ' | grep ^+ | grep \'* \[\[\' | perl -pi -e \'s/\*.\[\[Wikipedia:Votes.for.deletion\// /g\' | perl -pi -e \'s/\]\]//g\''

diffcmd2 = 'diff -u ' + difffile + ' ' + logfile + ' | grep ^- | grep \'* \[\[\'| perl -pi -e \'s/\*.\[\[Wikipedia:Votes.for.deletion\// /g\' | perl -pi -e \'s/\]\]//g\''

# Login file, full path and filename
 * 1) 	loginfile = 'pywikipediabot/login.py'

log = file(difffile, 'w') log.write(oldvpage.encode('iso-8859-1')) log.close

# today <= utctime <= onehour if (today <= utctime <= onehour): print 'Operation: Remove top, add new day' # Perform top removal procedure # Get yesterday's VFD and convert # Get today's VFD and convert # Replace sections 6 and 7

# Open log for writing log = file(logfile, 'w')

# Write notice log.write(notice.encode('iso-8859-1'))

# Plus newline log.write(newline.encode('iso-8859-1'))

# Write sections 2, 3, 4, 5, 6 with a newline between each one # Since, we removed section 1, sections 2-6 become our new sections 1-5 log.write(section2.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section3.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section4.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section5.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section6.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Get the VFD page from yesterday vfdpage = vfdlog + str(yesterday.strftime('%Y_%B_')) + str(int(yesterday.strftime('%d'))) toparse = wikipedia.getPage(mysite, vfdpage, True, True, False)

# Write the VFD yesterday to a temporary parse log parselog = file(yparsefile, 'w') parselog.write(toparse.encode('iso-8859-1')) parselog.close

# Yesterday's VFD, parsed into a list yparsecmd = 'cat ' + yparsefile + grepcmd + perlcmd yparsed = commands.getoutput(yparsecmd) yparsed = yparsed.decode('iso-8859-1')

# Link to VFD page # Long Date: example: 2005_January_1 ydate1 = yesterday.strftime('%Y_%B_') + str(int(yesterday.strftime('%d')))

# Short Date: example: January 1 ydate2 = yesterday.strftime('%B') + ' ' + str(int(yesterday.strftime('%d')))

# Give the page name yfind1 =  + ydate2 +  yfind2 = '[[Wikipedia:Votes for deletion/Log/' + ydate1 + '|' + ydate2 + ']]'

# Section space remove yparsed = yparsed.replace('== ', '==') yparsed = yparsed.replace(' ==', '==')

# First, replace it once, so a link is established yparsed = yparsed.replace('==' + ydate2 + '==', '==' + yfind1 + '==', 1)

# Second, if it has been done before, this will fix (hopefully), the internal link to the proper form yparsed = yparsed.replace(yfind2, yfind1, 1)

yplines = yparsed.splitlines; ypnum = yplines.index('==' + yfind1 + '==')

log.write(yplines[ypnum].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

for x in range(ypnum-1): log.write(yplines[x].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

for x in range(len(yplines) - ypnum - 1): x = x + ypnum + 1 log.write(yplines[x].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Write yesterday's stuff to the log log.write(newline.encode('iso-8859-1'))
 * 1) 		log.write(yparsed.encode('iso-8859-1'))

# Get the VFD page for today vfdpage = vfdlog + str(today.strftime('%Y_%B_')) + str(int(today.strftime('%d'))) toparse = wikipedia.getPage(mysite, vfdpage, True, True, False)

# Write the VFD page for today to a temporary parse log parselog = file(parsefile, 'w') parselog.write(toparse.encode('iso-8859-1')) parselog.close

# Today's VFD, parsed into a list parsecmd = 'cat ' + parsefile + grepcmd + perlcmd parsed = commands.getoutput(parsecmd) parsed = parsed.decode('iso-8859-1')

# Link to VFD page

# Long Date: example: 2005_January_1 date1 = today.strftime('%Y_%B_') + str(int(today.strftime('%d')))

# Short Date: example: January 1 date2 = today.strftime('%B') + ' ' + str(int(today.strftime('%d')))

# Give the page name find1 =  + date2 +  find2 = '[[Wikipedia:Votes for deletion/Log/' + date1 + '|' + date2 + ']]'

# Section space remove parsed = parsed.replace('== ', '==') parsed = parsed.replace(' ==', '==')

# First, replace it once, so a link is established parsed = parsed.replace('==' + date2 + '==', '==' + find1 + '==', 1)

# Second, if it has been done before, this will fix (hopefully), the internal link to the proper form parsed = parsed.replace(find2, find1, 1)

plines = parsed.splitlines; pnum = plines.index('==' + find1 + '==')

log.write(plines[pnum].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

for x in range(pnum-1): log.write(plines[x].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

for x in range(len(plines) - pnum - 1): x = x + pnum + 1 log.write(plines[x].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Write today's stuff to the log log.write(newline.encode('iso-8859-1'))
 * 1) 		log.write(parsed.encode('iso-8859-1'))

# Close the file, making sure all the contents are written to the log log.close

# User:Mozzerati Feature request diffcomment = commands.getoutput(diffcmd) diffcomment = diffcomment.decode('iso-8859-1') difflist = diffcomment.splitlines

diffcomment2 = commands.getoutput(diffcmd2) diffcomment2 = diffcomment2.decode('iso-8859-1') difflist2 = diffcomment2.splitlines

for check in difflist: for checking in difflist2: if (checking[1:] == check[1:]): difflist.remove(check) for x in range(len(difflist) - 1): comment += difflist[x] + ', ' comment += difflist[x+1] + '.'
 * 1) 		for check in difflist:
 * 2) 			comment += check + ', '
 * 3) 		comment[-2:] = '.'

# Reopen the log file log = file(logfile, 'r')

# Read the whole log into a variable post = log.read

# Close log file log.close

# Log in to Wikipedia
 * 1) 		cmd = 'python2.3 ' + loginfile
 * 2) 		os.system(cmd)

page = wikipedia.PageLink(mysite, 'User:AllyUnion/VFD List') # Post to the Wikipedia page.put(post, comment)

cmd = 'rm -f ' + logfile + ' ' + parsefile + ' ' + yparsefile + ' ' + difffile os.system(cmd)

# onehour < utctime <= tomorrow elif (onehour < utctime <= tomorrow): print 'Operation: Normal - Update last section' # Get today's VFD and convert # Replace section 7

# Open log for writing log = file(logfile, 'w')

# Write notice log.write(notice.encode('iso-8859-1'))

# Plus newline log.write(newline.encode('iso-8859-1'))

# Write sections 1, 2, 3, 4, 5, 6 with a newline between each one log.write(section1.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section2.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section3.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section4.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section5.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) log.write(section6.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Get the VFD page for today vfdpage = vfdlog + str(today.strftime('%Y_%B_')) + str(int(today.strftime('%d'))) toparse = wikipedia.getPage(mysite, vfdpage, True, True, False)

# Write the VFD page for today to a temporary parse log parselog = file(parsefile, 'w') parselog.write(toparse.encode('iso-8859-1')) parselog.close

# Today's VFD, parsed into a list parsecmd = 'cat ' + parsefile + grepcmd + perlcmd parsed = commands.getoutput(parsecmd) parsed = parsed.decode('iso-8859-1')

# Link to VFD page

# Long Date: example: 2005_January_1 date1 = today.strftime('%Y_%B_') + str(int(today.strftime('%d')))

# Short Date: example: January 1 date2 = today.strftime('%B') + ' ' + str(int(today.strftime('%d')))

# Give the page name find1 =  + date2 +  find2 = '[[Wikipedia:Votes for deletion/Log/' + date1 + '|' + date2 + ']]'

# Section space remove parsed = parsed.replace('== ', '==') parsed = parsed.replace(' ==', '==') # First, replace it once, so a link is established parsed = parsed.replace('==' + date2 + '==', '==' + find1 + '==', 1)

# Second, if it has been done before, this will fix (hopefully), the internal link to the proper form parsed = parsed.replace(find2, find1, 1)

plines = parsed.splitlines; pnum = plines.index('==' + find1 + '==')

log.write(plines[pnum].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

for x in range(pnum-1): log.write(plines[x].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

for x in range(len(plines) - pnum - 1): x = x + pnum + 1 log.write(plines[x].encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Write today's stuff to the log log.write(newline.encode('iso-8859-1'))
 * 1) 		log.write(parsed.encode('iso-8859-1'))

# Close the file, making sure all the contents are written to the log log.close

# User:Mozzerati Feature request diffcomment = commands.getoutput(diffcmd) diffcomment = diffcomment.decode('iso-8859-1') difflist = diffcomment.splitlines

diffcomment2 = commands.getoutput(diffcmd2) diffcomment2 = diffcomment2.decode('iso-8859-1') difflist2 = diffcomment2.splitlines

for check in difflist: for checking in difflist2: if (checking[1:] == check[1:]): difflist.remove(check) for x in range(len(difflist) - 1): comment += difflist[x] + ', ' comment += difflist[x+1] + '.'
 * 1) 		comment[-2:] = '.'

# Reopen the log file log = file(logfile, 'r')

# Read the whole log into a variable post = log.read

# Close log file log.close

# Log in to Wikipedia
 * 1) 		cmd = 'python2.3 ' + loginfile
 * 2) 		os.system(cmd)

page = wikipedia.PageLink(mysite, 'User:AllyUnion/VFD List') # Post to the Wikipedia page.put(post, comment)

cmd = 'rm -f ' + logfile + ' ' + parsefile + ' ' + difffile os.system(cmd)

# Possibility is that utctime == tomorrow, but should never happen else: sys.exit(1)

sys.exit(0)

en-wp-vfd-list-fix.py

 * 1) !/usr/bin/python2.3
 * 2) Author: Jason Y. Lee (AllyUnion)
 * 3) Purpose: Automatically update a VFD List on User:AllyUnion/VFD List
 * 4) 	  every hour.  Keeps 7 days, presumes for exactly 7 sections
 * 5) 	  on specified page.  (Seven days, seven sections)
 * 6) 	   To be run by a cron job.
 * 7) 	  Also removes the top section once the next UTC day comes

import wikipedia, config import os import commands import sys import datetime

def vfdsection(vfddate): vfdslogfile = 'tmp/vfdsection.log' # Grep command # VFD Page log name vfdlog = "Wikipedia:Votes_for_deletion/Log/" # Which site, as specified in user-config.py	mysite = wikipedia.getSite

# Grep command to parse file grepcmd = ' | grep -v \'\''

# Perl command to parse file perlcmd = ' | perl -pi -e \'s//]]/g\''

vfdpage = vfdlog + str(vfddate.strftime('%Y_%B_')) + str(int(vfddate.strftime('%d')))

fixparse = wikipedia.getPage(mysite, vfdpage, True, True, False)

vfdslog = file(vfdslogfile, 'w') vfdslog.write(fixparse.encode('iso-8859-1')) vfdslog.close

vfdscmd = 'cat ' + vfdslogfile + grepcmd + perlcmd vfdsparsed = commands.getoutput(vfdscmd) vfdsparsed = vfdsparsed.decode('iso-8859-1')

os.system('rm -f ' + vfdslogfile)

vdate1 = vfddate.strftime('%Y_%B_') + str(int(vfddate.strftime('%d'))) vdate2 = vfddate.strftime('%B') + ' ' + str(int(vfddate.strftime('%d'))) vfind1 =  + vdate2 +  vfind2 = '[[Wikipedia:Votes for deletion/Log/' + vdate1 + '|' + vdate2 + ']]' vfdsparsed = vfdsparsed.replace(vdate2, vfind1, 1) vfdsparsed = vfdsparsed.replace(vfind2, vfind1, 1)

return vfdsparsed

if __name__ == "__main__": # Get the dates utc = datetime.datetime.utcnow
 * 1) 	yyyy = int(datetime.datetime.utcnow.strftime('%Y'))
 * 2) 	mm = int(datetime.datetime.utcnow.strftime('%m'))
 * 3) 	dd = int(datetime.datetime.utcnow.strftime('%d'))

# Today's date, exactly at 0000 hours today = utc.replace(hour=0,minute=0,second=0,microsecond=0)

# Today's date, exactly at 0100 hours onehour = utc.replace(hour=1,minute=0,second=0,microsecond=0)
 * 1) 	onehour = datetime.datetime(yyyy, mm, dd, 1, 0, 0, 0)

# Tomorrow's date, exactly at 0000 hours tomorrow = today + datetime.timedelta(1)

# Yesterday's date, exactly at 0000 hours yesterday = today - datetime.timedelta(1)

# Seven days prior to today's date at 0000 hours sevendaysago = today - datetime.timedelta(6)

# Check the time now utctime = datetime.datetime.utcnow

# Wikipedia Variable Setup # VFD Page log name vfdlog = "Wikipedia:Votes_for_deletion/Log/" # Which site, as specified in user-config.py	mysite = wikipedia.getSite

# Page: User:AllyUnion/VFD List and sections
 * 1) 	page = wikipedia.PageLink(mysite, 'User:AllyUnion/VFD List')

# Top heading notice = str('This is a list of VFD discussions, updated hourly. \n\n').encode('iso-8859-1')

# Newline newline = '\n'

# Temporary Log File logfile = 'tmp/vfd.log'

# Temporary Parse File parsefile = 'tmp/vfd-parse.log'

# Temporary Yesterday Parse File yparsefile = 'tmp/vfd-yparse.log'

# Grep command grepcmd = ' | grep -v \'\''

# Perl command to parse file perlcmd = ' | perl -pi -e \'s//]]/g\''

# Login file, full path and filename loginfile = 'pywikipediabot/login.py'

# today <= utctime <= onehour if (today <= utctime <= onehour): # Perform top removal procedure # Get yesterday's VFD and convert # Get today's VFD and convert # Replace sections 6 and 7

# Open log for writing log = file(logfile, 'w')

# Write notice log.write(notice.encode('iso-8859-1'))

# Plus newline log.write(newline.encode('iso-8859-1'))

# Write sections 2, 3, 4, 5, 6 with a newline between each one # Since, we removed section 1, sections 2-6 become our new sections 1-5 datecounter = sevendaysago + datetime.timedelta(2) while datecounter < yesterday: fixsection = vfdsection(datecounter) log.write(fixsection.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) datecounter = datecounter + datetime.timedelta(1)

# Get the VFD page from yesterday vfdpage = vfdlog + str(yesterday.strftime('%Y_%B_')) + str(int(yesterday.strftime('%d'))) toparse = wikipedia.getPage(mysite, vfdpage, True, True, False)

# Write the VFD yesterday to a temporary parse log parselog = file(yparsefile, 'w') parselog.write(toparse.encode('iso-8859-1')) parselog.close

# Yesterday's VFD, parsed into a list yparsecmd = 'cat ' + yparsefile + grepcmd + perlcmd yparsed = commands.getoutput(yparsecmd) yparsed = yparsed.decode('iso-8859-1')

# Link to VFD page # Long Date: example: 2005_January_1 ydate1 = yesterday.strftime('%Y_%B_') + str(int(yesterday.strftime('%d')))

# Short Date: example: January 1 ydate2 = yesterday.strftime('%B') + ' ' + str(int(yesterday.strftime('%d')))

# Give the page name yfind1 =  + ydate2 +  yfind2 = '[[Wikipedia:Votes for deletion/Log/' + ydate1 + '|' + ydate2 + ']]'

# Section space remove yparsed = yparsed.replace('== ', '==') yparsed = yparsed.replace(' ==', '==')

# First, replace it once, so a link is established yparsed = yparsed.replace(ydate2, yfind1, 1)

# Second, if it has been done before, this will fix (hopefully), the internal link to the proper form yparsed = yparsed.replace(yfind2, yfind1, 1)

# Write yesterday's stuff to the log log.write(yparsed.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Get the VFD page for today vfdpage = vfdlog + str(today.strftime('%Y_%B_')) + str(int(today.strftime('%d'))) toparse = wikipedia.getPage(mysite, vfdpage, True, True, False)

# Write the VFD page for today to a temporary parse log parselog = file(parsefile, 'w') parselog.write(toparse.encode('iso-8859-1')) parselog.close

# Today's VFD, parsed into a list parsecmd = 'cat ' + parsefile + grepcmd + perlcmd parsed = commands.getoutput(parsecmd) parsed = parsed.decode('iso-8859-1')

# Link to VFD page

# Long Date: example: 2005_January_1 date1 = today.strftime('%Y_%B_') + str(int(today.strftime('%d')))

# Short Date: example: January 1 date2 = today.strftime('%B') + ' ' + str(int(today.strftime('%d')))

# Give the page name find1 =  + date2 +  find2 = '[[Wikipedia:Votes for deletion/Log/' + date1 + '|' + date2 + ']]'

# Section space remove parsed = parsed.replace('== ', '==') parsed = parsed.replace(' ==', '==')

# First, replace it once, so a link is established parsed = parsed.replace(date2, find1, 1)

# Second, if it has been done before, this will fix (hopefully), the internal link to the proper form parsed = parsed.replace(find2, find1, 1)

# Write today's stuff to the log log.write(parsed.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Close the file, making sure all the contents are written to the log log.close

# Reopen the log file log = file(logfile, 'r')

# Read the whole log into a variable post = log.read

# Close log file log.close

# Log in to Wikipedia
 * 1) 		cmd = 'python2.3 ' + loginfile
 * 2) 		os.system(cmd)

page = wikipedia.PageLink(mysite, 'User:AllyUnion/VFD List') # Post to the Wikipedia page.put(post, 'Fixing VFD List...')

cmd = 'rm -f ' + logfile + ' ' + parsefile + ' ' + yparsefile os.system(cmd)

# onehour < utctime <= tomorrow elif (onehour < utctime < tomorrow): # Get today's VFD and convert # Replace section 7

# Open log for writing log = file(logfile, 'w')

# Write notice log.write(notice.encode('iso-8859-1'))

# Plus newline log.write(newline.encode('iso-8859-1'))

# Write sections 1, 2, 3, 4, 5, 6 with a newline between each one datecounter = sevendaysago# - datetime.timedelta(1) while datecounter < today: fixsection = vfdsection(datecounter) log.write(fixsection.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1')) datecounter = datecounter + datetime.timedelta(1)

# Get the VFD page for today vfdpage = vfdlog + str(today.strftime('%Y_%B_')) + str(int(today.strftime('%d'))) toparse = wikipedia.getPage(mysite, vfdpage, True, True, False)

# Write the VFD page for today to a temporary parse log parselog = file(parsefile, 'w') parselog.write(toparse.encode('iso-8859-1')) parselog.close

# Today's VFD, parsed into a list parsecmd = 'cat ' + parsefile + grepcmd + perlcmd parsed = commands.getoutput(parsecmd) parsed = parsed.decode('iso-8859-1')

# Link to VFD page

# Long Date: example: 2005_January_1 date1 = today.strftime('%Y_%B_') + str(int(today.strftime('%d')))

# Short Date: example: January 1 date2 = today.strftime('%B') + ' ' + str(int(today.strftime('%d')))

# Give the page name find1 =  + date2 +  find2 = '[[Wikipedia:Votes for deletion/Log/' + date1 + '|' + date2 + ']]'

# Section space remove parsed = parsed.replace('== ', '==') parsed = parsed.replace(' ==', '==')

# First, replace it once, so a link is established parsed = parsed.replace(date2, find1, 1)

# Second, if it has been done before, this will fix (hopefully), the internal link to the proper form parsed = parsed.replace(find2, find1, 1)

# Write today's stuff to the log log.write(parsed.encode('iso-8859-1')) log.write(newline.encode('iso-8859-1'))

# Close the file, making sure all the contents are written to the log log.close

# Reopen the log file log = file(logfile, 'r')

# Read the whole log into a variable post = log.read

# Close log file log.close

# Log in to Wikipedia
 * 1) 		cmd = 'python2.3 ' + loginfile
 * 2) 		os.system(cmd)

page = wikipedia.PageLink(mysite, 'User:AllyUnion/VFD List') # Post to the Wikipedia page.put(post, 'Fixing VFD List...')

cmd = 'rm -f ' + logfile + ' ' + parsefile os.system(cmd)

# Possibility is that utctime == tomorrow, but should never happen else: sys.exit(1)

sys.exit(0)

en-wp-vfd-old-update.py

 * 1) !/usr/bin/python2.3
 * 2) -*- coding: utf-8 -*-

import os, sys if (not (sys.path[0] == '')): os.chdir(sys.path[0])


 * 1) Automatic VFD Update script
 * 2) Written by Jason Y. Lee (AllyUnion)
 * 3) Assumes to be run by a cron job, 10 minutes prior to the next 00:00 UTC.
 * 4) Rewritten for any time zone, so it can be run hourly if anyone wishes.


 * 1) Author's note:
 * 2) Yes, I do realize I can make an array for all the variables below
 * 3) but I rather have everything clearly spelled out just to make absolutely
 * 4) certain that whatever error is going on can be clearly seen

import datetime
 * 1) import wikiipedia, config

if __name__ == "__main__": utc = datetime.datetime.utcnow# - datetime.timedelta(1) if (not (utc.replace(hour=23,minute=0,second=0,microsecond=0) <= utc <= (utc.replace(hour=0,minute=0,second=0,microsecond=0) + datetime.timedelta(1)))): sys.exit(1)

# Get today's date:
 * 1) 	yyyy = int(datetime.datetime.utcnow.strftime('%Y'))
 * 2) 	mm = int(datetime.datetime.utcnow.strftime('%m'))
 * 3) 	dd = int(datetime.datetime.utcnow.strftime('%d'))

import wikipedia, config

# Today's date, exactly at 0000 hours # Required as a point of reference today = utc.replace(hour=0,minute=0,second=0,microsecond=0)
 * 1) 	today = datetime.datetime(yyyy, mm, dd, 0, 0, 0, 0)

tag = ''

# Five days prior to today's date at 0000 hours # The day that will be moved to Old fivedaysago = today - datetime.timedelta(5)

# Six days prior to today's date at 0000 hours # The day which we need to search for
 * 1) 	sixdaysago = today - datetime.timedelta(6)

# Today's transinclude
 * 1) 	today_vfdtag = '* Votes for deletion/Log/' + today.strftime('%Y %B ') + str(int(today.strftime('%d'))) + ''

# Five days ago (transinclude) fivedaysago_vfdtag = '* Votes for deletion/Log/' + fivedaysago.strftime('%Y %B ') + str(int(fivedaysago.strftime('%d'))) + ''

# Six days ago (transinclude)
 * 1) 	sixdaysago_vfdtag = '* Votes for deletion/Log/' + sixdaysago.strftime('%Y %B ') + str(int(sixdaysago.strftime('%d'))) + ''


 * 1) 	print today_vfdtag
 * 2) 	print fivedaysago_vfdtag
 * 3) 	print sixdaysago_vfdtag


 * 1) 	import sys
 * 2) 	sys.exit(0)

# Site configured in user-config.py	mysite = wikipedia.getSite

# Get vfd pages vfdold = wikipedia.Page(mysite, 'Wikipedia:Votes_for_deletion/Old').get(False, True)

# Search and replace

# Transinclude on VFD Old # Replace six days ago transinclude with six days ago transinclude + '\n' + five days ago transinclude (once) if (vfdold.find(fivedaysago_vfdtag) == -1): vfdold = vfdold.replace(tag, fivedaysago_vfdtag + '\n' + tag, 1)

# Page links vfdoldpage = wikipedia.Page(mysite, 'Wikipedia:Votes for deletion/Old')

vfdoldpage.put(vfdold, 'VFD Bot (talk) (contributions): Auto-update of VFD Old - Bot work')

en-wp-vfd-yesterday.py

 * 1) !/usr/bin/python2.3
 * 2) -*- coding: utf-8 -*-

import os, sys if (not (sys.path[0] == '')): os.chdir(sys.path[0])


 * 1) Will only run between 00:00 UTC and 01:00 UTC.

import datetime

if __name__ == "__main__": utc = datetime.datetime.utcnow
 * 1) 	if (not (utc.replace(hour=0,minute=0,second=0,microsecond=0) <= utc <= utc.replace(hour=1,minute=0,second=0,microsecond=0))):
 * 2) 		sys.exit

import wikipedia, config

page = 'Wikipedia:Votes for deletion/Log/Yesterday' yesterday = utc.replace(hour=0,minute=0,second=0,microsecond=0) - datetime.timedelta(1)

header = u'' header += '\n' header += '\n' header += '\n' header += ' \'\'\'[ Purge page cache] if page isn\'t updating.\'\'\' \n' header += '\n' header += '\n'

transinclude = u''

footer = u'' footer += '\n\n' footer += '\n' footer += '== VfD footer ==\n' footer += '\n' footer += '\n' footer += '\n' footer += '\n' footer += '\n' footer += '\n' footer += '\n' footer += '&\n' footer += '&\n' footer += 'cs:Wikipedie:Hlasov%C3%A1n%C3%AD_o_smaz%C3%A1n%C3%AD\n' footer += 'da:Wikipedia:Sider_der_b%C3%B8r_slettes\n' footer += 'de:Wikipedia:L%C3%B6schkandidaten\n' footer += 'es:Wikipedia:P%C3%A1ginas para borrar\n' footer += 'eo:Vikipedio:Forigendaj artikoloj\n' footer += 'fi:Wikipedia:Poistettavat sivut\n' footer += 'fr:Wikip%C3%A9dia:Pages_%C3%A0_supprimer\n' footer += 'fy:Wikipedy:Siden wiskje\n' footer += '&\n' footer += 'hu:Wikip%C3%A9dia:Szavaz%C3%A1s_t%C3%B6rl%C3%A9sr%C5%91l\n' footer += 'it:Wikipedia:Pagine da cancellare\n' footer += 'ja:Wikipedia:&\n' footer += '&\n' footer += 'lb:Wikipedia:L%C3%A4schen\n' footer += 'na:Wikipedia:Animwen ijababa\n' footer += 'nl:Wikipedia:Te verwijderen pagina\'s\n' footer += 'no:Wikipedia:Sletting\n' footer += 'pl:Wikipedia:Strony do usuni&\n' footer += 'pt:Wikipedia:P%C3%A1ginas_para_eliminar\n' footer += 'ro:Wikipedia:Pagini de &\n' footer += 'ru:%D0%92%D0%B8%D0%BA%D0%B8%D0%BF%D0%B5%D0%B4%D0%B8%D1%8F:%D0%9A_%D1%83%D0%B4%D0%B0%D0%BB%D0%B5%D0%BD%D0%B8%D1%8E' footer += 'simple:Wikipedia:Requests for deletion\n' footer += 'sk:Wikip%C3%A9dia:Str%C3%A1nky_na_zmazanie\n' footer += 'sl:Wikipedija:Predlogi za brisanje\n' footer += 'sv:Wikipedia:Sidor_som_b%C3%B6r_raderas\n' footer += 'vi:Wikipedia:Bi%E1%BB%83u_quy%E1%BA%BFt_xo%C3%A1_b%C3%A0i\n' footer += 'zh:Wikipedia:&\n'


 * 1) 	text = header.encode('utf-8') + transinclude.encode('utf-8') + footer.encode('utf-8')
 * 2) 	text = text.decode('utf-8')

attempt = True while(attempt): try: wikipedia.Page(wikipedia.getSite, page).put(header + transinclude + footer, 'VFD Bot (talk) (contributions): Updating page with VFD page from ' + yesterday.strftime('%A, %B %d, %Y.')) attempt = False except wikipedia.EditConflict: wikipedia.Page(wikipedia.getSite, page).put(header + transinclude + footer, 'VFD Bot (talk) (contributions): Updating page with VFD page from ' + yesterday.strftime('%A, %B %d, %Y.')) attempt = True

en-wp-vfd-newday.py

 * 1) !/usr/bin/python2.3
 * 2) -*- coding: utf-8 -*-

import os, sys if (not (sys.path[0] == '')): os.chdir(sys.path[0])


 * 1) Automatic VFD Update script
 * 2) Written by Jason Y. Lee (AllyUnion)
 * 3) Assumes to be run by a cron job, some time prior to the next 00:00 UTC.


 * 1) Due to DST errors, the new assumption is that is run hourly, so that no
 * 2) matter what weird time zone this is run from, it will always work
 * 3) Will only run if the current time is between one hour prior to the next
 * 00:00 UTC and 00:00 UTC

import datetime
 * 1) import wikipedia, config

if __name__ == "__main__": utc = datetime.datetime.utcnow# - datetime.timedelta(1) if (not (utc.replace(hour=22,minute=50,second=0,microsecond=0) <= utc <= (utc.replace(hour=0,minute=10,second=0,microsecond=0) + datetime.timedelta(1)))): sys.exit(1)

# Get today's date: import wikipedia, config # Today's date, exactly at 0000 hours # Required as a point of reference today = utc.replace(hour=0,minute=0,second=0,microsecond=0)

# Tomorrow's date, exactly at 0000 hours # The new day going up	tomorrow = today + datetime.timedelta(1)

# Site configured in user-config.py	mysite = wikipedia.getSite

# Section heading for tomorrow sectionheading = '\'\'\'Guide to Votes for Deletion\'\'\' \n' sectionheading += '\n' sectionheading += ' \n' sectionheading += '== ' + tomorrow.strftime('%B ') + str(int(tomorrow.strftime('%d'))) + ' ==\n\n'

# Tomorrow's page name tomorrow_pagename = 'Wikipedia:Votes_for_deletion/Log/' + tomorrow.strftime('%Y_%B_') + str(int(tomorrow.strftime('%d')))

# Post section heading for tomorrow's VFD page tomorrow_page = wikipedia.Page(mysite, tomorrow_pagename) tomorrow_page.put(sectionheading, 'VFD Bot (talk) (contributions): Creating new VFD day - automatic VFD bot work')
 * 1) 	else:
 * 2) 		print "False\n";

en-wp-vfd-update2.py

 * 1) !/usr/bin/python2.3
 * 2) -*- coding: utf-8 -*-

import os, sys if (not (sys.path[0] == '')): os.chdir(sys.path[0])


 * 1) Automatic VFD Update script
 * 2) Written by Jason Y. Lee (AllyUnion)
 * 3) Assumes to be run by a cron job, 10 minutes prior to the next 00:00 UTC.


 * 1) Author's note:
 * 2) Yes, I do realize I can make an array for all the variables below
 * 3) but I rather have everything clearly spelled out just to make absolutely
 * 4) certain that whatever error is going on can be clearly seen

import datetime
 * 1) import wikipedia, config

def stndrdth(n): '''Return the appropriate st, nd, rd, or th to a number Example: stndrdth(2), returns "2nd" as the result''' remain = n % 10 if (remain == 1): if (n == 11): return str(n) + 'th' return str(n) + 'st' elif (remain == 2): if (n == 12): return str(n) + 'th' return str(n) + 'nd' elif (remain == 3): if (n == 13): return str(n) + 'th' return str(n) + 'rd' else: return str(n) + 'th'

# ===========================================================================
# NOTE(review): main body of en-wp-vfd-update2.py, preserved byte-for-byte.
# This wiki copy is lossy: the original tab-indented multi-line script was
# collapsed onto single lines; source lines that began with '#' render as
# " * 1) ..." numbered-list items (i.e. they are comments / commented-out
# code); "utcnow" has lost its call parentheses (presumably
# datetime.datetime.utcnow() in the running source -- TODO confirm); and
# several wikitext string literals (the section-link markup) were swallowed
# by MediaWiki rendering and cannot be reconstructed from this page alone.
# Only comments are added below; no code token has been altered.
# ===========================================================================
# Entry guard: take the current UTC time, then bail out with exit status 1
# unless it falls between 23:00 UTC and the following midnight (the header
# says this is cron-run ~10 minutes before 00:00 UTC).
if __name__ == "__main__": utc = datetime.datetime.utcnow# - datetime.timedelta(1) if (not (utc.replace(hour=23,minute=0,second=0,microsecond=0) <= utc <= (utc.replace(hour=0,minute=0,second=0,microsecond=0) + datetime.timedelta(1)))): sys.exit(1)

# Deferred import so the time-window check above runs without loading the
# framework (wikipedia == the pywikipedia bot framework, config == its
# user-config module).
import wikipedia, config

# Get today's date:
# (superseded approach, commented out in the source: build today's date
# from parsed year/month/day integers instead of utc.replace(...))
 * 1) 	yyyy = int(datetime.datetime.utcnow.strftime('%Y'))
 * 2) 	mm = int(datetime.datetime.utcnow.strftime('%m'))
 * 3) 	dd = int(datetime.datetime.utcnow.strftime('%d'))

# Reference point: today at exactly 00:00:00.000000 UTC.
# Today's date, exactly at 0000 hours # Required as a point of reference today = utc.replace(hour=0,minute=0,second=0,microsecond=0)
 * 1) 	today = datetime.datetime(yyyy, mm, dd, 0, 0, 0, 0)

# Derived rotation dates: tomorrow (the day being added), five days ago
# (the day moved to "Old votes"), six days ago (the anchor searched for).
# Tomorrow's date, exactly at 0000 hours # The new day going up	tomorrow = today + datetime.timedelta(1)

# Five days prior to today's date at 0000 hours # The day that will be moved to Old fivedaysago = today - datetime.timedelta(5)

# Six days prior to today's date at 0000 hours # The day which we need to search for sixdaysago = today - datetime.timedelta(6)

# Per-day transclusion tags, e.g. "*...Monday, 7 March...";
# int(strftime('%d')) strips the leading zero from the day number.
# NOTE(review): the wiki link/template markup that surrounded each entry was
# eaten by MediaWiki rendering; the trailing + '' is what remains of it.
# Today's transinclude today_vfdtag = '*' + today.strftime('%A, ') + str(int(today.strftime('%d'))) + today.strftime(' %B') + ''

# Tomorrow's transinclude tomorrow_vfdtag = '*' + tomorrow.strftime('%A, ') + str(int(tomorrow.strftime('%d'))) + tomorrow.strftime(' %B') + ''

# Five days ago (transinclude) fivedaysago_vfdtag = '*' + fivedaysago.strftime('%A, ') + str(int(fivedaysago.strftime('%d'))) + fivedaysago.strftime(' %B') + ''

# (debug leftovers, commented out in the source)
 * 1) 	print fivedaysago_vfdtag
 * 2) 	sys.exit(0)

# Section links ("1st", "2nd", ...) for the current-votes box.
# NOTE(review): the quoted wikitext around each stndrdth(...) call was lost
# in the wiki copy -- these assignments are incomplete as shown here.
# Current votes section links # Need: Tomorrow's, today's, and five days ago # Today's	today_sectionlink =  + stndrdth(int(today.strftime('%d'))) + 

# Tomorrow's	tomorrow_sectionlink =  + stndrdth(int(tomorrow.strftime('%d'))) + 

# Five days ago fivedaysago_sectionlink =  + stndrdth(int(fivedaysago.strftime('%d'))) + 

# Old votes section links # Need: Five days ago, and six days ago

# Five days ago fivedaysago_oldsectionlink = '\'\ + stndrdth(int(fivedaysago.strftime('%d'))) + '\'\

# Six days ago # Five days ago
 * 1) 	sixdaysago_oldsectionlink = '\'\ + stndrdth(int(sixdaysago.strftime('%d'))) + '\'\
 * 1) 	fivedaysago_oldsectionlink = '\'\ + stndrdth(int(fivedaysago.strftime('%d'))) + '\'\

# NOTE(review): wikipedia.getSite is missing its call parentheses here too;
# presumably wikipedia.getSite() -- the site object from user-config.py.
# Six days ago # Site configured in user-config.py	mysite = wikipedia.getSite
 * 1) 	sixdaysago_oldsectionlink = '\'\ + stndrdth(int(sixdaysago.strftime('%d'))) + '\'\

# Fetch the live wikitext of the main VFD page.  get(False, True) --
# presumably (force, get_redirect) in this era of pywikipedia; verify
# against the framework version actually deployed.
# Get vfd pages vfd = wikipedia.Page(mysite, 'Wikipedia:Votes_for_deletion').get(False, True)

# Search and replace

# In the section-link box: insert tomorrow's link above today's (at most
# one replacement), then drop the five-days-ago link and its newline.
# Section links (taxobox) # Today's section link, replace with today's section link + newline + tomorrow's section link (replace once) vfd = vfd.replace(today_sectionlink, tomorrow_sectionlink + '\n' + today_sectionlink, 1)

# Five days section link + newline, replace with nothing (replace once) vfd = vfd.replace(fivedaysago_sectionlink + '\n', '', 1)

# Move the five-days-ago link under "Old votes" by anchoring on the section
# heading itself; the older anchor-on-sixdaysago variant is commented out.
# Six days ago old section link, replace with five days ago old section link + '\n' + six days ago old section link (replace once) # Replace using section title vfd = vfd.replace('==Old votes==', '==Old votes==\n' + fivedaysago_oldsectionlink, 1)
 * 1) 	vfd = vfd.replace(sixdaysago_oldsectionlink, fivedaysago_oldsectionlink + '\n' + sixdaysago_oldsectionlink, 1)

# Same rotation for the per-day transclusion tags: add tomorrow's above
# today's, then remove the five-days-ago entry.
# Transincludes on VFD # Replace today's transinclude with today's transinclude + '\n' + tomorrow's transinclude vfd = vfd.replace(today_vfdtag, tomorrow_vfdtag + '\n' + today_vfdtag, 1)

# Remove five days ago transinclude vfd = vfd.replace(fivedaysago_vfdtag + '\n', '', 1)

# Save the rotated wikitext back to the VFD page with an edit summary.
# Page links vfdpage = wikipedia.Page(mysite, 'Wikipedia:Votes for deletion')

vfdpage.put(vfd, 'VFD Bot (talk) (contributions): Auto-update of VFD')

en-wp-vfd-update3.py

 * 1) !/usr/bin/python2.3
 * 2) -*- coding: utf-8 -*-

# NOTE(review): deliberate kill-switch -- the interpreter exits with status 1
# immediately, so the rest of en-wp-vfd-update3.py below never executes
# (this version appears retired in favor of en-wp-vfd-update2.py; confirm
# with the bot operator before removing).  The wiki copy collapsed what were
# presumably two source lines, `import sys` and `sys.exit(1)`, onto one.
import sys sys.exit(1)

# Switch the process working directory to wherever this script resides
# (sys.path[0]) so cron invocations see the same relative paths as a
# manual run.  Skip the chdir when sys.path[0] is the empty string.
import os
import sys

if sys.path[0]:
    os.chdir(sys.path[0])


 * 1) Automatic VFD Update script
 * 2) Written by Jason Y. Lee (AllyUnion)
 * 3) Assumes to be run by a cron job, 10 minutes prior to the next 00:00 UTC.


 * 1) Author's note:
 * 2) Yes, I do realize I can make an array for all the variables below
 * 3) but I rather have everything clearly spelled out just to make absolutely
 * 4) certain that whatever error is going on can be clearly seen

import datetime
 * 1) import wikipedia, config

def stndrdth(n):
    """Return the number *n* with its English ordinal suffix appended.

    Example: stndrdth(2) returns "2nd".

    The teen values take 'th' (11th, 12th, 13th); otherwise the suffix
    follows the last digit: 1 -> 'st', 2 -> 'nd', 3 -> 'rd', else 'th'.
    """
    # The original only special-cased n == 11/12/13, which would yield
    # "111st", "112nd", "113rd" for larger teens.  Checking n % 100
    # generalizes correctly while preserving the old behavior for every
    # day-of-month value (1-31) this bot actually passes in.
    if n % 100 in (11, 12, 13):
        return str(n) + 'th'
    suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(n % 10, 'th')
    return str(n) + suffix

# ===========================================================================
# NOTE(review): main body of en-wp-vfd-update3.py, preserved byte-for-byte.
# It mirrors en-wp-vfd-update2.py on this same page and shows the same lossy
# wiki-copy damage: collapsed lines, '#'-prefixed source lines rendered as
# " * 1) ..." list items (comments / commented-out code), and missing call
# parentheses on utcnow / getSite / getRedirectTo (presumably calls in the
# running source -- TODO confirm).  It is also dead code in practice: the
# unconditional sys.exit(1) at the top of this script stops execution before
# this point.  Only comments are added; no code token has been altered.
# ===========================================================================
if __name__ == "__main__": utc = datetime.datetime.utcnow# - datetime.timedelta(1)
# (the 23:00-24:00 UTC run-window guard is commented out in this version)
 * 1) 	if (not (utc.replace(hour=23,minute=0,second=0,microsecond=0) <= utc <= (utc.replace(hour=0,minute=0,second=0,microsecond=0) + datetime.timedelta(1)))):
 * 2) 		sys.exit(1)

# Deferred import of the pywikipedia framework and its user-config module.
import wikipedia, config

# Get today's date:
# (superseded approach, commented out: parse year/month/day integers)
 * 1) 	yyyy = int(datetime.datetime.utcnow.strftime('%Y'))
 * 2) 	mm = int(datetime.datetime.utcnow.strftime('%m'))
 * 3) 	dd = int(datetime.datetime.utcnow.strftime('%d'))

# Reference point: today at 00:00 UTC, plus the derived rotation dates.
# Today's date, exactly at 0000 hours # Required as a point of reference today = utc.replace(hour=0,minute=0,second=0,microsecond=0)
 * 1) 	today = datetime.datetime(yyyy, mm, dd, 0, 0, 0, 0)

# Tomorrow's date, exactly at 0000 hours # The new day going up	tomorrow = today + datetime.timedelta(1)

# Five days prior to today's date at 0000 hours # The day that will be moved to Old fivedaysago = today - datetime.timedelta(5)

# Six days prior to today's date at 0000 hours # The day which we need to search for sixdaysago = today - datetime.timedelta(6)

# Per-day transclusion tags; int(strftime('%d')) strips the leading zero.
# NOTE(review): the surrounding wikitext markup was eaten by the wiki
# rendering; the trailing + '' is what remains of it.
# Today's transinclude today_vfdtag = '*' + today.strftime('%A, ') + str(int(today.strftime('%d'))) + today.strftime(' %B') + ''

# Tomorrow's transinclude tomorrow_vfdtag = '*' + tomorrow.strftime('%A, ') + str(int(tomorrow.strftime('%d'))) + tomorrow.strftime(' %B') + ''

# Five days ago (transinclude) fivedaysago_vfdtag = '*' + fivedaysago.strftime('%A, ') + str(int(fivedaysago.strftime('%d'))) + fivedaysago.strftime(' %B') + ''

# (debug leftovers, commented out in the source)
 * 1) 	print fivedaysago_vfdtag
 * 2) 	sys.exit(0)

# Site configured in user-config.py	mysite = wikipedia.getSite

# Difference from en-wp-vfd-update2.py: resolve the 'WP:NAC' redirect and
# operate on its target page instead of hard-coding the VFD page name (the
# hard-coded fetch is the commented-out line beneath).
# Get vfd pages pagename = wikipedia.Page(mysite, 'WP:NAC').getRedirectTo vfd = wikipedia.getPage(mysite, pagename, True, True, False)
 * 1) 	vfd = wikipedia.getPage(mysite, 'Wikipedia:Votes_for_deletion', True, True, False)

# Search and replace

# Rotate the per-day transclusion tags: insert tomorrow's above today's,
# then remove the five-days-ago entry (each replaced at most once).
# Transincludes on VFD # Replace today's transinclude with today's transinclude + '\n' + tomorrow's transinclude vfd = vfd.replace(today_vfdtag, tomorrow_vfdtag + '\n' + today_vfdtag, 1)

# Remove five days ago transinclude vfd = vfd.replace(fivedaysago_vfdtag + '\n', '', 1)

# Save the rotated wikitext back to the resolved page with an edit summary.
# Page links vfdpage = wikipedia.Page(mysite, pagename)

vfdpage.put(vfd, 'VFD Bot (talk) (contributions): Updating Votes for deletion section')