Commit

new version
James Michael DuPont committed Jun 23, 2012
1 parent 83480d8 commit 6adace5
Showing 2 changed files with 41 additions and 32 deletions.
17 changes: 13 additions & 4 deletions pushzip.py
@@ -11,18 +11,27 @@ def push_zip (file):
 
     year = d.year
     month= d.month
-    block= "wikipedia-delete-%0.4d-%02d" % (year, month)
+    block= "wikipedia-delete-v2-%0.4d-%02d" % (year, month)
     print "going to use %s" % block
     conn = boto.connect_s3(host='s3.us.archive.org', is_secure=False)
    bucket = conn.get_bucket(block)
    if not bucket:
        bucket = conn.create_bucket(block)
    k = Key(bucket)
    k.key = file
-    headers = {}
-    headers['x-archive-queue-derive'] = '0'
+    hdrs = {}
+    hdrs['x-archive-queue-derive'] = '0'
+    hdrs['x-archive-queue-derive'] = '0'
+    hdrs['x-archive-meta-mediatype']= "texts"
+    hdrs['x-archive-meta-collection']="opensource"
+    hdrs['x-archive-meta-title']="fosm node index "
+    hdrs['x-archive-meta-description']="fosm index object"
+    hdrs['x-archive-meta-creator']="james michael dupont<jamesmikedupont@gmail.com>"
+    hdrs['x-archive-meta-subject']="fosm,osm"
+    hdrs['x-archive-meta-licenseurl']='http://creativecommons.org/licenses/by-nc/3.0/'
+
     k.set_contents_from_filename(file,
-                                 headers=headers,
+                                 headers=hdrs,
                                  cb=percent_cb,
                                  num_cb=10)
     print "Uploaded %s" % file
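The change above does two things: it versions the bucket name (wikipedia-delete-v2-YYYY-MM) and attaches Internet Archive item metadata through x-archive-meta-* headers at upload time, with x-archive-queue-derive: 0 asking archive.org to skip its derive step. Below is a minimal standalone sketch of the same upload pattern, assuming boto 2.x with IAS3 credentials in the usual boto locations; upload_with_metadata and its arguments are illustrative names, not part of this repository. The sketch uses conn.lookup(), which returns None for a missing bucket, since get_bucket() raises on a missing bucket and would never reach the "if not bucket:" fallback in the script above.

# Sketch of the pushzip.py upload pattern (boto 2.x and IAS3 keys assumed).
import boto
from boto.s3.key import Key

def upload_with_metadata(bucket_name, filename):
    # archive.org's S3-compatible endpoint, plain HTTP as in the script above
    conn = boto.connect_s3(host='s3.us.archive.org', is_secure=False)
    # lookup() returns None when the bucket (item) does not exist yet;
    # get_bucket() would raise S3ResponseError instead
    bucket = conn.lookup(bucket_name)
    if bucket is None:
        bucket = conn.create_bucket(bucket_name)
    k = Key(bucket)
    k.key = filename
    hdrs = {
        'x-archive-queue-derive': '0',        # skip the derive pipeline
        'x-archive-meta-mediatype': 'texts',  # becomes item-level metadata
        'x-archive-meta-collection': 'opensource',
    }
    k.set_contents_from_filename(filename, headers=hdrs)
    print "Uploaded %s" % filename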
56 changes: 28 additions & 28 deletions speedydeletion.py
@@ -11,35 +11,35 @@
 from shove import Shove
 file_store = Shove('file://wikiaupload')
 
-def signpage(site,pagename) :
+# def signpage(site,pagename) :
 
-    generator = [pywikibot.Page(
-        site,
-        pagename
-        )]
-    # Main Loop
-    for page in generator:
-        print "going to process %s" % page.urlname()
-        try:
-            text = page.get()
-        except:
-            text = ""
+#     generator = [pywikibot.Page(
+#         site,
+#         pagename
+#         )]
+#     # Main Loop
+#     for page in generator:
+#         print "going to process %s" % page.urlname()
+#         try:
+#             text = page.get()
+#         except:
+#             text = ""
 
-        m = re.search("==archived on speedy deletion wikia==" , text)
-        if not(m):
-            m = re.search("==archived==" , text)
-            if not( m):
-                summary="notification of speedy deletion page"
-                newname =page.urlname()
-                newname = newname.replace('Talk%3A', '')
-                newtext= "==archived on speedy deletion wikia==\nThis endangered article has been archived here http://speedydeletion.wikia.com/wiki/%s so that it is not lost if deleted. Changes made after the archiving will not be copied.\n~~~~" % newname
-                (text, newtext, always) = add_text(page, newtext, summary, regexSkip,
-                    regexSkipUrl, always, up, True, reorderEnabled=reorderEnabled,
-                    create=talkPage)
-            else:
-                print "skipping %s" % page.urlname()
-        else:
-            print "skipping %s" % page.urlname()
+#         m = re.search("==archived on speedy deletion wikia==" , text)
+#         if not(m):
+#             m = re.search("==archived==" , text)
+#             if not( m):
+#                 summary="notification of speedy deletion page"
+#                 newname =page.urlname()
+#                 newname = newname.replace('Talk%3A', '')
+#                 newtext= "==archived on speedy deletion wikia==\nThis endangered article has been archived here http://speedydeletion.wikia.com/wiki/%s so that it is not lost if deleted. Changes made after the archiving will not be copied.\n~~~~" % newname
+#                 (text, newtext, always) = add_text(page, newtext, summary, regexSkip,
+#                     regexSkipUrl, always, up, True, reorderEnabled=reorderEnabled,
+#                     create=talkPage)
+#             else:
+#                 print "skipping %s" % page.urlname()
+#         else:
+#             print "skipping %s" % page.urlname()
 
 def main(*args):
     genFactory = pagegenerators.GeneratorFactory()
@@ -99,7 +99,7 @@ def main(*args):
         print outpage.site.lang
         outpage.put(contents)
 
-        signpage(insite,"Talk:%s" % pagename)
+        # signpage(insite,"Talk:%s" % pagename)
 
         try :
             file_store[title] = 1
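For context, the file_store[title] = 1 line kept above is how the script remembers which pages it has already mirrored: Shove exposes a persistent, dict-like store backed here by the wikiaupload directory, so a title recorded in one run can be skipped in later runs. A minimal sketch of that dedup pattern, assuming the shove package; mark_archived and is_archived are illustrative helpers, not names from this repository.

# Sketch of the speedydeletion.py dedup ledger (shove package assumed).
from shove import Shove

file_store = Shove('file://wikiaupload')  # file-backed key/value store

def mark_archived(title):
    file_store[title] = 1                 # persists across runs

def is_archived(title):
    try:
        file_store[title]                 # raises KeyError if never seen
        return True
    except KeyError:
        return False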
