Skip to content

Commit

Permalink
Changes to storage so that we're not deleting all keys all the time :/
Browse files Browse the repository at this point in the history
  • Loading branch information
rolando3 committed Mar 22, 2013
1 parent 56c0423 commit d968936
Show file tree
Hide file tree
Showing 4 changed files with 38 additions and 12 deletions.
Empty file removed .env
Empty file.
3 changes: 2 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
venv
*.pyc
.DS_Store
.env
.env
*.swp
8 changes: 8 additions & 0 deletions checks.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
#!/usr/bin/env python
"""Smoke-check script: print every key currently held in S3 storage."""

from migrastorage import fileStorage


def checkstorage():
    """Print the (key, metadata) listing reported by fileStorage.list_keys()."""
    print(fileStorage().list_keys())


# Guard the call so importing this module no longer hits S3 as a side effect;
# running the script directly behaves exactly as before.
if __name__ == '__main__':
    checkstorage()
39 changes: 28 additions & 11 deletions migrastorage.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,18 +49,25 @@ def __key(cls,k=None):
from calendar import timegm
from random import choice

# NOTE(review): this hunk interleaves removed and added diff lines with the
# +/- markers stripped — the two `key = Key(cls.__bucket())` lines and the
# two `key.key = k` lines below are old/new pairs, not real duplication.
key = Key(cls.__bucket())
if k is None:
key.set_metadata('time',timegm(gmtime()))

key = Key(cls.__bucket())
from string import ascii_lowercase as letters
# Mint a random 12-character lowercase key name.
k = ''
for i in range(12):
k += choice(letters)

key.key = k
key.key = k
# 'time' metadata records the creation epoch (seconds, UTC) for cleanup().
key.set_metadata('time',timegm(gmtime()))
else:
# NOTE(review): Bucket.get_key() returns None when the key does not
# exist — presumably callers must handle a None result; confirm.
key = cls.__bucket().get_key(k)

return key

@classmethod
def test_key(cls, k=None):
    """Debug helper: fetch (or mint) a storage key and show its 'time' metadata."""
    key = cls.__key(k)
    print(key.get_metadata('time'))
    return key

@classmethod
def store_file(cls,d):
awsKey = cls.__key()
Expand All @@ -80,9 +87,17 @@ def check_key(cls,k):

@classmethod
def list_keys(cls):
    """Return [(key_name, time_delta)] for every key in the bucket.

    time_delta is stored_time - now in seconds (negative for keys created
    in the past), or None when the key carries no 'time' metadata.
    """
    from time import gmtime
    from calendar import timegm

    now = timegm(gmtime())
    b = cls.__bucket()
    result = []
    for k in b.list():
        # Keys yielded by bucket.list() do not carry user metadata; re-fetch
        # the key so get_metadata('time') actually returns a value (this
        # mirrors the fix already applied in cleanup()).
        key = b.get_key(k.key)
        t = key.get_metadata('time') if key is not None else None
        if t is None:
            result.append((k.key, None))
        else:
            # S3 metadata values come back as strings; the timestamp was
            # stored as an integer epoch via timegm(), so convert before
            # doing arithmetic.
            result.append((k.key, int(t) - now))
    return result

@classmethod
def cleanup(cls, age):
# NOTE(review): the diff is collapsed here — the start of this method
# (imports and the delcount initialisation) is hidden behind "Expand All".
Expand All @@ -97,11 +112,13 @@ def cleanup(cls, age):
curtime = timegm(gmtime())
b = cls.__bucket()
for k in b.list():
# NOTE(review): the next two lines are the removed (pre-change) code;
# the `key = b.get_key(k)` line is their replacement in this diff —
# re-fetching the key is required for get_metadata() to see metadata.
key = Key(b)
key.key = k
key = b.get_key(k)
t = key.get_metadata('time')
if t is None:
#do nothing
pass
# NOTE(review): t comes back from S3 metadata as a string, so
# `curtime - t` presumably raises/misbehaves — looks like it needs
# int(t); confirm.
elif ( curtime - t > age ):
# BUG: `=+ 1` parses as `delcount = +1`, so the counter is clobbered
# to 1 on every deletion instead of incrementing; should be `+= 1`.
delcount =+ 1
b.delete_key(k)

# NOTE(review): the two identical logging lines below are an old/new
# diff pair (whitespace-only change), not real duplication.
logging.info ( "Deleted %s old files." % delcount )
logging.info ( "Deleted %s old files." % delcount )

0 comments on commit d968936

Please sign in to comment.