This repository has been archived by the owner on Aug 31, 2021. It is now read-only.
-
Notifications
You must be signed in to change notification settings - Fork 10
/
Copy path: loadbackupdb.py
107 lines (96 loc) · 3.46 KB
/
loadbackupdb.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
import os
import boto
from datetime import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    """
    Load a database snapshot from the nightly S3 archive into a local
    PostgreSQL database. Pulls the ``latest`` snapshot by default; pass a
    ``YYYY-MM-DD`` date as a positional argument for an older one.
    """
    # NOTE(review): the ``args`` attribute is the pre-Django-1.8 way of
    # declaring positional arguments. On modern Django (>=1.10) positional
    # args must be declared in add_arguments() or they will never reach
    # handle() -- confirm against the project's Django version.
    args = '<date YYYY-MM-DD>'
    help = 'Load a database snapshot from our nightly archive. Pulls latest' \
        + ' by default. Specify date for an older one.'

    def add_arguments(self, parser):
        parser.add_argument(
            "--name",
            action="store",
            dest="name",
            default='',
            help="A custom name for the database we're creating locally"
        )
        parser.add_argument(
            "--env",
            action="store",
            dest="env",
            default='prod',
            help=("The deployment environment you want to pull the database "
                  "from. By default it's prod.")
        )

    def set_options(self, *args, **kwargs):
        """
        Resolve the snapshot filename, the S3 connection/bucket/key path,
        and the local database settings, storing them on ``self``.

        Raises CommandError if a positional date argument is supplied but
        is not valid ``YYYY-MM-DD``.
        """
        # If the user provides a date, try to use that. The round-trip
        # through strptime/strftime both validates the input and
        # normalizes it to zero-padded YYYY-MM-DD.
        if args:
            try:
                dt = datetime.strptime(args[0], '%Y-%m-%d')
                dt = dt.strftime("%Y-%m-%d")
            except ValueError:
                raise CommandError("The date you submitted is not valid.")
        # Otherwise fall back to the rolling "latest" snapshot.
        else:
            dt = 'latest'
        self.filename = '%s_%s.sql.gz' % (kwargs['env'], dt)
        # Get all our S3 business straight. NOTE(review): this is the
        # legacy boto2 API (connect_s3/get_bucket/get_key).
        self.bucket_name = settings.AWS_BACKUP_BUCKET_NAME
        self.boto_conn = boto.connect_s3(
            settings.AWS_ACCESS_KEY_ID,
            settings.AWS_SECRET_ACCESS_KEY
        )
        self.bucket = self.boto_conn.get_bucket(self.bucket_name)
        # Snapshots may live under an optional directory prefix in the bucket.
        if hasattr(settings, 'AWS_BACKUP_BUCKET_DIRECTORY'):
            self.key_path = '%s/%s' % (
                settings.AWS_BACKUP_BUCKET_DIRECTORY,
                self.filename
            )
        else:
            self.key_path = self.filename
        # Set local database settings. PGPASSWORD is exported so the
        # pg_* command-line tools invoked in load() can authenticate.
        db = settings.DATABASES['default']
        os.environ['PGPASSWORD'] = db['PASSWORD']
        self.db_user = db['USER']
        # --name overrides the configured database name when provided.
        self.db_name = kwargs.get('name') or db['NAME']

    def handle(self, *args, **options):
        # Initialize the options
        self.set_options(*args, **options)
        # Download the snapshot
        self.download(self.key_path)
        # Load the snapshot into the database
        self.load(self.filename)

    def load(self, source):
        """
        Load a database snapshot file into our local PostgreSQL
        installation, dropping and recreating the target database first.
        """
        self.stdout.write("Loading to new database: %s" % self.db_name)
        # If the db already exists, we need to drop it. os.system() never
        # raises on a nonzero exit status, so a failed drop (e.g. the
        # database does not exist yet) is deliberately ignored -- the old
        # bare try/except around this call was a no-op.
        os.system("dropdb -U %s %s" % (self.db_user, self.db_name))
        # Create a fresh, empty database to restore into.
        os.system("createdb -U %s %s" % (self.db_user, self.db_name))
        # Restore the custom-format (-Fc) dump into the new database.
        os.system(
            "pg_restore -U %s -Fc -d %s ./%s" % (
                self.db_user,
                self.db_name,
                source
            )
        )
        # Delete the downloaded snapshot; no shell needed for this.
        os.remove(source)

    def download(self, dt):
        """
        Download a database snapshot from S3 to the current directory.

        Raises CommandError if the key does not exist in the archive.
        """
        # NOTE(review): despite its name, ``dt`` receives the S3 key path
        # from handle(); the body currently reads self.key_path and
        # self.filename (set in set_options) rather than the argument.
        self.stdout.write("Downloading database: %s" % self.key_path)
        self.key = self.bucket.get_key(self.key_path)
        if not self.key:
            raise CommandError("%s does not exist in the database archive." % (
                self.key_path
            ))
        self.key.get_contents_to_filename(self.filename)