Downloading Aliyun MySQL backup files and binlogs to a local server with Python
No more preamble; straight to the code. If you have good suggestions, please leave a comment! Let's discuss and improve together!
#!/usr/bin/python
# coding=utf-8
import json
import os
import datetime
import urllib.request
from aliyunsdkcore import client
from aliyunsdkrds.request.v20140815.DescribeBackupsRequest import DescribeBackupsRequest
from aliyunsdkrds.request.v20140815.DescribeBinlogFilesRequest import DescribeBinlogFilesRequest
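# Note: the two SDK modules above come from Aliyun's Python SDK on PyPI,
# installable with: pip install aliyun-python-sdk-core aliyun-python-sdk-rds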
class PullBackupfile:
    def __init__(self, accessid, key, region, instanceid):
        self.accessid = accessid
        self.key = key
        self.region = region
        self.instanceid = instanceid
    # Automatically create the backup directories: a dated folder with data/ and binlog/ subfolders
    def mkdir(self):
        now = datetime.date.today() - datetime.timedelta(days=1)
        path = r'D:\databack\%s' % now
        data_path = os.path.join(path, 'data')
        bin_log_path = os.path.join(path, 'binlog')
        path_list = [path, data_path, bin_log_path]
        for i in path_list:
            if not os.path.isdir(i):
                os.makedirs(i)
        return data_path, bin_log_path
    # Build an authenticated API client for the configured region
    def login(self):
        clt = client.AcsClient(self.accessid, self.key, self.region)
        return clt
    # Aliyun returns timestamps in UTC, so Beijing time (UTC+8) must be shifted back 8 hours.
    # For example, 2013-08-15T12:00:00Z is 2013-08-15 20:00:00 Beijing time.
    def backup_time(self, name):
        now = datetime.datetime.now()
        # Midnight today in Beijing time, expressed as a naive UTC timestamp
        end_time = now - datetime.timedelta(hours=now.hour + 8, minutes=now.minute, seconds=now.second,
                                            microseconds=now.microsecond)
        start_time = end_time - datetime.timedelta(days=1)
        # The two APIs expect different time formats
        if name == 'datafile':
            starttime = start_time.strftime('%Y-%m-%dT%H:%MZ')
            endtime = end_time.strftime('%Y-%m-%dT%H:%MZ')
        elif name == 'binlog':
            starttime = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
            endtime = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')
        else:
            raise ValueError("name must be 'datafile' or 'binlog'")
        return starttime, endtime
    # Download yesterday's full data backup files
    def download_rds_backfile(self):
        data_path, bin_log_path = self.mkdir()
        starttime, endtime = self.backup_time('datafile')
        try:
            req_bakup = DescribeBackupsRequest()
            req_bakup.set_DBInstanceId(self.instanceid)
            req_bakup.set_accept_format('json')
            req_bakup.set_StartTime(starttime)
            req_bakup.set_EndTime(endtime)
            clt = self.login()
            backup = clt.do_action_with_exception(req_bakup)
            jsload = json.loads(backup)
            num = jsload["PageRecordCount"]
            print("backup files: " + str(num))
            i = 0
            back_path = os.path.join(data_path, '')
            while i < num:
                bak_url = jsload["Items"]["Backup"][i]["BackupDownloadURL"]
                bak_host = jsload["Items"]["Backup"][i]["HostInstanceID"]
                bak_id = jsload["Items"]["Backup"][i]["BackupId"]
                print("BackupId:" + str(bak_id), "HostInstanceID:" + str(bak_host), "downloadurl:" + bak_url)
                # Name the local file after the last path segment of the download URL
                save_name = back_path + bak_url.split('?')[0].split('/')[-1]
                u = urllib.request.urlopen(bak_url)
                f_header = u.info()
                print(f_header)
                bak_size = int(f_header["Content-Length"])
                print("backup file size: %s M, file name: %s" % (bak_size / 1024 / 1024, save_name))
                # Stream the file to disk in 8 KB blocks, printing progress as we go
                with open(save_name, "wb") as f:
                    file_size_dl = 0
                    block_sz = 8192
                    while True:
                        buffer = u.read(block_sz)
                        if not buffer:
                            break
                        file_size_dl += len(buffer)
                        f.write(buffer)
                        status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / bak_size)
                        print(status)
                i = i + 1
            print("download complete!")
        except Exception as e:
            print("no backup available:", e)
    # Download the binlog backups to the local server
    def download_rds_binlog(self):
        data_path, bin_log_path = self.mkdir()
        print(bin_log_path)
        starttime, endtime = self.backup_time('binlog')
        try:
            request = DescribeBinlogFilesRequest()
            request.set_DBInstanceId(self.instanceid)
            request.set_accept_format('json')
            request.set_StartTime(starttime)
            request.set_EndTime(endtime)
            clt = self.login()
            binlog_backup = clt.do_action_with_exception(request)
            jsload = json.loads(binlog_backup)
            num = jsload["TotalRecordCount"]
            print("binlog files: " + str(num))
            i = 0
            back_path = os.path.join(bin_log_path, '')
            print(back_path)
            while i < num:
                bak_url = jsload["Items"]["BinLogFile"][i]["DownloadLink"]
                bak_host = jsload["Items"]["BinLogFile"][i]["HostInstanceID"]
                bak_name = jsload["Items"]["BinLogFile"][i]["LogFileName"]
                bak_size = jsload["Items"]["BinLogFile"][i]["FileSize"]
                bak_time = datetime.datetime.strptime(jsload["Items"]["BinLogFile"][i]['LogEndTime'], '%Y-%m-%dT%H:%M:%SZ')
                # Append the log end time to the file name so names stay unique
                save_name = back_path + bak_name + '_' + str(bak_time).replace(' ', '').replace(':', '').replace('-', '')
                # urlretrieve creates and writes the file itself; opening it first is unnecessary
                urllib.request.urlretrieve(bak_url, save_name)
                i = i + 1
            print("download complete!")
        except Exception as e:
            print('no binlog backup available:', e)
    # Delete backup folders (including subdirectories) older than 7 days; Windows-only command
    def remove_file(self):
        os.popen(r'forfiles /p "D:\databack" /d -7 /c "cmd /c echo deleting @path ... && rd @path /s /q" ')
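    # Note: forfiles only exists on Windows. A minimal pure-Python alternative
    # (a sketch, assuming the dated 'YYYY-MM-DD' folder names created by mkdir()
    # and an extra "import shutil" at the top of the script) could look like:
    #
    #     def remove_file_py(self):
    #         cutoff = datetime.date.today() - datetime.timedelta(days=7)
    #         for name in os.listdir(r'D:\databack'):
    #             try:
    #                 folder_date = datetime.datetime.strptime(name, '%Y-%m-%d').date()
    #             except ValueError:
    #                 continue  # skip anything that is not a date-named folder
    #             if folder_date < cutoff:
    #                 shutil.rmtree(os.path.join(r'D:\databack', name))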
if __name__ == '__main__':
    pull_file = PullBackupfile('accessid', 'key', 'region', 'instanceid')
    pull_file.download_rds_backfile()
pull_file.download_rds_binlog()
pull_file.remove_file()
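One caveat: DescribeBackups pages its results, and the script above only reads PageRecordCount, i.e. the first page. If more backup sets fall in the time window than one page holds, the extra records are silently skipped. Below is a minimal paging sketch, assuming the request class exposes set_PageSize and set_PageNumber (mirroring the PageSize/PageNumber parameters of the DescribeBackups API) and that the response carries TotalRecordCount:

    # Hypothetical helper: collect backup records across all result pages
    def fetch_all_backups(clt, instanceid, starttime, endtime, page_size=100):
        backups = []
        page = 1
        while True:
            req = DescribeBackupsRequest()
            req.set_DBInstanceId(instanceid)
            req.set_accept_format('json')
            req.set_StartTime(starttime)
            req.set_EndTime(endtime)
            req.set_PageSize(page_size)
            req.set_PageNumber(page)
            jsload = json.loads(clt.do_action_with_exception(req))
            backups.extend(jsload["Items"]["Backup"])
            # Stop once every record has been fetched
            if page * page_size >= int(jsload["TotalRecordCount"]):
                break
            page += 1
        return backups

download_rds_backfile() could then iterate over the list this helper returns instead of indexing into the first page only.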
Reprinted from: https://www.cnblogs.com/leeInvisible/p/11582608.html