Remove six from cinder/backup/*
Replace the following items with Python 3 style code:

- six.add_metaclass
- six.PY3
- six.text_type
- six.moves
- six.BytesIO
- six.string_types

Implements: blueprint six-removal
Change-Id: Ic3350bf354556d86fe40d9831db07b1378a9790e
parent 6ad1ab0c72
commit 58108e1b66
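
For reference, a minimal sketch of what each six construct named above becomes in Python 3. This is illustrative only, not code from the patch; the names ExampleDriver and payload are placeholders.

import abc
import io

# @six.add_metaclass(abc.ABCMeta) on the class becomes the metaclass
# keyword in the class statement itself.
class ExampleDriver(metaclass=abc.ABCMeta):
    @abc.abstractmethod
    def backup(self, volume):
        """Back up a single volume."""

# six.text_type(obj)   -> str(obj)
# six.string_types     -> str (in isinstance checks)
# six.BytesIO(data)    -> io.BytesIO(data)
# six.moves.range      -> the builtin range
# "if six.PY3:" guards -> removed; the Python 3 branch runs unconditionally.
payload = '{"key": "value"}'
encoded = payload.encode('utf-8')  # no PY3 check needed any more
buf = io.BytesIO(encoded)          # replaces six.BytesIO
assert isinstance(payload, str)    # replaces six.string_types

The hunks below apply exactly these substitutions across the backup drivers and the backup manager.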
@@ -33,7 +33,6 @@ from oslo_log import log as logging
 from oslo_service import loopingcall
 from oslo_utils import excutils
 from oslo_utils import units
-import six
 
 from cinder.backup import driver
 from cinder import exception
@@ -67,8 +66,7 @@ CONF.register_opts(backup_opts)
 # (https://github.com/eventlet/eventlet/issues/432) that would result in
 # failures.
 
-@six.add_metaclass(abc.ABCMeta)
-class ChunkedBackupDriver(driver.BackupDriver):
+class ChunkedBackupDriver(driver.BackupDriver, metaclass=abc.ABCMeta):
     """Abstract chunked backup driver.
 
     Implements common functionality for backup drivers that store volume
@@ -266,8 +264,7 @@ class ChunkedBackupDriver(driver.BackupDriver):
         if extra_metadata:
             metadata['extra_metadata'] = extra_metadata
         metadata_json = json.dumps(metadata, sort_keys=True, indent=2)
-        if six.PY3:
-            metadata_json = metadata_json.encode('utf-8')
+        metadata_json = metadata_json.encode('utf-8')
         with self._get_object_writer(container, filename) as writer:
             writer.write(metadata_json)
         LOG.debug('_write_metadata finished. Metadata: %s.', metadata_json)
@@ -283,12 +280,11 @@ class ChunkedBackupDriver(driver.BackupDriver):
         sha256file['volume_id'] = volume_id
         sha256file['backup_name'] = backup['display_name']
         sha256file['backup_description'] = backup['display_description']
-        sha256file['created_at'] = six.text_type(backup['created_at'])
+        sha256file['created_at'] = str(backup['created_at'])
         sha256file['chunk_size'] = self.sha_block_size_bytes
         sha256file['sha256s'] = sha256_list
         sha256file_json = json.dumps(sha256file, sort_keys=True, indent=2)
-        if six.PY3:
-            sha256file_json = sha256file_json.encode('utf-8')
+        sha256file_json = sha256file_json.encode('utf-8')
         with self._get_object_writer(container, filename) as writer:
             writer.write(sha256file_json)
         LOG.debug('_write_sha256file finished.')
@@ -301,8 +297,7 @@ class ChunkedBackupDriver(driver.BackupDriver):
                   {'container': container, 'filename': filename})
         with self._get_object_reader(container, filename) as reader:
             metadata_json = reader.read()
-        if six.PY3:
-            metadata_json = metadata_json.decode('utf-8')
+        metadata_json = metadata_json.decode('utf-8')
         metadata = json.loads(metadata_json)
         LOG.debug('_read_metadata finished. Metadata: %s.', metadata_json)
         return metadata
@@ -315,8 +310,7 @@ class ChunkedBackupDriver(driver.BackupDriver):
                   {'container': container, 'filename': filename})
         with self._get_object_reader(container, filename) as reader:
             sha256file_json = reader.read()
-        if six.PY3:
-            sha256file_json = sha256file_json.decode('utf-8')
+        sha256file_json = sha256file_json.decode('utf-8')
         sha256file = json.loads(sha256file_json)
         LOG.debug('_read_sha256file finished.')
         return sha256file
@@ -20,7 +20,6 @@ import abc
 from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_serialization import jsonutils
-import six
 
 from cinder.db import base
 from cinder import exception
@@ -346,8 +345,7 @@ class BackupMetadataAPI(base.Base):
             LOG.debug("No metadata of type '%s' to restore", type)
 
 
-@six.add_metaclass(abc.ABCMeta)
-class BackupDriver(base.Base):
+class BackupDriver(base.Base, metaclass=abc.ABCMeta):
 
     def __init__(self, context, db=None):
         super(BackupDriver, self).__init__(db)
@@ -55,7 +55,6 @@ from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import excutils
 from oslo_utils import units
-from six.moves import range
 
 from cinder.backup import driver
 from cinder import exception
@@ -28,6 +28,7 @@ Server-centric flow is used for authentication.
 
 import base64
 import hashlib
+import io
 import os
 
 try:
@@ -56,7 +57,6 @@ from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import timeutils
 from packaging import version
-import six
 
 from cinder.backup import chunkeddriver
 from cinder import exception
@@ -328,7 +328,7 @@ class GoogleObjectWriter(object):
 
     @gcs_logger
     def close(self):
-        media = http.MediaIoBaseUpload(six.BytesIO(self.data),
+        media = http.MediaIoBaseUpload(io.BytesIO(self.data),
                                        'application/octet-stream',
                                        chunksize=self.chunk_size,
                                        resumable=self.resumable)
@@ -339,9 +339,8 @@ class GoogleObjectWriter(object):
             media_body=media).execute(num_retries=self.num_retries)
         etag = resp['md5Hash']
         md5 = hashlib.md5(self.data).digest()
-        if six.PY3:
-            md5 = md5.encode('utf-8')
-            etag = bytes(etag, 'utf-8')
+        md5 = md5.encode('utf-8')
+        etag = bytes(etag, 'utf-8')
         md5 = base64.b64encode(md5)
         if etag != md5:
             err = _('MD5 of object: %(object_name)s before: '
@@ -377,7 +376,7 @@ class GoogleObjectReader(object):
         req = self.conn.objects().get_media(
             bucket=self.bucket,
             object=self.object_name)
-        fh = six.BytesIO()
+        fh = io.BytesIO()
         downloader = GoogleMediaIoBaseDownload(
             fh, req, chunksize=self.chunk_size)
         done = False
@@ -401,7 +400,7 @@ class GoogleMediaIoBaseDownload(http.MediaIoBaseDownload):
                 self._sleep(self._rand() * 2 ** retry_num)
 
             resp, content = gcs_http.request(self._uri, headers=headers)
-            if resp.status < 500 and (six.text_type(resp.status)
+            if resp.status < 500 and (str(resp.status)
                                       not in error_codes):
                 break
         if resp.status in [200, 206]:
@@ -44,12 +44,12 @@
 """
 
 import hashlib
+import io
 import socket
 
 from oslo_config import cfg
 from oslo_log import log as logging
 from oslo_utils import timeutils
-import six
 from swiftclient import client as swift
 
 from cinder.backup import chunkeddriver
@@ -282,7 +282,7 @@ class SwiftBackupDriver(chunkeddriver.ChunkedBackupDriver):
             self.data += data
 
         def close(self):
-            reader = six.BytesIO(self.data)
+            reader = io.BytesIO(self.data)
             try:
                 etag = self.conn.put_object(self.container, self.object_name,
                                             reader,
@@ -44,7 +44,6 @@ from oslo_service import periodic_task
 from oslo_utils import excutils
 from oslo_utils import importutils
 from oslo_utils import timeutils
-import six
 
 from cinder.backup import rpcapi as backup_rpcapi
 from cinder import context
@@ -408,7 +407,7 @@ class BackupManager(manager.SchedulerDependentManager):
                         context, volume_id,
                         {'status': previous_status,
                          'previous_status': 'error_backing-up'})
-                volume_utils.update_backup_error(backup, six.text_type(err))
+                volume_utils.update_backup_error(backup, str(err))
 
         # Restore the original status.
         if snapshot_id:
@@ -471,7 +470,7 @@ class BackupManager(manager.SchedulerDependentManager):
                                           backup_device.is_snapshot)
         try:
             device_path = attach_info['device']['path']
-            if (isinstance(device_path, six.string_types) and
+            if (isinstance(device_path, str) and
                     not os.path.isdir(device_path)):
                 if backup_device.secure_enabled:
                     with open(device_path, 'rb') as device_file:
@@ -501,7 +500,7 @@ class BackupManager(manager.SchedulerDependentManager):
 
     def _is_our_backup(self, backup):
         # Accept strings and Service OVO
-        if not isinstance(backup, six.string_types):
+        if not isinstance(backup, str):
             backup = backup.service
 
         if not backup:
@@ -644,7 +643,7 @@ class BackupManager(manager.SchedulerDependentManager):
         try:
             device_path = attach_info['device']['path']
             open_mode = 'rb+' if os.name == 'nt' else 'wb'
-            if (isinstance(device_path, six.string_types) and
+            if (isinstance(device_path, str) and
                     not os.path.isdir(device_path)):
                 if secure_enabled:
                     with open(device_path, open_mode) as device_file:
@@ -750,8 +749,7 @@ class BackupManager(manager.SchedulerDependentManager):
                 backup_service.delete_backup(backup)
             except Exception as err:
                 with excutils.save_and_reraise_exception():
-                    volume_utils.update_backup_error(backup,
-                                                     six.text_type(err))
+                    volume_utils.update_backup_error(backup, str(err))
 
         # Get reservations
         try:
@@ -841,7 +839,7 @@ class BackupManager(manager.SchedulerDependentManager):
             backup_url = backup.encode_record(driver_info=driver_info)
             backup_record['backup_url'] = backup_url
         except Exception as err:
-            msg = six.text_type(err)
+            msg = str(err)
             raise exception.InvalidBackup(reason=msg)
 
         LOG.info('Export record finished, backup %s exported.', backup.id)
@@ -895,7 +893,7 @@ class BackupManager(manager.SchedulerDependentManager):
             backup_service = self.service(context)
             backup_service.import_record(backup, driver_options)
         except Exception as err:
-            msg = six.text_type(err)
+            msg = str(err)
             volume_utils.update_backup_error(backup, msg)
             raise exception.InvalidBackup(reason=msg)
 