Fix all occurrences of H404 Hacking warning

This fixes all triggers of the hacking 0.6.x warning
H404 (multi line docstring should start with a summary)
and enables the gating check for H404.

Change-Id: I034bd1f05da3f279d8d79aa14a7f6ce8bef5047c
Dirk Mueller 2013-06-29 13:38:13 +02:00 committed by Avishay Traeger
parent 226b824e73
commit 6e7ecda166
40 changed files with 316 additions and 424 deletions
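
For reference, the pattern applied throughout the files below is the one H404 asks for: a multi-line docstring must begin with a one-line summary on the same line as the opening quotes, with any further detail after a blank line. A minimal illustrative sketch, loosely modelled on the first hunk below (the function names and bodies are invented for this example and are not taken from the changed files):

# Triggers H404: the docstring opens with bare quotes and no summary line.
def force_detach_before(volume_id):
    """
    Roll back a bad detach after the volume has been
    disconnected from the hypervisor.
    """
    pass


# H404-clean: a one-line summary first, then a blank line, then any detail.
def force_detach_after(volume_id):
    """Roll back a bad detach after the volume has been disconnected.

    Any detail that does not fit in the summary moves below the blank
    line that separates the summary from the rest of the docstring.
    """
    pass

With the existing violations cleaned up, H404 can be gated on new patches; in OpenStack projects of this period that typically meant removing H404 from the flake8 ignore list in tox.ini and running the pep8 tox environment (tox -e pep8), though that configuration file is not among the hunks shown here.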

View File

@ -126,10 +126,7 @@ class VolumeAdminController(AdminController):
@wsgi.action('os-force_detach') @wsgi.action('os-force_detach')
def _force_detach(self, req, id, body): def _force_detach(self, req, id, body):
""" """Roll back a bad detach after the volume been disconnected."""
Roll back a bad detach after the volume been disconnected from
the hypervisor.
"""
context = req.environ['cinder.context'] context = req.environ['cinder.context']
self.authorize(context, 'force_detach') self.authorize(context, 'force_detach')
try: try:

View File

@ -68,8 +68,9 @@ class ServicesUpdateTemplate(xmlutil.TemplateBuilder):
class ServiceController(object): class ServiceController(object):
@wsgi.serializers(xml=ServicesIndexTemplate) @wsgi.serializers(xml=ServicesIndexTemplate)
def index(self, req): def index(self, req):
""" """Return a list of all running services.
Return a list of all running services. Filter by host & service name.
Filter by host & service name.
""" """
context = req.environ['cinder.context'] context = req.environ['cinder.context']
authorize(context) authorize(context)

View File

@ -41,7 +41,8 @@ LOG = logging.getLogger(__name__)
class LimitingReader(object): class LimitingReader(object):
"""Reader to limit the size of an incoming request.""" """Reader to limit the size of an incoming request."""
def __init__(self, data, limit): def __init__(self, data, limit):
""" """Initialize LimitingReader.
:param data: Underlying data object :param data: Underlying data object
:param limit: maximum number of bytes the reader should allow :param limit: maximum number of bytes the reader should allow
""" """

View File

@ -54,10 +54,7 @@ class ProjectMapper(APIMapper):
class APIRouter(base_wsgi.Router): class APIRouter(base_wsgi.Router):
""" """Routes requests on the API to the appropriate controller and method."""
Routes requests on the OpenStack API to the appropriate controller
and method.
"""
ExtensionManager = None # override in subclasses ExtensionManager = None # override in subclasses
@classmethod @classmethod

View File

@ -154,7 +154,8 @@ class JSONDeserializer(TextDeserializer):
class XMLDeserializer(TextDeserializer): class XMLDeserializer(TextDeserializer):
def __init__(self, metadata=None): def __init__(self, metadata=None):
""" """Initialize XMLDeserializer.
:param metadata: information needed to deserialize xml into :param metadata: information needed to deserialize xml into
a dictionary. a dictionary.
""" """
@ -269,7 +270,8 @@ class JSONDictSerializer(DictSerializer):
class XMLDictSerializer(DictSerializer): class XMLDictSerializer(DictSerializer):
def __init__(self, metadata=None, xmlns=None): def __init__(self, metadata=None, xmlns=None):
""" """Initialize XMLDictSerializer.
:param metadata: information needed to deserialize xml into :param metadata: information needed to deserialize xml into
a dictionary. a dictionary.
:param xmlns: XML namespace to include with serialized xml :param xmlns: XML namespace to include with serialized xml
@ -631,7 +633,8 @@ class Resource(wsgi.Application):
""" """
def __init__(self, controller, action_peek=None, **deserializers): def __init__(self, controller, action_peek=None, **deserializers):
""" """Initialize Resource.
:param controller: object that implement methods created by routes lib :param controller: object that implement methods created by routes lib
:param action_peek: dictionary of routines for peeking into an action :param action_peek: dictionary of routines for peeking into an action
request body to determine the desired action request body to determine the desired action
@ -1122,14 +1125,10 @@ def _set_request_id_header(req, headers):
class OverLimitFault(webob.exc.HTTPException): class OverLimitFault(webob.exc.HTTPException):
""" """Rate-limited request response."""
Rate-limited request response.
"""
def __init__(self, message, details, retry_time): def __init__(self, message, details, retry_time):
""" """Initialize new `OverLimitFault` with relevant information."""
Initialize new `OverLimitFault` with relevant information.
"""
hdrs = OverLimitFault._retry_after(retry_time) hdrs = OverLimitFault._retry_after(retry_time)
self.wrapped_exc = webob.exc.HTTPRequestEntityTooLarge(headers=hdrs) self.wrapped_exc = webob.exc.HTTPRequestEntityTooLarge(headers=hdrs)
self.content = { self.content = {

View File

@ -75,15 +75,11 @@ class LimitsTemplate(xmlutil.TemplateBuilder):
class LimitsController(object): class LimitsController(object):
""" """Controller for accessing limits in the OpenStack API."""
Controller for accessing limits in the OpenStack API.
"""
@wsgi.serializers(xml=LimitsTemplate) @wsgi.serializers(xml=LimitsTemplate)
def index(self, req): def index(self, req):
""" """Return all global and rate limit information."""
Return all global and rate limit information.
"""
context = req.environ['cinder.context'] context = req.environ['cinder.context']
quotas = QUOTAS.get_project_quotas(context, context.project_id, quotas = QUOTAS.get_project_quotas(context, context.project_id,
usages=False) usages=False)
@ -102,9 +98,7 @@ def create_resource():
class Limit(object): class Limit(object):
""" """Stores information about a limit for HTTP requests."""
Stores information about a limit for HTTP requests.
"""
UNITS = { UNITS = {
1: "SECOND", 1: "SECOND",
@ -116,8 +110,7 @@ class Limit(object):
UNIT_MAP = dict([(v, k) for k, v in UNITS.items()]) UNIT_MAP = dict([(v, k) for k, v in UNITS.items()])
def __init__(self, verb, uri, regex, value, unit): def __init__(self, verb, uri, regex, value, unit):
""" """Initialize a new `Limit`.
Initialize a new `Limit`.
@param verb: HTTP verb (POST, PUT, etc.) @param verb: HTTP verb (POST, PUT, etc.)
@param uri: Human-readable URI @param uri: Human-readable URI
@ -147,8 +140,7 @@ class Limit(object):
self.error_message = msg % self.__dict__ self.error_message = msg % self.__dict__
def __call__(self, verb, url): def __call__(self, verb, url):
""" """Represent a call to this limit from a relevant request.
Represents a call to this limit from a relevant request.
@param verb: string http verb (POST, GET, etc.) @param verb: string http verb (POST, GET, etc.)
@param url: string URL @param url: string URL
@ -216,15 +208,15 @@ DEFAULT_LIMITS = [
class RateLimitingMiddleware(base_wsgi.Middleware): class RateLimitingMiddleware(base_wsgi.Middleware):
""" """Rate-limits requests passing through this middleware.
Rate-limits requests passing through this middleware. All limit information
is stored in memory for this implementation. All limit information is stored in memory for this implementation.
""" """
def __init__(self, application, limits=None, limiter=None, **kwargs): def __init__(self, application, limits=None, limiter=None, **kwargs):
""" """Initialize new `RateLimitingMiddleware`
Initialize new `RateLimitingMiddleware`, which wraps the given WSGI
application and sets up the given limits. This wraps the given WSGI application and sets up the given limits.
@param application: WSGI application to wrap @param application: WSGI application to wrap
@param limits: String describing limits @param limits: String describing limits
@ -248,10 +240,10 @@ class RateLimitingMiddleware(base_wsgi.Middleware):
@webob.dec.wsgify(RequestClass=wsgi.Request) @webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req): def __call__(self, req):
""" """Represent a single call through this middleware.
Represents a single call through this middleware. We should record the
request if we have a limit relevant to it. If no limit is relevant to We should record the request if we have a limit relevant to it.
the request, ignore it. If no limit is relevant to the request, ignore it.
If the request should be rate limited, return a fault telling the user If the request should be rate limited, return a fault telling the user
they are over the limit and need to retry later. they are over the limit and need to retry later.
@ -278,13 +270,10 @@ class RateLimitingMiddleware(base_wsgi.Middleware):
class Limiter(object): class Limiter(object):
""" """Rate-limit checking class which handles limits in memory."""
Rate-limit checking class which handles limits in memory.
"""
def __init__(self, limits, **kwargs): def __init__(self, limits, **kwargs):
""" """Initialize the new `Limiter`.
Initialize the new `Limiter`.
@param limits: List of `Limit` objects @param limits: List of `Limit` objects
""" """
@ -298,14 +287,11 @@ class Limiter(object):
self.levels[username] = self.parse_limits(value) self.levels[username] = self.parse_limits(value)
def get_limits(self, username=None): def get_limits(self, username=None):
""" """Return the limits for a given user."""
Return the limits for a given user.
"""
return [limit.display() for limit in self.levels[username]] return [limit.display() for limit in self.levels[username]]
def check_for_delay(self, verb, url, username=None): def check_for_delay(self, verb, url, username=None):
""" """Check the given verb/user/user triplet for limit.
Check the given verb/user/user triplet for limit.
@return: Tuple of delay (in seconds) and error message (or None, None) @return: Tuple of delay (in seconds) and error message (or None, None)
""" """
@ -329,9 +315,9 @@ class Limiter(object):
# default limit parsing. # default limit parsing.
@staticmethod @staticmethod
def parse_limits(limits): def parse_limits(limits):
""" """Convert a string into a list of Limit instances.
Convert a string into a list of Limit instances. This
implementation expects a semicolon-separated sequence of This implementation expects a semicolon-separated sequence of
parenthesized groups, where each group contains a parenthesized groups, where each group contains a
comma-separated sequence consisting of HTTP method, comma-separated sequence consisting of HTTP method,
user-readable URI, a URI reg-exp, an integer number of user-readable URI, a URI reg-exp, an integer number of
@ -384,8 +370,9 @@ class Limiter(object):
class WsgiLimiter(object): class WsgiLimiter(object):
""" """Rate-limit checking from a WSGI application.
Rate-limit checking from a WSGI application. Uses an in-memory `Limiter`.
Uses an in-memory `Limiter`.
To use, POST ``/<username>`` with JSON data such as:: To use, POST ``/<username>`` with JSON data such as::
@ -400,8 +387,7 @@ class WsgiLimiter(object):
""" """
def __init__(self, limits=None): def __init__(self, limits=None):
""" """Initialize the new `WsgiLimiter`.
Initialize the new `WsgiLimiter`.
@param limits: List of `Limit` objects @param limits: List of `Limit` objects
""" """
@ -409,10 +395,11 @@ class WsgiLimiter(object):
@webob.dec.wsgify(RequestClass=wsgi.Request) @webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, request): def __call__(self, request):
""" """Handles a call to this application.
Handles a call to this application. Returns 204 if the request is
acceptable to the limiter, else a 403 is returned with a relevant Returns 204 if the request is acceptable to the limiter, else a 403
header indicating when the request *will* succeed. is returned with a relevant header indicating when the request
*will* succeed.
""" """
if request.method != "POST": if request.method != "POST":
raise webob.exc.HTTPMethodNotAllowed() raise webob.exc.HTTPMethodNotAllowed()
@ -436,13 +423,10 @@ class WsgiLimiter(object):
class WsgiLimiterProxy(object): class WsgiLimiterProxy(object):
""" """Rate-limit requests based on answers from a remote source."""
Rate-limit requests based on answers from a remote source.
"""
def __init__(self, limiter_address): def __init__(self, limiter_address):
""" """Initialize the new `WsgiLimiterProxy`.
Initialize the new `WsgiLimiterProxy`.
@param limiter_address: IP/port combination of where to request limit @param limiter_address: IP/port combination of where to request limit
""" """
@ -473,9 +457,7 @@ class WsgiLimiterProxy(object):
# decisions are made by a remote server. # decisions are made by a remote server.
@staticmethod @staticmethod
def parse_limits(limits): def parse_limits(limits):
""" """Ignore a limits string--simply doesn't apply for the limit proxy.
Ignore a limits string--simply doesn't apply for the limit
proxy.
@return: Empty list. @return: Empty list.
""" """

View File

@ -37,10 +37,7 @@ LOG = logging.getLogger(__name__)
class APIRouter(cinder.api.openstack.APIRouter): class APIRouter(cinder.api.openstack.APIRouter):
""" """Routes requests on the API to the appropriate controller and method."""
Routes requests on the OpenStack API to the appropriate controller
and method.
"""
ExtensionManager = extensions.ExtensionManager ExtensionManager = extensions.ExtensionManager
def _setup_routes(self, mapper, ext_mgr): def _setup_routes(self, mapper, ext_mgr):

View File

@ -75,15 +75,11 @@ class LimitsTemplate(xmlutil.TemplateBuilder):
class LimitsController(object): class LimitsController(object):
""" """Controller for accessing limits in the OpenStack API."""
Controller for accessing limits in the OpenStack API.
"""
@wsgi.serializers(xml=LimitsTemplate) @wsgi.serializers(xml=LimitsTemplate)
def index(self, req): def index(self, req):
""" """Return all global and rate limit information."""
Return all global and rate limit information.
"""
context = req.environ['cinder.context'] context = req.environ['cinder.context']
quotas = QUOTAS.get_project_quotas(context, context.project_id, quotas = QUOTAS.get_project_quotas(context, context.project_id,
usages=False) usages=False)
@ -102,9 +98,7 @@ def create_resource():
class Limit(object): class Limit(object):
""" """Stores information about a limit for HTTP requests."""
Stores information about a limit for HTTP requests.
"""
UNITS = { UNITS = {
1: "SECOND", 1: "SECOND",
@ -116,8 +110,7 @@ class Limit(object):
UNIT_MAP = dict([(v, k) for k, v in UNITS.items()]) UNIT_MAP = dict([(v, k) for k, v in UNITS.items()])
def __init__(self, verb, uri, regex, value, unit): def __init__(self, verb, uri, regex, value, unit):
""" """Initialize a new `Limit`.
Initialize a new `Limit`.
@param verb: HTTP verb (POST, PUT, etc.) @param verb: HTTP verb (POST, PUT, etc.)
@param uri: Human-readable URI @param uri: Human-readable URI
@ -147,8 +140,7 @@ class Limit(object):
self.error_message = msg % self.__dict__ self.error_message = msg % self.__dict__
def __call__(self, verb, url): def __call__(self, verb, url):
""" """Represent a call to this limit from a relevant request.
Represents a call to this limit from a relevant request.
@param verb: string http verb (POST, GET, etc.) @param verb: string http verb (POST, GET, etc.)
@param url: string URL @param url: string URL
@ -216,14 +208,13 @@ DEFAULT_LIMITS = [
class RateLimitingMiddleware(base_wsgi.Middleware): class RateLimitingMiddleware(base_wsgi.Middleware):
""" """Rate-limits requests passing through this middleware.
Rate-limits requests passing through this middleware. All limit information
is stored in memory for this implementation. All limit information is stored in memory for this implementation.
""" """
def __init__(self, application, limits=None, limiter=None, **kwargs): def __init__(self, application, limits=None, limiter=None, **kwargs):
""" """Initialize new `RateLimitingMiddleware`, which wraps the given WSGI
Initialize new `RateLimitingMiddleware`, which wraps the given WSGI
application and sets up the given limits. application and sets up the given limits.
@param application: WSGI application to wrap @param application: WSGI application to wrap
@ -248,13 +239,12 @@ class RateLimitingMiddleware(base_wsgi.Middleware):
@webob.dec.wsgify(RequestClass=wsgi.Request) @webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req): def __call__(self, req):
""" """Represents a single call through this middleware.
Represents a single call through this middleware. We should record the
request if we have a limit relevant to it. If no limit is relevant to
the request, ignore it.
If the request should be rate limited, return a fault telling the user We should record the request if we have a limit relevant to it.
they are over the limit and need to retry later. If no limit is relevant to the request, ignore it. If the request
should be rate limited, return a fault telling the user they are
over the limit and need to retry later.
""" """
verb = req.method verb = req.method
url = req.url url = req.url
@ -278,13 +268,10 @@ class RateLimitingMiddleware(base_wsgi.Middleware):
class Limiter(object): class Limiter(object):
""" """Rate-limit checking class which handles limits in memory."""
Rate-limit checking class which handles limits in memory.
"""
def __init__(self, limits, **kwargs): def __init__(self, limits, **kwargs):
""" """Initialize the new `Limiter`.
Initialize the new `Limiter`.
@param limits: List of `Limit` objects @param limits: List of `Limit` objects
""" """
@ -298,14 +285,11 @@ class Limiter(object):
self.levels[username] = self.parse_limits(value) self.levels[username] = self.parse_limits(value)
def get_limits(self, username=None): def get_limits(self, username=None):
""" """Return the limits for a given user."""
Return the limits for a given user.
"""
return [limit.display() for limit in self.levels[username]] return [limit.display() for limit in self.levels[username]]
def check_for_delay(self, verb, url, username=None): def check_for_delay(self, verb, url, username=None):
""" """Check the given verb/user/user triplet for limit.
Check the given verb/user/user triplet for limit.
@return: Tuple of delay (in seconds) and error message (or None, None) @return: Tuple of delay (in seconds) and error message (or None, None)
""" """
@ -329,9 +313,9 @@ class Limiter(object):
# default limit parsing. # default limit parsing.
@staticmethod @staticmethod
def parse_limits(limits): def parse_limits(limits):
""" """Convert a string into a list of Limit instances.
Convert a string into a list of Limit instances. This
implementation expects a semicolon-separated sequence of This implementation expects a semicolon-separated sequence of
parenthesized groups, where each group contains a parenthesized groups, where each group contains a
comma-separated sequence consisting of HTTP method, comma-separated sequence consisting of HTTP method,
user-readable URI, a URI reg-exp, an integer number of user-readable URI, a URI reg-exp, an integer number of
@ -384,8 +368,9 @@ class Limiter(object):
class WsgiLimiter(object): class WsgiLimiter(object):
""" """Rate-limit checking from a WSGI application.
Rate-limit checking from a WSGI application. Uses an in-memory `Limiter`.
Uses an in-memory `Limiter`.
To use, POST ``/<username>`` with JSON data such as:: To use, POST ``/<username>`` with JSON data such as::
@ -400,8 +385,7 @@ class WsgiLimiter(object):
""" """
def __init__(self, limits=None): def __init__(self, limits=None):
""" """Initialize the new `WsgiLimiter`.
Initialize the new `WsgiLimiter`.
@param limits: List of `Limit` objects @param limits: List of `Limit` objects
""" """
@ -409,10 +393,11 @@ class WsgiLimiter(object):
@webob.dec.wsgify(RequestClass=wsgi.Request) @webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, request): def __call__(self, request):
""" """Handles a call to this application.
Handles a call to this application. Returns 204 if the request is
acceptable to the limiter, else a 403 is returned with a relevant Returns 204 if the request is acceptable to the limiter, else a 403
header indicating when the request *will* succeed. is returned with a relevant header indicating when the request
*will* succeed.
""" """
if request.method != "POST": if request.method != "POST":
raise webob.exc.HTTPMethodNotAllowed() raise webob.exc.HTTPMethodNotAllowed()
@ -436,13 +421,10 @@ class WsgiLimiter(object):
class WsgiLimiterProxy(object): class WsgiLimiterProxy(object):
""" """Rate-limit requests based on answers from a remote source."""
Rate-limit requests based on answers from a remote source.
"""
def __init__(self, limiter_address): def __init__(self, limiter_address):
""" """Initialize the new `WsgiLimiterProxy`.
Initialize the new `WsgiLimiterProxy`.
@param limiter_address: IP/port combination of where to request limit @param limiter_address: IP/port combination of where to request limit
""" """
@ -473,9 +455,7 @@ class WsgiLimiterProxy(object):
# decisions are made by a remote server. # decisions are made by a remote server.
@staticmethod @staticmethod
def parse_limits(limits): def parse_limits(limits):
""" """Ignore a limits string--simply doesn't apply for the limit proxy.
Ignore a limits string--simply doesn't apply for the limit
proxy.
@return: Empty list. @return: Empty list.
""" """

View File

@ -37,10 +37,7 @@ LOG = logging.getLogger(__name__)
class APIRouter(cinder.api.openstack.APIRouter): class APIRouter(cinder.api.openstack.APIRouter):
""" """Routes requests on the API to the appropriate controller and method."""
Routes requests on the OpenStack API to the appropriate controller
and method.
"""
ExtensionManager = extensions.ExtensionManager ExtensionManager = extensions.ExtensionManager
def _setup_routes(self, mapper, ext_mgr): def _setup_routes(self, mapper, ext_mgr):

View File

@ -26,7 +26,8 @@ def get_view_builder(req):
class ViewBuilder(object): class ViewBuilder(object):
def __init__(self, base_url): def __init__(self, base_url):
""" """Initialize ViewBuilder.
:param base_url: url of the root wsgi application :param base_url: url of the root wsgi application
""" """
self.base_url = base_url self.base_url = base_url

View File

@ -910,9 +910,7 @@ class TemplateBuilder(object):
def make_links(parent, selector=None): def make_links(parent, selector=None):
""" """Attach an Atom <links> element to the parent."""
Attach an Atom <links> element to the parent.
"""
elem = SubTemplateElement(parent, '{%s}link' % XMLNS_ATOM, elem = SubTemplateElement(parent, '{%s}link' % XMLNS_ATOM,
selector=selector) selector=selector)
@ -925,14 +923,17 @@ def make_links(parent, selector=None):
def make_flat_dict(name, selector=None, subselector=None, ns=None): def make_flat_dict(name, selector=None, subselector=None, ns=None):
""" """Utility for simple XML templates.
Utility for simple XML templates that traditionally used
XMLDictSerializer with no metadata. Returns a template element Simple templates are templates that traditionally used
where the top-level element has the given tag name, and where XMLDictSerializer with no metadata.
sub-elements have tag names derived from the object's keys and
text derived from the object's values. This only works for flat Returns a template element where the top-level element has the
dictionary objects, not dictionaries containing nested lists or given tag name, and where sub-elements have tag names derived
dictionaries. from the object's keys and text derived from the object's values.
This only works for flat dictionary objects, not dictionaries
containing nested lists or dictionaries.
""" """
# Set up the names we need... # Set up the names we need...

View File

@ -85,7 +85,7 @@ class API(base.Base):
return backups return backups
def _is_backup_service_enabled(self, volume, volume_host): def _is_backup_service_enabled(self, volume, volume_host):
"""Check if there is an backup service available""" """Check if there is a backup service available."""
topic = CONF.backup_topic topic = CONF.backup_topic
ctxt = context.get_admin_context() ctxt = context.get_admin_context()
services = self.db.service_get_all_by_topic(ctxt, topic) services = self.db.service_get_all_by_topic(ctxt, topic)

View File

@ -61,7 +61,8 @@ class RemoteFsClient(object):
return hashlib.md5(base_str).hexdigest() return hashlib.md5(base_str).hexdigest()
def get_mount_point(self, device_name): def get_mount_point(self, device_name):
""" """Get Mount Point.
:param device_name: example 172.18.194.100:/var/nfs :param device_name: example 172.18.194.100:/var/nfs
""" """
return os.path.join(self._mount_base, return os.path.join(self._mount_base,

View File

@ -47,7 +47,8 @@ class RequestContext(object):
timestamp=None, request_id=None, auth_token=None, timestamp=None, request_id=None, auth_token=None,
overwrite=True, quota_class=None, service_catalog=None, overwrite=True, quota_class=None, service_catalog=None,
**kwargs): **kwargs):
""" """Initialize RequestContext.
:param read_deleted: 'no' indicates deleted records are hidden, 'yes' :param read_deleted: 'no' indicates deleted records are hidden, 'yes'
indicates deleted records are visible, 'only' indicates that indicates deleted records are visible, 'only' indicates that
*only* deleted records are visible. *only* deleted records are visible.

View File

@ -560,20 +560,21 @@ def volume_snapshot_glance_metadata_get(context, snapshot_id):
def volume_glance_metadata_copy_to_snapshot(context, snapshot_id, volume_id): def volume_glance_metadata_copy_to_snapshot(context, snapshot_id, volume_id):
""" """Update the Glance metadata for a snapshot.
Update the Glance metadata for a snapshot by copying all of the key:value
pairs from the originating volume. This is so that a volume created from This will copy all of the key:value pairs from the originating volume,
the snapshot will retain the original metadata. to ensure that a volume created from the snapshot will retain the
original metadata.
""" """
return IMPL.volume_glance_metadata_copy_to_snapshot(context, snapshot_id, return IMPL.volume_glance_metadata_copy_to_snapshot(context, snapshot_id,
volume_id) volume_id)
def volume_glance_metadata_copy_to_volume(context, volume_id, snapshot_id): def volume_glance_metadata_copy_to_volume(context, volume_id, snapshot_id):
""" """Update the Glance metadata from a volume (created from a snapshot).
Update the Glance metadata from a volume (created from a snapshot) by
copying all of the key:value pairs from the originating snapshot. This is This will copy all of the key:value pairs from the originating snapshot,
so that the Glance metadata from the original volume is retained. to ensure that the Glance metadata from the original volume is retained.
""" """
return IMPL.volume_glance_metadata_copy_to_volume(context, volume_id, return IMPL.volume_glance_metadata_copy_to_volume(context, volume_id,
snapshot_id) snapshot_id)
@ -592,10 +593,11 @@ def volume_glance_metadata_delete_by_snapshot(context, snapshot_id):
def volume_glance_metadata_copy_from_volume_to_volume(context, def volume_glance_metadata_copy_from_volume_to_volume(context,
src_volume_id, src_volume_id,
volume_id): volume_id):
""" """Update the Glance metadata for a volume by copying all of the key:value
Update the Glance metadata for a volume by copying all of the key:value pairs from the originating volume.
pairs from the originating volume. This is so that a volume created from
the volume (clone) will retain the original metadata. This is so that a volume created from the volume (clone) will retain the
original metadata.
""" """
return IMPL.volume_glance_metadata_copy_from_volume_to_volume( return IMPL.volume_glance_metadata_copy_from_volume_to_volume(
context, context,
@ -768,8 +770,7 @@ def backup_get_all_by_project(context, project_id):
def backup_update(context, backup_id, values): def backup_update(context, backup_id, values):
""" """Set the given properties on a backup and update it.
Set the given properties on a backup and update it.
Raises NotFound if backup does not exist. Raises NotFound if backup does not exist.
""" """

View File

@ -1681,9 +1681,7 @@ def volume_type_create(context, values):
@require_context @require_context
def volume_type_get_all(context, inactive=False, filters=None): def volume_type_get_all(context, inactive=False, filters=None):
""" """Returns a dict describing all volume_types with name as key."""
Returns a dict describing all volume_types with name as key.
"""
filters = filters or {} filters = filters or {}
read_deleted = "yes" if inactive else "no" read_deleted = "yes" if inactive else "no"
@ -2338,8 +2336,8 @@ def volume_snapshot_glance_metadata_get(context, snapshot_id):
@require_context @require_context
@require_volume_exists @require_volume_exists
def volume_glance_metadata_create(context, volume_id, key, value): def volume_glance_metadata_create(context, volume_id, key, value):
""" """Update the Glance metadata for a volume by adding a new key:value pair.
Update the Glance metadata for a volume by adding a new key:value pair.
This API does not support changing the value of a key once it has been This API does not support changing the value of a key once it has been
created. created.
""" """
@ -2368,10 +2366,11 @@ def volume_glance_metadata_create(context, volume_id, key, value):
@require_context @require_context
@require_snapshot_exists @require_snapshot_exists
def volume_glance_metadata_copy_to_snapshot(context, snapshot_id, volume_id): def volume_glance_metadata_copy_to_snapshot(context, snapshot_id, volume_id):
""" """Update the Glance metadata for a snapshot.
Update the Glance metadata for a snapshot by copying all of the key:value
pairs from the originating volume. This is so that a volume created from This copies all of the key:value pairs from the originating volume, to
the snapshot will retain the original metadata. ensure that a volume created from the snapshot will retain the
original metadata.
""" """
session = get_session() session = get_session()
@ -2392,10 +2391,11 @@ def volume_glance_metadata_copy_to_snapshot(context, snapshot_id, volume_id):
def volume_glance_metadata_copy_from_volume_to_volume(context, def volume_glance_metadata_copy_from_volume_to_volume(context,
src_volume_id, src_volume_id,
volume_id): volume_id):
""" """Update the Glance metadata for a volume.
Update the Glance metadata for a volume by copying all of the key:value
pairs from the originating volume. This is so that a volume created from This copies all all of the key:value pairs from the originating volume,
the volume (clone) will retain the original metadata. to ensure that a volume created from the volume (clone) will
retain the original metadata.
""" """
session = get_session() session = get_session()
@ -2415,10 +2415,10 @@ def volume_glance_metadata_copy_from_volume_to_volume(context,
@require_context @require_context
@require_volume_exists @require_volume_exists
def volume_glance_metadata_copy_to_volume(context, volume_id, snapshot_id): def volume_glance_metadata_copy_to_volume(context, volume_id, snapshot_id):
""" """Update the Glance metadata from a volume (created from a snapshot) by
Update the Glance metadata from a volume (created from a snapshot) by copying all of the key:value pairs from the originating snapshot.
copying all of the key:value pairs from the originating snapshot. This is
so that the Glance metadata from the original volume is retained. This is so that the Glance metadata from the original volume is retained.
""" """
session = get_session() session = get_session()

View File

@ -56,7 +56,8 @@ LOG = logging.getLogger(__name__)
class ConfKeyManager(key_mgr.KeyManager): class ConfKeyManager(key_mgr.KeyManager):
""" """Key Manager that supports one key defined by the fixed_key conf option.
This key manager implementation supports all the methods specified by the This key manager implementation supports all the methods specified by the
key manager interface. This implementation creates a single key in response key manager interface. This implementation creates a single key in response
to all invocations of create_key. Side effects (e.g., raising exceptions) to all invocations of create_key. Side effects (e.g., raising exceptions)

View File

@ -55,9 +55,7 @@ class Key(object):
class SymmetricKey(Key): class SymmetricKey(Key):
""" """This class represents symmetric keys."""
This class represents symmetric keys
"""
def __init__(self, alg, key): def __init__(self, alg, key):
"""Create a new SymmetricKey object. """Create a new SymmetricKey object.

View File

@ -65,10 +65,11 @@ CONF.register_opts(quota_opts)
class DbQuotaDriver(object): class DbQuotaDriver(object):
"""
Driver to perform necessary checks to enforce quotas and obtain """Driver to perform check to enforcement of quotas.
quota information. The default driver utilizes the local
database. Also allows to obtain quota information.
The default driver utilizes the local database.
""" """
def get_by_project(self, context, project_id, resource_name): def get_by_project(self, context, project_id, resource_name):
@ -115,9 +116,7 @@ class DbQuotaDriver(object):
def get_class_quotas(self, context, resources, quota_class, def get_class_quotas(self, context, resources, quota_class,
defaults=True): defaults=True):
""" """Given list of resources, retrieve the quotas for given quota class.
Given a list of resources, retrieve the quotas for the given
quota class.
:param context: The request context, for access checks. :param context: The request context, for access checks.
:param resources: A dictionary of the registered resources. :param resources: A dictionary of the registered resources.
@ -147,8 +146,7 @@ class DbQuotaDriver(object):
def get_project_quotas(self, context, resources, project_id, def get_project_quotas(self, context, resources, project_id,
quota_class=None, defaults=True, quota_class=None, defaults=True,
usages=True): usages=True):
""" """Given a list of resources, retrieve the quotas for the given
Given a list of resources, retrieve the quotas for the given
project. project.
:param context: The request context, for access checks. :param context: The request context, for access checks.
@ -210,10 +208,10 @@ class DbQuotaDriver(object):
return quotas return quotas
def _get_quotas(self, context, resources, keys, has_sync, project_id=None): def _get_quotas(self, context, resources, keys, has_sync, project_id=None):
""" """A helper method which retrieves the quotas for specific resources.
A helper method which retrieves the quotas for the specific
resources identified by keys, and which apply to the current This specific resource is identified by keys, and which apply to the
context. current context.
:param context: The request context, for access checks. :param context: The request context, for access checks.
:param resources: A dictionary of the registered resources. :param resources: A dictionary of the registered resources.
@ -392,9 +390,9 @@ class DbQuotaDriver(object):
db.reservation_rollback(context, reservations, project_id=project_id) db.reservation_rollback(context, reservations, project_id=project_id)
def destroy_all_by_project(self, context, project_id): def destroy_all_by_project(self, context, project_id):
""" """Destroy all that is associated with a project.
Destroy all quotas, usages, and reservations associated with a
project. This includes quotas, usages and reservations.
:param context: The request context, for access checks. :param context: The request context, for access checks.
:param project_id: The ID of the project being deleted. :param project_id: The ID of the project being deleted.
@ -418,8 +416,7 @@ class BaseResource(object):
"""Describe a single resource for quota checking.""" """Describe a single resource for quota checking."""
def __init__(self, name, flag=None): def __init__(self, name, flag=None):
""" """Initializes a Resource.
Initializes a Resource.
:param name: The name of the resource, i.e., "volumes". :param name: The name of the resource, i.e., "volumes".
:param flag: The name of the flag or configuration option :param flag: The name of the flag or configuration option
@ -431,9 +428,7 @@ class BaseResource(object):
self.flag = flag self.flag = flag
def quota(self, driver, context, **kwargs): def quota(self, driver, context, **kwargs):
""" """Given a driver and context, obtain the quota for this resource.
Given a driver and context, obtain the quota for this
resource.
:param driver: A quota driver. :param driver: A quota driver.
:param context: The request context. :param context: The request context.
@ -526,10 +521,7 @@ class AbsoluteResource(BaseResource):
class CountableResource(AbsoluteResource): class CountableResource(AbsoluteResource):
""" """Describe a resource where counts aren't based only on the project ID."""
Describe a resource where the counts aren't based solely on the
project ID.
"""
def __init__(self, name, count, flag=None): def __init__(self, name, count, flag=None):
"""Initializes a CountableResource. """Initializes a CountableResource.
@ -568,8 +560,7 @@ class VolumeTypeResource(ReservableResource):
"""ReservableResource for a specific volume type.""" """ReservableResource for a specific volume type."""
def __init__(self, part_name, volume_type): def __init__(self, part_name, volume_type):
""" """Initializes a VolumeTypeResource.
Initializes a VolumeTypeResource.
:param part_name: The kind of resource, i.e., "volumes". :param part_name: The kind of resource, i.e., "volumes".
:param volume_type: The volume type for this resource. :param volume_type: The volume type for this resource.
@ -802,8 +793,7 @@ class QuotaEngine(object):
"%s") % reservations) "%s") % reservations)
def destroy_all_by_project(self, context, project_id): def destroy_all_by_project(self, context, project_id):
""" """Destroy all quotas, usages, and reservations associated with a
Destroy all quotas, usages, and reservations associated with a
project. project.
:param context: The request context, for access checks. :param context: The request context, for access checks.

View File

@ -45,11 +45,11 @@ LOG = logging.getLogger(__name__)
class SchedulerOptions(object): class SchedulerOptions(object):
""" """SchedulerOptions monitors a local .json file for changes.
SchedulerOptions monitors a local .json file for changes and loads it
if needed. This file is converted to a data structure and passed into The file is reloaded if needed and converted to a data structure and
the filtering and weighing functions which can use it for dynamic passed into the filtering and weighing functions which can use it
configuration. for dynamic configuration.
""" """
def __init__(self): def __init__(self):

View File

@ -31,10 +31,11 @@ ATOMNS = "{http://www.w3.org/2005/Atom}"
class LimiterTest(test.TestCase): class LimiterTest(test.TestCase):
""" """Unit tests for the `cinder.api.common.limited` method.
Unit tests for the `cinder.api.common.limited` method which takes
in a list of items and, depending on the 'offset' and 'limit' GET params, This method takes in a list of items and, depending on the 'offset'
returns a subset or complete set of the given items. and 'limit' GET params, returns a subset or complete set of the given
items.
""" """
def setUp(self): def setUp(self):
@ -161,9 +162,9 @@ class LimiterTest(test.TestCase):
class PaginationParamsTest(test.TestCase): class PaginationParamsTest(test.TestCase):
""" """Unit tests for `cinder.api.common.get_pagination_params` method.
Unit tests for the `cinder.api.common.get_pagination_params`
method which takes in a request object and returns 'marker' and 'limit' This method takes in a request object and returns 'marker' and 'limit'
GET params. GET params.
""" """

View File

@ -67,9 +67,7 @@ class BaseLimitTestSuite(test.TestCase):
class LimitsControllerTest(BaseLimitTestSuite): class LimitsControllerTest(BaseLimitTestSuite):
""" """Tests for `limits.LimitsController` class."""
Tests for `limits.LimitsController` class.
"""
def setUp(self): def setUp(self):
"""Run before each test.""" """Run before each test."""
@ -233,9 +231,7 @@ class TestLimiter(limits.Limiter):
class LimitMiddlewareTest(BaseLimitTestSuite): class LimitMiddlewareTest(BaseLimitTestSuite):
""" """Tests for the `limits.RateLimitingMiddleware` class."""
Tests for the `limits.RateLimitingMiddleware` class.
"""
@webob.dec.wsgify @webob.dec.wsgify
def _empty_app(self, request): def _empty_app(self, request):
@ -301,9 +297,7 @@ class LimitMiddlewareTest(BaseLimitTestSuite):
class LimitTest(BaseLimitTestSuite): class LimitTest(BaseLimitTestSuite):
""" """Tests for the `limits.Limit` class."""
Tests for the `limits.Limit` class.
"""
def test_GET_no_delay(self): def test_GET_no_delay(self):
"""Test a limit handles 1 GET per second.""" """Test a limit handles 1 GET per second."""
@ -333,10 +327,7 @@ class LimitTest(BaseLimitTestSuite):
class ParseLimitsTest(BaseLimitTestSuite): class ParseLimitsTest(BaseLimitTestSuite):
""" """Tests for the default limits parser in the `limits.Limiter` class."""
Tests for the default limits parser in the in-memory
`limits.Limiter` class.
"""
def test_invalid(self): def test_invalid(self):
"""Test that parse_limits() handles invalid input correctly.""" """Test that parse_limits() handles invalid input correctly."""
@ -399,9 +390,7 @@ class ParseLimitsTest(BaseLimitTestSuite):
class LimiterTest(BaseLimitTestSuite): class LimiterTest(BaseLimitTestSuite):
""" """Tests for the in-memory `limits.Limiter` class."""
Tests for the in-memory `limits.Limiter` class.
"""
def setUp(self): def setUp(self):
"""Run before each test.""" """Run before each test."""
@ -422,23 +411,19 @@ class LimiterTest(BaseLimitTestSuite):
return sum(item for item in results if item) return sum(item for item in results if item)
def test_no_delay_GET(self): def test_no_delay_GET(self):
""" """no delay on a single call for a limit verb we didn"t set."""
Simple test to ensure no delay on a single call for a limit verb we
didn"t set.
"""
delay = self.limiter.check_for_delay("GET", "/anything") delay = self.limiter.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None)) self.assertEqual(delay, (None, None))
def test_no_delay_PUT(self): def test_no_delay_PUT(self):
""" """no delay on a single call for a known limit."""
Simple test to ensure no delay on a single call for a known limit.
"""
delay = self.limiter.check_for_delay("PUT", "/anything") delay = self.limiter.check_for_delay("PUT", "/anything")
self.assertEqual(delay, (None, None)) self.assertEqual(delay, (None, None))
def test_delay_PUT(self): def test_delay_PUT(self):
""" """test delay on 11th put request.
Ensure the 11th PUT will result in a delay of 6.0 seconds until
the 11th PUT will result in a delay of 6.0 seconds until
the next request will be granced. the next request will be granced.
""" """
expected = [None] * 10 + [6.0] expected = [None] * 10 + [6.0]
@ -447,9 +432,10 @@ class LimiterTest(BaseLimitTestSuite):
self.assertEqual(expected, results) self.assertEqual(expected, results)
def test_delay_POST(self): def test_delay_POST(self):
""" """test delay of 8th post request.
Ensure the 8th POST will result in a delay of 6.0 seconds until
the next request will be granced. Ensure that the 8th POST will result in a delay of 6.0 seconds
until the next request will be granced.
""" """
expected = [None] * 7 expected = [None] * 7
results = list(self._check(7, "POST", "/anything")) results = list(self._check(7, "POST", "/anything"))
@ -460,9 +446,7 @@ class LimiterTest(BaseLimitTestSuite):
self.failUnlessAlmostEqual(expected, results, 8) self.failUnlessAlmostEqual(expected, results, 8)
def test_delay_GET(self): def test_delay_GET(self):
""" """Ensure the 11th GET will result in NO delay."""
Ensure the 11th GET will result in NO delay.
"""
expected = [None] * 11 expected = [None] * 11
results = list(self._check(11, "GET", "/anything")) results = list(self._check(11, "GET", "/anything"))
self.assertEqual(expected, results) self.assertEqual(expected, results)
@ -472,10 +456,11 @@ class LimiterTest(BaseLimitTestSuite):
self.assertEqual(expected, results) self.assertEqual(expected, results)
def test_delay_PUT_volumes(self): def test_delay_PUT_volumes(self):
""" """Test limit of PUT on /volumes.
Ensure PUT on /volumes limits at 5 requests, and PUT elsewhere is still
OK after 5 requests...but then after 11 total requests, PUT limiting Ensure PUT on /volumes limits at 5 requests, and PUT elsewhere is
kicks in. still OK after 5 requests...
but then after 11 total requests, PUT limiting kicks in.
""" """
# First 6 requests on PUT /volumes # First 6 requests on PUT /volumes
expected = [None] * 5 + [12.0] expected = [None] * 5 + [12.0]
@ -488,7 +473,8 @@ class LimiterTest(BaseLimitTestSuite):
self.assertEqual(expected, results) self.assertEqual(expected, results)
def test_delay_PUT_wait(self): def test_delay_PUT_wait(self):
""" """Test limit on PUT is lifted.
Ensure after hitting the limit and then waiting for the correct Ensure after hitting the limit and then waiting for the correct
amount of time, the limit will be lifted. amount of time, the limit will be lifted.
""" """
@ -504,9 +490,7 @@ class LimiterTest(BaseLimitTestSuite):
self.assertEqual(expected, results) self.assertEqual(expected, results)
def test_multiple_delays(self): def test_multiple_delays(self):
""" """Ensure multiple requests still get a delay."""
Ensure multiple requests still get a delay.
"""
expected = [None] * 10 + [6.0] * 10 expected = [None] * 10 + [6.0] * 10
results = list(self._check(20, "PUT", "/anything")) results = list(self._check(20, "PUT", "/anything"))
self.assertEqual(expected, results) self.assertEqual(expected, results)
@ -522,16 +506,12 @@ class LimiterTest(BaseLimitTestSuite):
self.assertEqual(expected, results) self.assertEqual(expected, results)
def test_user_limit(self): def test_user_limit(self):
""" """Test user-specific limits."""
Test user-specific limits.
"""
self.assertEqual(self.limiter.levels['user3'], []) self.assertEqual(self.limiter.levels['user3'], [])
self.assertEqual(len(self.limiter.levels['user0']), 2) self.assertEqual(len(self.limiter.levels['user0']), 2)
def test_multiple_users(self): def test_multiple_users(self):
""" """Tests involving multiple users."""
Tests involving multiple users.
"""
# User0 # User0
expected = [None] * 2 + [30.0] * 8 expected = [None] * 2 + [30.0] * 8
@ -580,9 +560,7 @@ class LimiterTest(BaseLimitTestSuite):
class WsgiLimiterTest(BaseLimitTestSuite): class WsgiLimiterTest(BaseLimitTestSuite):
""" """Tests for `limits.WsgiLimiter` class."""
Tests for `limits.WsgiLimiter` class.
"""
def setUp(self): def setUp(self):
"""Run before each test.""" """Run before each test."""
@ -594,9 +572,13 @@ class WsgiLimiterTest(BaseLimitTestSuite):
return jsonutils.dumps({"verb": verb, "path": path}) return jsonutils.dumps({"verb": verb, "path": path})
def _request(self, verb, url, username=None): def _request(self, verb, url, username=None):
"""Make sure that POSTing to the given url causes the given username """Assert that POSTing to given url triggers given action.
to perform the given action. Make the internal rate limiter return
delay and make sure that the WSGI app returns the correct response. Ensure POSTing to the given url causes the given username
to perform the given action.
Make the internal rate limiter return delay and make sure that the
WSGI app returns the correct response.
""" """
if username: if username:
request = webob.Request.blank("/%s" % username) request = webob.Request.blank("/%s" % username)
@ -651,9 +633,7 @@ class WsgiLimiterTest(BaseLimitTestSuite):
class FakeHttplibSocket(object): class FakeHttplibSocket(object):
""" """Fake `httplib.HTTPResponse` replacement."""
Fake `httplib.HTTPResponse` replacement.
"""
def __init__(self, response_string): def __init__(self, response_string):
"""Initialize new `FakeHttplibSocket`.""" """Initialize new `FakeHttplibSocket`."""
@ -665,22 +645,19 @@ class FakeHttplibSocket(object):
class FakeHttplibConnection(object): class FakeHttplibConnection(object):
""" """Fake `httplib.HTTPConnection`."""
Fake `httplib.HTTPConnection`.
"""
def __init__(self, app, host): def __init__(self, app, host):
""" """Initialize `FakeHttplibConnection`."""
Initialize `FakeHttplibConnection`.
"""
self.app = app self.app = app
self.host = host self.host = host
def request(self, method, path, body="", headers=None): def request(self, method, path, body="", headers=None):
""" """Fake method for request.
Requests made via this connection actually get translated and routed
into our WSGI app, we then wait for the response and turn it back into Requests made via this connection actually get translated and
an `httplib.HTTPResponse`. routed into our WSGI app, we then wait for the response and turn
it back into an `httplib.HTTPResponse`.
""" """
if not headers: if not headers:
headers = {} headers = {}
@ -741,12 +718,11 @@ def wire_HTTPConnection_to_WSGI(host, app):
class WsgiLimiterProxyTest(BaseLimitTestSuite): class WsgiLimiterProxyTest(BaseLimitTestSuite):
""" """Tests for the `limits.WsgiLimiterProxy` class."""
Tests for the `limits.WsgiLimiterProxy` class.
"""
def setUp(self): def setUp(self):
""" """setUp for test suite.
Do some nifty HTTP/WSGI magic which allows for WSGI to be called Do some nifty HTTP/WSGI magic which allows for WSGI to be called
directly by something like the `httplib` library. directly by something like the `httplib` library.
""" """

View File

@ -412,9 +412,7 @@ class SnapshotSerializerTest(test.TestCase):
class SnapshotsUnprocessableEntityTestCase(test.TestCase): class SnapshotsUnprocessableEntityTestCase(test.TestCase):
""" """Tests of places we throw 422 Unprocessable Entity."""
Tests of places we throw 422 Unprocessable Entity from
"""
def setUp(self): def setUp(self):
super(SnapshotsUnprocessableEntityTestCase, self).setUp() super(SnapshotsUnprocessableEntityTestCase, self).setUp()

View File

@ -1131,9 +1131,7 @@ class TestVolumeCreateRequestXMLDeserializer(test.TestCase):
class VolumesUnprocessableEntityTestCase(test.TestCase): class VolumesUnprocessableEntityTestCase(test.TestCase):
""" """Tests of places we throw 422 Unprocessable Entity from."""
Tests of places we throw 422 Unprocessable Entity from
"""
def setUp(self): def setUp(self):
super(VolumesUnprocessableEntityTestCase, self).setUp() super(VolumesUnprocessableEntityTestCase, self).setUp()

View File

@ -67,9 +67,8 @@ class BaseLimitTestSuite(test.TestCase):
class LimitsControllerTest(BaseLimitTestSuite): class LimitsControllerTest(BaseLimitTestSuite):
"""
Tests for `limits.LimitsController` class. """Tests for `limits.LimitsController` class."""
"""
def setUp(self): def setUp(self):
"""Run before each test.""" """Run before each test."""
@ -233,9 +232,8 @@ class TestLimiter(limits.Limiter):
class LimitMiddlewareTest(BaseLimitTestSuite): class LimitMiddlewareTest(BaseLimitTestSuite):
"""
Tests for the `limits.RateLimitingMiddleware` class. """Tests for the `limits.RateLimitingMiddleware` class."""
"""
@webob.dec.wsgify @webob.dec.wsgify
def _empty_app(self, request): def _empty_app(self, request):
@ -301,9 +299,8 @@ class LimitMiddlewareTest(BaseLimitTestSuite):
class LimitTest(BaseLimitTestSuite): class LimitTest(BaseLimitTestSuite):
"""
Tests for the `limits.Limit` class. """Tests for the `limits.Limit` class."""
"""
def test_GET_no_delay(self): def test_GET_no_delay(self):
"""Test a limit handles 1 GET per second.""" """Test a limit handles 1 GET per second."""
@ -333,10 +330,8 @@ class LimitTest(BaseLimitTestSuite):
class ParseLimitsTest(BaseLimitTestSuite): class ParseLimitsTest(BaseLimitTestSuite):
"""
Tests for the default limits parser in the in-memory """Tests for the default limits parser in the `limits.Limiter` class."""
`limits.Limiter` class.
"""
def test_invalid(self): def test_invalid(self):
"""Test that parse_limits() handles invalid input correctly.""" """Test that parse_limits() handles invalid input correctly."""
@ -399,9 +394,8 @@ class ParseLimitsTest(BaseLimitTestSuite):
class LimiterTest(BaseLimitTestSuite): class LimiterTest(BaseLimitTestSuite):
"""
Tests for the in-memory `limits.Limiter` class. """Tests for the in-memory `limits.Limiter` class."""
"""
def setUp(self): def setUp(self):
"""Run before each test.""" """Run before each test."""
@ -422,22 +416,18 @@ class LimiterTest(BaseLimitTestSuite):
return sum(item for item in results if item) return sum(item for item in results if item)
def test_no_delay_GET(self): def test_no_delay_GET(self):
""" """Ensure no delay on a single call for a limit verb we didn't set."""
Simple test to ensure no delay on a single call for a limit verb we
didn"t set.
"""
delay = self.limiter.check_for_delay("GET", "/anything") delay = self.limiter.check_for_delay("GET", "/anything")
self.assertEqual(delay, (None, None)) self.assertEqual(delay, (None, None))
def test_no_delay_PUT(self): def test_no_delay_PUT(self):
""" """Ensure no delay on a single call for a known limit."""
Simple test to ensure no delay on a single call for a known limit.
"""
delay = self.limiter.check_for_delay("PUT", "/anything") delay = self.limiter.check_for_delay("PUT", "/anything")
self.assertEqual(delay, (None, None)) self.assertEqual(delay, (None, None))
def test_delay_PUT(self): def test_delay_PUT(self):
""" """Test delay on 11th PUT request.
Ensure the 11th PUT will result in a delay of 6.0 seconds until Ensure the 11th PUT will result in a delay of 6.0 seconds until
the next request will be granced. the next request will be granced.
""" """
@ -447,7 +437,8 @@ class LimiterTest(BaseLimitTestSuite):
self.assertEqual(expected, results) self.assertEqual(expected, results)
     def test_delay_POST(self):
-        """
+        """Test delay on 8th POST request.
+
         Ensure the 8th POST will result in a delay of 6.0 seconds until
         the next request will be granced.
         """
@@ -460,9 +451,7 @@ class LimiterTest(BaseLimitTestSuite):
         self.failUnlessAlmostEqual(expected, results, 8)

     def test_delay_GET(self):
-        """
-        Ensure the 11th GET will result in NO delay.
-        """
+        """Ensure the 11th GET will result in NO delay."""
         expected = [None] * 11
         results = list(self._check(11, "GET", "/anything"))
         self.assertEqual(expected, results)
@@ -472,10 +461,11 @@ class LimiterTest(BaseLimitTestSuite):
         self.assertEqual(expected, results)

     def test_delay_PUT_volumes(self):
-        """
-        Ensure PUT on /volumes limits at 5 requests, and PUT elsewhere is still
-        OK after 5 requests...but then after 11 total requests, PUT limiting
-        kicks in.
+        """Test delay on /volumes.
+
+        Ensure PUT on /volumes limits at 5 requests, and PUT elsewhere
+        is still OK after 5 requests...but then after 11 total requests,
+        PUT limiting kicks in.
         """
         # First 6 requests on PUT /volumes
         expected = [None] * 5 + [12.0]
@@ -488,9 +478,10 @@ class LimiterTest(BaseLimitTestSuite):
         self.assertEqual(expected, results)

     def test_delay_PUT_wait(self):
-        """
-        Ensure after hitting the limit and then waiting for the correct
-        amount of time, the limit will be lifted.
+        """Test limit is lifted again.
+
+        Ensure after hitting the limit and then waiting for
+        the correct amount of time, the limit will be lifted.
         """
         expected = [None] * 10 + [6.0]
         results = list(self._check(11, "PUT", "/anything"))
@@ -504,9 +495,7 @@ class LimiterTest(BaseLimitTestSuite):
         self.assertEqual(expected, results)

     def test_multiple_delays(self):
-        """
-        Ensure multiple requests still get a delay.
-        """
+        """Ensure multiple requests still get a delay."""
         expected = [None] * 10 + [6.0] * 10
         results = list(self._check(20, "PUT", "/anything"))
         self.assertEqual(expected, results)
@@ -522,16 +511,12 @@ class LimiterTest(BaseLimitTestSuite):
         self.assertEqual(expected, results)

     def test_user_limit(self):
-        """
-        Test user-specific limits.
-        """
+        """Test user-specific limits."""
         self.assertEqual(self.limiter.levels['user3'], [])
         self.assertEqual(len(self.limiter.levels['user0']), 2)

     def test_multiple_users(self):
-        """
-        Tests involving multiple users.
-        """
+        """Tests involving multiple users."""
         # User0
         expected = [None] * 2 + [30.0] * 8
@@ -580,9 +565,8 @@ class LimiterTest(BaseLimitTestSuite):

 class WsgiLimiterTest(BaseLimitTestSuite):
-    """
-    Tests for `limits.WsgiLimiter` class.
-    """
+    """Tests for `limits.WsgiLimiter` class."""
+
     def setUp(self):
         """Run before each test."""
@@ -650,9 +634,8 @@ class WsgiLimiterTest(BaseLimitTestSuite):

 class FakeHttplibSocket(object):
-    """
-    Fake `httplib.HTTPResponse` replacement.
-    """
+    """Fake `httplib.HTTPResponse` replacement."""
+
     def __init__(self, response_string):
         """Initialize new `FakeHttplibSocket`."""
@@ -664,22 +647,20 @@ class FakeHttplibSocket(object):

 class FakeHttplibConnection(object):
-    """
-    Fake `httplib.HTTPConnection`.
-    """
+    """Fake `httplib.HTTPConnection`."""
+
     def __init__(self, app, host):
-        """
-        Initialize `FakeHttplibConnection`.
-        """
+        """Initialize `FakeHttplibConnection`."""
         self.app = app
         self.host = host

     def request(self, method, path, body="", headers=None):
-        """
-        Requests made via this connection actually get translated and routed
-        into our WSGI app, we then wait for the response and turn it back into
-        an `httplib.HTTPResponse`.
+        """Fake request handler.
+
+        Requests made via this connection actually get translated and
+        routed into our WSGI app, we then wait for the response and turn
+        it back into an `httplib.HTTPResponse`.
         """
         if not headers:
             headers = {}
@@ -740,12 +721,12 @@ def wire_HTTPConnection_to_WSGI(host, app):

 class WsgiLimiterProxyTest(BaseLimitTestSuite):
-    """
-    Tests for the `limits.WsgiLimiterProxy` class.
-    """
+    """Tests for the `limits.WsgiLimiterProxy` class."""
+
     def setUp(self):
-        """
+        """setUp() for WsgiLimiterProxyTest.
+
         Do some nifty HTTP/WSGI magic which allows for WSGI to be called
         directly by something like the `httplib` library.
         """
@@ -37,7 +37,9 @@ from cinder import utils

 class MockKeyManager(key_mgr.KeyManager):
-    """
+    """Mocking manager for integration tests.
+
     This mock key manager implementation supports all the methods specified
     by the key manager interface. This implementation stores keys within a
     dictionary, and as a result, it is not acceptable for use across different
@@ -36,9 +36,7 @@ class FilterSchedulerTestCase(test_scheduler.SchedulerTestCase):
     driver_cls = filter_scheduler.FilterScheduler

     def test_create_volume_no_hosts(self):
-        """
-        Ensure empty hosts & child_zones result in NoValidHosts exception.
-        """
+        """Ensure empty hosts/child_zones result in NoValidHosts exception."""
         def _fake_empty_call_zone_method(*args, **kwargs):
             return []
@@ -34,9 +34,9 @@ DATA = ''

 def stub_out_https_backend(stubs):
-    """
-    Stubs out the httplib.HTTPRequest.getresponse to return
-    faked-out data instead of grabbing actual contents of a resource
+    """Stub out the httplib.HTTPRequest.getresponse.
+
+    return faked-out data instead of grabbing actual contents of a resource.

     The stubbed getresponse() returns an iterator over
     the data "I am a teapot, short and stout\n"
@@ -63,8 +63,8 @@ class BackupTestCase(test.TestCase):
                                 size=0,
                                 object_count=0,
                                 project_id='fake'):
-        """
-        Create a backup entry in the DB.
+        """Create a backup entry in the DB.
+
         Return the entry ID
         """
         backup = {}
@@ -87,8 +87,8 @@ class BackupTestCase(test.TestCase):
                                 display_description='this is a test volume',
                                 status='backing-up',
                                 size=1):
-        """
-        Create a volume entry in the DB.
+        """Create a volume entry in the DB.
+
         Return the entry ID
         """
         vol = {}
@@ -47,9 +47,10 @@ def _get_connect_string(backend,
                         user="openstack_citest",
                         passwd="openstack_citest",
                         database="openstack_citest"):
-    """
+    """Return connect string.
+
     Try to get a connection with a very specific set of values, if we get
-    these then we'll run the tests, otherwise they are skipped
+    these then we'll run the tests, otherwise they are skipped.
     """
     if backend == "postgres":
         backend = "postgresql+psycopg2"
@@ -227,7 +228,8 @@ class TestMigrations(test.TestCase):
             os.unsetenv('PGUSER')

     def test_walk_versions(self):
-        """
+        """Test walk versions.
+
         Walks all version scripts for each tested database, ensuring
         that there are no errors in the version scripts for each engine
         """
@@ -235,7 +237,8 @@ class TestMigrations(test.TestCase):
             self._walk_versions(engine, self.snake_walk)

     def test_mysql_connect_fail(self):
-        """
+        """Test for mysql connection failure.
+
         Test that we can trigger a mysql connection failure and we fail
         gracefully to ensure we don't break people without mysql
         """
@@ -244,9 +247,7 @@ class TestMigrations(test.TestCase):

     @testtools.skipUnless(_have_mysql(), "mysql not available")
     def test_mysql_innodb(self):
-        """
-        Test that table creation on mysql only builds InnoDB tables
-        """
+        """Test that table creation on mysql only builds InnoDB tables."""
         # add this to the global lists to make reset work with it, it's removed
         # automaticaly in tearDown so no need to clean it up here.
         connect_string = _get_connect_string('mysql')
@@ -277,9 +278,10 @@ class TestMigrations(test.TestCase):
         self.assertEqual(count, 0, "%d non InnoDB tables created" % count)

     def test_postgresql_connect_fail(self):
-        """
+        """Test connection failure on PostgrSQL.
+
         Test that we can trigger a postgres connection failure and we fail
-        gracefully to ensure we don't break people without postgres
+        gracefully to ensure we don't break people without postgres.
         """
         if _is_backend_avail('postgres', user="openstack_cifail"):
             self.fail("Shouldn't have connected")
@@ -346,7 +348,7 @@ class TestMigrations(test.TestCase):
                                                  TestMigrations.REPOSITORY))

     def _migrate_up(self, engine, version, with_data=False):
-        """migrate up to a new version of the db.
+        """Migrate up to a new version of the db.

         We allow for data insertion and post checks at every
         migration version with special _prerun_### and
@@ -75,7 +75,8 @@ class FakeSnapshot(object):

 class FakeResponce(object):
     def __init__(self, status):
-        """
+        """Initialize FakeResponce.
+
         :param status: Either 'failed' or 'passed'
         """
         self.Status = status
@@ -44,9 +44,7 @@ class XIVDS8KFakeProxyDriver(object):
     """Fake IBM XIV and DS8K Proxy Driver."""

     def __init__(self, xiv_ds8k_info, logger, expt, driver=None):
-        """
-        Initialize Proxy
-        """
+        """Initialize Proxy."""
         self.xiv_ds8k_info = xiv_ds8k_info
         self.logger = logger
@@ -59,9 +59,7 @@ class API(base.Base):
         return dict(rv.iteritems())

     def delete(self, context, transfer_id):
-        """
-        Make the RPC call to delete a volume transfer.
-        """
+        """Make the RPC call to delete a volume transfer."""
         volume_api.check_policy(context, 'delete_transfer')
         transfer = self.db.transfer_get(context, transfer_id)
@@ -226,11 +226,12 @@ class SSHPool(pools.Pool):
             raise paramiko.SSHException(msg)

     def get(self):
-        """
-        Return an item from the pool, when one is available. This may
-        cause the calling greenthread to block. Check if a connection is active
-        before returning it. For dead connections create and return a new
-        connection.
+        """Return an item from the pool, when one is available.
+
+        This may cause the calling greenthread to block. Check if a
+        connection is active before returning it.
+
+        For dead connections create and return a new connection.
         """
         conn = super(SSHPool, self).get()
         if conn:
@@ -281,7 +281,8 @@ class NexentaNfsDriver(nfs.NfsDriver):  # pylint: disable=R0921
                                           volume['name'], 'volume')

     def _get_mount_point_for_share(self, nfs_share):
-        """
+        """Get Mount point for a share.
+
         :param nfs_share: example 172.18.194.100:/var/nfs
         """
         return os.path.join(self.configuration.nexenta_mount_point_base,
@@ -127,8 +127,7 @@ class HP3PARFCDriver(cinder.volume.driver.FibreChannelDriver):

     @utils.synchronized('3par', external=True)
     def create_volume_from_snapshot(self, volume, snapshot):
-        """
-        Creates a volume from a snapshot.
+        """Create a volume from a snapshot.

         TODO: support using the size from the user.
         """
@@ -198,8 +198,7 @@ class HP3PARISCSIDriver(cinder.volume.driver.ISCSIDriver):

     @utils.synchronized('3par', external=True)
     def create_volume_from_snapshot(self, volume, snapshot):
-        """
-        Creates a volume from a snapshot.
+        """Creates a volume from a snapshot.

         TODO: support using the size from the user.
         """
@@ -279,8 +279,8 @@ class ZadaraVPSAISCSIDriver(driver.ISCSIDriver):
         self.configuration.append_config_values(zadara_opts)

     def do_setup(self, context):
-        """
-        Any initialization the volume driver does while starting.
+        """Any initialization the volume driver does while starting.
+
         Establishes initial connection with VPSA and retrieves access_key.
         """
         self.vpsa = ZadaraVPSAConnection(self.configuration)
@@ -295,8 +295,7 @@ class ZadaraVPSAISCSIDriver(driver.ISCSIDriver):

     def _xml_parse_helper(self, xml_tree, first_level, search_tuple,
                           first=True):
-        """
-        Helper for parsing VPSA's XML output.
+        """Helper for parsing VPSA's XML output.

         Returns single item if first==True or list for multiple selection.
         If second argument in search_tuple is None - returns all items with
@@ -406,8 +405,7 @@ class ZadaraVPSAISCSIDriver(driver.ISCSIDriver):
                                  size=volume['size'])

     def delete_volume(self, volume):
-        """
-        Delete volume.
+        """Delete volume.

         Return ok if doesn't exist. Auto detach from all servers.
         """
@@ -565,8 +563,7 @@ class ZadaraVPSAISCSIDriver(driver.ISCSIDriver):
             pass

     def initialize_connection(self, volume, connector):
-        """
-        Attach volume to initiator/host.
+        """Attach volume to initiator/host.

         During this call VPSA exposes volume to particular Initiator. It also
         creates a 'server' entity for Initiator (if it was not created before)
@@ -628,9 +625,7 @@ class ZadaraVPSAISCSIDriver(driver.ISCSIDriver):
                       'data': properties}

     def terminate_connection(self, volume, connector, **kwargs):
-        """
-        Detach volume from the initiator.
-        """
+        """Detach volume from the initiator."""
         # Get server name for IQN
         initiator_name = connector['initiator']
         vpsa_srv = self._get_server_name(initiator_name)
@@ -51,8 +51,9 @@ import testtools

 class _AnsiColorizer(object):
-    """
-    A colorizer is an object that loosely wraps around a stream, allowing
+    """ANSI colorizer that wraps a stream object.
+
+    colorizer is an object that loosely wraps around a stream, allowing
     callers to write text to the stream in a particular color.

     Colorizer classes must implement C{supported()} and C{write(text, color)}.
@@ -64,9 +65,12 @@ class _AnsiColorizer(object):
         self.stream = stream

     def supported(cls, stream=sys.stdout):
-        """
+        """Check if platform is supported.
+
         A class method that returns True if the current platform supports
-        coloring terminal output using this method. Returns False otherwise.
+        coloring terminal output using this method.
+
+        Returns False otherwise.
         """
         if not stream.isatty():
             return False  # auto color only on TTYs
@@ -87,8 +91,7 @@ class _AnsiColorizer(object):
     supported = classmethod(supported)

     def write(self, text, color):
-        """
-        Write the given text to the stream in the given color.
+        """Write the given text to the stream in the given color.

         @param text: Text to be written to the stream.
@@ -99,9 +102,7 @@ class _AnsiColorizer(object):

 class _Win32Colorizer(object):
-    """
-    See _AnsiColorizer docstring.
-    """
+    """See _AnsiColorizer docstring."""
     def __init__(self, stream):
         import win32console
         red, green, blue, bold = (win32console.FOREGROUND_RED,
@@ -149,9 +150,7 @@ class _Win32Colorizer(object):

 class _NullColorizer(object):
-    """
-    See _AnsiColorizer docstring.
-    """
+    """See _AnsiColorizer docstring."""
     def __init__(self, stream):
         self.stream = stream
@@ -43,6 +43,6 @@ commands =
 commands = {posargs}

 [flake8]
-ignore = E711,E712,F401,F403,F841,H302,H303,H304,H402,H404,H803
+ignore = E711,E712,F401,F403,F841,H302,H303,H304,H402,H803
 builtins = _
 exclude = .git,.venv,.tox,dist,doc,common,*egg,build
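
For reference, H404, which the flake8 change above stops ignoring, flags any multi-line docstring whose first line is not a one-line summary. A minimal sketch of the two shapes (hypothetical function names, not taken from this changeset):

def _violates_h404():
    """
    Hacking 0.6.x reports H404 here: the docstring opens with a bare
    triple quote instead of a summary line.
    """


def _passes_h404():
    """Summarize the behavior on the first line.

    Further detail can follow after a blank line; this is the shape the
    docstring edits in this commit converge on.
    """

With H404 no longer in the ignore list, a flake8 run with hacking installed rejects the first form and accepts the second, which is what gates the style fixed throughout the files above.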