Apply black formatter to dcorch/api
This commit applies the Black formatter to the `dcorch/api` files so
that they adhere to the Black code style guidelines.

Test Plan:
PASS: Success in stx-distcloud-tox-black

Story: 2011149
Task: 50445

Change-Id: I9e5bd8ca9dd33a7cdd2ce36e8dcbd4be65c436d7
Signed-off-by: Hugo Brito <hugo.brito@windriver.com>
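For context, the tox job named in the test plan gates on Black conformance. A minimal sketch (not part of this commit) of what such a per-file check does, using Black's own Python API; the helper name is illustrative:

import black


def is_black_formatted(path):
    """Return True when Black would leave the file unchanged."""
    with open(path, encoding="utf-8") as f:
        src = f.read()
    try:
        # format_file_contents raises NothingChanged when the source
        # already conforms to the given mode (Black defaults here).
        black.format_file_contents(src, fast=False, mode=black.Mode())
    except black.NothingChanged:
        return True
    # A formatted result was produced, i.e. Black would rewrite the file.
    return False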
This commit is contained in:
parent 686abd1cbc
commit 56e359bf8a
@@ -1,4 +1,5 @@
 # Copyright 2015 Huawei Technologies Co., Ltd.
+# Copyright (c) 2024 Wind River Systems, Inc.
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -30,29 +31,36 @@ from dcorch.common import version
 LOG = logging.getLogger(__name__)
 
 common_opts = [
-    cfg.StrOpt('bind_host', default='0.0.0.0',
-               help=_("The host IP to bind to")),
-    cfg.IntOpt('bind_port', default=8118,
-               help=_("The port to bind to")),
-    cfg.IntOpt('api_workers', default=2,
-               help=_("number of api workers")),
-    cfg.StrOpt('state_path',
-               default=os.path.join(os.path.dirname(__file__), '../'),
-               help='Top-level directory for maintaining dcorch state'),
-    cfg.StrOpt('api_extensions_path', default="",
-               help=_("The path for API extensions")),
-    cfg.StrOpt('auth_strategy', default='keystone',
-               help=_("The type of authentication to use")),
-    cfg.BoolOpt('allow_bulk', default=True,
-                help=_("Allow the usage of the bulk API")),
-    cfg.BoolOpt('allow_pagination', default=False,
-                help=_("Allow the usage of the pagination")),
-    cfg.BoolOpt('allow_sorting', default=False,
-                help=_("Allow the usage of the sorting")),
-    cfg.StrOpt('pagination_max_limit', default="-1",
-               help=_("The maximum number of items returned in a single "
-                      "response, value was 'infinite' or negative integer "
-                      "means no limit")),
+    cfg.StrOpt("bind_host", default="0.0.0.0", help=_("The host IP to bind to")),
+    cfg.IntOpt("bind_port", default=8118, help=_("The port to bind to")),
+    cfg.IntOpt("api_workers", default=2, help=_("number of api workers")),
+    cfg.StrOpt(
+        "state_path",
+        default=os.path.join(os.path.dirname(__file__), "../"),
+        help="Top-level directory for maintaining dcorch state",
+    ),
+    cfg.StrOpt(
+        "api_extensions_path", default="", help=_("The path for API extensions")
+    ),
+    cfg.StrOpt(
+        "auth_strategy", default="keystone", help=_("The type of authentication to use")
+    ),
+    cfg.BoolOpt("allow_bulk", default=True, help=_("Allow the usage of the bulk API")),
+    cfg.BoolOpt(
+        "allow_pagination", default=False, help=_("Allow the usage of the pagination")
+    ),
+    cfg.BoolOpt(
+        "allow_sorting", default=False, help=_("Allow the usage of the sorting")
+    ),
+    cfg.StrOpt(
+        "pagination_max_limit",
+        default="-1",
+        help=_(
+            "The maximum number of items returned in a single "
+            "response, value was 'infinite' or negative integer "
+            "means no limit"
+        ),
+    ),
 ]
 
 
@@ -64,9 +72,12 @@ def init(args, **kwargs):
     # auth.register_conf_options(cfg.CONF)
     logging.register_options(cfg.CONF)
 
-    cfg.CONF(args=args, project='dcorch',
-             version='%%(prog)s %s' % version.version_info.release_string(),
-             **kwargs)
+    cfg.CONF(
+        args=args,
+        project="dcorch",
+        version="%%(prog)s %s" % version.version_info.release_string(),
+        **kwargs
+    )
 
 
 def setup_logging():
@@ -74,9 +85,10 @@ def setup_logging():
     product_name = "dcorch"
     logging.setup(cfg.CONF, product_name)
     LOG.info("Logging enabled!")
-    LOG.info("%(prog)s version %(version)s",
-             {'prog': sys.argv[0],
-              'version': version.version_info.release_string()})
+    LOG.info(
+        "%(prog)s version %(version)s",
+        {"prog": sys.argv[0], "version": version.version_info.release_string()},
+    )
     LOG.debug("command line: %s", " ".join(sys.argv))
@@ -30,20 +30,14 @@ def setup_app(*args, **kwargs):
 
     opts = cfg.CONF.pecan
     config = {
-        'server': {
-            'port': cfg.CONF.bind_port,
-            'host': cfg.CONF.bind_host
-        },
-        'app': {
-            'root': 'dcorch.api.controllers.root.RootController',
-            'modules': ['dcorch.api'],
+        "server": {"port": cfg.CONF.bind_port, "host": cfg.CONF.bind_host},
+        "app": {
+            "root": "dcorch.api.controllers.root.RootController",
+            "modules": ["dcorch.api"],
             "debug": opts.debug,
             "auth_enable": opts.auth_enable,
-            'errors': {
-                400: '/error',
-                '__force_dict__': True
-            }
-        }
+            "errors": {400: "/error", "__force_dict__": True},
+        },
     }
 
     pecan_config = pecan.configuration.conf_from_dict(config)
@@ -56,7 +50,7 @@ def setup_app(*args, **kwargs):
         wrap_app=_wrap_app,
         force_canonical=False,
         hooks=lambda: [ctx.AuthHook()],
-        guess_content_type_from_ext=True
+        guess_content_type_from_ext=True,
     )
 
     return app
@@ -74,10 +68,10 @@ def load_paste_app(app_name=None):
 
 def _wrap_app(app):
     app = request_id.RequestId(app)
-    if cfg.CONF.pecan.auth_enable and cfg.CONF.auth_strategy == 'keystone':
+    if cfg.CONF.pecan.auth_enable and cfg.CONF.auth_strategy == "keystone":
         conf = dict(cfg.CONF.keystone_authtoken)
         # Change auth decisions of requests to the app itself.
-        conf.update({'delay_auth_decision': True})
+        conf.update({"delay_auth_decision": True})
 
         # NOTE: Policy enforcement works only if Keystone
         # authentication is enabled. No support for other authentication
@@ -93,7 +87,7 @@ _launcher = None
 def serve(api_service, conf, workers=1):
     global _launcher
     if _launcher:
-        raise RuntimeError(_('serve() can only be called once'))
+        raise RuntimeError(_("serve() can only be called once"))
 
     _launcher = service.launch(conf, api_service, workers=workers)
@@ -1,5 +1,5 @@
 # Copyright (c) 2015 Huawei Tech. Co., Ltd.
-# Copyright (c) 2020-2022 Wind River Systems, Inc.
+# Copyright (c) 2020-2022, 2024 Wind River Systems, Inc.
 # All Rights Reserved.
 #
 # Licensed under the Apache License, Version 2.0 (the "License"); you may
@@ -21,24 +21,26 @@ import dcorch.common.context as k_context
 
 
 def extract_context_from_environ():
-    context_paras = {'auth_token': 'HTTP_X_AUTH_TOKEN',
-                     'user': 'HTTP_X_USER_ID',
-                     'project': 'HTTP_X_TENANT_ID',
-                     'user_name': 'HTTP_X_USER_NAME',
-                     'tenant_name': 'HTTP_X_PROJECT_NAME',
-                     'domain': 'HTTP_X_DOMAIN_ID',
-                     'roles': 'HTTP_X_ROLE',
-                     'user_domain': 'HTTP_X_USER_DOMAIN_ID',
-                     'project_domain': 'HTTP_X_PROJECT_DOMAIN_ID',
-                     'request_id': 'openstack.request_id'}
+    context_paras = {
+        "auth_token": "HTTP_X_AUTH_TOKEN",
+        "user": "HTTP_X_USER_ID",
+        "project": "HTTP_X_TENANT_ID",
+        "user_name": "HTTP_X_USER_NAME",
+        "tenant_name": "HTTP_X_PROJECT_NAME",
+        "domain": "HTTP_X_DOMAIN_ID",
+        "roles": "HTTP_X_ROLE",
+        "user_domain": "HTTP_X_USER_DOMAIN_ID",
+        "project_domain": "HTTP_X_PROJECT_DOMAIN_ID",
+        "request_id": "openstack.request_id",
+    }
 
     environ = request.environ
 
     for key, val in context_paras.items():
        context_paras[key] = environ.get(val)
-    role = environ.get('HTTP_X_ROLE')
+    role = environ.get("HTTP_X_ROLE")
 
     # context_paras['is_admin'] = role == 'admin'
     # In order to work in TiC
-    context_paras['is_admin'] = 'admin' in role.split(',')
+    context_paras["is_admin"] = "admin" in role.split(",")
     return k_context.RequestContext(**context_paras)
@@ -22,16 +22,16 @@ from dcorch.api.controllers.v1 import root as v1_root
 
 class RootController(object):
 
-    @pecan.expose('json')
+    @pecan.expose("json")
     def _lookup(self, version, *remainder):
         version = str(version)
         minor_version = version[-1]
         major_version = version[1]
         remainder = remainder + (minor_version,)
-        if major_version == '1':
+        if major_version == "1":
             return v1_root.Controller(), remainder
 
-    @pecan.expose(generic=True, template='json')
+    @pecan.expose(generic=True, template="json")
     def index(self):
         return {
             "versions": [
@@ -40,19 +40,19 @@ class RootController(object):
                     "links": [
                         {
                             "rel": "self",
-                            "href": pecan.request.application_url + "/v1.0/"
+                            "href": pecan.request.application_url + "/v1.0/",
                         }
                     ],
                     "id": "v1.0",
-                    "updated": "2016-03-07"
+                    "updated": "2016-03-07",
                 }
             ]
         }
 
-    @index.when(method='POST')
-    @index.when(method='PUT')
-    @index.when(method='DELETE')
-    @index.when(method='HEAD')
-    @index.when(method='PATCH')
+    @index.when(method="POST")
+    @index.when(method="PUT")
+    @index.when(method="DELETE")
+    @index.when(method="HEAD")
+    @index.when(method="PATCH")
     def not_supported(self):
         pecan.abort(405)
@@ -28,9 +28,8 @@ class Controller(object):
         minor_version = remainder[-1]
         remainder = remainder[:-1]
         sub_controllers = dict()
-        if minor_version == '0':
-            sub_controllers["subclouds"] = subcloud_manager.\
-                SubcloudController
+        if minor_version == "0":
+            sub_controllers["subclouds"] = subcloud_manager.SubcloudController
         for name, ctrl in sub_controllers.items():
             setattr(self, name, ctrl)
@@ -32,7 +32,7 @@ LOG = logging.getLogger(__name__)
 
 class SubcloudController(object):
     VERSION_ALIASES = {
-        'Newton': '1.0',
+        "Newton": "1.0",
     }
 
     def __init__(self, *args, **kwargs):
@@ -44,26 +44,24 @@ class SubcloudController(object):
            version_cap = 1.0
        return version_cap
 
-    @expose(generic=True, template='json')
+    @expose(generic=True, template="json")
     def index(self):
         # Route the request to specific methods with parameters
         pass
 
-    @index.when(method='POST', template='json')
+    @index.when(method="POST", template="json")
     def post(self, project):
-        """Sync resources present in one region to another region.
-
-        """
+        """Sync resources present in one region to another region."""
         context = restcomm.extract_context_from_environ()
         payload = eval(request.body)
         if not payload:
-            pecan.abort(400, _('Body required'))
-        if not payload.get('subcloud'):
-            pecan.abort(400, _('subcloud required'))
+            pecan.abort(400, _("Body required"))
+        if not payload.get("subcloud"):
+            pecan.abort(400, _("subcloud required"))
         job_id = uuidutils.generate_uuid()
         return self._add_subcloud(job_id, payload, context)
 
-    @index.when(method='delete', template='json')
+    @index.when(method="delete", template="json")
     def delete(self, project, subcloud):
         """Delete the database entries of a given job_id.
 
@@ -74,10 +72,10 @@ class SubcloudController(object):
         context = restcomm.extract_context_from_environ()
         try:
             self.rpc_client.del_subcloud(context, subcloud)
-            return {'deleted': {'subcloud': subcloud}}
+            return {"deleted": {"subcloud": subcloud}}
         except oslo_messaging.RemoteError as ex:
-            if ex.exc_type == 'SubcloudNotFound':
-                pecan.abort(404, _('Subcloud not found'))
+            if ex.exc_type == "SubcloudNotFound":
+                pecan.abort(404, _("Subcloud not found"))
 
     def _add_subcloud(self, job_id, payload, context):
         """Make an rpc call to engine.
 
@@ -88,8 +86,8 @@ class SubcloudController(object):
         :param context: context of the request.
         :param result: Result object to return an output.
         """
-        name = payload['subcloud']
-        management_ip = payload['management_ip']
-        version = '17.06'
+        name = payload["subcloud"]
+        management_ip = payload["management_ip"]
+        version = "17.06"
         self.rpc_client.add_subcloud(context, name, version, management_ip)
-        return {'added': {'subcloud': name}}
+        return {"added": {"subcloud": name}}
@@ -1,5 +1,5 @@
 #
-# Copyright (c) 2022 Wind River Systems, Inc.
+# Copyright (c) 2022, 2024 Wind River Systems, Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 #
@@ -10,6 +10,4 @@ from dcorch.api.policies import base
 
 
 def list_rules():
-    return itertools.chain(
-        base.list_rules()
-    )
+    return itertools.chain(base.list_rules())
@@ -1,28 +1,26 @@
 #
-# Copyright (c) 2022 Wind River Systems, Inc.
+# Copyright (c) 2022, 2024 Wind River Systems, Inc.
 #
 # SPDX-License-Identifier: Apache-2.0
 #
 
 from oslo_policy import policy
 
-ADMIN_IN_SYSTEM_PROJECTS = 'admin_in_system_projects'
-READER_IN_SYSTEM_PROJECTS = 'reader_in_system_projects'
+ADMIN_IN_SYSTEM_PROJECTS = "admin_in_system_projects"
+READER_IN_SYSTEM_PROJECTS = "reader_in_system_projects"
 
 
 base_rules = [
     policy.RuleDefault(
         name=ADMIN_IN_SYSTEM_PROJECTS,
-        check_str='role:admin and (project_name:admin or ' +
-                  'project_name:services)',
+        check_str="role:admin and (project_name:admin or " + "project_name:services)",
         description="Base rule.",
     ),
     policy.RuleDefault(
         name=READER_IN_SYSTEM_PROJECTS,
-        check_str='role:reader and (project_name:admin or ' +
-                  'project_name:services)',
-        description="Base rule."
-    )
+        check_str="role:reader and (project_name:admin or " + "project_name:services)",
+        description="Base rule.",
+    ),
 ]
@@ -36,22 +36,24 @@ def reset():
     _ENFORCER = None
 
 
-def init(policy_file='policy.yaml'):
+def init(policy_file="policy.yaml"):
     """Init an Enforcer class.
 
-       :param policy_file: Custom policy file to be used.
+    :param policy_file: Custom policy file to be used.
 
-       :return: Returns a Enforcer instance.
+    :return: Returns a Enforcer instance.
     """
     global _ENFORCER
     if not _ENFORCER:
 
         # https://docs.openstack.org/oslo.policy/latest/user/usage.html
-        _ENFORCER = policy.Enforcer(CONF,
-                                    policy_file=policy_file,
-                                    default_rule='default',
-                                    use_conf=True,
-                                    overwrite=True)
+        _ENFORCER = policy.Enforcer(
+            CONF,
+            policy_file=policy_file,
+            default_rule="default",
+            use_conf=True,
+            overwrite=True,
+        )
         _ENFORCER.register_defaults(controller_policies.list_rules())
     return _ENFORCER
 
@@ -59,5 +61,6 @@ def init(policy_file='policy.yaml'):
 def authorize(rule, target, creds, do_raise=True):
     """A wrapper around 'authorize' from 'oslo_policy.policy'."""
     init()
-    return _ENFORCER.authorize(rule, target, creds, do_raise=do_raise,
-                               exc=exc.HTTPForbidden)
+    return _ENFORCER.authorize(
+        rule, target, creds, do_raise=do_raise, exc=exc.HTTPForbidden
+    )
@@ -74,13 +74,16 @@ class Acceptor(Router):
 
         for key, value in proxy_consts.COMPUTE_PATH_MAP.items():
             for k, v in value.items():
-                self._add_resource(mapper, api_controller, v, k,
-                                   CONF.type, key)
+                self._add_resource(mapper, api_controller, v, k, CONF.type, key)
 
-        self._add_resource(mapper, orch_controller,
-                           proxy_consts.QUOTA_DETAIL_PATHS,
-                           consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET,
-                           CONF.type, method=['GET'])
+        self._add_resource(
+            mapper,
+            orch_controller,
+            proxy_consts.QUOTA_DETAIL_PATHS,
+            consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET,
+            CONF.type,
+            method=["GET"],
+        )
 
     def add_platform_routes(self, app, conf, mapper):
         api_controller = SysinvAPIController(app, conf)
@@ -93,8 +96,7 @@ class Acceptor(Router):
 
         for key, value in proxy_consts.CINDER_PATH_MAP.items():
             for k, v in value.items():
-                self._add_resource(mapper, api_controller, v, k,
-                                   CONF.type, key)
+                self._add_resource(mapper, api_controller, v, k, CONF.type, key)
 
     def add_network_routes(self, app, conf, mapper):
         api_controller = NeutronAPIController(app, conf)
@@ -103,10 +105,14 @@ class Acceptor(Router):
         for key, value in proxy_consts.NEUTRON_PATH_MAP.items():
             self._add_resource(mapper, api_controller, value, key, CONF.type)
 
-        self._add_resource(mapper, orch_controller,
-                           proxy_consts.NEUTRON_QUOTA_DETAIL_PATHS,
-                           consts.RESOURCE_TYPE_NETWORK_QUOTA_SET,
-                           CONF.type, method=['GET'])
+        self._add_resource(
+            mapper,
+            orch_controller,
+            proxy_consts.NEUTRON_QUOTA_DETAIL_PATHS,
+            consts.RESOURCE_TYPE_NETWORK_QUOTA_SET,
+            CONF.type,
+            method=["GET"],
+        )
 
     def add_patch_routes(self, app, conf, mapper):
         api_controller = PatchAPIController(app, conf)
@@ -127,6 +133,9 @@ class VersionAcceptor(Router):
         self._conf = conf
         mapper = routes.Mapper()
         api_controller = VersionController(app, conf)
-        mapper.connect(proxy_consts.VERSION_ROOT, controller=api_controller,
-                       conditions=dict(method=['GET']))
+        mapper.connect(
+            proxy_consts.VERSION_ROOT,
+            controller=api_controller,
+            conditions=dict(method=["GET"]),
+        )
         super(VersionAcceptor, self).__init__(app, conf, mapper, app)
[File diff suppressed because it is too large]
@@ -1,4 +1,4 @@
-# Copyright 2017 Wind River
+# Copyright 2017, 2024 Wind River
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -24,12 +24,12 @@ from dcorch.api.proxy.common import utils
 LOG = logging.getLogger(__name__)
 
 dispatch_opts = [
-    cfg.StrOpt('remote_host',
-               default="192.168.204.2",
-               help='remote host for api proxy to forward the request'),
-    cfg.IntOpt('remote_port',
-               default=18774,
-               help='listen port for remote host'),
+    cfg.StrOpt(
+        "remote_host",
+        default="192.168.204.2",
+        help="remote host for api proxy to forward the request",
+    ),
+    cfg.IntOpt("remote_port", default=18774, help="listen port for remote host"),
 ]
 
 CONF = cfg.CONF
@@ -43,15 +43,15 @@ class APIDispatcher(object):
     """
 
     def __init__(self, app):
-        self._remote_host, self._remote_port = \
-            utils.get_remote_host_port_options(CONF)
+        self._remote_host, self._remote_port = utils.get_remote_host_port_options(CONF)
         self.app = app
 
     @webob.dec.wsgify
     def __call__(self, req):
         """Route the incoming request to a remote host"""
-        LOG.debug("APIDispatcher dispatch the request to remote host: (%s), "
-                  "port: (%d)" % (self._remote_host, self._remote_port))
-        utils.set_request_forward_environ(req, self._remote_host,
-                                          self._remote_port)
+        LOG.debug(
+            "APIDispatcher dispatch the request to remote host: (%s), "
+            "port: (%d)" % (self._remote_host, self._remote_port)
+        )
+        utils.set_request_forward_environ(req, self._remote_host, self._remote_port)
         return self.app
@@ -29,9 +29,11 @@ from dcorch.api.proxy.common import utils
 LOG = logging.getLogger(__name__)
 
 filter_opts = [
-    cfg.StrOpt('user_header',
-               default=dccommon_consts.USER_HEADER_VALUE,
-               help='An application specific header'),
+    cfg.StrOpt(
+        "user_header",
+        default=dccommon_consts.USER_HEADER_VALUE,
+        help="An application specific header",
+    ),
 ]
 
 CONF = cfg.CONF
@@ -40,8 +42,7 @@ CONF.register_opts(filter_opts)
 
 
 def is_load_import(content_type, url_path):
-    if (content_type == "multipart/form-data" and
-            url_path == "/v1/loads/import_load"):
+    if content_type == "multipart/form-data" and url_path == "/v1/loads/import_load":
         return True
     else:
         return False
@@ -61,8 +62,7 @@ class ApiFiller(Middleware):
 
     def __init__(self, app, conf):
         self._default_dispatcher = Proxy()
-        self._remote_host, self._remote_port = \
-            utils.get_remote_host_port_options(CONF)
+        self._remote_host, self._remote_port = utils.get_remote_host_port_options(CONF)
         super(ApiFiller, self).__init__(app)
 
     @webob.dec.wsgify(RequestClass=Request)
@@ -72,8 +72,7 @@ class ApiFiller(Middleware):
             # 3 times the file size is needed:
             # 2 times on webob temporary copies
            # 1 time on internal temporary copy to be shared with sysinv
-            if not utils.is_space_available("/scratch",
-                                            3 * req.content_length):
+            if not utils.is_space_available("/scratch", 3 * req.content_length):
                msg = _(
                    "Insufficient space on /scratch for request %s, "
                    "/scratch must have at least %d bytes of free space. "
@@ -84,10 +83,11 @@ class ApiFiller(Middleware):
 
                raise webob.exc.HTTPInternalServerError(explanation=msg)
 
-        if ('HTTP_USER_HEADER' in req.environ and
-                req.environ['HTTP_USER_HEADER'] == CONF.user_header):
-            utils.set_request_forward_environ(req, self._remote_host,
-                                              self._remote_port)
+        if (
+            "HTTP_USER_HEADER" in req.environ
+            and req.environ["HTTP_USER_HEADER"] == CONF.user_header
+        ):
+            utils.set_request_forward_environ(req, self._remote_host, self._remote_port)
             LOG.debug("Forward dcorch-engine request to the API service")
             return self._default_dispatcher
         else:
@@ -48,66 +48,75 @@ class ParseError(Middleware):
         state = {}
 
         def replacement_start_response(status, headers, exc_info=None):
-            """Overrides the default response to make errors parsable.
-
-            """
+            """Overrides the default response to make errors parsable."""
             try:
-                status_code = int(status.split(' ')[0])
-                state['status_code'] = status_code
+                status_code = int(status.split(" ")[0])
+                state["status_code"] = status_code
             except (ValueError, TypeError):  # pragma: nocover
-                raise Exception((
-                    'ErrorDocumentMiddleware received an invalid '
-                    'status %s' % status
-                ))
+                raise Exception(
+                    ("ErrorDocumentMiddleware received an invalid status %s" % status)
+                )
             else:
-                if (state['status_code'] // 100) not in (2, 3):
+                if (state["status_code"] // 100) not in (2, 3):
                     # Remove some headers so we can replace them later
                     # when we have the full error message and can
                     # compute the length.
-                    headers = [(h, v)
-                               for (h, v) in headers
-                               if h not in ('Content-Length', 'Content-Type')
-                               ]
+                    headers = [
+                        (h, v)
+                        for (h, v) in headers
+                        if h not in ("Content-Length", "Content-Type")
+                    ]
                 # Save the headers in case we need to modify them.
-                state['headers'] = headers
+                state["headers"] = headers
             return start_response(status, headers, exc_info)
 
         app_iter = self.app(environ, replacement_start_response)
-        if (state['status_code'] // 100) not in (2, 3):
+        if (state["status_code"] // 100) not in (2, 3):
             req = webob.Request(environ)
-            if (req.accept.best_match(['application/json', 'application/xml']) ==
-                    'application/xml'):
+            if (
+                req.accept.best_match(["application/json", "application/xml"])
+                == "application/xml"
+            ):
 
                 try:
                     # simple check xml is valid
-                    body = [et.ElementTree.tostring(
-                        et.ElementTree.fromstring('<error_message>' +
-                                                  '\n'.join(app_iter) +
-                                                  '</error_message>'))]
+                    body = [
+                        et.ElementTree.tostring(
+                            et.ElementTree.fromstring(
+                                "<error_message>"
+                                + "\n".join(app_iter)
+                                + "</error_message>"
+                            )
+                        )
+                    ]
                 except et.ElementTree.ParseError as err:
-                    LOG.error('Error parsing HTTP response: %s' % err)
-                    body = ['<error_message>%s' % state['status_code'] +
-                            '</error_message>']
-                state['headers'].append(('Content-Type', 'application/xml'))
+                    LOG.error("Error parsing HTTP response: %s" % err)
+                    body = [
+                        "<error_message>%s" % state["status_code"] + "</error_message>"
+                    ]
+                state["headers"].append(("Content-Type", "application/xml"))
             else:
-                app_iter = [i.decode('utf-8') for i in app_iter]
+                app_iter = [i.decode("utf-8") for i in app_iter]
                 # Parse explanation field from webob.exc and add it as
                 # 'faulstring' to be processed by cgts-client
                 fault = None
-                app_data = '\n'.join(app_iter)
+                app_data = "\n".join(app_iter)
                 for data in app_data.split("\n"):
                     if WEBOB_EXPL_SEP in str(data):
                         # Remove separator, trailing and leading white spaces
                         fault = str(data).replace(WEBOB_EXPL_SEP, "").strip()
                         break
                 if fault is None:
-                    body = [json.dumps({'error_message': app_data})]
+                    body = [json.dumps({"error_message": app_data})]
                 else:
-                    body = [json.dumps({'error_message':
-                                        json.dumps({'faultstring': fault})})]
-                body = [item.encode('utf-8') for item in body]
-                state['headers'].append(('Content-Type', 'application/json'))
-                state['headers'].append(('Content-Length', str(len(body[0]))))
+                    body = [
+                        json.dumps(
+                            {"error_message": json.dumps({"faultstring": fault})}
+                        )
+                    ]
+                body = [item.encode("utf-8") for item in body]
+                state["headers"].append(("Content-Type", "application/json"))
+                state["headers"].append(("Content-Length", str(len(body[0]))))
         else:
             body = app_iter
         return body
@@ -41,9 +41,11 @@ LOG = logging.getLogger(__name__)
 
 CONF = cfg.CONF
 patch_opts = [
-    cfg.StrOpt('patch_vault',
-               default='/opt/dc-vault/patches/',
-               help='file system for patch storage on SystemController'),
+    cfg.StrOpt(
+        "patch_vault",
+        default="/opt/dc-vault/patches/",
+        help="file system for patch storage on SystemController",
+    ),
 ]
 
 CONF.register_opts(patch_opts, CONF.type)
@@ -58,8 +60,8 @@ class PatchAPIController(Middleware):
         webob.exc.HTTPOk.code,
     ]
 
-    PATCH_META_DATA = 'metadata.xml'
-    SOFTWARE_VERSION = 'sw_version'
+    PATCH_META_DATA = "metadata.xml"
+    SOFTWARE_VERSION = "sw_version"
 
     def __init__(self, app, conf):
         super(PatchAPIController, self).__init__(app)
@@ -89,7 +91,7 @@ class PatchAPIController(Middleware):
         # check if the request was successful
         if response.status_int in self.OK_STATUS_CODE:
             data = json.loads(response.text)
-            if 'error' in data and data["error"] != "":
+            if "error" in data and data["error"] != "":
                 rc = False
         else:
             rc = False
@@ -103,8 +105,7 @@ class PatchAPIController(Middleware):
             msg = "Unable to fetch release version from patch"
             LOG.error(msg)
             raise webob.exc.HTTPUnprocessableEntity(explanation=msg)
-        versioned_vault = CONF.patching.patch_vault + \
-            sw_version
+        versioned_vault = CONF.patching.patch_vault + sw_version
         if not os.path.isdir(versioned_vault):
             os.makedirs(versioned_vault)
         try:
@@ -125,8 +126,9 @@ class PatchAPIController(Middleware):
                     os.remove(fn)
                     return
                 except OSError:
-                    msg = (f"Unable to remove patch file {fn} from the central "
-                           "storage.")
+                    msg = (
+                        f"Unable to remove patch file {fn} from the central " "storage."
+                    )
                     raise webob.exc.HTTPUnprocessableEntity(explanation=msg)
         LOG.info(f"Patch {patch} was not found in {vault}")
@@ -136,8 +138,8 @@ class PatchAPIController(Middleware):
         # chunk, rather than reading the file into memory as a whole
 
         # write the patch to a temporary directory first
-        tempdir = tempfile.mkdtemp(prefix="patch_proxy_", dir='/scratch')
-        fn = tempdir + '/' + os.path.basename(filename)
+        tempdir = tempfile.mkdtemp(prefix="patch_proxy_", dir="/scratch")
+        fn = tempdir + "/" + os.path.basename(filename)
         dst = os.open(fn, os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
         size = 64 * 1024
         n = size
@@ -154,19 +156,22 @@ class PatchAPIController(Middleware):
 
     def patch_upload_req(self, request, response):
         # stores patch in the patch storage
-        file_item = request.POST['file']
+        file_item = request.POST["file"]
         try:
             self.store_patch_file(file_item.filename, file_item.file.fileno())
         except Exception:
             LOG.exception("Failed to store the patch to vault")
             # return a warning and prompt the user to try again
-            if hasattr(response, 'text'):
+            if hasattr(response, "text"):
                 from builtins import str as text
+
                 data = json.loads(response.text)
-                if 'warning' in data:
-                    msg = _('The patch file could not be stored in the vault, '
-                            'please upload the patch again!')
-                    data['warning'] += msg
+                if "warning" in data:
+                    msg = _(
+                        "The patch file could not be stored in the vault, "
+                        "please upload the patch again!"
+                    )
+                    data["warning"] += msg
                     response.text = text(json.dumps(data))
         proxy_utils.cleanup(request.environ)
         return response
@@ -175,7 +180,7 @@ class PatchAPIController(Middleware):
         files = []
         for key, path in request.GET.items():
             LOG.info("upload-dir: Retrieving patches from %s" % path)
-            for f in glob.glob(path + '/*.patch'):
+            for f in glob.glob(path + "/*.patch"):
                 if os.path.isfile(f):
                     files.append(f)
 
@@ -190,7 +195,8 @@ class PatchAPIController(Middleware):
         self.dcmanager_state_rpc_client.update_subcloud_endpoint_status(
             self.ctxt,
             endpoint_type=self.ENDPOINT_TYPE,
-            sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN)
+            sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN,
+        )
         return response
 
     def notify_usm(self, request, response):
@@ -199,18 +205,19 @@ class PatchAPIController(Middleware):
         self.dcmanager_state_rpc_client.update_subcloud_endpoint_status(
             self.ctxt,
             endpoint_type=self.USM_ENDPOINT_TYPE,
-            sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN)
+            sync_status=dccommon_consts.SYNC_STATUS_UNKNOWN,
+        )
         return response
 
     def patch_delete_req(self, request, response):
-        patch_ids = proxy_utils.get_routing_match_value(request.environ,
-                                                        'patch_id')
+        patch_ids = proxy_utils.get_routing_match_value(request.environ, "patch_id")
         LOG.info("Deleting patches: %s", patch_ids)
         patch_list = os.path.normpath(patch_ids).split(os.path.sep)
         for patch_file in patch_list:
             LOG.debug("Patch file:(%s)", patch_file)
-            self.delete_patch_from_version_vault(os.path.basename(patch_file)
-                                                 + '.patch')
+            self.delete_patch_from_version_vault(
+                os.path.basename(patch_file) + ".patch"
+            )
         return response
 
     def process_request(self, req):
@@ -222,7 +229,7 @@ class PatchAPIController(Middleware):
        if CONF.show_response:
            LOG.info("Response: (%s)", str(response))
            LOG.info("Response status: (%s)", response.status)
-        action = proxy_utils.get_routing_match_value(request.environ, 'action')
+        action = proxy_utils.get_routing_match_value(request.environ, "action")
         if self.ok_response(response) and action in self.response_hander_map:
             handler = self.response_hander_map[action]
             return handler(request, response)
@@ -21,7 +21,7 @@ from dcorch.api.proxy.common.service import Application
 
 LOG = logging.getLogger(__name__)
 
-HEADERS = ['HTTP_X_DOMAIN_ID', 'HTTP_X_DOMAIN_NAME', 'HTTP_OPENSTACK_SYSTEM_SCOPE']
+HEADERS = ["HTTP_X_DOMAIN_ID", "HTTP_X_DOMAIN_NAME", "HTTP_OPENSTACK_SYSTEM_SCOPE"]
 
 
 class Proxy(Application):
@@ -34,8 +34,7 @@ class Proxy(Application):
         self.proxy_app = TransparentProxy()
 
     def __call__(self, environ, start_response):
-        LOG.debug("Proxy the request to the remote host: (%s)", environ[
-            'HTTP_HOST'])
+        LOG.debug("Proxy the request to the remote host: (%s)", environ["HTTP_HOST"])
         # The http/client.py added validation for illegal headers in python3
         # which doesn't allow None values. If we don't inject these headers
         # and set them to empty string here,
@@ -44,6 +43,6 @@ class Proxy(Application):
         # an TypeError due to the None values.
         for header in HEADERS:
             if not environ.get(header):
-                environ[header] = ''
+                environ[header] = ""
         result = self.proxy_app(environ, start_response)
         return result
@@ -1,4 +1,4 @@
-# Copyright 2017 Wind River
+# Copyright 2017, 2024 Wind River
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -33,15 +33,10 @@ CONF = cfg.CONF
 
 
 class Router(Middleware):
-    """WSGI middleware that maps incoming requests to WSGI apps.
-
-    """
+    """WSGI middleware that maps incoming requests to WSGI apps."""
 
     def __init__(self, app, conf, mapper, forwarder):
-
-        """Create a router for the given routes.Mapper.
-
-        """
+        """Create a router for the given routes.Mapper."""
 
         self.map = mapper
         self.forwarder = forwarder
@@ -50,39 +45,40 @@ class Router(Middleware):
 
     @webob.dec.wsgify(RequestClass=Request)
     def __call__(self, req):
-
-        """Route the incoming request to a controller based on self.map.
-
-        """
+        """Route the incoming request to a controller based on self.map."""
 
         return self._router
 
     @webob.dec.wsgify
     def _dispatch(self, req):
-
         """Called by self._router after matching the incoming request to a
-
         route and putting the information into req.environ.
         """
 
-        match = req.environ['wsgiorg.routing_args'][1]
+        match = req.environ["wsgiorg.routing_args"][1]
         if not match:
             if self.forwarder:
                 return self.forwarder
-            msg = _('The request is not allowed in System Controller')
+            msg = _("The request is not allowed in System Controller")
             proxy_utils.cleanup(req.environ)
             raise webob.exc.HTTPForbidden(explanation=msg)
         LOG.debug("Found match action!")
-        app = match['controller']
+        app = match["controller"]
         return app
 
     @staticmethod
-    def _add_resource(mapper, controller, paths, tag, endpoint_type,
-                      action=None, method=None):
+    def _add_resource(
+        mapper, controller, paths, tag, endpoint_type, action=None, method=None
+    ):
         if action is None:
             action = tag
         if method is None:
             method = constants.ROUTE_METHOD_MAP[endpoint_type].get(tag)
         for path in paths:
-            mapper.connect(path, controller=controller, action=action,
-                           conditions=dict(method=method))
+            mapper.connect(
+                path,
+                controller=controller,
+                action=action,
+                conditions=dict(method=method),
+            )
@@ -17,56 +17,52 @@ from dcorch.common import consts
 
 # Version could be any of the following: /, /v1, /v1/
 # but must deny regular paths such as /v1/isystems
-VERSION_ROOT = '/{version:[^/]*?(\/$)?}'
+VERSION_ROOT = "/{version:[^/]*?(\/$)?}"
 
 # Compute
-FLAVOR_RESOURCE_TAG = 'flavors'
-FLAVOR_ACCESS_RESOURCE_TAG = 'action'
-FLAVOR_EXTRA_SPECS_RESOURCE_TAG = 'os-extra_specs'
-KEYPAIRS_RESOURCE_TAG = 'os-keypairs'
-QUOTA_RESOURCE_TAG = 'os-quota-sets'
-QUOTA_CLASS_RESOURCE_TAG = 'os-quota-class-sets'
+FLAVOR_RESOURCE_TAG = "flavors"
+FLAVOR_ACCESS_RESOURCE_TAG = "action"
+FLAVOR_EXTRA_SPECS_RESOURCE_TAG = "os-extra_specs"
+KEYPAIRS_RESOURCE_TAG = "os-keypairs"
+QUOTA_RESOURCE_TAG = "os-quota-sets"
+QUOTA_CLASS_RESOURCE_TAG = "os-quota-class-sets"
 
 FLAVOR_PATHS = [
-    '/v2.1/{project_id:.*?}/flavors',
-    '/v2.1/{project_id:.*?}/flavors/{flavor_id}'
+    "/v2.1/{project_id:.*?}/flavors",
+    "/v2.1/{project_id:.*?}/flavors/{flavor_id}",
 ]
 
-FLAVOR_ACCESS_PATHS = [
-    '/v2.1/{project_id:.*?}/flavors/{flavor_id}/action'
-]
+FLAVOR_ACCESS_PATHS = ["/v2.1/{project_id:.*?}/flavors/{flavor_id}/action"]
 
 EXTRA_SPECS_PATHS = [
-    '/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs',
-    '/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs/{extra_spec}'
+    "/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs",
+    "/v2.1/{project_id:.*?}/flavors/{flavor_id}/os-extra_specs/{extra_spec}",
 ]
 
 KEYPAIRS_PATHS = [
-    '/v2.1/{project_id:.*?}/os-keypairs',
-    '/v2.1/{project_id:.*?}/os-keypairs/{keypair}'
+    "/v2.1/{project_id:.*?}/os-keypairs",
+    "/v2.1/{project_id:.*?}/os-keypairs/{keypair}",
 ]
 
 QUOTA_PATHS = [
-    '/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}',
+    "/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}",
 ]
 
 QUOTA_DETAIL_PATHS = [
-    '/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}/detail',
+    "/v2.1/{project_id:.*?}/os-quota-sets/{tenant_id}/detail",
 ]
 
 QUOTA_CLASS_PATHS = [
-    '/v2.1/{project_id:.*?}/os-quota-class-sets/{id}',
+    "/v2.1/{project_id:.*?}/os-quota-class-sets/{id}",
 ]
 
 COMPUTE_PATH_MAP = {
     consts.RESOURCE_TYPE_COMPUTE_FLAVOR: {
         FLAVOR_RESOURCE_TAG: FLAVOR_PATHS,
         FLAVOR_ACCESS_RESOURCE_TAG: FLAVOR_ACCESS_PATHS,
-        FLAVOR_EXTRA_SPECS_RESOURCE_TAG: EXTRA_SPECS_PATHS
-    },
-    consts.RESOURCE_TYPE_COMPUTE_KEYPAIR: {
-        KEYPAIRS_RESOURCE_TAG: KEYPAIRS_PATHS
+        FLAVOR_EXTRA_SPECS_RESOURCE_TAG: EXTRA_SPECS_PATHS,
     },
+    consts.RESOURCE_TYPE_COMPUTE_KEYPAIR: {KEYPAIRS_RESOURCE_TAG: KEYPAIRS_PATHS},
     consts.RESOURCE_TYPE_COMPUTE_QUOTA_SET: {
         QUOTA_RESOURCE_TAG: QUOTA_PATHS,
     },
@@ -76,24 +72,13 @@ COMPUTE_PATH_MAP = {
 }
 
 # Sysinv
-CERTIFICATE_PATHS = [
-    '/v1/certificate/certificate_install',
-    '/v1/certificate/{uuid}'
-]
+CERTIFICATE_PATHS = ["/v1/certificate/certificate_install", "/v1/certificate/{uuid}"]
 
-USER_PATHS = [
-    '/v1/iuser/{uuid}'
-]
+USER_PATHS = ["/v1/iuser/{uuid}"]
 
-LOAD_PATHS = [
-    '/v1/loads/import_load',
-    '/v1/loads/{id}'
-]
+LOAD_PATHS = ["/v1/loads/import_load", "/v1/loads/{id}"]
 
-DEVICE_IMAGE_PATHS = [
-    '/v1/device_images',
-    '/v1/device_images/{uuid}'
-]
+DEVICE_IMAGE_PATHS = ["/v1/device_images", "/v1/device_images/{uuid}"]
 
 SYSINV_PATH_MAP = {
     consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: CERTIFICATE_PATHS,
@@ -102,19 +87,19 @@ SYSINV_PATH_MAP = {
     consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: DEVICE_IMAGE_PATHS,
 }
 
-LOAD_FILES_STAGING_DIR = '/scratch/tmp_load'
-IMPORT_LOAD_FILES = ['path_to_iso', 'path_to_sig']
+LOAD_FILES_STAGING_DIR = "/scratch/tmp_load"
+IMPORT_LOAD_FILES = ["path_to_iso", "path_to_sig"]
 IMPORTED_LOAD_MAX_COUNT = 1
 
-DEVICE_IMAGE_VAULT_DIR = '/opt/dc-vault/device_images'
+DEVICE_IMAGE_VAULT_DIR = "/opt/dc-vault/device_images"
 
 # Cinder
 CINDER_QUOTA_PATHS = [
-    '/{version}/{admin_project_id}/os-quota-sets/{project_id}',
+    "/{version}/{admin_project_id}/os-quota-sets/{project_id}",
 ]
 
 CINDER_QUOTA_CLASS_PATHS = [
-    '/{version}/{admin_project_id}/os-quota-class-sets/{quota_class_name}',
+    "/{version}/{admin_project_id}/os-quota-class-sets/{quota_class_name}",
 ]
 
 CINDER_PATH_MAP = {
@@ -127,152 +112,142 @@ CINDER_PATH_MAP = {
 }
 
 # Neutron
-NEUTRON_SECURITY_GROUPS_PATHS = [
-    '/v2.0/security-groups',
-    '/v2.0/security-groups/{security_group_id}',
+NEUTRON_SEC_GROUPS_PATHS = [
+    "/v2.0/security-groups",
+    "/v2.0/security-groups/{security_group_id}",
 ]
 
-NEUTRON_SECURITY_GROUP_RULES_PATHS = [
-    '/v2.0/security-group-rules',
-    '/v2.0/security-group-rules/{security_group_rule_id}',
+NEUTRON_SEC_GROUP_RULES_PATHS = [
    "/v2.0/security-group-rules",
    "/v2.0/security-group-rules/{security_group_rule_id}",
 ]
 
 NEUTRON_QOS_PATHS = [
-    '/v2.0/qos/policies',
-    '/v2.0/wrs-tm/qoses',
-    '/v2.0/qos/policies/{policy_id}',
-    '/v2.0/wrs-tm/qoses/{policy_id}',
+    "/v2.0/qos/policies",
+    "/v2.0/wrs-tm/qoses",
+    "/v2.0/qos/policies/{policy_id}",
+    "/v2.0/wrs-tm/qoses/{policy_id}",
 ]
 
 NEUTRON_BANDWIDTH_LIMIT_RULES_PATHS = [
-    '/v2.0/qos/policies/{policy_id}/bandwidth_limit_rules',
+    "/v2.0/qos/policies/{policy_id}/bandwidth_limit_rules",
 ]
 
 NEUTRON_DSCP_MARKING_RULES_PATHS = [
-    '/v2.0/qos/policies/{policy_id}/dscp_marking_rules',
+    "/v2.0/qos/policies/{policy_id}/dscp_marking_rules",
 ]
 
 NEUTRON_MINIMUM_BANDWIDTH_RULES_PATHS = [
-    '/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules',
-    '/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules/{rule_id}',
+    "/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules",
+    "/v2.0/qos/policies/{policy_id}/minimum_bandwidth_rules/{rule_id}",
 ]
 
 NEUTRON_QUOTA_PATHS = [
-    '/v2.0/quotas/{project_id}',
+    "/v2.0/quotas/{project_id}",
 ]
 
 NEUTRON_QUOTA_DETAIL_PATHS = [
-    '/v2.0/quotas/{project_id}/details.json',
+    "/v2.0/quotas/{project_id}/details.json",
 ]
 
 NEUTRON_PATH_MAP = {
-    consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP:
-        NEUTRON_SECURITY_GROUPS_PATHS,
-    consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE:
-        NEUTRON_SECURITY_GROUP_RULES_PATHS,
-    consts.RESOURCE_TYPE_NETWORK_QUOTA_SET:
-        NEUTRON_QUOTA_PATHS,
-    consts.RESOURCE_TYPE_QOS_POLICY:
-        NEUTRON_QOS_PATHS,
+    consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP: NEUTRON_SEC_GROUPS_PATHS,
+    consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE: NEUTRON_SEC_GROUP_RULES_PATHS,
+    consts.RESOURCE_TYPE_NETWORK_QUOTA_SET: NEUTRON_QUOTA_PATHS,
+    consts.RESOURCE_TYPE_QOS_POLICY: NEUTRON_QOS_PATHS,
 }
 
 
 # Software
-SOFTWARE_ACTION_QUERY = 'query'
-SOFTWARE_ACTION_QUERY_DEPENDENCIES = 'query_dependencies'
-SOFTWARE_ACTION_COMMIT_PATCH = 'commit-patch'
-SOFTWARE_ACTION_SHOW = 'show'
+SOFTWARE_ACTION_QUERY = "query"
+SOFTWARE_ACTION_QUERY_DEPENDENCIES = "query_dependencies"
+SOFTWARE_ACTION_COMMIT_PATCH = "commit-patch"
+SOFTWARE_ACTION_SHOW = "show"
 
 
 SOFTWARE_QUERY_PATHS = [
-    '/v1/query',
-    '/v1/software/query',
+    "/v1/query",
+    "/v1/software/query",
 ]
 
 SOFTWARE_SHOW_PATHS = [
-    '/v1/show/{release_id}',
-    '/v1/software/show/{release_id:.*?}',
+    "/v1/show/{release_id}",
+    "/v1/software/show/{release_id:.*?}",
 ]
 
 SOFTWARE_COMMIT_PATCH_PATHS = [
-    '/v1/software/commit_dry_run/{release_id:.*?}',
-    '/v1/software/commit_patch/{release_id:.*?}',
+    "/v1/software/commit_dry_run/{release_id:.*?}",
+    "/v1/software/commit_patch/{release_id:.*?}",
 ]
 
 SOFTWARE_QUERY_DEPENDENCIES_PATHS = [
-    '/v1/software/query_dependencies/{release_id:.*?}',
+    "/v1/software/query_dependencies/{release_id:.*?}",
 ]
 
 SOFTWARE_PATH_MAP = {
     SOFTWARE_ACTION_QUERY: SOFTWARE_QUERY_PATHS,
     SOFTWARE_ACTION_SHOW: SOFTWARE_SHOW_PATHS,
     SOFTWARE_ACTION_COMMIT_PATCH: SOFTWARE_COMMIT_PATCH_PATHS,
-    SOFTWARE_ACTION_QUERY_DEPENDENCIES: SOFTWARE_QUERY_DEPENDENCIES_PATHS
+    SOFTWARE_ACTION_QUERY_DEPENDENCIES: SOFTWARE_QUERY_DEPENDENCIES_PATHS,
 }
 
 # Patching
 # allow version request
-PATCH_ACTION_GET_VERSION = 'version'
-PATCH_ACTION_UPLOAD = 'upload'
-PATCH_ACTION_UPLOAD_DIR = 'upload_dir'
-PATCH_ACTION_APPLY = 'apply'
-PATCH_ACTION_REMOVE = 'remove'
-PATCH_ACTION_DELETE = 'delete'
-PATCH_ACTION_QUERY = 'query'
-PATCH_ACTION_SHOW = 'show'
-PATCH_ACTION_COMMIT = 'commit'
-PATCH_ACTION_WHAT_REQS = 'what_requires'
-PATCH_ACTION_QUERY_DEPS = 'query_dependencies'
+PATCH_ACTION_GET_VERSION = "version"
+PATCH_ACTION_UPLOAD = "upload"
+PATCH_ACTION_UPLOAD_DIR = "upload_dir"
+PATCH_ACTION_APPLY = "apply"
+PATCH_ACTION_REMOVE = "remove"
+PATCH_ACTION_DELETE = "delete"
+PATCH_ACTION_QUERY = "query"
+PATCH_ACTION_SHOW = "show"
+PATCH_ACTION_COMMIT = "commit"
+PATCH_ACTION_WHAT_REQS = "what_requires"
+PATCH_ACTION_QUERY_DEPS = "query_dependencies"
 
-PATCH_API_VERSION = ['/']
+PATCH_API_VERSION = ["/"]
 
 PATCH_UPLOAD_PATHS = [
-    '/v1/upload',
-    '/patch/upload',
+    "/v1/upload",
+    "/patch/upload",
 ]
 
 # upload_dir is not supported for REST API access
-PATCH_UPLOAD_DIR_PATHS = [
-    '/patch/upload_dir'
-]
+PATCH_UPLOAD_DIR_PATHS = ["/patch/upload_dir"]
 
 PATCH_APPLY_PATHS = [
-    '/v1/apply/{patch_id}',
-    '/patch/apply/{patch_id:.*?}',
+    "/v1/apply/{patch_id}",
+    "/patch/apply/{patch_id:.*?}",
 ]
 
 PATCH_REMOVE_PATHS = [
-    '/v1/remove/{patch_id}',
-    '/patch/remove/{patch_id:.*?}',
+    "/v1/remove/{patch_id}",
+    "/patch/remove/{patch_id:.*?}",
 ]
 
 PATCH_DELETE_PATHS = [
-    '/v1/delete/{patch_id}',
-    '/patch/delete/{patch_id:.*?}',
+    "/v1/delete/{patch_id}",
+    "/patch/delete/{patch_id:.*?}",
 ]
 
 PATCH_QUERY_PATHS = [
-    '/v1/query',
-    '/patch/query',
+    "/v1/query",
+    "/patch/query",
 ]
 
 PATCH_SHOW_PATHS = [
-    '/v1/show/{patch_id}',
-    '/patch/show/{patch_id:.*?}',
+    "/v1/show/{patch_id}",
+    "/patch/show/{patch_id:.*?}",
 ]
 
 PATCH_COMMIT_PATHS = [
-    '/patch/commit_dry_run/{patch_id:.*?}',
-    '/patch/commit/{patch_id:.*?}',
+    "/patch/commit_dry_run/{patch_id:.*?}",
+    "/patch/commit/{patch_id:.*?}",
 ]
 
-PATCH_WHAT_REQS_PATHS = [
-    '/patch/what_requires/{patch_id:.*?}'
-]
+PATCH_WHAT_REQS_PATHS = ["/patch/what_requires/{patch_id:.*?}"]
 
-PATCH_QUERY_DEPS_PATHS = [
-    '/patch/query_dependencies/{patch_id:.*?}'
-]
+PATCH_QUERY_DEPS_PATHS = ["/patch/query_dependencies/{patch_id:.*?}"]
 
 PATCH_PATH_MAP = {
     PATCH_ACTION_GET_VERSION: PATCH_API_VERSION,
@@ -290,37 +265,37 @@ PATCH_PATH_MAP = {
 
 # Identity
 IDENTITY_USERS_PATH = [
-    '/v3/users',
-    '/v3/users/{user_id}',
+    "/v3/users",
+    "/v3/users/{user_id}",
 ]
 
 IDENTITY_USERS_PW_PATH = [
-    '/v3/users/{user_id}/password',
+    "/v3/users/{user_id}/password",
 ]
 
 IDENTITY_USER_GROUPS_PATH = [
-    '/v3/groups',
-    '/v3/groups/{group_id}',
-    '/v3/groups/{group_id}/users/{user_id}',
+    "/v3/groups",
+    "/v3/groups/{group_id}",
+    "/v3/groups/{group_id}/users/{user_id}",
 ]
 
 IDENTITY_ROLES_PATH = [
-    '/v3/roles',
-    '/v3/roles/{role_id}',
+    "/v3/roles",
+    "/v3/roles/{role_id}",
 ]
 
 IDENTITY_PROJECTS_PATH = [
-    '/v3/projects',
-    '/v3/projects/{project_id}',
+    "/v3/projects",
+    "/v3/projects/{project_id}",
 ]
 
 IDENTITY_PROJECTS_ROLE_PATH = [
-    '/v3/projects/{project_id}/users/{user_id}/roles/{role_id}',
-    '/v3/projects/{project_id}/groups/{group_id}/roles/{role_id}',
+    "/v3/projects/{project_id}/users/{user_id}/roles/{role_id}",
+    "/v3/projects/{project_id}/groups/{group_id}/roles/{role_id}",
 ]
 
-IDENTITY_TOKEN_REVOKE_EVENTS_PATH = [
-    '/v3/auth/tokens',
+IDENTITY_TOKEN_EVENTS_PATH = [
+    "/v3/auth/tokens",
 ]
 
 IDENTITY_PATH_MAP = {
@@ -329,74 +304,64 @@ IDENTITY_PATH_MAP = {
     consts.RESOURCE_TYPE_IDENTITY_GROUPS: IDENTITY_USER_GROUPS_PATH,
     consts.RESOURCE_TYPE_IDENTITY_ROLES: IDENTITY_ROLES_PATH,
     consts.RESOURCE_TYPE_IDENTITY_PROJECTS: IDENTITY_PROJECTS_PATH,
-    consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS:
-        IDENTITY_PROJECTS_ROLE_PATH,
-    consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS:
-        IDENTITY_TOKEN_REVOKE_EVENTS_PATH,
+    consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS: IDENTITY_PROJECTS_ROLE_PATH,
+    consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS: IDENTITY_TOKEN_EVENTS_PATH,
 }
 
 ROUTE_METHOD_MAP = {
     consts.ENDPOINT_TYPE_COMPUTE: {
-        FLAVOR_RESOURCE_TAG: ['POST', 'DELETE'],
-        FLAVOR_ACCESS_RESOURCE_TAG: ['POST'],
-        FLAVOR_EXTRA_SPECS_RESOURCE_TAG: ['POST', 'PUT', 'DELETE'],
-        KEYPAIRS_RESOURCE_TAG: ['POST', 'DELETE'],
-        QUOTA_RESOURCE_TAG: ['PUT', 'DELETE', 'GET'],
-        QUOTA_CLASS_RESOURCE_TAG: ['PUT'],
+        FLAVOR_RESOURCE_TAG: ["POST", "DELETE"],
+        FLAVOR_ACCESS_RESOURCE_TAG: ["POST"],
+        FLAVOR_EXTRA_SPECS_RESOURCE_TAG: ["POST", "PUT", "DELETE"],
+        KEYPAIRS_RESOURCE_TAG: ["POST", "DELETE"],
+        QUOTA_RESOURCE_TAG: ["PUT", "DELETE", "GET"],
+        QUOTA_CLASS_RESOURCE_TAG: ["PUT"],
     },
     consts.ENDPOINT_TYPE_VOLUME: {
-        QUOTA_RESOURCE_TAG: ['PUT', 'DELETE', 'GET'],
-        QUOTA_CLASS_RESOURCE_TAG: ['PUT'],
+        QUOTA_RESOURCE_TAG: ["PUT", "DELETE", "GET"],
+        QUOTA_CLASS_RESOURCE_TAG: ["PUT"],
     },
     dccommon_consts.ENDPOINT_TYPE_PLATFORM: {
-        consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: ['POST', 'DELETE'],
-        consts.RESOURCE_TYPE_SYSINV_USER: ['PATCH', 'PUT'],
-        consts.RESOURCE_TYPE_SYSINV_LOAD: ['POST', 'DELETE'],
-        consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: ['POST', 'PATCH', 'DELETE'],
+        consts.RESOURCE_TYPE_SYSINV_CERTIFICATE: ["POST", "DELETE"],
+        consts.RESOURCE_TYPE_SYSINV_USER: ["PATCH", "PUT"],
+        consts.RESOURCE_TYPE_SYSINV_LOAD: ["POST", "DELETE"],
+        consts.RESOURCE_TYPE_SYSINV_DEVICE_IMAGE: ["POST", "PATCH", "DELETE"],
     },
     consts.ENDPOINT_TYPE_NETWORK: {
-        consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP: ['POST', 'PUT', 'DELETE'],
-        consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE: ['POST', 'DELETE'],
-        consts.RESOURCE_TYPE_NETWORK_QUOTA_SET: ['PUT', 'DELETE'],
-        consts.RESOURCE_TYPE_QOS_POLICY: ['POST', 'PUT', 'DELETE'],
+        consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP: ["POST", "PUT", "DELETE"],
+        consts.RESOURCE_TYPE_NETWORK_SECURITY_GROUP_RULE: ["POST", "DELETE"],
+        consts.RESOURCE_TYPE_NETWORK_QUOTA_SET: ["PUT", "DELETE"],
+        consts.RESOURCE_TYPE_QOS_POLICY: ["POST", "PUT", "DELETE"],
     },
     dccommon_consts.ENDPOINT_TYPE_PATCHING: {
-        PATCH_ACTION_GET_VERSION: ['GET'],
-        PATCH_ACTION_UPLOAD: ['POST'],
-        PATCH_ACTION_UPLOAD_DIR: ['POST'],
-        PATCH_ACTION_APPLY: ['POST'],
-        PATCH_ACTION_REMOVE: ['POST'],
-        PATCH_ACTION_DELETE: ['POST'],
-        PATCH_ACTION_QUERY: ['GET'],
-        PATCH_ACTION_SHOW: ['POST', 'GET'],
-        PATCH_ACTION_COMMIT: ['POST'],
-        PATCH_ACTION_WHAT_REQS: ['GET'],
-        PATCH_ACTION_QUERY_DEPS: ['GET'],
-        SOFTWARE_ACTION_QUERY: ['GET'],
-        SOFTWARE_ACTION_SHOW: ['GET'],
-        SOFTWARE_ACTION_QUERY_DEPENDENCIES: ['GET'],
-        SOFTWARE_ACTION_COMMIT_PATCH: ['POST'],
+        PATCH_ACTION_GET_VERSION: ["GET"],
+        PATCH_ACTION_UPLOAD: ["POST"],
+        PATCH_ACTION_UPLOAD_DIR: ["POST"],
+        PATCH_ACTION_APPLY: ["POST"],
+        PATCH_ACTION_REMOVE: ["POST"],
+        PATCH_ACTION_DELETE: ["POST"],
+        PATCH_ACTION_QUERY: ["GET"],
+        PATCH_ACTION_SHOW: ["POST", "GET"],
+        PATCH_ACTION_COMMIT: ["POST"],
+        PATCH_ACTION_WHAT_REQS: ["GET"],
+        PATCH_ACTION_QUERY_DEPS: ["GET"],
+        SOFTWARE_ACTION_QUERY: ["GET"],
+        SOFTWARE_ACTION_SHOW: ["GET"],
+        SOFTWARE_ACTION_QUERY_DEPENDENCIES: ["GET"],
+        SOFTWARE_ACTION_COMMIT_PATCH: ["POST"],
     },
     dccommon_consts.ENDPOINT_TYPE_IDENTITY: {
-        consts.RESOURCE_TYPE_IDENTITY_USERS:
-            ['POST', 'PATCH', 'DELETE'],
-        consts.RESOURCE_TYPE_IDENTITY_GROUPS:
-            ['POST', 'PUT', 'PATCH', 'DELETE'],
-        consts.RESOURCE_TYPE_IDENTITY_USERS_PASSWORD:
-            ['POST'],
-        consts.RESOURCE_TYPE_IDENTITY_ROLES:
-            ['POST', 'PATCH', 'DELETE'],
-        consts.RESOURCE_TYPE_IDENTITY_PROJECTS:
-            ['POST', 'PATCH', 'DELETE'],
-        consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS:
-            ['PUT', 'DELETE'],
-        consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS:
-            ['DELETE']
-
-    }
+        consts.RESOURCE_TYPE_IDENTITY_USERS: ["POST", "PATCH", "DELETE"],
+        consts.RESOURCE_TYPE_IDENTITY_GROUPS: ["POST", "PUT", "PATCH", "DELETE"],
+        consts.RESOURCE_TYPE_IDENTITY_USERS_PASSWORD: ["POST"],
+        consts.RESOURCE_TYPE_IDENTITY_ROLES: ["POST", "PATCH", "DELETE"],
+        consts.RESOURCE_TYPE_IDENTITY_PROJECTS: ["POST", "PATCH", "DELETE"],
+        consts.RESOURCE_TYPE_IDENTITY_PROJECT_ROLE_ASSIGNMENTS: ["PUT", "DELETE"],
+        consts.RESOURCE_TYPE_IDENTITY_TOKEN_REVOKE_EVENTS: ["DELETE"],
+    },
 }
 
-LOAD_VAULT_DIR = '/opt/dc-vault/loads'
-LOAD_VAULT_TMP_DIR = '/opt/dc-vault/loads/load_tmpdir'
+LOAD_VAULT_DIR = "/opt/dc-vault/loads"
+LOAD_VAULT_TMP_DIR = "/opt/dc-vault/loads/load_tmpdir"
 ENDPOINT_TYPE_PATCHING_TMPDIR = "/scratch/patch-api-proxy-tmpdir"
 ENDPOINT_TYPE_PLATFORM_TMPDIR = "/scratch/platform-api-proxy-tmpdir"
@@ -25,17 +25,14 @@ class Application(object):
 
     @classmethod
     def factory(cls, global_config, **local_config):
-        """Used for paste app factories in paste.deploy config files.
-
-        """
+        """Used for paste app factories in paste.deploy config files."""
         return cls(**local_config)
 
     def __call__(self, environ, start_response):
-        raise NotImplementedError('You must implement __call__')
+        raise NotImplementedError("You must implement __call__")
 
 
 class Middleware(Application):
-
     """Base WSGI middleware wrapper.
 
     These classes require an application to be
@@ -46,7 +43,6 @@ class Middleware(Application):
 
     @classmethod
     def factory(cls, global_config, **local_config):
-
         """Used for paste app factories in paste.deploy config files.
 
         Any local configuration (that is, values under the [filter:APPNAME]
@@ -58,13 +54,13 @@ class Middleware(Application):
             # https://bugs.launchpad.net/starlingx/+bug/1865085
             # pylint: disable-next=too-many-function-args
             return cls(app, global_config, **local_config)
+
         return _factory
 
     def __init__(self, application):
         self.application = application
 
     def process_request(self, req):
-
         """Called on each request.
 
         If this returns None, the next application down the stack will be
@@ -24,7 +24,7 @@ import psutil
 
 from dccommon import consts as dccommon_consts
 from dccommon.drivers.openstack.sdk_platform import (
-    OptimizedOpenStackDriver as OpenStackDriver
+    OptimizedOpenStackDriver as OpenStackDriver,
 )
 from dcorch.common import consts
 
@@ -96,11 +96,11 @@ def get_sync_endpoint(cfg):
 
 def get_url_path_components(url):
     result = urlparse(url)
-    return result.path.split('/')
+    return result.path.split("/")
 
 
 def get_routing_match_arguments(environ):
-    return environ['wsgiorg.routing_args'][1]
+    return environ["wsgiorg.routing_args"][1]
 
 
 def get_routing_match_value(environ, key):
@@ -115,37 +115,36 @@ def get_routing_match_value(environ, key):
 
 
 def get_operation_type(environ):
-    return environ['REQUEST_METHOD'].lower()
+    return environ["REQUEST_METHOD"].lower()
 
 
 def get_id_from_query_string(environ, id):
     import urllib.parse as six_urlparse
-    params = six_urlparse.parse_qs(environ.get('QUERY_STRING', ''))
+
+    params = six_urlparse.parse_qs(environ.get("QUERY_STRING", ""))
     return params.get(id, [None])[0]
 
 
 def get_user_id(environ):
-    return get_id_from_query_string(environ, 'user_id')
+    return get_id_from_query_string(environ, "user_id")
 
 
 def show_usage(environ):
-    return get_id_from_query_string(environ, 'usage') == 'True'
+    return get_id_from_query_string(environ, "usage") == "True"
 
 
 def get_tenant_id(environ):
-    return get_routing_match_value(environ, 'tenant_id')
+    return get_routing_match_value(environ, "tenant_id")
 
 
 def set_request_forward_environ(req, remote_host, remote_port):
-    req.environ['HTTP_X_FORWARDED_SERVER'] = req.environ.get(
-        'HTTP_HOST', '')
-    req.environ['HTTP_X_FORWARDED_SCHEME'] = req.environ['wsgi.url_scheme']
-    req.environ['HTTP_HOST'] = remote_host + ':' + str(remote_port)
-    req.environ['SERVER_NAME'] = remote_host
-    req.environ['SERVER_PORT'] = remote_port
-    if ('REMOTE_ADDR' in req.environ and 'HTTP_X_FORWARDED_FOR' not in
-            req.environ):
-        req.environ['HTTP_X_FORWARDED_FOR'] = req.environ['REMOTE_ADDR']
+    req.environ["HTTP_X_FORWARDED_SERVER"] = req.environ.get("HTTP_HOST", "")
+    req.environ["HTTP_X_FORWARDED_SCHEME"] = req.environ["wsgi.url_scheme"]
+    req.environ["HTTP_HOST"] = remote_host + ":" + str(remote_port)
+    req.environ["SERVER_NAME"] = remote_host
+    req.environ["SERVER_PORT"] = remote_port
+    if "REMOTE_ADDR" in req.environ and "HTTP_X_FORWARDED_FOR" not in req.environ:
+        req.environ["HTTP_X_FORWARDED_FOR"] = req.environ["REMOTE_ADDR"]
 
 
 def _get_fernet_keys():
@@ -157,18 +156,21 @@ def _get_fernet_keys():
     )
     try:
         key_list = os_client.sysinv_client.get_fernet_keys()
-        return [str(getattr(key, 'key')) for key in key_list]
-    except (keystone_exceptions.connection.ConnectTimeout,
-            keystone_exceptions.ConnectFailure) as e:
-        LOG.info("get_fernet_keys: cloud {} is not reachable [{}]"
-                 .format(dccommon_consts.CLOUD_0, str(e)))
+        return [str(getattr(key, "key")) for key in key_list]
+    except (
+        keystone_exceptions.connection.ConnectTimeout,
+        keystone_exceptions.ConnectFailure,
+    ) as e:
+        LOG.info(
+            "get_fernet_keys: cloud {} is not reachable [{}]".format(
+                dccommon_consts.CLOUD_0, str(e)
+            )
+        )
         OpenStackDriver.delete_region_clients(dccommon_consts.CLOUD_0)
         return None
     except (AttributeError, TypeError) as e:
         LOG.info("get_fernet_keys error {}".format(e))
-        OpenStackDriver.delete_region_clients(
-            dccommon_consts.CLOUD_0, clear_token=True
-        )
+        OpenStackDriver.delete_region_clients(dccommon_consts.CLOUD_0, clear_token=True)
         return None
     except Exception as e:
         LOG.exception(e)
@@ -186,7 +188,7 @@ def _restore_padding(token):
     mod_returned = len(token) % 4
     if mod_returned:
         missing_padding = 4 - mod_returned
-        token += b'=' * missing_padding
+        token += b"=" * missing_padding
     return token
 
 
@@ -230,8 +232,11 @@ def retrieve_token_audit_id(fernet_token):
     unpacked_token = _unpack_token(fernet_token, fernet_keys)
     if unpacked_token:
         audit_id = unpacked_token[-1][0]
-        audit_id = base64.urlsafe_b64encode(
-            audit_id.encode('utf-8')).rstrip(b'=').decode('utf-8')
+        audit_id = (
+            base64.urlsafe_b64encode(audit_id.encode("utf-8"))
+            .rstrip(b"=")
+            .decode("utf-8")
+        )
 
     return audit_id
 
@@ -243,12 +248,12 @@ def cleanup(environ):
     :return: None
     """
 
-    if 'webob._parsed_post_vars' in environ:
-        post_vars, body_file = environ['webob._parsed_post_vars']
+    if "webob._parsed_post_vars" in environ:
+        post_vars, body_file = environ["webob._parsed_post_vars"]
         # the content is copied into a BytesIO or temporary file
         if not isinstance(body_file, bytes):
             body_file.close()
         for f in post_vars.keys():
             item = post_vars[f]
-            if hasattr(item, 'file'):
+            if hasattr(item, "file"):
                 item.file.close()
@@ -22,7 +22,11 @@ modules = [
 ]
 
 # List of modules that are already formatted with black
-formatted_modules = ["dccommon", "dcdbsync"]
+formatted_modules = [
+    "dccommon",
+    "dcdbsync",
+    "dcorch/api",
+]
 
 
 # Function to run black check
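The script above tracks converted modules in formatted_modules and, per its trailing comment, runs a Black check over them. A hedged sketch of such a runner (the "black --check" subprocess invocation is an assumption; the project's actual helper may differ):

import subprocess
import sys


def run_black_check(modules):
    # "black --check" exits non-zero when any file would be reformatted
    failed = [m for m in modules
              if subprocess.run(["black", "--check", m]).returncode != 0]
    if failed:
        print("black check failed for: " + ", ".join(failed))
        sys.exit(1)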