gnocchi/notification: allow configuring the maximum number of parallel requests
This overrides $executor_thread_pool_size with a global option that also caps the number of parallel requests that can be sent to Gnocchi.
Change-Id: Iaa7e3d0739a63d571dd2afc262d191dffe5a0eef
This commit is contained in:
parent 910140d535
commit c84c113c0a
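To illustrate the intent of this change, here is a minimal, self-contained sketch, assuming oslo.config and requests are installed. It is not the actual Ceilometer code, and the ThreadPoolExecutor only stands in for the oslo.messaging executor pool that the real notification service uses; the point is that a single global option sizes both the worker pool and the outgoing HTTP connection pool.

# Illustrative sketch only: one global option capping both the worker pool
# and the HTTP connection pool, mirroring what this commit does.
from concurrent import futures

from oslo_config import cfg
from requests.adapters import HTTPAdapter
import requests

OPTS = [
    cfg.IntOpt('max_parallel_requests',
               default=64,
               min=1,
               help='Maximum number of parallel requests for '
                    'services to handle at the same time.'),
]

conf = cfg.ConfigOpts()
conf.register_opts(OPTS)
conf([])  # a real service would parse ceilometer.conf here

# Outgoing HTTP connections are capped by the option ...
session = requests.Session()
adapter = HTTPAdapter(pool_connections=conf.max_parallel_requests,
                      pool_maxsize=conf.max_parallel_requests)
session.mount("http://", adapter)
session.mount("https://", adapter)

# ... and so is the number of worker threads handling notifications
# (ThreadPoolExecutor stands in for the oslo.messaging executor).
executor = futures.ThreadPoolExecutor(max_workers=conf.max_parallel_requests)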
@@ -13,6 +13,7 @@
 from gnocchiclient import client
 from gnocchiclient import exceptions as gnocchi_exc
+import keystoneauth1.session
 from oslo_log import log
 
 from ceilometer import keystone_client

@@ -26,6 +27,10 @@ def get_gnocchiclient(conf, timeout_override=False):
                         timeout_override)
                else conf.dispatcher_gnocchi.request_timeout)
     session = keystone_client.get_session(conf, group=group, timeout=timeout)
+    adapter = keystoneauth1.session.TCPKeepAliveAdapter(
+        pool_maxsize=conf.max_parallel_requests)
+    session.mount("http://", adapter)
+    session.mount("https://", adapter)
     return client.Client('1', session,
                          interface=conf[group].interface,
                          region_name=conf[group].region_name,

@@ -260,7 +260,9 @@ class NotificationService(cotyledon.Service):
         # to maintain sequencing as much as possible.
         listener = messaging.get_batch_notification_listener(
             transport, targets, endpoints)
-        listener.start()
+        listener.start(
+            override_pool_size=self.conf.max_parallel_requests
+        )
         self.listeners.append(listener)
 
     def _refresh_agent(self, event):

@@ -312,7 +314,8 @@ class NotificationService(cotyledon.Service):
             batch_timeout=self.conf.notification.batch_timeout)
         # NOTE(gordc): set single thread to process data sequentially
         # if batching enabled.
-        batch = (1 if self.conf.notification.batch_size > 1 else None)
+        batch = (1 if self.conf.notification.batch_size > 1
+                 else self.conf.max_parallel_requests)
         self.pipeline_listener.start(override_pool_size=batch)
 
     def terminate(self):

@@ -68,6 +68,11 @@ OPTS = [
                default=600,
                help='Timeout seconds for HTTP requests. Set it to None to '
                     'disable timeout.'),
+    cfg.IntOpt('max_parallel_requests',
+               default=64,
+               min=1,
+               help='Maximum number of parallel requests for '
+                    'services to handle at the same time.'),
 ]
 

@@ -37,7 +37,6 @@ class HttpPublisher(publisher.ConfigPublisherBase):
     - ssl certificate verification can be disabled by setting `verify_ssl`
       to False
     - batching can be configured by `batch`
-    - connection pool size configured using `poolsize`
     - Basic authentication can be configured using the URL authentication
       scheme: http://username:password@example.com
     - For certificate authentication, `clientcert` and `clientkey` are the

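The docstring above documents the publisher's URL options. Purely as a made-up illustration (host, credentials and parameter values are assumptions, not taken from this change), a target combining the documented parameters might look like:

# Made-up example of an HTTP publisher target using the documented URL
# parameters: basic auth in the URL, SSL verification off, batching on.
publisher = ("http://alice:secret@example.com:8080/metering"
             "?verify_ssl=False&batch=True")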
@@ -121,9 +120,9 @@ class HttpPublisher(publisher.ConfigPublisherBase):
         self.raw_only = strutils.bool_from_string(
             self._get_param(params, 'raw_only', False))
 
-        pool_size = self._get_param(params, 'poolsize', 10, int)
         kwargs = {'max_retries': self.max_retries,
-                  'pool_connections': pool_size, 'pool_maxsize': pool_size}
+                  'pool_connections': conf.max_parallel_requests,
+                  'pool_maxsize': conf.max_parallel_requests}
         self.session = requests.Session()
 
         # authentication & config params have been removed, so use URL with

@@ -304,7 +304,8 @@ class TestRealNotificationHA(BaseRealNotification):
     def test_notification_threads(self, m_listener):
         self.CONF.set_override('batch_size', 1, group='notification')
         self.srv.run()
-        m_listener.assert_called_with(override_pool_size=None)
+        m_listener.assert_called_with(
+            override_pool_size=self.CONF.max_parallel_requests)
         m_listener.reset_mock()
         self.CONF.set_override('batch_size', 2, group='notification')
         self.srv.run()

@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    A new option named `max_parallel_requests` is available to control the
+    maximum number of parallel requests that can be executed by the agents.
+    This option also replaces the `poolsize` option of the HTTP publisher.
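To illustrate the last sentence of that release note with a hedged, stdlib-only sketch (the URL and values are made up): before this change the HTTP publisher read a `poolsize` query parameter from its target URL, defaulting to 10; after it, any such parameter is ignored and the connection pool is sized from the global option.

# Illustrative only: `poolsize` used to come from the publisher URL query
# string; after this change the pool is sized from the global
# max_parallel_requests option (default 64) and the URL value has no effect.
from urllib import parse

url = "http://alice:secret@example.com:8080/metering?batch=True&poolsize=20"
params = parse.parse_qs(parse.urlsplit(url).query)

old_pool_size = int(params.get('poolsize', ['10'])[0])  # before: 20
new_pool_size = 64  # after: conf.max_parallel_requests
print(old_pool_size, new_pool_size)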