Currently we set the gnutls log function when creating a
TLS context. However, the setting is in fact global, not
per-context, so we should be setting it when we first call
gnutls_global_init() instead.

Signed-off-by: Daniel P. Berrange <berrange@redhat.com>
---
src/rpc/virnettlscontext.c | 21 ++++++++++-----------
1 file changed, 10 insertions(+), 11 deletions(-)
diff --git a/src/rpc/virnettlscontext.c b/src/rpc/virnettlscontext.c
index fa9ca41..425f7ff 100644
--- a/src/rpc/virnettlscontext.c
+++ b/src/rpc/virnettlscontext.c
@@ -708,7 +708,6 @@ static virNetTLSContextPtr virNetTLSContextNew(const char *cacert,
bool isServer)
{
virNetTLSContextPtr ctxt;
- const char *gnutlsdebug;
int err;
if (virNetTLSContextInitialize() < 0)
@@ -717,16 +716,6 @@ static virNetTLSContextPtr virNetTLSContextNew(const char *cacert,
if (!(ctxt = virObjectLockableNew(virNetTLSContextClass)))
return NULL;
- if ((gnutlsdebug = virGetEnvAllowSUID("LIBVIRT_GNUTLS_DEBUG")) != NULL) {
- int val;
- if (virStrToLong_i(gnutlsdebug, NULL, 10, &val) < 0)
- val = 10;
- gnutls_global_set_log_level(val);
- gnutls_global_set_log_function(virNetTLSLog);
- VIR_DEBUG("Enabled GNUTLS debug");
- }
-
-
err = gnutls_certificate_allocate_credentials(&ctxt->x509cred);
if (err) {
virReportError(VIR_ERR_SYSTEM_ERROR,
@@ -1440,5 +1429,15 @@ void virNetTLSSessionDispose(void *obj)
*/
void virNetTLSInit(void)
{
+ const char *gnutlsdebug;
+ if ((gnutlsdebug = virGetEnvAllowSUID("LIBVIRT_GNUTLS_DEBUG")) != NULL) {
+ int val;
+ if (virStrToLong_i(gnutlsdebug, NULL, 10, &val) < 0)
+ val = 10;
+ gnutls_global_set_log_level(val);
+ gnutls_global_set_log_function(virNetTLSLog);
+ VIR_DEBUG("Enabled GNUTLS debug");
+ }
+
gnutls_global_init();
}
--
2.5.5