mirror of https://github.com/munin-monitoring/contrib.git synced 2025-07-22 14:16:00 +00:00

Plugin solr4_: fix code style issues reported by flake8

Lars Kruse 2019-08-13 23:55:35 +02:00
parent 117c508716
commit be1dfde5da
2 changed files with 42 additions and 28 deletions
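
The check referenced in the commit message can be reproduced locally with flake8. Below is a minimal sketch using flake8's legacy Python API, assuming flake8 (3.x or later) is installed and the script is run from the root of a contrib checkout; repository-specific options (such as the maximum line length enforced by CI) are not part of this commit and are left at flake8's defaults here.

    # check_style.py - hypothetical helper, not part of this commit.
    # Assumes flake8 is installed and the working directory is the repository root.
    from flake8.api import legacy as flake8

    # Project-specific settings (e.g. max-line-length) would normally come from the
    # repository's flake8 configuration; defaults are used in this sketch.
    style_guide = flake8.get_style_guide()
    report = style_guide.check_files(["plugins/solr/solr4_"])
    print("remaining flake8 violations: %d" % report.total_errors)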

plugins/solr/solr4_

@@ -59,10 +59,11 @@ Project repo: https://github.com/averni/munin-solr
 """
-import sys
-import os
-import httplib
+import httplib
 import json
+import os
+import sys

+
 def parse_params():
     plugname = os.path.basename(sys.argv[0]).split('_', 2)[1:]
@@ -72,7 +73,7 @@ def parse_params():
         'core': plugname[1] if len(plugname) > 1 else '',
         'params': {}
     }
-    if plugname[0] in[ 'qps', 'requesttimes']:
+    if plugname[0] in['qps', 'requesttimes']:
         data = params['core'].rsplit('_', 1)
         handler = data.pop()
         params['params'] = {
@@ -86,15 +87,17 @@ def parse_params():
         params['params']['core'] = params['core']
     return params

+
 #############################################################################
 # Datasources

 class CheckException(Exception):
     pass

+
 class JSONReader:
     @classmethod
-    def readValue(cls, struct, path, convert = None):
+    def readValue(cls, struct, path, convert=None):
         if not path[0] in struct:
             return -1
         obj = struct[path[0]]
@@ -106,6 +109,7 @@ class JSONReader:
             return convert(obj)
         return obj

+
 class SolrCoresAdmin:
     def __init__(self, host, solrurl):
         self.host = host
@@ -119,7 +123,8 @@ class SolrCoresAdmin:
         res = conn.getresponse()
         data = res.read()
         if res.status != 200:
-            raise CheckException("Cores status fetch failed: %s\n%s" %( str(res.status), res.read()))
+            raise CheckException("Cores status fetch failed: %s\n%s"
+                                 % (str(res.status), res.read()))
         self.data = json.loads(data)

     def getCores(self):
@@ -128,7 +133,7 @@ class SolrCoresAdmin:
         cores = JSONReader.readValue(self.data, ['status'])
         return cores.keys()

-    def indexsize(self, core = None):
+    def indexsize(self, core=None):
         if not self.data:
             self.fetchcores()
         if core:
@@ -138,9 +143,11 @@
         else:
             ret = {}
             for core in self.getCores():
-                ret[core] = JSONReader.readValue(self.data, ['status', core, 'index', 'sizeInBytes'])
+                ret[core] = JSONReader.readValue(self.data,
+                                                 ['status', core, 'index', 'sizeInBytes'])
             return ret

+
 class SolrCoreMBean:
     def __init__(self, host, solrurl, core):
         self.host = host
@@ -155,7 +162,7 @@ class SolrCoreMBean:
         res = conn.getresponse()
         data = res.read()
         if res.status != 200:
-            raise CheckException("MBean fetch failed: %s\n%s" %( str(res.status), res.read()))
+            raise CheckException("MBean fetch failed: %s\n%s" % (str(res.status), res.read()))
         raw_data = json.loads(data)
         data = {}
         self.data = {
@@ -176,24 +183,24 @@ class SolrCoreMBean:
         res = conn.getresponse()
         data = res.read()
         if res.status != 200:
-            raise CheckException("System fetch failed: %s\n%s" %( str(res.status), res.read()))
+            raise CheckException("System fetch failed: %s\n%s" % (str(res.status), res.read()))
         self.data['system'] = json.loads(data)

     def _readInt(self, path):
         return self._read(path, int)

     def _readFloat(self, path):
         return self._read(path, float)

-    def _read(self, path, convert = None):
+    def _read(self, path, convert=None):
         if self.data is None:
             self._fetch()
         return JSONReader.readValue(self.data, path, convert)

     def _readCache(self, cache):
         result = {}
-        for key, ftype in [('lookups', int), ('hits', int), ('inserts', int), ('evictions', int), ('hitratio', float)]:
+        for key, ftype in [('lookups', int), ('hits', int), ('inserts', int), ('evictions', int),
+                           ('hitratio', float)]:
             path = ['solr-mbeans', 'CACHE', cache, 'stats', 'cumulative_%s' % key]
             result[key] = self._read(path, ftype)
         result['size'] = self._readInt(['solr-mbeans', 'CACHE', cache, 'stats', 'size'])
@@ -240,6 +247,7 @@ class SolrCoreMBean:
             data[k] = int(data[k])
         return data

+
 #############################################################################
 # Graph Templates

@@ -337,6 +345,7 @@ max.label Max
 max.colour ff0000
 """

+
 #############################################################################
 # Graph management

@@ -351,7 +360,8 @@ class SolrMuninGraph:
         return SolrCoreMBean(self.hostport, self.solrurl, core)

     def _cacheConfig(self, cacheType, cacheName):
-        return CACHE_GRAPH_TPL.format(core=self.params['core'], cacheType=cacheType, cacheName=cacheName)
+        return CACHE_GRAPH_TPL.format(core=self.params['core'], cacheType=cacheType,
+                                      cacheName=cacheName)

     def _format4Value(self, value):
         if isinstance(value, basestring):
@@ -362,18 +372,20 @@ class SolrMuninGraph:
             return "%.6f"
         return "%s"

-    def _cacheFetch(self, cacheType, fields = None):
+    def _cacheFetch(self, cacheType, fields=None):
         fields = fields or ['size', 'lookups', 'hits', 'inserts', 'evictions']
         hits_fields = ['lookups', 'hits', 'inserts']
         size_fields = ['size', 'evictions']
         results = []
         solrmbean = self._getMBean(self.params['core'])
         data = getattr(solrmbean, cacheType)()
-        results.append('multigraph solr_{core}_{cacheType}_hit_rates'.format(core=self.params['core'], cacheType=cacheType))
+        results.append('multigraph solr_{core}_{cacheType}_hit_rates'
+                       .format(core=self.params['core'], cacheType=cacheType))
         for label in hits_fields:
             vformat = self._format4Value(data[label])
             results.append(("%s.value " + vformat) % (label, data[label]))
-        results.append('multigraph solr_{core}_{cacheType}_size'.format(core=self.params['core'], cacheType=cacheType))
+        results.append('multigraph solr_{core}_{cacheType}_size'
+                       .format(core=self.params['core'], cacheType=cacheType))
         for label in size_fields:
             results.append("%s.value %d" % (label, data[label]))
         return "\n".join(results)
@@ -397,12 +409,14 @@ class SolrMuninGraph:

     def qpsConfig(self):
         cores = self._getCores()
-        graph = [QPSCORE_GRAPH_TPL.format(core=c, gtype='LINESTACK1') for pos,c in enumerate(cores) ]
+        graph = [QPSCORE_GRAPH_TPL.format(core=c, gtype='LINESTACK1')
+                 for pos, c in enumerate(cores)]
         return QPSMAIN_GRAPH_TPL.format(
             cores_qps_graphs='\n'.join(graph),
             handler=self.params['params']['handler'],
             core=self.params['core'],
-            cores_qps_cdefs='%s,%s' % (','.join(map(lambda x: 'qps_%s' % x, cores)),','.join(['+']*(len(cores)-1))),
+            cores_qps_cdefs='%s,%s' % (','.join(map(lambda x: 'qps_%s' % x, cores)),
+                                       ','.join(['+'] * (len(cores)-1))),
             gorder=','.join(cores)
         )
@@ -411,12 +425,14 @@ class SolrMuninGraph:
         cores = self._getCores()
         for c in cores:
             mbean = self._getMBean(c)
-            results.append('qps_%s.value %d' % (c, mbean.requestcount(self.params['params']['handler'])))
+            results.append('qps_%s.value %d'
+                           % (c, mbean.requestcount(self.params['params']['handler'])))
         return '\n'.join(results)

     def requesttimesConfig(self):
         cores = self._getCores()
-        graphs = [REQUESTTIMES_GRAPH_TPL.format(core=c, handler=self.params['params']['handler']) for c in cores ]
+        graphs = [REQUESTTIMES_GRAPH_TPL.format(core=c, handler=self.params['params']['handler'])
+                  for c in cores]
         return '\n'.join(graphs)

     def requesttimes(self):
@@ -438,7 +454,7 @@ class SolrMuninGraph:

     def indexsizeConfig(self):
         cores = self._getCores()
-        graph = [ INDEXSIZECORE_GRAPH_TPL.format(core=c) for c in cores]
+        graph = [INDEXSIZECORE_GRAPH_TPL.format(core=c) for c in cores]
         return INDEXSIZE_GRAPH_TPL.format(cores=" ".join(cores), cores_config="\n".join(graph))

     def indexsize(self):
@@ -458,7 +474,6 @@ class SolrMuninGraph:
         return MEMORYUSAGE_GRAPH_TPL.format(availableram=memory['max'] * 1.05)

     def memory(self):
-        results = []
         cores = self._getCores()
         mbean = self._getMBean(cores[0])
         memory = mbean.memory()
@@ -488,6 +503,7 @@ class SolrMuninGraph:
     def queryresultcache(self):
         return self._cacheFetch('queryresultcache')

+
 if __name__ == '__main__':
     params = parse_params()
     SOLR_HOST_PORT = os.environ.get('host_port', 'localhost:8080').replace('http://', '')
@@ -496,5 +512,4 @@ if __name__ == '__main__':
         SOLR_URL = '/' + SOLR_URL
     mb = SolrMuninGraph(SOLR_HOST_PORT, SOLR_URL, params)
     if hasattr(mb, params['op']):
-        print getattr(mb, params['op'])(params['type'])
+        print(getattr(mb, params['op'])(params['type']))
-

View file

@@ -421,7 +421,6 @@ plugins/solaris/io_disk
 plugins/solaris/zones_cpu
 plugins/solaris/zones_mem
 plugins/solr/solr
-plugins/solr/solr4_
 plugins/solr/solrmulticore
 plugins/spamassasin/sa-learn
 plugins/sphinx/sphinx_documents