
- have some dirs

This commit is contained in:
Steve Schnepp 2012-02-13 18:24:46 +01:00
parent 0b089ea777
commit 08346aac58
687 changed files with 0 additions and 0 deletions

plugins/wiki/mediawiki Executable file

@@ -0,0 +1,304 @@
#!/usr/bin/php
<?php
# mediawiki plugin for munin v0.2
# Reads the number of edits, views, articles, pages, users, admins and images from a MediaWiki
# http://www.mediawiki.org/wiki/MediaWiki | http://munin.projects.linpro.no/wiki
# by mutante of S23 | http://s23.org/wiki | greets to hundfred
# 2007-02-12 | v0.1 first version, didn't really work yet
# 2007-02-16 | v0.2 introduced different retrieval methods and separate graphs for the different values, which can be symlinked
# What you need to configure:
# - a getmethod
# -- if it's the mysql method: a valid dbhost, dbname, dbuser and dbpass
# -- if it's a URL-grabbing method: the URL of your Special:Statistics?action=raw page
# - the name of your wiki
# Read below for details
### Get Method - There are different ways to get the stats from MediaWiki:
## A - Database
# getmethod="mysql"
# reads directly from the MediaWiki MySQL database, table "site_stats"
# Note that this may not give you accurate numbers, depending on your MediaWiki version.
# MediaWiki is currently not using this table itself to generate (all) numbers for Special:Statistics?action=raw,
# but this may change in the near future
# The database method needs a valid mysql user to connect to the wiki database
# Comment this out if you use a URL method, or supply the database credentials inside this script:
# require_once("/home/mutante/wiki_mysql_conf.php");
# I include the database settings from elsewhere, so I don't have to show the password in /usr/share/..
# I also set "[mediawiki] user mutante" in plugin-conf.d/ so that my user can read the config
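# e.g. a hypothetical plugin-conf.d snippet for that:
# [mediawiki*]
# user mutante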
# alternatively set them in here like:
# $dbhost="localhost";
# $dbname="wikidb";
# $dbuser="wikiuser";
# $dbpass="yourpassword";
## B - URL reading
# These methods all retrieve the Special:Statistics?action=raw URL from MediaWiki via the webserver
# This is currently the preferred method for accurate stats, because MediaWiki doesn't use site_stats correctly at the moment
# getmethod="curl"
# uses curl via libcurl from PHP; should be the fastest, but you need the lib installed. If it works, use this.
# if it fails, you may try one of the following and test what works for you
# getmethod="file_get_contents"
# uses the PHP function file_get_contents() to read the Special:Statistics?action=raw URL from the webserver
# getmethod="fgets"
# uses the PHP function fgets() to read the Special:Statistics?action=raw URL from the webserver
# getmethod="fopen"
# uses the PHP function fopen() to read the Special:Statistics?action=raw URL from the webserver
# getmethod="lynx"
# uses "lynx -dump" to read the Special:Statistics?action=raw URL from the webserver
# probably slow, if all others fail...
## CONFIG HERE
$getmethod="curl";
# If you use one of the URL methods, you need to supply your Special:Statistics?action=raw URL
$statsurl="http://s23.org/wiki/Special:Statistics?action=raw";
# Name of your wiki
$wikiname="S23-Wiki";
## END CONFIG
# Parsing function for the URL retrieving methods
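# The raw statistics page is a single line of semicolon-separated key=value pairs.
# Hypothetical example (the field order assumed by the indices below; it may vary
# with your MediaWiki version):
# total=1234;good=567;views=890123;edits=4567;users=89;admins=5;images=123;jobs=0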
function parsebuffer($buffer)
{
$pieces = explode(";",$buffer);
$total = explode("=",$pieces[0]);
$total = $total[1];
$good = explode("=",$pieces[1]);
$good = $good[1];
$views = explode("=",$pieces[2]);
$views = $views[1];
$edits = explode("=",$pieces[3]);
$edits = $edits[1];
$users = explode("=",$pieces[4]);
$users = $users[1];
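# the images field may carry trailing markup depending on the fetch method, hence the cut at the first "<"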
$images = explode("=",$pieces[6]);
$images = $images[1];
$images = explode("<",$images);
$images = $images[0];
$admins = explode("=",$pieces[5]);
$admins = $admins[1];
$admins = trim($admins);
return array ($total,$good,$views,$edits,$users,$images,$admins);
}
# Output
# Check the filename suffix (the part after "_" in the symlink name) to decide which value to output
# Symlink the plugin once for each value you want displayed
# example: ln -s /usr/share/munin/plugins/mediawiki /etc/munin/plugins/mediawiki_views
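# e.g. the symlink /etc/munin/plugins/mediawiki_views gives $basename "mediawiki_views" and $suffix "views"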
$basename = preg_replace( '/^.+[\\\\\\/]/', '', $_SERVER['PHP_SELF'] );
$suffix=explode("_",$basename);
$suffix=$suffix[1];
# Print the config if called as "mediawiki config"
switch (isset($argv[1]) ? $argv[1] : "") {
case "config":
print <<<CONFIG
graph_title $wikiname $suffix
graph_vlabel number
graph_category wiki
graph_scale no
graph_info Reads the total number of $suffix from $wikiname\n
CONFIG;
switch ($suffix) {
case "views":
print <<<VIEWS
views.info Total number of page views
views.label views
views.type COUNTER\n
VIEWS;
break;
case "edits":
print <<<EDITS
edits.info Total number of page edits
edits.label edits
edits.type COUNTER\n
EDITS;
break;
case "articles":
print <<<ARTICLES
articles.info Total number of 'good' pages (articles)
articles.label articles
articles.type GAUGE\n
ARTICLES;
break;
case "pages":
print <<<PAGES
pages.info Total number of all pages
pages.label pages
pages.type GAUGE\n
PAGES;
break;
case "users":
print <<<USERS
users.info Total number of user accounts
users.label users
users.type GAUGE\n
USERS;
break;
case "images":
print <<<IMAGES
images.info Total number of uploaded images
images.label images
images.type GAUGE\n
IMAGES;
break;
case "admins":
print <<<ADMINS
admins.info Total number of admins (sysops)
admins.label admins
admins.type GAUGE\n
ADMINS;
break;
default:
print <<<ERROR
Error: link me as mediawiki_<type>, where type can be one of: views, edits, articles, pages, users, images or admins.\n
ERROR;
}
break;
default:
# Default Output
# The different methods to grab the stats
switch ($getmethod) {
case "mysql":
mysql_connect($dbhost, $dbuser, $dbpass) or die(mysql_error());
mysql_select_db($dbname) or die(mysql_error());
$query = "select * from site_stats";
$result = mysql_query($query) or die(mysql_error());
$row = mysql_fetch_array( $result );
$views=$row['ss_total_views'];
$edits=$row['ss_total_edits'];
$articles=$row['ss_good_articles'];
$pages=$row['ss_total_pages'];
$users=$row['ss_users'];
$images=$row['ss_images'];
break;
case "curl":
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $statsurl);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 10);
curl_setopt($ch, CURLOPT_TIMEOUT, 20);
curl_setopt($ch, CURLOPT_HEADER, false);
$buffer = curl_exec($ch);
curl_close($ch);
break;
case "fopen":
# fopen() returns a stream handle, so read it out before parsing
$fh = fopen($statsurl, "r");
$buffer = stream_get_contents($fh);
fclose($fh);
break;
case "file_get_contents":
$buffer = file_get_contents($statsurl);
break;
case "fgets":
# fgets() needs a handle and reads one line, which is all the raw page contains
$fh = fopen($statsurl, "r");
$buffer = fgets($fh);
fclose($fh);
break;
case "lynx":
$buffer = `lynx -dump "$statsurl"`;
break;
default:
$buffer = file_get_contents($statsurl);
}
# Parse
$buffer=trim($buffer);
list($total,$good,$views,$edits,$users,$images,$admins) = parsebuffer($buffer);
# Output
# $suffix was already derived from the symlink name above
switch ($suffix) {
case "views":
print "views.value $views\n";
break;
case "edits":
print "edits.value $edits\n";
break;
case "articles":
print "articles.value $good\n";
break;
case "pages":
print "pages.value $total\n";
break;
case "users":
print "users.value $users\n";
break;
case "images":
print "images.value $images\n";
break;
case "admins":
print "admins.value $admins\n";
break;
default:
print "Error: link me as mediawiki_<type>, where type can be one of: views, edits, articles, pages, users, images or admins.\n";
}
}
?>

plugins/wiki/moinoin_pages Executable file

@@ -0,0 +1,113 @@
#! /usr/local/bin/python
# Overview
# --------
#
# this is a munin plugin that lists the number of pages (including ACL-protected pages) in all wikis of a MoinMoin wikifarm
#
# Installation
# ------------
#
# Put this in your munin plugins directory. You probably want to set the following block in plugin-conf.d/munin-node so that it runs under the right user for your wiki:
#
# [moinmoin_*]
# user www
#
# Implementation notes
# --------------------
#
# It is quite Koumbit-specific:
# 1. the wikifarm config is hardcoded
# 2. it relies on the "wikilist.py" file to contain the list of wiki -> url patterns
# 3. it assumes the url patterns are simple enough that they are decodable into a URL
#
# Also note that it reuses code from MoinMoin/wikimacro.py's SystemInfo macro
#
# Finally, I tried using XMLRPC instead of native functions to fetch the data, but it ended up being slower. For the record, here is what the getPageList() call would have looked like:
# xmlrpclib.ServerProxy("http://wiki.koumbit.net/?action=xmlrpc2").getAllPages()
#
# The quick benchmark I did yielded these results for the getAllPages() vs getPageList() calls:
# xmlrpc: 2.35 real 0.12 user 0.04 sys
# native: 1.44 real 1.07 user 0.35 sys
#
# So the plugin is spending more time on the CPU (all of its time, actually), but it gets done faster. It is quite possible that the CPU time spared by XMLRPC is in fact spent on the server.
#
# (C) Copyleft 2007, The Anarcat <anarcat@koumbit.org>
# Licensed under the GPLv2 or any later version
import sys, operator, os
os.chdir('/export/wiki/config')
sys.path.insert(0, '/export/wiki/config')
from MoinMoin import wikiutil
from MoinMoin.Page import Page
from farmconfig import wikis
from re import sub
import farmconfig
from MoinMoin.request import RequestCLI
def _formatInReadableUnits(size):
    size = float(size)
    unit = u'Byte'
    if size > 9999:
        unit = u'KiB'
        size /= 1024
    if size > 9999:
        unit = u'MiB'
        size /= 1024
    if size > 9999:
        unit = u'GiB'
        size /= 1024
    return u"%.1f %s" % (size, unit)
def _getDirectorySize(path):
    try:
        dirsize = 0
        for root, dirs, files in os.walk(path):
            dirsize += sum([os.path.getsize(os.path.join(root, name)) for name in files])
    except EnvironmentError, e:
        dirsize = -1
    return dirsize
def main():
    for wiki in wikis:
        name = wiki[0]
        url = wiki[1]
        # XXX, hack: transform the regexp into a canonical url
        # we need canonical urls in the config for this to be clean
        # look for (foo|bar) and replace with foo
        url = sub('\(([^\|]*)(\|[^\)]*\))+', '\\1', url)
        # remove common regexp patterns and slap a protocol to make this a real url
        url = sub('[\^\$]|(\.\*)', '', url)
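        # e.g. a hypothetical farmconfig pattern "^(wiki.example.org|www.example.org)$"
        # would come out as "wiki.example.org"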
        mod = getattr(__import__(name), 'Config')
        #print "Upgrading wiki %s (%s)" % (getattr(mod, 'sitename'), url)
        request = RequestCLI(url)
        pagelist = request.rootpage.getPageList(user='')
        systemPages = [page for page in pagelist
                       if wikiutil.isSystemPage(request, page)]
        print(name + '.value ' + str(len(pagelist) - len(systemPages)))
        #totalsize = reduce(operator.add, [Page(request, name).size() for name in pagelist])
        #print('Accumulated page sizes: ' + _formatInReadableUnits(totalsize))
def config():
    print("""graph_title Wiki size
graph_vlabel Number of pages
graph_args --base 1000 -l 0
graph_scale no
graph_category Wiki
graph_info The number of pages excludes system pages but includes ACL-protected pages.""")
    for wiki in wikis:
        name = wiki[0]
        mod = getattr(__import__(name), 'Config')
        print(name + '.label ' + getattr(mod, 'sitename'))
if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == 'config':
        config()
    else:
        main()