mirror of https://github.com/munin-monitoring/contrib.git
synced 2025-07-21 18:41:03 +00:00

- have some dirs

parent 0b089ea777
commit 08346aac58
687 changed files with 0 additions and 0 deletions

plugins/http/http_load_ (executable file, 572 lines)
@@ -0,0 +1,572 @@
#!/usr/bin/perl
# -*- perl -*-
#
# Plugin to graph http performance
# Version: 0.8.7
#
# The purpose of this plugin is to monitor several properties of a web page.
# All measurements are done for the complete web page, including images, css
# and other content a standard browser would download automatically.
#
# This version supports monitoring:
# * The total time to download a complete web page (using serial GET requests)
# * The total size of a web page
# * The different response codes (200, 404, 500, etc)
# * The different tags (img src, a href, etc)
# * The different content types (image/png, text/css, etc)
# * The number of elements the web page consists of
#
# Author: Espen Braastad / Linpro AS
#         espen@linpro.no
#
##### Short usage guide: #####
#
# Requirements:
# * The server running this plugin must be allowed to connect to the web
#   server(s) you are going to monitor.
# * Some perl modules:
#   Time::HiRes, LWP::UserAgent, HTML::LinkExtor, LWP::ConnCache
#
# Initial configuration:
# 1. Copy this file to /usr/share/munin/plugins/
#
# 2. Create a file (/etc/munin/http_load_urls.txt) with one
#    full url per line, as many as you want, i.e.:
#    $ echo "http://www.dn.no/" >> /etc/munin/http_load_urls.txt
#    $ echo "http://www.intrafish.no/" >> /etc/munin/http_load_urls.txt
#
# 3. Add a cron job running the plugin with cron as the argument:
#    */15 * * * * <user> /usr/share/munin/plugins/http_load_ cron
#    <user> should be the user that has write permission to
#    the $cachedir directory set below. Set the intervals to
#    whatever you want.
#
#    For verbose output (for debugging) you can do:
#    sudo -u <user> /usr/share/munin/plugins/http_load_ cron verbose
#
# 4. Run munin-node-configure --suggest --shell and run the symlink
#    commands manually to update the munin-node plugin list.
#
# (5. If you want to change the filter which the plugin uses to select which
#     tags to follow in a web page, edit the subroutine called "filter" below.)
#
# Add a new url to monitor:
# 1. Add a new line in /etc/munin/http_load_urls.txt with the full URL, i.e.:
#    $ echo "http://www.linpro.no/" >> /etc/munin/http_load_urls.txt
#
# 2. Run munin-node-configure --suggest --shell and manually
#    add the new symlink(s)
#
# 3. /etc/init.d/munin-node restart
#
# Remove a url from monitoring:
# 1. Remove it from /etc/munin/http_load_urls.txt
#
# 2. Remove ${cachedir}/http_load_<url_id>*
#
# 3. Remove /etc/munin/plugins/http_load_<url_id>*
#
# 4. /etc/init.d/munin-node restart
#
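# Example: for the url http://www.dn.no/ the generated url_id is "httpwwwdnno",
# so the suggest output and the resulting symlinks look like
# /etc/munin/plugins/http_load_httpwwwdnno_loadtime -> /usr/share/munin/plugins/http_load_
# (one symlink per graph type: size, loadtime, response, tags, type and elements).
#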
#####
#
# Todo:
# * Add support for forking to simulate real browsers
# * Use checksums as fieldnames
#
# $Id: $
#
# Magic markers:
#%# family=auto
#%# capabilities=autoconf suggest

use strict;
use Time::HiRes qw( gettimeofday tv_interval );
use LWP::UserAgent;
use HTML::LinkExtor;
use LWP::ConnCache;

my $url_file="/etc/munin/http_load_urls.txt";
my $cachedir="/var/lib/munin/plugin-state";

my $debug=0;
my $timeout=10;
my $max_redirects=10;
my $scriptname="http_load_";
my $category="network"; # The munin graph category
my $useragent="Mozilla/5.0";

# Function to read the $url_file and return the contents in a hash
sub read_urls{
    my $file=$_[0];
    my %urls=();
    if(-r $file){
        open(FILE,'<'.$file);
        while (<FILE>) {
            my $url=$_;
            chomp($url);
            my $id=get_id($url);
            if(length($id)>0){
                $urls{$id}=$url;
            }
        }
        close (FILE);
    }
    return %urls;
}

# Function to read cache, return a hash
sub read_cache{
    my $file=$_[0];
    my %cache=();
    if(-r $file){
        open(FILE,'<'.$file);
        while (<FILE>) {
            m/^(\S*)\s+(.*)$/;
            $cache{ $1 } = $2;
        }
        close (FILE);
    }
    return %cache;
}

# Function to filter the html tags, which files do we want to download
sub filter{
    my $tag=$_[0];
    my $status=1;

    # Some example data:
    # link href http://www.intrafish.no/template/include/css/intrafish.css
    # script src http://www.intrafish.no/template/include/js/intrafish.js
    # a href http://adserver.adtech.de/?adlink%7C2.0%7C405%7C119488%7C1%7C16%7CADTECH;grp=8491;loc=300;
    # img src http://adserver.adtech.de/?adserv%7C2.0%7C405%7C119488%7C1%7C16%7CADTECH;grp=8491;
    # area href http://go.vg.no/cgi-bin/go.cgi/sol/http://www.sol.no/sgo/vg/http://www.sol.no/underholdning/humor/?partnerid=vg

    # status=1 => do download (default)
    # status=0 => do not download

    if("$tag" eq "form action"){
        $status=0;
    }
    if("$tag" eq "a href"){
        $status=0;
    }
    if("$tag" eq "area href"){
        $status=0;
    }
    return $status;
}

# Return the cache file name for this plugin
sub get_cache_file_name{
    my $scriptname=$_[0];
    my $id=$_[1];
    my $type=$_[2];
    my $file="";

    $file = $scriptname . $id . ".cache";
    $debug && print "Cache file: " . $file . "\n";

    return $file;
}

# Get fieldname (making sure it is munin "compatible" as a fieldname)
# 1. Remove all non-word characters from the string
# 2. Make sure it has maximum 19 characters
sub get_fieldname{
    my $url=$_[0];
    $url =~ s/\W//g;
    if(length($url) > 19){
        $url = substr($url, 0, 19);
    }
    return $url;
}

# Same as get_fieldname except it doesn't substr
sub get_id{
    my $url=$_[0];
    $url =~ s/\W//g;
    return $url;
}

$debug && print "Scriptname: " . $scriptname . "\n";

# Get the url id and the type of the graph
#
# The filename format is http_load_X_Y where
#  X: The url id (generated from the url in http_load_urls.txt)
#  Y: The type of graph (elements, size, loadtime, ..)

my ($id,$type);
$0 =~ /http_load(?:_([^_]+)|)_(.+)\s*$/;
$id = $1;
$type = $2;

$debug && print "Id: $id, Type: $type\n";

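# For example, a plugin symlink named http_load_httpwwwdnno_loadtime gives
# $id = "httpwwwdnno" and $type = "loadtime".
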
if($ARGV[0] and $ARGV[0] eq "autoconf") {
    my %urls=&read_urls($url_file);
    if(keys(%urls) gt 0){
        print "yes\n";
        exit(0);
    } else {
        print "no\n";
        exit(1);
    }

} elsif($ARGV[0] and $ARGV[0] eq "suggest") {
    # get the url list, print suggestions for usage
    my %urls=&read_urls($url_file);
    while ( my ($id, $url) = each(%urls) ) {
        $debug && print "id: $id => url: $url\n";
        print $id . "_size\n";
        print $id . "_loadtime\n";
        print $id . "_response\n";
        print $id . "_tags\n";
        print $id . "_type\n";
        print $id . "_elements\n";
    }
    exit(0);

} elsif($ARGV[0] and $ARGV[0] eq "cron") {
    # This thing is run by cron and should write a cache file for munin-node to
    # read from

    my $verbose=0;
    if($ARGV[1] and $ARGV[1] eq "verbose") {
        $verbose=1;
        print "Verbose output\n";
    }

    my %urls=&read_urls($url_file);
    my %output;
    my %res;
    my $t0;
    my ($request,$response,$status,$link,$contents,$page_parser,$cachefile);

    while ( my ($id, $url) = each(%urls) ) {
        $verbose && print "Fetching $url (id: $id)... \n";

        $t0=0;
        $status=0;
        %output=();
        my $host="";
        if($url =~ m/\w+\:\/\/([^\/]+).*/){
            $host=$1;
            $verbose && print " Host: $host\n";
        }

        $output{"url"}=$url;
        $output{"timestamp"}=time();
        $verbose && print " Timestamp: " . $output{"timestamp"} . "\n";

        my $browser = LWP::UserAgent->new();

        $browser->agent($useragent);
        $browser->timeout(${timeout});
        $browser->max_redirect( $max_redirects );
        $browser->conn_cache(LWP::ConnCache->new());

        $response = $browser->get($url);

        # Calculating time from now:
        $t0 = [gettimeofday];
        if ($response->is_success()) {
            $status=1;
            $output{"elements_" . $host}+=1;
        }

        $contents = $response->content();
        $output{"loadtime_" . $host} += sprintf("%.6f",tv_interval ( $t0, [gettimeofday]));
        $output{"size_" . $host}+=length($contents);
        $output{"response_" . $host . "_" . $response->code}+=1;
        $output{"type_" . $response->content_type}+=1;

        $page_parser = HTML::LinkExtor->new(undef, $url);
        $page_parser->parse($contents)->eof;
        my @links = $page_parser->links;
        $verbose && print " Processing links:\n";

        %res=();
        foreach $link (@links){
            my $tag=$$link[0] . " " . $$link[1];

            $output{"tags_" . $$link[0] . "-" . $$link[1]}+=1;

            if(filter($tag)){
                $verbose && print " Processing: " . $$link[0] . " " . $$link[1] . " " . $$link[2] . "\n";

                # Extract the hostname and add it to the hash
                if($$link[2] =~ m/http\:\/\/([^\/]+).*/){
                    $host=$1;
                    $output{"elements_" . $host}+=1;
                }

                my $suburl=$$link[2];

                $t0 = [gettimeofday];
                $response = $browser->get($suburl);
                $output{"loadtime_" . $host} += sprintf("%.6f",tv_interval ( $t0, [gettimeofday]));

                $contents = $response->content();
                $output{"size_" . $host}+=length($contents);
                $output{"response_" . $host . "_" . $response->code}+=1;
                $output{"type_" . $response->content_type}+=1;

                $verbose && print " Response: " . $response->code . " Size: " . length($contents) . "\n";
            } else {
                $verbose && print " Skipping: " . $$link[0] . " " . $$link[1] . " " . $$link[2] . "\n";
            }
        }

        $cachefile=$cachedir . "/" . &get_cache_file_name($scriptname,$id,$type);
        $debug && print "Reading cache file: " . $cachefile . "... ";

        my %input=read_cache($cachefile);

        $debug && print "done\n";

        # Resetting all values to 0 before adding new values
        while ( my ($id, $value) = each(%input) ) {
            $input{$id}="U";
        }

        # Adding new values
        while ( my ($id, $value) = each(%output) ) {
            $input{$id}=$value;
            $verbose && print " Result: " . $id . " -> " . $value . "\n";
        }

        # Writing the cache
        $verbose && print "Writing cache file: " . $cachefile . "... ";
        open(FILE,">".$cachefile);
        while ( my ($id, $value) = each(%input) ) {
            print FILE $id . " " . $value . "\n";
        }
        close(FILE);
        $verbose && print "done\n";
    }
    exit(0);
}elsif($ARGV[0] and $ARGV[0] eq "config") {
    my %urls=&read_urls($url_file);

    print "graph_title $urls{$id} ${type}\n";
    print "graph_args -l 0 --base 1000\n";
    print "graph_category " . $category . "\n";
    $debug && print "Reading cache file\n";
    my $cachefile=$cachedir . "/" . &get_cache_file_name($scriptname,$id,$type);
    my %cache=read_cache($cachefile);

    my $count=0;
    $debug && print "The cache file contains " . keys(%cache) . " lines\n";

    if($type eq "size"){
        print "graph_vlabel Bytes\n";
        print "graph_total Total\n";
        print "graph_info This graph is generated by a set of serial GETs to calculate the total size of $urls{$id}.\n";

        if(keys(%cache)>0){
            for my $key ( sort reverse keys %cache ){
                my $value=$cache{$key};

                if($key =~ m/^size_(\S+)$/){
                    my $host=$1;
                    my $value=$value;

                    my $name=$1;
                    $name=get_fieldname($name);

                    print "$name.label from $host\n";
                    print "$name.min 0\n";
                    print "$name.max 20000000\n";
                    if($count eq 0){
                        print "$name.draw AREA\n";
                    } else {
                        print "$name.draw STACK\n";
                    }
                    $count+=1;
                }
            }
        }
    }elsif($type eq "loadtime"){
        print "graph_vlabel Seconds\n";
        print "graph_total Total\n";
        print "graph_info This graph is generated by a set of serial GETs to calculate the total time to load $urls{$id}. ";
        print "Note that browsers usually fork() the GET requests, resulting in a shorter total loading time.\n";

        if(keys(%cache)>0){
            for my $key ( sort reverse keys %cache ){
                my $value=$cache{$key};

                if($key =~ m/^loadtime_(\S+)$/){
                    my $host=$1;
                    my $value=$value;

                    my $name=$1;
                    $name=get_fieldname($name);

                    print "$name.label from $host\n";
                    print "$name.min 0\n";
                    print "$name.max 400\n";
                    if($count eq 0){
                        print "$name.draw AREA\n";
                    } else {
                        print "$name.draw STACK\n";
                    }
                    $count+=1;
                }
            }
        }

    }elsif($type eq "elements"){
        print "graph_vlabel Number of elements\n";
        print "graph_total Total\n";
        print "graph_info This graph is generated by a set of serial GETs to count the number of elements (images, CSS files, etc) from $urls{$id}.\n";

        if(keys(%cache)>0){
            for my $key ( sort reverse keys %cache ){
                my $value=$cache{$key};

                if($key =~ m/^elements_(\S+)$/){
                    my $host=$1;
                    my $value=$value;

                    my $name=$1;
                    $name=get_fieldname($name);

                    print "$name.label from $host\n";
                    print "$name.min 0\n";
                    print "$name.max 10000\n";
                    if($count eq 0){
                        print "$name.draw AREA\n";
                    } else {
                        print "$name.draw STACK\n";
                    }
                    $count+=1;
                }
            }
        }
    }elsif($type eq "response"){
        print "graph_vlabel Server response code count\n";
        print "graph_total Total\n";
        print "graph_info This graph is generated by a set of serial GETs to visualize the server response codes received while loading $urls{$id}.\n";

        if(keys(%cache)>0){
            for my $key ( sort reverse keys %cache ){
                my $value=$cache{$key};

                if($key =~ m/^response_(\S+)$/){
                    my $host=$1;
                    my $value=$value;

                    my $name=$1;
                    $name=get_fieldname($name);

                    $host =~ s/\_/ /g;
                    $host =~ s/(\S+)\s(\d+)/ /g;
                    $host=$1;
                    my $code=$2;

                    print "$name.label $host ($code)\n";
                    print "$name.min 0\n";
                    print "$name.max 10000\n";
                    if($count eq 0){
                        print "$name.draw AREA\n";
                    } else {
                        print "$name.draw STACK\n";
                    }
                    $count+=1;
                }
            }
        }
    }elsif($type eq "type"){
        print "graph_vlabel Content type count\n";
        print "graph_total Total\n";
        print "graph_info This graph is generated by a set of serial GETs to visualize the different content types $urls{$id} consists of.\n";

        if(keys(%cache)>0){
            for my $key ( sort reverse keys %cache ){
                my $value=$cache{$key};

                if($key =~ m/^type_(\S+)$/){
                    my $type=$1;
                    my $value=$value;

                    my $name=$1;
                    $name=get_fieldname($name);

                    #$host =~ s/\_/ /g;
                    #$host =~ s/(\S+)\s(\S+)/ /g;
                    #$host=$1;
                    #my $type=$2;

                    print "$name.label $type\n";
                    print "$name.min 0\n";
                    print "$name.max 100000\n";
                    if($count eq 0){
                        print "$name.draw AREA\n";
                    } else {
                        print "$name.draw STACK\n";
                    }
                    $count+=1;
                }
            }
        }
    }elsif($type eq "tags"){
        print "graph_vlabel HTML tag count\n";
        print "graph_total Total\n";
        print "graph_info This graph is generated by a set of serial GETs to visualize the different tags $urls{$id} consists of.\n";

        if(keys(%cache)>0){
            for my $key ( sort reverse keys %cache ){
                my $value=$cache{$key};

                if($key =~ m/^tags_(\S+)$/){
                    my $host=$1;
                    my $value=$value;

                    my $name=$1;
                    $name=get_fieldname($name);

                    $host =~ s/\W/ /g;

                    print "$name.label $host\n";
                    print "$name.min 0\n";
                    print "$name.max 100000\n";
                    if($count eq 0){
                        print "$name.draw AREA\n";
                    } else {
                        print "$name.draw STACK\n";
                    }
                    $count+=1;
                }
            }
        }
    }
    exit(0);
} else {
    my $cachefile=$cachedir . "/" . &get_cache_file_name($scriptname,$id,$type);
    $debug && print "Reading cache file: " . $cachefile . "\n";
    my %cache=read_cache($cachefile);
    $debug && print "Number of lines in cache file: " . keys(%cache) . "\n";

    if(keys(%cache)>0){
        for my $key ( sort keys %cache ){
            my $value=$cache{$key};
            if($key =~ m/^([A-Za-z]+)\_(\S+)$/){
                my $name=$2;

                if ($1 eq $type){
                    $name=get_fieldname($name);
                    print $name . ".value " . $value . "\n";
                }
            } elsif($key =~ m/^(\S+)\s+(\S+)$/){
                if ($1 eq $type){
                    print $1 . ".value " . $2 . "\n";
                }
            }
        }
    }
}

# vim:syntax=perl
plugins/http/http_request_time (executable file, 143 lines)
@@ -0,0 +1,143 @@
#!/usr/bin/perl

=head1 INSTALLATION

This plugin makes http requests to the specified URLs and measures the
response time. Use it to monitor remote sites.

LWP::UserAgent and Time::HiRes are required.

=head1 CONFIGURATION

 [http_request_time]
 env.url http://127.0.0.1/1 http://127.0.0.1/2 http://127.0.0.1/3

=head1 MAGIC MARKERS

 #%# family=auto
 #%# capabilities=autoconf

=head1 LICENSE

GPLv2

=cut

use strict;
use warnings;
use Munin::Plugin;
use Time::HiRes qw(gettimeofday tv_interval);
my $ret = undef;

need_multigraph();

sub clean {
    my $surl=shift;
    $surl=~s/^https?:\/\///;
    $surl=~s|%[\w\d]|_|g;
    $surl=~s|[^\w\d_]|_|g;
    $surl=~s|_*$||g;
    $surl=~s|^_*||g;
    return $surl;
};

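# For example, clean("http://127.0.0.1/1") returns "127_0_0_1_1", which is
# used as the munin field name for that URL.
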
if (! eval "require LWP::UserAgent;")
{
    $ret = "LWP::UserAgent not found";
    if ( ! defined $ARGV[0] ) {
        die $ret;
    }
}

my $URL = $ENV{'url'}?$ENV{'url'}:"http://127.0.0.1/";
my %URLS;
foreach $_ (split(/ /,$URL)){
    $URLS{$_}={
        url=>$_,
        surl=>clean($_),
        time=>'U'
    };
}

if ( defined $ARGV[0] and $ARGV[0] eq "autoconf" )
{
    if ($ret)
    {
        print "no ($ret)\n";
        exit 0;
    }

    my $ua = LWP::UserAgent->new(timeout => 30);

    foreach my $url (keys %URLS) {
        my $response = $ua->request(HTTP::Request->new('GET',$url));
        if ($response->is_success) {
            next;
        }
        else {
            print "no (URL $url: ". $response->message .")\n";
            exit 0;
        }
    }
    print "yes\n";
    exit 0;
}

if ( defined $ARGV[0] and $ARGV[0] eq "config" )
{
    # master graph
    print "multigraph http_request_time\n";
    print "graph_title HTTP(S) Request response times\n";
    print "graph_args --base 1000\n";
    print "graph_vlabel response time in ms\n";
    print "graph_category other\n";

    my @go;
    foreach my $url (values %URLS) {
        print "$$url{'surl'}.label $$url{'url'}\n";
        print "$$url{'surl'}.info The response time of a single request\n";
        print "$$url{'surl'}.min 0\n";
        print "$$url{'surl'}.draw LINE1\n";
        push(@go,$$url{'surl'});
    }

    # multigraphs

    foreach my $url (values %URLS) {
        print "\nmultigraph http_request_time.$$url{'surl'}\n";
        print "graph_title $$url{'url'}\n";
        print "graph_args --base 1000\n";
        print "graph_vlabel response time in ms\n";
        print "graph_category other\n";
        print "$$url{'surl'}.label $$url{'url'}\n";
        print "$$url{'surl'}.info The response time of a single request\n";
        print "$$url{'surl'}.min 0\n";
        print "$$url{'surl'}.draw LINE1\n";
    }

    exit 0;
}

my $ua = LWP::UserAgent->new(timeout => 15);

foreach my $url (values %URLS) {
    my $t1=[gettimeofday];
    my $response = $ua->request(HTTP::Request->new('GET',$$url{'url'}));
    my $t2=[gettimeofday];
    if ($response->is_success) {
        $$url{'time'}=sprintf("%d",tv_interval($t1,$t2)*1000);
    };
};

print("multigraph http_request_time\n");
foreach my $url (values %URLS) {
    print("$$url{'surl'}.value $$url{'time'}\n");
}
foreach my $url (values %URLS) {
    print("\nmultigraph http_request_time.$$url{'surl'}\n");
    print("$$url{'surl'}.value $$url{'time'}\n");
}

# vim:syntax=perl
plugins/http/http_responsetime (executable file, 74 lines)
@@ -0,0 +1,74 @@
#! /usr/bin/perl
# anders@aftenposten.no, 2007-04-11
# Shows the response time to fetch a web page

use Sys::Hostname;
use Time::HiRes qw( time );
use IO::Socket;

# ----- config -----
$url = "http://cache.mydomain.org/img/logocomp.gif";
$host = "localhost";
$comment = "2K Comp logo from localhost";
#$host = hostname;
# ----- config -----

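# With the default $url above, cktime() derives $proto = "http",
# $vhost = "cache.mydomain.org" and $baseurl = "/img/logocomp.gif";
# geturl() then requests $baseurl from $host with a "Host: $vhost" header.
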
sub geturl {
    my $data;
    my $sock = new IO::Socket::INET (
        PeerAddr => $host,
        PeerPort => 80,
        Proto => 'tcp'
    );
    return(0) unless ($sock);
    print $sock "GET $baseurl HTTP/1.1\nHost: $vhost\nConnection: close\n\n";
    while (<$sock>) {
        $data .= $_;
    }
    close($sock);

    # Debug
    #my @response = split(/\n/, $data);
    #my $httpresponse = $response[0];
    #chomp($httpresponse);
    #$httpresponse =~ s@\r@@g;
    #print "HTTP response code: $httpresponse\n";
}

sub cktime {
    $vhost = $url;
    $vhost =~ s@^\w+://(.+?)/.*@\1@;

    $proto = $url;
    $proto =~ s@^(\w+)://.*@\1@;

    $baseurl = $url;
    $baseurl =~ s@^\w+://.+?(/)@\1@;

    $tick1 = time();
    geturl;
    $tick2 = time();

    $tspent = $tick2-$tick1;
    $msecs = ($tspent * 1000);

    printf "timespent.value %.3f\n", $msecs;
}

if ($ARGV[0] && $ARGV[0] eq "autoconf") {
    print "yes\n";
} elsif ($ARGV[0] && $ARGV[0] eq "config") {
    if ($comment) {
        print "graph_title HTTP response time ($comment)\n";
    } else {
        print "graph_title HTTP response time\n";
    }
    print "graph_vlabel ms\n";
    print "graph_category HTTP\n";
    print "graph_info This graph shows the time (in milliseconds) it takes to load a web page\n";
    print "timespent.label timespent\n";
    print "timespent.type GAUGE\n";
    print "timespent.graph yes\n";
} else {
    cktime;
}
plugins/http/multi_http_responsetime (executable file, 116 lines)
@@ -0,0 +1,116 @@
#! /usr/bin/perl
# This plugin is based on http_responsetime, designed by Anders Nordby
#
# It is written to check the quality of an internet connection by
# downloading a favicon.ico file from any number of domains.
#
# Don't forget to add the following lines to the munin-node configuration
# - normally under /etc/munin/plugin-conf.d/ - and then restart munin-node:
#
# [multi_http_responsetime]
# user root
# Jo Hartmann (Version 08-0912)
#
# Now working under munin 1.4.5
# Jo Hartmann (Version 11-0426)

use Sys::Hostname;
use Time::HiRes qw( time );
use IO::Socket;

# ----- config -----
push(@url_array, "http://www.google.de");
push(@url_array, "http://www.t-online.de");
push(@url_array, "http://www.telekom.de");
push(@url_array, "http://www.ebay.de");
push(@url_array, "http://www.tus-vahrenwald.de");
$host = "localhost";
$comment = "HTTP load times of web pages";
#$host = hostname;
# ----- config -----

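# Note: list each entry in @url_array without a trailing slash or path;
# cktime() appends "/favicon.ico" to it before measuring the load time.
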
sub geturl {
    my $data;
    my $sock = new IO::Socket::INET (
        PeerAddr => $host,
        PeerPort => 80,
        Proto => 'tcp'
    );
    return(0) unless ($sock);
    print $sock "GET $baseurl HTTP/1.1\nHost: $vhost\nConnection: close\n\n";
    while (<$sock>) {
        $data .= $_;
    }
    close($sock);

    # Debug
    #my @response = split(/\n/, $data);
    #my $httpresponse = $response[0];
    #chomp($httpresponse);
    #$httpresponse =~ s@\r@@g;
    #print "HTTP response code: $httpresponse\n";
}

sub cktime {
    for ($i=0; $i <= $max_index; ++$i) {
        $url = $url_array[$i] . "/favicon.ico";

        $vhost = $url;
        $vhost =~ s@^\w+://(.+?)/.*@\1@;

        $proto = $url;
        $proto =~ s@^(\w+)://.*@\1@;

        $baseurl = $url;
        $baseurl =~ s@^\w+://.+?(/)@\1@;

        $tick1 = time();
        geturl;
        $tick2 = time();

        $tspent = $tick2-$tick1;
        $msecs = ($tspent * 1000);

        printf "timespent$i.value %.3f\n", $msecs;
    }
}

# Count of urls
$max_index = $#url_array;

if ($ARGV[0] && $ARGV[0] eq "autoconf") {
    print "yes\n";
} elsif ($ARGV[0] && $ARGV[0] eq "config") {
    if ($comment) {
        print "graph_title $comment\n";
    } else {
        print "graph_title HTTP response time\n";
    }
    print "graph_scale no\n";
    print "graph_vlabel ms\n";
    print "graph_category HTTP\n";
    print "graph_info This graph shows the time (in milliseconds) it takes to load a web page\n";
    for ($i=0; $i <= $max_index; ++$i) {
        $vhost = $url_array[$i];
        $proto = $url_array[$i];
        $vhost =~ s@^\w+://(.+?)/.*@\1@;
        $proto =~ s@^(\w+)://.*@\1@;

        # If url_array[$i] is a bare domain, $vhost will still contain the string "http://"
        if($vhost =~ /http/) {
            print "timespent$i.label $vhost\n";
        } else {
            print "timespent$i.label $proto://$vhost\n";
        }

        print "timespent$i.info Load time of $url_array[$i]/favicon.ico\n";
        print "timespent$i.type GAUGE\n";
        print "timespent$i.graph yes\n";
    }
} else {
    cktime;
}
plugins/http/speedport_300 (executable file, 56 lines)
@@ -0,0 +1,56 @@
#!/bin/sh
#
#
# Munin plugin to show the up-/downstream of the current
# internet connection by reading top_status.htm from the
# Speedport 300 router
#
#
# Don't forget to add the following lines to the munin-node configuration
# - normally under /etc/munin/plugin-conf.d/ - and then restart munin-node:
#
# [speedport_300]
# user root
# Jo Hartmann (Version 08.0912)

# Personal config Section Begin ##
router="192.168.0.111"
# Personal config section End ####

# Standard Config Section Begin ##
if [ "$1" = "autoconf" ]; then
    echo yes
    exit 0
fi

if [ "$1" = "config" ]; then
    echo 'graph_title DSL Up- / Downstream'
    echo 'graph_args --base 1000 -l 0'
    echo 'graph_scale no'
    echo 'graph_vlabel Up- / Downstream in kBit/s'
    echo 'graph_category Http'
    echo 'download.label Downstream'
    echo 'upload.label Upstream'
    echo 'graph_info Information from the top_status.htm of the Speedport 300'
    exit 0
fi
# Standard Config Section End ####

# Measure Section Begin ##########
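# The wget/grep/awk pipeline below fills the up_down array with two fields
# taken from top_status.htm; ${up_down[0]%+*} (downstream) and
# ${up_down[1]%+*} (upstream) strip everything from the first "+" on,
# leaving only the plain kBit/s numbers.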
up_down=($(wget -q -O - $router/top_status.htm | grep "+'kBit" | awk -F"(" '{print $2}'))
down=${up_down[0]%+*}
up=${up_down[1]%+*}

if [ "$down" = "" ]; then
    echo download.value 0
else
    echo download.value ${up_down[0]%+*}
fi

if [ "$up" = "" ]; then
    echo upload.value 0
else
    echo upload.value ${up_down[1]%+*}
fi
# Measure Section End ############
plugins/http/wget_page (executable file, 254 lines)
@@ -0,0 +1,254 @@
#!/bin/sh
#
# Plugin to graph response times of the specified websites/URLs.
#
# Parameters:
#
# config (required)
# autoconf (optional - used by lrrd-config)
#
# Configuration example:
#
# [wget_page]
# timeout 30
# env.names url1 url2
# env.timeout 20
# env.error_value 60
# env.max 120
#
# env.url_url1 http://www1.example.com/path1/page1
# env.label_url1 Example URL#1
# env.timeout_url1 10
# env.warning_url1 5
# env.critical_url1 8
#
# env.url_url2 https://www2.example.com/path2/page2
# env.label_url2 Example URL#2
# env.timeout_url2 30
# env.warning_url2 15
# env.critical_url2 20
# env.wget_opts_url2 --no-cache --tries=1 --no-check-certificate
#
# URL options:
#
# You can define the following options for each specified URL
# as seen in the above example.
#
# - url: the URL to be downloaded with Wget
# - label: the label assigned to the line of the given URL in the graph
# - timeout: the value passed to Wget through the "--timeout" option.
# - warning: the value for the given URL that stands for the warning level
# - critical: the value for the given URL that stands for the critical level
# - max: the maximum value for the given URL (values above this will be
#   discarded)
# - error_value: the value for the given URL that will be used to mark when
#   Wget returned an error. A zero error_value causes the plugin to ignore
#   Wget's return value.
# - regex_error_value: the value for the given URL that will be used to mark
#   when a regular expression match failed (either for the HTTP response
#   headers or the body).
# - regex_header_<n>: a regular expression that the HTTP response header must
#   match or the plugin will return regex_error_value for the given URL.
#   By default the plugin uses egrep, thus extended regexps are expected.
#   You can define any number of regexps, but you have to start the index at
#   "1" and increase it sequentially.
#   I.e. regex_header_1, regex_header_2, ...
# - regex_body_<n>: same as regex_header_<n>, but matches the HTTP response
#   body.
# - grep_opts: various options supplied to grep for regexp matches for the
#   given URL. By default these are: -E (use of extended regexps)
#   and -i (case insensitive regexp matching)
# - wget_opts: various options supplied to the Wget command for the given URL.
# - join_lines: if "true" and regexp matching is applied, the HTTP response body
#   is stripped of newline ("\n") characters. This helps with complex regexps
#   since grep can match only in a single line at a time and it would not be
#   possible to match on complex HTML/XML structures otherwise.
#   This is enabled by default.
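#
# A hypothetical example of the regexp options (illustrative values only):
#
# [wget_page]
# env.names status
# env.url_status http://www.example.com/status
# env.regex_header_1_status HTTP/1\.[01] 200
# env.regex_body_1_status (OK|running)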
#
# $Log$
#
# Revision 1.0 2006/07/11 08:49:43 cipixul@gmail.com
# Initial version
#
# Revision 2.0 2010/03/25 13:46:13 muzso@muzso.hu
# Rewrote most of the code. Added multips-like options.
#
# Revision 2.1 2010/04/22 11:43:53 muzso@muzso.hu
# Added regular expression matching against the contents of the checked URL.
#
# Revision 2.2 2010/04/23 15:21:12 muzso@muzso.hu
# Bugfix. Regexp matching on HTTP response bodies with a trailing newline
# was flawed.
#
#%# family=auto
#%# capabilities=autoconf

[ -n "${wget_bin}" ] || wget_bin=$(which wget)
[ -n "${time_bin}" ] || time_bin=$(which time)
[ -n "${mktemp_bin}" ] || mktemp_bin=$(which mktemp)
[ -n "${grep_bin}" ] || grep_bin=$(which grep)
[ -n "${tail_bin}" ] || tail_bin=$(which tail)

default_error_value=30
default_regex_error_value=40
default_grep_opts="-E -i"
default_wget_opts="--no-cache --tries=1"
default_timeout=20
default_join_lines=true

if [ "${1}" = "autoconf" ]; then
    result=0
    if [ -z "${wget_bin}" -o ! -f "${wget_bin}" -o ! -x "${wget_bin}" ]; then
        result=1
    else
        if [ -z "${time_bin}" -o ! -f "${time_bin}" -o ! -x "${time_bin}" ]; then
            result=2
        else
            if [ -z "${mktemp_bin}" -o ! -f "${mktemp_bin}" -o ! -x "${mktemp_bin}" ]; then
                result=3
            else
                if [ -z "${grep_bin}" -o ! -f "${grep_bin}" -o ! -x "${grep_bin}" ]; then
                    result=4
                else
                    [ -z "${tail_bin}" -o ! -f "${tail_bin}" -o ! -x "${tail_bin}" ] && result=5
                fi
            fi
        fi
    fi
    if [ ${result} -eq 0 ]; then
        echo "yes"
    else
        echo "no"
    fi
    exit $result
fi

if [ -z "${names}" ]; then
    echo "Configuration required"
    exit 1
fi

[ -n "${error_value}" ] || error_value=${default_error_value}
[ -n "${regex_error_value}" ] || regex_error_value=${default_regex_error_value}
[ -n "${grep_opts}" ] || grep_opts=${default_grep_opts}
[ -n "${wget_opts}" ] || wget_opts=${default_wget_opts}
[ -n "${timeout}" ] || timeout=${default_timeout}
[ -n "${join_lines}" ] || join_lines=${default_join_lines}
[ -n "${warning}" ] || warning=$((timeout/2))
[ -n "${critical}" ] || critical=${timeout}
[ -n "${max}" ] || max=$((timeout*2))

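# Per-URL settings are resolved below via "eval" using the configured name as
# a suffix: e.g. for a name of "url1", url_url1 supplies the URL and
# timeout_url1 (if set) overrides the global timeout.
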
if [ "${1}" = "config" ]; then
    echo "graph_title wget loadtime of webpages"
    echo "graph_args --base 1000 -l 0"
    echo "graph_scale no"
    echo "graph_vlabel Load time in seconds"
    echo "graph_category http"
    echo "graph_info This graph shows load time in seconds of one or more urls"
    I=1
    for name in ${names}; do
        eval iurl='${url_'${name}'}'
        if [ -n "${iurl}" ]; then
            eval ilabel='${label_'${name}':-url${I}}'
            eval iwarning='${warning_'${name}':-${warning}}'
            eval icritical='${critical_'${name}':-${critical}}'
            eval imax='${max_'${name}':-${max}}'
            cat << EOH
loadtime${I}.label ${ilabel}
loadtime${I}.info Load time for ${iurl}
loadtime${I}.min 0
loadtime${I}.max ${imax}
EOH
            [ ${iwarning} -gt 0 ] && echo "loadtime${I}.warning ${iwarning}"
            [ ${icritical} -gt 0 ] && echo "loadtime${I}.critical ${icritical}"
            I=$((I+1))
        fi
    done
    exit 0
fi

I=1
for name in ${names}; do
    eval iurl='${url_'${name}'}'
    if [ -n "${iurl}" ]; then
        eval ierror_value='${error_value_'${name}':-${error_value}}'
        eval iregex_error_value='${regex_error_value_'${name}':-${regex_error_value}}'
        eval igrep_opts='${grep_opts_'${name}':-${grep_opts}}'
        eval iwget_opts='${wget_opts_'${name}':-${wget_opts}}'
        eval iwget_post_data='${wget_post_data_'${name}':-${wget_post_data}}'
        eval ijoin_lines='${join_lines_'${name}':-${join_lines}}'
        eval itimeout='${timeout_'${name}':-${timeout}}'
        loadtime=""
        tempfile=$(mktemp)
        if [ -z "${iwget_post_data}" ]; then
            timing=$(${time_bin} -p ${wget_bin} --save-headers --no-directories --output-document "${tempfile}" --timeout ${itimeout} ${iwget_opts} "${iurl}" 2>&1)
        else
            timing=$(${time_bin} -p ${wget_bin} --post-data "${iwget_post_data}" --save-headers --no-directories --output-document "${tempfile}" --timeout ${itimeout} ${iwget_opts} "${iurl}" 2>&1)
        fi
        wget_result=$?
        if [ -f "${tempfile}" ]; then
            if [ ${wget_result} -ne 0 -a ${ierror_value} -gt 0 ]; then
                loadtime=${ierror_value}
            else
                tempheader=""
                tempbody=""
                K=0
                while [ -z "${loadtime}" ]; do
                    K=$((K+1))
                    eval iregex_header='${regex_header_'${K}'_'${name}':-${regex_header_'${K}'}}'
                    eval iregex_body='${regex_body_'${K}'_'${name}':-${regex_body_'${K}'}}'
                    [ -z "${iregex_header}" -a -z "${iregex_body}" ] && break
                    if [ ${K} -eq 1 ]; then
                        OIFS="${IFS}"
                        # we skip carriage return characters from the end of header lines
                        IFS=$(echo -en "\r")
                        inheader=0
                        # The "read" command reads only lines terminated by a specific
                        # character (which by default is the newline char).
                        # To read the end of the file (the bytes after the last newline) too,
                        # we append a newline.
                        echo "" >> "${tempfile}"
                        while read -r line; do
                            if [ -z "${line}" ]; then
                                inheader=1
                                # We reached the border of the header and the body.
                                # Setting IFS to an empty string puts the entire read lines from
                                # the body into our "line" variable.
                                IFS=""
                            else
                                if [ ${inheader} -eq 0 ]; then
                                    tempheader="${tempheader}${line}
"
                                else
                                    if [ "${ijoin_lines}" = "true" ]; then
                                        tempbody="${tempbody}${line}"
                                    else
                                        tempbody="${tempbody}${line}
"
                                    fi
                                fi
                            fi
                        done < "${tempfile}"
                        IFS="${OIFS}"
                    fi
                    if [ -n "${iregex_header}" ] && ! echo "${tempheader}" | ${grep_bin} -qs ${igrep_opts} "${iregex_header}" 2> /dev/null; then
                        loadtime=${iregex_error_value}
                    else
                        if [ -n "${iregex_body}" ] && ! echo "${tempbody}" | ${grep_bin} -qs ${igrep_opts} "${iregex_body}" 2> /dev/null; then
                            loadtime=${iregex_error_value}
                        fi
                    fi
                done
                if [ -z "${loadtime}" ]; then
                    loadtime=$(echo "${timing}" | grep "^real *[0-9]" | cut -d ' ' -f 2)
                fi
            fi
            rm -f "${tempfile}" > /dev/null 2>&1
        else
            loadtime=$((ierror_value*2))
        fi
        echo "loadtime${I}.value ${loadtime}"
        I=$((I+1))
    fi
done