TT#131253 remove kamailio yaml flow info options

Parsing of the kamailio log to produce flow info is no longer supported.

* remove ulog_parser.pl, no longer used
* graph_flow.pl: remove -j option
* check.py: remove --yaml/--json options
* check.sh: remove SKIP_PARSE and JSON_KAM options

Change-Id: I0df00ffff3e7d8c2129cbbce12652ddff9730821
mr10.1.1
Victor Seva 5 years ago
parent e35be54f15
commit 9bc3d5d520

@ -330,13 +330,6 @@ def load_json(filepath):
def main(args):
# default -y
load_check = load_yaml
if args.yaml:
load_check = load_yaml
if args.json:
load_check = load_json
if args.debug:
logging.basicConfig(level=logging.DEBUG)
@ -345,7 +338,7 @@ def main(args):
test = Test()
try:
check = load_check(args.kam_file)
check = load_json(args.kam_file)
except Exception:
check = {"flow": [], "sip_in": "", "sip_out": []}
test.error("Error loading file:%s" % args[1])
@ -363,12 +356,10 @@ def main(args):
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='generate TAP result')
parser = argparse.ArgumentParser(description="generate TAP result")
grp = parser.add_mutually_exclusive_group()
grp.add_argument('-y', '--yaml', action='store_true', help='YAML kam_file')
grp.add_argument('-j', '--json', action='store_true', help='JSON kam_file')
parser.add_argument('test_yaml_file', help='YAML file with checks')
parser.add_argument('kam_file', help='kamailio file')
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument("test_yaml_file", help="YAML file with checks")
parser.add_argument("kam_file", help="JSON cfgt kamailio file")
parser.add_argument("-d", "--debug", action="store_true")
args = parser.parse_args()
main(args)

@ -24,12 +24,10 @@ SKIP=false
MEMDBG=false
SKIP_DELDOMAIN=false
CHECK_TYPE=all
SKIP_PARSE=false
SKIP_RUNSIPP=false
FIX_RETRANS=false
GRAPH=false
GRAPH_FAIL=false
JSON_KAM=true
SKIP_MOVE_JSON_KAM=false
CDR=false
ERR_FLAG=0
@ -44,12 +42,8 @@ SIPP_VERSION=$(sipp -v | awk -F- '/SIPp/ { print $1 }' | awk '{print $2}')
# $1 kamailio msg parsed to yml
# $2 destination png filename
graph() {
local OPTS
if "${JSON_KAM}" ; then
OPTS="--json"
fi
if [ -f "$1" ]; then
"${BIN_DIR}/graph_flow.pl" $OPTS "$1" "$2"
"${BIN_DIR}/graph_flow.pl" "$1" "$2"
else
echo "No $1 found"
ERR_FLAG=1
@ -112,14 +106,12 @@ check_retrans_next() {
# testing next json file, if exist. Necessary in case of retransmissions or wrong timing/order.
local next_msg
local next_tap
local kam_type
local step
local err_type
step=${4:-1}
step=${3:-1}
next_test_filepath "$1" "${step}"
kam_type=$2
next_tap=${3/_test.tap/_test_next.tap}
next_tap=${2/_test.tap/_test_next.tap}
echo "$(date) - Fix retransmissions enabled: try to test the next[+${step}] json file"
kam_msg=$(basename "${next_msg}")
@ -129,11 +121,11 @@ check_retrans_next() {
return 1
fi
echo -n "$(date) - Testing $(basename "$1") against ${kam_msg} -> $(basename "${next_tap}")"
if "${BIN_DIR}/check.py" "${kam_type}" "$1" "${next_msg}" > "${next_tap}" ; then
if "${BIN_DIR}/check.py" "$1" "${next_msg}" > "${next_tap}" ; then
# Test using the next json file was fine. That means that, with high probability, there was a retransmission.
# Next step is to backup the failed tap test and overwrite it with the working one
mv "$3" "${3}_retrans"
mv "${next_tap}" "$3"
mv "$2" "${2}_retrans"
mv "${next_tap}" "$2"
test_ok+=("${kam_msg}")
return 0
else
@ -157,15 +149,13 @@ check_retrans_prev() {
# testing previous json file, if exist. Necessary in case of wrong timing/order.
local prev_msg
local prev_tap
local kam_type
local step
local err_type
local kam_msg
step=${4:-1}
step=${3:-1}
prev_test_filepath "$1" "${step}"
kam_type=$2
prev_tap=${3/_test.tap/_test_prev.tap}
prev_tap=${2/_test.tap/_test_prev.tap}
echo "$(date) - Fix retransmissions enabled: try to test the previous[-${step}] json file"
kam_msg=$(basename "${prev_msg}")
@ -175,11 +165,11 @@ check_retrans_prev() {
return 1
fi
echo -n "$(date) - Testing $(basename "$1") against ${kam_msg} -> $(basename "${prev_tap}")"
if "${BIN_DIR}/check.py" "${kam_type}" "$1" "${prev_msg}" > "${prev_tap}" ; then
if "${BIN_DIR}/check.py" "$1" "${prev_msg}" > "${prev_tap}" ; then
# Test using the previous json file was fine. That means that, with high probability, there was a wrong timing/order.
# Next step is to backup the failed tap test and overwrite it with the working one
mv "$3" "${3}_retrans"
mv "${prev_tap}" "$3"
mv "$2" "${2}_retrans"
mv "${prev_tap}" "$2"
test_ok+=("${kam_msg}")
return 0
else
@ -254,7 +244,6 @@ check_sip_test() {
# $3 destination tap filename
check_test() {
local dest
local kam_type="--yaml"
local err_type
local kam_msg
@ -271,13 +260,9 @@ check_test() {
return 1
fi
if "${JSON_KAM}" ; then
kam_type="--json"
fi
kam_msg=$(basename "$2")
echo -n "$(date) - Testing $(basename "$1") against ${kam_msg} -> $(basename "$3")"
if "${BIN_DIR}/check.py" "${kam_type}" "$1" "$2" > "$3" ; then
if "${BIN_DIR}/check.py" "$1" "$2" > "$3" ; then
echo " ok"
test_ok+=("${kam_msg}")
return 0
@ -288,7 +273,7 @@ check_test() {
echo " NOT ok[${err_type}]"
if "${FIX_RETRANS}" ; then
check_retrans_block "$1" "${kam_type}" "$3" "${RETRANS_SIZE}" && return 0
check_retrans_block "$1" "$3" "${RETRANS_SIZE}" && return 0
fi
ERR_FLAG=1
@ -545,19 +530,15 @@ run_sipp() {
test_filepath() {
local msg_name
if ! "${JSON_KAM}" ; then
msg_name=${1/_test.yml/.yml}
else
if grep -q '^retrans: true' "${1}"; then
echo "$(date) - Detected a test for a retransmission"
msg_name=${1/_test_retransmission.yml/.json_retransmission}
msg="${LOG_DIR}/$(basename "${msg_name}")"
if [ -f "${msg}" ]; then
return
fi
if grep -q '^retrans: true' "${1}"; then
echo "$(date) - Detected a test for a retransmission"
msg_name=${1/_test_retransmission.yml/.json_retransmission}
msg="${LOG_DIR}/$(basename "${msg_name}")"
if [ -f "${msg}" ]; then
return
fi
msg_name=${1/_test.yml/.json}
fi
msg_name=${1/_test.yml/.json}
msg="${LOG_DIR}/$(basename "${msg_name}")"
}
@ -567,11 +548,7 @@ next_test_filepath() {
local new_json
local step=${2:-1}
if ! "${JSON_KAM}" ; then
msg_name=${1/_test.yml/.yml}
else
msg_name=${1/_test.yml/.json}
fi
msg_name=${1/_test.yml/.json}
msg_name=$(basename "${msg_name}")
old_json="${msg_name:0:4}"
@ -588,12 +565,7 @@ prev_test_filepath() {
local new_json
local step=${2:-1}
if ! "${JSON_KAM}" ; then
msg_name=${1/_test.yml/.yml}
else
msg_name=${1/_test.yml/.json}
fi
msg_name=${1/_test.yml/.json}
msg_name=$(basename "${msg_name}")
old_json="${msg_name:0:4}"
new_json=$(((10#$old_json)-step)) # There should not be any problem since they start from 0001
@ -619,7 +591,7 @@ cdr_check() {
}
usage() {
echo "Usage: check.sh [-hCDRGgJKm] [-T <all|none|cfgt|sipp>] [-p PROFILE ] [-s GROUP] check_name"
echo "Usage: check.sh [-hCDRGgKm] [-T <all|none|cfgt|sipp>] [-p PROFILE ] [-s GROUP] check_name"
echo "Options:"
echo -e "\\t-I: SIP_SERVER IP, default:127.0.0.1"
echo -e "\\t-C: skip creation of domain and subscribers"
@ -631,7 +603,6 @@ usage() {
echo -e "\\t-g: creation of graphviz image only if test fails"
echo -e "\\t-r: fix retransmission issues"
echo -e "\\t-p: CE|PRO default is CE"
echo -e "\\t-J: kamailio json output OFF"
echo -e "\\t-M: skip move of kamailio json output to log folder"
echo -e "\\t-K: enable tcpdump capture"
echo -e "\\t-s: scenario group. Default: scenarios"
@ -641,7 +612,7 @@ usage() {
echo -e "\\tcheck_name. Scenario name to check. This is the name of the directory on GROUP dir."
}
while getopts 'hI:Cp:Rs:DT:PGgrcJKMmw:' opt; do
while getopts 'hI:Cp:Rs:DT:GgrcKMmw:' opt; do
case $opt in
h) usage; exit 0;;
I) SIP_SERVER=${OPTARG};;
@ -651,12 +622,10 @@ while getopts 'hI:Cp:Rs:DT:PGgrcJKMmw:' opt; do
s) GROUP=${OPTARG};;
D) SKIP_DELDOMAIN=true;;
T) CHECK_TYPE=${OPTARG};;
P) SKIP_PARSE=true;;
K) CAPTURE=true;;
G) GRAPH=true;;
g) GRAPH_FAIL=true;;
r) FIX_RETRANS=true;;
J) JSON_KAM=false;;
M) SKIP_MOVE_JSON_KAM=true;;
m) MEMDBG=true;;
c) CDR=true;;
@ -725,7 +694,6 @@ if ! "$SKIP" ; then
fi
if ! "$SKIP_RUNSIPP" ; then
if "${JSON_KAM}" ; then
if ! [ -d "${KAM_DIR}" ] ; then
echo "$(date) - dir and perms for ${KAM_DIR}"
mkdir -p "${KAM_DIR}"
@ -735,7 +703,6 @@ if ! "$SKIP_RUNSIPP" ; then
echo "$(date) - clean cfgt scenario ${test_uuid}"
ngcp-kamcmd proxy cfgt.clean "${test_uuid}"
fi
fi
echo "$(date) - Running sipp scenarios"
run_sipp
@ -745,25 +712,23 @@ if ! "$SKIP_RUNSIPP" ; then
cp "${SCEN_CHECK_DIR}/scenario_ids.yml" "${LOG_DIR}"
echo "$(date) - Done"
if "${JSON_KAM}" ; then
if ! "${SKIP_MOVE_JSON_KAM}" ; then
echo "$(date) - Move kamailio json files"
if [ -d "${JSON_DIR}" ] ; then
for i in "${JSON_DIR}"/*.json ; do
json_size_before=$(stat -c%s "${i}")
moved_file="${LOG_DIR}/$(printf "%04d.json" "$(basename "${i}" .json)")"
expand -t1 "${i}" > "${moved_file}"
json_size_after=$(stat -c%s "${moved_file}")
echo "$(date) - Moved file ${i} with size before: ${json_size_before} and after: ${json_size_after}"
rm "${i}"
done
else
echo "$(date) - No json files found"
fi
echo "$(date) - clean cfgt scenario ${test_uuid}"
ngcp-kamcmd proxy cfgt.clean "${test_uuid}"
echo "$(date) - Done"
if ! "${SKIP_MOVE_JSON_KAM}" ; then
echo "$(date) - Move kamailio json files"
if [ -d "${JSON_DIR}" ] ; then
for i in "${JSON_DIR}"/*.json ; do
json_size_before=$(stat -c%s "${i}")
moved_file="${LOG_DIR}/$(printf "%04d.json" "$(basename "${i}" .json)")"
expand -t1 "${i}" > "${moved_file}"
json_size_after=$(stat -c%s "${moved_file}")
echo "$(date) - Moved file ${i} with size before: ${json_size_before} and after: ${json_size_after}"
rm "${i}"
done
else
echo "$(date) - No json files found"
fi
echo "$(date) - clean cfgt scenario ${test_uuid}"
ngcp-kamcmd proxy cfgt.clean "${test_uuid}"
echo "$(date) - Done"
fi
if "${FIX_RETRANS}" ; then
@ -805,20 +770,10 @@ if ! "$SKIP_RUNSIPP" ; then
check_rtp
fi
if ! "${SKIP_DELDOMAIN}" ; then
"${BIN_DIR}/provide_scenario.sh" "${SCEN_CHECK_DIR}" delete
fi
if ! "${SKIP_PARSE}" ; then
if ! "${JSON_KAM}" ; then
echo "$(date) - Parsing ${LOG_DIR}/kamailio.log"
"${BIN_DIR}/ulog_parser.pl" "${LOG_DIR}/kamailio.log ${LOG_DIR}"
echo "$(date) - Done"
fi
fi
# let's check the results
if [[ ${CHECK_TYPE} != none ]] ; then
echo "$(date) - ================================================================================="

@ -25,10 +25,12 @@ use Cwd 'abs_path';
use Data::Dumper;
use Getopt::Long;
use English;
use utf8;
use JSON;
sub usage
{
my $output = "usage: graph_flow.pl [-h] [-j] file_in file_out.png\n";
my $output = "usage: graph_flow.pl [-h] file_in file_out.png\n";
$output .= "Options:\n";
$output .= "\t-h: this help\n";
$output .= "\t-j: file_in is json\n";
@ -37,7 +39,7 @@ sub usage
my $help = 0;
my $json_in = 0;
GetOptions ("h|help" => \$help, "j|json" => \$json_in)
GetOptions ("h|help" => \$help)
or die("Error in command line arguments\n".usage());
if($#ARGV!=1)
@ -56,22 +58,14 @@ my $g = GraphViz->new();
my $filename = abs_path($ARGV[0]);
my $outfilename = $ARGV[1];
my $inlog;
if($json_in) {
use utf8;
use JSON;
my $json;
{
local $INPUT_RECORD_SEPARATOR = undef; #Enable 'slurp' mode
open my $fh, "<", $filename;
$json = <$fh>;
close $fh;
}
$inlog = decode_json($json);
}
else {
use YAML::XS;
$inlog = YAML::XS::LoadFile($filename);
my $json;
{
local $INPUT_RECORD_SEPARATOR = undef; #Enable 'slurp' mode
open my $fh, "<", $filename;
$json = <$fh>;
close $fh;
}
$inlog = decode_json($json);
my @prevs = ();
my $name = '';
my $action = '';

@ -1,230 +0,0 @@
#!/usr/bin/env perl
#
# Copyright: 2013 Sipwise Development Team <support@sipwise.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This package is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# On Debian systems, the complete text of the GNU General
# Public License version 3 can be found in "/usr/share/common-licenses/GPL-3".
#
use 5.014;
use strict;
use warnings;
use English;
use JSON;
use YAML::XS;
use File::Spec;
use Cwd 'abs_path';
use Data::Dumper;
use Tie::File;
use Getopt::Long;
# Default kamailio proxy log to parse and default destination directory for
# the generated per-message YAML files; both may be overridden via @ARGV below.
my $filename = "/var/log/ngcp/kamailio-proxy.log";
my $output_dir = "log";
# Return the multi-line help text for ulog_parser.pl, including the
# current defaults for the kamailio log path and the output directory.
sub usage
{
    my @help_lines = (
        "usage: ulog_parser.pl [-h] [kamailio_log] [dest_dir]",
        "Options:",
        "\t-h: this help",
        "\tkamailio_log default:$filename",
        "\tdest_dir default:$output_dir",
    );
    return join("\n", @help_lines)."\n";
}
my $help = 0;
# Only -h/--help is accepted; any unrecognised option aborts with the usage text.
GetOptions ("h|help" => \$help)
or die("Error in command line arguments\n".usage());
# At most two positional arguments (kamailio_log and dest_dir) are allowed.
if($#ARGV>=2 || $help)
{
die(usage())
}
my $path;
# Accumulator for the SIP message currently being parsed; save_data() writes
# it out as YAML and resets it to this empty skeleton.
my $data = {
msgid => '',
callid => '',
flow => [],
sip_in => '',
sip_out => [],
};
# Persist the accumulated $data for one SIP message as
# <output_dir>/NNNN.yml (zero-padded msgid) and reset $data for the next
# message. Exits the whole program if no msgid was collected.
sub save_data
{
if(!$data->{'msgid'})
{
print "data with no msgid\n";
print Dumper $data;
exit;
}
else
{
if (!$data->{'sip_out'}) { print "no sip_out\n"; }
$path = File::Spec->catfile( $output_dir, (sprintf "%04i", $data->{'msgid'}).".yml");
YAML::XS::DumpFile($path, $data);
#print "$data->{'msgid'} saved\n";
# This tries to fix problems with string values '-' being saved
# without quotes.
# NOTE(review): the die message is single-quoted, so $path is not
# interpolated into it.
tie my @array, 'Tie::File', $path or die ('Can not open $path');
for (@array)
{
s/(.*\$\w+\(\w+\):) -/$1 '-'/g
}
untie @array;
}
# Reset the accumulator to an empty skeleton for the next message.
$data = {
msgid => '',
callid => '',
flow => [],
sip_in => '',
sip_out => [],
};
return;
}
# Shared parser state: pid of the proxy process being followed, the open
# kamailio log filehandle, and the most recently read line.
my $pid;
my $log;
my $line;
# Scan forward in $log for the first "start of route MAIN" DEBUG line and
# remember its proxy pid in the global $pid; the matching line is left in
# the global $line. Returns false once EOF is reached.
sub first_line
{
$pid="unknown";
my $pid_read;
do
{
$line = <$log>;
#print "read line\n";
# EOF: normalise $line to '' so callers can test it, and return false.
if(!$line) { $line = ''; return ($line ne '');}
else
{
($pid_read) = ($line =~ m/.+proxy\[(\d+)\]: DEBUG: <script>: start of route MAIN.*$/);
if($pid_read) {
$pid = $pid_read;
#print "pid:".$pid."\n";
}
else {
$pid_read = '';
#print "what?".$line."\n";
}
}
# Loop exits only once a MAIN line was matched ($pid was set to $pid_read).
} while($pid_read ne $pid);
return ($line ne '');
}
# Read the next DEBUG line belonging to the proxy process $pid, skipping
# lines from other pids; the line is left in the global $line.
# Returns false at EOF.
sub next_line
{
my $pid_read;
do
{
$line = <$log>;
#print "read line\n";
# EOF: normalise $line to '' and return false.
if(!$line) { $line = ''; return ($line ne '');}
else
{
($pid_read) = ($line =~ m/.+proxy\[(\d+)\]: DEBUG:.*$/);
if($pid_read) {
# Adopt this pid if none is being tracked yet.
if(!$pid) {
$pid = $pid_read;
#print "pid:".$pid."\n";
}
}
else {
$pid_read = '';
#print "what?".$line."\n";
}
}
# Skip DEBUG lines that belong to other proxy processes.
} while($pid_read ne $pid);
return ($line ne '');
}
# Positional arguments override the default log path and output directory.
if (@ARGV == 2) {
$filename = $ARGV[0]; $output_dir = $ARGV[1];
} elsif (@ARGV == 1) {
$filename = $ARGV[0];
}
$filename = abs_path($filename);
$output_dir = abs_path($output_dir);
my $out;
open($log, '<', "$filename") or die "Couldn't open kamailio log, $ERRNO";
# Position on the first "start of route MAIN" line and lock onto its pid.
first_line();
# Main parse loop: walk the DEBUG lines of one proxy process, collecting
# outgoing SIP messages, route-flow JSON dumps and the incoming SIP message
# into $data, flushing it via save_data() whenever a new msgid starts.
do
{
my ($mode, $route, $msgid, $msgid_t, $json, $msg, $pjson, $callid, $method);
# Jun 25 14:52:16 spce proxy[11248]: DEBUG: debugger [debugger_api.c:427]: dbg_cfg_dump(): msg out:{
if(($msg) = ($line =~ m/.+msg out:\{(.+)\}$/))
{
# Consume a run of consecutive "msg out" lines (outgoing SIP messages).
do
{
if(($callid) = ($msg =~ m/.+Call-ID: ([^#]+)#015#012.+$/si))
{
# Only keep messages that belong to the call currently being tracked.
if($data->{'callid'} eq $callid)
{
# "#015#012" is the escaped CRLF in the log; restore real line breaks.
$msg =~ s/#015#012/\n/g;
push @{$data->{'sip_out'}}, $msg;
}
else
{
print "Not this Call-ID[$data->{'callid'}]:$callid\n$msg\n"
}
}
else
{
print "No Call-ID\n";
}
next_line();
}while(($msg) = ($line =~ m/.+msg out:\{(.+)\}$/));
#print "msg_out\n";
}
# "<mode> of route <ROUTE> - <msgid> <raw msg>" marks a route boundary.
if(($mode, $route, $msgid, $method) = ($line =~ m/.+DEBUG: <script>: (\w+) of route (\w+) - (\d+) (.*)$/))
{
if($route eq "MAIN")
{
#if ($mode eq "start") { print "$msgid start MAIN\n"; }
# A new msgid on MAIN means the previous message is complete: flush it.
if(($data->{'msgid'}) && ($data->{'msgid'} ne $msgid)) {
#print "$msgid!=$data->{'msgid'} MAIN->save\n";
save_data();
}
$data->{'msgid'} = $msgid;
}
my $prev_line = $line;
next_line();
if(!$line)
{
print $prev_line;
close($log);
die("Error parsing $filename. Malformed debug output\n");
}
else
{
# The route line is expected to be followed by a dbg_dump_json() line
# carrying the flow info as JSON.
if(($json) = ($line =~ m/.+dbg_dump_json\(\): (\{.*\})$/))
{
$pjson = from_json($json);
push @{$data->{'flow'}}, { $mode."|".$route => $pjson };
if ($route eq "MAIN" && $mode eq "start")
{
($msg) = $method;
# Remember the Call-ID and the incoming SIP message for this flow.
if(($callid) = ($msg =~ m/.+Call-ID: ([^#]+)#015#012.+$/si)) { $data->{'callid'} = $callid; }
$msg =~ s/#015#012/\n/g;
if($mode eq "start") { $data->{'sip_in'} = $msg; }
}
#print $mode."|".$route."\n";
}
}
}
} while(next_line());
# Flush the final message, if any, and close the log.
if($data->{'msgid'} ne '') { save_data(); }
close($log);
#EOF

@ -16,7 +16,7 @@ TMP_LOG_DIR="/tmp"
KAM_DIR="/tmp/cfgtest"
COREDUMP_DIR="/ngcp-data/coredumps"
PROFILE="${PROFILE:-}"
OPTS=(-P -Tnone -M -C) #SKIP_PARSE=true, SKIP_TESTS=true, SKIP_MOVE_JSON_KAM=true, SKIP=true
OPTS=(-Tnone -M -C) #SKIP_TESTS=true, SKIP_MOVE_JSON_KAM=true, SKIP=true
SHOW_SCENARIOS=false
SKIP_CONFIG=false

Loading…
Cancel
Save