Can't locate object method "new" in Perl (Text::CSV module)

While running a Perl script on Unix (it works perfectly with Strawberry Perl on Windows), I am getting the following error:
Can't locate object method "new" via package "Text::CSV"
Any insight into identifying the cause is highly appreciated.
Scripts:
#!/usr/bin/perl
use strict;
use warnings;
use Net::LDAP;
use Text::CSV;
use Net::LDAP::Entry;
use File::Basename;
use File::chmod;
use Config::Tiny;
use File::Copy;
use Text::Trim;
use Data::Dumper qw(Dumper);
use Net::LDAP::Util qw(ldap_error_text);
use Net::LDAP::Constant;
my $config = Config::Tiny->read('config.ini');
#Variable Declaration section
my ($bindhost,$port,$bindpwd,$binddn,$base_search,$ldap,$customerCode,$logDir,$entry,$result,$csv,$file,$line,$data,$cn,$dn,$entry2,$start_timestamp,$new,$u,$ct,$old,$logfile,$max,$stop_timestamp);
my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst)=localtime(time);
$start_timestamp = sprintf ( "%04d%02d%02d %02d:%02d:%02d",$year+1900,$mon+1,$mday,$hour,$min,$sec);
foreach my $section (keys %{$config}) {
#LDAP Binding Connectivity variables declaration
$bindhost = $config->{$section}->{'ldap_host'};
$port = $config->{$section}->{'ldap_port'};
$bindpwd = $config->{$section}->{'ldap_password'};
$binddn = $config->{$section}->{'ldap_user'};
$base_search = $config->{$section}->{'ldap_customers_ou_dn'};
$logDir = $config->{$section}->{'log_dir'};
# connect to the ldap server
my $ldap = Net::LDAP->new($bindhost,port=>$port,timeout=>240) or die "Could not bind to ldap server: $! - $@\n";
$result = $ldap->bind
(
dn => trim($binddn), password=>trim($bindpwd)
);
#Open Script directory over here
opendir(DIR, ".");
my @files = grep(/\.csv$/,readdir(DIR));
closedir(DIR);
$csv = Text::CSV->new({ sep_char => ',' });
#print "\n Script starts processing for the timings $start_timestamp";
#Visit each .csv file by checking its naming convention over here
my $fileCt = 0;
if($file=$ARGV[0]){
print "\n Script starts processing for the timings $start_timestamp";
$ct = 1;
open($data, '<', $file) or die "Could not open given file \n";
open($logfile, '>>', 'logfile.txt');
print $logfile "Script started running for file $file at ".$start_timestamp."\n";
close $logfile;
while ($line = <$data>){
if ($csv->parse($line)) {
my @fields = $csv->fields();
$customerCode = $fields[0];
$result = $ldap->search(
base => "$base_search",
filter => "(&(customerCode=$customerCode))",
);
die ldap_error_text($result->code) if $result->code;
$max = $result->count;
if($max == 0) {
open($logfile, '>>', 'logfile.txt');
print $logfile "This customerCode $customerCode was not found in LDAP and was not reset\n";
close $logfile
}
else {
open($logfile, '>>', 'logfile.txt');
print $logfile "This customerCode $customerCode was found in LDAP and is reset\n";
close $logfile
}
for (my $index = 0 ; $index < $max ; $index++) {
my $entry = $result->entry($index);
$u = ${$entry->get('uid')}[0];
$dn = "uid=$u,$base_search";
}
my @all = ();
@all = trim($result->entries);
foreach $entry (@all){}
$entry = Net::LDAP::Entry->new;
$entry->dn($dn);
$entry->replace(
'cn' => " ",
'userPassword'=> "",
'challengeQuestion'=> "",
'challengeAnswer'=> "",
'ctscPasswordCreationDate'=> "",
'ctscPasswordExpirationDate'=> "",
'ctscPasswordHistory'=> "",
'ctscPasswordResetAttempts'=> "",
'ctscPasswordLockoutEnable'=> "",
'ctscLastResetDate'=> "",
'ctscFailedLoginCount'=> "",
);
$entry->update ($ldap);
$old = ${$entry->get('cn')}[0];
$old = ${$entry->get('userPassword')}[0];
$old = ${$entry->get('challengeQuestion')}[0];
$old = ${$entry->get('challengeAnswer')}[0];
$old = ${$entry->get('ctscPasswordCreationDate')}[0];
$old = ${$entry->get('ctscPasswordExpirationDate')}[0];
$old = ${$entry->get('ctscPasswordHistory')}[0];
$old = ${$entry->get('ctscPasswordResetAttempts')}[0];
$old = ${$entry->get('ctscPasswordLockoutEnable')}[0];
$old = ${$entry->get('ctscLastResetDate')}[0];
$old = ${$entry->get('ctscFailedLoginCount')}[0];
$entry2 = $entry->clone; # copies entry
$ldap->modify($dn, replace => {'cn' => "" });
$ldap->modify($dn, replace => {'userPassword' => "" });
$ldap->modify($dn, replace => {'challengeQuestion' => "" });
$ldap->modify($dn, replace => {'challengeAnswer' => "" });
$ldap->modify($dn, replace => {'ctscPasswordCreationDate' => "" });
$ldap->modify($dn, replace => {'ctscPasswordExpirationDate' => "" });
$ldap->modify($dn, replace => {'ctscPasswordHistory' => "" });
$ldap->modify($dn, replace => {'ctscPasswordResetAttempts' => "" });
$ldap->modify($dn, replace => {'ctscPasswordLockoutEnable' => "" });
$ldap->modify($dn, replace => {'ctscLastResetDate' => "" });
$ldap->modify($dn, replace => {'ctscFailedLoginCount' => "" });
}
else {
warn "Line could not be parsed: $line\n";
}
$ct++;
} #End while loop
my ($sec1,$min1,$hour1,$mday1,$mon1,$year1,$wday1,$yday1,$isdst1)=localtime(time);
$stop_timestamp = sprintf ( "%04d%02d%02d %02d:%02d:%02d",$year1+1900,$mon1+1,$mday1,$hour1,$min1,$sec1);
print "\n Script ends Here for the timings - $stop_timestamp ";
open($logfile, '>>', 'logfile.txt');
print $logfile "Processing stopped at ".$stop_timestamp."\n";
close $logfile;
close $data;
} #if file pattern checking loop ends
else {
print "\n Please provide a .csv file as an input";
}
}
CSV.pm:
use Text::CSV;
my @rows;
my $csv = Text::CSV->new ( { binary => 1 } ) # should set binary attribute.
or die "Cannot use CSV: ".Text::CSV->error_diag ();
open my $fh, "<:encoding(utf8)", "test.csv" or die "test.csv: $!";
while ( my $row = $csv->getline( $fh ) ) {
$row->[2] =~ m/pattern/ or next; # 3rd field should match
push @rows, $row;
}
$csv->eof or $csv->error_diag();
close $fh;
$csv->eol ("\r\n");
open $fh, ">:encoding(utf8)", "new.csv" or die "new.csv: $!";
$csv->print ($fh, $_) for #rows;
close $fh or die "new.csv: $!";
#
# parse and combine style
#
$status = $csv->combine(@columns); # combine columns into a string
$line = $csv->string(); # get the combined string
$status = $csv->parse($line); # parse a CSV string into fields
@columns = $csv->fields(); # get the parsed fields
$status = $csv->status (); # get the most recent status
$bad_argument = $csv->error_input (); # get the most recent bad argument
$diag = $csv->error_diag (); # if an error occurred, explains WHY
$status = $csv->print ($io, $colref); # Write an array of fields
# immediately to a file $io
$colref = $csv->getline ($io); # Read a line from file $io,
# parse it and return an array
# ref of fields
$csv->column_names (@names); # Set column names for getline_hr ()
$ref = $csv->getline_hr ($io); # getline (), but returns a hashref
$eof = $csv->eof (); # Indicate if last parse or
# getline () hit End Of File
$csv->types(\@t_array); # Set column types

I don't know what your second block of code is for. It looks like you copied the SYNOPSIS from the CPAN page of Text::CSV.
However, in your program you have a use Text::CSV and then you get this error message:
Can't locate object method "new" via package "Text::CSV"
That error message is a dead giveaway.
You don't have Text::CSV installed on your Unix box. Install it from CPAN.
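If you want to check first, a quick test along these lines (run it with the same perl that runs your script) will tell you whether the module is visible and which file it would be loaded from:
#!/usr/bin/perl
# Minimal check: can this perl load Text::CSV, and from which file?
use strict;
use warnings;

eval { require Text::CSV; 1 }
    or die "Text::CSV is not installed for $^X: $@";

print "Text::CSV ", Text::CSV->VERSION, " loaded from $INC{'Text/CSV.pm'}\n";
If it dies, install the module, for example with cpan Text::CSV (or your distribution's package, e.g. libtext-csv-perl on Debian/Ubuntu), and the error should go away.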

Related

I need to create a file with all hashicorp vault key value pairs data using shell script

I need to create a file with all the HashiCorp Vault key/value pair data using a shell script.
I want to dump all the data from Vault to a flat file.
Please advise on the best way to do it.
Thanks in advance,
Prudhvi
Just for keys and values you can use my little Perl script 'vault-backup', which reads the data with the appropriate vault commands and freezes it with Storable.
Please note that this does NOT create a full backup of your Vault! Auth methods and any other (unlistable) things outside the secrets are not backed up. It is only usable for simple keys and values, and probably not for multiline or binary values. You can patch the script to support that, if you like. ;)
#!/usr/bin/perl
#
# Usage: vault-backup [<PATH> [stdout]]
use Data::Dumper;
use Storable qw(freeze thaw);

# Set vault environment variables
# Always end with a " && " for the actual command
my $setenv =
    "VAULT_ADDR=https://myvault.somewhere.com:8200 && ".
    "VAULT_CA_PATH=/etc/yourcertificates/ && ";

my $path = $ARGV[0] || "secret/";
if ($path !~ /\/$/) {
    $path = "$path/";
}

push @list, getData($path);

if ($ARGV[1] eq "stdout") {
    print Dumper(\@list);
} else {
    my $fn = "vault-backup-frozen-".time().".dat";
    open W, ">$fn";
    print W freeze(\@list);
    close W;
    print STDERR "Wrote data to $fn\n";
}

sub getData {
    my $path = shift;
    print STDERR "Starting getData($path)\n";
    my @ret = ();
    my $command = "$setenv vault kv list -tls-skip-verify $path | tail -n+3 ";
    print STDERR "starting command: $command\n";
    my @lines = `$command`;
    chomp @lines;
    foreach my $line (@lines) {
        if ($line =~ /\/$/) {
            my @result = getData($path.$line);
            if (scalar(@result) > 0) {
                # Find deeper results
                push @ret, @result;
            } else {
                # empty final dir, no values
                push @ret, { path => $path.$line };
            }
        } else {
            # Found a key!
            my $command = "$setenv vault kv get -tls-skip-verify $path$line";
            print STDERR "starting command: $command\n";
            my $values = `$command`;
            push @ret, { path => $path.$line, value => $values };
        }
    }
    return @ret;
}
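If you just want to inspect a backup before restoring it, a small reader along these lines (assuming a vault-backup-frozen-<timestamp>.dat file written by the script above) thaws the frozen list and dumps it:
#!/usr/bin/perl
# Inspection sketch: thaw a frozen backup file and dump its structure.
use strict;
use warnings;
use Storable qw(thaw);
use Data::Dumper;

my $fn = shift or die "Usage: $0 <vault-backup-frozen-TIMESTAMP.dat>\n";
open my $fh, '<', $fn or die "$fn: $!";
my $entries = thaw(do { local $/; <$fh> });   # slurp the whole file, then thaw it
close $fh;
print Dumper($entries);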
To restore the data, you can use the script below. It handles data only, it does not act on metadata.
#!/usr/bin/perl
# Usage: vault-restore <backup-filename>
use Data::Dumper;
use Storable qw(thaw);

my %all_entries;

# Set vault environment variables
# Always end with a " && " for the actual command
my $setenv =
    "VAULT_ADDR=https://myothervault.somewhere.com:8200 && ".
    "VAULT_CA_PATH=/etc/mycertificates/ && ";

# Read the data
my $fn = $ARGV[0] || die("I need a filename with the frozen data");
open F, "<$fn";
my @list = @{ thaw(join("", <F>)) };
close F;
print STDERR "Read ".scalar(@list)." entries.\n";

# Process the data
foreach my $entry (@list) {
    print STDERR "\n# adding entry -> $entry->{path}\n";
    addEntry($entry);
}

foreach my $path (keys %all_entries) {
    my $keyvalues = "";
    foreach my $key (keys %{$all_entries{$path}}) {
        my $value = $all_entries{$path}{$key};
        $keyvalues .= "'$key=$value' ";
    }
    print STDERR "vault kv put $path $keyvalues\n";
    # `$command`;
}

sub addEntry {
    my $entry = shift;
    my $path = $entry->{'path'};
    if ($entry->{'value'}) {
        my $values = $entry->{value};
        my @list = split("\n", $values);
        my $metadata_engage = 0;
        my $data_engage = 0;
        foreach my $keyvalue (@list) {
            if ($keyvalue =~ /==== Metadata ====/) {
                $metadata_engage = 1;
                $data_engage = 0;
            } elsif ($keyvalue =~ /==== Data ====/) {
                $metadata_engage = 0;
                $data_engage = 1;
            } elsif ($data_engage) {
                my ($key, $value) = ($keyvalue =~ /^([^ ]+) +(.*)$/);
                if ($key ne "Key" && $key ne "---") {
                    # print STDERR "key=$key ; value=$value\n";
                    $all_entries{$path}{$key} = $value;
                } else {
                    # print STDERR "-- separator\n";
                }
            }
        }
    } else {
        print STDERR "Found a final but empty path: $path\n";
    }
}

Redirect of STDERR not closing

I am redirecting STDERR to an error file but am unable to unlink the error file if it is empty. I believe I am not releasing STDERR, which keeps the error file busy so it cannot be deleted. What do you think? Thank you!
$errFile = $outFile . "-error";
open (ERRFILE, '>', $errFile) or die $!;
#Redirect STDERR from the console to the error log
open (STDERR, '>', $errFile) or die $!;
# Do stuff....
close(STDERR);
close(ERRFILE);
#Remove blank error files
opendir(DIR, 'c:\LMITS');
@errFiles = grep /error/, readdir DIR;
closedir DIR;
foreach $errFile (@errFiles) {
$errFileSize = -s $errFile;
if ($errFileSize == 0) {
unlink $errFile;
}
}
readdir returns file names, not paths.
foreach (@errFiles) {
    my $errFile = 'c:\\LMITS\\' . $_;
    ...
}
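Putting that together, the cleanup loop could look roughly like this (assuming, as in the question, that the error files live in C:\LMITS):
# Cleanup sketch: prepend the directory to each name returned by readdir.
opendir my $dh, 'c:\LMITS' or die "c:\\LMITS: $!";
my @errFiles = grep { /error/ } readdir $dh;
closedir $dh;

foreach my $name (@errFiles) {
    my $path = 'c:\\LMITS\\' . $name;   # readdir gives bare names, not paths
    unlink $path if -z $path;           # -z is true for an empty (zero-length) file
}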
This code works, but if I move the commands that close STDERR and ERRFILE further down in the script, a blank ERRFILE will not be deleted. I'm OK for now, but I will keep researching for the sake of just knowing.
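That matches the suspicion in the question: as long as STDERR (or the separate ERRFILE handle opened on the same file) still has the file open, Windows treats it as busy and unlink fails. One way around it, sketched below on the assumption that only STDERR needs to point at the error file, is to duplicate STDERR before redirecting and restore it before the cleanup:
# Sketch: save STDERR, redirect it, restore it before deleting an empty log.
open my $olderr, '>&', \*STDERR or die "Cannot dup STDERR: $!";
open STDERR, '>', $errFile      or die "Cannot redirect STDERR: $!";

# ... work that may write to STDERR ...

open STDERR, '>&', $olderr or die "Cannot restore STDERR: $!";   # releases $errFile
close $olderr;

unlink $errFile if -z $errFile;   # nothing is holding the file open any more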
use CQPerlExt;
use POSIX qw(strftime);
use strict;
my $checkForBlanks;
my $dbConfig;
my $dbConfigRecord;
my $entitydef;
my $errFile;
my @errFiles;
my $errFileSize;
my $fileDate;
my @fieldNames;
my $fieldName;
my $lastSync;
my $outFile;
my $queryDef;
my $queryResults;
my $recordCount = 0;
my $recordType;
my $session;
my $scriptStartTime;
my $swCR;
my $swData;
my $swID;
# ##############################################
# ##### Process administrative items and
# ##### establish a ClearQuest session
# ##############################################
$scriptStartTime = strftime("%Y-%m-%d %I:%M:%S %p", localtime());
$fileDate = strftime("%Y%m%d_%I%M%S%p", localtime());
#Create and open the output and error files
$outFile = "MSTU_Unclass_Export"."_".$fileDate.".txt";
open (OUTFILE, ">", $outFile) or die $!;
$errFile = $outFile . "-error";
open (ERRFILE, '>', $errFile) or die $!;
#Redirect STDERR from the console to the error log
open (STDERR, '>', $errFile) or die $!;
$session = CQSession::Build();
CQSession::UserLogon($session, "uname", "pw", "db", "schema");
$dbConfigRecord = $session->GetEntity("DB_CONFIG", "33554467");
$lastSync = $dbConfigRecord->GetFieldStringValue("LastSyncDate");
# ##############################################
# ##### Query the database for all SWCRs
# ##### updated after lastSyncDate
# ##############################################
$queryDef = $session->BuildQuery("SWCR");
$queryDef->BuildField("dbid");
my @lastSyncDate = ($lastSync);
my $operator = $queryDef->BuildFilterOperator($CQPerlExt::CQ_BOOL_OP_AND);
$operator->BuildFilter ("history.action_timestamp", $CQPerlExt::CQ_COMP_OP_GTE,\@lastSyncDate);
$queryResults = $session->BuildResultSet($queryDef);
$queryResults->Execute();
# ##############################################
# ##### Build a text file with SWCR data associated
# ##### with the dbids returned above
# ##############################################
#Get all of the fieldnames you want to export
$recordType = 'SWCR';
$entitydef = $session->GetEntityDef($recordType);
@fieldNames = @{$entitydef->GetFieldDefNames()};
#Remove any fields you don't want
@fieldNames = grep ! /dbid|history|RecordID|CCObjects|MergeSWCRs|AssociatedIntegrationSet|Level1TestResults|
Level2TestResults|Level3TestResults|Level4TestResults|Reviews|WithdrawCR|
AssociatedWithdrawnCR|Attachments|AssociatedPRs|OriginatingSolution|AssociatedSWRsFull|
AssociatedSWRsDelta|ClonedFrom|ClonedTo|AssociatedComment|ExternalLinks|ratl_mastership/x, @fieldNames;
while ($queryResults->MoveNext() == $CQPerlExt::CQ_SUCCESS) {
$swCR = $session->GetEntityByDbId("SWCR", $queryResults->GetColumnValue(1));
#Gather data
$swID = $swCR->GetFieldValue("RecordID")->GetValue();
$swData = "<RecordID>" . $swID . "</RecordID>";
foreach $fieldName (@fieldNames)
{
$checkForBlanks = $swCR->GetFieldStringValue($fieldName);
if ($checkForBlanks ne ""){
$swData = $swData . "<" . $fieldName . ">" . $swCR->GetFieldStringValue($fieldName) . "</" . $fieldName . ">";
}
}
#Build file with records separated by custom line delimiter
print OUTFILE $swData . "~~lineDelimiter~~\n";
#Keep track of the amount of records being exported
$recordCount++;
}
close(STDERR);
close(ERRFILE);
close(OUTFILE);
# ##############################################
# ##### Process administrative items and
# ##### close ClearQuest session
# ##############################################
#Remove extra carriage return at bottom of export file because this will throw an error when an import is performed
truncate($outFile, (-s $outFile) - 2);
#Add amount of records exported to the export log
open (EXPLOG, ">>", 'Export_Log.txt') or die $!;
print EXPLOG "$scriptStartTime: $recordCount record(s) written to $outFile for export.\n";
close (EXPLOG);
#Set the LastSyncDate field to the time the export script started
$dbConfigRecord = $session->GetEntity("DB_CONFIG", "33554467");
$session->EditEntity($dbConfigRecord, "Modify");
$dbConfigRecord->SetFieldValue("LastSyncDate",$scriptStartTime);
$dbConfigRecord->Validate();
$dbConfigRecord->Commit();
#Remove blank error files
opendir(DIR, 'c:\LMITS');
@errFiles = grep /error/, readdir DIR;
closedir DIR;
foreach $errFile (@errFiles) {
$errFileSize = -s $errFile;
if ($errFileSize == 0) {
unlink $errFile;
}
}
CQSession::Unbuild($session);

Perl Hash of hash issue

I have a hash which looks like this:
$VAR1 = {
'638047' => {
'commands' => 'perl nSWIFT/bin/tqtest.pl -var clist=',
},
'638049' => {
'commands' => 'perl nSWIFT/bin/tqtest.pl-var clist=',
}
};
I want to create files in a directory such that each file is named after a hash key and stores the numbers that are generated by running the command given in that key's commands entry (perl nSWIFT/bin/tqtest.pl -var clist=). The generated numbers should be stored in the file named after their key, for example:
'638047' -> '638050' and '638049' -> '638051'
I have tried this but am not getting there.
my %stat;
my @qbsid_filename = keys %stat;
foreach (@qbsid_filename) {
    open QBS, ">/root/qbsid/$_";
}
my ($newqbsid, @files);
opendir(DIREC, '/root/qbsid') or die $!;
foreach my $qbsid (keys %stat) {
    my $cmd = $stat{$qbsid}->{commands};
    if ($cmd =~ m/perl(.*)/) {
        $ex_cmd = $1;
    }
    $newqbsid = qx|perl $ex_cmd|;
}
closedir(DIREC);
I am not very good with algorithms, so I am finding this difficult.
Please let me know if I am going wrong somewhere or if you need more information. Thanks.
Try this:
my %stat = ( ... );
while (my ($qbsid_filename, $qbsid) = each %stat) {
    my $cmd = $qbsid->{commands};
    next unless $cmd =~ m/^perl/;
    open my $qbs, '>', "/root/qbsid/$qbsid_filename"
        or die "Cannot open $qbsid_filename: $!";
    open my $qbscmd, '-|', $cmd
        or die "Cannot execute $cmd: $!";
    while (<$qbscmd>) {
        print $qbs $_;
    }
    close($qbscmd);
    close($qbs);
}
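For reference, when the hash from the question is written out literally it should be initialised with parentheses, not braces (a brace-delimited list would build a single hash reference rather than the key/value pairs). The command strings below follow the Dumper output in the question:
my %stat = (
    '638047' => { commands => 'perl nSWIFT/bin/tqtest.pl -var clist=' },
    '638049' => { commands => 'perl nSWIFT/bin/tqtest.pl -var clist=' },
);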

how to save the selected data in csv file by using perl

My Perl script:
my $sth = $dbh->prepare("SELECT city,state address FROM address");
$sth->execute;
DBI::dump_results($sth);
The above script works fine and it shows the output below:
'aaa', 'aa'
'aaa', 'aa'
'city', 'KA'
'city', 'KA'
'city', 'KA'
My question is: the resulting output should be saved to a .csv file on my local machine. Is it possible to achieve this requirement? Please let me know.
Try this:
my $csv = Text::CSV->new ( { eol => "\n" } );
my $sql = "SELECT city,state address FROM address";
my $rows = $dbh->selectall_arrayref($sql);
", "new.csv"">
open my $fh, ">", "new.csv" or die "new.csv: $!";
for my $row (@$rows) {
    $csv->print ($fh, $row);
}
close $fh or die "new.csv: $!";
Fetch from the $sth and use Text::CSV_XS or Text::CSV to produce CSV data:
my $select = $db->prepare('select city, state address from address');
$select->execute;
my $c = 'Text::CSV'->new({ eol => "\n" });
open my $OUT, '>', 'new.csv' or die $!;
while (my $row = $select->fetchrow_arrayref) {
    $c->print($OUT, $row);
}
close $OUT or die $!;
This is all you need, tested code.
my $csv = Text::CSV->new( { eol => "\n" } );
open my $fh, ">", "data.csv" or die $!;
foreach my $row ( @{ $dbh->selectall_arrayref("SELECT city,state address FROM address") } ) {
    $csv->print( $fh, $row );
}
close $fh;
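All three snippets assume that DBI and Text::CSV are already loaded and that $dbh is a connected handle. A self-contained sketch (the DSN, user and password are placeholders for your own database) would be:
#!/usr/bin/perl
use strict;
use warnings;
use DBI;
use Text::CSV;

# Placeholder connection details; substitute your own DSN, user and password.
my $dbh = DBI->connect('dbi:mysql:database=mydb', 'user', 'password',
                       { RaiseError => 1 });

my $csv = Text::CSV->new({ eol => "\n" });
open my $fh, '>', 'data.csv' or die "data.csv: $!";
$csv->print($fh, $_)
    for @{ $dbh->selectall_arrayref('SELECT city, state address FROM address') };
close $fh or die "data.csv: $!";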

Convert Yahoo Messenger Logs to Adium Logs

Is there a way to convert logs from YM for Mac to Adium?
Thanks
Cezar
#!/usr/bin/perl
use warnings;
use strict;
use File::Find;
use File::Copy;
use Getopt::Long;
my $inDir = undef;
my $outDir = undef ;
my $adiumUser = undef;
my $force = 0;
my $foundLogs = 0;
my $help = 0;
my %Protocols = ( #Map gaim protocol IDs to Adium ones
"aim" => "AIM",
"yahoo" => "Yahoo!",
"msn" => "MSN"
#Add the rest here, or tell me what they are, someone who uses other protocols
);
sub usage
{
my $msg = shift;
print "Error: $msg\n" if $msg;
print "Usage: gaim2adium.pl [--adiumUser <user> | --outDir <output dir>] [--inDir <input dir>] [--yes]\n";
print "Options: (defaults)\n\n";
print "\tinDir:\t\t\tDirectory to import logs from (~/.gaim/logs)\n";
print "\toutDir:\t\t\tDirectory to import logs to (./Logs)\n";
print "\tadiumUser:\t\tAttempt to automatically import logs to the specified Adium user\n";
print "\tyes:\t\t\tDon't prompt before overwriting existing logs\n";
print "\thelp:\t\t\tDisplay this help.\n";
print "\nOnce the logs have been imported, the contents of outDir can be dragged to your Adium log folder\n\n";
exit(1);
}
sub process_log
{
-f or return;
#gaim logs are LOG_BASE/Protocol/Account/Contact/YYYY-MM-DD-TIME.(html|txt)
if($File::Find::name =~ m!^$inDir(?:/)?(.*?)/(.*?)/(.*?)/(\d{4})-(\d{2})-(\d{2})\.(\d{4})(\d{2}).(html|txt)!)
{
my ($proto,$acct,$contact,$year,$month,$day,$hour,$seconds,$ext) = ($1,$2,$3,$4,$5,$6,$7,$8,$9);
return unless defined ($proto = $Protocols{lc $proto});
$foundLogs = 1; #Set the logs found flag
my $outFN = "$contact ($year|$month|$day).";
$outFN .= ((lc $ext) eq "html") ? "html" : "adiumLog";
mkdir("$outDir/$proto.$acct");
mkdir("$outDir/$proto.$acct/$contact");
my $file = "$outDir/$proto.$acct/$contact/$outFN";
if(-e $file && !$force)
{
# print(($adiumUser?"$adiumUser already has":"There already exists"),
# " a log from $proto.$acct to $contact on $day/$month/$year.\n");
`cat '$File::Find::name' >> '$file'`;
} else {
copy($File::Find::name,$file);
}
`touch -t $year$month$day$hour.$seconds '$file'`;
}
}
#Sort a list of log files by time
sub sort_logs
{
    my @files = @_;
    return sort logcmp @files;
}
sub logcmp
{
my ($t1,$t2);
$t1 = $& if $a =~ /\d{6}/;
$t2 = $& if $b =~ /\d{6}/;
return 0 unless defined($t1) && defined($t2);
return $t1 <=> $t2;
}
GetOptions( "adiumUser=s" => \$adiumUser,
"inDir=s" => \$inDir,
"outDir=s" => \$outDir,
"yes" => \$force,
"help" => \$help)
or usage();
usage() if $help;
usage("You must supply at most one of adiumUser and outDir") if defined($outDir) && defined($adiumUser);
$outDir ||= "$ENV{HOME}/Library/Application Support/Adium 2.0/Users/$adiumUser/Logs" if defined $adiumUser;
$outDir ||= "$ENV{PWD}/Logs";
$inDir ||= shift;
$inDir ||= "$ENV{HOME}/.gaim/logs";
print "NOTE: Output directory exists, existing logs will be appended to.\n" if(-d $outDir);
mkdir($outDir) unless -e $outDir;
usage("Output dir must be a directory") unless -d $outDir;
usage("Output dir must be writeable") unless -w $outDir;
usage("Input directory '$inDir' does not exist") unless -d $inDir;
usage("Input directory '$inDir' is not readable") unless -r $inDir;
#Spider the logs dir
find({wanted => \&process_log,
preprocess => \&sort_logs}, $inDir);
#Warn if we didn't find any logs
unless($foundLogs)
{
print "Warning: No recognized logs found.\n";
print "Note:\tThis script only supports logs generated by gaim 0.73 and above.\n";
print "\tYou may be able to update older gaim logs to the new format using the script from\n";
print "\thttp://sourceforge.net/forum/message.php?msg_id=2392758\n";
exit(1);
}
exit(0);
