#!/usr/bin/env perl
#----------------------------------------------------------------------------
#
# Name: clean_ingest.pl
#
# This perl script is used for the CLNING task of the Ingest pipeline or 
# interactively. In the pipeline, it is designed to be controlled by the
# XPOLL tasks where both pipeline and resource file variables are passed as
# ENV variables. There are five modes of operation for this script.
# First the process must check if the ENV variable EVENT_TYPE is present, which 
# indicates that the process is being run in the pipeline. In pipeline mode, 
# there are two cases:
#
#  1) EVENT_TYPE = EVENT_OSF_EVENT -- check if the OSF is deletable, using the
#  data from the ingest_cleanup table - if this dataset is not deletable at this
#  time and the OSF event is for an install request OSF, then update the
#  ids_clean_delay_days value so it can be deleted as a timed event. 
#
#  2) EVENT_TYPE = EVENT_TIME_EVENT -- search the entire table to get 
#  deletable datasets using the ids_clean_delay_days value. Also
#  delete problem_log data where the file creation date exceeds the
#  resource variable PROBLEM_LOG_DELETE_DELAY.
#
# In interactive mode, there are three cases:
#  1) One argument -- the four character ingest pipeline id - perform the same
#  as pipeline EVENT_TIME_EVENT except problem logs are not deleted.
#
#  2) Two arguments -- the four character ingest pipeline id and the install
#  request id(ids_ins_request_id) - delete unconditionally the files for the
#  specified request if the ingest_data_set_info records still exists. Ignore
#  the ingest_cleanup table.
#
#  3) Three arguments -- the four character ingest pipeline id, the group name
#  (ids_group_name) and the group data_id (ids_group_data_id) -- delete 
#  unconditionally the files for all the datasets identified in 
#  ingest_data_set_info for this group name and data_id.
#
# Pipeline Usage:
#       command:clean_ingest.pl 
#     
#       In the pipeline, the resource file must set the following ENV 
#       variables: INGEST_PIPE_ID, PROBLEM_LOG_DIR, PROBLEM_LOG_DELETE_DELAY,
#       NSA_PATH_ROOT, OK_TO_UPDATE_DATABASE, ARCH_SERVER and ARCH_DB.
#
#       This scripts uses the following ENV variables that are set by the 
#       XPOLL process: EVENT_TYPE, and optionally OSF_DATASET and OSF_DATA_ID.
#
# Interactive Usage:
#       command clean_ingest.pl [<pipe_id> (<install_request_id> | 
#                                  <group_name> <group_data_id>) ]
#
#       The ENV variables ARCH_SERVER and ARCH_DB must be defined.
#
#       The ENV variables PROBLEM_LOG_DIR, PROBLEM_LOG_DELETE_DELAY are
#       optional but either both or none must be supplied.
#
#       The ENV variable NSA_PATH_ROOT is optional.
#
#       If PROBLEM_LOG_DIR and PROBLEM_LOG_DELETE_DELAY are both available, the
#       script will also delete any problem logs older than the delay days.
#       If this script is used in a "cron job" these values should be defined so
#       that the script behaves the same as the pipeline time poller.
#
#       If NSA_PATH_ROOT is available, the script
#       will also delete the NSA request directory and its contents that might
#       be left after a failed installation request. If this script is used in
#       a "cron job" this value should be defined so that the script behaves
#       the same as the pipeline time poller.
#
# Logging note: 
#    Only process log messages are generated. In interactive mode these go to
#    the user's STDOUT (the terminal or the "cron job" log file). 
#
# History:
# Date     OPR      Who         Reason
# -------- -------- ----------  ---------------------------------------------
# 01/28/04 49925    Baum        Initial code
# 03/03/04 50521    Baum        Cleanup TRIGGER_ROOT_DIR subdirectories.
# 04/09/04 50936    Baum        Cleanup any virtual files by a query UNION in
#                               the get_file_list() subroutine.
# 03/25/10 64273    MSwam       Replace ST_DBlib with DBI
# 06/28/10 65360    MSwam       use single quotes for SQLServer
# 07/29/10 64432    Gardner/MSwam   adjust *= to LEFT OUTER JOIN
# 09/18/12 72328    Sherbert    get rid of DSQUERY
#----------------------------------------------------------------------------
# set up external routines
unshift @INC,(split /:/, $ENV{PATH});
require 'printmsg.pl';       # prints a formatted status message
require 'do_dbi_pkg.pl'; # run queries that return records

    # specify exit status values understood by the pipeline's XPOLL poller

    $OSF_FAILURE =      7;   # exit status for XPOLL
    $OSF_SUCCESS =      9;   # exit status for XPOLL
    
    # other constants (script-style boolean flags)
    $true = 1;
    $false = 0;

    # check mode of operation: EVENT_TYPE set => pipeline mode (under XPOLL),
    # EVENT_TYPE unset => interactive mode driven by command-line arguments
    $event_type = $ENV{"EVENT_TYPE"};
    $num_args = scalar @ARGV;
    
    if (!defined($event_type)) {
       # No EVENT_TYPE => interactive mode; the argument count selects the case.
       $interactive_mode = $true;
       if ($num_args == 1) {
          # pipe id only: timed cleanup of datasets with expired delays
          $process_mode = "TIME";
          PrintMsg("I",
	     "Cleaning up datasets whose deletion delays have expired.");
       } elsif ($num_args == 2) {
          # pipe id + install request id: unconditional dataset deletion
	  $install_id = $ARGV[1];
          $process_mode = "DATASET";
	  PrintMsg("I","Cleaning up dataset for install id $install_id.");
       } elsif ($num_args == 3) {
          # pipe id + group name + group data id: delete the whole group
          $process_mode = "GROUP";
	  $group_name = $ARGV[1];
	  $data_id = $ARGV[2];
	  PrintMsg("I",
	     "Cleaning up all datasets of request $group_name $data_id");
       } else {
           PrintMsg("E","Invalid number of arguments = $num_args");
	   PrintMsg("I",
              "NOTE: pipe ID is exactly four characters and it must match \n".
              "  the value of INGEST_PIPE_ID in the path file of the pipeline\n".
              "  being cleaned up. The value appears in all the Install IDs.");
	   PrintMsg("I",
              "Usage: \n".
              "  1 arg  - pipe ID - for delayed deletion,\n".
              "  2 args - pipe ID and request ID - for dataset deletion,\n".
              "  3 args - pipe ID, group_name and data_id - for request ".
              "  deletion."); 
	   ErrorExit("Aborted. Try again with arguments.");
       }
       $pipe_id = $ARGV[0];
       # validate size of pipe_id (ingest pipeline ids are always 4 characters)
       if ( length($pipe_id) != 4) {
           ErrorExit("Arg 1: INGEST_PIPE_ID is not 4 characters.");
       }
    } else {
       # EVENT_TYPE present => running in the pipeline under XPOLL control
       $interactive_mode = $false;
       $pipe_id      = $ENV{"INGEST_PIPE_ID"};
    
       if ($event_type eq "EVENT_OSF_EVENT") {
          $process_mode = "DATASET";
	  $install_id = $ENV{"OSF_DATASET"};
	  $data_id = $ENV{"OSF_DATA_ID"};
	  
	  # check if this is a group request OSF
	  if ($data_id ne uc($data_id) ) { # not all uppercase is group data id
             if ($data_id eq lc($data_id)) {
                # all-lowercase data id: nothing to clean for this OSF
	        PrintMsg("I","No cleaning needed for $install_id $data_id.");
	        exit ( $OSF_SUCCESS);
             } else {
                # mixed-case data id: cleanup trigger directories, immediately
                $trigger_root_dir = $ENV{"TRIGGER_ROOT_DIR"};
                if (!defined($trigger_root_dir)) {
                   PrintMsg("E","Must define ENV: TRIGGER_ROOT_DIR");
                   exit ( $OSF_FAILURE);
                }
                # glob in scalar context returns the first matching file name
                $trigger_name = glob ($trigger_root_dir."/*/".$install_id.".*"); 
	        PrintMsg("I","Cleanup trigger file $trigger_name");
                $unlink_cnt = unlink ( $trigger_name);
	        PrintMsg("I","$unlink_cnt files deleted");
	        exit ( $OSF_SUCCESS);
             }
	  }
	  PrintMsg("I","Checking install request $install_id");
       } elsif ($event_type eq "EVENT_TIME_EVENT") {
          $process_mode = "TIME";
       } else {
          ErrorExit( "Unsupported EVENT_TYPE = $event_type.");
       }
    }
    # Gather the ENV variables used by both pipeline and interactive runs.
    $ARCH_SERVER     = $ENV{"ARCH_SERVER"};
    $ARCH_DB         = $ENV{"ARCH_DB"};
    $ok_to_update_db = $ENV{"OK_TO_UPDATE_DATABASE"};
    $problem_dir     = $ENV{"PROBLEM_LOG_DIR"};
    $problem_delay   = $ENV{"PROBLEM_LOG_DELETE_DELAY"};
    $nsa_path_root   = $ENV{"NSA_PATH_ROOT"};
           
    # Verify ENV variables used by both pipeline and interactive jobs

    if (!defined($ARCH_SERVER) || !defined($ARCH_DB)) {
       ErrorExit("Missing ENV variables: ARCH_SERVER or ARCH_DB.");
    }
    if ($interactive_mode) {
       # check optional ENV variables: problem-log cleanup needs both the
       # directory and the delay; NSA cleanup needs the root path
       if (defined($problem_dir) || defined($problem_delay) ) {
          if (defined($problem_dir) && defined($problem_delay) ) {
             $check_problem_dir = $true;
	  } else {
	     ErrorExit("The ENV variables PROBLEM_LOG_DIR and ".
	        "PROBLEM_LOG_DELETE_DELAY are not both defined."); 
	  }   
       } else {
          $check_problem_dir = $false;
       }
       if (defined($nsa_path_root)) {
          $check_nsa_links = $true;   
       } else {
          $check_nsa_links = $false;  
       }
    } else {
       # Verify resource file variables for pipeline mode - all are required

       if (!defined($ok_to_update_db) || !defined($problem_dir) || 
           !defined($problem_delay) || !defined($nsa_path_root) ||
           !defined($pipe_id) )
       { 
           PrintMsg("E","Missing ENV variables: OK_TO_UPDATE_DATABASE, ".
	      "PROBLEM_LOG_DIR, PROBLEM_LOG_DELETE_DELAY, ...");
           ErrorExit(" NSA_PATH_ROOT, or INGEST_PIPE_ID.");
       }
       if ($ok_to_update_db ne "TRUE") {
       	   ErrorExit("ENV variable OK_TO_UPDATE_DATABASE not set to TRUE.");
       }
       $check_problem_dir = $true; # always for pipeline mode
       $check_nsa_links = $true;   # always for pipeline mode
    }
    # the directories must actually exist when the checks are enabled
    if ($check_problem_dir) {
       if ( !-d $problem_dir) {
           ErrorExit("ENV variable PROBLEM_LOG_DIR is not a directory.");
       }
    }          
    if ($check_nsa_links) {
       if ( !-d $nsa_path_root) {
           ErrorExit("ENV variable NSA_PATH_ROOT is not a directory.");
       }
    }          
    # end of parameter validation - start real work
    
    # format current date (yyyymmdd) for future comparisons with directory names
    $current_day = format_current_day();

    # generate query mask that selects request ids used by this pipeline
    # (install request ids begin with "I" followed by the 4-char pipe id)
    $request_mask = "I".$pipe_id."%"; 
    
    # open database for queries
    $db = DoDBIopen( $ARCH_SERVER, $ARCH_DB, $OSF_FAILURE);
    
    %deleted_class_path = ();  # Empty hash table for path names accessed by
          # archive class values that have had deletions. For every
	  # entry in this hash, all the time-based subdirectories will be 
	  # searched to find empty directories. The hash structure guarantees
	  # that this search will only be done once even if many datasets 
	  # were deleted for a single archive class at a single time event.
     	      
    if ($process_mode eq "TIME") {
        delete_expired_datasets();
	
	# problem logs are only purged when the dir/delay pair is configured
	if ($check_problem_dir) {
	   delete_problem_logs();
	}
    } elsif ($process_mode eq "DATASET") {
        # pipeline OSF events must pass the delay check; interactive
        # dataset requests are deleted unconditionally
        if ($interactive_mode || ready_for_deletion($install_id)) {
           delete_dataset($install_id);
        }
    } else {   # $process_mode must be GROUP
        delete_group();
    }
    # end of all queries
    DoDBIclose($db);

    if (scalar %deleted_class_path) {  # test if hash table not empty
	delete_empty_directories();
    }
    exit( $OSF_SUCCESS);
#----------------------------------------------------------------------------
sub format_current_day {
    # Build the current local date as a yyyymmdd string for later lexical
    # comparison against the date-named archive subdirectories.
    # NOTE: assigns the package global $current_day (and returns the same
    # value), matching this script's global-variable style.
    my ($mday, $mon, $year) = (localtime)[3, 4, 5];
    $current_day = sprintf("%4d%02d%02d", $year + 1900, $mon + 1, $mday);
}
#----------------------------------------------------------------------------
sub delete_expired_datasets {
    # Find every dataset belonging to this pipeline (via $request_mask)
    # whose deletion delay has expired and remove it, files and database
    # records alike, through delete_dataset().
    #
    # The datediff/dateadd expression selects records whose
    # ids_receipt_date plus ids_clean_delay_days falls within an hour of
    # the current time (or earlier), so a dataset becomes deletable at the
    # first time event after its delay elapses.

    my $query = <<"EOQ";       
SELECT ids_ins_request_id 
FROM ingest_data_set_info
WHERE ids_install_flag != 'N' and ids_ins_request_id like '$request_mask' and
    (datediff(hh, 
             dateadd( dy, ids_clean_delay_days, ids_receipt_date),
	     getdate()) 
    > -1)
EOQ

    my @expired;   # request ids ready for deletion

    # collect all rows first so the statement handle is finished before
    # delete_dataset() issues its own queries
    $sth = DoDBIexecute( $db, $query);
    while ( ( @ds_record ) = DoDBIfetch( $db, $query, $sth) ) {
        push @expired, $ds_record[0];
    }

    if (!@expired) {
        PrintMsg("I","No datasets have expired cleanup delays");
        return;
    }

    my $ds_count = scalar @expired;
    PrintMsg("I","Found $ds_count datasets with expired cleanup delays.");
    delete_dataset($_) for @expired;
}
#----------------------------------------------------------------------------
sub delete_problem_logs {  # no args
    # Remove problem-log files whose age in days since last modification
    # (the -M operator, relative to script start time) exceeds the global
    # $problem_delay. Reports how many files were actually removed.
    # NOTE(review): assumes $problem_dir ends with "/" (like the NSA root
    # path used in delete_nsa_dir) -- confirm against the resource file.
    
    my @files = glob( $problem_dir."*"); # get directory contents
    my $delete_count = 0;
    my $problem_file;
       
    if (scalar @files) {
       foreach $problem_file (@files) {
	  if ( (-M $problem_file) > $problem_delay ) {
	     # unlink returns the number of files removed; count only real
	     # deletions (the old "+= 1" over-reported when unlink failed)
	     $delete_count += unlink $problem_file;
	  }
       }
    } else {
       PrintMsg("I", "No problem logs found in directory $problem_dir");
    }
    if ($delete_count) {
       PrintMsg("I","Deleting problem logs not modified since $problem_delay ".
          "days ago.");
       PrintMsg("I", "$delete_count old log files removed from directory ".
          "$problem_dir");
    }
}
#----------------------------------------------------------------------------
sub ready_for_deletion {  # one argument - install request name
    # Decide whether an install request may be deleted right now.
    # A zero delay from get_delay_days() (meaning no ingest_cleanup record
    # exists for the archive class) allows immediate deletion; otherwise
    # the delay is copied into ids_clean_delay_days so a later timed event
    # can delete the dataset. Returns the scalar $true or $false.
    
    my ($request_name) = @_;    
    my ($archive_class, $clean_delay) = get_delay_days( $request_name);

    if (!$clean_delay) {
        PrintMsg("I","No delay time defined for archive class $archive_class");
        return $true;
    }

    # record the delay so the time poller can delete this dataset later
    update_clean_delay( $request_name, $clean_delay);
    return $false;
}
#----------------------------------------------------------------------------
sub get_delay_days {  # one argument -- request name
    # Look up the archive class and clean-delay days for one request.
    # The LEFT OUTER JOIN with isnull() yields a delay of 0 when no
    # ingest_cleanup record exists for the request's archive class.
    # Returns (archive_class, clean_delay_days); aborts if the request
    # has no ingest_data_set_info row at all.
    
    my ($request_name) = @_;
    my $query = <<"EOQ";       
SELECT ids_archive_class, isnull(icl_clean_delay_days, 0) clean_delay_days
FROM ingest_data_set_info a
LEFT OUTER JOIN ingest_cleanup b
ON a.ids_archive_class = b.icl_archive_class
WHERE a.ids_ins_request_id = '$request_name' 
EOQ

    my @row = DoDBIselect( $db, $query);
    ErrorExit("Cannot access ingest_data_set_info for $request_name.")
        unless defined $row[0];
    return @row;
}
#----------------------------------------------------------------------------
sub  update_clean_delay {   # two arguments -- request name, delay days 
    # Copy the archive-class clean delay into the dataset's own
    # ids_clean_delay_days so the time poller can delete it once the
    # delay has elapsed. Aborts if no row was updated.
    my ($request_name, $clean_delay) = @_;

    PrintMsg("I","Updating ingest_data_set_info.ids_clean_delay_days to ".
       "$clean_delay.");

    my $query = <<EOQ;
UPDATE ingest_data_set_info
SET ids_clean_delay_days = $clean_delay
WHERE ids_ins_request_id = '$request_name' 
EOQ

    ErrorExit("Cannot update ingest_data_set_info.")
        if DoDBI($db, $query) == 0;
}
#----------------------------------------------------------------------------
sub delete_dataset {   # one argument - install request name
    # Delete one dataset: the leftover NSA directory (when enabled), the
    # archived files listed in ingest_files/archive_files, and finally the
    # ingest database records for this request. Also records the dataset's
    # archive-class path so empty dated directories can be pruned later.

    my ($request_name) = @_;
    
    # get dataset descriptors
    # NOTE: local() (dynamic scoping) is deliberate here -- get_file_list()
    # reads $gen_date, $arch_class, $dataset_name, $mission and $path as
    # globals while this sub is on the call stack.
    local ($gen_date,$arch_class,$dataset_name,$mission,$path,$filecnt) = 
       query_dataset_description($request_name);

    PrintMsg("I","$filecnt files expected in dataset for $request_name.");

    if ($check_nsa_links) {
       # delete NSA links directory that may be left after a response error
       delete_nsa_dir( $request_name);
    }
    # get list of files to delete from ingest_files table
    my @file_list = get_file_list();
    my $file_count = scalar @file_list;
    
    if ($file_count == 0) {
       PrintMsg("W",
          "No ingest_files records found for dataset $request_name.");
    } else {
       PrintMsg("I","Deleting dataset files from directory $path");
       PrintMsg("I","Queried $file_count ingest_files and virtual archive ".
          "files records for dataset");
       
       # warn (but continue) when the queried record count disagrees with
       # the ids_file_count stored in ingest_data_set_info
       if ($filecnt != $file_count) {
          PrintMsg("W",
             "Expected to get $filecnt ingest_files/archive_files records, ".
	     "actually got $file_count records");
       }
       # unlink returns the number of files actually removed
       my $unlink_cnt = unlink @file_list;
       
       if ($unlink_cnt != $file_count) {
          PrintMsg("W","Expected to delete $file_count files, ".
	     "actually deleted $unlink_cnt files");
       }
    }
    # add to %deleted_class_path hash table if necessary 
    add_class_path( $arch_class, $path);
    
    # delete request database records
    delete_ingest_records( $request_name); 
}
#----------------------------------------------------------------------------
sub query_dataset_description { # one argument - request name
    # Fetch the descriptor fields for one install request, in order:
    # (generation_date, archive_class, data_set_name, mission, path_name,
    # file_count). CONVERT(...,109) renders the generation date as
    # "mon dd yyyy hh:mi:ss:mmmAM (or PM)" so it can be re-used verbatim
    # in later queries. Aborts if the request has no record.
    
    my ($request_name) = @_;
    my $query = <<"EOQ";       
SELECT CONVERT(varchar,ids_generation_date,109) generation_date, 
   ids_archive_class, ids_data_set_name, ids_mission, ids_path_name, 
   ids_file_count 
FROM ingest_data_set_info
WHERE ids_ins_request_id = '$request_name'
EOQ

    my @descriptor = DoDBIselect( $db, $query);
    ErrorExit("Cannot get ingest_data_set_info data")
        unless defined $descriptor[0];
    return @descriptor;
}
#----------------------------------------------------------------------------
sub get_file_list {
    # Build the list of full path names to delete for the current dataset.
    # Reads the dynamically-scoped descriptor variables ($arch_class,
    # $dataset_name, $gen_date, $mission, $path) set via local() in
    # delete_dataset(). Real files come from ingest_files; the UNION adds
    # virtual files recorded in archive_files (afi_virtual = 'Y').

    my $query = <<"EOQ";       
SELECT ifi_file_name
FROM ingest_files
WHERE ifi_archive_class = '$arch_class' and ifi_data_set_name = '$dataset_name'
    and ifi_generation_date = '$gen_date' and ifi_mission='$mission'
UNION
SELECT afi_file_name
FROM archive_files
WHERE afi_archive_class = '$arch_class' and afi_data_set_name = '$dataset_name'
    and afi_generation_date = '$gen_date' and afi_mission='$mission' and
    afi_virtual = 'Y'
EOQ
    my @file_list = ();  # defined but empty list
    my $err_msg1 = "Cannot get first ingest_files record.";
    my $err_msg2 = "Cannot get next ingest_files record.";

    $sth = DoDBIexecute( $db, $query);
    while ( ( @file_record ) = DoDBIfetch( $db, $query, $sth) ) {
        # prepend the dataset directory to each bare file name
	push @file_list, $path.$file_record[0];
    }
    return @file_list;
}
#----------------------------------------------------------------------------
sub add_class_path {   # two arguments - archive class and file path name
    # Remember the parent directory of a deleted dataset's path so that
    # delete_empty_directories() can later prune empty date-named
    # subdirectories. Each archive class is recorded at most once per run.

    my ($arch_class, $path) = @_;

    return if defined $deleted_class_path{$arch_class};

    my @components = split( m"/", $path);
    pop @components;    # drop the trailing date-named directory
    my $arch_path = join("/", @components) . "/";

    PrintMsg("I","Adding $arch_path to list to check later for empty ".
       "subdirectories.");
    $deleted_class_path{$arch_class} = $arch_path;
}
#----------------------------------------------------------------------------
sub delete_ingest_records { # one argument -- request name
    # Remove the database records for one request: the ingest_files rows
    # (matched through a join with ingest_data_set_info), then the
    # ingest_data_set_info row itself. Aborts if the info row cannot be
    # deleted.
    
    my ($request_name) = @_;
    
    # first, delete any ingest_files records using join to ingest_data_set_info
    my $query = <<EOQ;
DELETE ingest_files 
FROM ingest_files, ingest_data_set_info
WHERE ids_ins_request_id = '$request_name' and
   ifi_archive_class = ids_archive_class and
   ifi_data_set_name = ids_data_set_name and
   ifi_generation_date = ids_generation_date and
   ifi_mission = ids_mission
EOQ

    my $file_del_cnt = DoDBI($db, $query);
    PrintMsg("I","$file_del_cnt ingest_files records deleted.");

    # second, delete the ingest_data_set_info records directly
    $query = <<EOQ;
DELETE ingest_data_set_info
WHERE ids_ins_request_id = '$request_name' 
EOQ

    if (DoDBI($db, $query)) {
        PrintMsg("I","Deleted ingest_data_set_info record.");
    } else {
        ErrorExit("Cannot delete ingest_data_set_info record for $request_name.");
    }
}
#----------------------------------------------------------------------------
sub delete_nsa_dir {  # one argument - request name
    # Remove the temporary NSA request directory left behind by a failed
    # installation. The directory holds NSA request/response files plus a
    # subdirectory of links (NSA naming convention) to the original files.
    # A successful install is cleaned up by the IngestResponse program, so
    # anything still present here is leftover from a failure.
    
    my ($request_name) = @_;
    my $req_directory = $nsa_path_root.$request_name."/";

    return unless -d $req_directory;   # nothing left over for this request

    PrintMsg("I","Cleaning up temporary directory $req_directory");

    my $delete_count = 0;
    foreach my $entry (glob( $req_directory."*")) {
        if (-d $entry) {
            # a links subdirectory - empty it, then remove it
            my $link_count = unlink glob( $entry."/*");
            if ($link_count) {
                PrintMsg("I",
                   "$link_count links removed from $entry directory");
            } else {
                PrintMsg("I", "No files found in $entry directory");
            }
            if (!rmdir( $entry)) {
                PrintMsg("W","Cannot remove directory: $entry");
            }
        } else {   # plain file - delete it and count real removals
            $delete_count += unlink $entry;
        }
    }
    PrintMsg("I", "$delete_count files removed from $req_directory");

    if (!rmdir( $req_directory)) {
        PrintMsg("W","Cannot remove directory: $req_directory");
    }
}
#----------------------------------------------------------------------------
sub delete_group {
    # Delete every dataset that belongs to the group identified by the
    # globals $group_name and $data_id, restricted to requests owned by
    # this pipeline (via $request_mask). Each dataset is removed with
    # delete_dataset().

    my $query = <<"EOQ";       
SELECT ids_ins_request_id 
FROM ingest_data_set_info
WHERE ids_group_name = '$group_name' and ids_group_data_id = '$data_id' 
   and ids_ins_request_id like '$request_mask'
EOQ

    my @requests;   # request ids belonging to the group

    # collect all rows first so the statement handle is finished before
    # delete_dataset() issues its own queries
    $sth = DoDBIexecute( $db, $query);
    while ( ( @ds_record ) = DoDBIfetch( $db, $query, $sth) ) {
        push @requests, $ds_record[0];
    }

    if (!@requests) {
        PrintMsg("I",
           "No datasets found for request group $group_name $data_id.");
        return;
    }

    my $ds_count = scalar @requests;
    PrintMsg("I","Found $ds_count datasets to be deleted");
    delete_dataset($_) for @requests;
}
#----------------------------------------------------------------------------
sub delete_empty_directories {
    # For every archive-class path that lost a dataset during this run
    # (recorded in %deleted_class_path), try to remove date-named
    # subdirectories that are now empty. Today's directory is never
    # removed; rmdir itself silently refuses non-empty entries, so no
    # emptiness pre-check is needed (it returns 1 only on success).

    foreach my $class_path (values %deleted_class_path) {
        foreach my $dated_dir (glob ($class_path."*")) {
            my @path_parts = split (m"/", $dated_dir);

            # only prune entries whose yyyymmdd name is before today
            next unless $current_day gt $path_parts[-1];

            if (rmdir ($dated_dir)) {
                PrintMsg("I","Deleted empty directory $dated_dir/");
            }
        }
    }
}
#----------------------------------------------------------------------------
sub ErrorExit {   # one argument - the error message
    # Close the $db database handle if one was opened, log the error
    # message, and terminate with the failure status expected by XPOLL.
    my ($msg) = @_;

    DoDBIclose($db) if defined $db;
    PrintMsg("E",$msg);
    exit ( $OSF_FAILURE);
}
