diff --git a/bin/12IDB/dm-12idb-daq b/bin/12IDB/dm-12idb-daq
new file mode 100755
index 0000000000000000000000000000000000000000..daed1d22c634a005247f6665f6b57c6587009ed1
--- /dev/null
+++ b/bin/12IDB/dm-12idb-daq
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+# Run command
+
+if [ -z "$DM_ROOT_DIR" ]; then
+    cd `dirname $0` && myDir=`pwd`
+    setupFile=$myDir/../../setup.sh
+    if [ ! -f "$setupFile" ]; then
+        echo "Cannot find setup file: $setupFile"
+        exit 1
+    fi
+    source "$setupFile" > /dev/null
+fi
+source $DM_ROOT_DIR/bin/dm_command_setup.sh
+
+eval "$DM_ROOT_DIR/src/python/dm/aps_beamline_tools/cli/daqCli.py $DM_COMMAND_ARGS"
+
+
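
A minimal usage sketch for the two new 12IDB wrappers (dm-12idb-upload, added below, is identical apart from the target CLI). The wrappers simply forward $DM_COMMAND_ARGS to the beamline-tools CLIs; the --experiment and --data-directory option names here are assumptions borrowed from the dm-upload invocations later in this patch:

    # hypothetical invocations; option names assumed from dm-upload usage
    dm-12idb-daq --experiment=myExperiment --data-directory=/home/dm_idb/test
    dm-12idb-upload --experiment=myExperiment --data-directory=/home/dm_idb/test
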
diff --git a/bin/12IDB/dm-12idb-upload b/bin/12IDB/dm-12idb-upload
new file mode 100755
index 0000000000000000000000000000000000000000..9f753a812397f21b40fb9e9308449d903925fdf7
--- /dev/null
+++ b/bin/12IDB/dm-12idb-upload
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+# Run command
+
+if [ -z "$DM_ROOT_DIR" ]; then
+    cd `dirname $0` && myDir=`pwd`
+    setupFile=$myDir/../../setup.sh
+    if [ ! -f "$setupFile" ]; then
+        echo "Cannot find setup file: $setupFile"
+        exit 1
+    fi
+    source "$setupFile" > /dev/null
+fi
+source $DM_ROOT_DIR/bin/dm_command_setup.sh
+
+eval "$DM_ROOT_DIR/src/python/dm/aps_beamline_tools/cli/uploadCli.py $DM_COMMAND_ARGS"
+
+
diff --git a/etc/daq-web-service.conf.template b/etc/daq-web-service.conf.template
index 0a6ea49dce549f6d179e74d1a6ebc9a72957adea..b3206abd6a1ebac7cda6a8009072f040fedd1c2d 100644
--- a/etc/daq-web-service.conf.template
+++ b/etc/daq-web-service.conf.template
@@ -19,13 +19,14 @@ principalAuthenticator2=LdapPasswordPrincipalAuthenticator(serverUrl='ldaps://ph
 
 [FileSystemObserver]
 # Minimum file processing delay since last update
-minFileProcessingDelayInSeconds=10
+minFileProcessingDelayInSeconds=30
 fileSystemEventTimeoutInSeconds=10
-fileSystemObserverAgent=dm.daq_web_service.service.impl.watchdogFileSystemObserverAgent.WatchdogFileSystemObserverAgent()
+#fileSystemObserverAgent=dm.daq_web_service.service.impl.watchdogFileSystemObserverAgent.WatchdogFileSystemObserverAgent()
 #fileSystemObserverAgent=dm.daq_web_service.service.impl.ftpFileSystemObserverAgent.FtpFileSystemObserverAgent('dmdaq', 2811)
+fileSystemObserverAgent=dm.daq_web_service.service.impl.pollingFileSystemObserverAgent.PollingFileSystemObserverAgent(pollingPeriod=30)
 
 [FileProcessingManager]
-numberOfProcessingThreads=5
+numberOfProcessingThreads=10
 defaultNumberOfRetries=1
 defaultRetryWaitPeriodInSeconds=60
 statUtility=dm.common.utility.fileUtility.FileUtility()
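
With the polling observer agent, the worst-case detection latency is roughly pollingPeriod plus minFileProcessingDelayInSeconds, i.e. about 60 seconds with these values. A condensed restatement of the effective observer settings after this change:

    [FileSystemObserver]
    # a file must be idle this long since its last update before it is processed
    minFileProcessingDelayInSeconds=30
    fileSystemObserverAgent=dm.daq_web_service.service.impl.pollingFileSystemObserverAgent.PollingFileSystemObserverAgent(pollingPeriod=30)
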
diff --git a/etc/init.d/dm-ds-services b/etc/init.d/dm-ds-services
index 51d71fee22c0a868263cd0993a359908c2f65f00..e3fe9847ab0c6ecf68b1eb89df32746c9f59294b 100755
--- a/etc/init.d/dm-ds-services
+++ b/etc/init.d/dm-ds-services
@@ -8,8 +8,8 @@ MY_FILE=`readlink -f $0` && MY_DIR=`dirname $MY_FILE`
 # The list below defines order of starting/stopping services
 #startList="postgresql glassfish ds-web-service"
 #stopList="ds-web-service glassfish postgresql"
-startList="glassfish ds-web-service aps-db-web-service"
-stopList="aps-db-web-service ds-web-service glassfish"
+startList="payara ds-web-service aps-db-web-service"
+stopList="aps-db-web-service ds-web-service payara"
 restartList=$startList
 statusList=$startList
 
diff --git a/etc/init.d/dm-payara b/etc/init.d/dm-payara
new file mode 100755
index 0000000000000000000000000000000000000000..947460e30f8c561d67dd8654d2ff92fccddbbf27
--- /dev/null
+++ b/etc/init.d/dm-payara
@@ -0,0 +1,104 @@
+#! /bin/sh
+### BEGIN INIT INFO
+# Provides:          payara
+# Required-Start:    $remote_fs $network $syslog
+# Required-Stop:     $remote_fs $network $syslog
+# Default-Start:     2 3 4 5
+# Default-Stop:      0 1 6
+# Short-Description: Starts Payara
+# Description:       Starts Payara application server
+### END INIT INFO
+
+# Source function library.
+. /etc/rc.d/init.d/functions
+
+# Source networking configuration.
+. /etc/sysconfig/network
+
+# Set root to default if needed.
+#MY_DIR=`dirname $0` && cd $MY_DIR && MY_DIR=`pwd`
+MY_FILE=`readlink -f $0` && MY_DIR=`dirname $MY_FILE`
+if [ -z "${DM_ROOT_DIR}" ]; then
+    DM_ROOT_DIR=$MY_DIR/../..
+fi
+        
+# Source environment file.
+DM_USER=`id -nu`
+DM_HOST=`hostname -s`
+DM_ENV_FILE=${DM_ROOT_DIR}/setup.sh
+if [ ! -f ${DM_ENV_FILE} ]; then
+    echo "Environment file ${DM_ENV_FILE} does not exist." 
+    exit 2
+fi
+. ${DM_ENV_FILE} > /dev/null
+
+export AS_JAVA=$DM_OPT_DIR/java/$DM_HOST_ARCH
+DM_PAYARA_DIR=$DM_OPT_DIR/payara
+DERBY_DIR=$DM_OPT_DIR/payara/javadb/bin
+
+DM_DAEMON_NAME=payara
+DM_DAEMON_CONTROL_CMD=$DM_PAYARA_DIR/bin/asadmin
+DM_DAEMON_START_ARGS="start-domain domain1"
+DM_DAEMON_STOP_ARGS="stop-domain domain1"
+DM_DAEMON_STATUS_CMDS="uptime list-domains list-applications list-jdbc-resources"
+
+start() {
+    echo -n $"Starting ${DM_DAEMON_NAME}: "       
+    # Check if we're a privileged user
+    if [ `id -u` = 0 -a "${DM_USER}" != "root" ]; then
+        su -m -c "${DM_DAEMON_CONTROL_CMD} ${DM_DAEMON_START_ARGS}" ${DM_USER}
+    else
+        ${DM_DAEMON_CONTROL_CMD} ${DM_DAEMON_START_ARGS}
+    fi
+    RETVAL=$?
+    [ $RETVAL -eq 0 ] && success $"${DM_DAEMON_NAME} startup" || failure $"${DM_DAEMON_NAME} startup"
+    echo
+}
+
+stop() {
+    echo -n $"Stopping ${DM_DAEMON_NAME}: "       
+    # Check if we're a privileged user
+    if [ `id -u` = 0 -a "${DM_USER}" != "root" ]; then
+        su -m -c "${DM_DAEMON_CONTROL_CMD} ${DM_DAEMON_STOP_ARGS}" ${DM_USER}
+    else
+        ${DM_DAEMON_CONTROL_CMD} ${DM_DAEMON_STOP_ARGS}
+    fi
+    RETVAL=$?
+    [ $RETVAL -eq 0 ] && success $"${DM_DAEMON_NAME} shutdown" || failure $"${DM_DAEMON_NAME} shutdown"
+    echo
+}
+
+status() {
+    # Check if we're a privileged user
+    if [ `id -u` = 0 -a "${DM_USER}" != "root" ]; then
+        for cmd in ${DM_DAEMON_STATUS_CMDS}; do
+            su -m -c "${DM_DAEMON_CONTROL_CMD} ${cmd}" ${DM_USER}
+        done
+    else
+        for cmd in ${DM_DAEMON_STATUS_CMDS}; do
+            ${DM_DAEMON_CONTROL_CMD} ${cmd}
+        done
+    fi
+}
+
+case "$1" in
+    start)
+        start
+    ;;
+    stop)
+        stop
+    ;;
+    restart)
+        stop
+        start
+    ;;
+    status)
+        status
+    ;;
+    *)
+        echo "Usage: $0 {start|stop|restart|status}"
+        exit 1
+    ;;
+esac
+
+
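
A short usage sketch for the new init script: when run as root it drops privileges to the DM service account via su, otherwise it runs asadmin directly, and restart is implemented as stop followed by start:

    /etc/init.d/dm-payara start
    /etc/init.d/dm-payara status   # runs asadmin uptime, list-domains, list-applications, list-jdbc-resources
    /etc/init.d/dm-payara stop
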
diff --git a/sbin/11IDB/dm_11idb_system_test_daq.sh b/sbin/11IDB/dm_11idb_system_test_daq.sh
index 04e2b5fe71bc271869cf81308f4e6a1ed934eb90..664d6cb6d18f8c56cf630b1e8d46127d0a7096ad 100755
--- a/sbin/11IDB/dm_11idb_system_test_daq.sh
+++ b/sbin/11IDB/dm_11idb_system_test_daq.sh
@@ -1,6 +1,6 @@
 #!/bin/sh
 
 cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
-$DM_SBIN_DIR/dm_system_test_daq.sh /gdata/dm/test/11IDB dtn-int-01:/gdata/dm/test/11IDB
+$DM_SBIN_DIR/dm_system_test_daq.sh /home/dm_idb/test
 
 
diff --git a/sbin/11IDB/dm_11idb_system_test_upload.sh b/sbin/11IDB/dm_11idb_system_test_upload.sh
index b8b4b58338b10da9d25e34d23036973673b9da70..af73933125fc04464d128438bce0cddb2033fe31 100755
--- a/sbin/11IDB/dm_11idb_system_test_upload.sh
+++ b/sbin/11IDB/dm_11idb_system_test_upload.sh
@@ -1,6 +1,6 @@
 #!/bin/sh
 
 cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
-$DM_SBIN_DIR/dm_system_test_upload.sh /gdata/dm/test/11IDB dtn-int-01:/gdata/dm/test/11IDB
+$DM_SBIN_DIR/dm_system_test_upload.sh /home/dm_idb/test
 
 
diff --git a/sbin/11IDC/dm_11idc_system_test_daq.sh b/sbin/11IDC/dm_11idc_system_test_daq.sh
index b3ab1a4aebdcad38e53c98922ac6927d5dff05ba..f36c5d7e62b9dd2b28a2e6945393caf7066a1b75 100755
--- a/sbin/11IDC/dm_11idc_system_test_daq.sh
+++ b/sbin/11IDC/dm_11idc_system_test_daq.sh
@@ -1,5 +1,5 @@
 #!/bin/sh
 
 cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
-$DM_SBIN_DIR/dm_system_test_daq.sh /gdata/dm/test/11IDC dtn-int-01:/gdata/dm/test/11IDC
+$DM_SBIN_DIR/dm_system_test_daq.sh /home/dm_idc/test
 
diff --git a/sbin/11IDC/dm_11idc_system_test_upload.sh b/sbin/11IDC/dm_11idc_system_test_upload.sh
index 9e0462930b2a568ef0be5bd55516ce100b74971d..5ddcd19b7aab3fa0d033f57e76f8254b4ad3a3ed 100755
--- a/sbin/11IDC/dm_11idc_system_test_upload.sh
+++ b/sbin/11IDC/dm_11idc_system_test_upload.sh
@@ -1,5 +1,5 @@
 #!/bin/sh
 
 cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
-$DM_SBIN_DIR/dm_system_test_upload.sh /gdata/dm/test/11IDC dtn-int-01:/gdata/dm/test/11IDC
+$DM_SBIN_DIR/dm_system_test_upload.sh /home/dm_idc/test
 
diff --git a/sbin/12IDB/dm_12idb_system_test_daq.sh b/sbin/12IDB/dm_12idb_system_test_daq.sh
new file mode 100755
index 0000000000000000000000000000000000000000..664d6cb6d18f8c56cf630b1e8d46127d0a7096ad
--- /dev/null
+++ b/sbin/12IDB/dm_12idb_system_test_daq.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
+$DM_SBIN_DIR/dm_system_test_daq.sh /home/dm_idb/test
+
+
diff --git a/sbin/12IDB/dm_12idb_system_test_upload.sh b/sbin/12IDB/dm_12idb_system_test_upload.sh
new file mode 100755
index 0000000000000000000000000000000000000000..af73933125fc04464d128438bce0cddb2033fe31
--- /dev/null
+++ b/sbin/12IDB/dm_12idb_system_test_upload.sh
@@ -0,0 +1,6 @@
+#!/bin/sh
+
+cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
+$DM_SBIN_DIR/dm_system_test_upload.sh /home/dm_idb/test
+
+
diff --git a/sbin/17BM/dm_17bm_system_test_daq.sh b/sbin/17BM/dm_17bm_system_test_daq.sh
index 3454d6eb007a75ab1f70cc4060bbce5cf18669d9..7127a19a7ab246689148cf2471fae72774dd41f5 100755
--- a/sbin/17BM/dm_17bm_system_test_daq.sh
+++ b/sbin/17BM/dm_17bm_system_test_daq.sh
@@ -1,5 +1,5 @@
 #!/bin/sh
 
 cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
-$DM_SBIN_DIR/dm_system_test_daq.sh /gdata/dm/test/17BM dtn-int-01:/gdata/dm/test/17BM
+$DM_SBIN_DIR/dm_system_test_daq.sh /home/dm_bm/test
 
diff --git a/sbin/17BM/dm_17bm_system_test_upload.sh b/sbin/17BM/dm_17bm_system_test_upload.sh
index 0ce5a3b903bb7a1e0031ed2385f5552e339d52ce..467a9cdbdfe4713f6361a655c7702a6abedcac10 100755
--- a/sbin/17BM/dm_17bm_system_test_upload.sh
+++ b/sbin/17BM/dm_17bm_system_test_upload.sh
@@ -1,5 +1,5 @@
 #!/bin/sh
 
 cd `dirname $0`/.. && DM_SBIN_DIR=`pwd`
-$DM_SBIN_DIR/dm_system_test_upload.sh /gdata/dm/test/17BM dtn-int-01:/gdata/dm/test/17BM
+$DM_SBIN_DIR/dm_system_test_upload.sh /home/dm_bm/test
 
diff --git a/sbin/dm_backup.sh b/sbin/dm_backup.sh
index 7e55ee864775a474b6c75bb76482154a7b01ae9f..be2528173c80487fdae47102abca02421a6d1bf7 100755
--- a/sbin/dm_backup.sh
+++ b/sbin/dm_backup.sh
@@ -114,7 +114,7 @@ rm -f $processingFile
 
 # Backup web app
 #echo "Backing up $DM_DB_NAME web app"
-#rsync -arlvP $DM_SUPPORT_DIR/opt/glassfish/glassfish/domains/domain1/autodeploy/$DM_DB_NAME.war $DM_BACKUP_DIR
+#rsync -arlvP $DM_SUPPORT_DIR/opt/payara/glassfish/domains/domain1/autodeploy/$DM_DB_NAME.war $DM_BACKUP_DIR
 
 cleanup
 echo "Backup of DB $DM_DB_NAME is done."
diff --git a/sbin/dm_configure_web_portal.sh b/sbin/dm_configure_web_portal.sh
index fa25a14f04c2e1858278be2895befaceefe5d491..6665b06ed8bb5613d9c07859f012bf4559cb5492 100755
--- a/sbin/dm_configure_web_portal.sh
+++ b/sbin/dm_configure_web_portal.sh
@@ -42,10 +42,10 @@ DM_SYSTEM_PASSWORD_FILE=${DM_INSTALL_DIR}/etc/${DM_SYSTEM_USER}.system.passwd
 DM_HOST_ARCH=`uname | tr [A-Z] [a-z]`-`uname -m`
 
 JAVA_HOME=$DM_OPT_DIR/java/$DM_HOST_ARCH
-GLASSFISH_DIR=$DM_OPT_DIR/glassfish
+PAYARA_DIR=$DM_OPT_DIR/payara
 
 export AS_JAVA=$JAVA_HOME
-ASADMIN_CMD=$GLASSFISH_DIR/bin/asadmin 
+ASADMIN_CMD=$PAYARA_DIR/bin/asadmin 
 
 DM_DB_HOST=${DM_DB_HOST:=localhost}
 DM_DB_PORT=${DM_DB_PORT:=11136}
@@ -57,7 +57,7 @@ DM_DOMAIN=domain1
 
 # copy db driver
 echo "Copying db driver"
-rsync -ar $DM_ROOT_DIR/src/java/DmWebPortal/lib/postgresql*.jdbc41.jar $GLASSFISH_DIR/glassfish/domains/${DM_DOMAIN}/lib/ext
+rsync -ar $DM_ROOT_DIR/src/java/DmWebPortal/lib/postgresql*.jdbc41.jar $PAYARA_DIR/glassfish/domains/${DM_DOMAIN}/lib/ext
 
 # Check password from file
 passwordFile=$DM_INSTALL_DIR/etc/$DM_DB_NAME.db.passwd
@@ -66,7 +66,7 @@ if [ -f $passwordFile ]; then
 fi
 
 # restart server
-echo "Restarting glassfish"
+echo "Restarting payara"
 $ASADMIN_CMD stop-domain ${DM_DOMAIN}
 $ASADMIN_CMD start-domain ${DM_DOMAIN}
 
@@ -78,6 +78,9 @@ $ASADMIN_CMD create-jdbc-connection-pool --datasourceclassname org.postgresql.ds
 echo "Creating JDBC resource $DM_DATA_SOURCE"
 $ASADMIN_CMD create-jdbc-resource --connectionpoolid ${DM_DB_POOL} ${DM_DATA_SOURCE}
 
+# Remove insecure HTTP access on port 8080
+$ASADMIN_CMD delete-http-listener "http-listener-1"
+
 # test the connection settings 
 echo "Testing connection"
 $ASADMIN_CMD ping-connection-pool $DM_DB_POOL || exit 1
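
Deleting http-listener-1 leaves only the secure listener configured. A hedged sketch for verifying the change, or restoring plain HTTP on port 8080 if it is ever needed again (standard asadmin subcommands; option names should be double-checked against the installed Payara version):

    $ASADMIN_CMD list-http-listeners
    # assumed restore command, not part of this script:
    $ASADMIN_CMD create-http-listener --listeneraddress 0.0.0.0 --listenerport 8080 --defaultvs server http-listener-1
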
diff --git a/sbin/dm_deploy_web_portal.sh b/sbin/dm_deploy_web_portal.sh
index 394b48759bfbd846b2b58b0135f494168080ba44..24f51257fc021afc9930b6d682ce8f7b988b6029 100755
--- a/sbin/dm_deploy_web_portal.sh
+++ b/sbin/dm_deploy_web_portal.sh
@@ -42,8 +42,8 @@ DM_HOST_ARCH=`uname | tr [A-Z] [a-z]`-`uname -m`
 DM_HOSTNAME=`hostname -f`
 DM_DATE=`date +%Y.%m.%d`
 DM_CONTEXT_ROOT=${DM_CONTEXT_ROOT:=dm}
-GLASSFISH_DIR=$DM_OPT_DIR/glassfish
-DM_DEPLOY_DIR=$GLASSFISH_DIR/glassfish/domains/domain1/autodeploy
+PAYARA_DIR=$DM_OPT_DIR/payara
+DM_DEPLOY_DIR=$PAYARA_DIR/glassfish/domains/domain1/autodeploy
 DM_PORTAL_DIST_DIR=$DM_ROOT_DIR/src/java/DmWebPortal/dist
 DM_BASE_DIST_DIR=$DM_ROOT_DIR/src/java/DmBase/dist
 DM_API_DIST_DIR=$DM_ROOT_DIR/src/java/DmApi/dist
@@ -116,7 +116,7 @@ cd ../..
 jar cf ../$DM_WAR_FILE *
 
 export AS_JAVA=$JAVA_HOME
-ASADMIN_CMD=$GLASSFISH_DIR/bin/asadmin
+ASADMIN_CMD=$PAYARA_DIR/bin/asadmin
 
 # copy war file
 echo "Copying war file $DM_PORTAL_DIST_DIR/$DM_WAR_FILE to $DM_DEPLOY_DIR"
diff --git a/sbin/dm_remove_catalog_duplicates.sh b/sbin/dm_remove_catalog_duplicates.sh
old mode 100644
new mode 100755
index 080599ced66e798ea1850e79a543cca1cf5d604e..572e17e108ecc227a689499178f41b152353e3ad
--- a/sbin/dm_remove_catalog_duplicates.sh
+++ b/sbin/dm_remove_catalog_duplicates.sh
@@ -1,8 +1,92 @@
-e=Cycle2018_2_Axalta; for f in `cat $e.diff`; do echo ; echo
-"*******************************" ; echo $f; efp=`echo $f | sed
-'s?Cycle2018_2_Axalta/??'`; echo $efp; dm-stat-file --experiment=$e
---relative-path=$efp; echo ; echo LIST; dm-list-experiment-files
---experiment=$e experimentFilePath:$efp; echo ; read -p "Enter file id
-to be deleted: " fileId; dm-delete-file --keep-in-storage
---experiment=$e --file-id=$fileId; done
+#!/bin/bash
 
+# Script for removing duplicate/bad entries from DM Catalog
+
+usage() {
+    echo "Usage:"
+    echo "    $0 <experiment name> [<work dir>]"
+    echo ""
+}
+EXPERIMENT_NAME=$1
+if [ -z "$EXPERIMENT_NAME" ]; then
+    usage
+    exit 1
+fi
+WORK_DIR=${2:-/tmp}
+mkdir -p $WORK_DIR || exit 1
+echo "Using work directory $WORK_DIR for experiment $EXPERIMENT_NAME"
+
+FULL_LIST_FILE=`realpath $WORK_DIR/$EXPERIMENT_NAME.all`
+UNIQUE_LIST_FILE=`realpath $WORK_DIR/$EXPERIMENT_NAME.unique`
+STAT_FILE=`realpath $WORK_DIR/$EXPERIMENT_NAME.stat`
+GOOD_LIST_FILE=`realpath $WORK_DIR/$EXPERIMENT_NAME.good`
+DELETED_LIST_FILE=`realpath $WORK_DIR/$EXPERIMENT_NAME.deleted`
+BAD_LIST_FILE=`realpath $WORK_DIR/$EXPERIMENT_NAME.bad`
+DUPLICATE_LIST_FILE=`realpath $WORK_DIR/$EXPERIMENT_NAME.duplicate`
+rm -f $GOOD_LIST_FILE $BAD_LIST_FILE $DELETED_LIST_FILE $DUPLICATE_LIST_FILE
+touch $GOOD_LIST_FILE $BAD_LIST_FILE $DELETED_LIST_FILE $DUPLICATE_LIST_FILE
+
+echo "Retrieving list of all catalog entries"
+dm-list-experiment-files --experiment=$EXPERIMENT_NAME --display-keys=id,experimentFilePath,fileSize,md5Sum > $FULL_LIST_FILE || exit 1
+echo "Retrieving list of unique file paths"
+dm-list-experiment-files --experiment=$EXPERIMENT_NAME --display-keys=experimentFilePath | sort -u | sed 's?experimentFilePath=??' > $UNIQUE_LIST_FILE || exit 1
+
+nUnique=`wc -l $UNIQUE_LIST_FILE | awk '{print $1}'`
+nFiles=`wc -l $FULL_LIST_FILE | awk '{print $1}'`
+echo "Total number of catalog entries: $nFiles"
+echo "Total number of unique files: $nUnique"
+
+storageHost=`dm-get-experiment --experiment=$EXPERIMENT_NAME --display-keys=storageHost | sed 's?storageHost=??'`
+storageDirectory=`dm-get-experiment --experiment=$EXPERIMENT_NAME --display-keys=storageDirectory | sed 's?storageDirectory=??'`
+nFilesInStorage=`ssh $storageHost "find $storageDirectory -type f | wc -l"`
+echo "Total number of files in storage: $nFilesInStorage $storageDirectory"
+echo
+
+OLD_IFS=$IFS
+IFS=
+fCount=0
+while read -r f; do
+    fCount=`expr $fCount + 1`
+    IFS=$OLD_IFS
+    f=`echo "$f" | sed 's/^[[:blank:]]*//;s/[[:blank:]]*$//'`
+    echo "**********"
+    echo "Working on file: \"$f\" ($fCount / $nUnique)"
+    dm-stat-file --experiment=$EXPERIMENT_NAME --relative-path="$f" --md5sum > $STAT_FILE || exit 1
+    fileSize=`sed -e '1,/STAT INFO/d' $STAT_FILE | awk '{print $2}' | sed 's?fileSize=??'`
+    md5Sum=`sed -e '1,/STAT INFO/d' $STAT_FILE | awk '{print $3}' | sed 's?md5Sum=??'`
+    echo "File size: $fileSize, md5Sum: $md5Sum"
+    nEntries=`grep -cF "experimentFilePath=$f " $FULL_LIST_FILE`
+    echo "There are $nEntries catalog entries"
+    goodId=""
+    idList=`grep -F "experimentFilePath=$f " $FULL_LIST_FILE | awk '{print $1}'`
+    for id in $idList; do
+        catFileSize=`grep "^$id " $FULL_LIST_FILE | awk '{print $3}' | sed 's?fileSize=??'`
+        catMd5Sum=`grep "^$id " $FULL_LIST_FILE | awk '{print $4}' | sed 's?md5Sum=??'`
+        if [ "$catFileSize" = "$fileSize" -a "$catMd5Sum" = "$md5Sum" ]; then
+            echo "Catalog info is correct for $f, $id"
+            if [ "x$goodId" = "x" ]; then
+                echo "File $id is marked as good"
+                echo "$f $id" >> $GOOD_LIST_FILE
+                goodId=$id
+            else
+                echo "File $id is marked as duplicate of $goodId"
+                echo "$f $id" >> $DUPLICATE_LIST_FILE
+                echo dm-delete-file --keep-in-storage --force --experiment=$EXPERIMENT_NAME --file-id=$id >> $DELETED_LIST_FILE
+            fi
+        else
+            echo "Catalog info is not correct for $f, file size: $catFileSize, md5Sum: $catMd5Sum"
+            echo "$f $id" >> $BAD_LIST_FILE
+            echo dm-delete-file --keep-in-storage --force --experiment=$EXPERIMENT_NAME --file-id=$id >> $DELETED_LIST_FILE
+        fi
+    done
+done < "$UNIQUE_LIST_FILE"
+
+echo
+echo "**********"
+echo
+echo "Total number of files in storage  : $nFilesInStorage $storageDirectory"
+echo "Number of all catalog entries     : `wc -l $FULL_LIST_FILE`"
+echo "Number of unique catalog entries  : `wc -l $UNIQUE_LIST_FILE`"
+echo "Number of good catalog entries    : `wc -l $GOOD_LIST_FILE`"
+echo "Number of bad catalog entries     : `wc -l $BAD_LIST_FILE`"
+echo "Number of deleted catalog entries : `wc -l $DELETED_LIST_FILE`"
diff --git a/sbin/dm_station_upgrade_stage4.sh b/sbin/dm_station_upgrade_stage4.sh
index 9a2955b914394c65e77a8616df577de95da3c9ae..fb0f7c542400664228e89b278abce11895285551 100755
--- a/sbin/dm_station_upgrade_stage4.sh
+++ b/sbin/dm_station_upgrade_stage4.sh
@@ -38,7 +38,7 @@ if [ -f $DM_UPLOAD_LIST_FILE ]; then
     while IFS='' read -r line || [[ -n "$line" ]]; do
         uploadArgs=`echo $line | sed 's?experimentName?--experiment?' | sed 's?dataDirectory?--data-directory?'`
         echo "Restarting upload: $uploadArgs"
-        dm-upload $uploadArgs || exit 1
+        dm-upload $uploadArgs --reprocess || exit 1
     done < "$DM_UPLOAD_LIST_FILE"
 else
     echo "There is no uploads file: $DM_UPLOAD_LIST_FILE"
diff --git a/sbin/dm_unconfigure_web_portal.sh b/sbin/dm_unconfigure_web_portal.sh
index de8836c9580d5060eba82d283a79bb5e02ab7f80..99d3ba7e4d98d9dea2ea8e4396fd70bd90339ac5 100755
--- a/sbin/dm_unconfigure_web_portal.sh
+++ b/sbin/dm_unconfigure_web_portal.sh
@@ -39,17 +39,17 @@ fi
 
 DM_HOST_ARCH=`uname | tr [A-Z] [a-z]`-`uname -m`
 JAVA_HOME=$DM_OPT_DIR/java/$DM_HOST_ARCH
-GLASSFISH_DIR=$DM_OPT_DIR/glassfish
+PAYARA_DIR=$DM_OPT_DIR/payara
 
 export AS_JAVA=$JAVA_HOME
-ASADMIN_CMD=$GLASSFISH_DIR/bin/asadmin 
+ASADMIN_CMD=$PAYARA_DIR/bin/asadmin 
 
 DM_DB_POOL=postgresql_${DM_DB_NAME}_DbPool
 DM_DATA_SOURCE=${DM_DB_NAME}_DataSource
 DM_DOMAIN=domain1
 
 # restart server
-echo "Restarting glassfish"
+echo "Restarting payara"
 $ASADMIN_CMD stop-domain ${DM_DOMAIN}
 $ASADMIN_CMD start-domain ${DM_DOMAIN}
 
diff --git a/sbin/dm_undeploy_web_portal.sh b/sbin/dm_undeploy_web_portal.sh
index cf7f9069f17a3e5f8f023cad5569ee43bf3480b3..1efdd8458b0dd835b4b31b30ec79b77868c5980c 100755
--- a/sbin/dm_undeploy_web_portal.sh
+++ b/sbin/dm_undeploy_web_portal.sh
@@ -40,14 +40,14 @@ fi
 DM_HOST_ARCH=`uname | tr [A-Z] [a-z]`-`uname -m`
 DM_CONTEXT_ROOT=${DM_CONTEXT_ROOT:=dm}
 JAVA_HOME=$DM_OPT_DIR/java/$DM_HOST_ARCH
-GLASSFISH_DIR=$DM_OPT_DIR/glassfish
-DM_DEPLOY_DIR=$GLASSFISH_DIR/glassfish/domains/domain1/autodeploy
-DM_APP_DIR=$GLASSFISH_DIR/glassfish/domains/domain1/applications/$DM_CONTEXT_ROOT
+PAYARA_DIR=$DM_OPT_DIR/payara
+DM_DEPLOY_DIR=$PAYARA_DIR/glassfish/domains/domain1/autodeploy
+DM_APP_DIR=$PAYARA_DIR/glassfish/domains/domain1/applications/$DM_CONTEXT_ROOT
 DM_DIST_DIR=$DM_ROOT_DIR/src/java/DmWebPortal/dist
 DM_WAR_FILE=$DM_CONTEXT_ROOT.war
 
 export AS_JAVA=$JAVA_HOME
-ASADMIN_CMD=$GLASSFISH_DIR/bin/asadmin
+ASADMIN_CMD=$PAYARA_DIR/bin/asadmin
 
 # remove war file from autodeploy directory
 echo "Removing war file $DM_DEPLOY_DIR/$DM_WAR_FILE"
@@ -62,7 +62,7 @@ else
 fi
 
 # restart server
-echo "Restarting glassfish"
+echo "Restarting payara"
 $ASADMIN_CMD stop-domain ${DM_DOMAIN}
 $ASADMIN_CMD start-domain ${DM_DOMAIN}
 
diff --git a/setup.csh b/setup.csh
index 31559cec269b91e522fe28f18d68bb35016ce2ac..b8e5abd586fd96d392329b038bfb79e2729e575c 100644
--- a/setup.csh
+++ b/setup.csh
@@ -44,7 +44,7 @@ if ( ! -d $DM_SUPPORT_DIR ) then
     exit 1
 endif
 setenv DM_OPT_DIR $DM_SUPPORT_DIR/opt
-setenv DM_GLASSFISH_DIR $DM_OPT_DIR/glassfish
+setenv DM_GLASSFISH_DIR $DM_OPT_DIR/payara
 
 # Setup path and other variables
 setenv PATH ${DM_OPT_DIR}/node/bin:${PATH}
diff --git a/setup.sh b/setup.sh
index 99755495e427e48fcf7f81b3ca9ff031a1e1250a..6b09cf9ea215e4ae3ccac29f6dae702fabefb134 100644
--- a/setup.sh
+++ b/setup.sh
@@ -42,7 +42,7 @@ if [ ! -d $DM_SUPPORT_DIR ]; then
     return 1
 fi
 export DM_OPT_DIR=$DM_SUPPORT_DIR/opt
-export DM_GLASSFISH_DIR=$DM_OPT_DIR/glassfish
+export DM_GLASSFISH_DIR=$DM_OPT_DIR/payara
 
 
 # Add to path only if directory exists.
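
After this change a sourced environment points the (still glassfish-named) variable at the Payara tree; a quick sanity check:

    . ./setup.sh > /dev/null
    echo $DM_GLASSFISH_DIR   # expect $DM_SUPPORT_DIR/opt/payara
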
diff --git a/src/java/DmBase/nbproject/project.properties b/src/java/DmBase/nbproject/project.properties
index 253a8928becc64ab7e42fb0d420a4d69db8e08a6..123982548b0c5ad6603a0a8230d3e1cac91d99f9 100644
--- a/src/java/DmBase/nbproject/project.properties
+++ b/src/java/DmBase/nbproject/project.properties
@@ -32,12 +32,14 @@ excludes=
 file.reference.gson-2.3.1.jar=../lib/gson-2.3.1.jar
 file.reference.log4j-1.2.17.jar=../lib/log4j-1.2.17.jar
 file.reference.primefaces-6.0.jar=../lib/primefaces-6.0.jar
+file.reference.jaxb-osgi.jar=/local/DataManagement/support/opt/payara/glassfish/modules/jaxb-osgi.jar
 includes=**
 jar.compress=false
 javac.classpath=\
     ${file.reference.log4j-1.2.17.jar}:\
     ${file.reference.gson-2.3.1.jar}:\
-    ${file.reference.primefaces-6.0.jar}
+    ${file.reference.primefaces-6.0.jar}:\
+    ${file.reference.jaxb-osgi.jar}
 # Space-separated list of extra javac options
 javac.compilerargs=
 javac.deprecation=false
diff --git a/src/java/DmWebPortal/nbproject/project.properties b/src/java/DmWebPortal/nbproject/project.properties
index 8782235871851de23a029e7374f6baaee0636edb..4b807418c429ea163533a0e5ea9a4a730bc28735 100644
--- a/src/java/DmWebPortal/nbproject/project.properties
+++ b/src/java/DmWebPortal/nbproject/project.properties
@@ -43,7 +43,7 @@ j2ee.compile.on.save=true
 j2ee.copy.static.files.on.save=true
 j2ee.deploy.on.save=true
 j2ee.platform=1.7-web
-j2ee.platform.classpath=${j2ee.server.home}/modules/endorsed/javax.annotation-api.jar:${j2ee.server.home}/modules/endorsed/webservices-api-osgi.jar:${j2ee.server.home}/modules/endorsed/jaxb-api.jar:${j2ee.server.home}/modules/javax.enterprise.concurrent-api.jar:${j2ee.server.home}/modules/javax.security.auth.message-api.jar:${j2ee.server.home}/modules/javax.enterprise.concurrent.jar:${j2ee.server.home}/modules/javax.servlet.jsp.jstl.jar:${j2ee.server.home}/modules/javax.enterprise.deploy-api.jar:${j2ee.server.home}/modules/javax.xml.registry-api.jar:${j2ee.server.home}/modules/javax.servlet.jsp.jstl-api.jar:${j2ee.server.home}/modules/javax.transaction-api.jar:${j2ee.server.home}/modules/javax.management.j2ee-api.jar:${j2ee.server.home}/modules/javax.interceptor-api.jar:${j2ee.server.home}/modules/javax.servlet.jsp-api.jar:${j2ee.server.home}/modules/javax.security.jacc-api.jar:${j2ee.server.home}/modules/javax.websocket-api.jar:${j2ee.server.home}/modules/weld-osgi-bundle.jar:${j2ee.server.home}/modules/webservices-osgi.jar:${j2ee.server.home}/modules/javax.resource-api.jar:${j2ee.server.home}/modules/javax.batch-api.jar:${j2ee.server.home}/modules/javax.ws.rs-api.jar:${j2ee.server.home}/modules/bean-validator.jar:${j2ee.server.home}/modules/javax.xml.rpc-api.jar:${j2ee.server.home}/modules/javax.servlet-api.jar:${j2ee.server.home}/modules/javax.persistence.jar:${j2ee.server.home}/modules/javax.servlet.jsp.jar:${j2ee.server.home}/modules/javax.jms-api.jar:${j2ee.server.home}/modules/javax.ejb-api.jar:${j2ee.server.home}/modules/javax.inject.jar:${j2ee.server.home}/modules/javax.faces.jar:${j2ee.server.home}/modules/javax.mail.jar:${j2ee.server.home}/modules/javax.json.jar:${j2ee.server.home}/modules/jaxb-osgi.jar:${j2ee.server.home}/modules/javax.el.jar:${j2ee.server.home}/modules/cdi-api.jar:${j2ee.server.middleware}/mq/lib/jaxm-api.jar
+j2ee.platform.classpath=${j2ee.server.home}/modules/endorsed/javax.annotation-api.jar:${j2ee.server.home}/modules/endorsed/webservices-api-osgi.jar:${j2ee.server.home}/modules/endorsed/jaxb-api.jar:${j2ee.server.home}/modules/javax.enterprise.concurrent-api.jar:${j2ee.server.home}/modules/javax.security.auth.message-api.jar:${j2ee.server.home}/modules/javax.enterprise.concurrent.jar:${j2ee.server.home}/modules/javax.enterprise.deploy-api.jar:${j2ee.server.home}/modules/javax.servlet.jsp.jstl-api.jar:${j2ee.server.home}/modules/javax.management.j2ee-api.jar:${j2ee.server.home}/modules/javax.security.jacc-api.jar:${j2ee.server.home}/modules/javax.servlet.jsp.jstl.jar:${j2ee.server.home}/modules/javax.xml.registry-api.jar:${j2ee.server.home}/modules/javax.transaction-api.jar:${j2ee.server.home}/modules/javax.interceptor-api.jar:${j2ee.server.home}/modules/javax.servlet.jsp-api.jar:${j2ee.server.home}/modules/javax.websocket-api.jar:${j2ee.server.home}/modules/javax.resource-api.jar:${j2ee.server.home}/modules/javax.xml.rpc-api.jar:${j2ee.server.home}/modules/javax.servlet-api.jar:${j2ee.server.home}/modules/javax.persistence.jar:${j2ee.server.home}/modules/javax.servlet.jsp.jar:${j2ee.server.home}/modules/weld-osgi-bundle.jar:${j2ee.server.home}/modules/webservices-osgi.jar:${j2ee.server.home}/modules/javax.batch-api.jar:${j2ee.server.home}/modules/javax.ws.rs-api.jar:${j2ee.server.home}/modules/bean-validator.jar:${j2ee.server.home}/modules/javax.jms-api.jar:${j2ee.server.home}/modules/javax.ejb-api.jar:${j2ee.server.home}/modules/javax.inject.jar:${j2ee.server.home}/modules/javax.faces.jar:${j2ee.server.home}/modules/javax.mail.jar:${j2ee.server.home}/modules/javax.json.jar:${j2ee.server.home}/modules/jaxb-osgi.jar:${j2ee.server.home}/modules/javax.el.jar:${j2ee.server.home}/modules/cdi-api.jar:${j2ee.server.home}/modules/jakarta.faces.jar:${j2ee.server.home}/modules/validation-api.jar:${j2ee.server.middleware}/mq/lib/jaxm-api.jar
 j2ee.platform.embeddableejb.classpath=${j2ee.server.home}/lib/embedded/glassfish-embedded-static-shell.jar
 j2ee.platform.wscompile.classpath=${j2ee.server.home}/modules/webservices-osgi.jar
 j2ee.platform.wsgen.classpath=${j2ee.server.home}/modules/webservices-osgi.jar:${j2ee.server.home}/modules/endorsed/webservices-api-osgi.jar:${j2ee.server.home}/modules/jaxb-osgi.jar:${j2ee.server.home}/modules/endorsed/jaxb-api.jar
diff --git a/src/python/dm/common/objects/daqInfo.py b/src/python/dm/common/objects/daqInfo.py
index fe502676db20490d037b2ceec081e84ebce97a9f..6ef9e9d0e6942c70f685e4542522ff0b6e021476 100755
--- a/src/python/dm/common/objects/daqInfo.py
+++ b/src/python/dm/common/objects/daqInfo.py
@@ -114,11 +114,12 @@ class DaqInfo(DmObject):
         self['percentageProcessingErrors'] = '%.2f' % percentageProcessingErrors
 
         if self.get('endTime'):
-            daqStatus = dmProcessingStatus.DM_PROCESSING_STATUS_FINALIZING
             if nCompletedFiles >= nFiles:
                 daqStatus = dmProcessingStatus.DM_PROCESSING_STATUS_DONE
                 if nProcessingErrors:
                     daqStatus = dmProcessingStatus.DM_PROCESSING_STATUS_FAILED
+            else:
+                daqStatus = dmProcessingStatus.DM_PROCESSING_STATUS_FAILED
             lastFileProcessingErrorTime = self.get('lastFileProcessingErrorTime')
             lastFileProcessedTime = self.get('lastFileProcessedTime')
             endTime = lastFileProcessedTime
diff --git a/src/python/dm/common/processing/plugins/mongoDbFileCatalogPlugin.py b/src/python/dm/common/processing/plugins/mongoDbFileCatalogPlugin.py
index 277c797b504a25d6dcbb07c89cc5f61322a2e509..977a1031f84d8bc458ca9117abd4d332ff27c110 100755
--- a/src/python/dm/common/processing/plugins/mongoDbFileCatalogPlugin.py
+++ b/src/python/dm/common/processing/plugins/mongoDbFileCatalogPlugin.py
@@ -63,8 +63,8 @@ class MongoDbFileCatalogPlugin(FileProcessor):
         self.logger.debug('Before releasing Mongo DB API instance semaphore count is %s ' % (self.mongoApiSemaphore.__dict__.get('_Semaphore__value')))
         self.mongoApiSemaphore.release()
 
-    def getInitialCollectionSize(self, fileMongoDbApi, experimentName, uploadOrDaqId):
-        key = '%s-%s' % (experimentName, uploadOrDaqId)
+    def getInitialCollectionSize(self, fileMongoDbApi, experimentName, uploadId):
+        key = '%s-%s' % (experimentName, uploadId)
         self.lock.acquire()
         try:
             nFiles = self.initialCollectionSizeMap.get(key)
@@ -78,35 +78,35 @@ class MongoDbFileCatalogPlugin(FileProcessor):
     def processFile(self, fileInfo):
         experimentFilePath = fileInfo.get('experimentFilePath')
         experimentName = fileInfo.get('experimentName')
-        uploadOrDaqId = fileInfo.get('uploadId') or fileInfo.get('daqId') 
+        uploadId = fileInfo.get('uploadId') 
         self.logger.debug('Processing file "%s" for experiment %s' % (experimentFilePath, experimentName))
 
         fileMetadata = self.metadataCollector.processMetadata(fileInfo, self.statUtility)
         self.logger.debug('File "%s" catalog entry: %s' % (experimentFilePath, str(fileMetadata)))
         fileInfo['metadata'] = fileMetadata
         fileMongoDbApi = self.acquireMongoApi()
-        if self.getInitialCollectionSize(fileMongoDbApi, experimentName, uploadOrDaqId) == 0:
-            self.logger.debug('Initial collection size is zero, adding file %s' % (experimentFilePath))
+        if uploadId and self.getInitialCollectionSize(fileMongoDbApi, experimentName, uploadId) == 0:
+            self.logger.debug('Initial collection size is zero for upload %s, adding file %s' % (uploadId, experimentFilePath))
             fileMongoDbApi.addExperimentFileUnchecked(fileMetadata)
         else:
-            self.logger.debug('Initial collection size is not zero, updating file %s' % (experimentFilePath))
+            self.logger.debug('Updating file %s' % (experimentFilePath))
             fileMongoDbApi.updateOrAddExperimentFile(fileMetadata)
         self.releaseMongoApi(fileMongoDbApi)
 
     def processFile2(self, fileInfo, fileMongoDbApi):
         experimentFilePath = fileInfo.get('experimentFilePath')
         experimentName = fileInfo.get('experimentName')
-        uploadOrDaqId = fileInfo.get('uploadId') or fileInfo.get('daqId') 
+        uploadId = fileInfo.get('uploadId') 
         self.logger.debug('Processing file "%s" for experiment %s' % (experimentFilePath, experimentName))
 
         fileMetadata = self.metadataCollector.processMetadata(fileInfo, self.statUtility)
         self.logger.debug('File "%s" catalog entry: %s' % (experimentFilePath, str(fileMetadata)))
         fileInfo['metadata'] = fileMetadata
-        if self.getInitialCollectionSize(fileMongoDbApi, experimentName, uploadOrDaqId) == 0:
-            self.logger.debug('Initial collection size is zero, adding file %s' % (experimentFilePath))
+        if uploadId and self.getInitialCollectionSize(fileMongoDbApi, experimentName, uploadId) == 0:
+            self.logger.debug('Initial collection size is zero for upload %s, adding file %s' % (uploadId, experimentFilePath))
             fileMongoDbApi.addExperimentFileUnchecked(fileMetadata)
         else:
-            self.logger.debug('Initial collection size is not zero, updating file %s' % (experimentFilePath))
+            self.logger.debug('Updating file %s' % (experimentFilePath))
             fileMongoDbApi.updateOrAddExperimentFile(fileMetadata)
 
     def processFilePath(self, fileMongoDbApi, filePath, filePathDict, experiment, dataDirectory, destDirectory, daqInfo, uploadId, processDirectoryInfo):
@@ -161,4 +161,4 @@ class MongoDbFileCatalogPlugin(FileProcessor):
             fileMongoDbApi = self.acquireMongoApi()
             processingTimer = threading.Timer(self.PROCESSING_TIMER_DELAY_PERIOD, self.processFilePath, args=[fileMongoDbApi, filePath, filePathDict, experiment, dataDirectory, destDirectory, daqInfo, uploadId, processDirectoryInfo])
             processingTimer.start()
-                   
+
diff --git a/src/python/dm/proc_web_service/api/workflowProcApi.py b/src/python/dm/proc_web_service/api/workflowProcApi.py
index 86693717d50888fbb3960c9d7b35a4d772311852..34ca6ff42088e07a5c4b4eef5663ea699db44048 100755
--- a/src/python/dm/proc_web_service/api/workflowProcApi.py
+++ b/src/python/dm/proc_web_service/api/workflowProcApi.py
@@ -24,7 +24,7 @@ class WorkflowProcApi(ProcRestApi):
 
         :param username: DM username (it may be omitted if environment variable DM_LOGIN_FILE points to a file containing the "<username>|<password>" pair)
         :type username: str
-
+        
         :param password: DM password (it may be omitted if environment variable DM_LOGIN_FILE points to a file containing the "<username>|<password>" pair)
         :type password: str
 
@@ -41,7 +41,7 @@ class WorkflowProcApi(ProcRestApi):
         Add workflow to the DM workflow database. Workflows are defined 
         using dictionaries, and they serve as templates for running 
         processing jobs.
-         
+
         Workflow keys:
           - name (required)
           - owner (required)
@@ -52,7 +52,7 @@ class WorkflowProcApi(ProcRestApi):
          
         Stage dictionary keys can be anything; they will get sorted, 
         and stages will get executed in the sorted order
-         
+
         Stage keys:
           - command (required; may use $variable strings that would get 
             their values at runtime, via <key>:<value> arguments)
@@ -64,6 +64,7 @@ class WorkflowProcApi(ProcRestApi):
             subsequent workflow stages)    
           - repeatPeriod, repeatUntil, maxRepeats (optional; must be 
             used together):
+
               - repeatPeriod: 
                   - defines period in seconds after which the stage command 
                     will be repeated 
@@ -78,12 +79,11 @@ class WorkflowProcApi(ProcRestApi):
               - maxRepeats: 
                   - defines maximum number of repeats; if this 
                     number is reached, stage will fail
-             
+
         Reserved keys that cannot be used in a stage definition:
           - workflow: processing job workflow specification
-         
-        Reserved keys that may be used in a stage definition as command input 
-        variables:
+        
+        Reserved keys that may be used in a stage definition as command input variables:
           - id: processing job id 
           - stage: processing job workflow stage 
           - status: processing job status 
@@ -101,59 +101,53 @@ class WorkflowProcApi(ProcRestApi):
           - nFailedFiles: number of failed files
           - nSkippedFiles: number of skipped files
           - nAbortedFiles: number of aborted files
-          - nCompletedFiles: number of completed input files 
-              - sum of processed, failed, skipped and aborted files
+          - nCompletedFiles: number of completed input files (sum of processed, failed, skipped and aborted files)
           - processedFiles: list of processed files
           - failedFiles: list of failed files
           - skippedFiles: list of skipped files
           - abortedFiles: list of aborted files
         
-        Reserved keys designated for specifying processing job input
-        files that may be used in a stage definition as command input 
-        variables:
-          - filePath: input file path
-              - if filePath is specified as one of the stage command 
-                input variables, the workflow stage will iterate over 
-                all job input files 
+        Reserved keys designated for specifying processing job input files that may be used in a stage definition as command input variables:
+          - filePath: input file path (if filePath is specified as one of the stage command input variables, the workflow stage will iterate over all job input files)
           - filePathList: list of all input file paths
           - filePathPattern: glob pattern for input file paths
-          - fileQueryDict: metadata catalog query dictionary 
-              - not yet implemented (reserved for future use)
+          - fileQueryDict: metadata catalog query dictionary (not yet implemented)
           - dataDir: directory containing data files 
          
         Any keys that are not reserved may be used in a stage
         definition as command input or output variables. Stage output
         variables can be used as input for any of the subsequent stages.
-       
+      
         Example workflow definition:
-        
-        {
-          'name'        : 'example-01'
-          'owner'       : 'anOwner', 
-          'stages'      : { 
-            '01-START'  : { 
-              'command' : '/bin/date +%Y%m%d%H%M%S', 
-              'outputVariableRegexList' : ['(?P<timeStamp>.*)']
-            },
-            '02-MKDIR'  : {
-              'command' : '/bin/mkdir -p /tmp/workflow.$timeStamp'
-            },
-            '03-ECHO'   : {
-              'command' : '/bin/echo "START JOB ID: $id" > /tmp/workflow.$timeStamp/$id.out'
-            },
-            '04-MD5SUM' : {
-              'command' : '/bin/md5sum $filePath | cut -f1 -d" "', 
-              'outputVariableRegexList' : ['(?P<md5Sum>.*)']
-            },
-            '05-ECHO'   : {
-              'command' : 'echo "FILE $filePath MD5 SUM: $md5Sum" >> /tmp/workflow.$timeStamp/$id.out'
-            },
-            '06-DONE'   : {
-              'command' : '/bin/echo "STOP JOB ID: $id" >> /tmp/workflow.$timeStamp/$id.out'
-            },
-          },
-          'description' : 'Workflow Example 01'
-        }
+
+            {
+                'name'        : 'example-01',
+                'owner'       : 'anOwner', 
+                'stages'      : { 
+                    '01-START'  : { 
+                        'command' : '/bin/date +%Y%m%d%H%M%S', 
+                        'outputVariableRegexList' : ['(?P<timeStamp>.*)']
+                    },
+                    '02-MKDIR'  : {
+                        'command' : '/bin/mkdir -p /tmp/workflow.$timeStamp'
+                    },
+                    '03-ECHO'   : {
+                        'command' : '/bin/echo "START JOB ID: $id" > /tmp/workflow.$timeStamp/$id.out'
+                    },
+                    '04-MD5SUM' : {
+                        'command' : '/bin/md5sum $filePath | cut -f1 -d" "', 
+                        'outputVariableRegexList' : ['(?P<md5Sum>.*)']
+                    },
+                    '05-ECHO'   : {
+                        'command' : 'echo "FILE $filePath MD5 SUM: $md5Sum" >> /tmp/workflow.$timeStamp/$id.out'
+                    },
+                    '06-DONE'   : {
+                        'command' : '/bin/echo "STOP JOB ID: $id" >> /tmp/workflow.$timeStamp/$id.out'
+                    },
+                },
+                'description' : 'Workflow Example 01'
+            }
+
 
         :param workflow: Workflow description
         :type workflow: Workflow or dict
@@ -430,15 +424,13 @@ class WorkflowProcApi(ProcRestApi):
           - nFailedFiles: number of failed files
           - nSkippedFiles: number of skipped files
           - nAbortedFiles: number of aborted files
-          - nCompletedFiles: number of completed input files 
-              - sum of processed, failed, skipped and aborted files
+          - nCompletedFiles: number of completed input files (sum of processed, failed, skipped and aborted files)
           - processedFiles: list of processed files
           - failedFiles: list of failed files
           - skippedFiles: list of skipped files
           - abortedFiles: list of aborted files
         
-        Reserved keys designated for specifying processing job input
-        files that may be passed as job input at runtime:
+        Reserved keys designated for specifying processing job input files that may be passed as job input at runtime:
           - filePath: input file path
               - if filePath:<aPath> is specified as part of job input, the 
                 job input file list will consist of a single file