diff --git a/doc/RELEASE_NOTES.txt b/doc/RELEASE_NOTES.txt
index 20409ea9a9b5bc9c31a3a62540c86e0972c80cf5..3a063d0875aaea2a6aeb15654bbec382de7c001d 100644
--- a/doc/RELEASE_NOTES.txt
+++ b/doc/RELEASE_NOTES.txt
@@ -1,8 +1,9 @@
 Release 0.8 (01/26/2016)
 =============================
+
 - Enhanced upload/daq performance and functionality (hidden files are not
-  processed, for uploads system can detect files that had been processed 
-  already)
+  processed; for uploads the system can detect files that had been processed 
+  already; added handling and reporting for processing errors)
 - Source file checksum is calculated for rsync/gridftp plugins by default
 
 Release 0.7 (12/08/2015)
diff --git a/etc/dm.sudo-rules.template b/etc/dm.sudo-rules.template
index afc042be96a25874f2808d914800812065a58d29..063a04dd7b8d5fe6e23075a2152221951e08036c 100644
--- a/etc/dm.sudo-rules.template
+++ b/etc/dm.sudo-rules.template
@@ -7,7 +7,8 @@ Cmnd_Alias USERMOD=/usr/sbin/usermod -a -G * *
 Cmnd_Alias GROUPADD=/usr/sbin/groupadd *
 Cmnd_Alias CHOWN=/bin/chown -R \:* *
 Cmnd_Alias GPASSWD=/usr/bin/gpasswd * * *
+Cmnd_Alias NSCD=/usr/sbin/nscd -i *
 
-USER HOST = (root) NOPASSWD: SETFACL,USERMOD,GROUPADD,CHOWN,GPASSWD
+USER HOST = (root) NOPASSWD: SETFACL,USERMOD,GROUPADD,CHOWN,GPASSWD,NSCD
 
 
diff --git a/src/python/dm/common/objects/daqInfo.py b/src/python/dm/common/objects/daqInfo.py
index c5d32214eddbb746d5fad1f81e1d35a09b7265b6..0e3e2a9e77cf4dfa1fae0d7592f5532aa4f0f3ee 100755
--- a/src/python/dm/common/objects/daqInfo.py
+++ b/src/python/dm/common/objects/daqInfo.py
@@ -6,7 +6,8 @@ from dm.common.utility.dictUtility import DictUtility
 
 class DaqInfo(DmObject):
 
-    DEFAULT_KEY_LIST = [ 'id', 'experimentName', 'dataDirectory', 'status', 'nProcessedFiles', 'nFiles', 'startTimestamp', 'endTimestamp' ]
+    DEFAULT_KEY_LIST = [ 'id', 'experimentName', 'dataDirectory', 'status', 'nProcessedFiles', 'nProcessingErrors', 'nFiles', 'startTime', 'endTime', 'runTime', 'startTimestamp', 'endTimestamp' ]
+    
 
     def __init__(self, dict={}):
         DmObject.__init__(self, dict)
@@ -20,15 +21,37 @@ class DaqInfo(DmObject):
         fileDict = self.get('fileDict')
         nFiles = len(fileDict)
         nProcessedFiles = 0
+        nProcessingErrors = 0
+        processingErrors = {}
         for (filePath,uploadFileInfo) in fileDict.items():
             if uploadFileInfo.get('processed'):
                  nProcessedFiles += 1                
+            elif uploadFileInfo.get('processingError'):
+                nProcessingErrors += 1
+                processingErrors[filePath] = uploadFileInfo.get('processingError')
+        if len(processingErrors):
+            self['processingErrors'] = processingErrors
+
+        # need to handle 'failed' uploads
+        nCompletedFiles = nProcessedFiles+nProcessingErrors
         self['nProcessedFiles'] = '%s' % (nProcessedFiles)
+        self['nProcessingErrors'] = '%s' % (nProcessingErrors)
         self['nFiles'] = '%s' % (nFiles)
 
-        # need to handle 'failed' uploads
+        percentageComplete = 100.0
+        percentageProcessed = 100.0
+        percentageProcessingErrors = 0.0
+        if nFiles > 0:
+             percentageComplete = float(nCompletedFiles)/float(nFiles)*100.0
+             percentageProcessed = float(nProcessedFiles)/float(nFiles)*100.0
+             percentageProcessingErrors = float(nProcessingErrors)/float(nFiles)*100.0
+        self['percentageComplete'] = '%.2f' % percentageComplete
+        self['percentageProcessed'] = '%.2f' % percentageProcessed
+        self['percentageProcessingErrors'] = '%.2f' % percentageProcessingErrors
+
         if self.get('endTime'):
             daqStatus = 'done'
+            self['runTime'] = self.get('endTime') - self.get('startTime')
         self['status'] = daqStatus
 
     def toDictWithOriginalKeys(self):
@@ -38,14 +61,16 @@ class DaqInfo(DmObject):
                 del dict[key]
         return dict
 
-    def scrub(self):
+    def scrub(self, includeFileDetails=False):
         # Remove redundant information
-        daqInfo2 = DictUtility.deepCopy(self.data, excludeKeys='fileDict')
+        daqInfo2 = DictUtility.deepCopy(self.data, excludeKeys=['fileDict'])
+        if not includeFileDetails:
+            return DaqInfo(daqInfo2)
         fileDict = self.get('fileDict', {})
         fileDict2 = {}
         for (filePath,fileInfo) in fileDict.items():
             fileInfo2 = {}
-            for key in ['processed', 'lastUpdateTime']:
+            for key in ['processed', 'lastUpdateTime', 'processingError']:
                 if fileInfo.has_key(key):
                     fileInfo2[key] = fileInfo[key]
             fileDict2[filePath] = fileInfo2
diff --git a/src/python/dm/common/objects/uploadInfo.py b/src/python/dm/common/objects/uploadInfo.py
index d992e3c7513687c13324162fc3f74f8b2cd3332e..06469ab73042a1313063b3a519a19dfce8fac4e1 100755
--- a/src/python/dm/common/objects/uploadInfo.py
+++ b/src/python/dm/common/objects/uploadInfo.py
@@ -1,11 +1,13 @@
 #!/usr/bin/env python
 
+import time
 from dmObject import DmObject
 from dm.common.utility.dictUtility import DictUtility
+from dm.common.utility.timeUtility import TimeUtility
 
 class UploadInfo(DmObject):
 
-    DEFAULT_KEY_LIST = [ 'id', 'experimentName', 'dataDirectory', 'status', 'nProcessedFiles', 'nFiles', 'percentageComplete', 'startTimestamp', 'endTimestamp' ]
+    DEFAULT_KEY_LIST = [ 'id', 'experimentName', 'dataDirectory', 'status', 'nProcessedFiles', 'nProcessingErrors', 'nFiles', 'percentageComplete', 'startTime', 'endTime', 'runTime', 'startTimestamp', 'endTimestamp' ]
 
     def __init__(self, dict={}):
         DmObject.__init__(self, dict)
@@ -18,30 +20,60 @@ class UploadInfo(DmObject):
         fileDict = self.get('fileDict')
         nFiles = len(fileDict)
         nProcessedFiles = 0
+        nProcessingErrors = 0
+        processingErrors = {}
+        endTime = 0
         for (filePath,uploadFileInfo) in fileDict.items():
             if uploadFileInfo.get('processed'):
                 nProcessedFiles += 1
+            elif uploadFileInfo.get('processingError'):
+                nProcessingErrors += 1
+                processingErrors[filePath] = uploadFileInfo.get('processingError')
+
+            endProcessingTime = uploadFileInfo.get('endProcessingTime')
+            if endProcessingTime is not None and endProcessingTime > endTime:
+                endTime = endProcessingTime
+        if len(processingErrors):
+            self['processingErrors'] = processingErrors
 
         # need to handle 'failed' uploads
-        if nProcessedFiles == nFiles:
+        nCompletedFiles = nProcessedFiles+nProcessingErrors
+        if nCompletedFiles == nFiles:
             uploadStatus = 'done'
+            if not endTime:
+                endTime = time.time()
+            self['endTime'] = endTime
+            self['endTimestamp'] = TimeUtility.formatLocalTimeStamp(endTime)
+            startTime = self.get('startTime')
+            if startTime:
+                runTime = endTime - startTime
+                self['runTime'] = runTime
         self['status'] = uploadStatus
         self['nProcessedFiles'] = '%s' % (nProcessedFiles)
+        self['nProcessingErrors'] = '%s' % (nProcessingErrors)
         self['nFiles'] = '%s' % (nFiles)
 
         percentageComplete = 100.0
+        percentageProcessed = 100.0
+        percentageProcessingErrors = 0.0
         if nFiles > 0:
-             percentageComplete = float(nProcessedFiles)/float(nFiles)*100.0
+             percentageComplete = float(nCompletedFiles)/float(nFiles)*100.0
+             percentageProcessed = float(nProcessedFiles)/float(nFiles)*100.0
+             percentageProcessingErrors = float(nProcessingErrors)/float(nFiles)*100.0
         self['percentageComplete'] = '%.2f' % percentageComplete
+        self['percentageProcessed'] = '%.2f' % percentageProcessed
+        self['percentageProcessingErrors'] = '%.2f' % percentageProcessingErrors
 
-    def scrub(self):
+    def scrub(self, includeFileDetails=False):
         # Remove redundant information
-        uploadInfo2 = DictUtility.deepCopy(self.data, excludeKeys='fileDict')
+        uploadInfo2 = DictUtility.deepCopy(self.data, excludeKeys=['fileDict'])
+        if not includeFileDetails:
+            return UploadInfo(uploadInfo2)
         fileDict = self.get('fileDict', {})
         fileDict2 = {}
         for (filePath,fileInfo) in fileDict.items():
             fileInfo2 = {}
-            for key in ['processed', 'lastUpdateTime']:
+            for key in ['processed', 'lastUpdateTime', 'processingError']:
                 if fileInfo.has_key(key):
                     fileInfo2[key] = fileInfo[key]
             fileDict2[filePath] = fileInfo2
diff --git a/src/python/dm/common/processing/fileProcessingManager.py b/src/python/dm/common/processing/fileProcessingManager.py
index 74dc397d26385f116f213501d83b6bccb8e16469..8a5ea3db991ec5d050d90535ff3323530677b28a 100755
--- a/src/python/dm/common/processing/fileProcessingManager.py
+++ b/src/python/dm/common/processing/fileProcessingManager.py
@@ -84,8 +84,8 @@ class FileProcessingManager(threading.Thread,Singleton):
     # Each plugin calculates list of files that need to be processed
     # Final result is union of all plugins
     def checkUploadFilesForProcessing(self, filePathsDict, uploadInfo):
-        if ValueUtility.toBoolean(uploadInfo.get('processAllFiles')):
-            del uploadInfo['processAllFiles']
+        if ValueUtility.toBoolean(uploadInfo.get('reprocessFiles')):
+            del uploadInfo['reprocessFiles']
             return filePathsDict
         checkedFilePathsDict = {}
         for processorKey in self.fileProcessorKeyList:
diff --git a/src/python/dm/common/processing/fileProcessingThread.py b/src/python/dm/common/processing/fileProcessingThread.py
index 8a85fe6eef00c93cda2557179ae71ad1dd9a1974..e546cd9ebefbc7dae732ad4569dd2ccd2dce37a9 100755
--- a/src/python/dm/common/processing/fileProcessingThread.py
+++ b/src/python/dm/common/processing/fileProcessingThread.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 import threading
+import time
 
 from dm.common.utility.loggingManager import LoggingManager
 
@@ -36,7 +37,11 @@ class FileProcessingThread(threading.Thread):
                 filePath = fileInfo.get('filePath')
                 
                 try:
+                    fileInfo['startProcessingTime'] = time.time() 
+                    processorNumber = 0
+                    nProcessors = len(self.fileProcessorKeyList)
                     for processorKey in self.fileProcessorKeyList: 
+                        processorNumber += 1
                         processor = self.fileProcessorDict.get(processorKey)
                         processorName = processor.__class__.__name__
                         fileProcessedByDict = fileInfo.get('processedByDict', {})
@@ -51,9 +56,13 @@ class FileProcessingThread(threading.Thread):
                             processor.processFile(fileInfo)
                             fileProcessedByDict[processorName] = True
                             self.logger.debug('%s processed file at path %s ' % (processorName, filePath))
+                            if processorNumber == nProcessors:
+                                self.logger.debug('File %s processing is complete' % (filePath))
+                                fileInfo['endProcessingTime'] = time.time() 
                         except Exception, ex:
                             self.logger.exception(ex)
-                            self.logger.debug('%s processing failed for file at path %s ' % (processorName, filePath))
+                            errorMsg = '%s processing error: %s' % (processorName, str(ex))
+                            self.logger.debug(errorMsg)
                             fileProcessingDict = fileInfo.get('processingDict', {})
                             fileInfo['processingDict'] = fileProcessingDict
                             processorDict = fileProcessingDict.get(processorName, {}) 
@@ -66,6 +75,8 @@ class FileProcessingThread(threading.Thread):
                             if nRetriesLeft <= 0:
                                 self.logger.debug('No more %s retries left for file %s' % (processorName, fileInfo))
                                 self.unprocessedFileDict[filePath] = fileInfo
+                                fileInfo['processingError'] = errorMsg
+                                fileInfo['endProcessingTime'] = time.time() 
                             else:
                                 retryWaitPeriod = processor.getRetryWaitPeriodInSeconds()
                                 self.logger.debug('%s will retry processing file %s in %s seconds' % (processorName, filePath, retryWaitPeriod))
diff --git a/src/python/dm/common/processing/plugins/gridftpFileTransferPlugin.py b/src/python/dm/common/processing/plugins/gridftpFileTransferPlugin.py
index 53288d86141abf5c1768b28a7a14729f31ee77e4..ca0cf64e5a44cdaa4bdd1d1fe0999544f80724f9 100755
--- a/src/python/dm/common/processing/plugins/gridftpFileTransferPlugin.py
+++ b/src/python/dm/common/processing/plugins/gridftpFileTransferPlugin.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 
 import os
+import copy
 from fileTransferPlugin import FileTransferPlugin
 from dm.common.utility.fileUtility import FileUtility
 from dm.common.utility.ftpUtility import FtpUtility
@@ -48,27 +49,31 @@ class GridftpFileTransferPlugin(FileTransferPlugin):
         (scheme, host, port, replacementDirPath) = FtpUtility.parseFtpUrl(dataDirectory)
         ftpUtility = SftpUtility(storageHost)
         storageFilePathsDict = ftpUtility.getFiles(storageDirectory, {}, replacementDirPath)
-        pluginFilePathsDict = {}
-        filePaths = filePathsDict.keys()
-        for filePath in filePaths:
+        pluginFilePathsDict = copy.copy(filePathsDict)
+        # Remove file from plugin dict if we do not need to transfer it
+        for (filePath,storageFilePathDict) in storageFilePathsDict.items():
             filePathDict = filePathsDict.get(filePath)
-            storageFilePathDict = storageFilePathsDict.get(filePath)
-
-            if not storageFilePathDict:
-                # remote directory does not have the file
-                pluginFilePathsDict[filePath] = filePathDict
-            else:
-                fSize = filePathDict.get('fileSize') 
-                sfSize = storageFilePathDict.get('fileSize') 
-                # check size
-                if not fSize or not sfSize or fSize != sfSize:
-                    pluginFilePathsDict[filePath] = filePathDict
-                else:
-                    # sizes are the same, check modify time
-                    mTime = filePathDict.get('fileModificationTime') 
-                    smTime = storageFilePathDict.get('fileModificationTime') 
-                    if not mTime or not smTime or mTime > smTime:
-                        pluginFilePathsDict[filePath] = filePathDict
+            if filePathDict is None:
+                # We are not attempting to transfer this file
+                # No need to change plugin file dict
+                continue
+
+            # Check size
+            fSize = filePathDict.get('fileSize') 
+            sfSize = storageFilePathDict.get('fileSize') 
+            if not fSize or not sfSize or fSize != sfSize:
+                # Sizes differ, need to transfer file
+                continue
+
+            # Sizes are the same, check modify time
+            mTime = filePathDict.get('fileModificationTime') 
+            smTime = storageFilePathDict.get('fileModificationTime') 
+            if not mTime or not smTime or mTime > smTime:
+                # Source time is later than storage time, need to transfer file
+                continue
+
+            # No need to transfer file
+            del pluginFilePathsDict[filePath]
 
         self.logger.debug('Number of original files: %s, number of plugin files: %s', len(filePathsDict), len(pluginFilePathsDict))
         return pluginFilePathsDict
diff --git a/src/python/dm/common/utility/ftpUtility.py b/src/python/dm/common/utility/ftpUtility.py
index 42efebd8f4d1dcb1aea4f0e76b4ab3e758e42a88..312faf28256beeb285b14309806a3dfaa1ea6119 100755
--- a/src/python/dm/common/utility/ftpUtility.py
+++ b/src/python/dm/common/utility/ftpUtility.py
@@ -143,3 +143,6 @@ if __name__ == '__main__':
     print files
     print ftpUtility.getMd5Sum('/export/8-id-i/test/testfile01')
     print ftpUtility.statFile('/export/8-id-i/test/testfile01')
+    #ftpUtility = FtpUtility('xstor-devel', 22)
+    #files = ftpUtility.getFiles('/data/testing')
+    #print files
diff --git a/src/python/dm/common/utility/ldapLinuxPlatformUtility.py b/src/python/dm/common/utility/ldapLinuxPlatformUtility.py
index ec88542b99c68a304e0ecb8047f17b199523d402..4ab55aa5b825a8f638e8d24692bd0a7f824bc06f 100755
--- a/src/python/dm/common/utility/ldapLinuxPlatformUtility.py
+++ b/src/python/dm/common/utility/ldapLinuxPlatformUtility.py
@@ -20,6 +20,7 @@ class LdapLinuxPlatformUtility:
     SETFACL_CMD = '/usr/bin/setfacl'
     CHOWN_CMD = '/bin/chown'
     GPASSWD_CMD = '/usr/bin/gpasswd'
+    NSCD_CMD = '/usr/sbin/nscd'
 
     def __init__(self, serverUrl, adminDn, adminPasswordFile, groupDnFormat, minGidNumber=None):
         self.serverUrl = serverUrl
@@ -166,6 +167,9 @@ class LdapLinuxPlatformUtility:
             logger.error('Could not add user %s to group %s: %s' % (username, groupName, ex))
             raise InternalError(exception=ex)
 
+        # Refresh NSCD cache
+        self.refreshNscdGroupCache()
+
     def deleteUserFromGroup(self, username, groupName):
         """ Remove user from group. """
         logger = self.getLogger()
@@ -195,6 +199,10 @@ class LdapLinuxPlatformUtility:
             logger.error('Could not remove user %s from group %s: %s' % (username, groupName, ex))
             raise InternalError(exception=ex)
 
+        # Refresh NSCD cache
+        self.refreshNscdGroupCache()
+
+
     @classmethod
     def createLocalGroup(cls, name):
         """ Create local group if it does not exist. """
@@ -257,6 +265,9 @@ class LdapLinuxPlatformUtility:
             logger.error('Could not set users %s for group %s: %s' % (usernameList, groupName, ex))
             raise InternalError(exception=ex)
 
+        # Refresh NSCD cache
+        self.refreshNscdGroupCache()
+
     @classmethod
     def setPathReadExecutePermissionsForGroup(cls, path, groupName):
         """ Set path permissions for the given group. """
@@ -272,6 +283,17 @@ class LdapLinuxPlatformUtility:
         cmd = '%s \:%s %s' % (cls.CHOWN_CMD, groupName, path)
         cls.executeSudoCommand(cmd)
 
+    @classmethod
+    def refreshNscdGroupCache(cls):
+        logger = cls.getLogger()
+        try:
+            logger.debug('Refreshing NSCD secondary group membership cache')
+            cmd = '%s -i group' % (cls.NSCD_CMD)
+            cls.executeSudoCommand(cmd)
+        except Exception, ex:
+            logger.warn('Failed to refresh NSCD group cache: %s' % (str(ex)))
+
+
 #######################################################################
 # Testing.
 
diff --git a/src/python/dm/daq_web_service/cli/startDaqCli.py b/src/python/dm/daq_web_service/cli/startDaqCli.py
index ea7cf18d615c97c70ea1d41ffd8db6d5dfecd7ab..7570f412c498416c7f6327ee41dac5da0b0ca608 100755
--- a/src/python/dm/daq_web_service/cli/startDaqCli.py
+++ b/src/python/dm/daq_web_service/cli/startDaqCli.py
@@ -9,6 +9,7 @@ class StartDaqCli(DaqWebServiceSessionCli):
         DaqWebServiceSessionCli.__init__(self, validArgCount=self.ANY_NUMBER_OF_POSITIONAL_ARGS)
         self.addOption('', '--experiment', dest='experimentName', help='Experiment name.')
         self.addOption('', '--data-directory', dest='dataDirectory', help='Experiment data directory. If specified string does not already contain file server URL, value of the %s environment variable will be prepended to it.' % self.DM_FILE_SERVER_URL_ENV_VAR)
+        self.addOption('', '--process-hidden', dest='processHidden', action='store_true', default=False, help='Process hidden source files.')
 
     def checkArgs(self):
         if self.options.experimentName is None:
@@ -16,9 +17,14 @@ class StartDaqCli(DaqWebServiceSessionCli):
         if self.options.dataDirectory is None:
             raise InvalidRequest('Experiment data directory must be provided.')
 
+    def updateDaqInfoFromOptions(self, daqInfo):
+        if self.options.processHidden:
+            daqInfo['processHiddenFiles'] = True
+
     def runCommand(self):
         self.parseArgs(usage="""
     dm-start-daq --experiment=EXPERIMENTNAME --data-directory=DATADIRECTORY
+        [--process-hidden]
         [key1:value1, key2:value2, ...]
 
 Description:
@@ -29,6 +35,7 @@ Description:
         self.checkArgs()
         api = ExperimentRestApi(self.getLoginUsername(), self.getLoginPassword(), self.getServiceHost(), self.getServicePort(), self.getServiceProtocol())
         daqInfo = self.splitArgsIntoDict()
+        self.updateDaqInfoFromOptions(daqInfo)
         daqInfo = api.startDaq(self.getExperimentName(), self.getDataDirectory(), daqInfo=daqInfo)
         print daqInfo.getDisplayString(self.getDisplayKeys(), self.getDisplayFormat())
 
diff --git a/src/python/dm/daq_web_service/cli/uploadCli.py b/src/python/dm/daq_web_service/cli/uploadCli.py
index 80f5c3caa3ef07482eb7bcc8fc62505b32d9edc9..dae08440cc84b062fdacf857b8ce1a7f8158d8d2 100755
--- a/src/python/dm/daq_web_service/cli/uploadCli.py
+++ b/src/python/dm/daq_web_service/cli/uploadCli.py
@@ -9,6 +9,8 @@ class UploadCli(DaqWebServiceSessionCli):
         DaqWebServiceSessionCli.__init__(self, validArgCount=self.ANY_NUMBER_OF_POSITIONAL_ARGS)
         self.addOption('', '--experiment', dest='experimentName', help='Experiment name.')
         self.addOption('', '--data-directory', dest='dataDirectory', help='Experiment data directory. If specified string does not already contain file server URL, value of the %s environment variable will be prepended to it.' % self.DM_FILE_SERVER_URL_ENV_VAR)
+        self.addOption('', '--reprocess', dest='reprocess', action='store_true', default=False, help='Reprocess source files that are already in storage, even if they have not been modified.')
+        self.addOption('', '--process-hidden', dest='processHidden', action='store_true', default=False, help='Process hidden source files.')
 
     def checkArgs(self):
         if self.options.experimentName is None:
@@ -16,9 +18,17 @@ class UploadCli(DaqWebServiceSessionCli):
         if self.options.dataDirectory is None:
             raise InvalidRequest('Experiment data directory must be provided.')
 
+    def updateDaqInfoFromOptions(self, daqInfo):
+        if self.options.reprocess:
+            daqInfo['reprocessFiles'] = True
+        if self.options.processHidden:
+            daqInfo['processHiddenFiles'] = True
+        
     def runCommand(self):
         self.parseArgs(usage="""
     dm-upload --experiment=EXPERIMENTNAME --data-directory=DATADIRECTORY
+        [--reprocess]
+        [--process-hidden]
         [key1:value1, key2:value2, ...]
 
 Description:
@@ -30,6 +40,7 @@ Description:
         self.checkArgs()
         api = ExperimentRestApi(self.getLoginUsername(), self.getLoginPassword(), self.getServiceHost(), self.getServicePort(), self.getServiceProtocol())
         daqInfo = self.splitArgsIntoDict()
+        self.updateDaqInfoFromOptions(daqInfo)
         uploadInfo = api.upload(self.getExperimentName(), self.getDataDirectory(), daqInfo=daqInfo)
         print uploadInfo.getDisplayString(self.getDisplayKeys(), self.getDisplayFormat())
 
diff --git a/src/python/dm/daq_web_service/service/impl/experimentSessionControllerImpl.py b/src/python/dm/daq_web_service/service/impl/experimentSessionControllerImpl.py
index 0d4e050767bd5f377f488ea0cf01bf8b864a5e6f..eca652131d1a86738d6b29fb4b92e9fd466a5341 100755
--- a/src/python/dm/daq_web_service/service/impl/experimentSessionControllerImpl.py
+++ b/src/python/dm/daq_web_service/service/impl/experimentSessionControllerImpl.py
@@ -42,20 +42,21 @@ class ExperimentSessionControllerImpl(DmObjectManager):
         FileSystemObserver.getInstance().startObservingPath(dataDirectory, experiment)
         return daqInfo
 
-    def stopDaq(self, experimentName, dataDirectory):
+    def stopDaq(self, experimentName, dataDirectory, includeFileDetails=False):
         experiment = self.dsExperimentApi.getExperimentByName(experimentName)
         daqInfo = DaqTracker.getInstance().stopDaq(experiment, dataDirectory)
         FileSystemObserver.getInstance().stopObservingPath(dataDirectory, experiment)
-        return daqInfo.scrub()
+        daqInfo.updateStatus()
+        return daqInfo.scrub(includeFileDetails)
 
-    def getDaqInfo(self, id):
+    def getDaqInfo(self, id, includeFileDetails=False):
         daqInfo = DaqTracker.getInstance().getDaqInfo(id)
         if not daqInfo:
             raise ObjectNotFound('Daq id %s not found.' % id)
         daqInfo.updateStatus()
-        return daqInfo.scrub()
+        return daqInfo.scrub(includeFileDetails)
 
-    def upload(self, experimentName, dataDirectory, daqInfo):
+    def upload(self, experimentName, dataDirectory, daqInfo, includeFileDetails=False):
         experiment = self.dsExperimentApi.getExperimentByName(experimentName)
         experiment['daqInfo'] = daqInfo
         storageDirectory = experiment.get('storageDirectory')
@@ -107,13 +108,13 @@ class ExperimentSessionControllerImpl(DmObjectManager):
         uploadInfo['fileDict'] = fileDict
         #self.logger.debug('Upload info %s' % uploadInfo)
         UploadTracker.getInstance().put(uploadId, uploadInfo)
-        return uploadInfo.scrub()
+        return uploadInfo.scrub(includeFileDetails)
 
-    def getUploadInfo(self, id):
+    def getUploadInfo(self, id, includeFileDetails=False):
         uploadInfo = UploadTracker.getInstance().get(id)
         if not uploadInfo:
             raise ObjectNotFound('Upload id %s not found.' % id)
         uploadInfo.updateStatus()
-        return uploadInfo.scrub()
+        return uploadInfo.scrub(includeFileDetails)