Compare revisions

Showing 970 additions and 0 deletions
#!/usr/bin/env python
import threading
import time
from dm.common.utility.loggingManager import LoggingManager
from dm.common.utility.configurationManager import ConfigurationManager
from dm.common.utility.objectUtility import ObjectUtility
from dm.common.utility.timeBasedProcessingQueue import TimeBasedProcessingQueue
from dm.common.utility.singleton import Singleton
from fileProcessingThread import FileProcessingThread
class FileProcessingManager(threading.Thread,Singleton):
CONFIG_SECTION_NAME = 'FileProcessingManager'
NUMBER_OF_PROCESSING_THREADS_KEY = 'numberofprocessingthreads'
DEFAULT_NUMBER_OF_RETRIES_KEY = 'defaultnumberofretries'
DEFAULT_RETRY_WAIT_PERIOD_IN_SECONDS_KEY = 'defaultretrywaitperiodinseconds'
FILE_PROCESSOR_KEY = 'fileprocessor'
# Singleton.
__instanceLock = threading.RLock()
__instance = None
def __init__(self):
FileProcessingManager.__instanceLock.acquire()
try:
if FileProcessingManager.__instance:
return
FileProcessingManager.__instance = self
self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)
self.logger.debug('Initializing')
self.lock = threading.RLock()
self.fileProcessingThreadDict = {}
self.eventFlag = threading.Event()
self.fileProcessorDict = {}
self.fileProcessorKeyList = []
self.fileProcessingQueue = TimeBasedProcessingQueue()
self.processedFileDict = {}
self.unprocessedFileDict = {}
self.__configure()
self.logger.debug('Initialization complete')
finally:
FileProcessingManager.__instanceLock.release()
def __configure(self):
cm = ConfigurationManager.getInstance()
configItems = cm.getConfigItems(FileProcessingManager.CONFIG_SECTION_NAME)
self.logger.debug('Got config items: %s' % configItems)
self.nProcessingThreads = int(cm.getConfigOption(FileProcessingManager.CONFIG_SECTION_NAME, FileProcessingManager.NUMBER_OF_PROCESSING_THREADS_KEY))
self.defaultNumberOfRetries = int(cm.getConfigOption(FileProcessingManager.CONFIG_SECTION_NAME, FileProcessingManager.DEFAULT_NUMBER_OF_RETRIES_KEY))
self.defaultRetryWaitPeriodInSeconds = int(cm.getConfigOption(FileProcessingManager.CONFIG_SECTION_NAME, FileProcessingManager.DEFAULT_RETRY_WAIT_PERIOD_IN_SECONDS_KEY))
# Create processors
for (key,value) in configItems:
if key.startswith(FileProcessingManager.FILE_PROCESSOR_KEY):
(moduleName,className,constructor) = cm.getModuleClassConstructorTuple(value)
self.logger.debug('Creating file processor instance of class %s' % className)
fileProcessor = ObjectUtility.createObjectInstance(moduleName, className, constructor)
self.logger.debug('Configuring file processor %s' % fileProcessor)
fileProcessor.setNumberOfRetries(self.defaultNumberOfRetries)
fileProcessor.setRetryWaitPeriodInSeconds(self.defaultRetryWaitPeriodInSeconds)
fileProcessor.configure()
self.fileProcessorDict[key] = fileProcessor
self.fileProcessorKeyList = self.fileProcessorDict.keys()
self.fileProcessorKeyList.sort()
self.logger.debug('File processor keys: %s' % self.fileProcessorKeyList)
def processFile(self, fileInfo):
self.fileProcessingQueue.push(fileInfo)
self.eventFlag.set()
def appendFileProcessor(self, fileProcessor):
key = fileProcessor.__class__.__name__
self.logger.debug('Adding file processor: %s' % key)
self.fileProcessorDict[key] = fileProcessor
self.fileProcessorKeyList.append(key)
self.logger.debug('File processor keys: %s' % self.fileProcessorKeyList)
def start(self):
self.lock.acquire()
try:
self.logger.debug('Starting file processing threads')
for i in range(0, self.nProcessingThreads):
tName = 'FileProcessingThread-%s' % i
t = FileProcessingThread(tName, self, self.fileProcessorDict, self.fileProcessorKeyList, self.fileProcessingQueue, self.processedFileDict, self.unprocessedFileDict)
t.start()
self.fileProcessingThreadDict[tName] = t
finally:
self.lock.release()
def stop(self):
self.logger.debug('Stopping file processing threads')
for (tName, t) in self.fileProcessingThreadDict.items():
t.stop()
self.lock.acquire()
try:
self.eventFlag.set()
finally:
self.lock.release()
for (tName, t) in self.fileProcessingThreadDict.items():
t.join()
def setEvent(self):
self.lock.acquire()
try:
self.eventFlag.set()
finally:
self.lock.release()
def clearEvent(self):
self.lock.acquire()
try:
self.eventFlag.clear()
finally:
self.lock.release()
def waitOnEvent(self, timeoutInSeconds=None):
self.eventFlag.wait(timeoutInSeconds)
####################################################################
# Testing
if __name__ == '__main__':
fp = FileProcessingManager.getInstance()
print fp
#fp.start()
#time.sleep(30)
#fp.stop()
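
# ----------------------------------------------------------------------
# A minimal usage sketch for FileProcessingManager, assuming the module is
# importable as dm.common.processing.fileProcessingManager (the package
# path is not shown above) and that processors have been configured or
# appended. The experiment values below are hypothetical.
import time
from dm.common.processing.fileProcessingManager import FileProcessingManager

if __name__ == '__main__':
    fileProcessingManager = FileProcessingManager.getInstance()
    fileProcessingManager.start()
    fileInfo = {
        'filePath' : '/data/exp1/scan_0001.dat',
        'dataDirectory' : '/data/exp1',
        'experiment' : {
            'name' : 'exp1',
            'storageHost' : 'storage.example.gov',
            'storageDirectory' : '/storage/exp1',
        },
    }
    # processFile() only queues the file; a FileProcessingThread pops it
    # and runs every configured processor against it.
    fileProcessingManager.processFile(fileInfo)
    time.sleep(10)
    fileProcessingManager.stop()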
#!/usr/bin/env python
import threading
from dm.common.utility.loggingManager import LoggingManager
class FileProcessingThread(threading.Thread):
THREAD_EVENT_TIMEOUT_IN_SECONDS = 10.0
def __init__ (self, name, fileProcessingManager, fileProcessorDict, fileProcessorKeyList, fileProcessingQueue, processedFileDict, unprocessedFileDict):
threading.Thread.__init__(self)
self.setName(name)
self.exitFlag = False
self.fileProcessingManager = fileProcessingManager
self.fileProcessorDict = fileProcessorDict
self.fileProcessorKeyList = fileProcessorKeyList
self.fileProcessingQueue = fileProcessingQueue
self.processedFileDict = processedFileDict
self.unprocessedFileDict = unprocessedFileDict
self.logger = LoggingManager.getInstance().getLogger(name)
def run(self):
self.logger.debug('Starting thread: %s' % self.getName())
while True:
self.fileProcessingManager.clearEvent()
if self.exitFlag:
self.logger.debug('Exit flag is set')
break
while True:
fileInfo = self.fileProcessingQueue.pop()
if fileInfo is None:
break
filePath = fileInfo.get('filePath')
try:
for processorKey in self.fileProcessorKeyList:
processor = self.fileProcessorDict.get(processorKey)
processorName = processor.__class__.__name__
fileProcessedByDict = fileInfo.get('processedByDict', {})
fileInfo['processedByDict'] = fileProcessedByDict
if fileProcessedByDict.has_key(processorName):
self.logger.debug('%s has already been processed by %s ' % (fileInfo, processorName))
continue
self.logger.debug('%s is about to process file %s ' % (processorName, fileInfo))
try:
processor.processFile(fileInfo)
fileProcessedByDict[processorName] = True
self.logger.debug('%s processed file at path %s ' % (processorName, filePath))
except Exception, ex:
self.logger.exception(ex)
self.logger.debug('%s processing failed for file at path %s ' % (processorName, filePath))
fileProcessingDict = fileInfo.get('processingDict', {})
fileInfo['processingDict'] = fileProcessingDict
processorDict = fileProcessingDict.get(processorName, {})
fileProcessingDict[processorName] = processorDict
processorDict['lastError'] = str(ex)
nRetriesLeft = processorDict.get('numberOfRetriesLeft', processor.getNumberOfRetries())
self.logger.debug('Number of %s retries left for file %s: %s' % (processorName, filePath, nRetriesLeft))
processorDict['numberOfRetriesLeft'] = nRetriesLeft - 1
if nRetriesLeft <= 0:
self.logger.debug('No more %s retries left for file %s' % (processorName, fileInfo))
self.unprocessedFileDict[filePath] = fileInfo
else:
retryWaitPeriod = processor.getRetryWaitPeriodInSeconds()
self.logger.debug('%s will retry processing file %s in %s seconds' % (processorName, filePath, retryWaitPeriod))
self.fileProcessingQueue.push(fileInfo, retryWaitPeriod)
# Do not process this file further until
# this plugin is done
break
except Exception, ex:
self.logger.exception(ex)
self.fileProcessingManager.waitOnEvent(self.THREAD_EVENT_TIMEOUT_IN_SECONDS)
self.logger.debug('%s is done' % self.getName())
def stop(self):
self.exitFlag = True
####################################################################
# Testing
if __name__ == '__main__':
pass
#!/usr/bin/env python
import abc
class FileProcessor:
    # Python 2 metaclass declaration; without it @abc.abstractmethod is not enforced.
    __metaclass__ = abc.ABCMeta
DEFAULT_NUMBER_OF_RETRIES = 0
DEFAULT_RETRY_WAIT_PERIOD_IN_SECONDS = 60
def __init__(self):
self.configDict = {}
@abc.abstractmethod
def processFile(self, fileInfo):
return NotImplemented
def configure(self):
# Use this method for processor configuration
pass
def setConfigKeyValue(self, key, value):
self.configDict[key] = value
def getConfigKeyValue(self, key):
return self.configDict.get(key)
def setNumberOfRetries(self, nRetries):
self.configDict['numberOfRetries'] = nRetries
def getNumberOfRetries(self):
return self.configDict.get('numberOfRetries', self.DEFAULT_NUMBER_OF_RETRIES)
def setRetryWaitPeriodInSeconds(self, waitPeriod):
self.configDict['retryWaitPeriodInSeconds'] = waitPeriod
def getRetryWaitPeriodInSeconds(self):
return self.configDict.get('retryWaitPeriodInSeconds', self.DEFAULT_RETRY_WAIT_PERIOD_IN_SECONDS)
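
# ----------------------------------------------------------------------
# A minimal sketch of a concrete processor built on the FileProcessor base
# above. The class name is hypothetical; the import path mirrors the one
# used by the transfer plugins below, and registration through
# appendFileProcessor() is an alternative to configuration-driven setup.
from dm.common.processing.plugins.fileProcessor import FileProcessor

class FileLoggingProcessor(FileProcessor):

    def __init__(self):
        FileProcessor.__init__(self)
        self.prefix = 'Processing'

    def configure(self):
        # Optional hook; values stored via setConfigKeyValue() are available here.
        self.prefix = self.getConfigKeyValue('prefix') or self.prefix

    def processFile(self, fileInfo):
        # Raising an exception here triggers the retry logic in
        # FileProcessingThread, up to getNumberOfRetries() attempts.
        print '%s file %s' % (self.prefix, fileInfo.get('filePath'))

# Registration, e.g. from service startup code:
#   FileProcessingManager.getInstance().appendFileProcessor(FileLoggingProcessor())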
#!/usr/bin/env python
import os
from dm.common.utility.loggingManager import LoggingManager
from dm.common.utility.dmSubprocess import DmSubprocess
from dm.common.exceptions.invalidArgument import InvalidArgument
from dm.common.exceptions.invalidRequest import InvalidRequest
from fileProcessor import FileProcessor
class FileTransferPlugin(FileProcessor):
def __init__(self, command, src=None, dest=None):
FileProcessor.__init__(self)
self.src = src
self.dest = dest
self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)
if command is None or not len(command):
raise InvalidArgument('File transfer command must be non-empty string.')
self.command = command
self.subprocess = None
def processFile(self, fileInfo):
filePath = fileInfo.get('filePath')
dataDirectory = fileInfo.get('dataDirectory')
experiment = fileInfo.get('experiment')
storageHost = experiment.get('storageHost')
storageDirectory = experiment.get('storageDirectory')
destUrl = self.getDestUrl(filePath, dataDirectory, storageHost, storageDirectory)
srcUrl = self.getSrcUrl(filePath, dataDirectory)
self.start(srcUrl, destUrl)
def getSrcUrl(self, filePath, dataDirectory):
# Use relative path with respect to data directory as a source
os.chdir(dataDirectory)
srcUrl = os.path.relpath(filePath, dataDirectory)
return srcUrl
def getDestUrl(self, filePath, dataDirectory, storageHost, storageDirectory):
destUrl = '%s:%s' % (storageHost, storageDirectory)
return destUrl
def getFullCommand(self, src, dest):
return '%s %s %s' % (self.command, src, dest)
def setSrc(self, src):
self.src = src
def setDest(self, dest):
self.dest = dest
def start(self, src=None, dest=None):
# Use preconfigured source if provided source is None
fileSrc = src
if src is None:
fileSrc = self.src
# Use provided destination only if preconfigured destination is None
# Plugins may have desired destination preconfigured for all files
fileDest = self.dest
if self.dest is None:
fileDest = dest
if not fileSrc or not fileDest:
raise InvalidRequest('Both source and destination must be non-empty strings.')
self.subprocess = DmSubprocess.getSubprocess(self.getFullCommand(fileSrc, fileDest))
return self.subprocess.run()
def wait(self):
if self.subprocess:
return self.subprocess.wait()
return None
def poll(self):
if self.subprocess:
return self.subprocess.poll()
return None
def getStdOut(self):
if self.subprocess:
return self.subprocess.getStdOut()
return None
def getStdErr(self):
if self.subprocess:
return self.subprocess.getStdErr()
return None
def getExitStatus(self):
if self.subprocess:
return self.subprocess.getExitStatus()
return None
def reset(self):
self.subprocess = None
#######################################################################
# Testing.
if __name__ == '__main__':
    ft = FileTransferPlugin('rsync -arv', '/tmp/xyz', '/tmp/xyz2')
ft.start()
print 'StdOut: ', ft.getStdOut()
print 'StdErr: ', ft.getStdErr()
print 'Exit Status: ', ft.getExitStatus()
#!/usr/bin/env python
import os
from fileTransferPlugin import FileTransferPlugin
from dm.common.utility.ftpUtility import FtpUtility
class GridftpFileTransferPlugin(FileTransferPlugin):
DEFAULT_COMMAND = 'globus-url-copy -c -cd -r -vb -tcp-bs 512K -p 1 -sync -sync-level 2'
DEFAULT_PORT = 2811
def __init__(self, src=None, dest=None, command=DEFAULT_COMMAND):
FileTransferPlugin.__init__(self, command, src, dest)
def getSrcUrl(self, filePath, dataDirectory):
(scheme, host, port, dirPath) = FtpUtility.parseFtpUrl(dataDirectory, defaultPort=self.DEFAULT_PORT)
if scheme:
srcUrl = '%s://%s:%s/%s' % (scheme, host, port, filePath)
elif self.src is None:
srcUrl = 'file://%s' % filePath
else:
srcUrl = '%s/%s' % (self.src,filePath)
return srcUrl
def getDestUrl(self, filePath, dataDirectory, storageHost, storageDirectory):
(scheme, host, port, dirPath) = FtpUtility.parseFtpUrl(dataDirectory, defaultPort=self.DEFAULT_PORT)
dirName = os.path.dirname(os.path.relpath(filePath, dirPath)).strip()
fileName = os.path.basename(filePath)
destUrl = 'sshftp://%s/%s/%s/%s' % (storageHost, storageDirectory, dirName, fileName)
return destUrl
#######################################################################
# Testing.
if __name__ == '__main__':
ft = GridftpFileTransferPlugin('/tmp/xyz', '/tmp/xyz2')
ft.start()
print 'StdOut: ', ft.getStdOut()
print 'StdErr: ', ft.getStdErr()
print 'Exit Status: ', ft.getExitStatus()
#!/usr/bin/env python
from fileTransferPlugin import FileTransferPlugin
class RsyncFileTransferPlugin(FileTransferPlugin):
COMMAND = 'rsync -arvlPR'
def __init__(self, src=None, dest=None):
FileTransferPlugin.__init__(self, self.COMMAND, src, dest)
#######################################################################
# Testing.
if __name__ == '__main__':
ft = RsyncFileTransferPlugin('/tmp/xyz', '/tmp/xyz2')
ft.start()
print 'StdOut: ', ft.getStdOut()
print 'StdErr: ', ft.getStdErr()
print 'Exit Status: ', ft.getExitStatus()
#!/usr/bin/env python
import os
from dm.common.utility.loggingManager import LoggingManager
from dm.common.utility.osUtility import OsUtility
from dm.common.utility.fileUtility import FileUtility
from dm.common.exceptions.fileProcessingError import FileProcessingError
from dm.common.processing.plugins.fileTransferPlugin import FileTransferPlugin
from dm.ds_web_service.api.dsRestApiFactory import DsRestApiFactory
class RsyncWithChecksumAndDeleteFileTransferPlugin(FileTransferPlugin):
COMMAND = 'rsync -arvlPR'
def __init__(self, src=None, dest=None):
FileTransferPlugin.__init__(self, self.COMMAND, src, dest)
self.dsFileApi = DsRestApiFactory.getFileRestApi()
self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)
def processFile(self, fileInfo):
filePath = fileInfo.get('filePath')
dataDirectory = fileInfo.get('dataDirectory')
experiment = fileInfo.get('experiment')
experimentName = experiment.get('name')
experimentFilePath = fileInfo.get('experimentFilePath')
storageHost = experiment.get('storageHost')
storageDirectory = experiment.get('storageDirectory')
destUrl = self.getDestUrl(filePath, dataDirectory, storageHost, storageDirectory)
# Use relative path with respect to data directory as a source
os.chdir(dataDirectory)
srcUrl = self.getSrcUrl(filePath, dataDirectory)
# Calculate checksum
FileUtility.statFile(filePath, fileInfo)
FileUtility.getMd5Sum(filePath, fileInfo)
self.logger.debug('File info before transfer: %s' % fileInfo)
# Transfer file
self.start(srcUrl, destUrl)
# Get remote checksum
fileInfo2 = {}
fileInfo2['experimentFilePath'] = experimentFilePath
fileInfo2['experimentName'] = experimentName
fileMetadata = self.dsFileApi.statFile(experimentFilePath, experimentName, fileInfo2)
self.logger.debug('File stat after transfer: %s' % fileMetadata)
# Verify checksum
if fileMetadata.get('md5Sum') != fileInfo.get('md5Sum'):
self.logger.error('Checksum mismatch for file: %s' % filePath)
raise FileProcessingError('Checksum mismatch detected for file: %s' % filePath)
# Remove file
self.logger.debug('Checksum test passed, deleting file %s' % filePath)
OsUtility.removeFile(srcUrl)
#######################################################################
# Testing.
if __name__ == '__main__':
ft = RsyncWithChecksumAndDeleteFileTransferPlugin('/tmp/xyz', '/tmp/xyz2')
ft.start()
print 'StdOut: ', ft.getStdOut()
print 'StdErr: ', ft.getStdErr()
print 'Exit Status: ', ft.getExitStatus()
#!/usr/bin/env python
from dm.common.utility.loggingManager import LoggingManager
class AuthorizationPrincipalAuthenticator:
def __init__(self, name=None):
self.name = name
self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)
def getName(self):
return self.name
def authenticatePrincipal(self, principal, password):
return None
#######################################################################
# Testing.
if __name__ == '__main__':
pass
#!/usr/bin/env python
import os
from dm.common.exceptions.authorizationError import AuthorizationError
from dm.common.objects.dmObjectManager import DmObjectManager
from dm.common.utility.configurationManager import ConfigurationManager
from dm.common.utility.objectCache import ObjectCache
from dm.common.utility.cryptUtility import CryptUtility
from dm.common.utility.objectUtility import ObjectUtility
from dm.common.utility.singleton import Singleton
class AuthorizationPrincipalManager(DmObjectManager, Singleton):
DEFAULT_CACHE_SIZE = 10000 # number of items
DEFAULT_CACHE_OBJECT_LIFETIME = 3600 # seconds
CONFIG_SECTION_NAME = 'AuthorizationPrincipalManager'
ADMIN_ROLE_NAME_KEY = 'adminrolename'
PRINCIPAL_RETRIEVER_KEY = 'principalretriever'
PRINCIPAL_AUTHENTICATOR_KEY = 'principalauthenticator'
# Singleton instance.
__instance = None
def __init__(self):
if AuthorizationPrincipalManager.__instance:
return
AuthorizationPrincipalManager.__instance = self
DmObjectManager.__init__(self)
self.configurationManager = ConfigurationManager.getInstance()
self.principalRetriever = None
self.principalAuthenticatorList = []
self.objectCache = ObjectCache(AuthorizationPrincipalManager.DEFAULT_CACHE_SIZE, AuthorizationPrincipalManager.DEFAULT_CACHE_OBJECT_LIFETIME)
self.configure()
@classmethod
def cryptPassword(cls, cleartext):
return CryptUtility.cryptPassword(cleartext)
@classmethod
def cryptPasswordWithPbkdf2(cls, cleartext):
return CryptUtility.cryptPasswordWithPbkdf2(cleartext)
def configure(self):
configItems = self.configurationManager.getConfigItems(AuthorizationPrincipalManager.CONFIG_SECTION_NAME)
self.logger.debug('Got config items: %s' % configItems)
adminRoleName = self.configurationManager.getConfigOption(AuthorizationPrincipalManager.CONFIG_SECTION_NAME, AuthorizationPrincipalManager.ADMIN_ROLE_NAME_KEY)
# Create principal retriever
principalRetriever = self.configurationManager.getConfigOption(AuthorizationPrincipalManager.CONFIG_SECTION_NAME, AuthorizationPrincipalManager.PRINCIPAL_RETRIEVER_KEY)
(moduleName,className,constructor) = self.configurationManager.getModuleClassConstructorTuple(principalRetriever, AuthorizationPrincipalManager)
self.logger.debug('Creating principal retriever class: %s' % className)
self.principalRetriever = ObjectUtility.createObjectInstance(moduleName, className, constructor)
if adminRoleName is not None:
self.principalRetriever.setAdminRoleName(adminRoleName)
self.logger.debug('Authorization principal retriever: %s' % (self.principalRetriever))
# Create principal authenticators
for (key,value) in configItems:
if key.startswith(AuthorizationPrincipalManager.PRINCIPAL_AUTHENTICATOR_KEY):
(moduleName,className,constructor) = self.configurationManager.getModuleClassConstructorTuple(value, AuthorizationPrincipalManager)
self.logger.debug('Creating principal authenticator class: %s' % className)
principalAuthenticator = ObjectUtility.createObjectInstance(moduleName, className, constructor)
self.addAuthorizationPrincipalAuthenticator(principalAuthenticator)
self.logger.debug('Authorization principal authenticator: %s' % (principalAuthenticator))
def addAuthorizationPrincipalAuthenticator(self, principalAuthenticator):
self.principalAuthenticatorList.append(principalAuthenticator)
def getAuthenticatedAuthorizationPrincipal(self, username, password):
""" Get principal based on a username and password """
# First try cache.
#self.logger.debug('Trying username %s from the cache' % username)
principal = None
principal = self.objectCache.get(username)
if principal is None:
# Try principal retriever
principal = self.principalRetriever.getAuthorizationPrincipal(username)
if principal is None:
self.logger.debug('No principal for username: %s' % username)
return
# Try all authorization principal authenticators.
for principalAuthenticator in self.principalAuthenticatorList:
self.logger.debug('Attempting to authenticate %s by %s' % (username, principalAuthenticator.getName()))
authenticatedPrincipal = principalAuthenticator.authenticatePrincipal(principal, password)
if authenticatedPrincipal is not None:
self.logger.debug('Adding authorization principal %s to the cache, authenticated by %s' % (principal.getName(),principalAuthenticator.getName()))
self.objectCache.put(username, authenticatedPrincipal)
return authenticatedPrincipal
return None
def removeAuthorizationPrincipal(self, username):
""" Clear principal from the cache. """
self.objectCache.remove(username)
#######################################################################
# Testing.
if __name__ == '__main__':
am = AuthorizationPrincipalManager.getInstance()
    authPrincipal = am.getAuthenticatedAuthorizationPrincipal('sveseli', 'sv')
print 'Auth principal: ', authPrincipal
#!/usr/bin/env python
from dm.common.constants import dmRole
from dm.common.utility.loggingManager import LoggingManager
class AuthorizationPrincipalRetriever:
def __init__(self, name=None):
self.adminRoleName = dmRole.DM_ADMIN_ROLE
self.name = name
self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)
def getName(self):
return self.name
def setAdminRoleName(self, adminRoleName):
self.adminRoleName = adminRoleName
def getAuthorizationPrincipal(self, username):
return None
def checkAutorizationPrincipalRole(self, principal):
if principal is None or self.adminRoleName is None:
return
userInfo = principal.getUserInfo()
if userInfo is None:
return
for userSystemRoleName in userInfo.get('userSystemRoleNameList', []):
if userSystemRoleName == self.adminRoleName:
principal.setRole(dmRole.DM_ADMIN_ROLE)
#######################################################################
# Testing.
if __name__ == '__main__':
pass
#!/usr/bin/env python
from dm.common.utility.cryptUtility import CryptUtility
from authorizationPrincipalAuthenticator import AuthorizationPrincipalAuthenticator
class CryptedPasswordPrincipalAuthenticator(AuthorizationPrincipalAuthenticator):
def __init__(self):
AuthorizationPrincipalAuthenticator.__init__(self, self.__class__.__name__)
def authenticatePrincipal(self, principal, password):
if principal is not None:
principalToken = principal.getToken()
if principalToken is not None and len(principalToken):
if CryptUtility.verifyPasswordWithPbkdf2(password, principalToken):
self.logger.debug('Authentication successful for %s' % principal.getName())
return principal
else:
self.logger.debug('Authentication failed for %s' % principal.getName())
else:
self.logger.debug('Token is empty for %s, authentication not performed' % principal.getName())
return None
#######################################################################
# Testing.
if __name__ == '__main__':
pass
#!/usr/bin/env python
from dm.common.constants import dmRole
from dm.common.objects.authorizationPrincipal import AuthorizationPrincipal
from dm.common.db.api.userDbApi import UserDbApi
from authorizationPrincipalRetriever import AuthorizationPrincipalRetriever
class DbPrincipalRetriever(AuthorizationPrincipalRetriever):
def __init__(self):
AuthorizationPrincipalRetriever.__init__(self, self.__class__.__name__)
self.dbApi = UserDbApi()
def getAuthorizationPrincipal(self, username):
principal = None
try:
user = self.dbApi.getUserWithPasswordByUsername(username)
principal = AuthorizationPrincipal(name=username, token=user.get('password'))
principal.setRole(dmRole.DM_USER_ROLE)
principal.setUserInfo(user)
self.checkAutorizationPrincipalRole(principal)
except Exception, ex:
self.logger.debug(ex)
return principal
#######################################################################
# Testing.
if __name__ == '__main__':
pass
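
# ----------------------------------------------------------------------
# A minimal sketch, assuming a populated user database and that the
# retriever/authenticator classes are importable from dm.common.service.auth
# (the package path is not shown above): the retriever supplies the principal
# with its stored password token, and the authenticator verifies the offered
# password against that token. The username and password are hypothetical.
from dm.common.service.auth.dbPrincipalRetriever import DbPrincipalRetriever
from dm.common.service.auth.cryptedPasswordPrincipalAuthenticator import CryptedPasswordPrincipalAuthenticator

if __name__ == '__main__':
    retriever = DbPrincipalRetriever()
    authenticator = CryptedPasswordPrincipalAuthenticator()
    principal = retriever.getAuthorizationPrincipal('someuser')
    if principal is not None:
        # Returns the principal on success, None on failure.
        print 'Authenticated: ', authenticator.authenticatePrincipal(principal, 'somepassword')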
#!/usr/bin/env python
from dm.common.utility.ldapUtility import LdapUtility
from authorizationPrincipalAuthenticator import AuthorizationPrincipalAuthenticator
class LdapPasswordPrincipalAuthenticator(AuthorizationPrincipalAuthenticator):
def __init__(self, serverUrl, dnFormat):
AuthorizationPrincipalAuthenticator.__init__(self, self.__class__.__name__)
self.ldapUtility = LdapUtility(serverUrl, dnFormat)
def authenticatePrincipal(self, principal, password):
if principal is not None:
try:
self.logger.debug('Checking credentials for %s' % principal.getName())
self.ldapUtility.checkCredentials(principal.getName(), password)
return principal
except Exception, ex:
self.logger.debug(ex)
return None
#######################################################################
# Testing.
if __name__ == '__main__':
pass
#!/usr/bin/env python
from dm.common.constants import dmRole
from dm.common.objects.authorizationPrincipal import AuthorizationPrincipal
from dm.common.utility.cryptUtility import CryptUtility
from authorizationPrincipalRetriever import AuthorizationPrincipalRetriever
class NoOpPrincipalRetriever(AuthorizationPrincipalRetriever):
def __init__(self):
AuthorizationPrincipalRetriever.__init__(self, self.__class__.__name__)
def getAuthorizationPrincipal(self, username):
# Set password to be the same as username
noOpPassword = CryptUtility.cryptPasswordWithPbkdf2(username)
principal = AuthorizationPrincipal(name=username, token=noOpPassword)
principal.setRole(dmRole.DM_USER_ROLE)
if self.adminRoleName is not None:
principal.setRole(dmRole.DM_ADMIN_ROLE)
return principal
#######################################################################
# Testing.
if __name__ == '__main__':
pass
#!/usr/bin/env python
from dm.common.utility.loggingManager import LoggingManager
class SessionManager:
def __init__(self, name=None):
self.name = name
self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)
def getName(self):
return self.name
def addSession(self, sessionId, sessionInfo):
return None
def checkSession(self, sessionId):
return None
#######################################################################
# Testing.
if __name__ == '__main__':
pass
#!/usr/bin/env python
import os
from dm.common.utility.configurationManager import ConfigurationManager
from dm.common.objects.dmObjectManager import DmObjectManager
from dm.common.utility.objectUtility import ObjectUtility
from dm.common.utility.singleton import Singleton
from dm.common.exceptions.objectNotFound import ObjectNotFound
class SingleSignOnManager(DmObjectManager, Singleton):
DEFAULT_SESSION_TIMEOUT_IN_SECONDS = 3600 # seconds
CONFIG_SECTION_NAME = 'SingleSignOnManager'
SESSION_MANAGER_KEY = 'sessionManager'
SESSION_TIMEOUT_IN_SECONDS_KEY = 'sessionTimeoutInSeconds'
# Singleton instance.
__instance = None
def __init__(self):
if SingleSignOnManager.__instance:
return
SingleSignOnManager.__instance = self
DmObjectManager.__init__(self)
self.configurationManager = ConfigurationManager.getInstance()
self.sessionManager = None
self.configure()
def configure(self):
configItems = self.configurationManager.getConfigItems(SingleSignOnManager.CONFIG_SECTION_NAME)
self.logger.debug('Got config items: %s' % configItems)
if not configItems:
return
sessionTimeout = self.configurationManager.getConfigOption(SingleSignOnManager.CONFIG_SECTION_NAME, SingleSignOnManager.SESSION_TIMEOUT_IN_SECONDS_KEY, SingleSignOnManager.DEFAULT_SESSION_TIMEOUT_IN_SECONDS)
self.logger.debug('Session timeout: %s' % sessionTimeout)
# Create session manager
sessionManager = self.configurationManager.getConfigOption(SingleSignOnManager.CONFIG_SECTION_NAME, SingleSignOnManager.SESSION_MANAGER_KEY)
(moduleName,className,constructor) = self.configurationManager.getModuleClassConstructorTuple(sessionManager, SingleSignOnManager)
self.logger.debug('Creating session manager class: %s' % className)
self.sessionManager = ObjectUtility.createObjectInstance(moduleName, className, constructor)
def addSession(self, sessionId, sessionInfo):
if self.sessionManager:
self.logger.debug('Adding session id %s: %s' % (sessionId,sessionInfo))
try:
return self.sessionManager.addSession(sessionId, sessionInfo)
except Exception, ex:
self.logger.error(ex)
return None
def checkSession(self, sessionId):
if self.sessionManager:
self.logger.debug('Checking session id: %s' % sessionId)
try:
return self.sessionManager.checkSession(sessionId)
except ObjectNotFound, ex:
self.logger.debug(ex)
except Exception, ex:
self.logger.error(ex)
return None
#######################################################################
# Testing.
if __name__ == '__main__':
    ssom = SingleSignOnManager.getInstance()
    print 'Single sign-on manager: ', ssom
#!/usr/bin/env python
#
# Base DM controller class.
#
#######################################################################
import cherrypy
import httplib
import json
from sys import exc_info
from dm.common.utility.loggingManager import LoggingManager
from dm.common.constants import dmStatus
from dm.common.constants import dmHttpHeaders
from dm.common.exceptions.dmException import DmException
from dm.common.exceptions import dmExceptionMap
from dm.common.exceptions.internalError import InternalError
#######################################################################
class DmController(object):
""" Base controller class. """
def __init__(self):
self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)
@classmethod
def getLogger(cls):
logger = LoggingManager.getInstance().getLogger(cls.__name__)
return logger
@classmethod
def addDmResponseHeaders(cls, status=dmStatus.DM_OK, msg='Success', exceptionType=None):
cherrypy.response.headers[dmHttpHeaders.DM_STATUS_CODE_HTTP_HEADER] = status
cherrypy.response.headers[dmHttpHeaders.DM_STATUS_MESSAGE_HTTP_HEADER] = msg
if exceptionType is not None:
cherrypy.response.headers[dmHttpHeaders.DM_EXCEPTION_TYPE_HTTP_HEADER] = exceptionType
@classmethod
def addDmSessionRoleHeaders(cls, role):
cherrypy.response.headers[dmHttpHeaders.DM_SESSION_ROLE_HTTP_HEADER] = role
@classmethod
def addDmExceptionHeaders(cls, ex):
cls.handleException(ex)
@classmethod
def handleCpException(cls):
cherrypy.response.status = httplib.OK
ex = exc_info()[1]
        if ex is None:
ex = InternalError('Internal Webservice Error')
cls.handleException(ex)
@classmethod
def handleException(cls, ex):
exClass = ex.__class__.__name__.split('.')[-1]
status = None
msg = '%s' % ex
msg = msg.split('\n')[0]
for code in dmExceptionMap.DM_EXCEPTION_MAP.keys():
exStr = dmExceptionMap.DM_EXCEPTION_MAP.get(code).split('.')[-1]
if exStr == exClass:
status = code
if not status:
status = dmStatus.DM_ERROR
cls.addDmResponseHeaders(status, msg, exClass)
@classmethod
def formatJsonResponse(cls, response):
cherrypy.response.headers['Content-Type'] = 'application/json'
return '%s' % (response)
@classmethod
def toJson(cls, o):
return json.dumps(o)
@classmethod
def fromJson(cls, s):
return json.loads(s)
@classmethod
def listToJson(cls, dmObjectList):
jsonList = []
for dmObject in dmObjectList:
jsonList.append(dmObject.getDictRep(keyList='__all__'))
return json.dumps(jsonList)
@classmethod
def getSessionUser(cls):
return cherrypy.serving.session.get('user')
@classmethod
def getSessionUsername(cls):
return cherrypy.serving.session.get('_cp_username')
# Exception decorator for all exposed method calls
@classmethod
def execute(cls, func):
def decorate(*args, **kwargs):
try:
response = func(*args, **kwargs)
except DmException, ex:
cls.getLogger().error('%s' % ex)
cls.handleException(ex)
response = ex.getFullJsonRep()
except Exception, ex:
cls.getLogger().error('%s' % ex)
cls.handleException(ex)
response = InternalError(ex).getFullJsonRep()
return cls.formatJsonResponse(response)
return decorate
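
# ----------------------------------------------------------------------
# A minimal sketch of a concrete controller using the execute() decorator
# above. The ExperimentController name and its method are hypothetical;
# cherrypy.expose marks the method as web-accessible, and execute()
# converts any DmException into DM status headers plus a JSON error body.
import cherrypy
from dm.common.service.dmController import DmController   # assumed module path

class ExperimentController(DmController):

    @cherrypy.expose
    @DmController.execute
    def getExperimentByName(self, name, **kwargs):
        # A real controller would delegate to a session or db API here.
        return self.toJson({'name' : name})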