Compare revisions

Changes are shown as if the source revision was being merged into the target revision.
Showing 884 additions and 0 deletions
#!/usr/bin/env python

import os
import time

from dm.common.utility.loggingManager import LoggingManager
from dm.common.utility.timeUtility import TimeUtility
from dm.common.processing.plugins.fileProcessor import FileProcessor
from dm.common.mongodb.api.fileMongoDbApi import FileMongoDbApi


class MongoDbFileCatalogPlugin(FileProcessor):

    def __init__(self):
        FileProcessor.__init__(self)
        self.fileMongoDbApi = FileMongoDbApi()
        self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)

    def processFile(self, fileInfo):
        experimentFilePath = fileInfo.get('experimentFilePath')
        experimentName = fileInfo.get('experimentName')
        self.logger.debug('Processing file %s for experiment %s' % (fileInfo, experimentName))

        daqInfo = fileInfo.get('daqInfo')
        storageDirectory = daqInfo.get('storageDirectory')
        storageHost = daqInfo.get('storageHost')
        storageFilePath = os.path.join(storageDirectory, experimentFilePath)
        fileProcessingTime = time.time()
        fileProcessingTimeStamp = TimeUtility.formatLocalTimeStamp(fileProcessingTime)

        # Prepare cataloging entry
        fileInfo2 = {}
        for key in ['md5Sum', 'fileSize', 'fileCreationTime', 'fileCreationTimeStamp']:
            if key in fileInfo:
                fileInfo2[key] = fileInfo.get(key, '')
        fileInfo2['fileName'] = os.path.basename(experimentFilePath)
        fileInfo2['storageDirectory'] = storageDirectory
        fileInfo2['storageHost'] = storageHost
        fileInfo2['storageFilePath'] = storageFilePath
        fileInfo2['experimentFilePath'] = experimentFilePath
        fileInfo2['experimentName'] = experimentName
        fileInfo2['fileProcessingTime'] = fileProcessingTime
        fileInfo2['fileProcessingTimeStamp'] = fileProcessingTimeStamp

        self.logger.debug('Daq info: %s' % (daqInfo))
        fileInfo2.update(daqInfo)
        if 'id' in daqInfo:
            fileInfo2['daqId'] = daqInfo.get('id')
            del fileInfo2['id']
        self.fileMongoDbApi.updateOrAddExperimentFile(fileInfo2)


#######################################################################
# Testing.

if __name__ == '__main__':
    pass
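For reference, a minimal sketch of the kind of fileInfo dictionary that MongoDbFileCatalogPlugin.processFile() expects. The key names are taken from the lookups in the plugin above; the concrete values (experiment name, paths, host) are hypothetical.

#!/usr/bin/env python
# Hypothetical input for MongoDbFileCatalogPlugin.processFile(); values are made up.
fileInfo = {
    'experimentName': 'exp-001',             # hypothetical experiment name
    'experimentFilePath': 'scan1/data.h5',   # path relative to the experiment root
    'md5Sum': 'd41d8cd98f00b204e9800998ecf8427e',
    'fileSize': 1048576,
    'daqInfo': {
        'id': '12345',                       # stored as 'daqId' in the catalog entry
        'storageDirectory': '/data/exp-001', # hypothetical storage location
        'storageHost': 'storage01',          # hypothetical storage host
    },
}
# MongoDbFileCatalogPlugin().processFile(fileInfo) would then write a catalog
# document with fileName, storageFilePath, processing time stamps, and the DAQ
# metadata to MongoDB via FileMongoDbApi.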
#!/usr/bin/env python

import os
import time

from dm.common.utility.loggingManager import LoggingManager
from dm.common.utility.timeUtility import TimeUtility
from dm.common.processing.plugins.fileProcessor import FileProcessor
from dm.common.mongodb.api.fileMongoDbApi import FileMongoDbApi
from dm.common.utility.dmSubprocess import DmSubprocess


class ScriptProcessingPlugin(FileProcessor):

    PROCESSING_SCRIPT_KEY = 'processingScript'

    def __init__(self):
        FileProcessor.__init__(self)
        self.fileMongoDbApi = FileMongoDbApi()
        self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)

    def processFile(self, fileInfo):
        experimentName = fileInfo.get('experimentName')
        experimentFilePath = fileInfo.get('experimentFilePath')
        daqInfo = fileInfo.get('daqInfo', {})
        processingScript = daqInfo.get(self.PROCESSING_SCRIPT_KEY)
        if not processingScript:
            self.logger.debug('Ignoring file %s for experiment %s' % (experimentFilePath, experimentName))
            return
        self.logger.debug('Processing file %s for experiment %s' % (experimentFilePath, experimentName))

        storageDirectory = fileInfo.get('storageDirectory')
        storageFilePath = os.path.join(storageDirectory, experimentFilePath)

        # Process file
        try:
            p = DmSubprocess('%s %s' % (processingScript, storageFilePath))
            p.run()
            stdOut = p.getStdOut()
        except Exception as ex:
            self.logger.error('Cannot process file %s for experiment %s: %s' % (experimentFilePath, experimentName, ex))
            return

        # Prepare cataloging entry
        fileInfo2 = {}
        fileInfo2['fileName'] = os.path.basename(experimentFilePath)
        fileInfo2['experimentName'] = experimentName
        fileInfo2['processingScriptOutput'] = '%s' % stdOut.strip()
        self.fileMongoDbApi.updateOrAddExperimentFile(fileInfo2)


#######################################################################
# Testing.

if __name__ == '__main__':
    pass
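The processing script named under the 'processingScript' key is invoked as '<script> <storageFilePath>', and whatever it writes to standard output is cataloged as 'processingScriptOutput'. A minimal, hypothetical processing script could therefore look like this (the file-size report is only a stand-in for real analysis):

#!/usr/bin/env python
# Hypothetical processing script called by ScriptProcessingPlugin as:
#   <processingScript> <storageFilePath>
# Its stdout is stored in the catalog under 'processingScriptOutput'.
import os
import sys

if __name__ == '__main__':
    storageFilePath = sys.argv[1]
    # Example work: report the file size; a real script would run its analysis here.
    print('processed %s (%d bytes)' % (storageFilePath, os.path.getsize(storageFilePath)))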
#!/usr/bin/env python

import os
import time

from dm.common.utility.loggingManager import LoggingManager
from dm.common.utility.timeUtility import TimeUtility
from dm.common.processing.plugins.fileProcessor import FileProcessor
from dm.common.mongodb.api.fileMongoDbApi import FileMongoDbApi


class SddsParameterProcessingPlugin(FileProcessor):

    PROCESS_SDDS_PARAMETERS_KEY = 'processSddsParameters'

    def __init__(self):
        FileProcessor.__init__(self)
        self.fileMongoDbApi = FileMongoDbApi()
        self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)

    def processFile(self, fileInfo):
        experimentFilePath = fileInfo.get('experimentFilePath')
        experimentName = fileInfo.get('experimentName')
        daqInfo = fileInfo.get('daqInfo', {})
        processSddsParameters = daqInfo.get(self.PROCESS_SDDS_PARAMETERS_KEY)
        if not processSddsParameters:
            self.logger.debug('Ignoring file %s for experiment %s' % (experimentFilePath, experimentName))
            return
        self.logger.debug('Processing file %s for experiment %s' % (fileInfo, experimentName))

        storageDirectory = fileInfo.get('storageDirectory')
        storageFilePath = os.path.join(storageDirectory, experimentFilePath)

        # Load file
        try:
            import sddsdata
            from sdds import SDDS
            s = SDDS(0)
            self.logger.debug('Loading file %s for experiment %s' % (experimentFilePath, experimentName))
            s.load(storageFilePath)
        except Exception as ex:
            self.logger.error('Cannot process file %s for experiment %s: %s' % (experimentFilePath, experimentName, ex))
            return

        # Prepare cataloging entry
        fileInfo2 = {}
        fileInfo2['fileName'] = os.path.basename(experimentFilePath)
        fileInfo2['experimentName'] = experimentName
        for i in range(0, len(s.parameterName)):
            parameterName = s.parameterName[i]
            parameterData = s.parameterData[i][0]
            fileInfo2[parameterName] = parameterData
        self.fileMongoDbApi.updateOrAddExperimentFile(fileInfo2)
        self.logger.debug('SDDS terminate file %s for experiment %s' % (experimentFilePath, experimentName))
        sddsdata.Terminate(0)


#######################################################################
# Testing.

if __name__ == '__main__':
    pass
#!/usr/bin/env python

import os
import time

from dm.common.utility.loggingManager import LoggingManager
from dm.common.utility.timeUtility import TimeUtility
from dm.common.processing.plugins.fileProcessor import FileProcessor
from dm.common.utility.dmSubprocess import DmSubprocess


class SgeJobSubmissionPlugin(FileProcessor):

    SGE_JOB_SCRIPT_KEY = 'sgeJobScript'

    def __init__(self, sgeRoot):
        FileProcessor.__init__(self)
        self.sgeRoot = sgeRoot
        self.logger = LoggingManager.getInstance().getLogger(self.__class__.__name__)

    def processFile(self, fileInfo):
        experimentName = fileInfo.get('experimentName')
        experimentFilePath = fileInfo.get('experimentFilePath')
        daqInfo = fileInfo.get('daqInfo', {})
        sgeJobScript = daqInfo.get(self.SGE_JOB_SCRIPT_KEY)
        if not sgeJobScript:
            self.logger.debug('Ignoring file %s for experiment %s' % (experimentFilePath, experimentName))
            return
        self.logger.debug('Processing file %s for experiment %s' % (experimentFilePath, experimentName))

        storageDirectory = fileInfo.get('storageDirectory')
        storageFilePath = os.path.join(storageDirectory, experimentFilePath)

        # Process file
        try:
            p = DmSubprocess('. %s/default/common/settings.sh ; qsub -v DM_EXPERIMENT_NAME=%s %s %s' % (self.sgeRoot, experimentName, sgeJobScript, storageFilePath))
            p.run()
        except Exception as ex:
            self.logger.error('Cannot process file %s for experiment %s: %s' % (experimentFilePath, experimentName, ex))
            return


#######################################################################
# Testing.

if __name__ == '__main__':
    pass
#!/usr/bin/env python

#
# Implementation for user info controller.
#

from dm.common.objects.dmObject import DmObject
from dm.common.objects.dmObjectManager import DmObjectManager
from dm.common.db.api.userDbApi import UserDbApi
from dm.ds_web_service.service.impl.experimentManager import ExperimentManager


class UserInfoSessionControllerImpl(DmObjectManager):
    """ User info controller implementation class. """

    def __init__(self):
        DmObjectManager.__init__(self)
        self.userDbApi = UserDbApi()

    def getUsers(self):
        return self.userDbApi.getUsers()

    def getUserById(self, id):
        return self.userDbApi.getUserById(id)

    def getUserByUsername(self, username):
        return self.userDbApi.getUserByUsername(username)

    def addUserExperimentRole(self, username, experimentName, roleName):
        userInfo = self.userDbApi.addUserExperimentRole(username, experimentName, roleName)
        ExperimentManager.getInstance().addUserToGroup(username, experimentName)
        return userInfo

    def deleteUserExperimentRole(self, username, experimentName, roleName):
        userInfo = self.userDbApi.deleteUserExperimentRole(username, experimentName, roleName)
        ExperimentManager.getInstance().deleteUserFromGroup(username, experimentName)
        return userInfo
#!/usr/bin/env python

import cherrypy

from dm.common.service.dmSessionController import DmSessionController
# InvalidRequest is raised below; assumed to live in the dm.common exceptions package.
from dm.common.exceptions.invalidRequest import InvalidRequest
from dm.ds_web_service.service.impl.userInfoSessionControllerImpl import UserInfoSessionControllerImpl


class UserInfoSessionController(DmSessionController):

    def __init__(self):
        DmSessionController.__init__(self)
        self.userInfoSessionControllerImpl = UserInfoSessionControllerImpl()

    @cherrypy.expose
    @DmSessionController.require(DmSessionController.isAdministrator())
    @DmSessionController.execute
    def getUsers(self, **kwargs):
        return self.listToJson(self.userInfoSessionControllerImpl.getUsers())

    @cherrypy.expose
    @DmSessionController.require(DmSessionController.isAdministrator())
    @DmSessionController.execute
    def getUserById(self, id, **kwargs):
        if not id:
            raise InvalidRequest('Invalid id provided.')
        response = self.userInfoSessionControllerImpl.getUserById(id).getFullJsonRep()
        self.logger.debug('Returning user info for %s: %s' % (id, response))
        return response

    @cherrypy.expose
    @DmSessionController.require(DmSessionController.isAdministrator())
    @DmSessionController.execute
    def getUserByUsername(self, username, **kwargs):
        if not username:
            raise InvalidRequest('Invalid username provided.')
        response = self.userInfoSessionControllerImpl.getUserByUsername(username).getFullJsonRep()
        self.logger.debug('Returning user info for %s: %s' % (username, response))
        return response

    @cherrypy.expose
    @DmSessionController.require(DmSessionController.isAdministrator())
    @DmSessionController.execute
    def addUserExperimentRole(self, username, experimentName, roleName, **kwargs):
        if not username:
            raise InvalidRequest('Invalid username provided.')
        if not experimentName:
            raise InvalidRequest('Invalid experiment name provided.')
        if not roleName:
            raise InvalidRequest('Invalid role name provided.')
        response = self.userInfoSessionControllerImpl.addUserExperimentRole(username, experimentName, roleName).getFullJsonRep()
        return response

    @cherrypy.expose
    @DmSessionController.require(DmSessionController.isAdministrator())
    @DmSessionController.execute
    def deleteUserExperimentRole(self, username, experimentName, roleName, **kwargs):
        if not username:
            raise InvalidRequest('Invalid username provided.')
        if not experimentName:
            raise InvalidRequest('Invalid experiment name provided.')
        if not roleName:
            raise InvalidRequest('Invalid role name provided.')
        response = self.userInfoSessionControllerImpl.deleteUserExperimentRole(username, experimentName, roleName).getFullJsonRep()
        return response
#!/usr/bin/env python

#
# User route descriptor.
#

from dm.common.utility.configurationManager import ConfigurationManager
from userInfoSessionController import UserInfoSessionController


class UserRouteDescriptor:

    @classmethod
    def getRoutes(cls):
        contextRoot = ConfigurationManager.getInstance().getContextRoot()

        # Static instances shared between different routes
        userInfoSessionController = UserInfoSessionController()

        # Define routes.
        routes = [
            # Get user info list
            {
                'name' : 'getUsers',
                'path' : '%s/users' % contextRoot,
                'controller' : userInfoSessionController,
                'action' : 'getUsers',
                'method' : ['GET']
            },
            # Get user by id
            {
                'name' : 'getUserById',
                'path' : '%s/users/:(id)' % contextRoot,
                'controller' : userInfoSessionController,
                'action' : 'getUserById',
                'method' : ['GET']
            },
            # Get user by username
            {
                'name' : 'getUserByUsername',
                'path' : '%s/usersByUsername/:(username)' % contextRoot,
                'controller' : userInfoSessionController,
                'action' : 'getUserByUsername',
                'method' : ['GET']
            },
            # Add user experiment role
            {
                'name' : 'addUserExperimentRole',
                'path' : '%s/usersByExperiment/:(username)/:(experimentName)/:(roleName)' % contextRoot,
                'controller' : userInfoSessionController,
                'action' : 'addUserExperimentRole',
                'method' : ['POST']
            },
            # Delete user experiment role
            {
                'name' : 'deleteUserExperimentRole',
                'path' : '%s/usersByExperiment/:(username)/:(experimentName)/:(roleName)' % contextRoot,
                'controller' : userInfoSessionController,
                'action' : 'deleteUserExperimentRole',
                'method' : ['DELETE']
            },
        ]

        return routes
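For orientation, a hedged client-side sketch of how the routes above map onto HTTP calls against the user info controller. The base URL, port, context root, badge-derived username, and role name are placeholders, and a real client would first have to establish an authenticated administrator session through the DmSessionController mechanism.

#!/usr/bin/env python
# Hypothetical client sketch; URL, context root, username, and role are placeholders.
import requests

BASE_URL = 'https://dm-host.example.gov:22236'   # placeholder host and port
CONTEXT_ROOT = '/dm'                             # placeholder context root

session = requests.Session()
# ... session login against the DS web service would happen here ...

# GET <contextRoot>/users -> UserInfoSessionController.getUsers
users = session.get(BASE_URL + CONTEXT_ROOT + '/users', verify=False)

# GET <contextRoot>/usersByUsername/:(username) -> getUserByUsername
user = session.get(BASE_URL + CONTEXT_ROOT + '/usersByUsername/d123456', verify=False)

# POST <contextRoot>/usersByExperiment/:(username)/:(experimentName)/:(roleName)
#   -> addUserExperimentRole
session.post(BASE_URL + CONTEXT_ROOT + '/usersByExperiment/d123456/exp-001/PI', verify=False)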
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="src" path="src"/>
    <classpathentry kind="src" path="resources"/>
    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
    <classpathentry kind="lib" path="/home/phoebus/BFROSIK/trunk/tools/AccountSynchronizer/lib/ojdbc7.jar"/>
    <classpathentry kind="lib" path="/home/phoebus/BFROSIK/trunk/tools/AccountSynchronizer/lib/postgresql-9.4-1201.jdbc4.jar"/>
    <classpathentry kind="lib" path="/home/phoebus/BFROSIK/trunk/tools/AccountSynchronizer/lib/unboundid-ldapsdk-me.jar"/>
    <classpathentry kind="output" path="bin"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>accountSynchronizer</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
</projectDescription>
Main-Class: gov.anl.aps.dm.sync.Synchronizer
Class-Path: . ../../lib/postgresql-9.4-1201.jdbc4.jar ../../lib/ojdbc7.jar
### log configuration
log.file = accountSynchronizer%g.log
log.limit = 6000
log.count = 2
###
### Oracle database settings
###
oracle.database.connection = jdbc:oracle:thin:@ra.aps.anl.gov:1527:aps1
oracle.database.username = glob_conn
#oracle.database.password =
oracle.database.table = DCC.FL$03_BL_APV_VIEW_V2
dm.database.connection = jdbc:postgresql://127.0.0.1:11136/dm
dm.database.username = dm
### dm.database.password =
# The username prefix that gets prepended to every badge number.
user.userid.prefix = d
package gov.anl.aps.dm.sync;

public class DmUser {

    String username;
    String firstName;
    String lastName;
    String middleName;
    String email;
    String badge;
    String password;

    void clear() {
        username = null;
        firstName = null;
        lastName = null;
        middleName = null;
        email = null;
        badge = null;
        password = null;
    }
}
package gov.anl.aps.dm.sync;

import java.sql.DriverManager;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

public class OracleConnection {

    Connection connection = null;
    private Logger logger;

    public OracleConnection(Logger logger) {
        this.logger = logger;
    }

    void connect(Properties config) throws SQLException {
        try {
            DriverManager.registerDriver(new oracle.jdbc.OracleDriver());
        } catch (SQLException e) {
            logger.log(Level.SEVERE, "Can't register Oracle driver", e);
            throw e;
        }
        // logger.log(Level.INFO, "Oracle JDBC Driver Registered!");
        try {
            connection = DriverManager.getConnection(
                    config.getProperty("oracle.database.connection"),
                    config.getProperty("oracle.database.username"),
                    config.getProperty("oracle.database.password"));
        } catch (SQLException e) {
            logger.log(Level.SEVERE, "Can't connect to Oracle database", e);
            throw e;
        }
    }

    ResultSet getUsers(String table) throws SQLException {
        try {
            Statement statement = connection.createStatement();
            return statement.executeQuery("select * from " + table);
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "table {0} does not exist", table);
            throw ex;
        }
    }

    void close() {
        try {
            connection.close();
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "problem closing Oracle connection ", ex);
        }
    }
}
package gov.anl.aps.dm.sync;

import java.sql.DriverManager;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;

public class PsqlConnection {

    Connection connection = null;
    private final Map<String, Timestamp> dmUsers = new HashMap<>(); // <username, lastUpdate>
    private PreparedStatement insertQuery;
    private PreparedStatement updateQuery;
    private final String userTable = "user_info";
    private Logger logger;

    public PsqlConnection(Logger logger) {
        this.logger = logger;
    }

    void connect(Properties config) throws SQLException {
        try {
            Class.forName("org.postgresql.Driver");
        } catch (ClassNotFoundException e) {
            logger.log(Level.SEVERE, "Can't register PostgreSQL driver", e);
            System.exit(0);
        }
        // logger.log(Level.INFO, "PostgreSQL JDBC Driver Registered!");
        try {
            connection = DriverManager.getConnection(
                    config.getProperty("dm.database.connection"),
                    config.getProperty("dm.database.username"),
                    config.getProperty("dm.database.password"));
        } catch (SQLException e) {
            logger.log(Level.SEVERE, "Can't connect to PostgreSQL database");
            throw e;
        }
    }

    void init() throws SQLException {
        Statement statement = null;
        try {
            statement = connection.createStatement();
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "can't create statement for PostgreSQL connection");
            throw ex;
        }
        ResultSet results = null;
        try {
            if (statement != null) {
                results = statement.executeQuery("SELECT * FROM " + userTable + ";");
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "can't execute SELECT query from " + userTable + " table");
            throw ex;
        }
        try {
            if (results != null) {
                while (results.next()) {
                    String username = results.getString("username");
                    Timestamp lastUpdate = results.getTimestamp("last_update");
                    dmUsers.put(username, lastUpdate);
                }
                results.close();
            }
            if (statement != null) {
                statement.close();
            }
        } catch (SQLException ex) {
            logger.log(Level.WARNING, "query results processing error ", ex);
        }
        // prepare statements
        try {
            insertQuery = connection.prepareStatement("INSERT INTO user_info (username, first_name, last_name, middle_name, email, badge, is_local_user, last_update) VALUES (?, ?, ?, ?, ?, ?, ?, ?)");
            updateQuery = connection.prepareStatement("UPDATE user_info SET email = ?, last_name = ?, last_update = ? WHERE username = ?");
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "Can't get prepared statement from connection ", ex);
            throw ex;
        }
    }

    protected Timestamp getUserLastUpdate(String username) {
        return dmUsers.get(username);
    }

    boolean isUser(String username) {
        return dmUsers.containsKey(username);
    }

    void addUser(DmUser dmuser, Timestamp currentTime) {
        try {
            insertQuery.setString(1, dmuser.username);
            insertQuery.setString(2, dmuser.firstName);
            insertQuery.setString(3, dmuser.lastName);
            insertQuery.setString(4, dmuser.middleName);
            insertQuery.setString(5, dmuser.email);
            insertQuery.setString(6, dmuser.badge);
            insertQuery.setBoolean(7, false);
            insertQuery.setTimestamp(8, currentTime);
            insertQuery.execute();
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "can't execute query to add user ", ex);
        }
    }

    void updateUser(DmUser dmuser, Timestamp currentTime) {
        try {
            updateQuery.setString(1, dmuser.email);
            updateQuery.setString(2, dmuser.lastName);
            updateQuery.setTimestamp(3, currentTime);
            updateQuery.setString(4, dmuser.username);
            updateQuery.executeUpdate();
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "can't execute query to update user", ex);
        }
    }

    void close() {
        try {
            insertQuery.close();
            updateQuery.close();
            connection.close();
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "problem closing PostgreSQL connection ", ex);
        }
    }
}
package gov.anl.aps.dm.sync;

import java.io.IOException;
import java.io.InputStream;
import java.io.FileInputStream;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.Properties;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;

public class Synchronizer {

    private OracleConnection oConnection;
    private PsqlConnection pConnection;
    private final Properties config = new Properties();
    private final Properties tableRows = new Properties();
    private final String oracleTable = "oracleTable.properties";
    private FileHandler fileHandler;
    private static final Logger logger = Logger.getLogger("AccountSynchronizerLogger");

    public static void main(String[] argv) {
        Synchronizer sync = new Synchronizer();
        sync.loadProperties(argv[0]);
        sync.initLogger();
        sync.oConnection = new OracleConnection(logger);
        sync.pConnection = new PsqlConnection(logger);
        sync.connectAndInit();
        sync.synchronize();
    }

    private void loadProperties(String configFile) {
        InputStream configInputStream = null;
        try {
            configInputStream = new FileInputStream(configFile);
        } catch (Exception e) {
            e.printStackTrace();
        }
        if (configInputStream != null) {
            try {
                this.config.load(configInputStream);
            } catch (IOException ex) {
                initDefaultLogger();
                logger.log(Level.SEVERE, "can't load configuration ", ex);
                System.exit(-1);
            }
        } else {
            initDefaultLogger();
            logger.log(Level.SEVERE, "can't find configuration file");
            System.exit(-1);
        }
        InputStream tableInputStream = getClass().getClassLoader().getResourceAsStream(oracleTable);
        if (tableInputStream != null) {
            try {
                this.tableRows.load(tableInputStream);
            } catch (IOException ex) {
                logger.log(Level.SEVERE, "can't load Oracle table rows definitions ", ex);
                System.exit(-1);
            }
        }
    }

    private void initDefaultLogger() {
        try {
            fileHandler = new FileHandler("accountSynchronizer.log");
            logger.addHandler(fileHandler);
            logger.log(Level.WARNING, "Using default logger ");
        } catch (SecurityException | IOException ex) {
            ex.printStackTrace();
            System.exit(-1);
        }
    }

    private void initLogger() {
        String pattern = config.getProperty("log.file");
        int limit = 0;
        int count = 0;
        try {
            String limitProperty = config.getProperty("log.limit");
            limit = Integer.decode(limitProperty);
            count = Integer.decode(config.getProperty("log.count"));
        } catch (NumberFormatException e) {
            initDefaultLogger();
            logger.log(Level.WARNING, "The logger limit or count are not configured properly. ", e);
            System.exit(-1);
        }
        try {
            fileHandler = new FileHandler(pattern, limit, count);
            logger.addHandler(fileHandler);
        } catch (SecurityException | IOException e) {
            e.printStackTrace();
            System.exit(-1);
        }
    }

    private void connectAndInit() {
        try {
            pConnection.connect(config);
        } catch (SQLException e) {
            System.exit(-1);
        }
        try {
            oConnection.connect(config);
        } catch (SQLException e) {
            pConnection.close();
            System.exit(-1);
        }
        try {
            pConnection.init();
        } catch (SQLException e) {
            pConnection.close();
            oConnection.close();
            System.exit(-1);
        }
    }

    private void synchronize() {
        java.util.Calendar cal = java.util.Calendar.getInstance();
        Timestamp current = new Timestamp(cal.getTimeInMillis());
        DmUser dmuser = new DmUser();
        try {
            ResultSet results = oConnection.getUsers(config.getProperty("oracle.database.table"));
            while (results.next()) {
                String badge = results.getString("BADGE_NO");
                String username = config.getProperty("user.userid.prefix") + badge;
                if (pConnection.isUser(username)) {
                    Timestamp userLastUpdate = pConnection.getUserLastUpdate(username);
                    if ((results.getTimestamp("LAST_CHANGE_DATE") == null) || (userLastUpdate == null) || (results.getTimestamp("LAST_CHANGE_DATE").compareTo(userLastUpdate) >= 0)) {
                        // update user
                        dmuser.email = results.getString("EMAIL");
                        dmuser.username = username;
                        dmuser.lastName = results.getString("LAST_NAME");
                        pConnection.updateUser(dmuser, current);
                        dmuser.clear();
                    }
                } else {
                    // add user
                    dmuser.badge = badge;
                    dmuser.email = results.getString("EMAIL");
                    dmuser.firstName = results.getString("FIRST_NAME");
                    dmuser.lastName = results.getString("LAST_NAME");
                    dmuser.middleName = results.getString("MIDDLE_NAME");
                    dmuser.username = username;
                    pConnection.addUser(dmuser, current);
                    dmuser.clear();
                }
            }
        } catch (SQLException ex) {
            logger.log(Level.SEVERE, "table processing error ", ex);
        } finally {
            pConnection.close();
            oConnection.close();
        }
    }
}
File added
File added
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
    <classpathentry kind="src" path="src"/>
    <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
    <classpathentry kind="lib" path="/local/bfrosik/sync/lib/ojdbc7.jar"/>
    <classpathentry kind="lib" path="/local/bfrosik/sync/lib/postgresql-9.4-1201.jdbc4.jar"/>
    <classpathentry kind="lib" path="/local/bfrosik/sync/lib/javax.json-api-1.0.jar"/>
    <classpathentry kind="output" path="bin"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<projectDescription>
    <name>Synchronizer</name>
    <comment></comment>
    <projects>
    </projects>
    <buildSpec>
        <buildCommand>
            <name>org.eclipse.jdt.core.javabuilder</name>
            <arguments>
            </arguments>
        </buildCommand>
    </buildSpec>
    <natures>
        <nature>org.eclipse.jdt.core.javanature</nature>
    </natures>
</projectDescription>
#!/bin/sh
DM_SVN_URL=https://subversion.xray.aps.anl.gov/DataManagement
execute() {
    echo "Executing: $@"
    eval "$@"
}
# load tools
execute svn export $DM_SVN_URL/trunk/tools/ExperimentSynchronizer ExperimentSynchronizer
EXPSYNC_DIR=ExperimentSynchronizer
cd $EXPSYNC_DIR/lib
# load java 7
execute svn export $DM_SVN_URL/support/src/jdk-7u51-linux-x64.tar.gz
JAVA_VERSION=7u51
tar zxf jdk-${JAVA_VERSION}*.tar.gz
cd ../resources
read -p "Enter Data Acquisition Service Host: " DAQ_SERVICE_HOST
read -p "Enter Experiment Data Root Directory (on DAQ): " DAQ_DATA_DIR
read -p "Enter Data Processing Host: " DAQ_REC_SERVICE_HOST
read -p "Enter Experiment Data Root Directory (on data processing node): " DAQ_REC_DATA_DIR
echo "Generating config file"
cmd="cat config.properties.template \
| sed 's?DAQ_CONNECTION?https://$DAQ_SERVICE_HOST:33336/dm?g' \
| sed 's?DATA_DIR?$DAQ_DATA_DIR?g' \
| sed 's?DAQ_REC_CONNECTION?https://$DAQ_REC_SERVICE_HOST:33336/dm?g' \
| sed 's?DATA_REC_DIR?$DAQ_REC_DATA_DIR?g' \
> config.properties"
eval $cmd || exit 1
cd ..
mkdir bin
execute lib/jdk1.7.0_51/bin/javac -classpath lib/*:resources/* -d bin/ src/gov/anl/dm/esafsync/*.java src/gov/anl/dm/esafsync/serviceconn/*.java
mkdir target
execute lib/jdk1.7.0_51/bin/jar cfm target/ExperimentSynchronizer.jar manifest.txt -C bin/ .
chmod 775 run_expSync.sh
cd ..