code | label
---|---|
public Logger getLogger(String loggerName)
{
Logger logger;
//lookup in the cache first
logger = (Logger) cache.get(loggerName);
if(logger == null)
{
try
{
// get the configuration (not from the configurator because this is independent)
logger = createLoggerInstance(loggerName);
if(getBootLogger().isDebugEnabled())
{
getBootLogger().debug("Using logger class '"
+ (getConfiguration() != null ? getConfiguration().getLoggerClass() : null)
+ "' for " + loggerName);
}
// configure the logger
getBootLogger().debug("Initializing logger instance " + loggerName);
logger.configure(conf);
}
catch(Throwable t)
{
// do reassign check and signal logger creation failure
reassignBootLogger(true);
logger = getBootLogger();
getBootLogger().error("[" + this.getClass().getName()
+ "] Could not initialize logger " + (conf != null ? conf.getLoggerClass() : null), t);
}
//cache it so we can get it faster the next time
cache.put(loggerName, logger);
// do reassign check
reassignBootLogger(false);
}
return logger;
} | java |
private Logger createLoggerInstance(String loggerName) throws Exception
{
Class loggerClass = getConfiguration().getLoggerClass();
Logger log = (Logger) ClassHelper.newInstance(loggerClass, String.class, loggerName);
log.configure(getConfiguration());
return log;
} | java |
private Field getFieldRecursive(Class c, String name) throws NoSuchFieldException
{
try
{
return c.getDeclaredField(name);
}
catch (NoSuchFieldException e)
{
// if field could not be found in the inheritance hierarchy, signal error
if ((c == Object.class) || (c.getSuperclass() == null) || c.isInterface())
{
throw e;
}
// if field could not be found in class c try in superclass
else
{
return getFieldRecursive(c.getSuperclass(), name);
}
}
} | java |
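// A minimal, self-contained usage sketch for the recursive field lookup above; the
// BaseEntity/ChildEntity classes and the findField helper are hypothetical illustrations only,
// showing how a private field declared in a superclass can be located and then read.
public class FieldLookupDemo
{
    static class BaseEntity
    {
        private String id = "base-42"; // private field declared only in the superclass
    }

    static class ChildEntity extends BaseEntity
    {
    }

    // same strategy as getFieldRecursive: try the class itself, then walk up the hierarchy
    static java.lang.reflect.Field findField(Class c, String name) throws NoSuchFieldException
    {
        try
        {
            return c.getDeclaredField(name);
        }
        catch (NoSuchFieldException e)
        {
            if (c.getSuperclass() == null)
            {
                throw e;
            }
            return findField(c.getSuperclass(), name);
        }
    }

    public static void main(String[] args) throws Exception
    {
        java.lang.reflect.Field idField = findField(ChildEntity.class, "id"); // found on BaseEntity
        idField.setAccessible(true);                                          // required, the field is private
        System.out.println(idField.get(new ChildEntity()));                   // prints "base-42"
    }
} | java |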
protected String buildErrorSetMsg(Object obj, Object value, Field aField)
{
String eol = SystemUtils.LINE_SEPARATOR;
StringBuffer buf = new StringBuffer();
buf
.append(eol + "[try to set 'object value' in 'target object'")
.append(eol + "target obj class: " + (obj != null ? obj.getClass().getName() : null))
.append(eol + "target field name: " + (aField != null ? aField.getName() : null))
.append(eol + "target field type: " + (aField != null ? aField.getType() : null))
.append(eol + "target field declared in: " + (aField != null ? aField.getDeclaringClass().getName() : null))
.append(eol + "object value class: " + (value != null ? value.getClass().getName() : null))
.append(eol + "object value: " + (value != null ? value : null))
.append(eol + "]");
return buf.toString();
} | java |
protected PersistenceBrokerInternal createNewBrokerInstance(PBKey key) throws PBFactoryException
{
if (key == null) throw new PBFactoryException("Could not create new broker with PBKey argument 'null'");
// check if the given key really exists
if (MetadataManager.getInstance().connectionRepository().getDescriptor(key) == null)
{
throw new PBFactoryException("Given PBKey " + key + " does not match in metadata configuration");
}
if (log.isEnabledFor(Logger.INFO))
{
// only count created instances when the INFO log level is enabled
log.info("Create new PB instance for PBKey " + key +
", already created persistence broker instances: " + instanceCount);
// useful for testing
++this.instanceCount;
}
PersistenceBrokerInternal instance = null;
Class[] types = {PBKey.class, PersistenceBrokerFactoryIF.class};
Object[] args = {key, this};
try
{
instance = (PersistenceBrokerInternal) ClassHelper.newInstance(implementationClass, types, args);
OjbConfigurator.getInstance().configure(instance);
instance = (PersistenceBrokerInternal) InterceptorFactory.getInstance().createInterceptorFor(instance);
}
catch (Exception e)
{
log.error("Creation of a new PB instance failed", e);
throw new PBFactoryException("Creation of a new PB instance failed", e);
}
return instance;
} | java |
public void actionPerformed(java.awt.event.ActionEvent e)
{
System.out.println("Action Command: " + e.getActionCommand());
System.out.println("Action Params : " + e.paramString());
System.out.println("Action Source : " + e.getSource());
System.out.println("Action SrcCls : " + e.getSource().getClass().getName());
org.apache.ojb.broker.metadata.ClassDescriptor cld =
new org.apache.ojb.broker.metadata.ClassDescriptor(rootNode.getRepository());
// cld.setClassNameOfObject("New Class");
cld.setTableName("New Table");
rootNode.addClassDescriptor(cld);
} | java |
@Override
public ProxyAuthenticationMethod getMethod() {
switch (authenticationMethod) {
case BASIC:
return ProxyAuthenticationMethod.BASIC;
case DIGEST:
return ProxyAuthenticationMethod.DIGEST;
case URL:
return ProxyAuthenticationMethod.URL;
default:
return null;
}
} | java |
public static <T> MetaTinyType<T> metaFor(Class<?> candidate) {
for (MetaTinyType meta : metas) {
if (meta.isMetaOf(candidate)) {
return meta;
}
}
throw new IllegalArgumentException(String.format("not a tinytype: %s", candidate == null ? "null" : candidate.getCanonicalName()));
} | java |
public NamedStyleInfo getNamedStyleInfo(String name) {
for (NamedStyleInfo info : namedStyleInfos) {
if (info.getName().equals(name)) {
return info;
}
}
return null;
} | java |
public static void scanClassPathForFormattingAnnotations() {
ExecutorService executorService = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors() * 2);
// scan the classpath for annotated classes under the "com.nds" and "com.cisco" packages
Reflections reflections = new Reflections("com.nds","com.cisco");
Set<Class<?>> annotated = reflections.getTypesAnnotatedWith(DefaultFormat.class);
// Reflections ciscoReflections = new Reflections("com.cisco");
//
// annotated.addAll(ciscoReflections.getTypesAnnotatedWith(DefaultFormat.class));
for (Class<?> markerClass : annotated) {
// if the marker class is indeed implementing FoundationLoggingMarker
// interface
if (FoundationLoggingMarker.class.isAssignableFrom(markerClass)) {
final Class<? extends FoundationLoggingMarker> clazz = (Class<? extends FoundationLoggingMarker>) markerClass;
executorService.execute(new Runnable() {
@Override
public void run() {
if (markersMap.get(clazz) == null) {
try {
// generate formatter class for this marker
// class
generateAndUpdateFormatterInMap(clazz);
} catch (Exception e) {
LOGGER.trace("problem generating formatter class from static scan method. error is: " + e.toString());
}
}
}
});
} else {// if marker class does not implement FoundationLoggingMarker
// interface, log ERROR
// verify the LOGGER was initialized. It might not be as this
// Method is called in a static block
if (LOGGER == null) {
LOGGER = LoggerFactory.getLogger(AbstractFoundationLoggingMarker.class);
}
LOGGER.error("Formatter annotations should only appear on foundationLoggingMarker implementations");
}
}
try {
TimeUnit.SECONDS.sleep(30);
} catch (InterruptedException e) {
LOGGER.trace(e.toString(), e);
}
executorService.shutdown();
// try {
// executorService.awaitTermination(15, TimeUnit.SECONDS);
// } catch (InterruptedException e) {
// LOGGER.error("creation of formatters has been interrupted");
// }
} | java |
public void addAppenderEvent(final Category cat, final Appender appender) {
updateDefaultLayout(appender);
if (appender instanceof FoundationFileRollingAppender) {
final FoundationFileRollingAppender timeSizeRollingAppender = (FoundationFileRollingAppender) appender;
// update the appender with default values such as logging pattern, file size etc.
//updateDefaultTimeAndSizeRollingAppender(timeSizeRollingAppender);
// read the properties and determine if archiving should be enabled.
updateArchivingSupport(timeSizeRollingAppender);
// by default add the rolling file listener to enable application
// state.
timeSizeRollingAppender.setFileRollEventListener(FoundationRollEventListener.class.getName());
boolean rollOnStartup = true;
if (FoundationLogger.log4jConfigProps != null && FoundationLogger.log4jConfigProps.containsKey(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())) {
rollOnStartup = Boolean.valueOf(FoundationLogger.log4jConfigProps.getProperty(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString()));
}
timeSizeRollingAppender.setRollOnStartup(rollOnStartup);
// refresh the appender
timeSizeRollingAppender.activateOptions();
// timeSizeRollingAppender.setOriginalLayout(); //So application state will not make any problems
}else if(!(appender instanceof FoundationFileRollingAppender) && (appender instanceof TimeAndSizeRollingAppender)){ //TimeAndSizeRollingAppender
final TimeAndSizeRollingAppender timeSizeRollingAppender = (TimeAndSizeRollingAppender) appender;
// update the appender with default values such as logging pattern, file size etc.
updateDefaultTimeAndSizeRollingAppender(timeSizeRollingAppender);
// read the properties and determine if archiving should be enabled.
updateArchivingSupport(timeSizeRollingAppender);
// by default add the rolling file listener to enable application
// state.
timeSizeRollingAppender.setFileRollEventListener(FoundationRollEventListener.class.getName());
boolean rollOnStartup = true;
if (FoundationLogger.log4jConfigProps != null && FoundationLogger.log4jConfigProps.containsKey(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString())) {
rollOnStartup = Boolean.valueOf(FoundationLogger.log4jConfigProps.getProperty(FoundationLoggerConstants.Foundation_ROLL_ON_STARTUP.toString()));
}
timeSizeRollingAppender.setRollOnStartup(rollOnStartup);
// refresh the appender
timeSizeRollingAppender.activateOptions();
// timeSizeRollingAppender.setOriginalLayout();
}
if ( ! (appender instanceof org.apache.log4j.AsyncAppender))
initiateAsyncSupport(appender);
} | java |
private void updateDefaultTimeAndSizeRollingAppender(final FoundationFileRollingAppender appender) {
if (appender.getDatePattern().trim().length() == 0) {
appender.setDatePattern(FoundationLoggerConstants.DEFAULT_DATE_PATTERN.toString());
}
String maxFileSizeKey = "log4j.appender."+appender.getName()+".MaxFileSize";
appender.setMaxFileSize(FoundationLogger.log4jConfigProps.getProperty(maxFileSizeKey, FoundationLoggerConstants.Foundation_MAX_FILE_SIZE.toString()));
// if (appender.getMaxFileSize() == null || appender.getMaxFileSize().equals(FoundationLoggerConstants.DEFAULT_FILE_SIZE.toString())) {
// appender.setMaxFileSize(FoundationLoggerConstants.Foundation_MAX_FILE_SIZE.toString());
// }
String maxRollCountKey = "log4j.appender."+appender.getName()+".MaxRollFileCount";
appender.setMaxRollFileCount(Integer.parseInt(FoundationLogger.log4jConfigProps.getProperty(maxRollCountKey,"100")));
} | java |
final void dispatchToAppender(final String message) {
// dispatch a copy, since events should be treated as being immutable
final FoundationFileRollingAppender appender = this.getSource();
if (appender != null) {
appender.append(new FileRollEvent(this, message));
}
} | java |
final void dispatchToAppender(final LoggingEvent customLoggingEvent) {
// wrap the LoggingEvent in a FileRollEvent to prevent recursion bug
final FoundationFileRollingAppender appender = this.getSource();
if (appender != null) {
appender.append(new FileRollEvent(customLoggingEvent, this));
}
} | java |
public String getStatement()
{
if(sql == null)
{
StringBuffer stmt = new StringBuffer(128);
ClassDescriptor cld = getClassDescriptor();
FieldDescriptor[] fieldDescriptors = cld.getPkFields();
if(fieldDescriptors == null || fieldDescriptors.length == 0)
{
throw new OJBRuntimeException("No PK fields defined in metadata for " + cld.getClassNameOfObject());
}
FieldDescriptor field = fieldDescriptors[0];
stmt.append(SELECT);
stmt.append(field.getColumnName());
stmt.append(FROM);
stmt.append(cld.getFullTableName());
appendWhereClause(cld, false, stmt);
sql = stmt.toString();
}
return sql;
} | java |
public static Comparator getComparator()
{
return new Comparator()
{
public int compare(Object o1, Object o2)
{
FieldDescriptor fmd1 = (FieldDescriptor) o1;
FieldDescriptor fmd2 = (FieldDescriptor) o2;
if (fmd1.getColNo() < fmd2.getColNo())
{
return -1;
}
else if (fmd1.getColNo() > fmd2.getColNo())
{
return 1;
}
else
{
return 0;
}
}
};
} | java |
public void setFieldConversionClassName(String fieldConversionClassName)
{
try
{
this.fieldConversion = (FieldConversion) ClassHelper.newInstance(fieldConversionClassName);
}
catch (Exception e)
{
throw new MetadataException(
"Could not instantiate FieldConversion class using default constructor", e);
}
} | java |
public void setConnection(JdbcConnectionDescriptor jcd) throws PlatformException
{
_jcd = jcd;
String targetDatabase = (String)_dbmsToTorqueDb.get(_jcd.getDbms().toLowerCase());
if (targetDatabase == null)
{
throw new PlatformException("Database "+_jcd.getDbms()+" is not supported by torque");
}
if (!targetDatabase.equals(_targetDatabase))
{
_targetDatabase = targetDatabase;
_creationScript = null;
_initScripts.clear();
}
} | java |
private String writeSchemata(File dir) throws IOException
{
writeCompressedTexts(dir, _torqueSchemata);
StringBuffer includes = new StringBuffer();
for (Iterator it = _torqueSchemata.keySet().iterator(); it.hasNext();)
{
includes.append((String)it.next());
if (it.hasNext())
{
includes.append(",");
}
}
return includes.toString();
} | java |
public void createDB() throws PlatformException
{
if (_creationScript == null)
{
createCreationScript();
}
Project project = new Project();
TorqueDataModelTask modelTask = new TorqueDataModelTask();
File tmpDir = null;
File scriptFile = null;
try
{
tmpDir = new File(getWorkDir(), "schemas");
tmpDir.mkdir();
scriptFile = new File(tmpDir, CREATION_SCRIPT_NAME);
writeCompressedText(scriptFile, _creationScript);
project.setBasedir(tmpDir.getAbsolutePath());
// we use the ant task 'sql' to perform the creation script
SQLExec sqlTask = new SQLExec();
SQLExec.OnError onError = new SQLExec.OnError();
onError.setValue("continue");
sqlTask.setProject(project);
sqlTask.setAutocommit(true);
sqlTask.setDriver(_jcd.getDriver());
sqlTask.setOnerror(onError);
sqlTask.setUserid(_jcd.getUserName());
sqlTask.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord());
sqlTask.setUrl(getDBCreationUrl());
sqlTask.setSrc(scriptFile);
sqlTask.execute();
deleteDir(tmpDir);
}
catch (Exception ex)
{
// clean-up
if ((tmpDir != null) && tmpDir.exists())
{
try
{
scriptFile.delete();
}
catch (NullPointerException e)
{
LoggerFactory.getLogger(this.getClass()).error("NPE While deleting scriptFile [" + scriptFile.getName() + "]", e);
}
}
throw new PlatformException(ex);
}
} | java |
public void initDB() throws PlatformException
{
if (_initScripts.isEmpty())
{
createInitScripts();
}
Project project = new Project();
TorqueSQLTask sqlTask = new TorqueSQLTask();
File outputDir = null;
try
{
outputDir = new File(getWorkDir(), "sql");
outputDir.mkdir();
writeCompressedTexts(outputDir, _initScripts);
project.setBasedir(outputDir.getAbsolutePath());
// executing the generated sql, but this time with a torque task
TorqueSQLExec sqlExec = new TorqueSQLExec();
TorqueSQLExec.OnError onError = new TorqueSQLExec.OnError();
sqlExec.setProject(project);
onError.setValue("continue");
sqlExec.setAutocommit(true);
sqlExec.setDriver(_jcd.getDriver());
sqlExec.setOnerror(onError);
sqlExec.setUserid(_jcd.getUserName());
sqlExec.setPassword(_jcd.getPassWord() == null ? "" : _jcd.getPassWord());
sqlExec.setUrl(getDBManipulationUrl());
sqlExec.setSrcDir(outputDir.getAbsolutePath());
sqlExec.setSqlDbMap(SQL_DB_MAP_NAME);
sqlExec.execute();
deleteDir(outputDir);
}
catch (Exception ex)
{
// clean-up
if (outputDir != null)
{
deleteDir(outputDir);
}
throw new PlatformException(ex);
}
} | java |
protected String getDBManipulationUrl()
{
JdbcConnectionDescriptor jcd = getConnection();
return jcd.getProtocol()+":"+jcd.getSubProtocol()+":"+jcd.getDbAlias();
} | java |
private byte[] readStreamCompressed(InputStream stream) throws IOException
{
ByteArrayOutputStream bao = new ByteArrayOutputStream();
GZIPOutputStream gos = new GZIPOutputStream(bao);
OutputStreamWriter output = new OutputStreamWriter(gos);
BufferedReader input = new BufferedReader(new InputStreamReader(stream));
String line;
while ((line = input.readLine()) != null)
{
output.write(line);
output.write('\n');
}
input.close();
stream.close();
output.close();
gos.close();
bao.close();
return bao.toByteArray();
} | java |
private void readTextsCompressed(File dir, HashMap results) throws IOException
{
if (dir.exists() && dir.isDirectory())
{
File[] files = dir.listFiles();
for (int idx = 0; idx < files.length; idx++)
{
if (files[idx].isDirectory())
{
continue;
}
results.put(files[idx].getName(), readTextCompressed(files[idx]));
}
}
} | java |
private void writeCompressedText(File file, byte[] compressedContent) throws IOException
{
ByteArrayInputStream bais = new ByteArrayInputStream(compressedContent);
GZIPInputStream gis = new GZIPInputStream(bais);
BufferedReader input = new BufferedReader(new InputStreamReader(gis));
BufferedWriter output = new BufferedWriter(new FileWriter(file));
String line;
while ((line = input.readLine()) != null)
{
output.write(line);
output.write('\n');
}
input.close();
gis.close();
bais.close();
output.close();
} | java |
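// A small, self-contained round-trip sketch of the GZIP text helpers above; it assumes only
// java.io and java.util.zip and mirrors readStreamCompressed / writeCompressedText by
// compressing a few text lines into a byte array and then expanding them again.
public class GzipTextRoundTripDemo
{
    static byte[] compress(String text) throws java.io.IOException
    {
        java.io.ByteArrayOutputStream bao = new java.io.ByteArrayOutputStream();
        java.util.zip.GZIPOutputStream gos = new java.util.zip.GZIPOutputStream(bao);
        java.io.OutputStreamWriter output = new java.io.OutputStreamWriter(gos);
        output.write(text);
        output.close(); // closing the writer also finishes the underlying GZIP stream
        return bao.toByteArray();
    }

    static String expand(byte[] compressed) throws java.io.IOException
    {
        java.io.ByteArrayInputStream bais = new java.io.ByteArrayInputStream(compressed);
        java.util.zip.GZIPInputStream gis = new java.util.zip.GZIPInputStream(bais);
        java.io.BufferedReader input = new java.io.BufferedReader(new java.io.InputStreamReader(gis));
        StringBuffer buf = new StringBuffer();
        String line;
        while ((line = input.readLine()) != null)
        {
            buf.append(line).append('\n');
        }
        input.close();
        return buf.toString();
    }

    public static void main(String[] args) throws java.io.IOException
    {
        byte[] packed = compress("CREATE TABLE demo (id INTEGER);\nDROP TABLE demo;\n");
        System.out.println(expand(packed)); // prints the two lines again
    }
} | java |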
private void writeCompressedTexts(File dir, HashMap contents) throws IOException
{
String filename;
for (Iterator nameIt = contents.keySet().iterator(); nameIt.hasNext();)
{
filename = (String)nameIt.next();
writeCompressedText(new File(dir, filename), (byte[])contents.get(filename));
}
} | java |
public void setWorkDir(String dir) throws IOException
{
File workDir = new File(dir);
if (!workDir.exists() || !workDir.canWrite() || !workDir.canRead())
{
throw new IOException("Cannot access directory "+dir);
}
_workDir = workDir;
} | java |
private File getWorkDir() throws IOException
{
if (_workDir == null)
{
File dummy = File.createTempFile("dummy", ".log");
String workDir = dummy.getPath().substring(0, dummy.getPath().lastIndexOf(File.separatorChar));
if ((workDir == null) || (workDir.length() == 0))
{
workDir = ".";
}
dummy.delete();
_workDir = new File(workDir);
}
return _workDir;
} | java |
private void deleteDir(File dir)
{
if (dir.exists() && dir.isDirectory())
{
File[] files = dir.listFiles();
for (int idx = 0; idx < files.length; idx++)
{
if (!files[idx].exists())
{
continue;
}
if (files[idx].isDirectory())
{
deleteDir(files[idx]);
}
else
{
files[idx].delete();
}
}
dir.delete();
}
} | java |
@GET
@Produces(MediaType.APPLICATION_JSON)
@Path("/graph/{name}/{version}")
public Response getModuleGraph(@PathParam("name") final String moduleName,
@PathParam("version") final String moduleVersion,
@Context final UriInfo uriInfo){
LOG.info("Dependency Checker got a get module graph export request.");
if(moduleName == null || moduleVersion == null){
return Response.serverError().status(HttpStatus.NOT_ACCEPTABLE_406).build();
}
final FiltersHolder filters = new FiltersHolder();
filters.init(uriInfo.getQueryParameters());
final String moduleId = DbModule.generateID(moduleName, moduleVersion);
final AbstractGraph moduleGraph = getGraphsHandler(filters).getModuleGraph(moduleId);
return Response.ok(moduleGraph).build();
} | java |
void update(Object feature) throws LayerException {
SimpleFeatureSource source = getFeatureSource();
if (source instanceof SimpleFeatureStore) {
SimpleFeatureStore store = (SimpleFeatureStore) source;
String featureId = getFeatureModel().getId(feature);
Filter filter = filterService.createFidFilter(new String[] { featureId });
transactionSynchronization.synchTransaction(store);
List<Name> names = new ArrayList<Name>();
Map<String, Attribute> attrMap = getFeatureModel().getAttributes(feature);
List<Object> values = new ArrayList<Object>();
for (Map.Entry<String, Attribute> entry : attrMap.entrySet()) {
String name = entry.getKey();
names.add(store.getSchema().getDescriptor(name).getName());
values.add(entry.getValue().getValue());
}
try {
store.modifyFeatures(names.toArray(new Name[names.size()]), values.toArray(), filter);
store.modifyFeatures(store.getSchema().getGeometryDescriptor().getName(), getFeatureModel()
.getGeometry(feature), filter);
log.debug("Updated feature {} in {}", featureId, getFeatureSourceName());
} catch (IOException ioe) {
featureModelUsable = false;
throw new LayerException(ioe, ExceptionCode.LAYER_MODEL_IO_EXCEPTION);
}
} else {
log.error("Don't know how to create or update " + getFeatureSourceName() + ", class "
+ source.getClass().getName() + " does not implement SimpleFeatureStore");
throw new LayerException(ExceptionCode.CREATE_OR_UPDATE_NOT_IMPLEMENTED, getFeatureSourceName(), source
.getClass().getName());
}
} | java |
@Override
public void format(final StringBuffer sbuf, final LoggingEvent event) {
for (int i = 0; i < patternConverters.length; i++) {
final int startField = sbuf.length();
patternConverters[i].format(event, sbuf);
patternFields[i].format(startField, sbuf);
}
} | java |
private Database readSingleSchemaFile(DatabaseIO reader, File schemaFile)
{
Database model = null;
if (!schemaFile.isFile())
{
log("Path "+schemaFile.getAbsolutePath()+" does not denote a schema file", Project.MSG_ERR);
}
else if (!schemaFile.canRead())
{
log("Could not read schema file "+schemaFile.getAbsolutePath(), Project.MSG_ERR);
}
else
{
try
{
model = reader.read(schemaFile);
log("Read schema file "+schemaFile.getAbsolutePath(), Project.MSG_INFO);
}
catch (Exception ex)
{
throw new BuildException("Could not read schema file "+schemaFile.getAbsolutePath()+": "+ex.getLocalizedMessage(), ex);
}
}
return model;
} | java |
private MetadataManager initOJB()
{
try
{
if (_ojbPropertiesFile == null)
{
_ojbPropertiesFile = new File("OJB.properties");
if (!_ojbPropertiesFile.exists())
{
throw new BuildException("Could not find OJB.properties, please specify it via the ojbpropertiesfile attribute");
}
}
else
{
if (!_ojbPropertiesFile.exists())
{
throw new BuildException("Could not load the specified OJB properties file "+_ojbPropertiesFile);
}
log("Using properties file "+_ojbPropertiesFile.getAbsolutePath(), Project.MSG_INFO);
System.setProperty("OJB.properties", _ojbPropertiesFile.getAbsolutePath());
}
MetadataManager metadataManager = MetadataManager.getInstance();
RepositoryPersistor persistor = new RepositoryPersistor();
if (_repositoryFile != null)
{
if (!_repositoryFile.exists())
{
throw new BuildException("Could not load the specified repository file "+_repositoryFile);
}
log("Loading repository file "+_repositoryFile.getAbsolutePath(), Project.MSG_INFO);
// this will load the info from the specified repository file
// and merge it with the existing info (if it has been loaded)
metadataManager.mergeConnectionRepository(persistor.readConnectionRepository(_repositoryFile.getAbsolutePath()));
metadataManager.mergeDescriptorRepository(persistor.readDescriptorRepository(_repositoryFile.getAbsolutePath()));
}
else if (metadataManager.connectionRepository().getAllDescriptor().isEmpty() &&
metadataManager.getGlobalRepository().getDescriptorTable().isEmpty())
{
// Seems nothing was loaded, probably because we're not starting in the directory
// that the properties file is in, and the repository file path is relative
// So let's try to resolve this path and load the repository info manually
Properties props = new Properties();
props.load(new FileInputStream(_ojbPropertiesFile));
String repositoryPath = props.getProperty("repositoryFile", "repository.xml");
File repositoryFile = new File(repositoryPath);
if (!repositoryFile.exists())
{
repositoryFile = new File(_ojbPropertiesFile.getParentFile(), repositoryPath);
}
metadataManager.mergeConnectionRepository(persistor.readConnectionRepository(repositoryFile.getAbsolutePath()));
metadataManager.mergeDescriptorRepository(persistor.readDescriptorRepository(repositoryFile.getAbsolutePath()));
}
// we might have to determine the default pb key ourselves
if (metadataManager.getDefaultPBKey() == null)
{
for (Iterator it = metadataManager.connectionRepository().getAllDescriptor().iterator(); it.hasNext();)
{
JdbcConnectionDescriptor descriptor = (JdbcConnectionDescriptor)it.next();
if (descriptor.isDefaultConnection())
{
metadataManager.setDefaultPBKey(new PBKey(descriptor.getJcdAlias(), descriptor.getUserName(), descriptor.getPassWord()));
break;
}
}
}
return metadataManager;
}
catch (Exception ex)
{
if (ex instanceof BuildException)
{
throw (BuildException)ex;
}
else
{
throw new BuildException(ex);
}
}
} | java |
public String putDocument(Document document) {
String key = UUID.randomUUID().toString();
documentMap.put(key, document);
return key;
} | java |
public Document removeDocument(String key) throws PrintingException {
if (documentMap.containsKey(key)) {
return documentMap.remove(key);
} else {
throw new PrintingException(PrintingException.DOCUMENT_NOT_FOUND, key);
}
} | java |
private static Query buildQuery(ClassDescriptor cld)
{
FieldDescriptor[] pkFields = cld.getPkFields();
Criteria crit = new Criteria();
for(int i = 0; i < pkFields.length; i++)
{
crit.addEqualTo(pkFields[i].getAttributeName(), null);
}
return new QueryByCriteria(cld.getClassOfObject(), crit);
} | java |
public String getMessage(Locale locale) {
if (getCause() != null) {
String message = getShortMessage(locale) + ", " + translate("ROOT_CAUSE", locale) + " ";
if (getCause() instanceof GeomajasException) {
return message + ((GeomajasException) getCause()).getMessage(locale);
}
return message + getCause().getMessage();
} else {
return getShortMessage(locale);
}
} | java |
public String getShortMessage(Locale locale) {
String message;
message = translate(Integer.toString(exceptionCode), locale);
if (message != null && msgParameters != null && msgParameters.length > 0) {
for (int i = 0; i < msgParameters.length; i++) {
boolean isIncluded = false;
String needTranslationParam = "$${" + i + "}";
if (message.contains(needTranslationParam)) {
String translation = translate(msgParameters[i], locale);
if (null == translation && null != msgParameters[i]) {
translation = msgParameters[i].toString();
}
if (null == translation) {
translation = "[null]";
}
message = message.replace(needTranslationParam, translation);
isIncluded = true;
}
String verbatimParam = "${" + i + "}";
String rs = null == msgParameters[i] ? "[null]" : msgParameters[i].toString();
if (message.contains(verbatimParam)) {
message = message.replace(verbatimParam, rs);
isIncluded = true;
}
if (!isIncluded) {
message = message + " (" + rs + ")"; // NOSONAR replace/contains makes StringBuilder use difficult
}
}
}
return message;
} | java |
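// A standalone sketch of the placeholder convention used by getShortMessage above, assuming a
// fixed message template instead of the translate() lookup: "$${i}" marks a parameter that would
// normally be translated first, "${i}" is inserted verbatim, and parameters that are not
// referenced at all are appended in parentheses.
public class MessageFormatDemo
{
    static String format(String message, Object[] msgParameters)
    {
        for (int i = 0; i < msgParameters.length; i++)
        {
            boolean isIncluded = false;
            String rs = null == msgParameters[i] ? "[null]" : msgParameters[i].toString();
            String needTranslationParam = "$${" + i + "}";
            if (message.contains(needTranslationParam))
            {
                // the real code would run the parameter through translate(..) before substituting
                message = message.replace(needTranslationParam, rs);
                isIncluded = true;
            }
            String verbatimParam = "${" + i + "}";
            if (message.contains(verbatimParam))
            {
                message = message.replace(verbatimParam, rs);
                isIncluded = true;
            }
            if (!isIncluded)
            {
                message = message + " (" + rs + ")";
            }
        }
        return message;
    }

    public static void main(String[] args)
    {
        String template = "Layer ${0} could not be loaded, cause: $${1}";
        System.out.println(format(template, new Object[]{"roads", "IO_ERROR", "extra detail"}));
        // prints: Layer roads could not be loaded, cause: IO_ERROR (extra detail)
    }
} | java |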
public PlanarImage toDirectColorModel(RenderedImage img) {
BufferedImage dest = new BufferedImage(img.getWidth(), img.getHeight(), BufferedImage.TYPE_4BYTE_ABGR);
BufferedImage source = new BufferedImage(img.getColorModel(), (WritableRaster) img.getData(), img
.getColorModel().isAlphaPremultiplied(), null);
ColorConvertOp op = new ColorConvertOp(null);
op.filter(source, dest);
return PlanarImage.wrapRenderedImage(dest);
} | java |
public ManagedConnection createManagedConnection(Subject subject, ConnectionRequestInfo info)
{
Util.log("In OTMJCAManagedConnectionFactory.createManagedConnection");
try
{
Kit kit = getKit();
PBKey key = ((OTMConnectionRequestInfo) info).getPbKey();
OTMConnection connection = kit.acquireConnection(key);
return new OTMJCAManagedConnection(this, connection, key);
}
catch (ResourceException e)
{
throw new OTMConnectionRuntimeException(e.getMessage());
}
} | java |
public void render(OutputStream outputStream, Format format, int dpi) throws PrintingException {
try {
if (baos == null) {
prepare();
}
writeDocument(outputStream, format, dpi);
} catch (Exception e) { // NOSONAR
throw new PrintingException(e, PrintingException.DOCUMENT_RENDER_PROBLEM);
}
} | java |
private void prepare() throws IOException, DocumentException, PrintingException {
if (baos == null) {
baos = new ByteArrayOutputStream(); // let it grow as much as needed
}
baos.reset();
boolean resize = false;
if (page.getConstraint().getWidth() == 0 || page.getConstraint().getHeight() == 0) {
resize = true;
}
// Create a document in the requested ISO scale.
Document document = new Document(page.getBounds(), 0, 0, 0, 0);
PdfWriter writer;
writer = PdfWriter.getInstance(document, baos);
// Render in correct colors for transparent rasters
writer.setRgbTransparencyBlending(true);
// The mapView is not scaled to the document, we assume the mapView
// has the right ratio.
// Write document title and metadata
document.open();
PdfContext context = new PdfContext(writer);
context.initSize(page.getBounds());
// first pass of all children to calculate size
page.calculateSize(context);
if (resize) {
// we now know the bounds of the document
// round 'm up and restart with a new document
int width = (int) Math.ceil(page.getBounds().getWidth());
int height = (int) Math.ceil(page.getBounds().getHeight());
page.getConstraint().setWidth(width);
page.getConstraint().setHeight(height);
document = new Document(new Rectangle(width, height), 0, 0, 0, 0);
writer = PdfWriter.getInstance(document, baos);
// Render in correct colors for transparent rasters
writer.setRgbTransparencyBlending(true);
document.open();
baos.reset();
context = new PdfContext(writer);
context.initSize(page.getBounds());
}
// int compressionLevel = writer.getCompressionLevel(); // For testing
// writer.setCompressionLevel(0);
// Actual drawing
document.addTitle("Geomajas");
// second pass to layout
page.layout(context);
// finally render (uses baos)
page.render(context);
document.add(context.getImage());
// Now close the document
document.close();
} | java |
private static synchronized boolean isLog4JConfigured()
{
if(!log4jConfigured)
{
Enumeration en = org.apache.log4j.Logger.getRootLogger().getAllAppenders();
if (!(en instanceof org.apache.log4j.helpers.NullEnumeration))
{
log4jConfigured = true;
}
else
{
Enumeration cats = LogManager.getCurrentLoggers();
while (cats.hasMoreElements())
{
org.apache.log4j.Logger c = (org.apache.log4j.Logger) cats.nextElement();
if (!(c.getAllAppenders() instanceof org.apache.log4j.helpers.NullEnumeration))
{
log4jConfigured = true;
}
}
}
if(log4jConfigured)
{
String msg = "Log4J is already configured, will not search for log4j properties file";
LoggerFactory.getBootLogger().info(msg);
}
else
{
LoggerFactory.getBootLogger().info("Log4J is not configured");
}
}
return log4jConfigured;
} | java |
private org.apache.log4j.Logger getLogger()
{
/*
Logger interface extends Serializable, thus Log field is
declared 'transient' and we have to null-check
*/
if (logger == null)
{
logger = org.apache.log4j.Logger.getLogger(name);
}
return logger;
} | java |
public final void debug(Object pObject)
{
getLogger().log(FQCN, Level.DEBUG, pObject, null);
} | java |
public final void info(Object pObject)
{
getLogger().log(FQCN, Level.INFO, pObject, null);
} | java |
public final void warn(Object pObject)
{
getLogger().log(FQCN, Level.WARN, pObject, null);
} | java |
public final void error(Object pObject)
{
getLogger().log(FQCN, Level.ERROR, pObject, null);
} | java |
public final void fatal(Object pObject)
{
getLogger().log(FQCN, Level.FATAL, pObject, null);
} | java |
protected Class<?> getPropertyClass(ClassMetadata meta, String propertyName) throws HibernateLayerException {
// try to assure the correct separator is used
propertyName = propertyName.replace(XPATH_SEPARATOR, SEPARATOR);
if (propertyName.contains(SEPARATOR)) {
String directProperty = propertyName.substring(0, propertyName.indexOf(SEPARATOR));
try {
Type prop = meta.getPropertyType(directProperty);
if (prop.isCollectionType()) {
CollectionType coll = (CollectionType) prop;
prop = coll.getElementType((SessionFactoryImplementor) sessionFactory);
}
ClassMetadata propMeta = sessionFactory.getClassMetadata(prop.getReturnedClass());
return getPropertyClass(propMeta, propertyName.substring(propertyName.indexOf(SEPARATOR) + 1));
} catch (HibernateException e) {
throw new HibernateLayerException(e, ExceptionCode.HIBERNATE_COULD_NOT_RESOLVE, propertyName,
meta.getEntityName());
}
} else {
try {
return meta.getPropertyType(propertyName).getReturnedClass();
} catch (HibernateException e) {
throw new HibernateLayerException(e, ExceptionCode.HIBERNATE_COULD_NOT_RESOLVE, propertyName,
meta.getEntityName());
}
}
} | java |
public void setSessionFactory(SessionFactory sessionFactory) throws HibernateLayerException {
try {
this.sessionFactory = sessionFactory;
if (null != layerInfo) {
entityMetadata = sessionFactory.getClassMetadata(layerInfo.getFeatureInfo().getDataSourceName());
}
} catch (Exception e) { // NOSONAR
throw new HibernateLayerException(e, ExceptionCode.HIBERNATE_NO_SESSION_FACTORY);
}
} | java |
static JDOClass getJDOClass(Class c)
{
JDOClass rc = null;
try
{
JavaModelFactory javaModelFactory = RuntimeJavaModelFactory.getInstance();
JavaModel javaModel = javaModelFactory.getJavaModel(c.getClassLoader());
JDOModel m = JDOModelFactoryImpl.getInstance().getJDOModel(javaModel);
rc = m.getJDOClass(c.getName());
}
catch (RuntimeException ex)
{
throw new JDOFatalInternalException("Not a JDO class: " + c.getName());
}
return rc;
} | java |
static Object getLCState(StateManagerInternal sm)
{
// unfortunately the LifeCycleState classes are package private.
// so we have to do some dirty reflection hack to access them
try
{
Field myLC = sm.getClass().getDeclaredField("myLC");
myLC.setAccessible(true);
return myLC.get(sm);
}
catch (NoSuchFieldException e)
{
return e;
}
catch (IllegalAccessException e)
{
return e;
}
} | java |
@PostConstruct
protected void postConstruct() throws GeomajasException {
if (null == baseTmsUrl) {
throw new GeomajasException(ExceptionCode.PARAMETER_MISSING, "baseTmsUrl");
}
// Make sure we have a base URL we can work with:
if ((baseTmsUrl.startsWith("http://") || baseTmsUrl.startsWith("https://")) && !baseTmsUrl.endsWith("/")) {
baseTmsUrl += "/";
}
// Make sure there is a correct RasterLayerInfo object:
if (layerInfo == null || layerInfo == UNUSABLE_LAYER_INFO) {
try {
tileMap = configurationService.getCapabilities(this);
version = tileMap.getVersion();
extension = tileMap.getTileFormat().getExtension();
layerInfo = configurationService.asLayerInfo(tileMap);
usable = true;
} catch (TmsLayerException e) {
// a layer needs an info object to keep the DtoConfigurationPostProcessor happy !
layerInfo = UNUSABLE_LAYER_INFO;
usable = false;
log.warn("The layer could not be correctly initialized: " + getId(), e);
}
} else if (extension == null) {
throw new GeomajasException(ExceptionCode.PARAMETER_MISSING, "extension");
}
if (layerInfo != null) {
// Finally prepare some often needed values:
state = new TileServiceState(geoService, layerInfo);
// when proxying the real url will be resolved later on, just use a simple one for now
boolean proxying = useCache || useProxy || null != authentication;
if (tileMap != null && !proxying) {
urlBuilder = new TileMapUrlBuilder(tileMap);
} else {
urlBuilder = new SimpleTmsUrlBuilder(extension);
}
}
} | java |
public Collection getReaders(Object obj)
{
checkTimedOutLocks();
Identity oid = new Identity(obj,getBroker());
return getReaders(oid);
} | java |
private void removeTimedOutLocks(long timeout)
{
int count = 0;
long maxAge = System.currentTimeMillis() - timeout;
boolean breakFromLoop = false;
ObjectLocks temp = null;
synchronized (locktable)
{
Iterator it = locktable.values().iterator();
/**
* run this loop while:
* - we have more in the iterator
* - the breakFromLoop flag hasn't been set
* - we haven't removed more than the limit for this cleaning iteration.
*/
while (it.hasNext() && !breakFromLoop && (count <= MAX_LOCKS_TO_CLEAN))
{
temp = (ObjectLocks) it.next();
if (temp.getWriter() != null)
{
if (temp.getWriter().getTimestamp() < maxAge)
{
// writer has timed out, set it to null
temp.setWriter(null);
}
}
if (temp.getYoungestReader() < maxAge)
{
// all readers are older than timeout.
temp.getReaders().clear();
if (temp.getWriter() == null)
{
// all readers and writer are older than timeout,
// remove the objectLock from the iterator (which
// is backed by the map, so it will be removed.
it.remove();
}
}
else
{
// we need to walk each reader.
Iterator readerIt = temp.getReaders().values().iterator();
LockEntry readerLock = null;
while (readerIt.hasNext())
{
readerLock = (LockEntry) readerIt.next();
if (readerLock.getTimestamp() < maxAge)
{
// this read lock is old, remove it.
readerIt.remove();
}
}
}
count++;
}
}
} | java |
public void createAgent(String agent_name, String path) {
IComponentIdentifier agent = cmsService.createComponent(agent_name,
path, null, null).get(new ThreadSuspendable());
createdAgents.put(agent_name, agent);
} | java |
public IExternalAccess getAgentsExternalAccess(String agent_name) {
return cmsService.getExternalAccess(getAgentID(agent_name)).get(
new ThreadSuspendable());
} | java |
public void create(final DbProduct dbProduct) {
if(repositoryHandler.getProduct(dbProduct.getName()) != null){
throw new WebApplicationException(Response.status(Response.Status.CONFLICT).entity("Product already exists!").build());
}
repositoryHandler.store(dbProduct);
} | java |
public DbProduct getProduct(final String name) {
final DbProduct dbProduct = repositoryHandler.getProduct(name);
if(dbProduct == null){
throw new WebApplicationException(Response.status(Response.Status.NOT_FOUND)
.entity("Product " + name + " does not exist.").build());
}
return dbProduct;
} | java |
public void deleteProduct(final String name) {
final DbProduct dbProduct = getProduct(name);
repositoryHandler.deleteProduct(dbProduct.getName());
} | java |
public void setProductModules(final String name, final List<String> moduleNames) {
final DbProduct dbProduct = getProduct(name);
dbProduct.setModules(moduleNames);
repositoryHandler.store(dbProduct);
} | java |
@Override
public Object[] getAgentPlans(String agent_name, Connector connector) {
// Not supported in JADE
connector.getLogger().warning("Unsupported method for the JADE platform. There are no plans in the JADE platform.");
throw new java.lang.UnsupportedOperationException("Unsupported method for the JADE platform. There are no plans in the JADE platform.");
} | java |
protected synchronized int loadSize() throws PersistenceBrokerException
{
PersistenceBroker broker = getBroker();
try
{
return broker.getCount(getQuery());
}
catch (Exception ex)
{
throw new PersistenceBrokerException(ex);
}
finally
{
releaseBroker(broker);
}
} | java |
protected Collection loadData() throws PersistenceBrokerException
{
PersistenceBroker broker = getBroker();
try
{
Collection result;
if (_data != null) // could be set by listener
{
result = _data;
}
else if (_size != 0)
{
// TODO: returned ManageableCollection should extend Collection to avoid
// this cast
result = (Collection) broker.getCollectionByQuery(getCollectionClass(), getQuery());
}
else
{
result = (Collection)getCollectionClass().newInstance();
}
return result;
}
catch (Exception ex)
{
throw new PersistenceBrokerException(ex);
}
finally
{
releaseBroker(broker);
}
} | java |
protected void beforeLoading()
{
if (_listeners != null)
{
CollectionProxyListener listener;
if (_perThreadDescriptorsEnabled) {
loadProfileIfNeeded();
}
for (int idx = _listeners.size() - 1; idx >= 0; idx--)
{
listener = (CollectionProxyListener)_listeners.get(idx);
listener.beforeLoading(this);
}
}
} | java |
public void clear()
{
Class collClass = getCollectionClass();
// ECER: assure we notify all objects being removed,
// necessary for RemovalAwareCollections...
if (IRemovalAwareCollection.class.isAssignableFrom(collClass))
{
getData().clear();
}
else
{
Collection coll;
// BRJ: use an empty collection so isLoaded will return true
// for non RemovalAwareCollections only !!
try
{
coll = (Collection) collClass.newInstance();
}
catch (Exception e)
{
coll = new ArrayList();
}
setData(coll);
}
_size = 0;
} | java |
protected synchronized void releaseBroker(PersistenceBroker broker)
{
/*
arminw:
only close the broker instance if we get
it from the PBF, do nothing if we obtain it from
PBThreadMapping
*/
if (broker != null && _needsClose)
{
_needsClose = false;
broker.close();
}
} | java |
protected synchronized PersistenceBroker getBroker() throws PBFactoryException
{
/*
mkalen:
NB! The loadProfileIfNeeded must be called _before_ acquiring a broker below,
since some methods in PersistenceBrokerImpl will keep a local reference to
the descriptor repository that was active during broker construction/refresh
(not checking the repository being used on method invocation).
PersistenceBrokerImpl#getClassDescriptor(Class clazz) is such a method,
that will throw ClassNotPersistenceCapableException on the following scenario:
(All happens in one thread only):
t0: activate per-thread metadata changes
t1: load, register and activate profile A
t2: load object O1 with collection proxy C to objects {O2} (C stores profile key K(A))
t3: close broker from t2
t4: load, register and activate profile B
t5: reference O1.getO2Collection, causing C loadData() to be invoked
t6: C calls getBroker
broker B is created and descriptorRepository is set to descriptors from profile B
t7: C calls loadProfileIfNeeded, re-activating profile A
t8: C calls B.getCollectionByQuery
t9: B gets callback (via QueryReferenceBroker) to getClassDescriptor
the local descriptorRepository from t6 is used!
=> We will now try to query for {O2} with profile B
(even though we re-activated profile A in t7)
=> ClassNotPersistenceCapableException
Keeping loadProfileIfNeeded() at the start of this method changes everything from t6:
t6: C calls loadProfileIfNeeded, re-activating profile A
t7: C calls getBroker,
broker B is created and descriptorRepository is set to descriptors from profile A
t8: C calls B.getCollectionByQuery
t9: B gets callback to getClassDescriptor,
the local descriptorRepository from t6 is used
=> We query for {O2} with profile A
=> All good :-)
*/
if (_perThreadDescriptorsEnabled)
{
loadProfileIfNeeded();
}
PersistenceBroker broker;
if (getBrokerKey() == null)
{
/*
arminw:
if no PBKey is set we throw an exception, because we don't
know which PB (connection) should be used.
*/
throw new OJBRuntimeException("Can't find associated PBKey. Need PBKey to obtain a valid " +
"PersistenceBroker instance from internal resources.");
}
// first try to use the current threaded broker to avoid blocking
broker = PersistenceBrokerThreadMapping.currentPersistenceBroker(getBrokerKey());
// current broker not found or was closed, create a new internal one
if (broker == null || broker.isClosed())
{
broker = PersistenceBrokerFactory.createPersistenceBroker(getBrokerKey());
// signal that we use a newly obtained internal PB instance to read the
// data and that this instance has to be closed after use
_needsClose = true;
}
return broker;
} | java |
public synchronized void addListener(CollectionProxyListener listener)
{
if (_listeners == null)
{
_listeners = new ArrayList();
}
// to avoid multi-add of same listener, do check
if(!_listeners.contains(listener))
{
_listeners.add(listener);
}
} | java |
public String getURN() throws InvalidRegistrationContentException {
if (parsedConfig==null || parsedConfig.urn==null || parsedConfig.urn.trim().isEmpty()) {
throw new InvalidRegistrationContentException("Invalid registration config - failed to read mediator URN");
}
return parsedConfig.urn;
} | java |
public boolean deleteExisting(final File file) {
if (!file.exists()) {
return true;
}
boolean deleted = false;
if (file.canWrite()) {
deleted = file.delete();
} else {
LogLog.debug(file + " is not writeable for delete (retrying)");
}
if (!deleted) {
if (!file.exists()) {
deleted = true;
} else {
file.delete();
deleted = (!file.exists());
}
}
return deleted;
} | java |
public boolean rename(final File from, final File to) {
boolean renamed = false;
if (this.isWriteable(from)) {
renamed = from.renameTo(to);
} else {
LogLog.debug(from + " is not writeable for rename (retrying)");
}
if (!renamed) {
from.renameTo(to);
renamed = (!from.exists());
}
return renamed;
} | java |
protected synchronized void registerOpenDatabase(DatabaseImpl newDB)
{
DatabaseImpl old_db = getCurrentDatabase();
if (old_db != null)
{
try
{
if (old_db.isOpen())
{
log.warn("## There is still an opened database, close old one ##");
old_db.close();
}
}
catch (Throwable t)
{
//ignore
}
}
if (log.isDebugEnabled()) log.debug("Set current database " + newDB + " PBKey was " + newDB.getPBKey());
setCurrentDatabase(newDB);
// usedDatabases.add(newDB.getPBKey());
} | java |
public String[] getAttributeNames()
{
Set keys = (attributeMap == null ? new HashSet() : attributeMap.keySet());
String[] result = new String[keys.size()];
keys.toArray(result);
return result;
} | java |
public void check(ModelDef modelDef, String checkLevel) throws ConstraintException
{
ensureReferencedKeys(modelDef, checkLevel);
checkReferenceForeignkeys(modelDef, checkLevel);
checkCollectionForeignkeys(modelDef, checkLevel);
checkKeyModifications(modelDef, checkLevel);
} | java |
private void ensureReferencedPKs(ModelDef modelDef, ReferenceDescriptorDef refDef) throws ConstraintException
{
String targetClassName = refDef.getProperty(PropertyHelper.OJB_PROPERTY_CLASS_REF);
ClassDescriptorDef targetClassDef = modelDef.getClass(targetClassName);
ensurePKsFromHierarchy(targetClassDef);
} | java |
private void ensureReferencedPKs(ModelDef modelDef, CollectionDescriptorDef collDef) throws ConstraintException
{
String elementClassName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_ELEMENT_CLASS_REF);
ClassDescriptorDef elementClassDef = modelDef.getClass(elementClassName);
String indirTable = collDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE);
String localKey = collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
String remoteKey = collDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY);
boolean hasRemoteKey = remoteKey != null;
ArrayList fittingCollections = new ArrayList();
// we're checking for the fitting remote collection(s) and also
// use their foreignkey as remote-foreignkey in the original collection definition
for (Iterator it = elementClassDef.getAllExtentClasses(); it.hasNext();)
{
ClassDescriptorDef subTypeDef = (ClassDescriptorDef)it.next();
// find the collection in the element class that has the same indirection table
for (Iterator collIt = subTypeDef.getCollections(); collIt.hasNext();)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)collIt.next();
if (indirTable.equals(curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE)) &&
(collDef != curCollDef) &&
(!hasRemoteKey || CommaListIterator.sameLists(remoteKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY))) &&
(!curCollDef.hasProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY) ||
CommaListIterator.sameLists(localKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY))))
{
fittingCollections.add(curCollDef);
}
}
}
if (!fittingCollections.isEmpty())
{
// if there is more than one, check that they match, i.e. that they all have the same foreignkeys
if (!hasRemoteKey && (fittingCollections.size() > 1))
{
CollectionDescriptorDef firstCollDef = (CollectionDescriptorDef)fittingCollections.get(0);
String foreignKey = firstCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
for (int idx = 1; idx < fittingCollections.size(); idx++)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)fittingCollections.get(idx);
if (!CommaListIterator.sameLists(foreignKey, curCollDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY)))
{
throw new ConstraintException("Cannot determine the element-side collection that corresponds to the collection "+
collDef.getName()+" in type "+collDef.getOwner().getName()+
" because there are at least two different collections that would fit."+
" Specifying remote-foreignkey in the original collection "+collDef.getName()+
" will perhaps help");
}
}
// store the found keys at the collections
collDef.setProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY, foreignKey);
for (int idx = 0; idx < fittingCollections.size(); idx++)
{
CollectionDescriptorDef curCollDef = (CollectionDescriptorDef)fittingCollections.get(idx);
curCollDef.setProperty(PropertyHelper.OJB_PROPERTY_REMOTE_FOREIGNKEY, localKey);
}
}
}
// copy subclass pk fields into target class (if not already present)
ensurePKsFromHierarchy(elementClassDef);
} | java |
private void ensureReferencedFKs(ModelDef modelDef, CollectionDescriptorDef collDef) throws ConstraintException
{
String elementClassName = collDef.getProperty(PropertyHelper.OJB_PROPERTY_ELEMENT_CLASS_REF);
ClassDescriptorDef elementClassDef = modelDef.getClass(elementClassName);
String fkFieldNames = collDef.getProperty(PropertyHelper.OJB_PROPERTY_FOREIGNKEY);
ArrayList missingFields = new ArrayList();
SequencedHashMap fkFields = new SequencedHashMap();
// first we gather all field names
for (CommaListIterator it = new CommaListIterator(fkFieldNames); it.hasNext();)
{
String fieldName = (String)it.next();
FieldDescriptorDef fieldDef = elementClassDef.getField(fieldName);
if (fieldDef == null)
{
missingFields.add(fieldName);
}
fkFields.put(fieldName, fieldDef);
}
// next we traverse all sub types and gather fields as we go
for (Iterator it = elementClassDef.getAllExtentClasses(); it.hasNext() && !missingFields.isEmpty();)
{
ClassDescriptorDef subTypeDef = (ClassDescriptorDef)it.next();
for (int idx = 0; idx < missingFields.size();)
{
FieldDescriptorDef fieldDef = subTypeDef.getField((String)missingFields.get(idx));
if (fieldDef != null)
{
fkFields.put(fieldDef.getName(), fieldDef);
missingFields.remove(idx);
}
else
{
idx++;
}
}
}
if (!missingFields.isEmpty())
{
throw new ConstraintException("Cannot find field "+missingFields.get(0).toString()+" in the hierarchy with root type "+
elementClassDef.getName()+" which is used as foreignkey in collection "+
collDef.getName()+" in "+collDef.getOwner().getName());
}
// copy the found fields into the element class
ensureFields(elementClassDef, fkFields.values());
} | java |
private void ensurePKsFromHierarchy(ClassDescriptorDef classDef) throws ConstraintException
{
SequencedHashMap pks = new SequencedHashMap();
for (Iterator it = classDef.getAllExtentClasses(); it.hasNext();)
{
ClassDescriptorDef subTypeDef = (ClassDescriptorDef)it.next();
ArrayList subPKs = subTypeDef.getPrimaryKeys();
// check against already present PKs
for (Iterator pkIt = subPKs.iterator(); pkIt.hasNext();)
{
FieldDescriptorDef fieldDef = (FieldDescriptorDef)pkIt.next();
FieldDescriptorDef foundPKDef = (FieldDescriptorDef)pks.get(fieldDef.getName());
if (foundPKDef != null)
{
if (!isEqual(fieldDef, foundPKDef))
{
throw new ConstraintException("Cannot pull up the declaration of the required primary key "+fieldDef.getName()+
" because its definitions in "+fieldDef.getOwner().getName()+" and "+
foundPKDef.getOwner().getName()+" differ");
}
}
else
{
pks.put(fieldDef.getName(), fieldDef);
}
}
}
ensureFields(classDef, pks.values());
} | java |
private void ensureFields(ClassDescriptorDef classDef, Collection fields) throws ConstraintException
{
boolean forceVirtual = !classDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_GENERATE_REPOSITORY_INFO, true);
for (Iterator it = fields.iterator(); it.hasNext();)
{
FieldDescriptorDef fieldDef = (FieldDescriptorDef)it.next();
// First we check whether this field is already present in the class
FieldDescriptorDef foundFieldDef = classDef.getField(fieldDef.getName());
if (foundFieldDef != null)
{
if (isEqual(fieldDef, foundFieldDef))
{
if (forceVirtual)
{
foundFieldDef.setProperty(PropertyHelper.OJB_PROPERTY_VIRTUAL_FIELD, "true");
}
continue;
}
else
{
throw new ConstraintException("Cannot pull up the declaration of the required field "+fieldDef.getName()+
" from type "+fieldDef.getOwner().getName()+" to basetype "+classDef.getName()+
" because there is already a different field of the same name");
}
}
// perhaps a reference or collection ?
if (classDef.getCollection(fieldDef.getName()) != null)
{
throw new ConstraintException("Cannot pull up the declaration of the required field "+fieldDef.getName()+
" from type "+fieldDef.getOwner().getName()+" to basetype "+classDef.getName()+
" because there is already a collection of the same name");
}
if (classDef.getReference(fieldDef.getName()) != null)
{
throw new ConstraintException("Cannot pull up the declaration of the required field "+fieldDef.getName()+
" from type "+fieldDef.getOwner().getName()+" to basetype "+classDef.getName()+
" because there is already a reference of the same name");
}
classDef.addFieldClone(fieldDef);
classDef.getField(fieldDef.getName()).setProperty(PropertyHelper.OJB_PROPERTY_VIRTUAL_FIELD, "true");
}
} | java |
private boolean isEqual(FieldDescriptorDef first, FieldDescriptorDef second)
{
return first.getName().equals(second.getName()) &&
first.getProperty(PropertyHelper.OJB_PROPERTY_COLUMN).equals(second.getProperty(PropertyHelper.OJB_PROPERTY_COLUMN)) &&
first.getProperty(PropertyHelper.OJB_PROPERTY_JDBC_TYPE).equals(second.getProperty(PropertyHelper.OJB_PROPERTY_JDBC_TYPE));
} | java |
private void checkCollectionForeignkeys(ModelDef modelDef, String checkLevel) throws ConstraintException
{
if (CHECKLEVEL_NONE.equals(checkLevel))
{
return;
}
ClassDescriptorDef classDef;
CollectionDescriptorDef collDef;
for (Iterator it = modelDef.getClasses(); it.hasNext();)
{
classDef = (ClassDescriptorDef)it.next();
for (Iterator collIt = classDef.getCollections(); collIt.hasNext();)
{
collDef = (CollectionDescriptorDef)collIt.next();
if (!collDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_IGNORE, false))
{
if (collDef.hasProperty(PropertyHelper.OJB_PROPERTY_INDIRECTION_TABLE))
{
checkIndirectionTable(modelDef, collDef);
}
else
{
checkCollectionForeignkeys(modelDef, collDef);
}
}
}
}
} | java |
private void checkReferenceForeignkeys(ModelDef modelDef, String checkLevel) throws ConstraintException
{
if (CHECKLEVEL_NONE.equals(checkLevel))
{
return;
}
ClassDescriptorDef classDef;
ReferenceDescriptorDef refDef;
for (Iterator it = modelDef.getClasses(); it.hasNext();)
{
classDef = (ClassDescriptorDef)it.next();
for (Iterator refIt = classDef.getReferences(); refIt.hasNext();)
{
refDef = (ReferenceDescriptorDef)refIt.next();
if (!refDef.getBooleanProperty(PropertyHelper.OJB_PROPERTY_IGNORE, false))
{
checkReferenceForeignkeys(modelDef, refDef);
}
}
}
} | java |
private ReferenceDescriptorDef usedByReference(ModelDef modelDef, FieldDescriptorDef fieldDef)
{
String ownerClassName = ((ClassDescriptorDef)fieldDef.getOwner()).getQualifiedName();
ClassDescriptorDef classDef;
ReferenceDescriptorDef refDef;
String targetClassName;
// only relevant for primarykey fields
if (PropertyHelper.toBoolean(fieldDef.getProperty(PropertyHelper.OJB_PROPERTY_PRIMARYKEY), false))
{
for (Iterator classIt = modelDef.getClasses(); classIt.hasNext();)
{
classDef = (ClassDescriptorDef)classIt.next();
for (Iterator refIt = classDef.getReferences(); refIt.hasNext();)
{
refDef = (ReferenceDescriptorDef)refIt.next();
targetClassName = refDef.getProperty(PropertyHelper.OJB_PROPERTY_CLASS_REF).replace('$', '.');
if (ownerClassName.equals(targetClassName))
{
// the field is a primary key of the class referenced by this reference descriptor
return refDef;
}
}
}
}
return null;
} | java |
public Object[] getForeignKeyValues(Object obj, ClassDescriptor mif)
throws PersistenceBrokerException
{
FieldDescriptor[] fks = getForeignKeyFieldDescriptors(mif);
// materialize object only if FK fields are declared
if(fks.length > 0) obj = ProxyHelper.getRealObject(obj);
Object[] result = new Object[fks.length];
for (int i = 0; i < result.length; i++)
{
FieldDescriptor fmd = fks[i];
PersistentField f = fmd.getPersistentField();
// BRJ: do NOT convert.
// conversion is done when binding the sql-statement
//
// FieldConversion fc = fmd.getFieldConversion();
// Object val = fc.javaToSql(f.get(obj));
result[i] = f.get(obj);
}
return result;
} | java |
public void addForeignKeyField(int newId)
{
if (m_ForeignKeyFields == null)
{
m_ForeignKeyFields = new Vector();
}
m_ForeignKeyFields.add(new Integer(newId));
} | java |
public void addForeignKeyField(String newField)
{
if (m_ForeignKeyFields == null)
{
m_ForeignKeyFields = new Vector();
}
m_ForeignKeyFields.add(newField);
} | java |
public OJBLock atomicGetOrCreateLock(Object resourceId, Object isolationId)
{
synchronized(globalLocks)
{
MultiLevelLock lock = getLock(resourceId);
if(lock == null)
{
lock = createLock(resourceId, isolationId);
}
return (OJBLock) lock;
}
} | java |
public void initSize(Rectangle rectangle) {
template = writer.getDirectContent().createTemplate(rectangle.getWidth(), rectangle.getHeight());
} | java |
public Rectangle getTextSize(String text, Font font) {
template.saveState();
// get the font
DefaultFontMapper mapper = new DefaultFontMapper();
BaseFont bf = mapper.awtToPdf(font);
template.setFontAndSize(bf, font.getSize());
// calculate text width and height
float textWidth = template.getEffectiveStringWidth(text, false);
float ascent = bf.getAscentPoint(text, font.getSize());
float descent = bf.getDescentPoint(text, font.getSize());
float textHeight = ascent - descent;
template.restoreState();
return new Rectangle(0, 0, textWidth, textHeight);
} | java |
public void drawText(String text, Font font, Rectangle box, Color fontColor) {
template.saveState();
// get the font
DefaultFontMapper mapper = new DefaultFontMapper();
BaseFont bf = mapper.awtToPdf(font);
template.setFontAndSize(bf, font.getSize());
// calculate descent
float descent = 0;
if (text != null) {
descent = bf.getDescentPoint(text, font.getSize());
}
// calculate the fitting size
Rectangle fit = getTextSize(text, font);
// draw text if necessary
template.setColorFill(fontColor);
template.beginText();
template.showTextAligned(PdfContentByte.ALIGN_LEFT, text, origX + box.getLeft() + 0.5f
* (box.getWidth() - fit.getWidth()), origY + box.getBottom() + 0.5f
* (box.getHeight() - fit.getHeight()) - descent, 0);
template.endText();
template.restoreState();
} | java |
public void strokeRectangle(Rectangle rect, Color color, float linewidth) {
strokeRectangle(rect, color, linewidth, null);
} | java |
public void strokeRoundRectangle(Rectangle rect, Color color, float linewidth, float r) {
template.saveState();
setStroke(color, linewidth, null);
template.roundRectangle(origX + rect.getLeft(), origY + rect.getBottom(), rect.getWidth(), rect.getHeight(), r);
template.stroke();
template.restoreState();
} | java |
public void fillRectangle(Rectangle rect, Color color) {
template.saveState();
setFill(color);
template.rectangle(origX + rect.getLeft(), origY + rect.getBottom(), rect.getWidth(), rect.getHeight());
template.fill();
template.restoreState();
} | java |
public void strokeEllipse(Rectangle rect, Color color, float linewidth) {
template.saveState();
setStroke(color, linewidth, null);
template.ellipse(origX + rect.getLeft(), origY + rect.getBottom(), origX + rect.getRight(),
origY + rect.getTop());
template.stroke();
template.restoreState();
} | java |
public void fillEllipse(Rectangle rect, Color color) {
template.saveState();
setFill(color);
template.ellipse(origX + rect.getLeft(), origY + rect.getBottom(), origX + rect.getRight(),
origY + rect.getTop());
template.fill();
template.restoreState();
} | java |
public void moveRectangleTo(Rectangle rect, float x, float y) {
float width = rect.getWidth();
float height = rect.getHeight();
rect.setLeft(x);
rect.setBottom(y);
rect.setRight(rect.getLeft() + width);
rect.setTop(rect.getBottom() + height);
} | java |
public void translateRectangle(Rectangle rect, float dx, float dy) {
float width = rect.getWidth();
float height = rect.getHeight();
rect.setLeft(rect.getLeft() + dx);
rect.setBottom(rect.getBottom() + dy);
// left/bottom are already shifted, so right/top are simply offset by the stored size
rect.setRight(rect.getLeft() + width);
rect.setTop(rect.getBottom() + height);
} | java |