lower_case_table_names=1
You can determine the case-sensitivity of your own MySQL installation by logging in with the command-line client (or running the Query Browser) and entering the following command:
SHOW VARIABLES LIKE 'lower_case%'
lower_case_table_names=1
SHOW VARIABLES LIKE 'lower_case%'
-- Quartz 1.6 JDBC-JobStore schema for MySQL (InnoDB).
-- FIX: the original script used "TYPE=InnoDB", which was deprecated in MySQL 4.x
-- and removed entirely in MySQL 5.5; "ENGINE=InnoDB" is the supported equivalent.

-- Drop child tables (those declaring FOREIGN KEYs) before their parents.
DROP TABLE IF EXISTS QRTZ_JOB_LISTENERS;
DROP TABLE IF EXISTS QRTZ_TRIGGER_LISTENERS;
DROP TABLE IF EXISTS QRTZ_FIRED_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_PAUSED_TRIGGER_GRPS;
DROP TABLE IF EXISTS QRTZ_SCHEDULER_STATE;
DROP TABLE IF EXISTS QRTZ_LOCKS;
DROP TABLE IF EXISTS QRTZ_SIMPLE_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_CRON_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_BLOB_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_TRIGGERS;
DROP TABLE IF EXISTS QRTZ_JOB_DETAILS;
DROP TABLE IF EXISTS QRTZ_CALENDARS;

-- Job definitions; parent of QRTZ_TRIGGERS and QRTZ_JOB_LISTENERS.
CREATE TABLE QRTZ_JOB_DETAILS (
    JOB_NAME VARCHAR(200) NOT NULL,
    JOB_GROUP VARCHAR(200) NOT NULL,
    DESCRIPTION VARCHAR(250) NULL,
    JOB_CLASS_NAME VARCHAR(250) NOT NULL,
    IS_DURABLE VARCHAR(1) NOT NULL,
    IS_VOLATILE VARCHAR(1) NOT NULL,
    IS_STATEFUL VARCHAR(1) NOT NULL,
    REQUESTS_RECOVERY VARCHAR(1) NOT NULL,
    JOB_DATA BLOB NULL,              -- serialized JobDataMap
    PRIMARY KEY (JOB_NAME, JOB_GROUP))
ENGINE=InnoDB;

CREATE TABLE QRTZ_JOB_LISTENERS (
    JOB_NAME VARCHAR(200) NOT NULL,
    JOB_GROUP VARCHAR(200) NOT NULL,
    JOB_LISTENER VARCHAR(200) NOT NULL,
    PRIMARY KEY (JOB_NAME, JOB_GROUP, JOB_LISTENER),
    INDEX (JOB_NAME, JOB_GROUP),
    FOREIGN KEY (JOB_NAME, JOB_GROUP)
        REFERENCES QRTZ_JOB_DETAILS(JOB_NAME, JOB_GROUP))
ENGINE=InnoDB;

-- All triggers regardless of type; type-specific details live in the
-- QRTZ_SIMPLE_TRIGGERS / QRTZ_CRON_TRIGGERS / QRTZ_BLOB_TRIGGERS children.
-- Times are epoch milliseconds (hence BIGINT(13)).
CREATE TABLE QRTZ_TRIGGERS (
    TRIGGER_NAME VARCHAR(200) NOT NULL,
    TRIGGER_GROUP VARCHAR(200) NOT NULL,
    JOB_NAME VARCHAR(200) NOT NULL,
    JOB_GROUP VARCHAR(200) NOT NULL,
    IS_VOLATILE VARCHAR(1) NOT NULL,
    DESCRIPTION VARCHAR(250) NULL,
    NEXT_FIRE_TIME BIGINT(13) NULL,
    PREV_FIRE_TIME BIGINT(13) NULL,
    PRIORITY INTEGER NULL,
    TRIGGER_STATE VARCHAR(16) NOT NULL,
    TRIGGER_TYPE VARCHAR(8) NOT NULL,
    START_TIME BIGINT(13) NOT NULL,
    END_TIME BIGINT(13) NULL,
    CALENDAR_NAME VARCHAR(200) NULL,
    MISFIRE_INSTR SMALLINT(2) NULL,
    JOB_DATA BLOB NULL,
    PRIMARY KEY (TRIGGER_NAME, TRIGGER_GROUP),
    INDEX (JOB_NAME, JOB_GROUP),
    FOREIGN KEY (JOB_NAME, JOB_GROUP)
        REFERENCES QRTZ_JOB_DETAILS(JOB_NAME, JOB_GROUP))
ENGINE=InnoDB;

CREATE TABLE QRTZ_SIMPLE_TRIGGERS (
    TRIGGER_NAME VARCHAR(200) NOT NULL,
    TRIGGER_GROUP VARCHAR(200) NOT NULL,
    REPEAT_COUNT BIGINT(7) NOT NULL,
    REPEAT_INTERVAL BIGINT(12) NOT NULL,
    TIMES_TRIGGERED BIGINT(7) NOT NULL,
    PRIMARY KEY (TRIGGER_NAME, TRIGGER_GROUP),
    INDEX (TRIGGER_NAME, TRIGGER_GROUP),
    FOREIGN KEY (TRIGGER_NAME, TRIGGER_GROUP)
        REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME, TRIGGER_GROUP))
ENGINE=InnoDB;

CREATE TABLE QRTZ_CRON_TRIGGERS (
    TRIGGER_NAME VARCHAR(200) NOT NULL,
    TRIGGER_GROUP VARCHAR(200) NOT NULL,
    CRON_EXPRESSION VARCHAR(120) NOT NULL,
    TIME_ZONE_ID VARCHAR(80),
    PRIMARY KEY (TRIGGER_NAME, TRIGGER_GROUP),
    INDEX (TRIGGER_NAME, TRIGGER_GROUP),
    FOREIGN KEY (TRIGGER_NAME, TRIGGER_GROUP)
        REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME, TRIGGER_GROUP))
ENGINE=InnoDB;

CREATE TABLE QRTZ_BLOB_TRIGGERS (
    TRIGGER_NAME VARCHAR(200) NOT NULL,
    TRIGGER_GROUP VARCHAR(200) NOT NULL,
    BLOB_DATA BLOB NULL,
    PRIMARY KEY (TRIGGER_NAME, TRIGGER_GROUP),
    INDEX (TRIGGER_NAME, TRIGGER_GROUP),
    FOREIGN KEY (TRIGGER_NAME, TRIGGER_GROUP)
        REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME, TRIGGER_GROUP))
ENGINE=InnoDB;

CREATE TABLE QRTZ_TRIGGER_LISTENERS (
    TRIGGER_NAME VARCHAR(200) NOT NULL,
    TRIGGER_GROUP VARCHAR(200) NOT NULL,
    TRIGGER_LISTENER VARCHAR(200) NOT NULL,
    PRIMARY KEY (TRIGGER_NAME, TRIGGER_GROUP, TRIGGER_LISTENER),
    INDEX (TRIGGER_NAME, TRIGGER_GROUP),
    FOREIGN KEY (TRIGGER_NAME, TRIGGER_GROUP)
        REFERENCES QRTZ_TRIGGERS(TRIGGER_NAME, TRIGGER_GROUP))
ENGINE=InnoDB;

CREATE TABLE QRTZ_CALENDARS (
    CALENDAR_NAME VARCHAR(200) NOT NULL,
    CALENDAR BLOB NOT NULL,          -- serialized org.quartz.Calendar
    PRIMARY KEY (CALENDAR_NAME))
ENGINE=InnoDB;

CREATE TABLE QRTZ_PAUSED_TRIGGER_GRPS (
    TRIGGER_GROUP VARCHAR(200) NOT NULL,
    PRIMARY KEY (TRIGGER_GROUP))
ENGINE=InnoDB;

-- Audit of in-flight trigger firings; used for misfire/recovery handling.
CREATE TABLE QRTZ_FIRED_TRIGGERS (
    ENTRY_ID VARCHAR(95) NOT NULL,
    TRIGGER_NAME VARCHAR(200) NOT NULL,
    TRIGGER_GROUP VARCHAR(200) NOT NULL,
    IS_VOLATILE VARCHAR(1) NOT NULL,
    INSTANCE_NAME VARCHAR(200) NOT NULL,
    FIRED_TIME BIGINT(13) NOT NULL,
    PRIORITY INTEGER NOT NULL,
    STATE VARCHAR(16) NOT NULL,
    JOB_NAME VARCHAR(200) NULL,
    JOB_GROUP VARCHAR(200) NULL,
    IS_STATEFUL VARCHAR(1) NULL,
    REQUESTS_RECOVERY VARCHAR(1) NULL,
    PRIMARY KEY (ENTRY_ID))
ENGINE=InnoDB;

-- Heartbeat records for clustered scheduler instances.
CREATE TABLE QRTZ_SCHEDULER_STATE (
    INSTANCE_NAME VARCHAR(200) NOT NULL,
    LAST_CHECKIN_TIME BIGINT(13) NOT NULL,
    CHECKIN_INTERVAL BIGINT(13) NOT NULL,
    PRIMARY KEY (INSTANCE_NAME))
ENGINE=InnoDB;

CREATE TABLE QRTZ_LOCKS (
    LOCK_NAME VARCHAR(40) NOT NULL,
    PRIMARY KEY (LOCK_NAME))
ENGINE=InnoDB;

-- Seed the named lock rows used for row-level locking of scheduler
-- operations; explicit column lists guard against future schema changes.
INSERT INTO QRTZ_LOCKS (LOCK_NAME) VALUES ('TRIGGER_ACCESS');
INSERT INTO QRTZ_LOCKS (LOCK_NAME) VALUES ('JOB_ACCESS');
INSERT INTO QRTZ_LOCKS (LOCK_NAME) VALUES ('CALENDAR_ACCESS');
INSERT INTO QRTZ_LOCKS (LOCK_NAME) VALUES ('STATE_ACCESS');
INSERT INTO QRTZ_LOCKS (LOCK_NAME) VALUES ('MISFIRE_ACCESS');

COMMIT;
<dependency>
<groupId>opensymphony</groupId>
<artifactId>quartz</artifactId>
<version>1.6.0</version>
<scope>provided</scope>
</dependency>
<bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="jobFactory">
<bean class="org.springframework.scheduling.quartz.SpringBeanJobFactory"/>
</property>
<property name="dataSource" ref="mainDataSource" />
<property name="transactionManager" ref="mainDataSourceTransactionManager" />
<property name="quartzProperties">
<util:properties location="/WEB-INF/config/quartz.properties"/>
</property>
<property name="applicationContextSchedulerContextKey" value="applicationContext"/>
<property name="waitForJobsToCompleteOnShutdown" value="true" />
</bean>
<bean id="mainDataSourceTransactionManager" class="org.springframework.jdbc.datasource.DataSourceTransactionManager" p:dataSource-ref="mainDataSource" />
org.quartz.threadPool.class=org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount=5
org.quartz.threadPool.threadPriority=4
org.quartz.jobStore.tablePrefix=qrtz_
org.quartz.jobStore.isClustered=false
org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.StdJDBCDelegate
public class SpringBeanDelegatingJob implements Job {

    private static final Log LOGGER = LogFactory.getLog(SpringBeanDelegatingJob.class);

    /** Key under which the ApplicationContext is exposed in the scheduler context
     *  (matches the 'applicationContextSchedulerContextKey' SchedulerFactoryBean property). */
    public static final String APPLICATION_CONTEXT_KEY = "applicationContext";

    /**
     * Looks up a Spring-managed {@link Job} bean and delegates execution to it.
     * The target bean name is derived from the JobDetail name by stripping
     * everything from "Detail" onward (e.g. "payrollJobDetail" -> "payrollJob").
     *
     * @param arg0 the Quartz execution context supplied by the scheduler
     * @throws JobExecutionException if the scheduler context cannot be read,
     *         no ApplicationContext is present under {@link #APPLICATION_CONTEXT_KEY},
     *         or the target bean cannot be retrieved
     */
    @SuppressWarnings("unchecked")
    public void execute(JobExecutionContext arg0) throws JobExecutionException {
        JobDetail jobDetail = arg0.getJobDetail();
        // Naming convention: job detail "fooJobDetail" delegates to bean "fooJob".
        String beanName = substringBefore(jobDetail.getName(), "Detail");
        if (LOGGER.isInfoEnabled()) {
            LOGGER.info("Running SpringBeanDelegatingJob - Job Name ["+jobDetail.getName()+"], Group Name ["+jobDetail.getGroup()+"]");
            LOGGER.info("Delegating to bean ["+beanName+"]");
        }
        ApplicationContext applicationContext = null;
        try {
            applicationContext = (ApplicationContext) arg0.getScheduler().getContext().get(APPLICATION_CONTEXT_KEY);
        } catch (SchedulerException e2) {
            // FIX: replaced an unprofessional/profane error message; the cause is still chained.
            throw new JobExecutionException("Unable to obtain the scheduler context", e2);
        }
        if (applicationContext == null) {
            // Guard: without this, a missing/misconfigured context key would surface
            // as a bare NullPointerException on the getBean(...) call below.
            throw new JobExecutionException("No ApplicationContext found in the scheduler context under key ["+APPLICATION_CONTEXT_KEY+"]");
        }
        Job bean = null;
        try {
            bean = (Job) applicationContext.getBean(beanName, Job.class);
        } catch (BeansException e1) {
            throw new JobExecutionException("Unable to retrieve target bean that is to be used as a job source", e1);
        }
        bean.execute(arg0);
    }
}
public class SpringBeanMethodInvokingJob implements InitializingBean, Job {

    private Object targetBean;   // Spring bean on which targetMethod is invoked
    private String targetMethod; // name of a public no-argument method on targetBean

    //Constructors
    public SpringBeanMethodInvokingJob() {
        super();
    }

    //Behaviour Methods
    /**
     * Reflectively invokes the configured no-argument method on the target bean.
     *
     * @param arg0 the Quartz execution context (unused by this implementation)
     * @throws JobExecutionException if the method cannot be resolved or the invocation fails
     */
    public void execute(JobExecutionContext arg0) throws JobExecutionException {
        Method method = null;
        try {
            method = targetBean.getClass().getMethod(targetMethod);
        } catch (Exception e) {
            // FIX: chain 'e' so the underlying NoSuchMethodException/SecurityException
            // is preserved in the stack trace instead of being silently discarded.
            throw new JobExecutionException("Unable to get targetMethod ["+targetMethod+
                    "] on bean with class ["+targetBean.getClass().getName()+"]", e);
        }
        try {
            method.invoke(targetBean);
        } catch (Exception e) {
            // FIX: chain 'e' (e.g. the InvocationTargetException wrapping the real failure).
            throw new JobExecutionException("Unable to invoke method ["+method.getName()+"] on bean ["+targetBean.toString()+"]", e);
        }
    }

    /**
     * Validates that both required properties were injected before first use.
     */
    public void afterPropertiesSet() throws Exception {
        Assert.notNull(targetBean, "'targetBean' cannot be null");
        Assert.isTrue(isNotBlank(targetMethod), "'targetMethod' cannot be blank");
    }

    //Property Accessors
    @Required
    public final void setTargetBean(Object targetBean) {
        this.targetBean = targetBean;
    }

    @Required
    public final void setTargetMethod(String targetMethod) {
        this.targetMethod = targetMethod;
    }
}
apt-get install mytop
SET PASSWORD FOR 'myuser'@'%.wherever.com' = PASSWORD('newpass');
show processlist
My configuration went from this (in Webflow 1) :
<flow:registry id="flowRegistry">
<flow:location path="/WEB-INF/flows/**/*-flow.xml"/>
</flow:registry>
<flow:executor id="flowExecutor" registry-ref="flowRegistry">
<!--flow:execution-listeners>
<flow:listener ref="webflowDebugListener"/>
</flow:execution-listeners-->
</flow:executor>
<!--bean id="webflowDebugListener" class="org.springframework.webflow.execution.DebuggingListener"/-->
<bean name="flowController" class="org.springframework.webflow.executor.mvc.FlowController">
<property name="flowExecutor" ref="flowExecutor" />
<property name="argumentHandler">
<bean class="org.springframework.webflow.executor.support.RequestParameterFlowExecutorArgumentHandler" />
</property>
</bean>
To this (in Webflow 2) :
<webflow:flow-registry id="flowRegistry" flow-builder-services="flowBuilderServices">
<webflow:flow-location-pattern value="/WEB-INF/flows/**/*-flow.xml"/>
</webflow:flow-registry>
<webflow:flow-executor id="flowExecutor" flow-registry="flowRegistry">
<webflow:flow-execution-listeners>
<!-- webflow:listener ref="webflowDebugListener"/ -->
<webflow:listener ref="securityFlowExecutionListener"/>
</webflow:flow-execution-listeners>
</webflow:flow-executor>
<bean id="securityFlowExecutionListener" class="org.springframework.webflow.security.SecurityFlowExecutionListener" />
<webflow:flow-builder-services id="flowBuilderServices" view-factory-creator="viewFactoryCreator" conversion-service="webflowConversionService"/>
<bean id="webflowConversionService" class="com.mypackage.modules.springframework.webflow2.ConversionServiceFactoryBean">
<property name="editorMappings">
<map>
<entry key="java.util.Calendar"><idref bean="sqlDateCalendarEditor"/></entry>
</map>
</property>
</bean>
<bean id="viewFactoryCreator" class="org.springframework.webflow.mvc.builder.MvcViewFactoryCreator">
<property name="viewResolvers">
<list>
<ref local="xmlViewResolver"/>
<ref local="decoratedJstlViewResolver"/>
<ref local="urlBasedViewResolver"/>
</list>
</property>
</bean>
<bean id="webflowDebugListener" class="org.springframework.webflow.execution.DebuggingListener"/>
<bean name="flowController" class="org.springframework.webflow.mvc.servlet.FlowController">
<property name="flowExecutor" ref="flowExecutor" />
<property name="flowUrlHandler">
<bean class="org.springframework.webflow.context.servlet.DefaultFlowUrlHandler" />
</property>
</bean>
Now, you may be asking yourself, "Why would there be more configuration in Spring Webflow 2? Doesn't Spring generally improve on things like configuration syntax between major versions?" The answer is, of course, yes. However, there are a number of new features that come with Webflow 2 that need to be configured, hence the extra beans for configuration. Note also that the syntax has changed. I direct your attention to the added 'flow-' prefixes on the element names of the previously existing Webflow elements.
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-core-tiger</artifactId>
<version>${spring.security.version}</version>
<exclusions>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-core</artifactId>
</exclusion>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-support</artifactId>
</exclusion>
</exclusions>
</dependency>
...
<dependency>
<groupId>org.springframework.security</groupId>
<artifactId>spring-security-taglibs</artifactId>
<version>${spring.security.version}</version>
<exclusions>
<exclusion>
<groupId>org.springframework</groupId>
<artifactId>spring-web</artifactId>
</exclusion>
</exclusions>
</dependency>
xmlns:security="http://www.springframework.org/security/tags"
// Type-safe security role that serves double duty in Spring Security:
// as a GrantedAuthority (what a principal HAS) and as a ConfigAttribute
// (what a secured resource REQUIRES).
public enum Role implements GrantedAuthority, ConfigAttribute
{
ROLE_ADMINISTRATOR;
// Authority string; by Spring Security convention this is the constant name itself.
public String getAuthority() {
return name();
}
// ConfigAttribute value; deliberately the same string as the authority.
public String getAttribute() {
return name();
}
// Resource-bundle key for localizing this role's display name.
public String getMessageKey() {
return "enum.Role.".concat(name());
}
}
public final class Role implements GrantedAuthority, ConfigAttribute, Serializable, Cloneable {
private static final long serialVersionUID = 1L;
public static final Role ROLE_ADMINISTRATOR = new Role(0, "ROLE_ADMINISTRATOR");
private static final Role[] VALUES = new Role[] {
ROLE_ADMINISTRATOR
};
private static final MapNAME_MAPPINGS = new HashMap ();
static {
Arrays.sort(VALUES, new Comparator() {
public int compare(Role o1, Role o2) {
return o1.ordinal - o2.ordinal;
}
});
for (Role r : VALUES) {
NAME_MAPPINGS.put (r.getName(), r);
}
}
private int ordinal;
private String name;
//Constructors
/*
* DO NOT EVER USE THIS! It exists only for serialization purposes.
*/
public Role() {
super();
}
private Role(Role r) {
this(r.ordinal, r.name);
}
private Role(int ordinal, String name) {
super();
this.ordinal = ordinal;
this.name = name;
}
//Behaviour Methods
public int compareTo(Object o) {
if (o == null || o.getClass() != Role.class) {
throw new IllegalArgumentException(
"Comparison object may not be null, and must be a Role");
}
return this.ordinal - ((Role) o).ordinal;
}
public String getAuthority() {
return getName();
}
public String getAttribute() {
return getName();
}
//Pseudo-properties
public String getMessageKey() {
return "enum.Role.".concat(getName());
}
//Property Accessors
public final int getOrdinal() {
return this.ordinal;
}
public final String getName() {
return this.name;
}
//Helper Methods
public static final int hashCode(Role r) {
return r == null ? 0 : r.hashCode();
}
public static final boolean equals(Role x, Role y) {
return x == null ? (y == null) : x.equals(y);
}
public static final Role clone(Role r) {
return r == null ? null : r.clone();
}
public static final Role[] values() {
return VALUES;
}
public static final Role valueOf(String s) throws IllegalArgumentException {
String key = defaultString(s).toUpperCase();
if (NAME_MAPPINGS.containsKey(key)) {
return NAME_MAPPINGS.get(key);
} else {
throw new IllegalArgumentException("No role by the name ["+s+"] exists");
}
}
//Object Overrides
@Override
public String toString() {
return new StringBuffer().append(this.name)
.append("(").append(this.ordinal).append(")")
.toString();
}
@Override
public Role clone() {
return new Role(this);
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((this.name == null) ? 0 : this.name.hashCode());
result = prime * result + this.ordinal;
return result;
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
final Role other = (Role) obj;
if (this.name == null) {
if (other.name != null)
return false;
} else if (!this.name.equals(other.name))
return false;
if (this.ordinal != other.ordinal)
return false;
return true;
}
}
// Hibernate custom type that persists the type-safe Role class as its integer
// ordinal in a single INTEGER column.
// NOTE(review): this implements the pre-Hibernate-3.6 UserType contract
// (nullSafeGet/nullSafeSet without a session parameter) — confirm against the
// Hibernate version in use.
public class RoleUserType implements UserType {
// Role maps to exactly one INTEGER column.
private static final int[] SQL_TYPES = new int[] { Types.INTEGER };
// Name under which this type is registered (see the @TypeDef declaration elsewhere).
public static final String HIBERNATE_TYPE_NAME = "RoleUserType";
// Role is treated as immutable (see isMutable), so the reference itself is a valid copy.
public Object deepCopy(Object value) throws HibernateException {
return value;
}
// Cached-to-live conversion: Role is Serializable, so a plain cast suffices.
public Object assemble(Serializable cached, Object owner) throws HibernateException {
return (Role)cached;
}
// Live-to-cached conversion: the Role itself is the serializable form.
public Serializable disassemble(Object value) throws HibernateException {
return (Role)value;
}
// Delegates to Role's null-safe static equality helper.
public boolean equals(Object x, Object y) throws HibernateException {
return Role.equals((Role)x, (Role)y);
}
// Delegates to Role's null-safe static hashCode helper.
public int hashCode(Object x) throws HibernateException {
return Role.hashCode((Role)x);
}
// Immutable: Hibernate may skip dirty-checking copies.
public boolean isMutable() {
return false;
}
// Reads the stored ordinal and maps it back through Role.values().
// NOTE(review): assumes ordinals are dense and equal to array positions in
// values() — confirm this invariant holds if new roles are added.
public Object nullSafeGet(ResultSet resultSet, String[] names, Object owner) throws HibernateException, SQLException {
int roleOrdinal = resultSet.getInt(names[0]);
return resultSet.wasNull() ? null : Role.values()[roleOrdinal];
}
// Writes the role's ordinal, or SQL NULL when the value is null.
public void nullSafeSet(PreparedStatement statement, Object value, int index)
throws HibernateException, SQLException {
if (value == null) {
statement.setNull(index, Types.INTEGER);
} else {
statement.setInt(index, ((Role)value).getOrdinal());
}
}
// Immutable type: the original detached value can be reused as-is during merge.
public Object replace(Object original, Object target, Object owner) throws HibernateException {
return original;
}
@SuppressWarnings("unchecked")
public Class returnedClass() {
return Role.class;
}
// Column types this user type binds to (a single INTEGER).
public int[] sqlTypes() {
return SQL_TYPES;
}
}
@TypeDef(name = RoleUserType.HIBERNATE_TYPE_NAME, typeClass = RoleUserType.class)
package com.mypackage.model;
import org.hibernate.annotations.TypeDef;
import eu.alenislimited.acshelper.support.hibernate.RoleUserType;
@Enumerated(EnumType.ORDINAL)
@Type(type = RoleUserType.HIBERNATE_TYPE_NAME)
# Explicitly defined writer (setter) method; equivalent to `attr_writer :name`.
# Defining it by hand allows it to be invoked dynamically, e.g. via send("name=", ...).
def name= (name)
@name = name
end
@person_instance.send "name=", "John Smith"
/**
 * Create an instance of {@link PurchaseOrder }
 * (JAXB ObjectFactory-style factory method).
 *
 * @return a newly constructed, empty {@link PurchaseOrder}
 */
public PurchaseOrder createPurchaseOrder() {
return new PurchaseOrder();
}
Pure FTP is a great FTP server and I love it because it's ridiculously fast, reliable and works well for our company. The only problem is that it's tricky to configure on Ubuntu because, just like with numerous other programs, the Ubuntu packagers changed the program's default way of doing things. Specifically, in Ubuntu, command line arguments for starting the FTP service are supplied through individual files — one file per command line option you want the service to be started with, with the option's value placed inside each file. This is an awkward way of doing things, but that's not the topic of this post.
The problem I've been having lately is that I recently deployed new projects within our production network, and these new projects required access to the FTP server, and aren't on the same box as the server, as is the sole other project that has been using the FTP server. I'd been getting problems trying to connect to the server from any LAN box, but not any external boxes nor the machine itself (localhost). The error I got back was "421 Service not available". I googled around for hours and found nothing useful, until I started realizing that other people were getting 421 errors when their PureFTP instance was misconfigured, but with different messages, and then it got me thinking that maybe my instance was somehow misconfigured.
I re-read the documentation for PureFTP and after an hour or so, it hit me that the server does reverse lookups to resolve fully qualified names, and such resolution doesn't work properly on our network (for good reasons that I'm not going to go into). After disabling reverse DNS resolution with the -H startup option (`echo "yes" >> DontResolve` in the configuration directory in Ubuntu), the problem went away.
I hope this helps anybody else who runs into the same problem.
For those not in the know, schemagen is a program that comes as part of the Java Web Services Developer Pack and it's used to generate XML schemas from JAXB annotations on Java beans. It so happens that the creators of the JAXB reference implementation also made a Maven 2 plugin for this program. I had previously attempted to use it to generate schemas in my initial attempts with JAXB, but had been presented with a slew of unfriendly exceptions being thrown at me whenever I ran the plugin. Not having the time back then to really play around with it and being considerably less experienced with JAXB, I had to shelve it and find other, less satisfying solutions to my problems with generating documentation.
Recently, I've had to come back to using JAXB because I'm making a RESTful Web Services API that leverages the power of both JAXB and Hibernate to do all my heavy lifting for me. This time around, I wasn't willing to tolerate a lack of schema to give to our users of the API, because it would mean a lot more work for them, and a lot more work for me. This time, I decided to be persistent and dig around the jaxb-schemagen plugin to make sure I could get it working. I'm proud to say that my perseverance paid off, and I now have my RESTful API schema being automatically generated for my application. Here are the problems and the solutions I ran into when I was trying to get going on this:
/etc/apt/sources.list, and you'll have to find a line similar to :
deb cdrom:[Ubuntu-Server 7.10 _Gutsy Gibbon_ - Release i386 (20071016)]/ gutsy main restricted
Remove-Item -recurse -force [directory name]
rm -rf [directory name]
It's been a while since I've posted to the blog 'cause I've been so busy, and it seems fitting that this be a good way to resume posting, as this issue has pissed me off quite a bit and been a major thorn in my side for the longest time.
The Spring Framework has some pretty good support for creating test classes for your application; however, by default it does not properly initialize log4j logging when running tests, and I found out today why. When running your application in a Servlet container, you'd configure Spring logging in web.xml. However, when running in a standalone context, Spring has no way of knowing how you want logging configured, so it leaves it up to log4j to configure itself. On that front, you have to realize what log4j's default configuration strategy is: reading a 'log4j.properties' file from the root of the classpath. Once this hits you (and it took me a while), getting logging running for your test cases becomes a simple matter of placing a valid 'log4j.properties' config file in the root of your test classpath, and logging starts working properly — so now you can read those pesky Hibernate-generated queries off your test log.
cat /etc/issue
lsb_release -a
java.lang.NullPointerException
at com.mysql.jdbc.StringUtils.indexOfIgnoreCaseRespectQuotes(StringUtils.java:959)
at com.mysql.jdbc.DatabaseMetaData.getCallStmtParameterTypes(DatabaseMetaData.java:1296)
at com.mysql.jdbc.DatabaseMetaData.getProcedureColumns(DatabaseMetaData.java:3670)
at com.mysql.jdbc.CallableStatement.determineParameterTypes(CallableStatement.java:702)
at com.mysql.jdbc.CallableStatement.(CallableStatement.java:513)
at com.mysql.jdbc.Connection.parseCallableStatement(Connection.java:4422)
at com.mysql.jdbc.Connection.prepareCall(Connection.java:4496)
at com.mysql.jdbc.Connection.prepareCall(Connection.java:4470)
at org.apache.commons.dbcp.DelegatingConnection.prepareCall(DelegatingConnection.java:275)
at org.apache.commons.dbcp.PoolingDataSource$PoolGuardConnectionWrapper.prepareCall(PoolingDataSource.java:292)
Ok, so for a while, I've had an SFTP jail setup within our company for our clients to connect and dump batch files to our systems. It later became necessary to have an administrator user for our staff to be able to go in and read any of the files from any of the clients without having to use a bunch of different logins to login as each individual client. This makes sense, as doing so would be far too cumbersome. This is where ACLs came in. I learned what I had to so that I could get going, but that wasn't very much (fortunately at the time, unfortunately later). I've since learned a couple of interesting things:
manpage.
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
Start -> Programs -> Microsoft SQL Server -> Client Network Utility -> Alias (tab) -> Add... (button). Under 'Server alias' enter an easy to remember name for the connection. Under 'Network libraries' select the TCP/IP option. Under 'Connection parameters' enter the DNS name or IP address under 'Server name' of the server you wish to connect to. Change the option for 'Dynamically determine port' if the server you're attempting to connect to doesn't run under the standard port of 1433 for SQL Server. Hit 'Ok' to save your settings, then 'Ok' to close out of the Client Network Utility.