-->
These old forums are deprecated now and set to read-only. We are waiting for you on our new forums!
More modern, Discourse-based and with GitHub/Google/Twitter authentication built-in.

All times are UTC - 5 hours [ DST ]



Forum locked This topic is locked, you cannot edit posts or make further replies.  [ 1 post ] 
Author Message
 Post subject: Hibernate Exception when updating objects
PostPosted: Fri Jun 03, 2005 10:29 am 
Beginner
Beginner

Joined: Thu Apr 29, 2004 4:31 am
Posts: 42
Need help with Hibernate? Read this first:
http://www.hibernate.org/ForumMailingli ... AskForHelp

Hibernate version:
2.1
Mapping documents:
<?xml version="1.0"?>

<!DOCTYPE hibernate-mapping PUBLIC
"-//Hibernate/Hibernate Mapping DTD 2.0//EN"
"http://hibernate.sourceforge.net/hibernate-mapping-2.0.dtd">

<!-- Hibernate 2.x mapping for de.nava.informa.impl.hibernate.Channel:
     scalar feed metadata, a one-to-many bag of items (inverse, cascade-all),
     and optional image/textinput many-to-one associations. -->
<hibernate-mapping>
<class
name="de.nava.informa.impl.hibernate.Channel"
table="CHANNELS"
dynamic-update="false"
dynamic-insert="false"
>
<!-- Surrogate primary key; unsaved-value="-1" marks transient instances. -->
<id
name="id"
column="CHANNEL_ID"
type="integer"
unsaved-value="-1"
>
<generator class="native">
</generator>
</id>
<property
name="title"
type="java.lang.String"
update="true"
insert="true"
column="TITLE"
not-null="true"
/>
<property
name="description"
type="java.lang.String"
update="true"
insert="true"
column="DESCRIPTION"
/>
<property
name="locationString"
type="string"
update="true"
insert="true"
column="LOCSTRING"
/>
<property
name="creator"
type="java.lang.String"
update="true"
insert="true"
column="CREATOR"
/>
<property
name="publisher"
type="java.lang.String"
update="true"
insert="true"
column="PUBLISHER"
/>
<property
name="language"
type="java.lang.String"
update="true"
insert="true"
column="LANGUAGE"
/>
<property
name="formatString"
type="string"
update="true"
insert="true"
column="FORMAT"
/>
<!-- Items collection: inverse="true" means the Item side owns the FK;
     cascade="all" propagates save/update/delete from channel to items. -->
<bag
name="items"
table="ITEMS"
lazy="false"
inverse="true"
cascade="all"
>
<key
column="CHANNEL_ID"
/>

<one-to-many
class="de.nava.informa.impl.hibernate.Item"
/>
</bag>
<many-to-one
name="image"
class="de.nava.informa.impl.hibernate.Image"
cascade="none"
outer-join="auto"
update="true"
insert="true"
column="IMAGE_ID"
not-null="false"
/>
<many-to-one
name="textInput"
class="de.nava.informa.impl.hibernate.TextInput"
cascade="none"
outer-join="auto"
update="true"
insert="true"
column="TEXTINPUT_ID"
not-null="false"
/>
<property
name="copyright"
type="java.lang.String"
update="true"
insert="true"
column="COPYRIGHT"
/>
<property
name="rating"
type="java.lang.String"
update="true"
insert="true"
column="RATING"
/>
<property
name="generator"
type="java.lang.String"
update="true"
insert="true"
column="GENERATOR"
/>
<property
name="docs"
type="java.lang.String"
update="true"
insert="true"
column="DOCS"
/>
<property
name="ttl"
type="int"
update="true"
insert="true"
column="TTL"
/>
<property
name="lastUpdated"
type="java.util.Date"
update="true"
insert="true"
column="LAST_UPDATED"
/>
<property
name="lastBuildDate"
type="java.util.Date"
update="true"
insert="true"
column="LAST_BUILD_DATE"
/>
<property
name="pubDate"
type="java.util.Date"
update="true"
insert="true"
column="PUB_DATE"
/>
<property
name="updatePeriod"
type="java.lang.String"
update="true"
insert="true"
column="UPDATE_PERIOD"
/>
<property
name="updateFrequency"
type="int"
update="true"
insert="true"
column="UPDATE_FREQUENCY"
/>
<property
name="updateBase"
type="java.util.Date"
update="true"
insert="true"
column="UPDATE_BASE"
/>
</class>
<!-- Exact lookup by numeric primary key: use '=' rather than 'like' —
     'like' is a string pattern operator and forces an implicit cast on the
     integer identifier. -->
<query name="ChannelQuery"><![CDATA[
from de.nava.informa.impl.hibernate.Channel c WHERE c.id = ?
]]>
</query>
</hibernate-mapping>

Code between sessionFactory.openSession() and session.close():
/**
* <p>
* Called by the <code>{@link org.quartz.Scheduler}</code> when a <code>{@link org.quartz.Trigger}</code>
* fires that is associated with the <code>Job</code>.
* </p>
*
* @throws JobExecutionException
* if there is an exception while executing the job.
*/
protected void executeInternal(JobExecutionContext context)
throws JobExecutionException {
System.err.println("***** " + name
+ ":we need to retrieve data from channel:"
+ channelId);
Transaction tx = null;
de.nava.informa.impl.hibernate.Channel channel = null;
net.sf.hibernate.Session session = null;
try {
String query = "ChannelQuery";
Object[] params = new Object[]{channelId};
SchedulerContext schedulerContext = context.getScheduler().getContext();
BeanFactory beanFactory = (BeanFactory)schedulerContext.get("applicationContext");
StockPersistenceManager persistenceManager = (StockPersistenceManager)beanFactory.getBean("stockPersistenceManager");

List channels = (List)persistenceManager.query(query,params);
System.err.println("****" + name
+ " has found " + channels.size()
+ " channels!!!");
net.sf.hibernate.SessionFactory factory = (net.sf.hibernate.SessionFactory)beanFactory.getBean("stockSessionFactory");
System.err.println("getting session from factory...");
session = SessionFactoryUtils.getNewSession(factory);
tx = session.beginTransaction();
if ((channels != null) && !channels.isEmpty()) {
channel = (de.nava.informa.impl.hibernate.Channel) channels.get(0);
}
ChannelBuilderIF builder = new ChannelBuilder(session);
System.err.println("*************************** Channel location=" + channel.getLocation());
ChannelIF tempChannel = FeedParser.parse(builder,
channel.getLocationString());

System.err.println("copying channel...");
InformaUtils.copyChannelProperties(tempChannel, channel);
channel.setLastUpdated(new Date());
Collection items = channel.getItems();
System.err.println("Got exactly:" + items.size() + " channels..");

Iterator itemIter = items.iterator();
ItemIF oldItem = null;

Collection newItems = new ArrayList();
while(itemIter.hasNext()) {
oldItem = (ItemIF)itemIter.next();
System.out.println(oldItem.getTitle() + " : " +
oldItem.getLink());
}
// compare with existing items, only add new ones
if (tempChannel.getItems().isEmpty()) {
System.out.println("No items found in channel " +
channel.getLocation());

} else {
Iterator it = tempChannel.getItems().iterator();
while (it.hasNext()) {
ItemIF item = (ItemIF) it.next();
System.out.println(channel.getItems().contains(item) +
" : " + item);
if (!channel.getItems().contains(item)) {
System.out.println("Found new item: " + item);
newItems.add(item);
}
} // while more items
}

Iterator newIter = newItems.iterator();
ItemIF item = null;
while(newIter.hasNext()) {
item = (ItemIF) newIter.next();
channel.addItem(builder.createItem(channel, item));
}
System.out.println("Done copying...");
session.delete(tempChannel);
} catch (HibernateException hex) {
System.err.println(hex.getMessage()+ hex);
throw new JobExecutionException(hex);
} catch (IOException ioe) {
System.err.println(ioe.getMessage()+ ioe);
throw new JobExecutionException(ioe);
} catch (ParseException pe) {
System.err.println(pe.getMessage() + pe);
throw new JobExecutionException(pe);
} catch (SchedulerException hex) {
System.err.println(hex.getMessage()+ hex);
throw new JobExecutionException(hex);
} catch (PersistenceException ioe) {
System.err.println(ioe.getMessage()+ ioe);
throw new JobExecutionException(ioe);

} finally {
try {
session.update(channel);
tx.commit();
session.close();
} catch (HibernateException hex) {
System.err.println(hex.getMessage()+ hex);
throw new JobExecutionException(hex);
}
}

Full stack trace of any exception that occurs:
[STDOUT] identifier of an instance of de.nava.informa.impl.hibernate.Channel altered from 5 to
5net.sf.hibernate.HibernateException: identifier of an instance of de.nava.informa.impl.hibernate.Channel altered from 5 to 5 2005-06-02 21:50:55,859 INFO [org.quartz.core.JobRunShell] Job DEFAULT.reutersJob threw a JobExecutionException:
org.quartz.JobExecutionException: net.sf.hibernate.HibernateException:
identifier of an instance of de.nava.informa.impl.hibernate.Channel
altered from 5 to 5 [See nested exception:
net.sf.hibernate.HibernateException: identifier of an instance of de.nava.informa.impl.hibernate.Channel altered from 5 to 5]
at
com.myapp.scheduler.InformaJob.executeInternal(InformaJob.java:154)
at org.springframework.scheduling.quartz.QuartzJobBean.execute(QuartzJobBea
n.java:66)
at org.quartz.core.JobRunShell.run(JobRunShell.java:191)
at org.quartz.simpl.SimpleThreadPool$WorkerThread.run(SimpleThreadPool.java
:516)
* Nested Exception (Underlying Cause) ---------------
net.sf.hibernate.HibernateException: identifier of an instance of de.nava.informa.impl.hibernate.Channel altered from 5 to 5
at
net.sf.hibernate.impl.SessionImpl.checkId(SessionImpl.java:2522)
at
net.sf.hibernate.impl.SessionImpl.flushEntity(SessionImpl.java:2345)
at
net.sf.hibernate.impl.SessionImpl.flushEntities(SessionImpl.java:2338)
at
net.sf.hibernate.impl.SessionImpl.flushEverything(SessionImpl.java:2205)
at
net.sf.hibernate.impl.SessionImpl.flush(SessionImpl.java:2184)
at net.sf.hibernate.transaction.JDBCTransaction.commit(JDBCTransaction.java
:61)
at
com.myapp.scheduler.InformaJob.executeInternal(InformaJob.java:150)
at org.springframework.scheduling.quartz.QuartzJobBean.execute(QuartzJobBea
n.java:66)
at org.quartz.core.JobRunShell.run(JobRunShell.java:191)
at org.quartz.simpl.SimpleThreadPool$WorkerThread.run(SimpleThreadPool.java
:516)

Name and version of the database you are using:
MySQL 4.1.1alpha

Hello all,
I have written a Quartz job that uses the Informa FeedParser for updating items in my database.
For some reason, I am receiving the exception mentioned above...
What I guess is happening is that my Channel object has some Item objects, and it is updating them depending on what is downloaded from the URL.
Since some of them have changed, the whole channel is saved (together with its items), but I am not clear on why it should also update the channel ID, since the id is the primary key and shouldn't be modified...

anyone can help?

thanx in advance and regards

marco


Top
 Profile  
 
Display posts from previous:  Sort by  
Forum locked This topic is locked, you cannot edit posts or make further replies.  [ 1 post ] 

All times are UTC - 5 hours [ DST ]


You cannot post new topics in this forum
You cannot reply to topics in this forum
You cannot edit your posts in this forum
You cannot delete your posts in this forum

Search for:
© Copyright 2014, Red Hat Inc. All rights reserved. JBoss and Hibernate are registered trademarks and servicemarks of Red Hat, Inc.