📄 loader.java
字号:
// This check is needed since processing leaves the cursor
// after the last physical row for the current logical row;
// thus if we are after the last physical row, this might be
// caused by either:
// 1) scrolling to the last logical row
// 2) scrolling past the last logical row
// In the latter scenario, the previous logical row
// really is the last logical row.
//
// In all other cases, we should process back two
// logical records (the current logic row, plus the
// previous logical row).
if ( resultSet.isAfterLast() && isLogicallyAfterLast ) {
// position cursor to the last row
resultSet.last();
keyToRead = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
}
else {
// Since the result set cursor is always left at the first
// physical row after the "last processed", we need to jump
// back one position to get the key value we are interested
// in skipping
resultSet.previous();
// sequentially read the result set in reverse until we recognize
// a change in the key value. At that point, we are pointed at
// the last physical sequential row for the logical row in which
// we are interested in processing
boolean firstPass = true;
final EntityKey lastKey = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
while ( resultSet.previous() ) {
EntityKey checkKey = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
if ( firstPass ) {
firstPass = false;
keyToRead = checkKey;
}
if ( !lastKey.equals( checkKey ) ) {
break;
}
}
}
// Read backwards until we read past the first physical sequential
// row with the key we are interested in loading
while ( resultSet.previous() ) {
EntityKey checkKey = getKeyFromResultSet(
0,
getEntityPersisters()[0],
null,
resultSet,
session
);
if ( !keyToRead.equals( checkKey ) ) {
break;
}
}
// Finally, read ahead one row to position result set cursor
// at the first physical row we are interested in loading
resultSet.next();
// and perform the load
return sequentialLoad( resultSet, session, queryParameters, returnProxies, keyToRead );
}
catch ( SQLException sqle ) {
throw JDBCExceptionHelper.convert(
factory.getSQLExceptionConverter(),
sqle,
"could not perform sequential read of results (forward)",
getSQLString()
);
}
}
/**
 * Build the {@link EntityKey} identifying the "optional object" carried by the
 * query parameters, if one was supplied.
 *
 * @param queryParameters source of the optional object, its id and entity name
 * @param session used to resolve the entity persister and current entity mode
 * @return the key for the optional object, or null when either the optional
 *         object or its entity name is absent
 */
private static EntityKey getOptionalObjectKey(QueryParameters queryParameters, SessionImplementor session) {
	final Object optionalObject = queryParameters.getOptionalObject();
	final Serializable optionalId = queryParameters.getOptionalId();
	final String optionalEntityName = queryParameters.getOptionalEntityName();
	if ( optionalObject == null || optionalEntityName == null ) {
		// both the instance and its entity name are required to form a key
		return null;
	}
	return new EntityKey(
			optionalId,
			session.getEntityPersister( optionalEntityName, optionalObject ),
			session.getEntityMode()
	);
}
/**
 * Hydrate one logical row of the result set: resolve the entity key for each
 * persister, register non-existent entities, materialize the row's entities
 * (collecting those needing two-phase load into {@code hydratedObjects}),
 * read any collection elements, optionally substitute existing proxies, and
 * finally convert the raw row into the caller-visible result.
 *
 * @param resultSet the cursor, positioned on the row to read
 * @param session the originating session
 * @param queryParameters query parameters (optional-object handling, etc.)
 * @param lockModeArray lock mode per entity persister
 * @param optionalObjectKey key of the optional object, or null
 * @param hydratedObjects out-parameter list gathering entities to initialize later
 * @param keys out-parameter array receiving each persister's key for this row
 * @param returnProxies when true, replace loaded entities with their existing proxies
 * @return the result element for this row
 */
private Object getRowFromResultSet(
		final ResultSet resultSet,
		final SessionImplementor session,
		final QueryParameters queryParameters,
		final LockMode[] lockModeArray,
		final EntityKey optionalObjectKey,
		final List hydratedObjects,
		final EntityKey[] keys,
		boolean returnProxies)
throws SQLException, HibernateException {
	final Loadable[] persisters = getEntityPersisters();
	final int entitySpan = persisters.length;
	for ( int i = 0; i < entitySpan; i++ ) {
		//TODO: the i==entitySpan-1 bit depends upon subclass implementation (very bad)
		final Serializable optionalId = ( i == entitySpan - 1 )
				? queryParameters.getOptionalId()
				: null;
		keys[i] = getKeyFromResultSet( i, persisters[i], optionalId, resultSet, session );
	}
	registerNonExists( keys, persisters, session );
	// this call is side-effecty: it appends to hydratedObjects
	final Object[] row = getRow(
			resultSet,
			persisters,
			keys,
			queryParameters.getOptionalObject(),
			optionalObjectKey,
			lockModeArray,
			hydratedObjects,
			session
	);
	readCollectionElements( row, resultSet, session );
	if ( returnProxies ) {
		// swap in an existing proxy for each row element (if there is one)
		for ( int i = 0; i < entitySpan; i++ ) {
			final Object entity = row[i];
			final Object proxy = session.getPersistenceContext().proxyFor( persisters[i], keys[i], entity );
			if ( proxy != entity ) {
				// force the proxy to resolve itself
				( (HibernateProxy) proxy ).getHibernateLazyInitializer().setImplementation( entity );
				row[i] = proxy;
			}
		}
	}
	return getResultColumnOrRow( row, resultSet, session );
}
/**
 * Read any collection elements contained in a single row of the result set.
 *
 * @param row the entity instances hydrated for this row (potential collection owners)
 * @param resultSet the cursor, positioned on the current row
 * @param session the originating session
 */
private void readCollectionElements(Object[] row, ResultSet resultSet, SessionImplementor session)
throws SQLException, HibernateException {
	//TODO: make this handle multiple collection roles!
	final CollectionPersister[] collectionPersisters = getCollectionPersisters();
	if ( collectionPersisters == null ) {
		// no collection fetches in this load
		return;
	}
	final CollectionAliases[] descriptors = getCollectionAliases();
	final int[] collectionOwners = getCollectionOwners();
	for ( int i = 0; i < collectionPersisters.length; i++ ) {
		final CollectionPersister collectionPersister = collectionPersisters[i];
		//true if this is a query and we are loading multiple instances of the same collection role
		//otherwise this is a CollectionInitializer and we are loading up a single collection or batch
		final boolean hasCollectionOwners = collectionOwners != null && collectionOwners[i] > -1;
		//if null, the owner will be retrieved from the session
		final Object owner = hasCollectionOwners ? row[ collectionOwners[i] ] : null;
		//TODO: old version did not require hashmap lookup: keys[collectionOwner].getIdentifier()
		final Serializable key = ( owner == null )
				? null
				: collectionPersister.getCollectionType().getKeyOfOwner( owner, session );
		readCollectionElement(
				owner,
				key,
				collectionPersister,
				descriptors[i],
				resultSet,
				session
		);
	}
}
/**
 * Execute the query and process the result set into a list of results.
 * <p/>
 * Prepares and executes the statement, iterates the result set (bounded by
 * the row selection's max-rows, if any), hydrates each row via
 * {@link #getRowFromResultSet}, then performs two-phase initialization of
 * the hydrated entities and collections, and finally registers subselect
 * fetches when enabled.
 *
 * @param session the originating session
 * @param queryParameters query parameters (row selection, lock modes, etc.)
 * @param returnProxies whether existing proxies should replace loaded entities
 * @return the list of result rows
 */
private List doQuery(final SessionImplementor session,
		final QueryParameters queryParameters,
		final boolean returnProxies) throws SQLException, HibernateException {
	final RowSelection selection = queryParameters.getRowSelection();
	final int maxRows = hasMaxRows( selection ) ?
			selection.getMaxRows().intValue() :
			Integer.MAX_VALUE;
	final int entitySpan = getEntityPersisters().length;
	final ArrayList hydratedObjects = entitySpan == 0 ? null : new ArrayList( entitySpan * 10 );
	final PreparedStatement st = prepareQueryStatement( queryParameters, false, session );
	final ResultSet rs = getResultSet( st, queryParameters.hasAutoDiscoverScalarTypes(), queryParameters.isCallable(), selection, session );
	// would be great to move all this below here into another method that could also be used
	// from the new scrolling stuff.
	//
	// Would need to change the way the max-row stuff is handled (i.e. behind an interface) so
	// that I could do the control breaking at the means to know when to stop
	final LockMode[] lockModeArray = getLockModes( queryParameters.getLockModes() );
	final EntityKey optionalObjectKey = getOptionalObjectKey( queryParameters, session );
	final boolean createSubselects = isSubselectLoadingEnabled();
	final List subselectResultKeys = createSubselects ? new ArrayList() : null;
	final List results = new ArrayList();
	try {
		handleEmptyCollections( queryParameters.getCollectionKeys(), rs, session );
		EntityKey[] keys = new EntityKey[entitySpan]; //we can reuse it for each row
		if ( log.isTraceEnabled() ) log.trace( "processing result set" );
		int count;
		for ( count = 0; count < maxRows && rs.next(); count++ ) {
			// was log.debug() guarded by isTraceEnabled(); the level mismatch
			// dropped the message at debug level — use trace consistently
			if ( log.isTraceEnabled() ) log.trace( "result set row: " + count );
			Object result = getRowFromResultSet(
					rs,
					session,
					queryParameters,
					lockModeArray,
					optionalObjectKey,
					hydratedObjects,
					keys,
					returnProxies
			);
			results.add( result );
			if ( createSubselects ) {
				subselectResultKeys.add( keys );
				keys = new EntityKey[entitySpan]; //can't reuse in this case
			}
		}
		if ( log.isTraceEnabled() ) {
			log.trace( "done processing result set (" + count + " rows)" );
		}
	}
	finally {
		// always release the JDBC resources, even on failure
		session.getBatcher().closeQueryStatement( st, rs );
	}
	// note: rs is closed here, but it is only used as an opaque identifier below
	initializeEntitiesAndCollections( hydratedObjects, rs, session, queryParameters.isReadOnly() );
	if ( createSubselects ) createSubselects( subselectResultKeys, queryParameters, session );
	return results; //getResultList(results);
}
/**
 * Hook: is subselect collection fetching enabled for this loader?
 * Disabled by default; subclasses that support subselect loading override this.
 */
protected boolean isSubselectLoadingEnabled() {
	return false;
}
/**
 * Does any entity persister loaded by this loader own a collection that is
 * eligible for subselect fetching?
 */
protected boolean hasSubselectLoadableCollections() {
	final Loadable[] loadables = getEntityPersisters();
	int idx = 0;
	while ( idx < loadables.length ) {
		if ( loadables[idx].hasSubselectLoadableCollections() ) {
			return true;
		}
		idx++;
	}
	return false;
}
/**
 * Transpose a list of {@code EntityKey[]} rows into per-column key sets:
 * element {@code j} of the result holds the {@code j}'th key of every row.
 *
 * @param keys a non-empty list of EntityKey arrays, all of equal length
 * @return one Set of keys per column position
 */
private static Set[] transpose( List keys ) {
	final int span = ( ( EntityKey[] ) keys.get( 0 ) ).length;
	final Set[] result = new Set[span];
	for ( int col = 0; col < span; col++ ) {
		final Set columnKeys = new HashSet( keys.size() );
		for ( int row = 0; row < keys.size(); row++ ) {
			columnKeys.add( ( ( EntityKey[] ) keys.get( row ) )[col] );
		}
		result[col] = columnKeys;
	}
	return result;
}
/**
 * Register a {@link SubselectFetch} in the session's batch-fetch queue for
 * every loaded entity whose persister supports subselect-loadable collections.
 * Skipped entirely when only a single row was returned, since loading that
 * row's collections by key is more efficient than a subselect.
 *
 * @param keys list of EntityKey[] rows produced by the query
 * @param queryParameters the parameters of the originating query
 * @param session the originating session
 */
private void createSubselects(List keys, QueryParameters queryParameters, SessionImplementor session) {
	if ( keys.size() <= 1 ) {
		//if we only returned one entity, query by key is more efficient
		return;
	}
	final Set[] keySets = transpose( keys );
	final Map namedParameterLocMap = buildNamedParameterLocMap( queryParameters );
	final Loadable[] loadables = getEntityPersisters();
	final String[] aliases = getAliases();
	final Iterator rowIter = keys.iterator();
	while ( rowIter.hasNext() ) {
		final EntityKey[] rowKeys = (EntityKey[]) rowIter.next();
		for ( int i = 0; i < rowKeys.length; i++ ) {
			if ( rowKeys[i] == null || !loadables[i].hasSubselectLoadableCollections() ) {
				continue;
			}
			final SubselectFetch subselectFetch = new SubselectFetch(
					//getSQLString(),
					aliases[i],
					loadables[i],
					queryParameters,
					keySets[i],
					namedParameterLocMap
			);
			session.getPersistenceContext()
					.getBatchFetchQueue()
					.addSubselect( rowKeys[i], subselectFetch );
		}
	}
}
/**
 * Build a map from named-parameter name to its location(s) within the query,
 * as reported by {@code getNamedParameterLocs}.
 *
 * @param queryParameters the query parameters, possibly carrying named parameters
 * @return the name-to-locations map, or null when the query has no named parameters
 */
private Map buildNamedParameterLocMap(QueryParameters queryParameters) {
	if ( queryParameters.getNamedParameters() == null ) {
		return null;
	}
	final Map namedParameterLocMap = new HashMap();
	final Iterator nameIter = queryParameters.getNamedParameters().keySet().iterator();
	while ( nameIter.hasNext() ) {
		final String name = (String) nameIter.next();
		namedParameterLocMap.put( name, getNamedParameterLocs( name ) );
	}
	return namedParameterLocMap;
}
private void initializeEntitiesAndCollections(
final List hydratedObjects,
final Object resultSetId,
final SessionImplementor session,
final boolean readOnly)
throws HibernateException {
final CollectionPersister[] collectionPersisters = getCollectionPersisters();
if ( collectionPersisters != null ) {
for ( int i=0; i<collectionPersisters.length; i++ ) {
if ( collectionPersisters[i].isArray() ) {
//for arrays, we should end the collection load before resolving
//the entities, since the actual array instances are not instantiated
//during loading
//TODO: or we could do this polymorphically, and have two
// different operations implemented differently for arrays
endCollectionLoad( resultSetId, session, collectionPersisters[i] );
}
}
}
//important: reuse the same event instances for performance!
final PreLoadEvent pre;
final PostLoadEvent post;
if ( session.isEventSource() ) {
pre = new PreLoadEvent( (EventSource) session );
post = new PostLoadEvent( (EventSource) session );
}
else {
pre = null;
post = null;
}
if ( hydratedObjects!=null ) {
int hydratedObjectsSize = hydratedObjects.size();
if ( log.isTraceEnabled() ) {
log.trace( "total objects hydrated: " + hydratedObjectsSize );
}
for ( int i = 0; i < hydratedObjectsSize; i++ ) {
TwoPhaseLoad.initializeEntity( hydratedObjects.get(i), readOnly, session, pre, post );
}
}
if ( collectionPersisters != null ) {
for ( int i=0; i<collectionPersisters.length; i++ ) {
if ( !collectionPersisters[i].isArray() ) {
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -