// PrimaryKeyFilter.java
/*
*
* The DbUnit Database Testing Framework
* Copyright (C)2005, DbUnit.org
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
package org.dbunit.database;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
//TODO: should not have dependency on sub-package!
import org.dbunit.database.search.ForeignKeyRelationshipEdge;
import org.dbunit.dataset.DataSetException;
import org.dbunit.dataset.IDataSet;
import org.dbunit.dataset.ITable;
import org.dbunit.dataset.ITableIterator;
import org.dbunit.dataset.ITableMetaData;
import org.dbunit.dataset.filter.AbstractTableFilter;
import org.dbunit.util.SQLHelper;
/**
* Filter a table given a map of the allowed rows based on primary key values.<br>
* It uses a depth-first algorithm (although not recursive - it might be refactored
* in the future) to define which rows are allowed, as well which rows are necessary
* (and hence allowed) because of dependencies with the allowed rows.<br>
* <strong>NOTE:</strong> multi-column primary keys are not supported at the moment.
* TODO: test cases
* @author Felipe Leme <dbunit@felipeal.net>
* @version $Revision: 629 $
* @since Sep 9, 2005
*/
public class PrimaryKeyFilter extends AbstractTableFilter {
private final IDatabaseConnection connection;
private final Map allowedPKsPerTable;
private final Map allowedPKsInput;
private final Map pksToScanPerTable;
private final boolean reverseScan;
protected final Logger logger = LoggerFactory.getLogger(getClass());
// cache of primary keys
private final Map pkColumnPerTable = new HashMap();
private final Map fkEdgesPerTable = new HashMap();
private final Map fkReverseEdgesPerTable = new HashMap();
// name of the tables, in reverse order of dependency
private final List tableNames = new ArrayList();
/**
 * Creates a filter from a map of desired rows: each key is a table name and
 * each value is a Set of primary-key values that must appear in the final
 * data set. Rows required by those rows (through foreign-key dependencies)
 * will transitively be allowed as well.
 *
 * @param connection database connection used to resolve dependent rows
 * @param allowedPKs allowed rows per table (table name -> Set of allowed
 *        primary keys for that table)
 * @param reverseDependency if true, rows that depend on an allowed row are
 *        also allowed by the filter
 */
public PrimaryKeyFilter(IDatabaseConnection connection, Map allowedPKs, boolean reverseDependency) {
    this.connection = connection;
    this.allowedPKsPerTable = new HashMap();
    this.allowedPKsInput = allowedPKs;
    this.reverseScan = reverseDependency;
    // Deep-copy the input map: the scan mutates these sets, and the
    // caller's map must remain untouched.
    this.pksToScanPerTable = new HashMap(allowedPKs.size());
    for (Iterator it = allowedPKs.entrySet().iterator(); it.hasNext(); ) {
        Map.Entry entry = (Map.Entry) it.next();
        Set copyOfPKs = new HashSet((Set) entry.getValue());
        this.pksToScanPerTable.put(entry.getKey(), copyOfPKs);
    }
}
/**
 * Callback invoked when a table node is added to the dependency graph;
 * records the table name so {@link #searchPKs} can later visit the tables
 * in reverse dependency order.
 *
 * @param node name of the table that was added (a String)
 */
public void nodeAdded(Object node) {
    // Single parameterized log call: the original logged the same event
    // twice (once unconditionally with string concatenation, once guarded).
    logger.debug("nodeAdded(node={}) - start", node);
    this.tableNames.add(node);
}
/**
 * Callback invoked when a foreign-key relationship edge is added to the
 * dependency graph. The edge is indexed both by its origin table ("direct"
 * edges) and by its target table ("reverse" edges), and the primary-key
 * column of the target table is cached for later lookups.
 *
 * @param edge foreign-key relationship (from = dependent table,
 *        to = referenced table)
 */
public void edgeAdded(ForeignKeyRelationshipEdge edge) {
    logger.debug("edgeAdded: {}", edge);
    // first add it to the "direct edges" ...
    addEdge(this.fkEdgesPerTable, (String) edge.getFrom(), edge);
    // ... then to the "reverse edges"
    String to = (String) edge.getTo();
    addEdge(this.fkReverseEdgesPerTable, to, edge);
    // finally, update the PKs cache (first edge to a table wins)
    if (this.pkColumnPerTable.get(to) == null) {
        this.pkColumnPerTable.put(to, edge.getPKColumn());
    }
}

/**
 * Adds an edge to the set mapped under the given table name, creating the
 * set on first use. Set semantics already guarantee uniqueness, so the
 * redundant contains() check of the original is not needed.
 *
 * @param edgesPerTable map of table name -> Set of edges
 * @param table key under which the edge is indexed
 * @param edge edge to record
 */
private void addEdge(Map edgesPerTable, String table, ForeignKeyRelationshipEdge edge) {
    Set edges = (Set) edgesPerTable.get(table);
    if (edges == null) {
        edges = new HashSet();
        edgesPerTable.put(table, edges);
    }
    edges.add(edge);
}
/**
 * Always accepts the table name: the actual filtering is done per row
 * (by primary key) when {@link #iterator} is called, not per table.
 *
 * @param tableName name of the table being checked
 * @return always true
 * @see AbstractTableFilter
 */
public boolean isValidName(String tableName) throws DataSetException {
    logger.debug("isValidName(tableName={}) - start", tableName);
    return true;
}
/**
 * Resolves the complete set of allowed primary keys (following foreign-key
 * dependencies) and then returns an iterator over the filtered data set.
 *
 * @param dataSet data set to iterate over
 * @param reversed if true, iterate the tables in reverse order
 * @return iterator that only yields allowed rows
 * @throws DataSetException if the primary-key scan fails (SQL errors are
 *         wrapped in a DataSetException)
 */
public ITableIterator iterator(IDataSet dataSet, boolean reversed)
throws DataSetException {
    this.logger.debug("Filter.iterator()");
    try {
        searchPKs(dataSet);
    } catch (SQLException cause) {
        throw new DataSetException(cause);
    }
    ITableIterator delegate =
            reversed ? dataSet.reverseIterator() : dataSet.iterator();
    return new FilterIterator(delegate);
}
/**
 * Resolves which primary keys are allowed: starting from the user-supplied
 * PKs, transitively adds every row those rows depend on (and, when
 * reverseScan is set, every row depending on them). Loops until no table
 * has pending PKs to scan.
 *
 * @param dataSet data set whose tables provide the PK metadata
 * @throws DataSetException if table or PK metadata cannot be obtained
 * @throws SQLException if a scan query fails
 */
private void searchPKs(IDataSet dataSet) throws DataSetException, SQLException {
    logger.debug("searchPKs(dataSet={}) - start", dataSet);
    int counter = 0;
    while ( ! this.pksToScanPerTable.isEmpty() ) {
        counter ++;
        if ( this.logger.isDebugEnabled() ) {
            this.logger.debug( "RUN # " + counter );
        }
        // iterate in reverse dependency order, so referenced tables are
        // visited after the tables that depend on them
        for ( int i=this.tableNames.size()-1; i>=0; i-- ) {
            String tableName = (String) this.tableNames.get(i);
            Set tmpSet = (Set) this.pksToScanPerTable.get( tableName );
            if ( tmpSet != null && ! tmpSet.isEmpty() ) {
                // PK metadata is only needed when there is something to
                // scan, so look it up inside the guard (the original
                // fetched it unconditionally for every table on every run)
                // TODO: support multi-column PKs
                String pkColumn = dataSet.getTable(tableName)
                        .getTableMetaData().getPrimaryKeys()[0].getColumnName();
                Set pksToScan = new HashSet( tmpSet );
                if ( this.logger.isDebugEnabled() ) {
                    this.logger.debug( "before search: "+ tableName + "=>" + pksToScan );
                }
                scanPKs( tableName, pkColumn, pksToScan );
                scanReversePKs( tableName, pksToScan );
                allowPKs( tableName, pksToScan );
                removePKsToScan( tableName, pksToScan );
            } // if
        } // for
        removeScannedTables();
    } // while
    // log message fixed: the original said "searchIds()", a stale name
    logger.debug( "Finished searchPKs()" );
}
/**
 * Removes from the pending-scan map every table that either has no PKs
 * left to scan or was never registered via {@link #nodeAdded} (in which
 * case a warning is logged, as its ids can never be resolved).
 */
private void removeScannedTables() {
    logger.debug("removeScannedTables() - start");
    // Iterator.remove() is safe while iterating, so no intermediate
    // "tables to remove" list is needed (the original built one).
    Iterator iterator = this.pksToScanPerTable.entrySet().iterator();
    while ( iterator.hasNext() ) {
        Map.Entry entry = (Map.Entry) iterator.next();
        String table = (String) entry.getKey();
        Set pksToScan = (Set) entry.getValue();
        boolean removeIt = pksToScan.isEmpty();
        if ( ! this.tableNames.contains(table) ) {
            if ( this.logger.isWarnEnabled() ) {
                // fixed: the original message lacked the space before "as",
                // producing e.g. "... of table FOOas this table ..."
                this.logger.warn("Discarding ids " + pksToScan + " of table " + table +
                        " as this table has not been passed as input" );
            }
            removeIt = true;
        }
        if ( removeIt ) {
            iterator.remove();
        }
    }
}
private void allowPKs(String table, Set newAllowedPKs) {
logger.debug("allowPKs(table=" + table + ", newAllowedPKs=" + newAllowedPKs + ") - start");
// NOTE(review): the source capture is truncated here — the remainder of
// allowPKs(...) and the rest of the class are missing (replaced by
// code-viewer UI text in the original extraction).