1.ClassInfo

package net.atpco.dds.offline.filing.common.datamapper.binder;
import java.lang.reflect.Method;
import java.util.Collection;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.log4j.Logger;
/**
 * This class represents a cached set of class definition information that
 * allows for easy mapping between property names and getter methods.
 */
public final class ClassInfo {

    /** logger */
    private static final Logger LOGGER = Logger.getLogger(ClassInfo.class);
    private final transient Map<String,Invoker> getMethods = new ConcurrentHashMap<String,Invoker>();
    private final transient Map<String,Class<?>> getTypes = new ConcurrentHashMap<String,Class<?>>();
    private final transient String className;
    public ClassInfo(final Class<?> clazz) {
        className = clazz.getName();
        addGetMethods(clazz);
    }
    /**
     * Adds the get methods based on the JavaBeans naming convention.
     * Only zero-argument "is" and "get" accessor methods are registered.
     * @param cls the class to inspect
     */
    private void addGetMethods(final Class<?> cls) {
        final Method[] methods = getClassMethods(cls);
        for (int i = 0; i < methods.length; i++) {
            final Method method = methods[i];
            String name = method.getName();
            if (name.startsWith("get") && name.length() > 3
                    && method.getParameterTypes().length == 0) {
                name = dropCase(name);
                addGetMethod(name, method);
            } else if (name.startsWith("is") && name.length() > 2
                    && method.getParameterTypes().length == 0) {
                name = dropCase(name);
                addGetMethod(name, method);
            }
        }
    }
    /**
     * Adds the method and its return type to the lookup maps.
     * @param name the property name
     * @param method the accessor method
     */
    private void addGetMethod(final String name, final Method method) {
        getMethods.put(name, new MethodInvoker(method));
        getTypes.put(name, method.getReturnType());
    }
    /**
     * This method returns an array containing all methods declared in this
     * class and any superclass. We use this method, instead of the simpler
     * Class.getMethods(), because we want to look for private methods as well.
     *
     * @param cls the class
     * @return an array containing all methods in this class
     */
    private Method[] getClassMethods(final Class<?> cls) {
        final Map<String,Method> uniqueMethods = new HashMap<String,Method>();
        Class<?> currentClass = cls;
        while (currentClass != null) {
            addUniqueMethods(uniqueMethods, currentClass.getDeclaredMethods());

            // we also need to look for interface methods,
            // because the class may be abstract
            final Class<?>[] interfaces = currentClass.getInterfaces();
            for (int i = 0; i < interfaces.length; i++) {
                addUniqueMethods(uniqueMethods, interfaces[i].getMethods());
            }

            currentClass = currentClass.getSuperclass();
        }

        final Collection<Method> methods = uniqueMethods.values();
        return methods.toArray(new Method[methods.size()]);
    }
    /**
     * Adds each method to the map keyed by its signature, so a method that
     * is overridden somewhere in the class hierarchy is only recorded once.
     * @param uniqueMethods the map of signatures to methods
     * @param methods the candidate methods
     */
    private void addUniqueMethods(final Map<String,Method> uniqueMethods,
            final Method[] methods) {
        for (final Method currentMethod : methods) {
            if (!currentMethod.isBridge()) {
                final String signature = getSignature(currentMethod);
                // check to see if the method is already known;
                // if it is known, then an extended class must have
                // overridden the method
                if (!uniqueMethods.containsKey(signature)) {
                    currentMethod.setAccessible(true);
                    uniqueMethods.put(signature, currentMethod);
                }
            }
        }
    }
    /**
     * Builds a unique signature for the method: the method name followed by
     * a colon and the comma-separated parameter type names.
     * @param method the method
     * @return the signature string
     */
    private String getSignature(final Method method) {
        final StringBuilder strBuf = new StringBuilder();
        strBuf.append(method.getName());
        final Class<?>[] parameters = method.getParameterTypes();

        for (int i = 0; i < parameters.length; i++) {
            if (i == 0) {
                strBuf.append(':');
            } else {
                strBuf.append(',');
            }
            strBuf.append(parameters[i].getName());
        }

        return strBuf.toString();
    }
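For illustration, the signature this builds for a hypothetical method compareTo(Object) would be "compareTo:java.lang.Object", while a zero-argument getter such as getName() yields just "getName".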
    /**
     * Derives the property name by dropping the "is" or "get" prefix and,
     * per the JavaBeans convention, lower-casing the first character unless
     * the second character is upper case.
     * @param methodName the accessor method name
     * @return the property name
     */
    private static String dropCase(final String methodName) {
        String mName;
        if (methodName.startsWith("get")) {
            mName = methodName.substring(3);
        } else {
            mName = methodName.substring(2);
        }

        if (mName.length() == 1 || mName.length() > 1
                && !Character.isUpperCase(mName.charAt(1))) {
            mName = mName.substring(0, 1).toLowerCase(Locale.US)
                    + mName.substring(1);
        }

        return mName;
    }
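A few concrete inputs and outputs, derived directly from the logic above:

    dropCase("getFareClass"); // "fareClass" - prefix dropped, first character lowered
    dropCase("isActive");     // "active"
    dropCase("getURL");       // "URL" - second character is upper case, so the case is kept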
    /**
     * Returns the getter invoker for the given property, or null if the
     * class has no readable property with that name.
     * @param propertyName the property name
     * @return the invoker, or null if none exists
     */
    public Invoker getGetInvoker(final String propertyName) {
        final Invoker method = getMethods.get(propertyName);
        if (method == null) {
            LOGGER.info("There is no READABLE property named '" + propertyName
                    + "' in class '" + className + "'");
        }
        return method;
    }
}
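The Invoker and MethodInvoker types used above are not part of this listing. A minimal sketch of what they plausibly look like, inferred only from how ClassInfo uses them (each would live in its own source file; this is an assumption, not the actual ATPCO source):

    import java.lang.reflect.Method;

    // Hypothetical sketch - the real Invoker/MethodInvoker sources are not shown in this post.
    public interface Invoker {
        Object invoke(Object target) throws Exception;
    }

    public class MethodInvoker implements Invoker {

        private final Method method;

        public MethodInvoker(final Method method) {
            this.method = method;
        }

        @Override
        public Object invoke(final Object target) throws Exception {
            // ClassInfo only registers zero-argument getters, so no arguments are needed.
            return method.invoke(target);
        }
    }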
2.DataLoadConfig

package net.atpco.dds.offline.filing.common.datamapper.binder;
import java.sql.Types;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.PostConstruct;
import net.atpco.dds.offline.filing.common.datamapper.parser.ClassConfig;
import org.apache.log4j.Logger;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
@Component("dataLoadConfig") @Scope("singleton") public class DataLoadConfig { /** logger */ private static Logger LOGGER = Logger.getLogger(DataLoadConfig.class); private transient final Map<String,Integer> JDBC_TYPE_MAP = new ConcurrentHashMap<String,Integer>(); private transient final Map<String,String> SQL_CACHE = new ConcurrentHashMap<String,String>(); private transient final Map<String,ClassConfig> MAPPING_CONFIG = new ConcurrentHashMap<String,ClassConfig>(); private transient final Map<Class<?>,ClassInfo> CLASS_INFO_MAP = new ConcurrentHashMap<Class<?>,ClassInfo>(); @PostConstruct public void initialize() { JDBC_TYPE_MAP.put("BIGINT",Types.BIGINT); JDBC_TYPE_MAP.put("CHAR",Types.CHAR); JDBC_TYPE_MAP.put("DATE",Types.DATE); JDBC_TYPE_MAP.put("DECIMAL",Types.DECIMAL); JDBC_TYPE_MAP.put("DOUBLE",Types.DOUBLE); JDBC_TYPE_MAP.put("FLOAT",Types.FLOAT); JDBC_TYPE_MAP.put("INTEGER",Types.INTEGER); JDBC_TYPE_MAP.put("JAVA_OBJECT",Types.JAVA_OBJECT); JDBC_TYPE_MAP.put("LONGVARBINARY",Types.LONGVARBINARY); JDBC_TYPE_MAP.put("LONGVARCHAR",Types.LONGVARCHAR); JDBC_TYPE_MAP.put("NULL",Types.NULL); JDBC_TYPE_MAP.put("NUMERIC",Types.NUMERIC); JDBC_TYPE_MAP.put("OTHER",Types.OTHER); JDBC_TYPE_MAP.put("SMALLINT",Types.SMALLINT); JDBC_TYPE_MAP.put("STRUCT",Types.STRUCT); JDBC_TYPE_MAP.put("TIME",Types.TIME); JDBC_TYPE_MAP.put("TIMESTAMP",Types.TIMESTAMP); JDBC_TYPE_MAP.put("TIMESTMP",Types.TIMESTAMP); JDBC_TYPE_MAP.put("TINYINT",Types.TINYINT); JDBC_TYPE_MAP.put("VARBINARY",Types.VARBINARY); JDBC_TYPE_MAP.put("VARCHAR",Types.VARCHAR); }
    /**
     * Returns the JDBC type constant for the given type name.
     * @param jdbcTypeName the type name, e.g. "VARCHAR"
     * @return the matching java.sql.Types constant, or null if unmapped
     */
    public Integer getJdbcType(final String jdbcTypeName) {
        // JDBC_TYPE_MAP is initialized inline and holds no null values,
        // so a plain get() is equivalent to the containsKey/get pair.
        return JDBC_TYPE_MAP.get(jdbcTypeName);
    }
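For example, getJdbcType("VARCHAR") returns java.sql.Types.VARCHAR, while an unmapped name such as "BLOB" returns null, leaving the caller to decide how to handle unsupported column types.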
    /**
     * Gets an instance of ClassInfo for the specified class.
     *
     * @param clazz the class for which to look up the method cache
     * @return the method cache for the class
     */
    public ClassInfo getClassInfo(final Class<?> clazz) {
        synchronized (clazz) {
            ClassInfo cached = CLASS_INFO_MAP.get(clazz);
            if (cached == null) {
                cached = new ClassInfo(clazz);
                CLASS_INFO_MAP.put(clazz, cached);
            }
            return cached;
        }
    }
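Since CLASS_INFO_MAP is already a ConcurrentHashMap, the external lock on the Class object could be avoided on Java 8 and later with computeIfAbsent, which performs the check-then-create step atomically. A sketch of that alternative (not the original code):

    public ClassInfo getClassInfo(final Class<?> clazz) {
        // computeIfAbsent is atomic on ConcurrentHashMap, so no synchronized block is needed
        return CLASS_INFO_MAP.computeIfAbsent(clazz, ClassInfo::new);
    }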
    /**
     * Gets the class config object for the given identifier.
     * @param identifier the mapping identifier
     * @return the class config, or null if none is registered
     */
    public ClassConfig getClassConfig(final String identifier) {
        return MAPPING_CONFIG.get(identifier);
    }
    /**
     * Registers the class config object under the identifier, warning if an
     * entry already exists.
     * @param identifier the mapping identifier
     * @param config the class config
     */
    public void setClassConfig(final String identifier,
            final ClassConfig config) {
        if (MAPPING_CONFIG.containsKey(identifier)) {
            LOGGER.warn("Identifier: " + identifier + " already exists");
        }
        MAPPING_CONFIG.put(identifier, config);
    }
    /**
     * Gets the SQL from the cache if it has already been prepared.
     * @param identifier the cache key
     * @return the cached SQL, or null if none exists
     */
    public String getSqlFromCache(final String identifier) {
        return SQL_CACHE.get(identifier);
    }
    /**
     * Puts the SQL into the cache.
     * @param identifier the cache key
     * @param sql the prepared SQL
     */
    public void setSqlIntoCache(final String identifier, final String sql) {
        SQL_CACHE.put(identifier, sql);
    }
}
3.DataLoadDAO

package net.atpco.dds.offline.filing.common.datamapper.binder;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
public interface DataLoadDAO {

    int[] batchUpdate(String sql, SqlParameterSource[] paramSource);
}
4.DataLoadDAOImpl

package net.atpco.dds.offline.filing.common.datamapper.binder;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.log4j.Logger;
import org.springframework.jdbc.core.SqlParameterValue;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.stereotype.Repository;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
/**
 * Responsible for persisting the data into the database as a batch.
 *
 * @author atp1axi
 * @version 1.0
 */
@Repository("dataLoadDAO")
public class DataLoadDAOImpl extends DDSDAO implements DataLoadDAO {

    /** logger */
    private static final Logger LOGGER = Logger.getLogger(DataLoadDAOImpl.class);
    /**
     * Does a batch update. The parameters are the configured SQL parameter
     * sources.
     *
     * @param sql
     *            the insert/update SQL
     * @param paramSource
     *            the MapSqlParameterSource array whose values are eventually
     *            bound to the prepared statement
     */
    @Override
    @Transactional(propagation = Propagation.REQUIRED)
    public int[] batchUpdate(final String sql,
            final SqlParameterSource[] paramSource) {
        // Trace the bound keys and values through the logger rather than
        // System.out, and only when debug logging is enabled.
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("sql==" + sql);
            for (int i = 0; i < paramSource.length; i++) {
                final MapSqlParameterSource ps = (MapSqlParameterSource) paramSource[i];
                final Map<String,Object> objectMap = ps.getValues();
                for (final Entry<String,Object> entry : objectMap.entrySet()) {
                    final SqlParameterValue value = (SqlParameterValue) entry.getValue();
                    LOGGER.debug(entry.getKey() + " : " + value.getValue() + " ("
                            + (value.getValue() == null ? "null" : value.getValue().getClass()) + ")");
                }
            }
        }
        return namedJdbcTemplate.batchUpdate(sql, paramSource);
    }
}
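batchUpdate returns Spring's per-statement update counts. A caller could sum them to report affected rows; the fragment below is an illustration only (not part of the original service), and note that some JDBC drivers return Statement.SUCCESS_NO_INFO instead of real counts:

    // Illustration: summing the update counts returned by the batch.
    final int[] counts = dataLoadDAO.batchUpdate(sql, sources);
    int total = 0;
    for (final int count : counts) {
        total += count; // may be Statement.SUCCESS_NO_INFO (-2) on some drivers
    }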
5.DataLoadRequest

package net.atpco.dds.offline.filing.common.datamapper.binder;
import java.util.List;
import org.springframework.util.CollectionUtils;
public class DataLoadRequest {
    private Mode mode;
    private String identifier;
    private List<?> dataList;

    public DataLoadRequest(final Mode mode, final String identifier,
            final List<?> objects) {
        this.mode = mode;
        this.identifier = identifier;
        this.dataList = objects;
    }
    public void setMode(final Mode mode) {
        this.mode = mode;
    }

    public void setIdentifier(final String identifier) {
        this.identifier = identifier;
    }

    public void setDataList(final List<?> dataList) {
        this.dataList = dataList;
    }

    public Mode getMode() {
        return mode;
    }

    public String getIdentifier() {
        return identifier;
    }

    public List<?> getDataList() {
        return dataList;
    }
    public boolean isValid() {
        // A request needs a mode, a non-empty data list and a non-blank identifier.
        return mode != null && !CollectionUtils.isEmpty(getDataList())
                && !isEmpty(getIdentifier());
    }
    /**
     * Returns true if the string is null, empty, or contains only spaces.
     */
    public boolean isEmpty(final String paramString) {
        if (paramString != null && paramString.length() > 0) {
            for (int i = 0; i < paramString.length(); i++) {
                if (paramString.charAt(i) != ' ') {
                    return false;
                }
            }
        }
        return true;
    }
    @Override
    public String toString() {
        final StringBuilder request = new StringBuilder();
        if (mode != null) {
            request.append(" mode: ");
            request.append(mode.toString());
        }
        if (identifier != null) {
            request.append(" identifier: ");
            request.append(identifier);
        }
        if (dataList != null && !dataList.isEmpty()) {
            request.append(" dataList: ");
            request.append(dataList.toString());
        }
        return super.toString() + request;
    }
}
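The Mode enum referenced throughout is not included in the post. Given that the service javadoc below talks about generating insert or update SQL, a plausible minimal shape would be (hypothetical, not the actual ATPCO source):

    public enum Mode {
        INSERT,
        UPDATE
    }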
6.DataLoadService

/*
 * Created on Oct 17, 2013
 *
 * Copyright 2006 ATPCO Confidential and Proprietary. All Rights Reserved.
 */
package net.atpco.dds.offline.filing.common.datamapper.binder;
/**
 * Loads the data based on the object-to-table-name mapping. The identifier is
 * the key used to find the corresponding configuration in the cache and to
 * generate the insert or update SQL.
 *
 * @author atp1axi
 * @version 1.0
 */
public interface DataLoadService {
    /**
     * Responsible for generating the insert or update statement based on the
     * identifier. The configuration file is loaded at container start-up, and
     * the generated SQL and parsed configurations are kept in an in-memory
     * cache.
     * @param request the data load request
     * @return true on success
     * @throws Exception if the request is invalid or no mapping exists
     */
    boolean update(DataLoadRequest request) throws Exception;
}
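A minimal sketch of a caller, assuming an injected service, a "FARE_RECORD" identifier registered in the mapping xml, and a FareRecord DTO (all three names are placeholders, not from the original post):

    @Autowired
    @Qualifier("dataLoadService")
    private DataLoadService dataLoadService;

    public void loadFares(final List<FareRecord> fares) throws Exception {
        // Mode, identifier and a non-empty list are all required by isValid().
        final DataLoadRequest request = new DataLoadRequest(Mode.INSERT, "FARE_RECORD", fares);
        dataLoadService.update(request);
    }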
7.DataLoadServiceImpl

package net.atpco.dds.offline.filing.common.datamapper.binder;
import java.util.ArrayList;
import java.util.List;

import net.atpco.dds.offline.filing.common.datamapper.parser.ClassConfig;

import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Scope;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
@Service("dataLoadService") @Scope("prototype") public class DataLoadServiceImpl implements DataLoadService {
    private static final Logger LOGGER = Logger.getLogger(DataLoadServiceImpl.class);
@Autowired @Qualifier("sqlPreparator") private transient SqlPreparator sqlPreparator;
@Autowired @Qualifier("dataLoadDAO") private transient DataLoadDAO dataLoadDAO;
@Autowired @Qualifier("dataLoadConfig") private transient DataLoadConfig dataLoadConfig;
@Value("${jdbc.batch.size}") private transient Integer batchSize;
    /**
     * Responsible for generating the insert or update statement based on the
     * identifier. The configuration file is loaded at container start-up, and
     * the generated SQL and parsed configurations are kept in an in-memory
     * cache.
     */
    @Override
    @Transactional(propagation = Propagation.REQUIRED)
    public boolean update(final DataLoadRequest dataLoadRequest)
            throws Exception {
        if (!dataLoadRequest.isValid()) {
            throw new Exception(
                    "Invalid request: mode, identifier and data list are required");
        }

        final ClassConfig config = dataLoadConfig
                .getClassConfig(dataLoadRequest.getIdentifier());

        if (config == null) {
            throw new Exception("No mapping found for identifier "
                    + dataLoadRequest.getIdentifier()
                    + ". Make sure the file is included in the mapping xml.");
        }

        // Get the generated sql from cache
        String sql = dataLoadConfig.getSqlFromCache(dataLoadRequest
                .getIdentifier() + "_" + dataLoadRequest.getMode().toString());

        // if sql does not exist in cache, prepare a new one and put it in cache
        if (sql == null) {
            sql = sqlPreparator.prepareSql(config, dataLoadRequest.getMode());
            dataLoadConfig.setSqlIntoCache(dataLoadRequest.getIdentifier()
                    + "_" + dataLoadRequest.getMode().toString(), sql);
        }

        final List<SqlParameterSource> paramSourceList = new ArrayList<SqlParameterSource>();
        for (final Object object : dataLoadRequest.getDataList()) {
            final MapSqlParameterSource parameterSource = sqlPreparator
                    .prepareSqlParameter(dataLoadRequest.getMode(), config, object);
            paramSourceList.add(parameterSource);
        }

        // Persist the data based on the batch size.
        final List<SqlParameterSource[]> batchList = getBatches(paramSourceList);
        int processingCount = 0;
        for (final SqlParameterSource[] sources : batchList) {
            LOGGER.info("Processing a batch of " + sources.length
                    + " records starting at record " + processingCount);
            dataLoadDAO.batchUpdate(sql, sources);
            processingCount = processingCount + sources.length;
        }
        return true;
    }
    /**
     * Splits the single collection into multiple arrays based on the batch
     * size.
     *
     * @param collection the full list of parameter sources
     * @return the list of batch-sized arrays
     */
    private List<SqlParameterSource[]> getBatches(
            final List<SqlParameterSource> collection) {
        int batchCount = 0;
        final List<SqlParameterSource[]> batches = new ArrayList<SqlParameterSource[]>();
        while (batchCount < collection.size()) {
            final int nextInc = Math.min(collection.size() - batchCount,
                    batchSize);
            final List<SqlParameterSource> batch = collection.subList(
                    batchCount, batchCount + nextInc);
            batches.add(batch.toArray(new SqlParameterSource[batch.size()]));
            batchCount = batchCount + nextInc;
        }
        return batches;
    }
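For example, with jdbc.batch.size set to 100 and 250 parameter sources, getBatches returns three arrays of 100, 100 and 50 elements, and the loop in update() logs and persists each batch in turn.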
}