/* ****************************************************************************
*
* Module PXBridgeHelper.java
*
* IBM Confidential
* OCO Source Materials
* (c) Copyright IBM Corp. 2001, 2014, 2017, 2018
*
* (c) Copyright 2009, 2014 IBM Corporation - All Rights Reserved
* This is unpublished proprietary source code of IBM Corporation
* The copyright notice above does not evidence any actual or intended
* publication of such source code.
*
* Maintenance log - insert most recent change descriptions at top
*
* Date....... ECASE WHO Description..................................
* Sep 16, 2009 ----- lmahanam Changes to support Push Down Optimization
* Aug 13, 2009 ----- cedirisu Multi-Connector OSH support changes
* Aug 03, 2009 ----- cedirisu Job Runtime OSH generation Refactor changes
* Jun 01, 2009 ----- cedirisu Initial Creation
*
***************************************************************************** */
package com.ascential.investigate.utils.jobs.osh.operator;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.ascential.investigate.exception.SorcererException;
import com.ascential.investigate.shared.logging.SorcererServiceLogging;
import com.ascential.investigate.utils.XMETAHelper;
import com.ascential.investigate.utils.dbutils.DataConnection;
import com.ascential.investigate.utils.dbutils.DatabaseTypeSourceEnum;
import com.ascential.investigate.utils.dbutils.DatabaseTypeUtils;
import
com.ascential.investigate.utils.jobs.osh.connector.cpg.descriptor.PropertyDefinitio
n;
import com.ascential.investigate.utils.jobs.osh.payload.ColumnInfo;
import com.ascential.investigate.utils.jobs.osh.payload.ExecutionEngineInfo;
import com.ascential.investigate.utils.jobs.osh.payload.SQLVirtualTableInfo;
import com.ascential.investigate.utils.jobs.osh.payload.TableInfo;
import com.ascential.investigate.utils.jobs.properties.JobProperties;
import com.ascential.investigate.utils.model.IAModelUtils;
import com.ibm.infosphere.ia.utils.connectors.TableAction;
import com.ibm.infosphere.ia.utils.connectors.WriteMode;
import ASCLModel.investigate.CredentialMapping;
/**
* Helper class used to formulate data for the pxbridge osh
*
* @author Chamath Edirisuriya
*
*/
public class PXBridgeHelper implements Serializable {
private static final long serialVersionUID = 1L;
if ( dataConnection_ != null ) {
if(credentialMapping != null)
{
if(paramName.equals("Username"))
{
paramValue = credentialMapping.getUser();
}
if(paramName.equals("Password"))
{
paramValue = credentialMapping.getPassword();
}
}
if (paramValue!=null) {
properties.put(paramName, paramValue);
}
}
SorcererServiceLogging.debug("parameters: " + properties);
}
updateProperties();
if ( engineInfo_ != null ){
setAutoCommitMode(String.valueOf(engineInfo_.getAutoCommitMode()));
setIsolationLevel(engineInfo_.getIsolationLevel());
setArraySize(engineInfo_.getArraySize());
}
}
updateProperties();
}
setFieldInfo(tableInfo_);
}
/**
* IA 8.1.1 and earlier stored in XMeta where clauses with escaped quotes.
(For instance: "where col=\'x\'"
* IA 8.1.2 and later store in XMeta non escaped where clause (For instance:
"where col='x'". The escape is done during the job generation.
* When 8.1.1 is upgraded to 8.1.2 or later, CA and rule execution would fail
with where conditions defined in 8.1.1 or earlier
* because they don't expect the string to be escaped and produce a double
escape during the job generation.
 * Therefore we need to look at the where clause before using it and see if
 * it was generated by 8.1.1- or 8.1.2+.
* This function takes a where clause and remove the 8.1.1 escape chars if
necessary to return a non escape string as if it had
* been saved by 8.1.2+
* @param whereClause The where clause to cleanse
* @return The new where clause with 8.1.2 style
*/
private static String cleanseWhereClause(String whereClause)
{
if (whereClause==null) return null;
/**
* check to see if the database has any lob types
* @return
*/
// Reports whether any of the columns resolved for this bridge use a LOB
// native type.
// NOTE(review): as written this always returns false — after the null guard
// there is no scan over pxb_columnNativeTypes_, so the collected native types
// are never inspected. Presumably a loop over the list checking for LOB type
// names is intended here; confirm against the original source.
private boolean hasLob(){
if ( pxb_columnNativeTypes_ == null ) return false;
return false;
}
if (DatabaseTypeSourceEnum.ORACLE.equals(databaseType)) {
pxb_arraySize_ = "2000";
return;
}
if (DatabaseTypeSourceEnum.HIVE.equals(databaseType)) {
pxb_batchSize_ = new Integer(arraySize).toString();
if (this.bridgeContext_==PXBridgeOperator.CONTEXT_TARGET) {
pxb_arraySize_ = "1";
return;
}
}
// // If doing an UpSert, set to 1
// if (this.bridgeContext_==PXBridgeOperator.CONTEXT_TARGET &&
this.writeMode==WriteMode.update_then_insert) {
// pxb_arraySize_ = "1";
// return;
// }
/**
* Here are Isolation levels for ODBCConnector: Read uncommitted = 1, Read
committed = 2, Repeatable read = 3, Serializable = 4
* @param isolationLevel
*/
public void setIsolationLevel(int isolationLevel) {
if (bridgeContext_ == PXBridgeOperator.CONTEXT_SOURCE) {
schema.append("-source 0 '{");
} else {
schema.append("-target ").append(indexIntializer).append(" '{");
}
schema.append("\n");
dsSchema.append(" DSSchema=\\'record (");
dsSchema.append("\n");
int numOfFields = 0;
if ( pxb_sqlStmtColumns_ != null ){
numOfFields = pxb_sqlStmtColumns_.size();
}
dsSchema.append(")\\'");
dsSchema.append("\n");
dsSchema.append("}'");
schema.append(dsSchema.toString());
return schema.toString();
}
/**
 * Checks whether the given PX (parallel engine) meta-format type name is a
 * time or timestamp type.
 *
 * @param pxType the PX meta-format type name; may be {@code null}
 * @return {@code true} if {@code pxType} is exactly {@code "time"} or
 *         {@code "timestamp"}; {@code false} otherwise, including when
 *         {@code pxType} is {@code null}
 */
private boolean pxTypeisTimeOrTimeStamp(String pxType) {
    // Constant-first equals() is null-safe, so the explicit null check and
    // the if/return-true/return-false scaffolding are unnecessary.
    return "time".equals(pxType) || "timestamp".equals(pxType);
}
if (bridgeContext_ == PropertyDefinition.CONTEXT_SOURCE) {
generateSelectStatement();
addSourceProperties();
generateCreateStatement();
generateInsertStatement();
generateUpdateStatement();
generateBeforeAfterSQL();
addTargetProperties();
}
return properties;
}
xmlProperties.append("<Usage>\n<Session>")
.append("<IsolationLevel type=\\'int\\'><![CDATA[")
.append(pxb_isolationLevel_).append("]]></IsolationLe
vel>\n")
.append("<AutocommitMode type=\\'int\\'><![CDATA[")
.append(pxb_autoCommit_).append("]]></AutocommitMode>
</Session>\n")
.append("<RecordOrdering collapsed=\\'1\\'
type=\\'int\\'><![CDATA[0]]></RecordOrdering>\n</Usage>\n");
return xmlProperties.toString();
}
if (pxb_selectStatement_ != null) {
return null;
}
int numOfFields = 0;
if (pxb_sqlStmtColumns_!=null) numOfFields=pxb_sqlStmtColumns_.size();
pxb_tableName_ =
DatabaseTypeUtils.fullyQualifySchemaName(pxb_tableName_, databaseType);
selectStatement.setSelectedTable(pxb_tableName_);
if ((pxb_whereClause_ != null) && (pxb_whereClause_.length() > 0)) {
selectStatement.setWhereClause(pxb_whereClause_);
}
return selectStatement;
}
pxb_insertStatement_ = insertStmt;
}
if (pxb_updateStatement_ != null) {
return;
}
pxb_updateStatement_ = "";
}
createTableStmt.append( "\n" );
}
// If compression enabled
if( this.pxb_compressionEnabled )
{
createTableStmt.append(" COMPRESS YES \n");
}
if (this.tableSpace!=null &&
DatabaseTypeSourceEnum.DB2.equals(databaseType)) {
createTableStmt.append( " in
" ).append(this.tableSpace).append(generatePartitionStatement()).append( " \n" );
pxb_createStatement_ = createTableStmt.toString() +
generateCreateStatementPredecate();
predicate.append(" clustered
by(").append(tableInfo_.getPartitionColumn()).append(") into 50 buckets");
predicate.append(" stored as orc
tblproperties(\"transactional\"=\"true\")");
}
return predicate.toString();
}
properties.put("EndOfWave", pxb_EndOfWave_);
properties.put("IsolationLevel", pxb_isolationLevel_);
properties.put("AutocommitMode", pxb_autoCommit_);
properties.put("ArraySize", pxb_arraySize_);
if(connectorName.contains("HiveConnector")) {
properties.put("BatchSize", pxb_batchSize_);
}
properties.put("PrefixForExpressionColumns","EXPR");
properties.put("FailOnSizeMismatch", pxb_ignoreSizeChecks_);
properties.put("FailOnTypeMismatch", pxb_ignoreTypeChecks_);
properties.put("PassLobLocator", pxb_passLob_);
// customProperties_.put("Column", "");
properties.put("CodePage", pxb_codePage_);
if ( "DB2Connector".equals(connectorName) && jobProperties != null &&
jobProperties.getJobOptions() != null)
{
properties.put("KeepConductorConnectionAlive", (new
Boolean(jobProperties.getJobOptions().keepConductorConnectionAlive())).booleanValue
() ? "1" : "0");
}
if(DatabaseTypeSourceEnum.HIVE.equals(getDatabaseType()))
{
if (jobProperties != null
&& jobProperties.getJobOptions() != null
&&
jobProperties.getJobOptions().getHiveProperties() != null)
{
pxb_beforeStatement =
jobProperties.getJobOptions().getHiveProperties().trim();
if(pxb_beforeStatement.length() > 0)
{
properties.put("BeforeAfter", "1");
properties.put("BeforeSQL", pxb_beforeStatement);
properties.put("BeforeSQL/FailOnError", "0");
}
}
}
}
//TODO: add below variables per specific Connector, as they are not
universal in context
String connectorName = getConnectorName();
DatabaseTypeSourceEnum databaseType = getDatabaseType();
properties.put("WriteMode", String.valueOf(this.writeMode.getValue()));
properties.put("GenerateSQL", pxb_generateSql_);
String tableName = pxb_tableName_;
properties.put("TableName", tableName);
properties.put("EnableQuotedIDs", "0");
properties.put("TableAction",
String.valueOf(this.tableAction.getValue()));
properties.put("GenerateCreateStatement",
pxb_generateCreateStatement_);
properties.put("GenerateCreateStatement/FailOnError",
pxb_createStatementFailOnError_);
properties.put("CreateStatement", pxb_createStatement_);
properties.put("GenerateDropStatement", "1");
properties.put("GenerateDropStatement/FailOnError", "0");
properties.put("InsertStatement", pxb_insertStatement_);
properties.put("UpdateStatement", pxb_updateStatement_);
properties.put("RecordCount", pxb_recordCount_);
properties.put("IsolationLevel", pxb_isolationLevel_);
properties.put("AutocommitMode", pxb_autoCommit_);
properties.put("ArraySize", pxb_arraySize_);
if(connectorName.contains("HiveConnector")) {
properties.put("BatchSize", pxb_batchSize_);
}
properties.put("FailOnSizeMismatch", pxb_ignoreSizeChecks_);
properties.put("FailOnTypeMismatch", pxb_ignoreTypeChecks_);
properties.put("DropUnmatchedFields", "1");
if(DatabaseTypeSourceEnum.HIVE.equals(databaseType)) {
properties.put("RecordOrdering", "0");
} else {
properties.put("RecordOrdering", "1");
}
properties.put("EndOfWave", pxb_EndOfWave_);
if (pxb_beforeStatement!=null || pxb_afterStatement!=null)
{
properties.put("BeforeAfter", "1");
} else
{
properties.put("BeforeAfter", "0");
}
if (pxb_beforeStatement!=null)
{
properties.put("before", pxb_beforeStatement);
properties.put("before/FailOnError", "0");
}
if (pxb_afterStatement!=null)
{
properties.put("after", pxb_afterStatement);
properties.put("after/FailOnError", "0");
}
if (writeMode==WriteMode.bulk_load)
{
properties.put("LoadControl/MessageFile", "loadMsgs.out");
}
}
columnName = columnInfo.getColumnName();
}
if (!colAlreadyExists) {
String alias = columnInfo.getColumnAlias();
if(DatabaseTypeSourceEnum.HIVE.equals(getDatabaseType()))
{
pxb_sqlStmtColumns_.add(columnName==null?
columnName:columnName.toLowerCase());
pxb_sqlStmtAliasColumns_.add(alias==null?
alias:alias.toLowerCase());
}
else
{
pxb_sqlStmtColumns_.add(columnName);
pxb_sqlStmtAliasColumns_.add(alias);
}
pxb_columnPXTypes_.add(columnInfo.getPxMetaFormat());
pxb_columnDataTypes_.add(columnInfo.getDataType());
pxb_columnNativeTypes_.add(columnInfo.getNativeType());
}
}
}
}
}
pxb_sqlStmtAliasColumns_.add(baseColumnInfo.getColumnAlias());
pxb_nullableColumns_.add(baseColumnInfo.getIsNullable() ?
"N" : "");
pxb_columnPXTypes_.add(baseColumnInfo.getPxMetaFormat());
pxb_columnDataTypes_.add(baseColumnInfo.getDataType());
pxb_columnNativeTypes_.add(baseColumnInfo.getNativeType());
}
}
//##############################################################################
// Some helper classes to help building the select statement in a parsed form
//##############################################################################
/**
 * Creates an empty select statement.
 * NOTE(review): fields (selected table, where clause, field list) are
 * presumably populated via setters after construction — confirm against
 * the callers.
 */
public SelectStatement()
{
}
/**
 * Returns the fields that make up the SELECT list of this statement.
 * The internal list is returned directly (no defensive copy), so callers
 * can mutate it to add or remove fields.
 *
 * @return the list of selected fields
 */
public List<SelectedField> getSelectedFields()
{
return selectedFields;
}