
Commit 0462e66

Merge branch 'hotfix_1.8_3.10.x_merged23442' into '1.8_release_3.10.x'

oralce char type

See merge request dt-insight-engine/flinkStreamSQL!8

2 parents 6159bb9 + 04028d5

9 files changed: +160 -34 lines changed

core/src/main/java/com/dtstack/flink/sql/table/AbsTableParser.java (+16 -2)

@@ -43,10 +43,12 @@ public abstract class AbsTableParser {
 
     private static final String PRIMARY_KEY = "primaryKey";
     private static final String NEST_JSON_FIELD_KEY = "nestFieldKey";
+    private static final String CHAR_TYPE_NO_LENGTH = "CHAR";
 
     private static Pattern primaryKeyPattern = Pattern.compile("(?i)PRIMARY\\s+KEY\\s*\\((.*)\\)");
     private static Pattern nestJsonFieldKeyPattern = Pattern.compile("(?i)((@*\\S+\\.)*\\S+)\\s+(\\w+)\\s+AS\\s+(\\w+)(\\s+NOT\\s+NULL)?$");
     private static Pattern physicalFieldFunPattern = Pattern.compile("\\w+\\((\\w+)\\)$");
+    private static Pattern charTypePattern = Pattern.compile("(?i)CHAR\\((\\d*)\\)$");
 
     private Map<String, Pattern> patternMap = Maps.newHashMap();
 

@@ -107,13 +109,25 @@ public void parseFieldsInfo(String fieldsInfo, TableInfo tableInfo){
             System.arraycopy(filedInfoArr, 0, filedNameArr, 0, filedInfoArr.length - 1);
             String fieldName = String.join(" ", filedNameArr);
             String fieldType = filedInfoArr[filedInfoArr.length - 1 ].trim();
-            Class fieldClass = dbTypeConvertToJavaType(fieldType);
+
+
+            Class fieldClass = null;
+            TableInfo.FieldExtraInfo fieldExtraInfo = null;
+
+            Matcher matcher = charTypePattern.matcher(fieldType);
+            if (matcher.find()) {
+                fieldClass = dbTypeConvertToJavaType(CHAR_TYPE_NO_LENGTH);
+                fieldExtraInfo = new TableInfo.FieldExtraInfo();
+                fieldExtraInfo.setLength(Integer.valueOf(matcher.group(1)));
+            } else {
+                fieldClass = dbTypeConvertToJavaType(fieldType);
+            }
 
             tableInfo.addPhysicalMappings(filedInfoArr[0],filedInfoArr[0]);
             tableInfo.addField(fieldName);
             tableInfo.addFieldClass(fieldClass);
             tableInfo.addFieldType(fieldType);
-            tableInfo.addFieldExtraInfo(null);
+            tableInfo.addFieldExtraInfo(fieldExtraInfo);
         }
 
         tableInfo.finish();
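For reference, here is a minimal standalone sketch (not part of the commit; the demo class and sample DDL type are made up) of how the new charTypePattern extracts the declared length from a type such as CHAR(4); AbsTableParser then registers the field as a plain CHAR and keeps the length in FieldExtraInfo.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class CharTypePatternDemo {
        // same regex that the commit adds to AbsTableParser
        private static final Pattern CHAR_TYPE_PATTERN = Pattern.compile("(?i)CHAR\\((\\d*)\\)$");

        public static void main(String[] args) {
            Matcher matcher = CHAR_TYPE_PATTERN.matcher("CHAR(4)");
            if (matcher.find()) {
                // group(1) holds the declared length, here "4"
                System.out.println("char length = " + Integer.valueOf(matcher.group(1)));
            }
        }
    }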

core/src/main/java/com/dtstack/flink/sql/table/TableInfo.java (+20)

@@ -194,6 +194,18 @@ public static class FieldExtraInfo implements Serializable {
         * default false:allow field is null
         */
        boolean notNull = false;
+       /**
+        * field length,eg.char(4)
+        */
+       int length;
+
+       public int getLength() {
+           return length;
+       }
+
+       public void setLength(int length) {
+           this.length = length;
+       }
 
        public boolean getNotNull() {
            return notNull;

@@ -202,5 +214,13 @@ public boolean getNotNull() {
        public void setNotNull(boolean notNull) {
            this.notNull = notNull;
        }
+
+       @Override
+       public String toString() {
+           return "FieldExtraInfo{" +
+                   "notNull=" + notNull +
+                   ", length=" + length +
+                   '}';
+       }
    }
 }

oracle/oracle-side/oracle-async-side/src/main/java/com/dtstack/flink/sql/side/oracle/OracleAsyncSideInfo.java (+19 -5)

@@ -23,13 +23,10 @@
 import com.dtstack.flink.sql.side.SideTableInfo;
 import com.dtstack.flink.sql.side.rdb.async.RdbAsyncSideInfo;
 import com.dtstack.flink.sql.side.rdb.table.RdbSideTableInfo;
+import com.dtstack.flink.sql.table.TableInfo;
 import com.dtstack.flink.sql.util.DtStringUtil;
-import com.dtstack.flink.sql.util.ParseUtils;
-import org.apache.calcite.sql.SqlNode;
+import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.java.typeutils.RowTypeInfo;
-import com.google.common.collect.Lists;
-
-import java.util.Arrays;
 import java.util.List;
 
 

@@ -49,4 +46,21 @@ public String quoteIdentifier(String identifier) {
        return "\"" + identifier + "\"";
    }
 
+   @Override
+   public String wrapperPlaceholder(String fieldName) {
+       int pos = sideTableInfo.getFieldList().indexOf(fieldName);
+       String type = sideTableInfo.getFieldTypeList().get(pos);
+
+       String sqlDefaultPlaceholder = " ? ";
+       String rpadFormat = "rpad(?, %d, ' ')";
+
+       if (StringUtils.contains(type.toLowerCase(), "char")) {
+           TableInfo.FieldExtraInfo fieldExtraInfo = sideTableInfo.getFieldExtraInfoList().get(pos);
+           int charLength = fieldExtraInfo == null ? 0 : fieldExtraInfo.getLength();
+           if (charLength > 0) {
+               return String.format(rpadFormat, charLength);
+           }
+       }
+       return sqlDefaultPlaceholder;
+   }
 }
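As a rough illustration (column name and length below are hypothetical, not from the commit), the override turns a CHAR(4) lookup key into an rpad placeholder, so the bound value is blank-padded the same way Oracle stores fixed-length CHAR data, while every other type keeps the plain JDBC placeholder.

    public class OraclePlaceholderDemo {
        public static void main(String[] args) {
            String rpadFormat = "rpad(?, %d, ' ')";

            // CHAR(4) key: pad the bind parameter to the declared column length
            String charPlaceholder = String.format(rpadFormat, 4);

            // prints: rpad(?, 4, ' ')
            // a hypothetical lookup would then read: ... WHERE "CODE" = rpad(?, 4, ' ')
            System.out.println(charPlaceholder);
        }
    }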

oracle/oracle-sink/src/main/java/com/dtstack/flink/sql/sink/oracle/OracleDialect.java (+72 -17)

@@ -19,6 +19,7 @@
 package com.dtstack.flink.sql.sink.oracle;
 
 import com.dtstack.flink.sql.sink.rdb.dialect.JDBCDialect;
+import com.dtstack.flink.sql.table.TableInfo;
 import com.dtstack.flink.sql.util.DtStringUtil;
 import org.apache.commons.lang3.StringUtils;
 

@@ -34,6 +35,14 @@
  */
 public class OracleDialect implements JDBCDialect {
 
+    private final String SQL_DEFAULT_PLACEHOLDER = " ? ";
+    private final String DEAL_CHAR_KEY = "char";
+    private String RPAD_FORMAT = " rpad(?, %d, ' ') ";
+
+    private List<String> fieldList;
+    private List<String> fieldTypeList;
+    private List<TableInfo.FieldExtraInfo> fieldExtraInfoList;
+
     @Override
     public boolean canHandle(String url) {
         return url.startsWith("jdbc:oracle:");

@@ -47,39 +56,50 @@ public Optional<String> defaultDriverName() {
     @Override
     public Optional<String> getUpsertStatement(String schema, String tableName, String[] fieldNames, String[] uniqueKeyFields, boolean allReplace) {
         tableName = DtStringUtil.getTableFullPath(schema, tableName);
-        StringBuilder sb = new StringBuilder();
-        sb.append("MERGE INTO " + tableName + " T1 USING "
-                + "(" + buildDualQueryStatement(fieldNames) + ") T2 ON ("
-                + buildConnectionConditions(uniqueKeyFields) + ") ");
+        StringBuilder mergeIntoSql = new StringBuilder();
+        mergeIntoSql.append("MERGE INTO " + tableName + " T1 USING (")
+                .append(buildDualQueryStatement(fieldNames))
+                .append(") T2 ON (")
+                .append(buildConnectionConditions(uniqueKeyFields) + ") ");
 
         String updateSql = buildUpdateConnection(fieldNames, uniqueKeyFields, allReplace);
 
         if (StringUtils.isNotEmpty(updateSql)) {
-            sb.append(" WHEN MATCHED THEN UPDATE SET ");
-            sb.append(updateSql);
+            mergeIntoSql.append(" WHEN MATCHED THEN UPDATE SET ");
+            mergeIntoSql.append(updateSql);
         }
 
-        sb.append(" WHEN NOT MATCHED THEN "
-                + "INSERT (" + Arrays.stream(fieldNames).map(this::quoteIdentifier).collect(Collectors.joining(",")) + ") VALUES ("
-                + Arrays.stream(fieldNames).map(col -> "T2." + quoteIdentifier(col)).collect(Collectors.joining(",")) + ")");
+        mergeIntoSql.append(" WHEN NOT MATCHED THEN ")
+                .append("INSERT (")
+                .append(Arrays.stream(fieldNames).map(this::quoteIdentifier).collect(Collectors.joining(",")))
+                .append(") VALUES (")
+                .append(Arrays.stream(fieldNames).map(col -> "T2." + quoteIdentifier(col)).collect(Collectors.joining(",")))
+                .append(")");
 
-        return Optional.of(sb.toString());
+        return Optional.of(mergeIntoSql.toString());
     }
 
     /**
-     * build T1."A"=T2."A" or T1."A"=nvl(T2."A",T1."A")
+     * build T1."A"=T2."A" or T1."A"=nvl(T2."A",T1."A")
     * @param fieldNames
     * @param uniqueKeyFields
     * @param allReplace
     * @return
     */
    private String buildUpdateConnection(String[] fieldNames, String[] uniqueKeyFields, boolean allReplace) {
        List<String> uniqueKeyList = Arrays.asList(uniqueKeyFields);
-        return Arrays.stream(fieldNames).filter(col -> !uniqueKeyList.contains(col)).map(col -> {
-            return allReplace ? quoteIdentifier("T1") + "." + quoteIdentifier(col) + " = " + quoteIdentifier("T2") + "." + quoteIdentifier(col) :
-                    quoteIdentifier("T1") + "." + quoteIdentifier(col) + " =nvl(" + quoteIdentifier("T2") + "." + quoteIdentifier(col) + ","
-                            + quoteIdentifier("T1") + "." + quoteIdentifier(col) + ")";
-        }).collect(Collectors.joining(","));
+        String updateConnectionSql = Arrays.stream(fieldNames).
+                filter(col -> !uniqueKeyList.contains(col))
+                .map(col -> buildConnectionByAllReplace(allReplace, col))
+                .collect(Collectors.joining(","));
+        return updateConnectionSql;
+    }
+
+    private String buildConnectionByAllReplace(boolean allReplace, String col) {
+        String conncetionSql = allReplace ? quoteIdentifier("T1") + "." + quoteIdentifier(col) + " = " + quoteIdentifier("T2") + "." + quoteIdentifier(col) :
+                quoteIdentifier("T1") + "." + quoteIdentifier(col) + " =nvl(" + quoteIdentifier("T2") + "." + quoteIdentifier(col) + ","
+                        + quoteIdentifier("T1") + "." + quoteIdentifier(col) + ")";
+        return conncetionSql;
    }
 
 

@@ -95,8 +115,43 @@ private String buildConnectionConditions(String[] uniqueKeyFields) {
     */
    public String buildDualQueryStatement(String[] column) {
        StringBuilder sb = new StringBuilder("SELECT ");
-        String collect = Arrays.stream(column).map(col -> " ? " + quoteIdentifier(col)).collect(Collectors.joining(", "));
+        String collect = Arrays.stream(column)
+                .map(col -> wrapperPlaceholder(col) + quoteIdentifier(col))
+                .collect(Collectors.joining(", "));
        sb.append(collect).append(" FROM DUAL");
        return sb.toString();
    }
+
+
+    /**
+     * char type is wrapped with rpad
+     * @param fieldName
+     * @return
+     */
+    public String wrapperPlaceholder(String fieldName) {
+        int pos = fieldList.indexOf(fieldName);
+        String type = fieldTypeList.get(pos);
+
+        if (StringUtils.contains(type.toLowerCase(), DEAL_CHAR_KEY)) {
+            TableInfo.FieldExtraInfo fieldExtraInfo = fieldExtraInfoList.get(pos);
+            int charLength = fieldExtraInfo == null ? 0 : fieldExtraInfo.getLength();
+            if (charLength > 0) {
+                return String.format(RPAD_FORMAT, charLength);
+            }
+        }
+        return SQL_DEFAULT_PLACEHOLDER;
+    }
+
+
+    public void setFieldList(List<String> fieldList) {
+        this.fieldList = fieldList;
+    }
+
+    public void setFieldTypeList(List<String> fieldTypeList) {
+        this.fieldTypeList = fieldTypeList;
+    }
+
+    public void setFieldExtraInfoList(List<TableInfo.FieldExtraInfo> fieldExtraInfoList) {
+        this.fieldExtraInfoList = fieldExtraInfoList;
+    }
 }
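The sketch below (hypothetical columns and a simplified placeholder lookup, not the committed code) shows the kind of USING clause buildDualQueryStatement now produces for the sink MERGE once a CHAR column's length is known.

    import java.util.Arrays;
    import java.util.stream.Collectors;

    public class DualQuerySketch {

        // assume "STATUS" was declared CHAR(2) in the sink DDL; other columns keep " ? "
        static String wrapperPlaceholder(String col) {
            return "STATUS".equals(col) ? " rpad(?, 2, ' ') " : " ? ";
        }

        static String quoteIdentifier(String identifier) {
            return "\"" + identifier + "\"";
        }

        public static void main(String[] args) {
            String[] columns = {"ID", "STATUS"};
            String dualQuery = "SELECT " + Arrays.stream(columns)
                    .map(col -> wrapperPlaceholder(col) + quoteIdentifier(col))
                    .collect(Collectors.joining(", ")) + " FROM DUAL";

            // prints: SELECT  ? "ID",  rpad(?, 2, ' ') "STATUS" FROM DUAL
            System.out.println(dualQuery);
        }
    }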

oracle/oracle-sink/src/main/java/com/dtstack/flink/sql/sink/oracle/OracleSink.java (+4)

@@ -37,6 +37,10 @@ public OracleSink() {
 
     @Override
     public JDBCUpsertOutputFormat getOutputFormat() {
+        ((OracleDialect) jdbcDialect).setFieldList(fieldList);
+        ((OracleDialect) jdbcDialect).setFieldTypeList(fieldTypeList);
+        ((OracleDialect) jdbcDialect).setFieldExtraInfoList(fieldExtraInfoList);
+
         JDBCOptions jdbcOptions = JDBCOptions.builder()
                 .setDBUrl(dbURL)
                 .setDialect(jdbcDialect)

rdb/rdb-side/src/main/java/com/dtstack/flink/sql/side/rdb/async/RdbAsyncSideInfo.java (+17 -5)

@@ -137,14 +137,26 @@ public String getAdditionalWhereClause() {
 
    public String getSelectFromStatement(String tableName, List<String> selectFields, List<String> conditionFields, List<String> sqlJoinCompareOperate,
                                         List<PredicateInfo> predicateInfoes) {
-        String fromClause = selectFields.stream().map(this::quoteIdentifier).collect(Collectors.joining(", "));
-        String whereClause = conditionFields.stream().map(f -> quoteIdentifier(f) + sqlJoinCompareOperate.get(conditionFields.indexOf(f)) + " ? ")
+        String fromClause = selectFields.stream()
+                .map(this::quoteIdentifier)
+                .collect(Collectors.joining(", "));
+
+        String whereClause = conditionFields.stream()
+                .map(f -> quoteIdentifier(f) + sqlJoinCompareOperate.get(conditionFields.indexOf(f)) + wrapperPlaceholder(f))
+                .collect(Collectors.joining(" AND "));
+
+        String predicateClause = predicateInfoes.stream()
+                .map(this::buildFilterCondition)
                .collect(Collectors.joining(" AND "));
-        String predicateClause = predicateInfoes.stream().map(this::buildFilterCondition).collect(Collectors.joining(" AND "));
 
-        String sql = "SELECT " + fromClause + " FROM " + tableName + (conditionFields.size() > 0 ? " WHERE " + whereClause : "")
+        String dimQuerySql = "SELECT " + fromClause + " FROM " + tableName + (conditionFields.size() > 0 ? " WHERE " + whereClause : "")
                + (predicateInfoes.size() > 0 ? " AND " + predicateClause : "") + getAdditionalWhereClause();
-        return sql;
+
+        return dimQuerySql;
+    }
+
+    public String wrapperPlaceholder(String fieldName) {
+        return " ? ";
    }
 
    public String buildFilterCondition(PredicateInfo info) {
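For completeness, a small sketch (table, fields, and operators are made up) of the lookup SQL the refactored getSelectFromStatement assembles; the base wrapperPlaceholder keeps the plain placeholder, and the OracleAsyncSideInfo override shown earlier substitutes rpad(...) for CHAR(n) condition fields.

    import java.util.Arrays;
    import java.util.List;
    import java.util.stream.Collectors;

    public class DimQuerySketch {

        // base-class behaviour: a plain JDBC placeholder for every condition field
        static String wrapperPlaceholder(String fieldName) {
            return " ? ";
        }

        static String quoteIdentifier(String identifier) {
            return "\"" + identifier + "\"";
        }

        public static void main(String[] args) {
            List<String> selectFields = Arrays.asList("ID", "NAME");
            List<String> conditionFields = Arrays.asList("ID");
            List<String> operators = Arrays.asList("=");

            String fromClause = selectFields.stream()
                    .map(DimQuerySketch::quoteIdentifier)
                    .collect(Collectors.joining(", "));

            String whereClause = conditionFields.stream()
                    .map(f -> quoteIdentifier(f) + operators.get(conditionFields.indexOf(f)) + wrapperPlaceholder(f))
                    .collect(Collectors.joining(" AND "));

            // prints: SELECT "ID", "NAME" FROM DIM_TABLE WHERE "ID"= ?
            System.out.println("SELECT " + fromClause + " FROM DIM_TABLE WHERE " + whereClause);
        }
    }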

rdb/rdb-side/src/main/java/com/dtstack/flink/sql/side/rdb/table/RdbSideTableInfo.java (+2)

@@ -47,6 +47,8 @@ public boolean check() {
        Preconditions.checkNotNull(tableName, "rdb of tableName is required");
        Preconditions.checkNotNull(userName, "rdb of userName is required");
        Preconditions.checkNotNull(password, "rdb of password is required");
+        Preconditions.checkArgument(getFieldList().size() == getFieldExtraInfoList().size(),
+                "fields and fieldExtraInfoList attributes must be the same length");
        return true;
    }

rdb/rdb-sink/src/main/java/com/dtstack/flink/sql/sink/rdb/RdbSink.java (+8 -5)

@@ -20,8 +20,8 @@
 import com.dtstack.flink.sql.sink.IStreamSinkGener;
 import com.dtstack.flink.sql.sink.rdb.format.JDBCUpsertOutputFormat;
 import com.dtstack.flink.sql.sink.rdb.table.RdbTableInfo;
+import com.dtstack.flink.sql.table.TableInfo;
 import com.dtstack.flink.sql.table.TargetTableInfo;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
 import org.apache.flink.api.java.tuple.Tuple2;
 import org.apache.flink.api.java.typeutils.RowTypeInfo;

@@ -34,10 +34,6 @@
 import com.dtstack.flink.sql.sink.rdb.dialect.JDBCDialect;
 
 import java.io.Serializable;
-import java.math.BigDecimal;
-import java.sql.Date;
-import java.sql.Timestamp;
-import java.sql.Types;
 import java.util.Arrays;
 import java.util.List;
 

@@ -88,6 +84,10 @@ public abstract class RdbSink implements RetractStreamTableSink<Row>, Serializab
 
    protected String updateMode;
 
+    public List<String> fieldList;
+    public List<String> fieldTypeList;
+    public List<TableInfo.FieldExtraInfo> fieldExtraInfoList;
+
    public RdbSink(JDBCDialect jdbcDialect) {
        this.jdbcDialect = jdbcDialect;
    }

@@ -111,6 +111,9 @@ public RdbSink genStreamSink(TargetTableInfo targetTableInfo) {
        this.sqlTypes = JDBCTypeConvertUtils.buildSqlTypes(fieldTypeArray);
        this.allReplace = rdbTableInfo.isAllReplace();
        this.updateMode = rdbTableInfo.getUpdateMode();
+        this.fieldList = rdbTableInfo.getFieldList();
+        this.fieldTypeList = rdbTableInfo.getFieldTypeList();
+        this.fieldExtraInfoList = rdbTableInfo.getFieldExtraInfoList();
        return this;
    }
 

rdb/rdb-sink/src/main/java/com/dtstack/flink/sql/sink/rdb/table/RdbTableInfo.java (+2)

@@ -183,6 +183,8 @@ public boolean check() {
        }
 
 
+        Preconditions.checkArgument(getFieldList().size() == getFieldExtraInfoList().size(),
+                "fields and fieldExtraInfoList attributes must be the same length");
        return true;
    }
 
