Commit 7b6f303

gituser committed
Merge branch '1.10_release_4.0.x' into 1.10_release_4.1.x
2 parents dc0e683 + 25316fe, commit 7b6f303

4 files changed

Lines changed: 23 additions & 21 deletions


core/src/main/java/com/dtstack/flink/sql/table/AbstractTableInfo.java

Lines changed: 1 addition & 1 deletion
@@ -49,7 +49,7 @@ public abstract class AbstractTableInfo implements Serializable {
     private final List<String> fieldList = Lists.newArrayList();
 
     /** key: alias, value: realField */
-    private Map<String, String> physicalFields = Maps.newHashMap();
+    private Map<String, String> physicalFields = Maps.newLinkedHashMap();
 
     private final List<String> fieldTypeList = Lists.newArrayList();
 
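
Note: the only functional change above is the map implementation. HashMap makes no guarantee about iteration order, while LinkedHashMap iterates in insertion order, so physicalFields now yields its alias-to-real-field entries in the order the fields were declared. A minimal sketch of the difference, with hypothetical field names:

import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;

public class FieldOrderSketch {
    public static void main(String[] args) {
        Map<String, String> hashed = new HashMap<>();
        Map<String, String> linked = new LinkedHashMap<>();
        for (String alias : new String[]{"id", "user_name", "birthday", "address"}) {
            hashed.put(alias, alias + "_real");   // hypothetical alias -> real field
            linked.put(alias, alias + "_real");
        }
        // HashMap order depends on hash buckets and may differ from declaration order.
        System.out.println(hashed.keySet());
        // LinkedHashMap always iterates in insertion (declaration) order:
        System.out.println(linked.keySet());      // [id, user_name, birthday, address]
    }
}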

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/HbaseOutputFormat.java

Lines changed: 20 additions & 18 deletions
@@ -28,7 +28,11 @@
 import org.apache.flink.configuration.Configuration;
 import org.apache.flink.types.Row;
 import org.apache.flink.util.Preconditions;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.AuthUtil;
+import org.apache.hadoop.hbase.ChoreService;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.ScheduledChore;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Delete;
@@ -42,9 +46,10 @@
 import java.io.File;
 import java.io.IOException;
 import java.security.PrivilegedAction;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
 
 /**
  * @author: jingzhen@dtstack.com
@@ -129,21 +134,18 @@ private void openKerberosConn() throws Exception {
 
         UserGroupInformation userGroupInformation = HbaseConfigUtils.loginAndReturnUGI(conf, clientPrincipal, clientKeytabFile);
         org.apache.hadoop.conf.Configuration finalConf = conf;
-        conn = userGroupInformation.doAs(new PrivilegedAction<Connection>() {
-            @Override
-            public Connection run() {
-                try {
-                    ScheduledChore authChore = AuthUtil.getAuthChore(finalConf);
-                    if (authChore != null) {
-                        choreService = new ChoreService("hbaseKerberosSink");
-                        choreService.scheduleChore(authChore);
-                    }
-
-                    return ConnectionFactory.createConnection(finalConf);
-                } catch (IOException e) {
-                    LOG.error("Get connection fail with config:{}", finalConf);
-                    throw new RuntimeException(e);
+        conn = userGroupInformation.doAs((PrivilegedAction<Connection>) () -> {
+            try {
+                ScheduledChore authChore = AuthUtil.getAuthChore(finalConf);
+                if (authChore != null) {
+                    choreService = new ChoreService("hbaseKerberosSink");
+                    choreService.scheduleChore(authChore);
                 }
+
+                return ConnectionFactory.createConnection(finalConf);
+            } catch (IOException e) {
+                LOG.error("Get connection fail with config:{}", finalConf);
+                throw new RuntimeException(e);
             }
         });
     }
@@ -355,8 +357,8 @@ public HbaseOutputFormat finish() {
         String[] qualifiers = new String[format.columnNames.length];
 
         if (format.columnNameFamily != null) {
-            Set<String> keySet = format.columnNameFamily.keySet();
-            String[] columns = keySet.toArray(new String[keySet.size()]);
+            List<String> keyList = new LinkedList<>(format.columnNameFamily.keySet());
+            String[] columns = keyList.toArray(new String[0]);
             for (int i = 0; i < columns.length; ++i) {
                 String col = columns[i];
                 String[] part = col.split(":");
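
Note: three changes above. The wildcard org.apache.hadoop.hbase.* import becomes explicit imports, the anonymous PrivilegedAction is rewritten as a lambda with identical behaviour, and finish() copies columnNameFamily.keySet() into a LinkedList and converts it with toArray(new String[0]), a stylistic change; the ordering guarantee itself comes from the LinkedHashMap switch elsewhere in this commit. The Kerberos pattern is unchanged: log in from a keytab, create the connection inside doAs, and optionally schedule HBase's AuthUtil chore to keep credentials refreshed. Below is a minimal, self-contained sketch of that pattern; it substitutes Hadoop's generic UserGroupInformation.loginUserFromKeytabAndReturnUGI for the project's HbaseConfigUtils.loginAndReturnUGI helper and assumes the Configuration already carries the Kerberos settings:

import java.io.IOException;
import java.security.PrivilegedAction;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.AuthUtil;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.ScheduledChore;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.security.UserGroupInformation;

public class KerberosConnectionSketch {

    // Kept as a field so the caller can shut it down when the sink closes.
    private ChoreService choreService;

    public Connection open(String principal, String keytabPath) throws IOException {
        // Assumes Kerberos is already enabled in the loaded configuration files.
        Configuration conf = HBaseConfiguration.create();
        UserGroupInformation ugi =
                UserGroupInformation.loginUserFromKeytabAndReturnUGI(principal, keytabPath);
        return ugi.doAs((PrivilegedAction<Connection>) () -> {
            try {
                // Optional chore that periodically refreshes Kerberos credentials.
                ScheduledChore authChore = AuthUtil.getAuthChore(conf);
                if (authChore != null) {
                    choreService = new ChoreService("hbaseKerberosSink");
                    choreService.scheduleChore(authChore);
                }
                return ConnectionFactory.createConnection(conf);
            } catch (IOException e) {
                throw new RuntimeException("could not create HBase connection", e);
            }
        });
    }
}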

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/table/HbaseSinkParser.java

Lines changed: 1 addition & 2 deletions
@@ -97,7 +97,6 @@ public AbstractTableInfo getTableInfo(String tableName, String fieldsInfo, Map<S
 
     public void parseFieldsInfo(String fieldsInfo, HbaseTableInfo tableInfo){
         List<String> fieldRows = DtStringUtil.splitIgnoreQuota(fieldsInfo, ',');
-        Map<String, String> columnFamilies = new LinkedHashMap<>();
         for(String fieldRow : fieldRows){
             fieldRow = fieldRow.trim();
 
@@ -129,7 +128,7 @@ public void parseFieldsInfo(String fieldsInfo, HbaseTableInfo tableInfo){
     }
 
     private Map<String, String> parseColumnFamily(Map<String, String> physicalFieldMap){
-        Map<String, String> columnFamiles = Maps.newHashMap();
+        Map<String, String> columnFamiles = Maps.newLinkedHashMap();
         physicalFieldMap.values().forEach(x -> {
             String[] columnFamily = StringUtils.split(x.trim(), ":");
             columnFamiles.put(x, columnFamily[1]);
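
Note: besides dropping the unused columnFamilies local, parseColumnFamily now builds its map with Guava's Maps.newLinkedHashMap(). That matters because HbaseOutputFormat.finish() (above) derives its families and qualifiers arrays positionally from the keys of what appears to be this same mapping, so iteration order has to match the declared column order. A small illustration with hypothetical "family:qualifier" columns:

import java.util.LinkedHashMap;
import java.util.Map;

public class ColumnFamilyOrderSketch {
    public static void main(String[] args) {
        // Hypothetical columns in declaration order.
        Map<String, String> columnNameFamily = new LinkedHashMap<>();
        columnNameFamily.put("cf1:name", "name");
        columnNameFamily.put("cf1:age", "age");
        columnNameFamily.put("cf2:address", "address");

        String[] columns = columnNameFamily.keySet().toArray(new String[0]);
        String[] families = new String[columns.length];
        String[] qualifiers = new String[columns.length];
        for (int i = 0; i < columns.length; i++) {
            String[] part = columns[i].split(":");
            families[i] = part[0];
            qualifiers[i] = part[1];
        }
        // With an insertion-ordered map the arrays line up with the declared columns:
        // families   = [cf1, cf1, cf2], qualifiers = [name, age, address].
        // A plain HashMap could reorder the keys and shift values into the wrong family.
    }
}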

hbase/hbase-sink/src/main/java/com/dtstack/flink/sql/sink/hbase/table/HbaseTableInfo.java

Lines changed: 1 addition & 0 deletions
@@ -25,6 +25,7 @@
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Maps;
 
+import java.util.LinkedHashMap;
 import java.util.Map;
 
 /**
