Posted to issues@flink.apache.org by GitBox <gi...@apache.org> on 2019/07/10 05:43:03 UTC

[GitHub] [flink] lirui-apache commented on a change in pull request #9037: [FLINK-13157] Re-enable unit test reading complex types in HiveInputFormatTest

lirui-apache commented on a change in pull request #9037: [FLINK-13157] Re-enable unit test reading complex types in HiveInputFormatTest
URL: https://github.com/apache/flink/pull/9037#discussion_r301893481
 
 

 ##########
 File path: flink-connectors/flink-connector-hive/src/test/java/org/apache/flink/batch/connectors/hive/HiveInputFormatTest.java
 ##########
 @@ -125,54 +128,54 @@ public void testReadFromHiveInputFormat() throws Exception {
 		Assert.assertEquals("4,4,a,4000,4.44", rows.get(3).toString());
 	}
 
-//	@Test
-//	public void testReadComplextDataTypeFromHiveInputFormat() throws Exception {
-//		final String dbName = "default";
-//		final String tblName = "complext_test";
-//
-//		TableSchema.Builder builder = new TableSchema.Builder();
-//		builder.fields(new String[]{"a", "m", "s"}, new DataType[]{
-//				DataTypes.ARRAY(DataTypes.INT()),
-//				DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
-//				DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING()))});
-//
-//		// Use the metastore client to create the Hive table instead of HiveCatalog,
-//		// because HiveCatalog doesn't support setting a SerDe yet.
-//		HiveMetastoreClientWrapper client = HiveMetastoreClientFactory.create(hiveConf, null);
-//		org.apache.hadoop.hive.metastore.api.Table tbl = new org.apache.hadoop.hive.metastore.api.Table();
-//		tbl.setDbName(dbName);
-//		tbl.setTableName(tblName);
-//		tbl.setCreateTime((int) (System.currentTimeMillis() / 1000));
-//		tbl.setParameters(new HashMap<>());
-//		StorageDescriptor sd = new StorageDescriptor();
-//		String location = HiveInputFormatTest.class.getResource("/complex_test").getPath();
-//		sd.setLocation(location);
-//		sd.setInputFormat(DEFAULT_HIVE_INPUT_FORMAT_TEST_INPUT_FORMAT_CLASS);
-//		sd.setOutputFormat(DEFAULT_OUTPUT_FORMAT_CLASS);
-//		sd.setSerdeInfo(new SerDeInfo());
-//		sd.getSerdeInfo().setSerializationLib(DEFAULT_HIVE_INPUT_FORMAT_TEST_SERDE_CLASS);
-//		sd.getSerdeInfo().setParameters(new HashMap<>());
-//		sd.getSerdeInfo().getParameters().put("serialization.format", "1");
-//		sd.getSerdeInfo().getParameters().put("field.delim", ";");
-//		// org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe uses 'colelction.delim' as its collection
-//		// delimiter config key; the spelling appears to be a typo in that class.
-//		sd.getSerdeInfo().getParameters().put("colelction.delim", ",");
-//		sd.getSerdeInfo().getParameters().put("mapkey.delim", ":");
-//		sd.setCols(HiveTableUtil.createHiveColumns(builder.build()));
-//		tbl.setSd(sd);
-//		tbl.setPartitionKeys(new ArrayList<>());
-//
-//		client.createTable(tbl);
-//		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
-//		env.setParallelism(1);
-//		RowTypeInfo rowTypeInfo = new RowTypeInfo(builder.build().getFieldTypes(), builder.build().getFieldNames());
-//		List<HiveTablePartition> partitions = new ArrayList<>();
-//		partitions.add(new HiveTablePartition(sd, new HashMap<>()));
-//		HiveTableInputFormat hiveTableInputFormat =
-//			new HiveTableInputFormat(new JobConf(hiveConf), hiveCatalog., partitions);
-//		DataSet<Row> rowDataSet = env.createInput(hiveTableInputFormat);
-//		List<Row> rows = rowDataSet.collect();
-//		Assert.assertEquals(1, rows.size());
-//		Assert.assertEquals("[1, 2, 3],{1=a, 2=b},3,c", rows.get(0).toString());
-//	}
+	@Test
+	public void testReadComplextDataTypeFromHiveInputFormat() throws Exception {
+		final String dbName = "default";
+		final String tblName = "complext_test";
+
+		TableSchema.Builder builder = new TableSchema.Builder();
+		builder.fields(new String[]{"a", "m", "s"}, new DataType[]{
+				DataTypes.ARRAY(DataTypes.INT()),
+				DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
+				DataTypes.ROW(DataTypes.FIELD("f1", DataTypes.INT()), DataTypes.FIELD("f2", DataTypes.STRING()))});
+
+		// Use the metastore client to create the Hive table instead of HiveCatalog,
+		// because HiveCatalog doesn't support setting a SerDe yet.
+		HiveMetastoreClientWrapper client = HiveMetastoreClientFactory.create(hiveConf, null);
+		org.apache.hadoop.hive.metastore.api.Table tbl = new org.apache.hadoop.hive.metastore.api.Table();
+		tbl.setDbName(dbName);
+		tbl.setTableName(tblName);
+		tbl.setCreateTime((int) (System.currentTimeMillis() / 1000));
+		tbl.setParameters(new HashMap<>());
+		StorageDescriptor sd = new StorageDescriptor();
+		String location = HiveInputFormatTest.class.getResource("/complex_test").getPath();
+		sd.setLocation(location);
+		sd.setInputFormat(DEFAULT_HIVE_INPUT_FORMAT_TEST_INPUT_FORMAT_CLASS);
+		sd.setOutputFormat(DEFAULT_OUTPUT_FORMAT_CLASS);
+		sd.setSerdeInfo(new SerDeInfo());
+		sd.getSerdeInfo().setSerializationLib(DEFAULT_HIVE_INPUT_FORMAT_TEST_SERDE_CLASS);
+		sd.getSerdeInfo().setParameters(new HashMap<>());
+		sd.getSerdeInfo().getParameters().put("serialization.format", "1");
+		sd.getSerdeInfo().getParameters().put("field.delim", ";");
 
 Review comment:
   Can we use these config keys from `serdeConstants`?
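  
  For illustration, a hypothetical sketch of what that suggestion could look like, assuming Hive's `org.apache.hadoop.hive.serde.serdeConstants` is on the test classpath (the `SerDeSetupSketch` class and `configureDelimiters` helper are made up for this example, not part of the PR):
  
  ```java
  import java.util.HashMap;
  
  import org.apache.hadoop.hive.metastore.api.SerDeInfo;
  import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
  import org.apache.hadoop.hive.serde.serdeConstants;
  
  public class SerDeSetupSketch {
  
  	// Same SerDe setup as in the test, but with the config keys taken from
  	// serdeConstants instead of hand-written string literals.
  	static void configureDelimiters(StorageDescriptor sd) {
  		sd.setSerdeInfo(new SerDeInfo());
  		sd.getSerdeInfo().setParameters(new HashMap<>());
  		sd.getSerdeInfo().getParameters().put(serdeConstants.SERIALIZATION_FORMAT, "1");
  		sd.getSerdeInfo().getParameters().put(serdeConstants.FIELD_DELIM, ";");
  		// In the Hive versions this test targets, COLLECTION_DELIM resolves to the
  		// misspelled "colelction.delim" key that LazySimpleSerDe actually reads, so
  		// using the constant spares the test from repeating the typo by hand.
  		sd.getSerdeInfo().getParameters().put(serdeConstants.COLLECTION_DELIM, ",");
  		sd.getSerdeInfo().getParameters().put(serdeConstants.MAPKEY_DELIM, ":");
  	}
  }
  ```
  
  Taking the keys from the constants also means the test follows whatever spelling the Hive dependency on the classpath defines, rather than hard-coding one.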

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services