>> System.currentTimeMillis() / 1000).set("id", 22L).set("recordType_", "Type2").build();
>>
>> recs.add(gr2);
>> }
>>
>> testPath = new Path("/tmp", UUID.randomUUID().toString());
> for (GenericRecord record : recs) {
>     writer.write(record);
> }
> writer.close();
> }
>
> private ParquetTableSource createParquetTableSource(Path path) throws IOException {
>     MessageType nestedSchema = SCHEMA
batchTableEnvironment = BatchTableEnvironment.create(env);
ParquetTableSource tableSource = createParquetTableSource(testPath);
batchTableEnvironment.registerTableSource("ParquetTable", tableSource);
Table tab = batchTableEnvironment.sqlQuery("select id,
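
The quoted snippets above are truncated, so for context here is a condensed,
self-contained sketch of roughly the same flow (writing GenericRecords to Parquet
with AvroParquetWriter, then querying them through ParquetTableSource with SQL).
The Avro schema, the field names id / recordType_ / timestamp_, and the exact
query are assumptions pieced together from the fragments, not the original test
code; it targets the Flink 1.9-era BatchTableEnvironment / ParquetTableSource APIs.

import java.util.ArrayList;
import java.util.List;
import java.util.UUID;

import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.GenericRecordBuilder;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.formats.parquet.ParquetTableSource;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.java.BatchTableEnvironment;
import org.apache.flink.types.Row;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.avro.AvroSchemaConverter;
import org.apache.parquet.hadoop.ParquetWriter;

public class ParquetTableSourceSketch {

    // Assumed schema; the real test presumably has more fields.
    private static final Schema SCHEMA = SchemaBuilder.record("Rec").fields()
            .requiredLong("id")
            .requiredString("recordType_")
            .requiredLong("timestamp_")
            .endRecord();

    public static void main(String[] args) throws Exception {
        // Build a few GenericRecords, mirroring the fragments above.
        List<GenericRecord> recs = new ArrayList<>();
        recs.add(new GenericRecordBuilder(SCHEMA)
                .set("id", 333L).set("recordType_", "Type1")
                .set("timestamp_", System.currentTimeMillis() / 1000).build());
        recs.add(new GenericRecordBuilder(SCHEMA)
                .set("id", 22L).set("recordType_", "Type2")
                .set("timestamp_", System.currentTimeMillis() / 1000).build());

        // Persist the records to a temporary Parquet file.
        Path testPath = new Path("/tmp", UUID.randomUUID().toString());
        try (ParquetWriter<GenericRecord> writer =
                 AvroParquetWriter.<GenericRecord>builder(testPath).withSchema(SCHEMA).build()) {
            for (GenericRecord record : recs) {
                writer.write(record);
            }
        }

        // Read the file back through a ParquetTableSource and run SQL over it.
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();
        BatchTableEnvironment batchTableEnvironment = BatchTableEnvironment.create(env);
        ParquetTableSource tableSource = ParquetTableSource.builder()
                .forParquetSchema(new AvroSchemaConverter().convert(SCHEMA))
                .path(testPath.toString())
                .build();
        batchTableEnvironment.registerTableSource("ParquetTable", tableSource);
        Table tab = batchTableEnvironment.sqlQuery("select id, recordType_ from ParquetTable");
        DataSet<Row> result = batchTableEnvironment.toDataSet(tab, Row.class);
        result.print();
    }
}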
Hi Hanan,
Thanks for reporting the issue. Would you please attach your test code here so
that I can help investigate?
Best Regards
Peter Huang
On Mon, Nov 18, 2019 at 2:51 AM Hanan Yehudai wrote:
> I have tried to persist Generic Avro records in a parquet file and then
> read it via ParquetTableSource, using SQL.
I have tried to persist Generic Avro records in a Parquet file and then read them
back via ParquetTableSource, using SQL.
It seems that the SQL is not executed properly!
The persisted records are:
Id, type
333,Type1
22,Type2
333,Type1
22,Type2
333,Type1
22,Type2
333,Type1
2