hequn8128 commented on a change in pull request #13066:
URL: https://github.com/apache/flink/pull/13066#discussion_r466795433



##########
File path: flink-python/pyflink/fn_execution/beam/beam_coder_impl_slow.py
##########
@@ -186,6 +187,68 @@ def __repr__(self):
         return 'ArrayCoderImpl[%s]' % repr(self._elem_coder)
 
 
+class PickledBytesCoderImpl(StreamCoderImpl):
+
+    def __init__(self):
+        self.field_coder = BinaryCoderImpl()
+
+    def encode_to_stream(self, value, out_stream, nested):
+        coded_data = pickle.dumps(value)
+        real_coded_data = self.field_coder.encode(coded_data)

Review comment:
       This comment has not been addressed?
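
   For reference, a rough sketch of how a complete pickled-bytes coder could look once the decode path and the stream delegation are filled in. It assumes the `StreamCoderImpl` and `BinaryCoderImpl` classes already present in `beam_coder_impl_slow.py`; the exact delegation calls are illustrative, not the PR's final code.

```python
import pickle


class PickledBytesCoderImpl(StreamCoderImpl):
    """Sketch: serializes arbitrary Python objects with pickle and delegates
    the length-prefixed byte handling to the wrapped binary coder."""

    def __init__(self):
        self.field_coder = BinaryCoderImpl()

    def encode_to_stream(self, value, out_stream, nested):
        # pickle the value, then let the binary coder write the bytes
        # (with their length prefix) to the output stream
        coded_data = pickle.dumps(value)
        self.field_coder.encode_to_stream(coded_data, out_stream, nested)

    def decode_from_stream(self, in_stream, nested):
        # read the raw bytes back and un-pickle the original object
        real_data = self.field_coder.decode_from_stream(in_stream, nested)
        return pickle.loads(real_data)

    def __repr__(self):
        return 'PickledBytesCoderImpl[bytes]'
```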

##########
File path: flink-python/pyflink/proto/flink-fn-execution.proto
##########
@@ -147,3 +163,46 @@ message Schema {
 
   repeated Field fields = 1;
 }
+
+// A representation of the data type information of a DataStream.
+message TypeInfo {
+  enum TypeName {
+    ROW = 0;
+    STRING = 1;
+    BYTE = 2;
+    BOOLEAN = 3;
+    SHORT = 4;
+    INT = 5;
+    LONG = 6;
+    FLOAT = 7;
+    DOUBLE = 8;
+    CHAR = 9;
+    BIG_INT = 10;
+    BIG_DEC = 11;
+    SQL_DATE = 12;
+    SQL_TIME = 13;
+    SQL_TIMESTAMP = 14;
+    INSTANT = 15;

Review comment:
       Please remove the INSTANT type!

##########
File path: flink-python/pyflink/proto/flink-fn-execution.proto
##########
@@ -147,3 +163,46 @@ message Schema {
 
   repeated Field fields = 1;
 }
+
+// A representation of the data type information of a DataStream.

Review comment:
       A representation of the data type information in DataStream.

##########
File path: flink-python/pyflink/proto/flink-fn-execution.proto
##########
@@ -52,7 +52,23 @@ message UserDefinedFunctions {
   bool metric_enabled = 2;
 }
 
-// A representation of the data schema.
+// User defined DataStream function definition.
+message UserDefinedDataStreamFunction {
+  enum FunctionType {
+    MAP = 0;
+    FLAT_MAP = 1;
+  }
+  FunctionType functionType = 1;
+  bytes payload = 2;
+}
+
+// A list of user-defined DataStream functions to be executed in a batch.
+message UserDefinedDataStreamFunctions {
+  repeated UserDefinedDataStreamFunction udfs = 1;
+  bool metric_enabled = 2;
+}
+
+// A representation of the DataStream.

Review comment:
       A representation of the Table API Schema
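
   As background for the new `payload` field above: the Python side is expected to serialize the user-defined function into these opaque bytes before handing it to the Java operator. A minimal sketch of how that could look, assuming cloudpickle is used for the serialization and using a hypothetical helper name (neither is taken from the PR):

```python
# Hedged sketch: building a UserDefinedDataStreamFunction message on the
# Python side. The cloudpickle usage and the helper name are assumptions
# for illustration only.
import cloudpickle

from pyflink.fn_execution import flink_fn_execution_pb2


def build_map_function_proto(user_func):
    proto = flink_fn_execution_pb2.UserDefinedDataStreamFunction()
    proto.functionType = flink_fn_execution_pb2.UserDefinedDataStreamFunction.MAP
    # the serialized user function travels to the Java operator as opaque bytes
    proto.payload = cloudpickle.dumps(user_func)
    return proto
```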

##########
File path: flink-python/src/main/java/org/apache/flink/table/runtime/runners/python/beam/BeamTablePythonStatelessFunctionRunner.java
##########
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.table.runtime.runners.python.beam;
+
+import org.apache.flink.annotation.Internal;
+import org.apache.flink.fnexecution.v1.FlinkFnApi;
+import org.apache.flink.python.env.PythonEnvironmentManager;
+import org.apache.flink.python.metric.FlinkMetricContainer;
+import org.apache.flink.streaming.api.runners.python.beam.BeamPythonStatelessFunctionRunner;
+import org.apache.flink.table.runtime.typeutils.PythonTypeUtils;
+import org.apache.flink.table.types.logical.LogicalType;
+import org.apache.flink.table.types.logical.RowType;
+import org.apache.flink.util.Preconditions;
+
+import org.apache.beam.model.pipeline.v1.RunnerApi;
+
+import java.util.Map;
+
+/**
+ * A {@link BeamTablePythonStatelessFunctionRunner} used to execute Python stateless functions.
+ */
+@Internal
+public class BeamTablePythonStatelessFunctionRunner extends BeamPythonStatelessFunctionRunner {
+
+

Review comment:
       The comment has not been addressed!
   private static final long serialVersionUID = 1L;

##########
File path: flink-python/pyflink/datastream/data_stream.py
##########
@@ -160,3 +171,122 @@ def set_buffer_timeout(self, timeout_millis: int):
         """
         self._j_data_stream.setBufferTimeout(timeout_millis)
         return self
+
+    def map(self, func: Union[Callable, MapFunction], type_info: TypeInformation = None) \
+            -> DataStream:
+        """
+        Applies a Map transformation on a DataStream. The transformation calls a MapFunction for
+        each element of the DataStream. Each MapFunction call returns exactly one element. The user
+        can also extend RichMapFunction to gain access to other features provided by the
+        RichFunction interface.
+
+        Note that if the user does not specify the output data type, the output data will be serialized
+        as a pickled primitive byte array.
+
+        :param func: The MapFunction that is called for each element of the DataStream.
+        :param type_info: The type information of the MapFunction output data.
+        :return: The transformed DataStream.
+        """
+        if not isinstance(func, MapFunction):
+            if callable(func):
+                func = MapFunctionWrapper(func)
+            else:
+                raise TypeError("The input must be MapFunction or a callable function")
+        func_name = "m_map_" + str(uuid.uuid1())
+        j_python_data_stream_scalar_function_operator, output_type_info = \
+            self._get_java_python_function_operator(func,
+                                                    type_info,
+                                                    func_name,
+                                                    flink_fn_execution_pb2
+                                                    .UserDefinedDataStreamFunction.MAP)
+        return DataStream(self._j_data_stream.transform(

Review comment:
       func_name => "Map". Keep consistent with Java. Same for flatMap
   ```
           return DataStream(self._j_data_stream.transform(
               "Map",
               output_type_info.get_java_type_info(),
               j_python_data_stream_scalar_function_operator
           ))
   ```

##########
File path: flink-python/pyflink/fn_execution/beam/beam_coders.py
##########
@@ -209,3 +211,51 @@ def _to_row_type(row_schema):
 
     def __repr__(self):
         return 'ArrowCoder[%s]' % self._schema
+
+
+class DataStreamStatelessMapCoder(FastCoder):
+
+    def __init__(self, field_coder):
+        self._field_coder = field_coder
+
+    def _create_impl(self):
+        return beam_coder_impl_slow.DataStreamStatelessMapCoderImpl(
+            self._field_coder.get_slow_impl())
+
+    def is_deterministic(self):  # type: () -> bool
+        return all(c.is_deterministic() for c in self._field_coder)
+
+    @Coder.register_urn(FLINK_MAP_FUNCTION_DATA_STREAM_CODER_URN, flink_fn_execution_pb2.TypeInfo)
+    def _pickled_from_runner_api_parameter(type_info_proto, unused_components, unused_context):
+        return DataStreamStatelessMapCoder(from_type_info_proto(type_info_proto.field[0].type))
+
+    def to_type_hint(self):
+        pass

Review comment:
       return self._field_coder.to_type_hint ?
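
   For clarity, the suggested change would look roughly like this (assuming the wrapped field coder exposes a `to_type_hint()` of its own, which is an assumption):

```python
    def to_type_hint(self):
        # delegate to the wrapped element coder instead of returning None
        return self._field_coder.to_type_hint()
```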

##########
File path: flink-python/src/main/java/org/apache/flink/datastream/runtime/operators/python/DataStreamPythonStatelessFunctionOperator.java
##########
@@ -0,0 +1,152 @@
+package org.apache.flink.datastream.runtime.operators.python;

Review comment:
       The comment has not been addressed! 
   Add license text for this class. Please check the test failures

##########
File path: flink-python/src/main/java/org/apache/flink/streaming/api/runners/python/beam/BeamPythonStatelessFunctionRunner.java
##########
@@ -170,34 +152,11 @@ public ExecutableStage createExecutableStage() throws Exception {
                        .build();
        }
 
-       /**
-        * Gets the proto representation of the input coder.
-        */
-       private RunnerApi.Coder getInputCoderProto() {
-               return getRowCoderProto(inputType);
-       }
+       protected  abstract byte[] getUserDefinedFunctionsProtoBytes();

Review comment:
       The comment has not been addressed!
   Keep one blank between protected and abstract.

##########
File path: flink-python/pyflink/datastream/data_stream.py
##########
@@ -160,3 +171,122 @@ def set_buffer_timeout(self, timeout_millis: int):
         """
         self._j_data_stream.setBufferTimeout(timeout_millis)
         return self
+
+    def map(self, func: Union[Callable, MapFunction], type_info: TypeInformation = None) \
+            -> DataStream:
+        """
+        Applies a Map transformation on a DataStream. The transformation calls a MapFunction for
+        each element of the DataStream. Each MapFunction call returns exactly one element. The user
+        can also extend RichMapFunction to gain access to other features provided by the
+        RichFunction interface.
+
+        Note that if the user does not specify the output data type, the output data will be serialized
+        as a pickled primitive byte array.
+
+        :param func: The MapFunction that is called for each element of the DataStream.
+        :param type_info: The type information of the MapFunction output data.
+        :return: The transformed DataStream.
+        """
+        if not isinstance(func, MapFunction):
+            if callable(func):
+                func = MapFunctionWrapper(func)
+            else:
+                raise TypeError("The input must be MapFunction or a callable function")

Review comment:
       must be a MapFunction 

##########
File path: flink-python/pyflink/datastream/data_stream.py
##########
@@ -160,3 +171,122 @@ def set_buffer_timeout(self, timeout_millis: int):
         """
         self._j_data_stream.setBufferTimeout(timeout_millis)
         return self
+
+    def map(self, func: Union[Callable, MapFunction], type_info: TypeInformation = None) \
+            -> DataStream:
+        """
+        Applies a Map transformation on a DataStream. The transformation calls a MapFunction for
+        each element of the DataStream. Each MapFunction call returns exactly one element. The user
+        can also extend RichMapFunction to gain access to other features provided by the
+        RichFunction interface.
+
+        Note that if the user does not specify the output data type, the output data will be serialized
+        as a pickled primitive byte array.
+
+        :param func: The MapFunction that is called for each element of the DataStream.
+        :param type_info: The type information of the MapFunction output data.
+        :return: The transformed DataStream.
+        """
+        if not isinstance(func, MapFunction):
+            if callable(func):
+                func = MapFunctionWrapper(func)
+            else:
+                raise TypeError("The input must be MapFunction or a callable function")
+        func_name = "m_map_" + str(uuid.uuid1())
+        j_python_data_stream_scalar_function_operator, output_type_info = \
+            self._get_java_python_function_operator(func,
+                                                    type_info,
+                                                    func_name,
+                                                    flink_fn_execution_pb2
+                                                    .UserDefinedDataStreamFunction.MAP)
+        return DataStream(self._j_data_stream.transform(
+            func_name,
+            output_type_info.get_java_type_info(),
+            j_python_data_stream_scalar_function_operator
+        ))
+
+    def flat_map(self, func: Union[Callable, FlatMapFunction], type_info: TypeInformation = None)\
+            -> DataStream:
+        """
+        Applies a FlatMap transformation on a DataStream. The transformation calls a FlatMapFunction
+        for each element of the DataStream. Each FlatMapFunction call can return any number of
+        elements, including none. The user can also extend RichFlatMapFunction to gain access to
+        other features provided by the RichFunction interface.
+
+        :param func: The FlatMapFunction that is called for each element of the DataStream.
+        :param type_info: The type information of output data.
+        :return: The transformed DataStream.
+        """
+        if not isinstance(func, FlatMapFunction):
+            if callable(func):
+                func = FlatMapFunctionWrapper(func)
+            else:
+                raise TypeError("The input must be FlatMapFunction or a callable function")

Review comment:
       a FlatMapFunction
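
   To put the new `map`/`flat_map` API in context, usage would look roughly like the following. The `Types` helper and `from_collection` follow existing PyFlink conventions; treat this as an illustrative sketch rather than the final API surface of this PR.

```python
from pyflink.common.typeinfo import Types
from pyflink.datastream import StreamExecutionEnvironment

env = StreamExecutionEnvironment.get_execution_environment()
ds = env.from_collection([1, 2, 3], type_info=Types.INT())

# plain callable without output type info: results are carried as pickled bytes
doubled = ds.map(lambda x: x * 2)

# callable with explicit output type information
labeled = ds.map(lambda x: "value-%d" % x, type_info=Types.STRING())

# flat_map may return any number of elements per input, including none
repeated = ds.flat_map(lambda x: [x] * x, type_info=Types.INT())
```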




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
us...@infra.apache.org

