tencentcloud.CkafkaDatahubTask
Provides a resource to create a CKafka datahub_task.
Example Usage
import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";
const datahubTask = new tencentcloud.CkafkaDatahubTask("datahubTask", {
    sourceResource: {
        postgreSqlParam: {
            database: "postgres",
            isTableRegular: false,
            keyColumns: "",
            pluginName: "decoderbufs",
            recordWithSchema: false,
            resource: "resource-y9nxnw46",
            snapshotMode: "never",
            table: "*",
        },
        type: "POSTGRESQL",
    },
    targetResource: {
        topicParam: {
            compressionType: "none",
            resource: "1308726196-keep-topic",
            useAutoCreateTopic: false,
        },
        type: "TOPIC",
    },
    taskName: "test-task123321",
    taskType: "SOURCE",
});
import pulumi
import pulumi_tencentcloud as tencentcloud
datahub_task = tencentcloud.CkafkaDatahubTask("datahubTask",
    source_resource={
        "postgre_sql_param": {
            "database": "postgres",
            "is_table_regular": False,
            "key_columns": "",
            "plugin_name": "decoderbufs",
            "record_with_schema": False,
            "resource": "resource-y9nxnw46",
            "snapshot_mode": "never",
            "table": "*",
        },
        "type": "POSTGRESQL",
    },
    target_resource={
        "topic_param": {
            "compression_type": "none",
            "resource": "1308726196-keep-topic",
            "use_auto_create_topic": False,
        },
        "type": "TOPIC",
    },
    task_name="test-task123321",
    task_type="SOURCE")
package main
import (
	"github.com/pulumi/pulumi-terraform-provider/sdks/go/tencentcloud/tencentcloud"
	"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
func main() {
	pulumi.Run(func(ctx *pulumi.Context) error {
		_, err := tencentcloud.NewCkafkaDatahubTask(ctx, "datahubTask", &tencentcloud.CkafkaDatahubTaskArgs{
			SourceResource: &tencentcloud.CkafkaDatahubTaskSourceResourceArgs{
				PostgreSqlParam: &tencentcloud.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs{
					Database:         pulumi.String("postgres"),
					IsTableRegular:   pulumi.Bool(false),
					KeyColumns:       pulumi.String(""),
					PluginName:       pulumi.String("decoderbufs"),
					RecordWithSchema: pulumi.Bool(false),
					Resource:         pulumi.String("resource-y9nxnw46"),
					SnapshotMode:     pulumi.String("never"),
					Table:            pulumi.String("*"),
				},
				Type: pulumi.String("POSTGRESQL"),
			},
			TargetResource: &tencentcloud.CkafkaDatahubTaskTargetResourceArgs{
				TopicParam: &tencentcloud.CkafkaDatahubTaskTargetResourceTopicParamArgs{
					CompressionType:    pulumi.String("none"),
					Resource:           pulumi.String("1308726196-keep-topic"),
					UseAutoCreateTopic: pulumi.Bool(false),
				},
				Type: pulumi.String("TOPIC"),
			},
			TaskName: pulumi.String("test-task123321"),
			TaskType: pulumi.String("SOURCE"),
		})
		if err != nil {
			return err
		}
		return nil
	})
}
using System.Collections.Generic;
using System.Linq;
using Pulumi;
using Tencentcloud = Pulumi.Tencentcloud;
return await Deployment.RunAsync(() => 
{
    var datahubTask = new Tencentcloud.CkafkaDatahubTask("datahubTask", new()
    {
        SourceResource = new Tencentcloud.Inputs.CkafkaDatahubTaskSourceResourceArgs
        {
            PostgreSqlParam = new Tencentcloud.Inputs.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs
            {
                Database = "postgres",
                IsTableRegular = false,
                KeyColumns = "",
                PluginName = "decoderbufs",
                RecordWithSchema = false,
                Resource = "resource-y9nxnw46",
                SnapshotMode = "never",
                Table = "*",
            },
            Type = "POSTGRESQL",
        },
        TargetResource = new Tencentcloud.Inputs.CkafkaDatahubTaskTargetResourceArgs
        {
            TopicParam = new Tencentcloud.Inputs.CkafkaDatahubTaskTargetResourceTopicParamArgs
            {
                CompressionType = "none",
                Resource = "1308726196-keep-topic",
                UseAutoCreateTopic = false,
            },
            Type = "TOPIC",
        },
        TaskName = "test-task123321",
        TaskType = "SOURCE",
    });
});
package generated_program;
import com.pulumi.Context;
import com.pulumi.Pulumi;
import com.pulumi.core.Output;
import com.pulumi.tencentcloud.CkafkaDatahubTask;
import com.pulumi.tencentcloud.CkafkaDatahubTaskArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskSourceResourceArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskTargetResourceArgs;
import com.pulumi.tencentcloud.inputs.CkafkaDatahubTaskTargetResourceTopicParamArgs;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.io.File;
import java.nio.file.Files;
import java.nio.file.Paths;
public class App {
    public static void main(String[] args) {
        Pulumi.run(App::stack);
    }
    public static void stack(Context ctx) {
        var datahubTask = new CkafkaDatahubTask("datahubTask", CkafkaDatahubTaskArgs.builder()
            .sourceResource(CkafkaDatahubTaskSourceResourceArgs.builder()
                .postgreSqlParam(CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs.builder()
                    .database("postgres")
                    .isTableRegular(false)
                    .keyColumns("")
                    .pluginName("decoderbufs")
                    .recordWithSchema(false)
                    .resource("resource-y9nxnw46")
                    .snapshotMode("never")
                    .table("*")
                    .build())
                .type("POSTGRESQL")
                .build())
            .targetResource(CkafkaDatahubTaskTargetResourceArgs.builder()
                .topicParam(CkafkaDatahubTaskTargetResourceTopicParamArgs.builder()
                    .compressionType("none")
                    .resource("1308726196-keep-topic")
                    .useAutoCreateTopic(false)
                    .build())
                .type("TOPIC")
                .build())
            .taskName("test-task123321")
            .taskType("SOURCE")
            .build());
    }
}
resources:
  datahubTask:
    type: tencentcloud:CkafkaDatahubTask
    properties:
      sourceResource:
        postgreSqlParam:
          database: postgres
          isTableRegular: false
          keyColumns: ""
          pluginName: decoderbufs
          recordWithSchema: false
          resource: resource-y9nxnw46
          snapshotMode: never
          table: '*'
        type: POSTGRESQL
      targetResource:
        topicParam:
          compressionType: none
          resource: 1308726196-keep-topic
          useAutoCreateTopic: false
        type: TOPIC
      taskName: test-task123321
      taskType: SOURCE
Create CkafkaDatahubTask Resource
Resources are created with functions called constructors. To learn more about declaring and configuring resources, see Resources.
Constructor syntax
new CkafkaDatahubTask(name: string, args: CkafkaDatahubTaskArgs, opts?: CustomResourceOptions);
@overload
def CkafkaDatahubTask(resource_name: str,
                      args: CkafkaDatahubTaskArgs,
                      opts: Optional[ResourceOptions] = None)
@overload
def CkafkaDatahubTask(resource_name: str,
                      opts: Optional[ResourceOptions] = None,
                      task_name: Optional[str] = None,
                      task_type: Optional[str] = None,
                      ckafka_datahub_task_id: Optional[str] = None,
                      schema_id: Optional[str] = None,
                      source_resource: Optional[CkafkaDatahubTaskSourceResourceArgs] = None,
                      target_resource: Optional[CkafkaDatahubTaskTargetResourceArgs] = None,
                      transform_param: Optional[CkafkaDatahubTaskTransformParamArgs] = None,
                      transforms_param: Optional[CkafkaDatahubTaskTransformsParamArgs] = None)
func NewCkafkaDatahubTask(ctx *Context, name string, args CkafkaDatahubTaskArgs, opts ...ResourceOption) (*CkafkaDatahubTask, error)
public CkafkaDatahubTask(string name, CkafkaDatahubTaskArgs args, CustomResourceOptions? opts = null)
public CkafkaDatahubTask(String name, CkafkaDatahubTaskArgs args)
public CkafkaDatahubTask(String name, CkafkaDatahubTaskArgs args, CustomResourceOptions options)
type: tencentcloud:CkafkaDatahubTask
properties: # The arguments to resource properties.
options: # Bag of options to control resource's behavior.
Parameters
- name string
- The unique name of the resource.
- args CkafkaDatahubTaskArgs
- The arguments to resource properties.
- opts CustomResourceOptions
- Bag of options to control resource's behavior.
- ctx Context
- Context object for the current deployment (Go only).
CkafkaDatahubTask Resource Properties
To learn more about resource properties and how to use them, see Inputs and Outputs in the Architecture and Concepts docs.
Inputs
In Python, inputs that are objects can be passed either as argument classes or as dictionary literals.
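For example, a minimal sketch of the two equivalent forms, assuming the args class is exported from the provider package top level as in the constructor signature above (the field shown comes from the example at the top of this page):
import pulumi_tencentcloud as tencentcloud

# Argument-class form of a source resource input.
source_args = tencentcloud.CkafkaDatahubTaskSourceResourceArgs(type="POSTGRESQL")

# Equivalent dictionary-literal form.
source_dict = {"type": "POSTGRESQL"}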
The CkafkaDatahubTask resource accepts the following input properties:
- TaskName string
- Name of the task.
- TaskType string
- Type of the task: SOURCE (data input) or SINK (data output).
- CkafkaDatahubTaskId string
- ID of the resource.
- SchemaId string
- SchemaId.
- SourceResource CkafkaDatahubTaskSourceResource
- Data resource.
- TargetResource CkafkaDatahubTaskTargetResource
- Target resource.
- TransformParam CkafkaDatahubTaskTransformParam
- Data processing rules.
- TransformsParam CkafkaDatahubTaskTransformsParam
- Data processing rules.
Outputs
All input properties are implicitly available as output properties. Additionally, the CkafkaDatahubTask resource produces the following output properties:
- Id string
- The provider-assigned unique ID for this managed resource.
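For example, a minimal sketch that surfaces the provider-assigned ID as a stack output, assuming the datahub_task resource from the Python example above:
import pulumi

# Export the provider-assigned unique ID of the task.
pulumi.export("datahub_task_id", datahub_task.id)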
Look up Existing CkafkaDatahubTask Resource
Get an existing CkafkaDatahubTask resource’s state with the given name, ID, and optional extra properties used to qualify the lookup.
public static get(name: string, id: Input<ID>, state?: CkafkaDatahubTaskState, opts?: CustomResourceOptions): CkafkaDatahubTask
@staticmethod
def get(resource_name: str,
        id: str,
        opts: Optional[ResourceOptions] = None,
        ckafka_datahub_task_id: Optional[str] = None,
        schema_id: Optional[str] = None,
        source_resource: Optional[CkafkaDatahubTaskSourceResourceArgs] = None,
        target_resource: Optional[CkafkaDatahubTaskTargetResourceArgs] = None,
        task_name: Optional[str] = None,
        task_type: Optional[str] = None,
        transform_param: Optional[CkafkaDatahubTaskTransformParamArgs] = None,
        transforms_param: Optional[CkafkaDatahubTaskTransformsParamArgs] = None) -> CkafkaDatahubTask
func GetCkafkaDatahubTask(ctx *Context, name string, id IDInput, state *CkafkaDatahubTaskState, opts ...ResourceOption) (*CkafkaDatahubTask, error)
public static CkafkaDatahubTask Get(string name, Input<string> id, CkafkaDatahubTaskState? state, CustomResourceOptions? opts = null)
public static CkafkaDatahubTask get(String name, Output<String> id, CkafkaDatahubTaskState state, CustomResourceOptions options)
resources:
  _:
    type: tencentcloud:CkafkaDatahubTask
    get:
      id: ${id}
Parameters
- name
- The unique name of the resulting resource.
- id
- The unique provider ID of the resource to lookup.
- state
- Any extra arguments used during the lookup.
- opts
- A bag of options that control this resource's behavior.
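A minimal sketch of such a lookup in Python ("dip-task-id" is a placeholder ID, not a real task):
import pulumi
import pulumi_tencentcloud as tencentcloud

# Adopt an existing task's state by name and provider ID; nothing new is created.
existing = tencentcloud.CkafkaDatahubTask.get("existing-task", "dip-task-id")

# The looked-up resource exposes the same properties as a created one.
pulumi.export("existing_task_name", existing.task_name)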
The following state arguments are supported:
- CkafkaDatahubTaskId string
- ID of the resource.
- SchemaId string
- SchemaId.
- SourceResource CkafkaDatahubTaskSourceResource
- Data resource.
- TargetResource CkafkaDatahubTaskTargetResource
- Target resource.
- TaskName string
- Name of the task.
- TaskType string
- Type of the task: SOURCE (data input) or SINK (data output).
- TransformParam CkafkaDatahubTaskTransformParam
- Data processing rules.
- TransformsParam CkafkaDatahubTaskTransformsParam
- Data processing rules.
Supporting Types
CkafkaDatahubTaskSourceResource, CkafkaDatahubTaskSourceResourceArgs
- Type string
- Resource type.
- ClickHouseParam CkafkaDatahubTaskSourceResourceClickHouseParam
- ClickHouse configuration, required when Type is CLICKHOUSE.
- ClsParam CkafkaDatahubTaskSourceResourceClsParam
- Cls configuration, required when Type is CLS.
- CosParam CkafkaDatahubTaskSourceResourceCosParam
- Cos configuration, required when Type is COS.
- CtsdbParam CkafkaDatahubTaskSourceResourceCtsdbParam
- Ctsdb configuration, required when Type is CTSDB.
- DtsParam CkafkaDatahubTaskSourceResourceDtsParam
- Dts configuration, required when Type is DTS.
- EsParam CkafkaDatahubTaskSourceResourceEsParam
- Es configuration, required when Type is ES.
- EventBusParam CkafkaDatahubTaskSourceResourceEventBusParam
- EB configuration, required when Type is EB.
- KafkaParam CkafkaDatahubTaskSourceResourceKafkaParam
- Ckafka configuration, required when Type is KAFKA.
- MariaDbParam CkafkaDatahubTaskSourceResourceMariaDbParam
- MariaDB configuration, required when Type is MARIADB.
- MongoDbParam CkafkaDatahubTaskSourceResourceMongoDbParam
- MongoDB configuration, required when Type is MONGODB.
- MySqlParam CkafkaDatahubTaskSourceResourceMySqlParam
- MySQL configuration, required when Type is MYSQL.
- PostgreSqlParam CkafkaDatahubTaskSourceResourcePostgreSqlParam
- PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL C_POSTGRESQL.
- ScfParam CkafkaDatahubTaskSourceResourceScfParam
- Scf configuration, required when Type is SCF.
- SqlServerParam CkafkaDatahubTaskSourceResourceSqlServerParam
- SQLServer configuration, required when Type is SQLSERVER.
- TdwParam CkafkaDatahubTaskSourceResourceTdwParam
- Tdw configuration, required when Type is TDW.
- TopicParam CkafkaDatahubTaskSourceResourceTopicParam
- Topic configuration, required when Type is TOPIC.
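Each Type value selects exactly one matching *Param block. A minimal sketch of a CLS-typed source, using only ClsParam fields documented below (the resource value is a placeholder):
# Hypothetical CLS source: the populated param block must match `type`.
cls_source = {
    "type": "CLS",
    "cls_param": {
        "decode_json": True,         # produced information is in json format
        "resource": "cls-xxxxxxxx",  # placeholder cls id
    },
}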
CkafkaDatahubTaskSourceResourceClickHouseParam, CkafkaDatahubTaskSourceResourceClickHouseParamArgs
- Cluster string
- ClickHouse cluster.
- Database string
- ClickHouse database name.
- Resource string
- Resource id.
- Schemas List<CkafkaDatahubTaskSourceResourceClickHouseParamSchema>
- ClickHouse schema.
- Table string
- ClickHouse table.
- DropCls CkafkaDatahubTaskSourceResourceClickHouseParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- DropInvalidMessage bool
- Whether ClickHouse discards messages that fail to parse; the default is true.
- Ip string
- ClickHouse ip.
- Password string
- ClickHouse password.
- Port double
- ClickHouse port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- Instance vip.
- Type string
- Type of table column.
- UniqVpcId string
- Instance vpc id.
- UserName string
- ClickHouse user name.
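A sketch of a clickHouseParam block assembled from the fields above (all values are placeholders; schemas entries use the Schema type documented below):
# Hypothetical ClickHouse param block; every value is a placeholder.
click_house_param = {
    "cluster": "default_cluster",
    "database": "demo_db",
    "table": "demo_table",
    "resource": "resource-xxxxxxxx",
    "port": 9000,
    "schemas": [{
        "column_name": "id",
        "json_key": "id",
        "type": "Int64",
        "allow_null": False,
    }],
}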
CkafkaDatahubTaskSourceResourceClickHouseParamDropCls, CkafkaDatahubTaskSourceResourceClickHouseParamDropClsArgs
- DropClsLogSet string
- Cls logset id.
- DropClsOwneruin string
- Account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- Cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
CkafkaDatahubTaskSourceResourceClickHouseParamSchema, CkafkaDatahubTaskSourceResourceClickHouseParamSchemaArgs
- AllowNull bool
- Whether the column item is allowed to be empty.
- ColumnName string
- Column name.
- JsonKey string
- The json key name corresponding to this column.
- Type string
- Type of table column.
CkafkaDatahubTaskSourceResourceClsParam, CkafkaDatahubTaskSourceResourceClsParamArgs
- DecodeJson bool
- Whether the produced information is in json format.
- Resource string
- Cls id.
- ContentKey string
- Required when DecodeJson is false.
- LogSet string
- LogSet id.
- TimeField string
- Specify the content of a field in the message as the time of the cls log. The field content must be a second-level timestamp.
CkafkaDatahubTaskSourceResourceCosParam, CkafkaDatahubTaskSourceResourceCosParamArgs
- BucketName string
- Cos bucket name.
- Region string
- Region code.
- AggregateBatchSize double
- The size of aggregated messages, in MB.
- AggregateInterval double
- Time interval.
- DirectoryTimeFormat string
- Partition format, formatted according to strptime time.
- FormatOutputType string
- The file format after message aggregation: csv|json.
- ObjectKey string
- ObjectKey.
- ObjectKeyPrefix string
- Dumped object directory prefix.
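For reference, a sketch of a cosParam block using the fields above (bucket, region, and sizes are placeholders; the interval unit is an assumption, as the docs only say "time interval"):
# Hypothetical COS param block; values are illustrative only.
cos_param = {
    "bucket_name": "demo-bucket-1250000000",
    "region": "ap-guangzhou",
    "aggregate_batch_size": 5,     # aggregated message size in MB
    "aggregate_interval": 60,      # time interval (unit assumed to be seconds)
    "format_output_type": "json",  # csv|json
}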
CkafkaDatahubTaskSourceResourceCtsdbParam, CkafkaDatahubTaskSourceResourceCtsdbParamArgs
- CtsdbMetric string
- Ctsdb metric.
- Resource string
- Resource id.
CkafkaDatahubTaskSourceResourceDtsParam, CkafkaDatahubTaskSourceResourceDtsParamArgs
- Resource string
- Dts instance id.
- GroupId string
- Dts consumer group id.
- GroupPassword string
- Dts consumer group password.
- GroupUser string
- Dts account.
- Ip string
- Dts connection ip.
- Port double
- Dts connection port.
- Topic string
- Dts topic.
- TranSql bool
- False to synchronize the original data, true to synchronize the parsed json format data; the default is true.
CkafkaDatahubTaskSourceResourceEsParam, CkafkaDatahubTaskSourceResourceEsParamArgs
- Resource string
- Resource.
- ContentKey string
- Key for data in non-json format.
- DatabasePrimaryKey string
- When the message dumped to ES is a database binlog and you need to synchronize database operations (insert, delete, update) to ES, fill in the primary key of the database table.
- DateFormat string
- Es date suffix.
- DocumentIdField string
- The field name of the document id value dumped into Es.
- DropCls CkafkaDatahubTaskSourceResourceEsParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- DropDlq CkafkaDatahubTaskSourceResourceEsParamDropDlq
- Dead letter queue.
- DropInvalidJsonMessage bool
- Whether Es discards messages in non-json format.
- DropInvalidMessage bool
- Whether Es discards messages that fail to parse.
- Index string
- Es index name.
- IndexType string
- Es custom index name type: STRING or JSONPATH; the default is STRING.
- Password string
- Es password.
- Port double
- Es connection port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- Instance vip.
- UniqVpcId string
- Instance vpc id.
- UserName string
- Es user name.
CkafkaDatahubTaskSourceResourceEsParamDropCls, CkafkaDatahubTaskSourceResourceEsParamDropClsArgs
- DropClsLogSet string
- Cls logset id.
- DropClsOwneruin string
- Account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- Cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
CkafkaDatahubTaskSourceResourceEsParamDropDlq, CkafkaDatahubTaskSourceResourceEsParamDropDlqArgs                  
- Type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string
- dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- MaxRetryAttempts double
- retry times.
- RetryInterval double
- retry interval.
- TopicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- Type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string
- dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- MaxRetryAttempts float64
- retry times.
- RetryInterval float64
- retry interval.
- TopicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type String
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String
- dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- maxRetryAttempts Double
- retry times.
- retryInterval Double
- retry interval.
- topicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType string
- dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- maxRetryAttempts number
- retry times.
- retryInterval number
- retry interval.
- topicParam CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type str
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlq_type str
- dlq type, CKAFKA|TOPIC.
- kafka_param CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- max_retry_attempts float
- retry times.
- retry_interval float
- retry interval.
- topic_param CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type String
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String
- dlq type, CKAFKA|TOPIC.
- kafkaParam Property Map
- Ckafka type dlq.
- maxRetryAttempts Number
- retry times.
- retryInterval Number
- retry interval.
- topicParam Property Map
- DIP Topic type dead letter queue.
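As a sketch, a TypeScript dropDlq block that retries a few times and then dead-letters to a DIP topic; the retry values are hypothetical, and the interval's unit should be confirmed against the service docs:

dropDlq: {
    type: "DLQ",               // DLQ dead letter queue (vs IGNORE_ERROR|DROP)
    dlqType: "TOPIC",          // CKAFKA|TOPIC
    maxRetryAttempts: 5,       // hypothetical retry count
    retryInterval: 1000,       // hypothetical retry interval
    topicParam: {
        resource: "dlq-topic", // hypothetical topic name
        useAutoCreateTopic: false,
    },
},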
CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParam, CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamArgs                      
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum double
- the partition num of the topic.
- QpsLimit double
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime double
- when Offset type timestamp is required.
- TableMappings List<CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId double
- Zone ID.
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple float64
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum float64
- the partition num of the topic.
- QpsLimit float64
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime float64
- when Offset type timestamp is required.
- TableMappings []CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId float64
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Double
- the partition num of the topic.
- qpsLimit Double
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Double
- when Offset type timestamp is required.
- tableMappings List<CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Double
- Zone ID.
- resource string
- instance resource.
- selfBuilt boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration boolean
- enable dead letter queue.
- msgMultiple number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum number
- the partition num of the topic.
- qpsLimit number
- Qps(query per seconds) limit.
- resourceName string
- instance name.
- startTime number
- when Offset type timestamp is required.
- tableMappings CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping[]
- maps of table to topic, required when multi topic is selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping boolean
- whether to use multi table.
- zoneId number
- Zone ID.
- resource str
- instance resource.
- self_built bool
- whether the cluster is built by yourself instead of cloud product.
- compression_type str
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enable_toleration bool
- enable dead letter queue.
- msg_multiple float
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partition_num float
- the partition num of the topic.
- qps_limit float
- Qps(query per seconds) limit.
- resource_name str
- instance name.
- start_time float
- when Offset type timestamp is required.
- table_mappings Sequence[CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping]
- maps of table to topic, required when multi topic is selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- use_table_mapping bool
- whether to use multi table.
- zone_id float
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Number
- the partition num of the topic.
- qpsLimit Number
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Number
- when Offset type timestamp is required.
- tableMappings List<Property Map>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Number
- Zone ID.
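When the dead letter queue is a CKafka instance instead, the kafkaParam side might look like this TypeScript sketch; the instance ID and topic name are placeholders:

kafkaParam: {
    resource: "ckafka-xxxxxxxx", // hypothetical CKafka instance ID
    topic: "dlq-topic",          // hypothetical topic name
    selfBuilt: false,
    offsetType: "latest",
    useAutoCreateTopic: true,    // auto-create the topic named above
},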
CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMapping, CkafkaDatahubTaskSourceResourceEsParamDropDlqKafkaParamTableMappingArgs                          
CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParam, CkafkaDatahubTaskSourceResourceEsParamDropDlqTopicParamArgs                      
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- MsgMultiple double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- StartTime double
- Required when the Offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- MsgMultiple float64
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- StartTime float64
- Required when the Offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msgMultiple Double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime Double
- Required when the Offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource string
- The topic name of the topic sold separately.
- compressionType string
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msgMultiple number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime number
- Required when the Offset type is timestamp; pass a timestamp accurate to the second.
- topicId string
- Topic TopicId.
- useAutoCreateTopic boolean
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource str
- The topic name of the topic sold separately.
- compression_type str
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg_multiple float
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- start_time float
- Required when the Offset type is timestamp; pass a timestamp accurate to the second.
- topic_id str
- Topic TopicId.
- use_auto_create_topic bool
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msgMultiple Number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime Number
- Required when the Offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
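And the DIP-topic flavor of the dead letter queue, as a TypeScript sketch with placeholder values:

topicParam: {
    resource: "dlq-topic-name", // hypothetical topic name
    compressionType: "none",
    offsetType: "timestamp",
    startTime: 1700000000,      // seconds; required because offsetType is timestamp
    useAutoCreateTopic: false,
},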
CkafkaDatahubTaskSourceResourceEventBusParam, CkafkaDatahubTaskSourceResourceEventBusParamArgs                
- resource str
- instance id.
- self_built bool
- Whether it is a self-built cluster.
- type str
- resource type. EB_COS/EB_ES/EB_CLS.
- function_name str
- SCF function name.
- namespace str
- SCF namespace.
- qualifier str
- SCF version and alias.
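A minimal TypeScript sketch of an eventBusParam that targets an SCF function; the resource ID, function name, namespace, and qualifier are hypothetical:

eventBusParam: {
    type: "EB_COS",                  // EB_COS/EB_ES/EB_CLS
    resource: "eb-xxxxxxxx",         // hypothetical instance id
    selfBuilt: false,
    functionName: "my-scf-function", // hypothetical SCF function
    namespace: "default",
    qualifier: "$LATEST",
},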
CkafkaDatahubTaskSourceResourceKafkaParam, CkafkaDatahubTaskSourceResourceKafkaParamArgs              
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum double
- the partition num of the topic.
- QpsLimit double
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime double
- when Offset type timestamp is required.
- TableMappings List<CkafkaDatahubTaskSourceResourceKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId double
- Zone ID.
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple float64
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum float64
- the partition num of the topic.
- QpsLimit float64
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime float64
- when Offset type timestamp is required.
- TableMappings []CkafkaDatahubTaskSourceResourceKafkaParamTableMapping
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId float64
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Double
- the partition num of the topic.
- qpsLimit Double
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Double
- when Offset type timestamp is required.
- tableMappings List<CkafkaDatahubTaskSourceResourceKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Double
- Zone ID.
- resource string
- instance resource.
- selfBuilt boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration boolean
- enable dead letter queue.
- msgMultiple number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum number
- the partition num of the topic.
- qpsLimit number
- Qps(query per seconds) limit.
- resourceName string
- instance name.
- startTime number
- when Offset type timestamp is required.
- tableMappings CkafkaDatahubTaskSourceResourceKafkaParamTableMapping[]
- maps of table to topic, required when multi topic is selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping boolean
- whether to use multi table.
- zoneId number
- Zone ID.
- resource str
- instance resource.
- self_built bool
- whether the cluster is built by yourself instead of cloud product.
- compression_type str
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enable_toleration bool
- enable dead letter queue.
- msg_multiple float
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partition_num float
- the partition num of the topic.
- qps_limit float
- Qps(query per seconds) limit.
- resource_name str
- instance name.
- start_time float
- when Offset type timestamp is required.
- table_mappings Sequence[CkafkaDatahubTaskSourceResourceKafkaParamTableMapping]
- maps of table to topic, required when multi topic is selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- use_table_mapping bool
- whether to use multi table.
- zone_id float
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Number
- the partition num of the topic.
- qpsLimit Number
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Number
- when Offset type timestamp is required.
- tableMappings List<Property Map>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Number
- Zone ID.
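For orientation, a minimal TypeScript sketch of a kafkaParam used on the source side; the instance ID and topic are placeholders:

kafkaParam: {
    resource: "ckafka-xxxxxxxx", // hypothetical CKafka instance ID
    topic: "source-topic",       // hypothetical topic name
    selfBuilt: false,
    offsetType: "earliest",      // start from the beginning
},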
CkafkaDatahubTaskSourceResourceKafkaParamTableMapping, CkafkaDatahubTaskSourceResourceKafkaParamTableMappingArgs                  
CkafkaDatahubTaskSourceResourceMariaDbParam, CkafkaDatahubTaskSourceResourceMariaDbParamArgs                
- Database string
- MariaDB database name, * for all database.
- Resource string
- MariaDB connection Id.
- Table string
- MariaDB db name, *is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- KeyColumns string
- Format library 1. table 1: field 1, field 2; library 2. table 2: field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SnapshotMode string
- schema_only|initial, default initial.
- Database string
- MariaDB database name, * for all database.
- Resource string
- MariaDB connection Id.
- Table string
- MariaDB db name, *is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- KeyColumns string
- Format library 1. table 1: field 1, field 2; library 2. table 2: field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SnapshotMode string
- schema_only|initial, default initial.
- database String
- MariaDB database name, * for all database.
- resource String
- MariaDB connection Id.
- table String
- MariaDB db name, *is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- includeContentChanges String
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix Boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns String
- Format library 1. table 1: field 1, field 2; library 2. table 2: field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- outputFormat String
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshotMode String
- schema_only|initial, default initial.
- database string
- MariaDB database name, * for all database.
- resource string
- MariaDB connection Id.
- table string
- MariaDB db name, *is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- includeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns string
- Format library 1. table 1: field 1, field 2; library 2. table 2: field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- outputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshotMode string
- schema_only|initial, default initial.
- database str
- MariaDB database name, * for all database.
- resource str
- MariaDB connection Id.
- table str
- MariaDB db name, *is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- include_content_changes str
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_query bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- is_table_prefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- key_columns str
- Format library 1. table 1: field 1, field 2; library 2. table 2: field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output_format str
- output format, DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshot_mode str
- schema_only|initial, default initial.
- database String
- MariaDB database name, * for all database.
- resource String
- MariaDB connection Id.
- table String
- MariaDB db name, *is the non-system table in all the monitored databases, you can use, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name.
- includeContentChanges String
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix Boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns String
- Format library 1. table 1: field 1, field 2; library 2. table 2: field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- outputFormat String
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshotMode String
- schema_only|initial, default initial.
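A minimal TypeScript sketch of a mariaDbParam source; the connection ID, database, and database.table value are placeholders:

mariaDbParam: {
    resource: "resource-xxxxxxxx", // hypothetical MariaDB connection Id
    database: "mydb",
    table: "mydb.orders",          // database name.table name format
    snapshotMode: "initial",
    outputFormat: "DEFAULT",
},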
CkafkaDatahubTaskSourceResourceMongoDbParam, CkafkaDatahubTaskSourceResourceMongoDbParamArgs                
- Collection string
- MongoDB collection.
- CopyExisting bool
- Whether to copy the stock data, the default parameter is true.
- Database string
- MongoDB database name.
- Resource string
- resource id.
- Ip string
- MongoDB connection ip.
- ListeningEvent string
- Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
- Password string
- MongoDB database password.
- Pipeline string
- aggregation pipeline.
- Port double
- MongoDB connection port.
- ReadPreference string
- Master-slave priority, default master node.
- SelfBuilt bool
- Whether it is a self-built cluster.
- UserName string
- MongoDB database user name.
- Collection string
- MongoDB collection.
- CopyExisting bool
- Whether to copy the stock data, the default parameter is true.
- Database string
- MongoDB database name.
- Resource string
- resource id.
- Ip string
- MongoDB connection ip.
- ListeningEvent string
- Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
- Password string
- MongoDB database password.
- Pipeline string
- aggregation pipeline.
- Port float64
- MongoDB connection port.
- ReadPreference string
- Master-slave priority, default master node.
- SelfBuilt bool
- Whether it is a self-built cluster.
- UserName string
- MongoDB database user name.
- collection String
- MongoDB collection.
- copyExisting Boolean
- Whether to copy the stock data, the default parameter is true.
- database String
- MongoDB database name.
- resource String
- resource id.
- ip String
- MongoDB connection ip.
- listeningEvent String
- Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
- password String
- MongoDB database password.
- pipeline String
- aggregation pipeline.
- port Double
- MongoDB connection port.
- readPreference String
- Master-slave priority, default master node.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- userName String
- MongoDB database user name.
- collection string
- MongoDB collection.
- copyExisting boolean
- Whether to copy the stock data, the default parameter is true.
- database string
- MongoDB database name.
- resource string
- resource id.
- ip string
- MongoDB connection ip.
- listeningEvent string
- Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
- password string
- MongoDB database password.
- pipeline string
- aggregation pipeline.
- port number
- MongoDB connection port.
- readPreference string
- Master-slave priority, default master node.
- selfBuilt boolean
- Whether it is a self-built cluster.
- userName string
- MongoDB database user name.
- collection str
- MongoDB collection.
- copy_existing bool
- Whether to copy the stock data, the default parameter is true.
- database str
- MongoDB database name.
- resource str
- resource id.
- ip str
- MongoDB connection ip.
- listening_event str
- Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
- password str
- MongoDB database password.
- pipeline str
- aggregation pipeline.
- port float
- MongoDB connection port.
- read_preference str
- Master-slave priority, default master node.
- self_built bool
- Whether it is a self-built cluster.
- user_name str
- MongoDB database user name.
- collection String
- MongoDB collection.
- copyExisting Boolean
- Whether to copy the stock data, the default parameter is true.
- database String
- MongoDB database name.
- resource String
- resource id.
- ip String
- MongoDB connection ip.
- listeningEvent String
- Listening event type, if it is empty, it means select all. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; separate multiple types with commas.
- password String
- MongoDB database password.
- pipeline String
- aggregation pipeline.
- port Number
- MongoDB connection port.
- readPreference String
- Master-slave priority, default master node.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- userName String
- MongoDB database user name.
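A minimal TypeScript sketch of a mongoDbParam source; every connection detail below is a placeholder:

mongoDbParam: {
    resource: "resource-xxxxxxxx", // hypothetical MongoDB connection id
    database: "mydb",
    collection: "mycollection",
    ip: "10.0.0.10",               // hypothetical connection ip
    port: 27017,
    userName: "mongouser",         // hypothetical credentials
    password: "your-password",
    selfBuilt: false,
    copyExisting: true,            // also copy stock data
},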
CkafkaDatahubTaskSourceResourceMySqlParam, CkafkaDatahubTaskSourceResourceMySqlParamArgs                
- Database string
- MySQL database name, * is the whole database.
- Resource string
- MySQL connection Id.
- Table string
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- DataSourceIncrementColumn string
- the name of the column to be monitored.
- DataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- DataSourceMonitorMode string
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- DataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- DataSourceStartFrom string
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- DataTargetInsertMode string
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- DdlTopic string
- The Topic that stores the Ddl information of MySQL, if it is empty, it will not be stored by default.
- DropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse, the default is true.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- IsTableRegular bool
- Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- KeyColumns string
- Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SignalDatabase string
- database name of signal table.
- SnapshotMode string
- whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- TopicRegex string
- Regular expression for routing events to specific topics, defaults to (.*).
- TopicReplacement string
- TopicRegex, $1, $2.
- Database string
- MySQL database name, * is the whole database.
- Resource string
- MySQL connection Id.
- Table string
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- DataSourceIncrementColumn string
- the name of the column to be monitored.
- DataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- DataSourceMonitorMode string
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- DataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- DataSourceStartFrom string
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- DataTargetInsertMode string
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping
- Mapping relationship between tables and messages.
- DdlTopic string
- The Topic that stores the Ddl information of MySQL, if it is empty, it will not be stored by default.
- DropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse, the default is true.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- IsTableRegular bool
- Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- KeyColumns string
- Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SignalDatabase string
- database name of signal table.
- SnapshotMode string
- whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- TopicRegex string
- Regular expression for routing events to specific topics, defaults to (.*).
- TopicReplacement string
- TopicRegex, $1, $2.
- database String
- MySQL database name, * is the whole database.
- resource String
- MySQL connection Id.
- table String
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- dataSourceIncrementColumn String
- the name of the column to be monitored.
- dataSourceIncrementMode String
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- dataSourceMonitorMode String
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- dataSourceMonitorResource String
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- dataSourceStartFrom String
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- dataTargetInsertMode String
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- ddlTopic String
- The Topic that stores the Ddl information of MySQL, if it is empty, it will not be stored by default.
- dropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse, the default is true.
- includeContentChanges String
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix Boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- isTableRegular Boolean
- Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- keyColumns String
- Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- outputFormat String
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- signalDatabase String
- database name of signal table.
- snapshotMode String
- whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- topicRegex String
- Regular expression for routing events to specific topics, defaults to (.*).
- topicReplacement String
- TopicRegex, $1, $2.
- database string
- MySQL database name, * is the whole database.
- resource string
- MySQL connection Id.
- table string
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- dataSourceIncrementColumn string
- the name of the column to be monitored.
- dataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- dataSourceMonitorMode string
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- dataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- dataSourceStartFrom string
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- dataTargetInsertMode string
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping[]
- Mapping relationship between tables and messages.
- ddlTopic string
- The Topic that stores the Ddl information of MySQL, if it is empty, it will not be stored by default.
- dropCls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropInvalidMessage boolean
- Whether to discard messages that fail to parse, the default is true.
- includeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- isTableRegular boolean
- Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- keyColumns string
- Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- outputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- signalDatabase string
- database name of signal table.
- snapshotMode string
- whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- topicRegex string
- Regular expression for routing events to specific topics, defaults to (.*).
- topicReplacement string
- TopicRegex, $1, $2.
- database str
- MySQL database name, * is the whole database.
- resource str
- MySQL connection Id.
- table str
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- data_source_increment_column str
- the name of the column to be monitored.
- data_source_increment_mode str
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- data_source_monitor_mode str
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- data_source_monitor_resource str
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- data_source_start_from str
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- data_target_insert_mode str
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- data_target_primary_key_field str
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_target_record_mappings Sequence[CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping]
- Mapping relationship between tables and messages.
- ddl_topic str
- The Topic that stores the Ddl information of MySQL, if it is empty, it will not be stored by default.
- drop_cls CkafkaDatahubTaskSourceResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_invalid_message bool
- Whether to discard messages that fail to parse, the default is true.
- include_content_changes str
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_query bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- is_table_prefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- is_table_regular bool
- Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- key_columns str
- Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- output_format str
- output format, DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- signal_database str
- database name of signal table.
- snapshot_mode str
- whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- topic_regex str
- Regular expression for routing events to specific topics, defaults to (.*).
- topic_replacement str
- TopicRegex, $1, $2.
- database String
- MySQL database name, * is the whole database.
- resource String
- MySQL connection Id.
- table String
- The name of the MySQL data table, is the non-system table in all the monitored databases, which can be separated by, to monitor multiple data tables, but the data table needs to be filled in the format of data database name.data table name, when a regular expression needs to be filled in, the format is data database name.data table name.
- dataSourceIncrementColumn String
- the name of the column to be monitored.
- dataSourceIncrementMode String
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- dataSourceMonitorMode String
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- dataSourceMonitorResource String
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- dataSourceStartFrom String
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- dataTargetInsertMode String
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<Property Map>
- Mapping relationship between tables and messages.
- ddlTopic String
- The Topic that stores the Ddl information of MySQL, if it is empty, it will not be stored by default.
- dropCls Property Map
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse, the default is true.
- includeContentChanges String
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, the data flowing into the topic does not contain the original SQL statement.
- isTablePrefix Boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- isTableRegular Boolean
- Whether the input table is a regular expression, if this option and Is Table Prefix are true at the same time, the judgment priority of this option is higher than Is Table Prefix.
- keyColumns String
- Format library1.table1 field 1,field 2;library 2.table2 field 2, between tables; (semicolon) separated, between fields, (comma) separated. The table that is not specified defaults to the primary key of the table.
- outputFormat String
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- signalDatabase String
- database name of signal table.
- snapshotMode String
- whether to Copy inventory information (schema_only does not copy, initial full amount), the default is initial.
- topicRegex String
- Regular expression for routing events to specific topics, defaults to (.*).
- topicReplacement String
- TopicRegex, $1, $2.
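To make the MySQL source fields above concrete, the following is a minimal TypeScript sketch; the connection Id, database, table, and topic names are placeholders, not values from this page:
import * as pulumi from "@pulumi/pulumi";
import * as tencentcloud from "@pulumi/tencentcloud";
// Hypothetical values throughout; replace the resource IDs with your own.
const mysqlSourceTask = new tencentcloud.CkafkaDatahubTask("mysqlSourceTask", {
    taskName: "example-mysql-source",
    taskType: "SOURCE",
    sourceResource: {
        type: "MYSQL",
        mySqlParam: {
            resource: "resource-xxxxxxxx",        // MySQL connection Id (placeholder)
            database: "appdb",
            table: "appdb.orders",                // database name.table name format
            dataSourceMonitorMode: "TABLE",       // read a table rather than a query
            dataSourceMonitorResource: "orders",
            dataSourceIncrementMode: "INCREMENT", // incremental column is an auto-increment id
            dataSourceIncrementColumn: "id",
            dataSourceStartFrom: "HEAD",          // stock + incremental data
        },
    },
    targetResource: {
        type: "TOPIC",
        topicParam: { resource: "example-topic" }, // placeholder topic
    },
});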
CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMapping, CkafkaDatahubTaskSourceResourceMySqlParamDataTargetRecordMappingArgs                        
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
- allowNull boolean
- Whether the message is allowed to be empty.
- autoIncrement boolean
- Whether it is an auto-increment column.
- columnName string
- Column Name.
- columnSize string
- current ColumnSize.
- decimalDigits string
- current Column DecimalDigits.
- defaultValue string
- Database table default parameters.
- extraInfo string
- Database table extra fields.
- jsonKey string
- The key name of the message.
- type string
- message type.
- allow_null bool
- Whether the message is allowed to be empty.
- auto_increment bool
- Whether it is an auto-increment column.
- column_name str
- Column Name.
- column_size str
- current ColumnSize.
- decimal_digits str
- current Column DecimalDigits.
- default_value str
- Database table default parameters.
- extra_info str
- Database table extra fields.
- json_key str
- The key name of the message.
- type str
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
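A single record-mapping entry might look like the following TypeScript sketch; the column name, JSON key, and type are hypothetical:
// Hypothetical mapping: route the JSON key "order_id" into column "id".
const recordMapping = {
    columnName: "id",
    jsonKey: "order_id",
    type: "int",          // message type (placeholder)
    allowNull: false,
    autoIncrement: true,
};
// Passed as one element of mySqlParam.dataTargetRecordMappings (or the
// PostgreSQL equivalent) when the task writes into a database table.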
CkafkaDatahubTaskSourceResourceMySqlParamDropCls, CkafkaDatahubTaskSourceResourceMySqlParamDropClsArgs                    
- DropClsLogSet string
- cls LogSet id.
- DropClsOwneruin string
- account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- DropClsLogSet string
- cls LogSet id.
- DropClsOwneruin string
- account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- dropClsLogSet String
- cls LogSet id.
- dropClsOwneruin String
- account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
- dropClsLogSet string
- cls LogSet id.
- dropClsOwneruin string
- account.
- dropClsRegion string
- The region where the cls is delivered.
- dropClsTopicId string
- cls topic.
- dropInvalidMessageToCls boolean
- Whether to deliver to cls.
- drop_cls_log_set str
- cls LogSet id.
- drop_cls_owneruin str
- account.
- drop_cls_region str
- The region where the cls is delivered.
- drop_cls_topic_id str
- cls topic.
- drop_invalid_message_to_cls bool
- Whether to deliver to cls.
- dropClsLogSet String
- cls LogSet id.
- dropClsOwneruin String
- account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
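A hypothetical TypeScript sketch of a dropCls block (all IDs are placeholders):
// Hypothetical CLS delivery target for unparseable messages.
const dropCls = {
    dropInvalidMessageToCls: true,       // deliver failed messages to CLS
    dropClsRegion: "ap-guangzhou",
    dropClsOwneruin: "100000000001",     // account uin (placeholder)
    dropClsLogSet: "logset-xxxxxxxx",
    dropClsTopicId: "topic-xxxxxxxx",
};
// Assigned to mySqlParam.dropCls; when dropInvalidMessageToCls is true,
// the dropInvalidMessage parameter is ignored.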
CkafkaDatahubTaskSourceResourcePostgreSqlParam, CkafkaDatahubTaskSourceResourcePostgreSqlParamArgs                
- Database string
- PostgreSQL database name.
- PluginName string
- (decoderbufs/pgoutput), default decoderbufs.
- Resource string
- PostgreSQL connection Id.
- Table string
- PostgreSQL table name; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format Schema name.table name; regular expressions use the same Schema name.table name format.
- DataFormat string
- Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- DataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse; the default is true.
- IsTableRegular bool
- Whether the input table is a regular expression.
- KeyColumns string
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string
- never|initial, default initial.
- Database string
- PostgreSQL database name.
- PluginName string
- (decoderbufs/pgoutput), default decoderbufs.
- Resource string
- PostgreSQL connection Id.
- Table string
- PostgreSQL table name; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format Schema name.table name; regular expressions use the same Schema name.table name format.
- DataFormat string
- Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- DataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping
- Mapping relationship between tables and messages.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse; the default is true.
- IsTableRegular bool
- Whether the input table is a regular expression.
- KeyColumns string
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string
- never|initial, default initial.
- database String
- PostgreSQL database name.
- pluginName String
- (decoderbufs/pgoutput), default decoderbufs.
- resource String
- PostgreSQL connection Id.
- table String
- PostgreSQL table name; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format Schema name.table name; regular expressions use the same Schema name.table name format.
- dataFormat String
- Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode String
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse; the default is true.
- isTableRegular Boolean
- Whether the input table is a regular expression.
- keyColumns String
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String
- never|initial, default initial.
- database string
- PostgreSQL database name.
- pluginName string
- (decoderbufs/pgoutput), default decoderbufs.
- resource string
- PostgreSQL connection Id.
- table string
- PostgreSQL table name; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format Schema name.table name; regular expressions use the same Schema name.table name format.
- dataFormat string
- Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping[]
- Mapping relationship between tables and messages.
- dropInvalidMessage boolean
- Whether to discard messages that fail to parse; the default is true.
- isTableRegular boolean
- Whether the input table is a regular expression.
- keyColumns string
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
- recordWithSchema boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode string
- never|initial, default initial.
- database str
- PostgreSQL database name.
- plugin_name str
- (decoderbufs/pgoutput), default decoderbufs.
- resource str
- PostgreSQL connection Id.
- table str
- PostgreSQL table name; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format Schema name.table name; regular expressions use the same Schema name.table name format.
- data_format str
- Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- data_target_insert_mode str
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- data_target_primary_key_field str
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- data_target_record_mappings Sequence[CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping]
- Mapping relationship between tables and messages.
- drop_invalid_message bool
- Whether to discard messages that fail to parse; the default is true.
- is_table_regular bool
- Whether the input table is a regular expression.
- key_columns str
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
- record_with_schema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshot_mode str
- never|initial, default initial.
- database String
- PostgreSQL database name.
- pluginName String
- (decoderbufs/pgoutput), default decoderbufs.
- resource String
- PostgreSQL connection Id.
- table String
- PostgreSQL table name; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format Schema name.table name; regular expressions use the same Schema name.table name format.
- dataFormat String
- Upstream data format (JSON|Debezium), required when the database synchronization mode matches the default field.
- dataTargetInsertMode String
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<Property Map>
- Mapping relationship between tables and messages.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse; the default is true.
- isTableRegular Boolean
- Whether the input table is a regular expression.
- keyColumns String
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by semicolons (;) and fields by commas (,). Tables that are not specified default to the table's primary key.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String
- never|initial, default initial.
CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMapping, CkafkaDatahubTaskSourceResourcePostgreSqlParamDataTargetRecordMappingArgs                        
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
- allowNull boolean
- Whether the message is allowed to be empty.
- autoIncrement boolean
- Whether it is an auto-increment column.
- columnName string
- Column Name.
- columnSize string
- current ColumnSize.
- decimalDigits string
- current Column DecimalDigits.
- defaultValue string
- Database table default parameters.
- extraInfo string
- Database table extra fields.
- jsonKey string
- The key name of the message.
- type string
- message type.
- allow_null bool
- Whether the message is allowed to be empty.
- auto_increment bool
- Whether it is an auto-increment column.
- column_name str
- Column Name.
- column_size str
- current ColumnSize.
- decimal_digits str
- current Column DecimalDigits.
- default_value str
- Database table default parameters.
- extra_info str
- Database table extra fields.
- json_key str
- The key name of the message.
- type str
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
CkafkaDatahubTaskSourceResourceScfParam, CkafkaDatahubTaskSourceResourceScfParamArgs              
- FunctionName string
- SCF function name.
- BatchSize double
- The maximum number of messages sent in each batch, the default is 1000.
- MaxRetries double
- The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- FunctionName string
- SCF function name.
- BatchSize float64
- The maximum number of messages sent in each batch, the default is 1000.
- MaxRetries float64
- The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- functionName String
- SCF function name.
- batchSize Double
- The maximum number of messages sent in each batch, the default is 1000.
- maxRetries Double
- The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
- functionName string
- SCF function name.
- batchSize number
- The maximum number of messages sent in each batch, the default is 1000.
- maxRetries number
- The number of retries after the SCF call fails, the default is 5.
- namespace string
- SCF cloud function namespace, the default is default.
- qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- function_name str
- SCF function name.
- batch_size float
- The maximum number of messages sent in each batch, the default is 1000.
- max_retries float
- The number of retries after the SCF call fails, the default is 5.
- namespace str
- SCF cloud function namespace, the default is default.
- qualifier str
- SCF cloud function version and alias, the default is DEFAULT.
- functionName String
- SCF function name.
- batchSize Number
- The maximum number of messages sent in each batch, the default is 1000.
- maxRetries Number
- The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
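A hypothetical TypeScript sketch of an scfParam block; the function name and namespace are placeholders, and the numeric values simply restate the documented defaults:
// Hypothetical SCF delivery settings.
const scfParam = {
    functionName: "consume-ckafka-messages", // placeholder function name
    namespace: "default",
    qualifier: "DEFAULT",   // function version or alias
    batchSize: 1000,        // max messages per invocation batch (documented default)
    maxRetries: 5,          // retries after a failed SCF call (documented default)
};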
CkafkaDatahubTaskSourceResourceSqlServerParam, CkafkaDatahubTaskSourceResourceSqlServerParamArgs                
- Database string
- SQLServer database name.
- Resource string
- SQLServer connection Id.
- Table string
- SQLServer table; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format database name.table name.
- SnapshotMode string
- schema_only|initial, default initial.
- Database string
- SQLServer database name.
- Resource string
- SQLServer connection Id.
- Table string
- SQLServer table; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format database name.table name.
- SnapshotMode string
- schema_only|initial, default initial.
- database String
- SQLServer database name.
- resource String
- SQLServer connection Id.
- table String
- SQLServer table; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format database name.table name.
- snapshotMode String
- schema_only|initial, default initial.
- database string
- SQLServer database name.
- resource string
- SQLServer connection Id.
- table string
- SQLServer table; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format database name.table name.
- snapshotMode string
- schema_only|initial, default initial.
- database str
- SQLServer database name.
- resource str
- SQLServer connection Id.
- table str
- SQLServer table; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format database name.table name.
- snapshot_mode str
- schema_only|initial, default initial.
- database String
- SQLServer database name.
- resource String
- SQLServer connection Id.
- table String
- SQLServer table; * matches all non-system tables in the monitored databases. Separate multiple tables with commas, each in the format database name.table name.
- snapshotMode String
- schema_only|initial, default initial.
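A hypothetical TypeScript sketch of a SQLServer source block (the connection Id and names are placeholders):
// Hypothetical SQLServer source.
const sqlServerParam = {
    resource: "resource-xxxxxxxx", // SQLServer connection Id (placeholder)
    database: "appdb",
    table: "appdb.orders",         // database name.table name format
    snapshotMode: "initial",       // schema_only | initial
};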
CkafkaDatahubTaskSourceResourceTdwParam, CkafkaDatahubTaskSourceResourceTdwParamArgs              
- Bid string
- Tdw bid.
- Tid string
- Tdw tid.
- IsDomestic bool
- default true.
- TdwHost string
- TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort double
- TDW port, default 8099.
- Bid string
- Tdw bid.
- Tid string
- Tdw tid.
- IsDomestic bool
- default true.
- TdwHost string
- TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort float64
- TDW port, default 8099.
- bid String
- Tdw bid.
- tid String
- Tdw tid.
- isDomestic Boolean
- default true.
- tdwHost String
- TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Double
- TDW port, default 8099.
- bid string
- Tdw bid.
- tid string
- Tdw tid.
- isDomestic boolean
- default true.
- tdwHost string
- TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort number
- TDW port, default 8099.
- bid str
- Tdw bid.
- tid str
- Tdw tid.
- is_domestic bool
- default true.
- tdw_host str
- TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdw_port float
- TDW port, default 8099.
- bid String
- Tdw bid.
- tid String
- Tdw tid.
- isDomestic Boolean
- default true.
- tdwHost String
- TDW address; default tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Number
- TDW port, default 8099.
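A hypothetical TypeScript sketch of a tdwParam block; the bid and tid are placeholders, and the host and port restate the documented defaults:
// Hypothetical TDW settings.
const tdwParam = {
    bid: "b_example",   // placeholder
    tid: "t_example",   // placeholder
    isDomestic: true,
    tdwHost: "tl-tdbank-tdmanager.tencent-distribute.com", // documented default address
    tdwPort: 8099,                                         // documented default port
};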
CkafkaDatahubTaskSourceResourceTopicParam, CkafkaDatahubTaskSourceResourceTopicParamArgs              
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
- OffsetType string
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- StartTime double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- Whether the topic needs to be automatically created (currently only supported for SOURCE inflow tasks).
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- MsgMultiple float64
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
- OffsetType string
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- StartTime float64
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- Whether the topic needs to be automatically created (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Double
- One source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offsetType String
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- startTime Double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- Whether the topic needs to be automatically created (currently only supported for SOURCE inflow tasks).
- resource string
- The topic name of the topic sold separately.
- compressionType string
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple number
- One source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offsetType string
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- startTime number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId string
- Topic TopicId.
- useAutoCreateTopic boolean
- Whether the topic needs to be automatically created (currently only supported for SOURCE inflow tasks).
- resource str
- The topic name of the topic sold separately.
- compression_type str
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msg_multiple float
- One source topic message is amplified into msg_multiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offset_type str
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- start_time float
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topic_id str
- Topic TopicId.
- use_auto_create_topic bool
- Whether the topic needs to be automatically created (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, or lz4 to enable.
- msgMultiple Number
- One source topic message is amplified into msgMultiple messages and written to the target topic (currently only applicable when CKafka flows into CKafka).
- offsetType String
- Offset type: initial position earliest, latest position latest, time point position timestamp.
- startTime Number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- Whether the topic needs to be automatically created (currently only supported for SOURCE inflow tasks).
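The offsetType and startTime fields work together: startTime is only consulted when offsetType is timestamp. A hypothetical TypeScript sketch (the topic name and timestamp are placeholders):
// Hypothetical topic source starting from a point in time.
const topicParam = {
    resource: "example-topic",   // placeholder topic name
    offsetType: "timestamp",     // earliest | latest | timestamp
    startTime: 1700000000,       // required with offsetType "timestamp"; second precision
    compressionType: "none",     // none | gzip | snappy | lz4
    msgMultiple: 1,              // write each source message once
};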
CkafkaDatahubTaskTargetResource, CkafkaDatahubTaskTargetResourceArgs          
- Type string
- Resource Type.
- ClickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
- ClickHouse configuration, required when Type is CLICKHOUSE.
- ClsParam CkafkaDatahubTaskTargetResourceClsParam
- Cls configuration, required when Type is CLS.
- CosParam CkafkaDatahubTaskTargetResourceCosParam
- Cos configuration, required when Type is COS.
- CtsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
- Ctsdb configuration, required when Type is CTSDB.
- DtsParam CkafkaDatahubTaskTargetResourceDtsParam
- Dts configuration, required when Type is DTS.
- EsParam CkafkaDatahubTaskTargetResourceEsParam
- Es configuration, required when Type is ES.
- EventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
- EB configuration, required when Type is EB.
- KafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
- CKafka configuration, required when Type is KAFKA.
- MariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
- MariaDB configuration, required when Type is MARIADB.
- MongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
- MongoDB configuration, required when Type is MONGODB.
- MySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
- MySQL configuration, required when Type is MYSQL.
- PostgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
- PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- ScfParam CkafkaDatahubTaskTargetResourceScfParam
- Scf configuration, required when Type is SCF.
- SqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
- SQLServer configuration, required when Type is SQLSERVER.
- TdwParam CkafkaDatahubTaskTargetResourceTdwParam
- Tdw configuration, required when Type is TDW.
- TopicParam CkafkaDatahubTaskTargetResourceTopicParam
- Topic configuration, required when Type is TOPIC.
- Type string
- Resource Type.
- ClickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
- ClickHouse configuration, required when Type is CLICKHOUSE.
- ClsParam CkafkaDatahubTaskTargetResourceClsParam
- Cls configuration, required when Type is CLS.
- CosParam CkafkaDatahubTaskTargetResourceCosParam
- Cos configuration, required when Type is COS.
- CtsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
- Ctsdb configuration, required when Type is CTSDB.
- DtsParam CkafkaDatahubTaskTargetResourceDtsParam
- Dts configuration, required when Type is DTS.
- EsParam CkafkaDatahubTaskTargetResourceEsParam
- Es configuration, required when Type is ES.
- EventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
- EB configuration, required when Type is EB.
- KafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
- CKafka configuration, required when Type is KAFKA.
- MariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
- MariaDB configuration, required when Type is MARIADB.
- MongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
- MongoDB configuration, required when Type is MONGODB.
- MySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
- MySQL configuration, required when Type is MYSQL.
- PostgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
- PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- ScfParam CkafkaDatahubTaskTargetResourceScfParam
- Scf configuration, required when Type is SCF.
- SqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
- SQLServer configuration, required when Type is SQLSERVER.
- TdwParam CkafkaDatahubTaskTargetResourceTdwParam
- Tdw configuration, required when Type is TDW.
- TopicParam CkafkaDatahubTaskTargetResourceTopicParam
- Topic configuration, required when Type is TOPIC.
- type String
- Resource Type.
- clickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
- ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam CkafkaDatahubTaskTargetResourceClsParam
- Cls configuration, required when Type is CLS.
- cosParam CkafkaDatahubTaskTargetResourceCosParam
- Cos configuration, required when Type is COS.
- ctsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
- Ctsdb configuration, required when Type is CTSDB.
- dtsParam CkafkaDatahubTaskTargetResourceDtsParam
- Dts configuration, required when Type is DTS.
- esParam CkafkaDatahubTaskTargetResourceEsParam
- Es configuration, required when Type is ES.
- eventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
- EB configuration, required when Type is EB.
- kafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
- CKafka configuration, required when Type is KAFKA.
- mariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
- MariaDB configuration, required when Type is MARIADB.
- mongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
- MongoDB configuration, required when Type is MONGODB.
- mySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
- MySQL configuration, required when Type is MYSQL.
- postgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
- PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scfParam CkafkaDatahubTaskTargetResourceScfParam
- Scf configuration, required when Type is SCF.
- sqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
- SQLServer configuration, required when Type is SQLSERVER.
- tdwParam CkafkaDatahubTaskTargetResourceTdwParam
- Tdw configuration, required when Type is TDW.
- topicParam CkafkaDatahubTaskTargetResourceTopicParam
- Topic configuration, required when Type is TOPIC.
- type string
- Resource Type.
- clickHouseParam CkafkaDatahubTaskTargetResourceClickHouseParam
- ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam CkafkaDatahubTaskTargetResourceClsParam
- Cls configuration, required when Type is CLS.
- cosParam CkafkaDatahubTaskTargetResourceCosParam
- Cos configuration, required when Type is COS.
- ctsdbParam CkafkaDatahubTaskTargetResourceCtsdbParam
- Ctsdb configuration, required when Type is CTSDB.
- dtsParam CkafkaDatahubTaskTargetResourceDtsParam
- Dts configuration, required when Type is DTS.
- esParam CkafkaDatahubTaskTargetResourceEsParam
- Es configuration, required when Type is ES.
- eventBusParam CkafkaDatahubTaskTargetResourceEventBusParam
- EB configuration, required when Type is EB.
- kafkaParam CkafkaDatahubTaskTargetResourceKafkaParam
- CKafka configuration, required when Type is KAFKA.
- mariaDbParam CkafkaDatahubTaskTargetResourceMariaDbParam
- MariaDB configuration, required when Type is MARIADB.
- mongoDbParam CkafkaDatahubTaskTargetResourceMongoDbParam
- MongoDB configuration, required when Type is MONGODB.
- mySqlParam CkafkaDatahubTaskTargetResourceMySqlParam
- MySQL configuration, required when Type is MYSQL.
- postgreSqlParam CkafkaDatahubTaskTargetResourcePostgreSqlParam
- PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scfParam CkafkaDatahubTaskTargetResourceScfParam
- Scf configuration, required when Type is SCF.
- sqlServerParam CkafkaDatahubTaskTargetResourceSqlServerParam
- SQLServer configuration, required when Type is SQLSERVER.
- tdwParam CkafkaDatahubTaskTargetResourceTdwParam
- Tdw configuration, required when Type is TDW.
- topicParam CkafkaDatahubTaskTargetResourceTopicParam
- Topic configuration, required when Type is TOPIC.
- type str
- Resource Type.
- click_house_param CkafkaDatahubTaskTargetResourceClickHouseParam
- ClickHouse configuration, required when Type is CLICKHOUSE.
- cls_param CkafkaDatahubTaskTargetResourceClsParam
- Cls configuration, required when Type is CLS.
- cos_param CkafkaDatahubTaskTargetResourceCosParam
- Cos configuration, required when Type is COS.
- ctsdb_param CkafkaDatahubTaskTargetResourceCtsdbParam
- Ctsdb configuration, required when Type is CTSDB.
- dts_param CkafkaDatahubTaskTargetResourceDtsParam
- Dts configuration, required when Type is DTS.
- es_param CkafkaDatahubTaskTargetResourceEsParam
- Es configuration, required when Type is ES.
- event_bus_param CkafkaDatahubTaskTargetResourceEventBusParam
- EB configuration, required when Type is EB.
- kafka_param CkafkaDatahubTaskTargetResourceKafkaParam
- CKafka configuration, required when Type is KAFKA.
- maria_db_param CkafkaDatahubTaskTargetResourceMariaDbParam
- MariaDB configuration, required when Type is MARIADB.
- mongo_db_param CkafkaDatahubTaskTargetResourceMongoDbParam
- MongoDB configuration, required when Type is MONGODB.
- my_sql_param CkafkaDatahubTaskTargetResourceMySqlParam
- MySQL configuration, required when Type is MYSQL.
- postgre_sql_param CkafkaDatahubTaskTargetResourcePostgreSqlParam
- PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scf_param CkafkaDatahubTaskTargetResourceScfParam
- Scf configuration, required when Type is SCF.
- sql_server_param CkafkaDatahubTaskTargetResourceSqlServerParam
- SQLServer configuration, required when Type is SQLSERVER.
- tdw_param CkafkaDatahubTaskTargetResourceTdwParam
- Tdw configuration, required when Type is TDW.
- topic_param CkafkaDatahubTaskTargetResourceTopicParam
- Topic configuration, required when Type is TOPIC.
- type String
- Resource Type.
- clickHouseParam Property Map
- ClickHouse configuration, required when Type is CLICKHOUSE.
- clsParam Property Map
- Cls configuration, required when Type is CLS.
- cosParam Property Map
- Cos configuration, required when Type is COS.
- ctsdbParam Property Map
- Ctsdb configuration, required when Type is CTSDB.
- dtsParam Property Map
- Dts configuration, required when Type is DTS.
- esParam Property Map
- Es configuration, required when Type is ES.
- eventBusParam Property Map
- EB configuration, required when Type is EB.
- kafkaParam Property Map
- CKafka configuration, required when Type is KAFKA.
- mariaDbParam Property Map
- MariaDB configuration, required when Type is MARIADB.
- mongoDbParam Property Map
- MongoDB configuration, required when Type is MONGODB.
- mySqlParam Property Map
- MySQL configuration, required when Type is MYSQL.
- postgreSqlParam Property Map
- PostgreSQL configuration, required when Type is POSTGRESQL or TDSQL_C_POSTGRESQL.
- scfParam Property Map
- Scf configuration, required when Type is SCF.
- sqlServerParam Property Map
- SQLServer configuration, required when Type is SQLSERVER.
- tdwParam Property Map
- Tdw configuration, required when Type is TDW.
- topicParam Property Map
- Topic configuration, required when Type is TOPIC.
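The type field selects which *Param block must be filled in. A hypothetical TypeScript sketch pairing type "ES" with esParam (all connection details are placeholders):
// Hypothetical target resource; the type value must match the param you supply.
const targetResource = {
    type: "ES",
    esParam: {
        resource: "es-xxxxxxxx",   // placeholder ES instance Id
        index: "ckafka-events",
        port: 9200,
        userName: "elastic",
        password: "placeholder",
        selfBuilt: false,
        dropInvalidMessage: true,
    },
};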
CkafkaDatahubTaskTargetResourceClickHouseParam, CkafkaDatahubTaskTargetResourceClickHouseParamArgs                
- Cluster string
- ClickHouse cluster.
- Database string
- ClickHouse database name.
- Resource string
- resource id.
- Schemas List<CkafkaDatahubTaskTargetResourceClickHouseParamSchema>
- ClickHouse schema.
- Table string
- ClickHouse table.
- DropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- DropInvalidMessage bool
- Whether ClickHouse discards the message that fails to parse; the default is true.
- Ip string
- ClickHouse ip.
- Password string
- ClickHouse password.
- Port double
- ClickHouse port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- instance vip.
- Type string
- type of table column.
- UniqVpcId string
- instance vpc id.
- UserName string
- ClickHouse user name.
- Cluster string
- ClickHouse cluster.
- Database string
- ClickHouse database name.
- Resource string
- resource id.
- Schemas []CkafkaDatahubTaskTargetResourceClickHouseParamSchema
- ClickHouse schema.
- Table string
- ClickHouse table.
- DropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- DropInvalidMessage bool
- Whether ClickHouse discards the message that fails to parse; the default is true.
- Ip string
- ClickHouse ip.
- Password string
- ClickHouse password.
- Port float64
- ClickHouse port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- instance vip.
- Type string
- type of table column.
- UniqVpcId string
- instance vpc id.
- UserName string
- ClickHouse user name.
- cluster String
- ClickHouse cluster.
- database String
- ClickHouse database name.
- resource String
- resource id.
- schemas List<CkafkaDatahubTaskTargetResourceClickHouseParamSchema>
- ClickHouse schema.
- table String
- ClickHouse table.
- dropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- dropInvalidMessage Boolean
- Whether ClickHouse discards the message that fails to parse; the default is true.
- ip String
- ClickHouse ip.
- password String
- ClickHouse password.
- port Double
- ClickHouse port.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- serviceVip String
- instance vip.
- type String
- type of table column.
- uniqVpcId String
- instance vpc id.
- userName String
- ClickHouse user name.
- cluster string
- ClickHouse cluster.
- database string
- ClickHouse database name.
- resource string
- resource id.
- schemas CkafkaDatahubTaskTargetResourceClickHouseParamSchema[]
- ClickHouse schema.
- table string
- ClickHouse table.
- dropCls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- dropInvalidMessage boolean
- Whether ClickHouse discards the message that fails to parse; the default is true.
- ip string
- ClickHouse ip.
- password string
- ClickHouse password.
- port number
- ClickHouse port.
- selfBuilt boolean
- Whether it is a self-built cluster.
- serviceVip string
- instance vip.
- type string
- type of table column.
- uniqVpcId string
- instance vpc id.
- userName string
- ClickHouse user name.
- cluster str
- ClickHouse cluster.
- database str
- ClickHouse database name.
- resource str
- resource id.
- schemas Sequence[CkafkaDatahubTaskTargetResourceClickHouseParamSchema]
- ClickHouse schema.
- table str
- ClickHouse table.
- drop_cls CkafkaDatahubTaskTargetResourceClickHouseParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- drop_invalid_message bool
- Whether ClickHouse discards the message that fails to parse; the default is true.
- ip str
- ClickHouse ip.
- password str
- ClickHouse password.
- port float
- ClickHouse port.
- self_built bool
- Whether it is a self-built cluster.
- service_vip str
- instance vip.
- type str
- type of table column.
- uniq_vpc_id str
- instance vpc id.
- user_name str
- ClickHouse user name.
- cluster String
- ClickHouse cluster.
- database String
- ClickHouse database name.
- resource String
- resource id.
- schemas List<Property Map>
- ClickHouse schema.
- table String
- ClickHouse table.
- dropCls Property Map
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- dropInvalidMessage Boolean
- Whether ClickHouse discards the message that fails to parse; the default is true.
- ip String
- ClickHouse ip.
- password String
- ClickHouse password.
- port Number
- ClickHouse port.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- serviceVip String
- instance vip.
- type String
- type of table column.
- uniqVpcId String
- instance vpc id.
- userName String
- ClickHouse user name.
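A hypothetical TypeScript sketch of a ClickHouse target with an explicit column schema (cluster, table, and columns are placeholders):
// Hypothetical ClickHouse sink.
const clickHouseParam = {
    resource: "resource-xxxxxxxx",   // placeholder resource id
    cluster: "default_cluster",
    database: "analytics",
    table: "events",
    schemas: [
        { columnName: "event_id", jsonKey: "id",  type: "UInt64", allowNull: false },
        { columnName: "payload",  jsonKey: "msg", type: "String", allowNull: true },
    ],
    dropInvalidMessage: true,        // discard messages that fail to parse
};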
CkafkaDatahubTaskTargetResourceClickHouseParamDropCls, CkafkaDatahubTaskTargetResourceClickHouseParamDropClsArgs                    
- DropClsLogSet string
- cls LogSet id.
- DropClsOwneruin string
- account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- DropClsLogSet string
- cls LogSet id.
- DropClsOwneruin string
- account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- dropClsLogSet String
- cls LogSet id.
- dropClsOwneruin String
- account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
- dropClsLogSet string
- cls LogSet id.
- dropClsOwneruin string
- account.
- dropClsRegion string
- The region where the cls is delivered.
- dropClsTopicId string
- cls topic.
- dropInvalidMessageToCls boolean
- Whether to deliver to cls.
- drop_cls_log_set str
- cls LogSet id.
- drop_cls_owneruin str
- account.
- drop_cls_region str
- The region where the cls is delivered.
- drop_cls_topic_id str
- cls topic.
- drop_invalid_message_to_cls bool
- Whether to deliver to cls.
- dropClsLogSet String
- cls LogSet id.
- dropClsOwneruin String
- account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
CkafkaDatahubTaskTargetResourceClickHouseParamSchema, CkafkaDatahubTaskTargetResourceClickHouseParamSchemaArgs                  
- AllowNull bool
- Whether the column item is allowed to be empty.
- ColumnName string
- column name.
- JsonKey string
- The json Key name corresponding to this column.
- Type string
- type of table column.
- AllowNull bool
- Whether the column item is allowed to be empty.
- ColumnName string
- column name.
- JsonKey string
- The json Key name corresponding to this column.
- Type string
- type of table column.
- allowNull Boolean
- Whether the column item is allowed to be empty.
- columnName String
- column name.
- jsonKey String
- The json Key name corresponding to this column.
- type String
- type of table column.
- allowNull boolean
- Whether the column item is allowed to be empty.
- columnName string
- column name.
- jsonKey string
- The json Key name corresponding to this column.
- type string
- type of table column.
- allow_null bool
- Whether the column item is allowed to be empty.
- column_name str
- column name.
- json_key str
- The json Key name corresponding to this column.
- type str
- type of table column.
- allowNull Boolean
- Whether the column item is allowed to be empty.
- columnName String
- column name.
- jsonKey String
- The json Key name corresponding to this column.
- type String
- type of table column.
CkafkaDatahubTaskTargetResourceClsParam, CkafkaDatahubTaskTargetResourceClsParamArgs              
- DecodeJson bool
- Whether the produced information is in json format.
- Resource string
- cls id.
- ContentKey string
- Required when DecodeJson is false.
- LogSet string
- LogSet id.
- TimeField string
- Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- DecodeJson bool
- Whether the produced information is in json format.
- Resource string
- cls id.
- ContentKey string
- Required when DecodeJson is false.
- LogSet string
- LogSet id.
- TimeField string
- Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- decodeJson Boolean
- Whether the produced information is in json format.
- resource String
- cls id.
- contentKey String
- Required when DecodeJson is false.
- logSet String
- LogSet id.
- timeField String
- Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- decodeJson boolean
- Whether the produced information is in json format.
- resource string
- cls id.
- contentKey string
- Required when DecodeJson is false.
- logSet string
- LogSet id.
- timeField string
- Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- decode_json bool
- Whether the produced information is in json format.
- resource str
- cls id.
- content_key str
- Required when DecodeJson is false.
- log_set str
- LogSet id.
- time_field str
- Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
- decodeJson Boolean
- Whether the produced information is in json format.
- resource String
- cls id.
- contentKey String
- Required when DecodeJson is false.
- logSet String
- LogSet id.
- timeField String
- Specify the content of a field in the message as the time of the cls log. The format of the field content needs to be a second-level timestamp.
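Because contentKey is only required when decodeJson is false, a non-JSON CLS target might be sketched as follows (all IDs are placeholders):
// Hypothetical CLS sink for non-JSON messages.
const clsParam = {
    resource: "cls-xxxxxxxx",   // CLS id (placeholder)
    logSet: "logset-xxxxxxxx",
    decodeJson: false,
    contentKey: "message",      // required because decodeJson is false
    timeField: "ts",            // second-level timestamp field used as the log time
};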
CkafkaDatahubTaskTargetResourceCosParam, CkafkaDatahubTaskTargetResourceCosParamArgs              
- BucketName string
- cos bucket name.
- Region string
- region code.
- AggregateBatchSize double
- The size of aggregated messages, in MB.
- AggregateInterval double
- time interval.
- DirectoryTimeFormat string
- Partition format formatted according to strptime time.
- FormatOutputType string
- The file format after message aggregation: csv|json.
- ObjectKey string
- ObjectKey.
- ObjectKeyPrefix string
- Dumped object directory prefix.
- BucketName string
- cos bucket name.
- Region string
- region code.
- AggregateBatchSize float64
- The size of aggregated messages, in MB.
- AggregateInterval float64
- time interval.
- DirectoryTimeFormat string
- Partition format formatted according to strptime time.
- FormatOutputType string
- The file format after message aggregation: csv|json.
- ObjectKey string
- ObjectKey.
- ObjectKeyPrefix string
- Dumped object directory prefix.
- bucketName String
- cos bucket name.
- region String
- region code.
- aggregateBatchSize Double
- The size of aggregated messages, in MB.
- aggregateInterval Double
- time interval.
- directoryTimeFormat String
- Partition format formatted according to strptime time.
- formatOutputType String
- The file format after message aggregation: csv|json.
- objectKey String
- ObjectKey.
- objectKeyPrefix String
- Dumped object directory prefix.
- bucketName string
- cos bucket name.
- region string
- region code.
- aggregateBatchSize number
- The size of aggregated messages, in MB.
- aggregateInterval number
- time interval.
- directoryTimeFormat string
- Partition format formatted according to strptime time.
- formatOutputType string
- The file format after message aggregation: csv|json.
- objectKey string
- ObjectKey.
- objectKeyPrefix string
- Dumped object directory prefix.
- bucket_name str
- cos bucket name.
- region str
- region code.
- aggregate_batch_size float
- The size of aggregated messages, in MB.
- aggregate_interval float
- time interval.
- directory_time_format str
- Partition format formatted according to strptime time.
- format_output_type str
- The file format after message aggregation: csv|json.
- object_key str
- ObjectKey.
- object_key_prefix str
- Dumped object directory prefix.
- bucketName String
- cos bucket name.
- region String
- region code.
- aggregateBatchSize Number
- The size of aggregated messages, in MB.
- aggregateInterval Number
- time interval.
- directoryTimeFormat String
- Partition format formatted according to strptime time.
- formatOutputType String
- The file format after message aggregation: csv|json.
- objectKey String
- ObjectKey.
- objectKeyPrefix String
- Dumped object directory prefix.
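A hypothetical TypeScript sketch of a cosParam block; the bucket, sizes, and prefix are placeholders:
// Hypothetical COS sink with message aggregation.
const cosParam = {
    bucketName: "example-bucket-1250000000", // placeholder bucket
    region: "ap-guangzhou",
    formatOutputType: "json",                // csv | json
    aggregateBatchSize: 64,                  // aggregate up to 64 MB per object (placeholder)
    aggregateInterval: 3600,                 // or flush on this time interval (placeholder)
    directoryTimeFormat: "%Y/%m/%d",         // strptime-style partition format
    objectKeyPrefix: "ckafka/dump/",
};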
CkafkaDatahubTaskTargetResourceCtsdbParam, CkafkaDatahubTaskTargetResourceCtsdbParamArgs              
- CtsdbMetric string
- Ctsdb metric.
- Resource string
- resource id.
- CtsdbMetric string
- Ctsdb metric.
- Resource string
- resource id.
- ctsdbMetric String
- Ctsdb metric.
- resource String
- resource id.
- ctsdbMetric string
- Ctsdb metric.
- resource string
- resource id.
- ctsdb_metric str
- Ctsdb metric.
- resource str
- resource id.
- ctsdbMetric String
- Ctsdb metric.
- resource String
- resource id.
CkafkaDatahubTaskTargetResourceDtsParam, CkafkaDatahubTaskTargetResourceDtsParamArgs              
- Resource string
- Dts instance Id.
- GroupId string
- Dts consumer group Id.
- GroupPassword string
- Dts consumer group password.
- GroupUser string
- Dts account.
- Ip string
- Dts connection ip.
- Port double
- Dts connection port.
- Topic string
- Dts topic.
- TranSql bool
- False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- Resource string
- Dts instance Id.
- GroupId string
- Dts consumer group Id.
- GroupPassword string
- Dts consumer group password.
- GroupUser string
- Dts account.
- Ip string
- Dts connection ip.
- Port float64
- Dts connection port.
- Topic string
- Dts topic.
- TranSql bool
- False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- resource String
- Dts instance Id.
- groupId String
- Dts consumer group Id.
- groupPassword String
- Dts consumer group password.
- groupUser String
- Dts account.
- ip String
- Dts connection ip.
- port Double
- Dts connection port.
- topic String
- Dts topic.
- tranSql Boolean
- False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- resource string
- Dts instance Id.
- groupId string
- Dts consumer group Id.
- groupPassword string
- Dts consumer group password.
- groupUser string
- Dts account.
- ip string
- Dts connection ip.
- port number
- Dts connection port.
- topic string
- Dts topic.
- tranSql boolean
- False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- resource str
- Dts instance Id.
- group_id str
- Dts consumer group Id.
- group_password str
- Dts consumer group password.
- group_user str
- Dts account.
- ip str
- Dts connection ip.
- port float
- Dts connection port.
- topic str
- Dts topic.
- tran_sql bool
- False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
- resource String
- Dts instance Id.
- groupId String
- Dts consumer group Id.
- groupPassword String
- Dts consumer group password.
- groupUser String
- Dts account.
- ip String
- Dts connection ip.
- port Number
- Dts connection port.
- topic String
- Dts topic.
- tranSql Boolean
- False to synchronize the original data, true to synchronize the parsed json format data, the default is true.
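A minimal TypeScript sketch of a DTS target block built from the fields above; connection details are placeholders and the "DTS" type value is an assumption:

// Sketch only: pass as targetResource of a CkafkaDatahubTask.
const dtsTarget = {
    type: "DTS", // assumed value
    dtsParam: {
        resource: "dts-xxxxxxxx",   // Dts instance id (placeholder)
        groupId: "consumer-group-1",
        groupUser: "dts-account",
        groupPassword: "REDACTED",  // Dts consumer group password
        ip: "10.0.0.10",
        port: 7009,
        topic: "dts-topic",
        tranSql: true, // true: deliver parsed JSON; false: deliver raw data
    },
};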
CkafkaDatahubTaskTargetResourceEsParam, CkafkaDatahubTaskTargetResourceEsParamArgs              
- Resource string
- Resource.
- ContentKey string
- key for data in non-json format.
- DatabasePrimaryKey string
- When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- DateFormat string
- Es date suffix.
- DocumentIdField string
- The field name of the document ID value dumped into Es.
- DropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- dead letter queue.
- DropInvalidJsonMessage bool
- Whether Es discards messages in non-json format.
- DropInvalidMessage bool
- Whether Es discards the message of parsing failure.
- Index string
- Es index name.
- IndexType string
- Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string
- Es Password.
- Port double
- Es connection port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- instance vip.
- UniqVpcId string
- instance vpc id.
- UserName string
- Es UserName.
- Resource string
- Resource.
- ContentKey string
- key for data in non-json format.
- DatabasePrimaryKey string
- When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- DateFormat string
- Es date suffix.
- DocumentIdField string
- The field name of the document ID value dumped into Es.
- DropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- dead letter queue.
- DropInvalidJsonMessage bool
- Whether Es discards messages in non-json format.
- DropInvalidMessage bool
- Whether Es discards the message of parsing failure.
- Index string
- Es index name.
- IndexType string
- Es custom index name type, STRING, JSONPATH, the default is STRING.
- Password string
- Es Password.
- Port float64
- Es connection port.
- SelfBuilt bool
- Whether it is a self-built cluster.
- ServiceVip string
- instance vip.
- UniqVpcId string
- instance vpc id.
- UserName string
- Es UserName.
- resource String
- Resource.
- contentKey String
- key for data in non-json format.
- databasePrimaryKey String
- When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat String
- Es date suffix.
- documentIdField String
- The field name of the document ID value dumped into Es.
- dropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- dead letter queue.
- dropInvalidJsonMessage Boolean
- Whether Es discards messages in non-json format.
- dropInvalidMessage Boolean
- Whether Es discards the message of parsing failure.
- index String
- Es index name.
- indexType String
- Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String
- Es Password.
- port Double
- Es connection port.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- serviceVip String
- instance vip.
- uniqVpcId String
- instance vpc id.
- userName String
- Es UserName.
- resource string
- Resource.
- contentKey string
- key for data in non-json format.
- databasePrimaryKey string
- When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat string
- Es date suffix.
- documentIdField string
- The field name of the document ID value dumped into Es.
- dropCls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropDlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- dead letter queue.
- dropInvalidJsonMessage boolean
- Whether Es discards messages in non-json format.
- dropInvalidMessage boolean
- Whether Es discards the message of parsing failure.
- index string
- Es index name.
- indexType string
- Es custom index name type, STRING, JSONPATH, the default is STRING.
- password string
- Es Password.
- port number
- Es connection port.
- selfBuilt boolean
- Whether it is a self-built cluster.
- serviceVip string
- instance vip.
- uniqVpcId string
- instance vpc id.
- userName string
- Es UserName.
- resource str
- Resource.
- content_key str
- key for data in non-json format.
- database_primary_key str
- When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- date_format str
- Es date suffix.
- document_id_field str
- The field name of the document ID value dumped into Es.
- drop_cls CkafkaDatahubTaskTargetResourceEsParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- drop_dlq CkafkaDatahubTaskTargetResourceEsParamDropDlq
- dead letter queue.
- drop_invalid_json_message bool
- Whether Es discards messages in non-json format.
- drop_invalid_message bool
- Whether Es discards the message of parsing failure.
- index str
- Es index name.
- index_type str
- Es custom index name type, STRING, JSONPATH, the default is STRING.
- password str
- Es Password.
- port float
- Es connection port.
- self_built bool
- Whether it is a self-built cluster.
- service_vip str
- instance vip.
- uniq_vpc_id str
- instance vpc id.
- user_name str
- Es UserName.
- resource String
- Resource.
- contentKey String
- key for data in non-json format.
- databasePrimaryKey String
- When the message dumped to ES is the binlog of Database, if you need to synchronize database operations, that is, fill in the primary key of the database table when adding, deleting, and modifying operations to ES.
- dateFormat String
- Es date suffix.
- documentIdField String
- The field name of the document ID value dumped into Es.
- dropCls Property Map
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropDlq Property Map
- dead letter queue.
- dropInvalidJsonMessage Boolean
- Whether Es discards messages in non-json format.
- dropInvalidMessage Boolean
- Whether Es discards the message of parsing failure.
- index String
- Es index name.
- indexType String
- Es custom index name type, STRING, JSONPATH, the default is STRING.
- password String
- Es Password.
- port Number
- Es connection port.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- serviceVip String
- instance vip.
- uniqVpcId String
- instance vpc id.
- userName String
- Es UserName.
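A minimal TypeScript sketch of an Elasticsearch target using the fields above; endpoint details are placeholders, the "ES" type value is an assumption, and the dropCls/dropDlq sub-blocks are covered in the sections that follow:

// Sketch only: pass as targetResource of a CkafkaDatahubTask.
const esTarget = {
    type: "ES", // assumed value
    esParam: {
        resource: "es-xxxxxxxx", // placeholder instance id
        index: "ckafka_index",
        indexType: "STRING",     // STRING or JSONPATH; STRING is the default
        port: 9200,
        userName: "elastic",
        password: "REDACTED",
        selfBuilt: false,
        dropInvalidMessage: false,     // keep messages that fail to parse
        dropInvalidJsonMessage: false, // keep non-json messages
    },
};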
CkafkaDatahubTaskTargetResourceEsParamDropCls, CkafkaDatahubTaskTargetResourceEsParamDropClsArgs                  
- DropClsLogSet string
- cls LogSet id.
- DropClsOwneruin string
- account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- DropClsLogSet string
- cls LogSet id.
- DropClsOwneruin string
- account.
- DropClsRegion string
- The region where the cls is delivered.
- DropClsTopicId string
- cls topic.
- DropInvalidMessageToCls bool
- Whether to deliver to cls.
- dropClsLogSet String
- cls LogSet id.
- dropClsOwneruin String
- account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
- dropClsLogSet string
- cls LogSet id.
- dropClsOwneruin string
- account.
- dropClsRegion string
- The region where the cls is delivered.
- dropClsTopicId string
- cls topic.
- dropInvalidMessageToCls boolean
- Whether to deliver to cls.
- drop_cls_log_set str
- cls LogSet id.
- drop_cls_owneruin str
- account.
- drop_cls_region str
- The region where the cls is delivered.
- drop_cls_topic_id str
- cls topic.
- drop_invalid_message_to_cls bool
- Whether to deliver to cls.
- dropClsLogSet String
- cls LogSet id.
- dropClsOwneruin String
- account.
- dropClsRegion String
- The region where the cls is delivered.
- dropClsTopicId String
- cls topic.
- dropInvalidMessageToCls Boolean
- Whether to deliver to cls.
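A sketch of the dropCls sub-block, which routes dropped messages to CLS; every id below is a placeholder:

// Sketch only: nests under esParam (and other *Param blocks that accept dropCls).
const dropCls = {
    dropInvalidMessageToCls: true,   // deliver dropped messages to cls
    dropClsRegion: "ap-guangzhou",
    dropClsOwneruin: "100000000001", // account uin (placeholder)
    dropClsLogSet: "logset-xxxxxxxx",
    dropClsTopicId: "topic-xxxxxxxx",
};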
CkafkaDatahubTaskTargetResourceEsParamDropDlq, CkafkaDatahubTaskTargetResourceEsParamDropDlqArgs                  
- Type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string
- dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- MaxRetryAttempts double
- retry times.
- RetryInterval double
- retry interval.
- TopicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- Type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string
- dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- MaxRetryAttempts float64
- retry times.
- RetryInterval float64
- retry interval.
- TopicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type String
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String
- dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- maxRetryAttempts Double
- retry times.
- retryInterval Double
- retry interval.
- topicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType string
- dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- maxRetryAttempts number
- retry times.
- retryInterval number
- retry interval.
- topicParam CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type str
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlq_type str
- dlq type, CKAFKA|TOPIC.
- kafka_param CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam
- Ckafka type dlq.
- max_retry_attempts float
- retry times.
- retry_interval float
- retry interval.
- topic_param CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam
- DIP Topic type dead letter queue.
- type String
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String
- dlq type, CKAFKA|TOPIC.
- kafkaParam Property Map
- Ckafka type dlq.
- maxRetryAttempts Number
- retry times.
- retryInterval Number
- retry interval.
- topicParam Property Map
- DIP Topic type dead letter queue.
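A sketch of the dropDlq sub-block configuring a TOPIC-type dead letter queue; values are placeholders and retryInterval's unit is not documented here:

// Sketch only: nests under esParam.
const dropDlq = {
    type: "DLQ",      // DLQ dead letter queue; the docs also list IGNORE_ERROR|DROP
    dlqType: "TOPIC", // CKAFKA|TOPIC
    maxRetryAttempts: 3,
    retryInterval: 5, // unit assumed to be seconds
    topicParam: {
        resource: "dlq-topic", // placeholder topic name
        useAutoCreateTopic: false,
    },
};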
CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParam, CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamArgs                      
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum double
- the partition num of the topic.
- QpsLimit double
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime double
- when Offset type timestamp is required.
- TableMappings List<CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId double
- Zone ID.
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple float64
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum float64
- the partition num of the topic.
- QpsLimit float64
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime float64
- when Offset type timestamp is required.
- TableMappings []CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId float64
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Double
- the partition num of the topic.
- qpsLimit Double
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Double
- when Offset type timestamp is required.
- tableMappings List<CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Double
- Zone ID.
- resource string
- instance resource.
- selfBuilt boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration boolean
- enable dead letter queue.
- msgMultiple number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum number
- the partition num of the topic.
- qpsLimit number
- Qps(query per seconds) limit.
- resourceName string
- instance name.
- startTime number
- when Offset type timestamp is required.
- tableMappings CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping[]
- maps of table to topic, required when multi topic is selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping boolean
- whether to use multi table.
- zoneId number
- Zone ID.
- resource str
- instance resource.
- self_built bool
- whether the cluster is built by yourself instead of cloud product.
- compression_type str
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enable_toleration bool
- enable dead letter queue.
- msg_multiple float
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partition_num float
- the partition num of the topic.
- qps_limit float
- Qps(query per seconds) limit.
- resource_name str
- instance name.
- start_time float
- when Offset type timestamp is required.
- table_mappings Sequence[CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping]
- maps of table to topic, required when multi topic is selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- use_table_mapping bool
- whether to use multi table.
- zone_id float
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Number
- the partition num of the topic.
- qpsLimit Number
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Number
- when Offset type timestamp is required.
- tableMappings List<Property Map>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Number
- Zone ID.
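A sketch of the kafkaParam sub-block used when dlqType is CKAFKA; instance and topic names are placeholders:

// Sketch only: nests under dropDlq.
const dlqKafkaParam = {
    resource: "ckafka-xxxxxxxx", // instance id (placeholder)
    selfBuilt: false,
    topic: "dlq-topic",
    offsetType: "earliest",      // earliest | latest | timestamp
    compressionType: "none",
};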
CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMapping, CkafkaDatahubTaskTargetResourceEsParamDropDlqKafkaParamTableMappingArgs                          
CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParam, CkafkaDatahubTaskTargetResourceEsParamDropDlqTopicParamArgs                      
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- MsgMultiple double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- StartTime double
- It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- MsgMultiple float64
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- StartTime float64
- It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- TopicId string
- Topic TopicId.
- UseAutoCreateTopic bool
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msgMultiple Double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime Double
- It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource string
- The topic name of the topic sold separately.
- compressionType string
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msgMultiple number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime number
- It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topicId string
- Topic TopicId.
- useAutoCreateTopic boolean
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource str
- The topic name of the topic sold separately.
- compression_type str
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msg_multiple float
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- start_time float
- It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topic_id str
- Topic TopicId.
- use_auto_create_topic bool
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- msgMultiple Number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- startTime Number
- It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- topicId String
- Topic TopicId.
- useAutoCreateTopic Boolean
- whether the used topic needs to be automatically created (currently only supports SOURCE inflow tasks).
CkafkaDatahubTaskTargetResourceEventBusParam, CkafkaDatahubTaskTargetResourceEventBusParamArgs                
- resource str
- instance id.
- self_built bool
- Whether it is a self-built cluster.
- type str
- resource type. EB_COS/EB_ES/EB_CLS.
- function_name str
- SCF function name.
- namespace str
- SCF namespace.
- qualifier str
- SCF version and alias.
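A minimal TypeScript sketch of an event-bus target pointing at an SCF function; all names are placeholders and the outer "EVENT_BUS" type value is an assumption:

// Sketch only: pass as targetResource of a CkafkaDatahubTask.
const eventBusTarget = {
    type: "EVENT_BUS", // assumed value
    eventBusParam: {
        resource: "eb-xxxxxxxx", // placeholder instance id
        type: "EB_COS",          // EB_COS/EB_ES/EB_CLS
        selfBuilt: false,
        functionName: "my-scf-function",
        namespace: "default",
        qualifier: "$LATEST",    // SCF version or alias
    },
};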
CkafkaDatahubTaskTargetResourceKafkaParam, CkafkaDatahubTaskTargetResourceKafkaParamArgs              
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum double
- the partition num of the topic.
- QpsLimit double
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime double
- when Offset type timestamp is required.
- TableMappings List<CkafkaDatahubTaskTargetResourceKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId double
- Zone ID.
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is built by yourself instead of cloud product.
- CompressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple float64
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- PartitionNum float64
- the partition num of the topic.
- QpsLimit float64
- Qps(query per seconds) limit.
- ResourceName string
- instance name.
- StartTime float64
- when Offset type timestamp is required.
- TableMappings []CkafkaDatahubTaskTargetResourceKafkaParamTableMapping
- maps of table to topic, required when multi topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- UseTableMapping bool
- whether to use multi table.
- ZoneId float64
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Double
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Double
- the partition num of the topic.
- qpsLimit Double
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Double
- when Offset type timestamp is required.
- tableMappings List<CkafkaDatahubTaskTargetResourceKafkaParamTableMapping>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Double
- Zone ID.
- resource string
- instance resource.
- selfBuilt boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType string
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration boolean
- enable dead letter queue.
- msgMultiple number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType string
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum number
- the partition num of the topic.
- qpsLimit number
- Qps(query per seconds) limit.
- resourceName string
- instance name.
- startTime number
- when Offset type timestamp is required.
- tableMappings CkafkaDatahubTaskTargetResourceKafkaParamTableMapping[]
- maps of table to topic, required when multi topic is selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping boolean
- whether to use multi table.
- zoneId number
- Zone ID.
- resource str
- instance resource.
- self_built bool
- whether the cluster is built by yourself instead of cloud product.
- compression_type str
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enable_toleration bool
- enable dead letter queue.
- msg_multiple float
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offset_type str
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partition_num float
- the partition num of the topic.
- qps_limit float
- Qps(query per seconds) limit.
- resource_name str
- instance name.
- start_time float
- when Offset type timestamp is required.
- table_mappings Sequence[CkafkaDatahubTaskTargetResourceKafkaParamTableMapping]
- maps of table to topic, required when multi topic is selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- use_table_mapping bool
- whether to use multi table.
- zone_id float
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is built by yourself instead of cloud product.
- compressionType String
- Whether to compress when writing to the Topic, if it is not enabled, fill in none, if it is enabled, fill in open.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Number
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- offsetType String
- Offset type, from beginning:earliest, from latest:latest, from specific time:timestamp.
- partitionNum Number
- the partition num of the topic.
- qpsLimit Number
- Qps(query per seconds) limit.
- resourceName String
- instance name.
- startTime Number
- when Offset type timestamp is required.
- tableMappings List<Property Map>
- maps of table to topic, required when multi topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Does the used topic need to be automatically created (currently only supports SOURCE inflow tasks, if you do not use to distribute to multiple topics, you need to fill in the topic name that needs to be automatically created in the Topic field).
- useTableMapping Boolean
- whether to use multi table.
- zoneId Number
- Zone ID.
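A minimal TypeScript sketch of a CKafka target built from the fields above; ids are placeholders and the "KAFKA" type value is an assumption:

// Sketch only: pass as targetResource of a CkafkaDatahubTask.
const kafkaTarget = {
    type: "KAFKA", // assumed value
    kafkaParam: {
        resource: "ckafka-xxxxxxxx", // instance id (placeholder)
        selfBuilt: false,
        topic: "target-topic",
        offsetType: "earliest", // earliest | latest | timestamp
        partitionNum: 3,
        useAutoCreateTopic: false,
    },
};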
CkafkaDatahubTaskTargetResourceKafkaParamTableMapping, CkafkaDatahubTaskTargetResourceKafkaParamTableMappingArgs                  
CkafkaDatahubTaskTargetResourceMariaDbParam, CkafkaDatahubTaskTargetResourceMariaDbParamArgs                
- Database string
- MariaDB database name, * for all database.
- Resource string
- MariaDB connection Id.
- Table string
- MariaDB table name. * matches all non-system tables in the monitored databases; multiple tables can be monitored, separated by , (comma), each filled in the format database.table.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SnapshotMode string
- schema_only|initial, default initial.
- Database string
- MariaDB database name, * for all database.
- Resource string
- MariaDB connection Id.
- Table string
- MariaDB table name. * matches all non-system tables in the monitored databases; multiple tables can be monitored, separated by , (comma), each filled in the format database.table.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SnapshotMode string
- schema_only|initial, default initial.
- database String
- MariaDB database name, * for all database.
- resource String
- MariaDB connection Id.
- table String
- MariaDB table name. * matches all non-system tables in the monitored databases; multiple tables can be monitored, separated by , (comma), each filled in the format database.table.
- includeContentChanges String
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns String
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat String
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshotMode String
- schema_only|initial, default initial.
- database string
- MariaDB database name, * for all database.
- resource string
- MariaDB connection Id.
- table string
- MariaDB table name. * matches all non-system tables in the monitored databases; multiple tables can be monitored, separated by , (comma), each filled in the format database.table.
- includeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshotMode string
- schema_only|initial, default initial.
- database str
- MariaDB database name, * for all database.
- resource str
- MariaDB connection Id.
- table str
- MariaDB table name. * matches all non-system tables in the monitored databases; multiple tables can be monitored, separated by , (comma), each filled in the format database.table.
- include_content_changes str
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- include_query bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- is_table_prefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- key_columns str
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- output_format str
- output format, DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshot_mode str
- schema_only|initial, default initial.
- database String
- MariaDB database name, * for all database.
- resource String
- MariaDB connection Id.
- table String
- MariaDB table name. * matches all non-system tables in the monitored databases; multiple tables can be monitored, separated by , (comma), each filled in the format database.table.
- includeContentChanges String
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- keyColumns String
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat String
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- snapshotMode String
- schema_only|initial, default initial.
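A minimal TypeScript sketch of a MariaDB block using the fields above; the connection id is a placeholder and the "MARIADB" type value is an assumption:

// Sketch only: pass as targetResource of a CkafkaDatahubTask.
const mariaDbTarget = {
    type: "MARIADB", // assumed value
    mariaDbParam: {
        resource: "resource-xxxxxxxx", // MariaDB connection id (placeholder)
        database: "mydb",
        table: "orders",               // use database.table format to list multiple tables
        includeContentChanges: "dml",  // all | dml
        outputFormat: "DEFAULT",       // DEFAULT, CANAL_1, CANAL_2
        snapshotMode: "initial",       // schema_only | initial
    },
};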
CkafkaDatahubTaskTargetResourceMongoDbParam, CkafkaDatahubTaskTargetResourceMongoDbParamArgs                
- Collection string
- MongoDB collection.
- CopyExisting bool
- Whether to copy the stock data, the default parameter is true.
- Database string
- MongoDB database name.
- Resource string
- resource id.
- Ip string
- MongoDB connection ip.
- ListeningEvent string
- Listening event type. If empty, all types are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- Password string
- MongoDB database password.
- Pipeline string
- aggregation pipeline.
- Port double
- MongoDB connection port.
- ReadPreference string
- Master-slave priority, default master node.
- SelfBuilt bool
- Whether it is a self-built cluster.
- UserName string
- MongoDB database user name.
- Collection string
- MongoDB collection.
- CopyExisting bool
- Whether to copy the stock data, the default parameter is true.
- Database string
- MongoDB database name.
- Resource string
- resource id.
- Ip string
- MongoDB connection ip.
- ListeningEvent string
- Listening event type. If empty, all types are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- Password string
- MongoDB database password.
- Pipeline string
- aggregation pipeline.
- Port float64
- MongoDB connection port.
- ReadPreference string
- Master-slave priority, default master node.
- SelfBuilt bool
- Whether it is a self-built cluster.
- UserName string
- MongoDB database user name.
- collection String
- MongoDB collection.
- copyExisting Boolean
- Whether to copy the stock data, the default parameter is true.
- database String
- MongoDB database name.
- resource String
- resource id.
- ip String
- MongoDB connection ip.
- listeningEvent String
- Listening event type. If empty, all types are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password String
- MongoDB database password.
- pipeline String
- aggregation pipeline.
- port Double
- MongoDB connection port.
- readPreference String
- Master-slave priority, default master node.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- userName String
- MongoDB database user name.
- collection string
- MongoDB collection.
- copyExisting boolean
- Whether to copy the stock data, the default parameter is true.
- database string
- MongoDB database name.
- resource string
- resource id.
- ip string
- MongoDB connection ip.
- listeningEvent string
- Listening event type. If empty, all types are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password string
- MongoDB database password.
- pipeline string
- aggregation pipeline.
- port number
- MongoDB connection port.
- readPreference string
- Master-slave priority, default master node.
- selfBuilt boolean
- Whether it is a self-built cluster.
- userName string
- MongoDB database user name.
- collection str
- MongoDB collection.
- copy_existing bool
- Whether to copy the stock data, the default parameter is true.
- database str
- MongoDB database name.
- resource str
- resource id.
- ip str
- MongoDB connection ip.
- listening_event str
- Listening event type. If empty, all types are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password str
- MongoDB database password.
- pipeline str
- aggregation pipeline.
- port float
- MongoDB connection port.
- read_preference str
- Master-slave priority, default master node.
- self_built bool
- Whether it is a self-built cluster.
- user_name str
- MongoDB database user name.
- collection String
- MongoDB collection.
- copyExisting Boolean
- Whether to copy the stock data, the default parameter is true.
- database String
- MongoDB database name.
- resource String
- resource id.
- ip String
- MongoDB connection ip.
- listeningEvent String
- Listening event type. If empty, all types are selected. Values include insert, update, replace, delete, invalidate, drop, dropdatabase, rename; multiple types are separated by commas.
- password String
- MongoDB database password.
- pipeline String
- aggregation pipeline.
- port Number
- MongoDB connection port.
- readPreference String
- Master-slave priority, default master node.
- selfBuilt Boolean
- Whether it is a self-built cluster.
- userName String
- MongoDB database user name.
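A minimal TypeScript sketch of a MongoDB block using the fields above; connection details are placeholders and the "MONGODB" type value is an assumption:

// Sketch only: pass as targetResource of a CkafkaDatahubTask.
const mongoDbTarget = {
    type: "MONGODB", // assumed value
    mongoDbParam: {
        resource: "resource-xxxxxxxx", // placeholder connection id
        database: "appdb",
        collection: "events",
        copyExisting: true,            // copy stock data first (default true)
        ip: "10.0.0.20",
        port: 27017,
        userName: "mongo-user",
        password: "REDACTED",
        selfBuilt: false,
        listeningEvent: "insert,update,delete", // empty means all event types
    },
};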
CkafkaDatahubTaskTargetResourceMySqlParam, CkafkaDatahubTaskTargetResourceMySqlParamArgs                
- Database string
- MySQL database name, * is the whole database.
- Resource string
- MySQL connection Id.
- Table string
- The name of the MySQL data table. * matches all non-system tables in the monitored databases. Multiple tables can be monitored, separated by , (comma), each filled in the format database.table; a regular expression is also filled in the format database.table.
- DataSourceIncrementColumn string
- the name of the column to be monitored.
- DataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- DataSourceMonitorMode string
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- DataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- DataSourceStartFrom string
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- DataTargetInsertMode string
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- DdlTopic string
- The Topic that stores the DDL information of MySQL, if it is empty, it will not be stored by default.
- DropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse, the default is true.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- IsTableRegular bool
- Whether the input table is a regular expression; if this option and Is Table Prefix are both true, this option takes priority over Is Table Prefix.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SignalDatabase string
- database name of signal table.
- SnapshotMode string
- whether to copy existing data (schema_only: do not copy; initial: full copy), the default is initial.
- TopicRegex string
- Regular expression for routing events to specific topics, defaults to (.*).
- TopicReplacement string
- TopicRegex, $1, $2.
- Database string
- MySQL database name, * is the whole database.
- Resource string
- MySQL connection Id.
- Table string
- The name of the MySQL data table. * matches all non-system tables in the monitored databases. Multiple tables can be monitored, separated by , (comma), each filled in the format database.table; a regular expression is also filled in the format database.table.
- DataSourceIncrementColumn string
- the name of the column to be monitored.
- DataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- DataSourceMonitorMode string
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- DataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- DataSourceStartFrom string
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- DataTargetInsertMode string
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping
- Mapping relationship between tables and messages.
- DdlTopic string
- The Topic that stores the DDL information of MySQL, if it is empty, it will not be stored by default.
- DropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse, the default is true.
- IncludeContentChanges string
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- IncludeQuery bool
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- IsTablePrefix bool
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- IsTableRegular bool
- Whether the input table is a regular expression; if this option and Is Table Prefix are both true, this option takes priority over Is Table Prefix.
- KeyColumns string
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- OutputFormat string
- output format, DEFAULT, CANAL_1, CANAL_2.
- RecordWithSchema bool
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- SignalDatabase string
- database name of signal table.
- SnapshotMode string
- whether to copy existing data (schema_only: do not copy; initial: full copy), the default is initial.
- TopicRegex string
- Regular expression for routing events to specific topics, defaults to (.*).
- TopicReplacement string
- TopicRegex, $1, $2.
- database String
- MySQL database name, * is the whole database.
- resource String
- MySQL connection Id.
- table String
- The name of the MySQL data table. * matches all non-system tables in the monitored databases. Multiple tables can be monitored, separated by , (comma), each filled in the format database.table; a regular expression is also filled in the format database.table.
- dataSourceIncrementColumn String
- the name of the column to be monitored.
- dataSourceIncrementMode String
- TIMESTAMP indicates that the incremental column is of timestamp type, INCREMENT indicates that the incremental column is of self-incrementing id type.
- dataSourceMonitorMode String
- TABLE indicates that the read item is a table, QUERY indicates that the read item is a query.
- dataSourceMonitorResource String
- When DataMonitorMode=TABLE, pass in the Table that needs to be read; when DataMonitorMode=QUERY, pass in the query sql statement that needs to be read.
- dataSourceStartFrom String
- HEAD means copy stock + incremental data, TAIL means copy only incremental data.
- dataTargetInsertMode String
- INSERT means insert using Insert mode, UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the current upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- ddlTopic String
- The Topic that stores the DDL information of MySQL, if it is empty, it will not be stored by default.
- dropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter Drop Invalid Message To Cls is set to true, the Drop Invalid Message parameter is invalid.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse, the default is true.
- includeContentChanges String
- If the value is all, DDL data and DML data will also be written to the selected topic; if the value is dml, only DML data will be written to the selected topic.
- includeQuery Boolean
- If the value is true, and the value of the binlog rows query log events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean
- When the Table input is a prefix, the value of this item is true, otherwise it is false.
- isTableRegular Boolean
- Whether the input table is a regular expression; if this option and Is Table Prefix are both true, this option takes priority over Is Table Prefix.
- keyColumns String
- Format: database1.table1:field1,field2;database2.table2:field2. Tables are separated by ; (semicolon), fields by , (comma). Tables not specified default to the table's primary key.
- outputFormat String
- output format, DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message will carry the schema corresponding to the message structure, if the value is false, it will not carry.
- signalDatabase String
- database name of signal table.
- snapshotMode String
- whether to copy existing data (schema_only: do not copy; initial: full copy), the default is initial.
- topicRegex String
- Regular expression for routing events to specific topics, defaults to (.*).
- topicReplacement String
- TopicRegex, $1, $2.
- database string
- MySQL database name; * means the whole database.
- resource string
- MySQL connection Id.
- table string
- The name of the MySQL data table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name, and the same format applies when a regular expression is used.
- dataSourceIncrementColumn string
- The name of the column to be monitored.
- dataSourceIncrementMode string
- TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that it is of auto-increment id type.
- dataSourceMonitorMode string
- TABLE indicates that the read object is a table; QUERY indicates that it is a query.
- dataSourceMonitorResource string
- When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be executed.
- dataSourceStartFrom string
- HEAD means copy existing plus incremental data; TAIL means copy only incremental data.
- dataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping[]
- Mapping relationship between tables and messages.
- ddlTopic string
- The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
- dropCls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- dropInvalidMessage boolean
- Whether to discard messages that fail to parse; the default is true.
- includeContentChanges string
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- includeQuery boolean
- If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix boolean
- True when the Table input is a prefix; otherwise false.
- isTableRegular boolean
- Whether the input table is a regular expression; if both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
- keyColumns string
- Format: library1.table1 field1,field2;library2.table2 field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- outputFormat string
- Output format: DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- signalDatabase string
- Database name of the signal table.
- snapshotMode string
- Whether to copy existing data (schema_only does not copy, initial copies the full amount); the default is initial.
- topicRegex string
- Regular expression for routing events to specific topics; defaults to (.*).
- topicReplacement string
- Replacement string used with TopicRegex; capture groups such as $1 and $2 may be referenced.
- database str
- MySQL database name; * means the whole database.
- resource str
- MySQL connection Id.
- table str
- The name of the MySQL data table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name, and the same format applies when a regular expression is used.
- data_source_increment_column str
- The name of the column to be monitored.
- data_source_increment_mode str
- TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that it is of auto-increment id type.
- data_source_monitor_mode str
- TABLE indicates that the read object is a table; QUERY indicates that it is a query.
- data_source_monitor_resource str
- When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be executed.
- data_source_start_from str
- HEAD means copy existing plus incremental data; TAIL means copy only incremental data.
- data_target_insert_mode str
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- data_target_primary_key_field str
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- data_target_record_mappings Sequence[CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping]
- Mapping relationship between tables and messages.
- ddl_topic str
- The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
- drop_cls CkafkaDatahubTaskTargetResourceMySqlParamDropCls
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- drop_invalid_message bool
- Whether to discard messages that fail to parse; the default is true.
- include_content_changes str
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- include_query bool
- If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- is_table_prefix bool
- True when the Table input is a prefix; otherwise false.
- is_table_regular bool
- Whether the input table is a regular expression; if both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
- key_columns str
- Format: library1.table1 field1,field2;library2.table2 field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- output_format str
- Output format: DEFAULT, CANAL_1, CANAL_2.
- record_with_schema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- signal_database str
- Database name of the signal table.
- snapshot_mode str
- Whether to copy existing data (schema_only does not copy, initial copies the full amount); the default is initial.
- topic_regex str
- Regular expression for routing events to specific topics; defaults to (.*).
- topic_replacement str
- Replacement string used with TopicRegex; capture groups such as $1 and $2 may be referenced.
- database String
- MySQL database name; * means the whole database.
- resource String
- MySQL connection Id.
- table String
- The name of the MySQL data table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name, and the same format applies when a regular expression is used.
- dataSourceIncrementColumn String
- The name of the column to be monitored.
- dataSourceIncrementMode String
- TIMESTAMP indicates that the incremental column is of timestamp type; INCREMENT indicates that it is of auto-increment id type.
- dataSourceMonitorMode String
- TABLE indicates that the read object is a table; QUERY indicates that it is a query.
- dataSourceMonitorResource String
- When DataMonitorMode=TABLE, pass in the table to be read; when DataMonitorMode=QUERY, pass in the SQL query statement to be executed.
- dataSourceStartFrom String
- HEAD means copy existing plus incremental data; TAIL means copy only incremental data.
- dataTargetInsertMode String
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- dataTargetRecordMappings List<Property Map>
- Mapping relationship between tables and messages.
- ddlTopic String
- The topic that stores the DDL information of MySQL; if empty, it is not stored by default.
- dropCls Property Map
- When the member parameter DropInvalidMessageToCls is set to true, the DropInvalidMessage parameter is ignored.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse; the default is true.
- includeContentChanges String
- If the value is all, both DDL and DML data are written to the selected topic; if the value is dml, only DML data is written to the selected topic.
- includeQuery Boolean
- If the value is true and the binlog_rows_query_log_events configuration item in MySQL is ON, the data flowing into the topic contains the original SQL statement; if the value is false, it does not.
- isTablePrefix Boolean
- True when the Table input is a prefix; otherwise false.
- isTableRegular Boolean
- Whether the input table is a regular expression; if both this option and IsTablePrefix are true, this option takes precedence over IsTablePrefix.
- keyColumns String
- Format: library1.table1 field1,field2;library2.table2 field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- outputFormat String
- Output format: DEFAULT, CANAL_1, CANAL_2.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- signalDatabase String
- Database name of the signal table.
- snapshotMode String
- Whether to copy existing data (schema_only does not copy, initial copies the full amount); the default is initial.
- topicRegex String
- Regular expression for routing events to specific topics; defaults to (.*).
- topicReplacement String
- Replacement string used with TopicRegex; capture groups such as $1 and $2 may be referenced.
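For orientation, a minimal TypeScript sketch of a task whose target is MySQL, using the mySqlParam fields listed above; the connection id, database, and table names are illustrative placeholders, not values taken from this page:
import * as tencentcloud from "@pulumi/tencentcloud";
const mysqlSink = new tencentcloud.CkafkaDatahubTask("mysqlSink", {
    taskName: "example-mysql-sink",
    taskType: "SINK", // assumed task type for a topic-to-MySQL flow
    sourceResource: {
        type: "TOPIC",
        topicParam: {
            resource: "example-source-topic", // placeholder topic name
        },
    },
    targetResource: {
        type: "MYSQL",
        mySqlParam: {
            resource: "resource-xxxxxxxx",   // placeholder MySQL connection id
            database: "test_db",
            table: "test_db.test_table",     // database name.table name format
            dataTargetInsertMode: "UPSERT",
            dataTargetPrimaryKeyField: "id", // primary key the upsert depends on
            dropInvalidMessage: true,
        },
    },
});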
CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMapping, CkafkaDatahubTaskTargetResourceMySqlParamDataTargetRecordMappingArgs                        
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
- allowNull boolean
- Whether the message is allowed to be empty.
- autoIncrement boolean
- Whether it is an auto-increment column.
- columnName string
- Column Name.
- columnSize string
- current ColumnSize.
- decimalDigits string
- current Column DecimalDigits.
- defaultValue string
- Database table default parameters.
- extraInfo string
- Database table extra fields.
- jsonKey string
- The key name of the message.
- type string
- message type.
- allow_null bool
- Whether the message is allowed to be empty.
- auto_increment bool
- Whether it is an auto-increment column.
- column_name str
- Column Name.
- column_size str
- current ColumnSize.
- decimal_digits str
- current Column DecimalDigits.
- default_value str
- Database table default parameters.
- extra_info str
- Database table extra fields.
- json_key str
- The key name of the message.
- type str
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
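Where a record mapping is needed, the fields above can be combined into a dataTargetRecordMappings list; a hedged sketch with purely illustrative column names and types:
const recordMappings = [
    {
        columnName: "id",   // target column
        jsonKey: "id",      // key in the message that feeds this column
        type: "int",        // illustrative message type
        allowNull: false,
        autoIncrement: true,
    },
    {
        columnName: "payload",
        jsonKey: "data",
        type: "string",
        allowNull: true,
        autoIncrement: false,
    },
];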
CkafkaDatahubTaskTargetResourceMySqlParamDropCls, CkafkaDatahubTaskTargetResourceMySqlParamDropClsArgs                    
- DropClsLogSet string
- CLS logset id.
- DropClsOwneruin string
- Account.
- DropClsRegion string
- The region where the CLS is delivered.
- DropClsTopicId string
- CLS topic id.
- DropInvalidMessageToCls bool
- Whether to deliver to CLS.
- DropClsLogSet string
- CLS logset id.
- DropClsOwneruin string
- Account.
- DropClsRegion string
- The region where the CLS is delivered.
- DropClsTopicId string
- CLS topic id.
- DropInvalidMessageToCls bool
- Whether to deliver to CLS.
- dropClsLogSet String
- CLS logset id.
- dropClsOwneruin String
- Account.
- dropClsRegion String
- The region where the CLS is delivered.
- dropClsTopicId String
- CLS topic id.
- dropInvalidMessageToCls Boolean
- Whether to deliver to CLS.
- dropClsLogSet string
- CLS logset id.
- dropClsOwneruin string
- Account.
- dropClsRegion string
- The region where the CLS is delivered.
- dropClsTopicId string
- CLS topic id.
- dropInvalidMessageToCls boolean
- Whether to deliver to CLS.
- drop_cls_log_set str
- CLS logset id.
- drop_cls_owneruin str
- Account.
- drop_cls_region str
- The region where the CLS is delivered.
- drop_cls_topic_id str
- CLS topic id.
- drop_invalid_message_to_cls bool
- Whether to deliver to CLS.
- dropClsLogSet String
- CLS logset id.
- dropClsOwneruin String
- Account.
- dropClsRegion String
- The region where the CLS is delivered.
- dropClsTopicId String
- CLS topic id.
- dropInvalidMessageToCls Boolean
- Whether to deliver to CLS.
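When DropInvalidMessageToCls is true, unparseable messages are delivered to CLS rather than being governed by DropInvalidMessage; a sketch with placeholder CLS identifiers:
const dropCls = {
    dropInvalidMessageToCls: true,
    dropClsRegion: "ap-guangzhou",    // placeholder region
    dropClsOwneruin: "100000000001",  // placeholder account
    dropClsLogSet: "logset-xxxxxxxx", // placeholder CLS logset id
    dropClsTopicId: "topic-xxxxxxxx", // placeholder CLS topic id
};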
CkafkaDatahubTaskTargetResourcePostgreSqlParam, CkafkaDatahubTaskTargetResourcePostgreSqlParamArgs                
- Database string
- PostgreSQL database name.
- PluginName string
- Plugin name (decoderbufs/pgoutput); the default is decoderbufs.
- Resource string
- PostgreSQL connection Id.
- Table string
- PostgreSQL table name; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format Schema name.table name, and the same format applies when a regular expression is used.
- DataFormat string
- Upstream data format (JSON|Debezium); required when the database synchronization mode is default field matching.
- DataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- DataTargetRecordMappings List<CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse; the default is true.
- IsTableRegular bool
- Whether the input table is a regular expression.
- KeyColumns string
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string
- never|initial; the default is initial.
- Database string
- PostgreSQL database name.
- PluginName string
- Plugin name (decoderbufs/pgoutput); the default is decoderbufs.
- Resource string
- PostgreSQL connection Id.
- Table string
- PostgreSQL table name; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format Schema name.table name, and the same format applies when a regular expression is used.
- DataFormat string
- Upstream data format (JSON|Debezium); required when the database synchronization mode is default field matching.
- DataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- DataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- DataTargetRecordMappings []CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping
- Mapping relationship between tables and messages.
- DropInvalidMessage bool
- Whether to discard messages that fail to parse; the default is true.
- IsTableRegular bool
- Whether the input table is a regular expression.
- KeyColumns string
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- RecordWithSchema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- SnapshotMode string
- never|initial; the default is initial.
- database String
- PostgreSQL database name.
- pluginName String
- Plugin name (decoderbufs/pgoutput); the default is decoderbufs.
- resource String
- PostgreSQL connection Id.
- table String
- PostgreSQL table name; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format Schema name.table name, and the same format applies when a regular expression is used.
- dataFormat String
- Upstream data format (JSON|Debezium); required when the database synchronization mode is default field matching.
- dataTargetInsertMode String
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- dataTargetRecordMappings List<CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping>
- Mapping relationship between tables and messages.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse; the default is true.
- isTableRegular Boolean
- Whether the input table is a regular expression.
- keyColumns String
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String
- never|initial; the default is initial.
- database string
- PostgreSQL database name.
- pluginName string
- Plugin name (decoderbufs/pgoutput); the default is decoderbufs.
- resource string
- PostgreSQL connection Id.
- table string
- PostgreSQL table name; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format Schema name.table name, and the same format applies when a regular expression is used.
- dataFormat string
- Upstream data format (JSON|Debezium); required when the database synchronization mode is default field matching.
- dataTargetInsertMode string
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField string
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- dataTargetRecordMappings CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping[]
- Mapping relationship between tables and messages.
- dropInvalidMessage boolean
- Whether to discard messages that fail to parse; the default is true.
- isTableRegular boolean
- Whether the input table is a regular expression.
- keyColumns string
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- recordWithSchema boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode string
- never|initial; the default is initial.
- database str
- PostgreSQL database name.
- plugin_name str
- Plugin name (decoderbufs/pgoutput); the default is decoderbufs.
- resource str
- PostgreSQL connection Id.
- table str
- PostgreSQL table name; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format Schema name.table name, and the same format applies when a regular expression is used.
- data_format str
- Upstream data format (JSON|Debezium); required when the database synchronization mode is default field matching.
- data_target_insert_mode str
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- data_target_primary_key_field str
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- data_target_record_mappings Sequence[CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping]
- Mapping relationship between tables and messages.
- drop_invalid_message bool
- Whether to discard messages that fail to parse; the default is true.
- is_table_regular bool
- Whether the input table is a regular expression.
- key_columns str
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- record_with_schema bool
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshot_mode str
- never|initial; the default is initial.
- database String
- PostgreSQL database name.
- pluginName String
- Plugin name (decoderbufs/pgoutput); the default is decoderbufs.
- resource String
- PostgreSQL connection Id.
- table String
- PostgreSQL table name; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format Schema name.table name, and the same format applies when a regular expression is used.
- dataFormat String
- Upstream data format (JSON|Debezium); required when the database synchronization mode is default field matching.
- dataTargetInsertMode String
- INSERT means insert using Insert mode; UPSERT means insert using Upsert mode.
- dataTargetPrimaryKeyField String
- When DataInsertMode=UPSERT, pass in the primary key that the upsert depends on.
- dataTargetRecordMappings List<Property Map>
- Mapping relationship between tables and messages.
- dropInvalidMessage Boolean
- Whether to discard messages that fail to parse; the default is true.
- isTableRegular Boolean
- Whether the input table is a regular expression.
- keyColumns String
- Format: library1.table1:field1,field2;library2.table2:field2. Tables are separated by ; (semicolon) and fields by , (comma). Tables that are not specified default to the table's primary key.
- recordWithSchema Boolean
- If the value is true, the message carries the schema corresponding to the message structure; if false, it does not.
- snapshotMode String
- never|initial; the default is initial.
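A sketch of the postgreSqlParam shape when PostgreSQL is the write target (all identifiers are placeholders):
const postgresTarget = {
    type: "POSTGRESQL",
    postgreSqlParam: {
        resource: "resource-xxxxxxxx",  // placeholder PostgreSQL connection id
        database: "postgres",
        table: "public.events",         // Schema name.table name format
        pluginName: "pgoutput",
        dataFormat: "JSON",             // upstream data format
        dataTargetInsertMode: "INSERT",
        dropInvalidMessage: true,
    },
};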
CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMapping, CkafkaDatahubTaskTargetResourcePostgreSqlParamDataTargetRecordMappingArgs                        
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- AllowNull bool
- Whether the message is allowed to be empty.
- AutoIncrement bool
- Whether it is an auto-increment column.
- ColumnName string
- Column Name.
- ColumnSize string
- current ColumnSize.
- DecimalDigits string
- current Column DecimalDigits.
- DefaultValue string
- Database table default parameters.
- ExtraInfo string
- Database table extra fields.
- JsonKey string
- The key name of the message.
- Type string
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
- allowNull boolean
- Whether the message is allowed to be empty.
- autoIncrement boolean
- Whether it is an auto-increment column.
- columnName string
- Column Name.
- columnSize string
- current ColumnSize.
- decimalDigits string
- current Column DecimalDigits.
- defaultValue string
- Database table default parameters.
- extraInfo string
- Database table extra fields.
- jsonKey string
- The key name of the message.
- type string
- message type.
- allow_null bool
- Whether the message is allowed to be empty.
- auto_increment bool
- Whether it is an auto-increment column.
- column_name str
- Column Name.
- column_size str
- current ColumnSize.
- decimal_digits str
- current Column DecimalDigits.
- default_value str
- Database table default parameters.
- extra_info str
- Database table extra fields.
- json_key str
- The key name of the message.
- type str
- message type.
- allowNull Boolean
- Whether the message is allowed to be empty.
- autoIncrement Boolean
- Whether it is an auto-increment column.
- columnName String
- Column Name.
- columnSize String
- current ColumnSize.
- decimalDigits String
- current Column DecimalDigits.
- defaultValue String
- Database table default parameters.
- extraInfo String
- Database table extra fields.
- jsonKey String
- The key name of the message.
- type String
- message type.
CkafkaDatahubTaskTargetResourceScfParam, CkafkaDatahubTaskTargetResourceScfParamArgs              
- FunctionName string
- SCF function name.
- BatchSize double
- The maximum number of messages sent in each batch, the default is 1000.
- MaxRetries double
- The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- FunctionName string
- SCF function name.
- BatchSize float64
- The maximum number of messages sent in each batch, the default is 1000.
- MaxRetries float64
- The number of retries after the SCF call fails, the default is 5.
- Namespace string
- SCF cloud function namespace, the default is default.
- Qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- functionName String
- SCF function name.
- batchSize Double
- The maximum number of messages sent in each batch, the default is 1000.
- maxRetries Double
- The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
- functionName string
- SCF function name.
- batchSize number
- The maximum number of messages sent in each batch, the default is 1000.
- maxRetries number
- The number of retries after the SCF call fails, the default is 5.
- namespace string
- SCF cloud function namespace, the default is default.
- qualifier string
- SCF cloud function version and alias, the default is DEFAULT.
- function_name str
- SCF function name.
- batch_size float
- The maximum number of messages sent in each batch, the default is 1000.
- max_retries float
- The number of retries after the SCF call fails, the default is 5.
- namespace str
- SCF cloud function namespace, the default is default.
- qualifier str
- SCF cloud function version and alias, the default is DEFAULT.
- functionName String
- SCF function name.
- batchSize Number
- The maximum number of messages sent in each batch, the default is 1000.
- maxRetries Number
- The number of retries after the SCF call fails, the default is 5.
- namespace String
- SCF cloud function namespace, the default is default.
- qualifier String
- SCF cloud function version and alias, the default is DEFAULT.
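A sketch of an scfParam target that spells out the documented defaults; the function name is a placeholder:
const scfTarget = {
    type: "SCF",
    scfParam: {
        functionName: "my-consumer-fn", // placeholder SCF function name
        namespace: "default",           // documented default namespace
        qualifier: "DEFAULT",           // documented default version/alias
        batchSize: 1000,                // max messages per batch (default)
        maxRetries: 5,                  // retries after a failed SCF call (default)
    },
};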
CkafkaDatahubTaskTargetResourceSqlServerParam, CkafkaDatahubTaskTargetResourceSqlServerParamArgs                
- Database string
- SQLServer database name.
- Resource string
- SQLServer connection Id.
- Table string
- SQLServer table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name.
- SnapshotMode string
- schema_only|initial; the default is initial.
- Database string
- SQLServer database name.
- Resource string
- SQLServer connection Id.
- Table string
- SQLServer table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name.
- SnapshotMode string
- schema_only|initial; the default is initial.
- database String
- SQLServer database name.
- resource String
- SQLServer connection Id.
- table String
- SQLServer table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name.
- snapshotMode String
- schema_only|initial; the default is initial.
- database string
- SQLServer database name.
- resource string
- SQLServer connection Id.
- table string
- SQLServer table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name.
- snapshotMode string
- schema_only|initial; the default is initial.
- database str
- SQLServer database name.
- resource str
- SQLServer connection Id.
- table str
- SQLServer table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name.
- snapshot_mode str
- schema_only|initial; the default is initial.
- database String
- SQLServer database name.
- resource String
- SQLServer connection Id.
- table String
- SQLServer table; * matches the non-system tables in all monitored databases. Separate multiple tables with , (comma); each table must be written in the format database name.table name.
- snapshotMode String
- schema_only|initial; the default is initial.
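A sketch of the sqlServerParam shape, with placeholder connection and table identifiers:
const sqlServerParam = {
    resource: "resource-xxxxxxxx", // placeholder SQLServer connection id
    database: "test_db",
    table: "test_db.orders",       // database name.table name format
    snapshotMode: "initial",       // or schema_only to skip existing data
};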
CkafkaDatahubTaskTargetResourceTdwParam, CkafkaDatahubTaskTargetResourceTdwParamArgs              
- Bid string
- Tdw bid.
- Tid string
- Tdw tid.
- IsDomestic bool
- default true.
- TdwHost string
- TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort double
- TDW port, default 8099.
- Bid string
- Tdw bid.
- Tid string
- Tdw tid.
- IsDomestic bool
- default true.
- TdwHost string
- TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
- TdwPort float64
- TDW port, default 8099.
- bid String
- Tdw bid.
- tid String
- Tdw tid.
- isDomestic Boolean
- default true.
- tdwHost String
- TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Double
- TDW port, default 8099.
- bid string
- Tdw bid.
- tid string
- Tdw tid.
- isDomestic boolean
- default true.
- tdwHost string
- TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort number
- TDW port, default 8099.
- bid str
- Tdw bid.
- tid str
- Tdw tid.
- is_domestic bool
- default true.
- tdw_host str
- TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
- tdw_port float
- TDW port, default 8099.
- bid String
- Tdw bid.
- tid String
- Tdw tid.
- isDomestic Boolean
- default true.
- tdwHost String
- TDW address; the default is tl-tdbank-tdmanager.tencent-distribute.com.
- tdwPort Number
- TDW port, default 8099.
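A sketch of the tdwParam shape using the documented defaults; bid and tid are placeholders:
const tdwParam = {
    bid: "b_teg_example", // placeholder TDW bid
    tid: "t_example",     // placeholder TDW tid
    isDomestic: true,
    tdwHost: "tl-tdbank-tdmanager.tencent-distribute.com", // documented default
    tdwPort: 8099,                                         // documented default
};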
CkafkaDatahubTaskTargetResourceTopicParam, CkafkaDatahubTaskTargetResourceTopicParamArgs              
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- MsgMultiple double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- StartTime double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- The TopicId of the topic.
- UseAutoCreateTopic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- MsgMultiple float64
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- StartTime float64
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- The TopicId of the topic.
- UseAutoCreateTopic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msgMultiple Double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime Double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- The TopicId of the topic.
- useAutoCreateTopic Boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource string
- The topic name of the topic sold separately.
- compressionType string
- Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msgMultiple number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType string
- Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId string
- The TopicId of the topic.
- useAutoCreateTopic boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource str
- The topic name of the topic sold separately.
- compression_type str
- Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msg_multiple float
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offset_type str
- Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- start_time float
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topic_id str
- The TopicId of the topic.
- use_auto_create_topic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the topic sold separately.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- msgMultiple Number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- startTime Number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- The TopicId of the topic.
- useAutoCreateTopic Boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
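A sketch of a topicParam that starts consumption from a point in time; the topic name and timestamp are placeholders:
const topicParam = {
    resource: "1300000000-example-topic", // placeholder topic name
    offsetType: "timestamp",
    startTime: 1700000000,                // seconds; required when offsetType is timestamp
    compressionType: "lz4",
    msgMultiple: 1,
};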
CkafkaDatahubTaskTransformParam, CkafkaDatahubTaskTransformParamArgs          
- AnalysisFormat string
- Parsing format: JSON | DELIMITER | REGULAR.
- Content string
- Raw data.
- FailureParam CkafkaDatahubTaskTransformParamFailureParam
- Whether to keep data that fails to parse.
- OutputFormat string
- Output format.
- SourceType string
- Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- AnalyseResults List<CkafkaDatahubTaskTransformParamAnalyseResult>
- Analysis result.
- FilterParams List<CkafkaDatahubTaskTransformParamFilterParam>
- Filter.
- MapParams List<CkafkaDatahubTaskTransformParamMapParam>
- Map.
- Regex string
- Delimiter or regular expression.
- Result string
- Test result.
- UseEventBus bool
- Whether the underlying engine uses EB.
- AnalysisFormat string
- Parsing format: JSON | DELIMITER | REGULAR.
- Content string
- Raw data.
- FailureParam CkafkaDatahubTaskTransformParamFailureParam
- Whether to keep data that fails to parse.
- OutputFormat string
- Output format.
- SourceType string
- Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- AnalyseResults []CkafkaDatahubTaskTransformParamAnalyseResult
- Analysis result.
- FilterParams []CkafkaDatahubTaskTransformParamFilterParam
- Filter.
- MapParams []CkafkaDatahubTaskTransformParamMapParam
- Map.
- Regex string
- Delimiter or regular expression.
- Result string
- Test result.
- UseEventBus bool
- Whether the underlying engine uses EB.
- analysisFormat String
- Parsing format: JSON | DELIMITER | REGULAR.
- content String
- Raw data.
- failureParam CkafkaDatahubTaskTransformParamFailureParam
- Whether to keep data that fails to parse.
- outputFormat String
- Output format.
- sourceType String
- Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyseResults List<CkafkaDatahubTaskTransformParamAnalyseResult>
- Analysis result.
- filterParams List<CkafkaDatahubTaskTransformParamFilterParam>
- Filter.
- mapParams List<CkafkaDatahubTaskTransformParamMapParam>
- Map.
- regex String
- Delimiter or regular expression.
- result String
- Test result.
- useEventBus Boolean
- Whether the underlying engine uses EB.
- analysisFormat string
- Parsing format: JSON | DELIMITER | REGULAR.
- content string
- Raw data.
- failureParam CkafkaDatahubTaskTransformParamFailureParam
- Whether to keep data that fails to parse.
- outputFormat string
- Output format.
- sourceType string
- Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyseResults CkafkaDatahubTaskTransformParamAnalyseResult[]
- Analysis result.
- filterParams CkafkaDatahubTaskTransformParamFilterParam[]
- Filter.
- mapParams CkafkaDatahubTaskTransformParamMapParam[]
- Map.
- regex string
- Delimiter or regular expression.
- result string
- Test result.
- useEventBus boolean
- Whether the underlying engine uses EB.
- analysis_format str
- Parsing format: JSON | DELIMITER | REGULAR.
- content str
- Raw data.
- failure_param CkafkaDatahubTaskTransformParamFailureParam
- Whether to keep data that fails to parse.
- output_format str
- Output format.
- source_type str
- Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyse_results Sequence[CkafkaDatahubTaskTransformParamAnalyseResult]
- Analysis result.
- filter_params Sequence[CkafkaDatahubTaskTransformParamFilterParam]
- Filter.
- map_params Sequence[CkafkaDatahubTaskTransformParamMapParam]
- Map.
- regex str
- Delimiter or regular expression.
- result str
- Test result.
- use_event_bus bool
- Whether the underlying engine uses EB.
- analysisFormat String
- Parsing format: JSON | DELIMITER | REGULAR.
- content String
- Raw data.
- failureParam Property Map
- Whether to keep data that fails to parse.
- outputFormat String
- Output format.
- sourceType String
- Data source: TOPIC pulls from the source topic; CUSTOMIZE is custom.
- analyseResults List<Property Map>
- Analysis result.
- filterParams List<Property Map>
- Filter.
- mapParams List<Property Map>
- Map.
- regex String
- Delimiter or regular expression.
- result String
- Test result.
- useEventBus Boolean
- Whether the underlying engine uses EB.
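A sketch of a transformParam that parses JSON pulled from the source topic and routes parse failures to a dead letter queue; the sample content and retry values are illustrative:
const transformParam = {
    analysisFormat: "JSON",
    sourceType: "TOPIC",
    content: "{\"id\": 1, \"name\": \"test\"}", // illustrative raw data for the parse test
    outputFormat: "JSON",
    failureParam: {
        type: "DLQ",
        dlqType: "TOPIC", // a matching topicParam would normally accompany this
        maxRetryAttempts: 3,
        retryInterval: 100,
    },
};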
CkafkaDatahubTaskTransformParamAnalyseResult, CkafkaDatahubTaskTransformParamAnalyseResultArgs              
CkafkaDatahubTaskTransformParamFailureParam, CkafkaDatahubTaskTransformParamFailureParamArgs              
- Type string
- Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- DlqType string
- DLQ type: CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
- CKafka type DLQ.
- MaxRetryAttempts double
- Number of retries.
- RetryInterval double
- Retry interval.
- TopicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- Type string
- Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- DlqType string
- DLQ type: CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
- CKafka type DLQ.
- MaxRetryAttempts float64
- Number of retries.
- RetryInterval float64
- Retry interval.
- TopicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type String
- Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlqType String
- DLQ type: CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
- CKafka type DLQ.
- maxRetryAttempts Double
- Number of retries.
- retryInterval Double
- Retry interval.
- topicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type string
- Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlqType string
- DLQ type: CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskTransformParamFailureParamKafkaParam
- CKafka type DLQ.
- maxRetryAttempts number
- Number of retries.
- retryInterval number
- Retry interval.
- topicParam CkafkaDatahubTaskTransformParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type str
- Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlq_type str
- DLQ type: CKAFKA|TOPIC.
- kafka_param CkafkaDatahubTaskTransformParamFailureParamKafkaParam
- CKafka type DLQ.
- max_retry_attempts float
- Number of retries.
- retry_interval float
- Retry interval.
- topic_param CkafkaDatahubTaskTransformParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type String
- Type: DLQ (dead letter queue), IGNORE_ERROR, or DROP.
- dlqType String
- DLQ type: CKAFKA|TOPIC.
- kafkaParam Property Map
- CKafka type DLQ.
- maxRetryAttempts Number
- Number of retries.
- retryInterval Number
- Retry interval.
- topicParam Property Map
- DIP Topic type dead letter queue.
CkafkaDatahubTaskTransformParamFailureParamKafkaParam, CkafkaDatahubTaskTransformParamFailureParamKafkaParamArgs                  
- Resource string
- Instance resource.
- SelfBuilt bool
- Whether the cluster is self-built instead of a cloud product.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- EnableToleration bool
- Enable the dead letter queue.
- MsgMultiple double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest to start from the beginning, latest to start from the latest, timestamp to start from a specific time.
- PartitionNum double
- The number of partitions of the topic.
- QpsLimit double
- QPS (queries per second) limit.
- ResourceName string
- Instance name.
- StartTime double
- Required when the offset type is timestamp.
- TableMappings List<CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping>
- Maps of table to topic; required when multi-topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool
- Whether to use multiple tables.
- ZoneId double
- Zone ID.
- Resource string
- Instance resource.
- SelfBuilt bool
- Whether the cluster is self-built instead of a cloud product.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- EnableToleration bool
- Enable the dead letter queue.
- MsgMultiple float64
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest to start from the beginning, latest to start from the latest, timestamp to start from a specific time.
- PartitionNum float64
- The number of partitions of the topic.
- QpsLimit float64
- QPS (queries per second) limit.
- ResourceName string
- Instance name.
- StartTime float64
- Required when the offset type is timestamp.
- TableMappings []CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping
- Maps of table to topic; required when multi-topic is selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- UseTableMapping bool
- Whether to use multiple tables.
- ZoneId float64
- Zone ID.
- resource String
- Instance resource.
- selfBuilt Boolean
- Whether the cluster is self-built instead of a cloud product.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration Boolean
- Enable the dead letter queue.
- msgMultiple Double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest to start from the beginning, latest to start from the latest, timestamp to start from a specific time.
- partitionNum Double
- The number of partitions of the topic.
- qpsLimit Double
- QPS (queries per second) limit.
- resourceName String
- Instance name.
- startTime Double
- Required when the offset type is timestamp.
- tableMappings List<CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping>
- Maps of table to topic; required when multi-topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean
- Whether to use multiple tables.
- zoneId Double
- Zone ID.
- resource string
- Instance resource.
- selfBuilt boolean
- Whether the cluster is self-built instead of a cloud product.
- compressionType string
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration boolean
- Enable the dead letter queue.
- msgMultiple number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType string
- Offset type: earliest to start from the beginning, latest to start from the latest, timestamp to start from a specific time.
- partitionNum number
- The number of partitions of the topic.
- qpsLimit number
- QPS (queries per second) limit.
- resourceName string
- Instance name.
- startTime number
- Required when the offset type is timestamp.
- tableMappings CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping[]
- Maps of table to topic; required when multi-topic is selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping boolean
- Whether to use multiple tables.
- zoneId number
- Zone ID.
- resource str
- Instance resource.
- self_built bool
- Whether the cluster is self-built instead of a cloud product.
- compression_type str
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enable_toleration bool
- Enable the dead letter queue.
- msg_multiple float
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offset_type str
- Offset type: earliest to start from the beginning, latest to start from the latest, timestamp to start from a specific time.
- partition_num float
- The number of partitions of the topic.
- qps_limit float
- QPS (queries per second) limit.
- resource_name str
- Instance name.
- start_time float
- Required when the offset type is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping]
- Maps of table to topic; required when multi-topic is selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- use_table_mapping bool
- Whether to use multiple tables.
- zone_id float
- Zone ID.
- resource String
- Instance resource.
- selfBuilt Boolean
- Whether the cluster is self-built instead of a cloud product.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration Boolean
- Enable the dead letter queue.
- msgMultiple Number
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest to start from the beginning, latest to start from the latest, timestamp to start from a specific time.
- partitionNum Number
- The number of partitions of the topic.
- qpsLimit Number
- QPS (queries per second) limit.
- resourceName String
- Instance name.
- startTime Number
- Required when the offset type is timestamp.
- tableMappings List<Property Map>
- Maps of table to topic; required when multi-topic is selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you do not distribute to multiple topics, fill in the name of the topic to be created automatically in the Topic field).
- useTableMapping Boolean
- Whether to use multiple tables.
- zoneId Number
- Zone ID.
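A sketch of the kafkaParam shape for a CKAFKA-type dead letter queue; the instance id and topic name are placeholders:
const dlqKafkaParam = {
    resource: "ckafka-xxxxxxxx", // placeholder CKafka instance id
    selfBuilt: false,
    topic: "dlq-topic",          // placeholder dead letter topic name
    offsetType: "earliest",
    compressionType: "none",
    useAutoCreateTopic: true,    // auto-create the topic named above
};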
CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMapping, CkafkaDatahubTaskTransformParamFailureParamKafkaParamTableMappingArgs                      
CkafkaDatahubTaskTransformParamFailureParamTopicParam, CkafkaDatahubTaskTransformParamFailureParamTopicParamArgs                  
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable, or choose one of gzip, snappy, or lz4 to enable.
- MsgMultiple double
- One source topic message is amplified into MsgMultiple messages and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest for the initial position, latest for the latest position, timestamp for a point in time.
- StartTime double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- The TopicId of the topic.
- UseAutoCreateTopic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string
- The topic name of the topic sold separately.
- CompressionType string
- Whether to perform compression when writing a topic, if it is not enabled, fill in none, if it is enabled, you can choose one of gzip, snappy, lz4 to fill in.
- MsgMultiple float64
- 1 source topic message is amplified into msg Multiple and written to the target topic (this parameter is currently only applicable to ckafka flowing into ckafka).
- OffsetType string
- Offset type, initial position earliest, latest position latest, time point position timestamp.
- StartTime float64
- It must be passed when the Offset type is timestamp, and the time stamp is passed, accurate to the second.
- TopicId string
- Topic TopicId.
- UseAuto boolCreate Topic 
- whether the used topic need to be automatically created (currently only supports SOURCE inflow tasks).
- resource String
- The topic name of the separately sold (standalone) topic.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msgMultiple Double
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- startTime Double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource string
- The topic name of the separately sold (standalone) topic.
- compressionType string
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msgMultiple number
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType string
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- startTime number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource str
- The topic name of the separately sold (standalone) topic.
- compression_type str
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msg_multiple float
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offset_type str
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- start_time float
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the separately sold (standalone) topic.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msgMultiple Number
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- startTime Number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
CkafkaDatahubTaskTransformParamFilterParam, CkafkaDatahubTaskTransformParamFilterParamArgs              
- key str
- Key.
- match_mode str
- Matching mode: PREFIX (prefix match), SUFFIX (suffix match), CONTAINS (contains match), EXCEPT (exclusion match), NUMBER (numeric match), IP (IP match).
- value str
- Value.
- type str
- REGULAR.
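For orientation, here is a minimal TypeScript sketch of a filter entry with the fields listed above; the key and value are illustrative, not provider defaults.
// Hypothetical filter entry: drop handling for messages whose "level"
// key starts with "debug". Field names follow the listing above.
const filterParam = {
    key: "level",          // key to match on
    matchMode: "PREFIX",   // PREFIX | SUFFIX | CONTAINS | EXCEPT | NUMBER | IP
    value: "debug",        // value compared under matchMode
    type: "REGULAR",
};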
CkafkaDatahubTaskTransformParamMapParam, CkafkaDatahubTaskTransformParamMapParamArgs              
CkafkaDatahubTaskTransformsParam, CkafkaDatahubTaskTransformsParamArgs          
- Content string
- Raw data.
- FieldChains List<CkafkaDatahubTaskTransformsParamFieldChain>
- processing chain.
- BatchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
- data process.
- FailureParam CkafkaDatahubTaskTransformsParamFailureParam
- fail process.
- FilterParams List<CkafkaDatahubTaskTransformsParamFilterParam>
- filter.
- KeepMetadata bool
- Whether to keep the data source Topic metadata information (source Topic, Partition, Offset), the default is false.
- OutputFormat string
- output format, JSON, ROW, default JSON.
- Result string
- result.
- RowParam CkafkaDatahubTaskTransformsParamRowParam
- Required when the output format is ROW.
- SourceType string
- data source.
- Content string
- Raw data.
- FieldChains []CkafkaDatahubTaskTransformsParamFieldChain
- processing chain.
- BatchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
- data process.
- FailureParam CkafkaDatahubTaskTransformsParamFailureParam
- fail process.
- FilterParams []CkafkaDatahubTaskTransformsParamFilterParam
- filter.
- KeepMetadata bool
- Whether to keep the data source Topic metadata information (source Topic, Partition, Offset), the default is false.
- OutputFormat string
- output format, JSON, ROW, default JSON.
- Result string
- result.
- RowParam CkafkaDatahubTaskTransformsParamRowParam
- Required when the output format is ROW.
- SourceType string
- data source.
- content String
- Raw data.
- fieldChains List<CkafkaDatahubTaskTransformsParamFieldChain>
- processing chain.
- batchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
- data process.
- failureParam CkafkaDatahubTaskTransformsParamFailureParam
- fail process.
- filterParams List<CkafkaDatahubTaskTransformsParamFilterParam>
- filter.
- keepMetadata Boolean
- Whether to keep the data source Topic metadata information (source Topic, Partition, Offset), the default is false.
- outputFormat String
- output format, JSON, ROW, default JSON.
- result String
- result.
- rowParam CkafkaDatahubTaskTransformsParamRowParam
- Required when the output format is ROW.
- sourceType String
- data source.
- content string
- Raw data.
- fieldChains CkafkaDatahubTaskTransformsParamFieldChain[]
- processing chain.
- batchAnalyse CkafkaDatahubTaskTransformsParamBatchAnalyse
- data process.
- failureParam CkafkaDatahubTaskTransformsParamFailureParam
- fail process.
- filterParams CkafkaDatahubTaskTransformsParamFilterParam[]
- filter.
- keepMetadata boolean
- Whether to keep the data source Topic metadata information (source Topic, Partition, Offset), the default is false.
- outputFormat string
- output format, JSON, ROW, default JSON.
- result string
- result.
- rowParam CkafkaDatahubTaskTransformsParamRowParam
- Required when the output format is ROW.
- sourceType string
- data source.
- content str
- Raw data.
- field_chains Sequence[CkafkaDatahubTaskTransformsParamFieldChain]
- processing chain.
- batch_analyse CkafkaDatahubTaskTransformsParamBatchAnalyse
- data process.
- failure_param CkafkaDatahubTaskTransformsParamFailureParam
- fail process.
- filter_params Sequence[CkafkaDatahubTaskTransformsParamFilterParam]
- filter.
- keep_metadata bool
- Whether to keep the data source Topic metadata information (source Topic, Partition, Offset), the default is false.
- output_format str
- output format, JSON, ROW, default JSON.
- result str
- result.
- row_param CkafkaDatahubTaskTransformsParamRowParam
- Required when the output format is ROW.
- source_type str
- data source.
- content String
- Raw data.
- fieldChains List<Property Map>
- processing chain.
- batchAnalyse Property Map
- data process.
- failureParam Property Map
- fail process.
- filterParams List<Property Map>
- filter.
- keepMetadata Boolean
- Whether to keep the data source Topic metadata information (source Topic, Partition, Offset), the default is false.
- outputFormat String
- output format, JSON, ROW, default JSON.
- result String
- result.
- rowParam Property Map
- Required when the output format is ROW.
- sourceType String
- data source.
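As a rough TypeScript sketch, a transforms block with these fields might look like the following; the sample content and choices are assumptions, not provider defaults.
// Illustrative transformsParam shape per the listing above.
const transformsParam = {
    content: "{\"level\":\"info\",\"msg\":\"ok\"}", // raw sample data the chain is tested against
    outputFormat: "JSON",  // JSON or ROW; rowParam becomes required for ROW
    keepMetadata: false,   // do not keep source topic/partition/offset metadata
    fieldChains: [],       // processing chain entries (see FieldChain below)
    filterParams: [],      // optional filters (see FilterParam above)
};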
CkafkaDatahubTaskTransformsParamBatchAnalyse, CkafkaDatahubTaskTransformsParamBatchAnalyseArgs              
- Format string
- ONE BY ONE single output, MERGE combined output.
- Format string
- ONE BY ONE single output, MERGE combined output.
- format String
- ONE BY ONE single output, MERGE combined output.
- format string
- ONE BY ONE single output, MERGE combined output.
- format str
- ONE BY ONE single output, MERGE combined output.
- format String
- ONE BY ONE single output, MERGE combined output.
CkafkaDatahubTaskTransformsParamFailureParam, CkafkaDatahubTaskTransformsParamFailureParamArgs              
- Type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string
- dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
- CKafka type DLQ.
- MaxRetryAttempts double
- number of retries.
- RetryInterval double
- retry interval.
- TopicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- Type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- DlqType string
- dlq type, CKAFKA|TOPIC.
- KafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
- CKafka type DLQ.
- MaxRetryAttempts float64
- number of retries.
- RetryInterval float64
- retry interval.
- TopicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type String
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String
- dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
- CKafka type DLQ.
- maxRetryAttempts Double
- number of retries.
- retryInterval Double
- retry interval.
- topicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type string
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType string
- dlq type, CKAFKA|TOPIC.
- kafkaParam CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
- CKafka type DLQ.
- maxRetryAttempts number
- number of retries.
- retryInterval number
- retry interval.
- topicParam CkafkaDatahubTaskTransformsParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type str
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlq_type str
- dlq type, CKAFKA|TOPIC.
- kafka_param CkafkaDatahubTaskTransformsParamFailureParamKafkaParam
- CKafka type DLQ.
- max_retry_attempts float
- number of retries.
- retry_interval float
- retry interval.
- topic_param CkafkaDatahubTaskTransformsParamFailureParamTopicParam
- DIP Topic type dead letter queue.
- type String
- type, DLQ dead letter queue, IGNORE_ERROR|DROP.
- dlqType String
- dlq type, CKAFKA|TOPIC.
- kafkaParam Property Map
- CKafka type DLQ.
- maxRetryAttempts Number
- number of retries.
- retryInterval Number
- retry interval.
- topicParam Property Map
- DIP Topic type dead letter queue.
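A minimal TypeScript sketch of a failure handler that retries and then dead-letters to a CKafka topic; the resource identifier, topic name, and retry values are placeholders.
// Hypothetical failureParam: retry three times, then route to a CKafka DLQ.
const failureParam = {
    type: "DLQ",          // DLQ | IGNORE_ERROR | DROP
    dlqType: "CKAFKA",    // CKAFKA | TOPIC
    maxRetryAttempts: 3,  // number of retries before dead-lettering
    retryInterval: 1000,  // retry interval
    kafkaParam: {         // CKafka DLQ target (fields per KafkaParam below)
        resource: "ckafka-xxxxxxxx", // placeholder instance id
        topic: "dlq-topic",          // placeholder DLQ topic name
        selfBuilt: false,
        compressionType: "none",
    },
};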
CkafkaDatahubTaskTransformsParamFailureParamKafkaParam, CkafkaDatahubTaskTransformsParamFailureParamKafkaParamArgs                  
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is self-built rather than a cloud product instance.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple double
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest (from the beginning), latest (from the latest), timestamp (from a specific time).
- PartitionNum double
- the partition number of the topic.
- QpsLimit double
- QPS (queries per second) limit.
- ResourceName string
- instance name.
- StartTime double
- Required when the offset type is timestamp.
- TableMappings List<CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping>
- maps of table to topic; required when multiple topics are selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, set the Topic field to the name of the topic to create automatically).
- UseTableMapping bool
- whether to use multi-table mapping.
- ZoneId double
- Zone ID.
- Resource string
- instance resource.
- SelfBuilt bool
- whether the cluster is self-built rather than a cloud product instance.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- EnableToleration bool
- enable dead letter queue.
- MsgMultiple float64
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest (from the beginning), latest (from the latest), timestamp (from a specific time).
- PartitionNum float64
- the partition number of the topic.
- QpsLimit float64
- QPS (queries per second) limit.
- ResourceName string
- instance name.
- StartTime float64
- Required when the offset type is timestamp.
- TableMappings []CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping
- maps of table to topic; required when multiple topics are selected.
- Topic string
- Topic name.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, set the Topic field to the name of the topic to create automatically).
- UseTableMapping bool
- whether to use multi-table mapping.
- ZoneId float64
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is self-built rather than a cloud product instance.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Double
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest (from the beginning), latest (from the latest), timestamp (from a specific time).
- partitionNum Double
- the partition number of the topic.
- qpsLimit Double
- QPS (queries per second) limit.
- resourceName String
- instance name.
- startTime Double
- Required when the offset type is timestamp.
- tableMappings List<CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping>
- maps of table to topic; required when multiple topics are selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, set the Topic field to the name of the topic to create automatically).
- useTableMapping Boolean
- whether to use multi-table mapping.
- zoneId Double
- Zone ID.
- resource string
- instance resource.
- selfBuilt boolean
- whether the cluster is self-built rather than a cloud product instance.
- compressionType string
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration boolean
- enable dead letter queue.
- msgMultiple number
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType string
- Offset type: earliest (from the beginning), latest (from the latest), timestamp (from a specific time).
- partitionNum number
- the partition number of the topic.
- qpsLimit number
- QPS (queries per second) limit.
- resourceName string
- instance name.
- startTime number
- Required when the offset type is timestamp.
- tableMappings CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping[]
- maps of table to topic; required when multiple topics are selected.
- topic string
- Topic name.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, set the Topic field to the name of the topic to create automatically).
- useTableMapping boolean
- whether to use multi-table mapping.
- zoneId number
- Zone ID.
- resource str
- instance resource.
- self_built bool
- whether the cluster is self-built rather than a cloud product instance.
- compression_type str
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enable_toleration bool
- enable dead letter queue.
- msg_multiple float
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offset_type str
- Offset type: earliest (from the beginning), latest (from the latest), timestamp (from a specific time).
- partition_num float
- the partition number of the topic.
- qps_limit float
- QPS (queries per second) limit.
- resource_name str
- instance name.
- start_time float
- Required when the offset type is timestamp.
- table_mappings Sequence[CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping]
- maps of table to topic; required when multiple topics are selected.
- topic str
- Topic name.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, set the Topic field to the name of the topic to create automatically).
- use_table_mapping bool
- whether to use multi-table mapping.
- zone_id float
- Zone ID.
- resource String
- instance resource.
- selfBuilt Boolean
- whether the cluster is self-built rather than a cloud product instance.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable or open to enable.
- enableToleration Boolean
- enable dead letter queue.
- msgMultiple Number
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest (from the beginning), latest (from the latest), timestamp (from a specific time).
- partitionNum Number
- the partition number of the topic.
- qpsLimit Number
- QPS (queries per second) limit.
- resourceName String
- instance name.
- startTime Number
- Required when the offset type is timestamp.
- tableMappings List<Property Map>
- maps of table to topic; required when multiple topics are selected.
- topic String
- Topic name.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- Whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks; if you are not distributing to multiple topics, set the Topic field to the name of the topic to create automatically).
- useTableMapping Boolean
- whether to use multi-table mapping.
- zoneId Number
- Zone ID.
CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMapping, CkafkaDatahubTaskTransformsParamFailureParamKafkaParamTableMappingArgs                      
CkafkaDatahubTaskTransformsParamFailureParamTopicParam, CkafkaDatahubTaskTransformsParamFailureParamTopicParamArgs                  
- Resource string
- The topic name of the separately sold (standalone) topic.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- MsgMultiple double
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- StartTime double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- Resource string
- The topic name of the separately sold (standalone) topic.
- CompressionType string
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- MsgMultiple float64
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- OffsetType string
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- StartTime float64
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- TopicId string
- Topic ID.
- UseAutoCreateTopic bool
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the separately sold (standalone) topic.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msgMultiple Double
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- startTime Double
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource string
- The topic name of the separately sold (standalone) topic.
- compressionType string
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msgMultiple number
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType string
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- startTime number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId string
- Topic ID.
- useAutoCreateTopic boolean
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource str
- The topic name of the separately sold (standalone) topic.
- compression_type str
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msg_multiple float
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offset_type str
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- start_time float
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topic_id str
- Topic ID.
- use_auto_create_topic bool
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
- resource String
- The topic name of the separately sold (standalone) topic.
- compressionType String
- Whether to compress when writing to the topic; fill in none to disable, or one of gzip, snappy, lz4 to enable.
- msgMultiple Number
- Each source topic message is amplified msgMultiple times and written to the target topic (currently only applicable to CKafka-to-CKafka flows).
- offsetType String
- Offset type: earliest (initial position), latest (latest position), timestamp (a point in time).
- startTime Number
- Required when the offset type is timestamp; pass a timestamp accurate to the second.
- topicId String
- Topic ID.
- useAutoCreateTopic Boolean
- whether the topic needs to be created automatically (currently only supported for SOURCE inflow tasks).
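A minimal TypeScript sketch of a DIP topic-type dead letter queue target with these fields; the topic name and compression choice are illustrative.
// Hypothetical topicParam for a TOPIC-type DLQ.
const topicParam = {
    resource: "dlq-topic",    // name of the separately sold topic (placeholder)
    compressionType: "lz4",   // none, or one of gzip/snappy/lz4
    offsetType: "earliest",   // earliest | latest | timestamp
    useAutoCreateTopic: true, // SOURCE inflow tasks only
};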
CkafkaDatahubTaskTransformsParamFieldChain, CkafkaDatahubTaskTransformsParamFieldChainArgs              
- Analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse
- analyze.
- AnalyseJsonResult string
- Parsing results in JSON format.
- AnalyseResults List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult>
- Analysis result.
- Result string
- Test Results.
- SMTs List<CkafkaDatahubTaskTransformsParamFieldChainSMT>
- data processing.
- SecondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse
- secondary analysis.
- SecondaryAnalyseJsonResult string
- Secondary parsing results in JSON format.
- SecondaryAnalyseResults List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult>
- Secondary Analysis Results.
- Analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse
- analyze.
- AnalyseJsonResult string
- Parsing results in JSON format.
- AnalyseResults []CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult
- Analysis result.
- Result string
- Test Results.
- SMTs []CkafkaDatahubTaskTransformsParamFieldChainSMT
- data processing.
- SecondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse
- secondary analysis.
- SecondaryAnalyseJsonResult string
- Secondary parsing results in JSON format.
- SecondaryAnalyseResults []CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult
- Secondary Analysis Results.
- analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse
- analyze.
- analyseJsonResult String
- Parsing results in JSON format.
- analyseResults List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult>
- Analysis result.
- result String
- Test Results.
- sMTs List<CkafkaDatahubTaskTransformsParamFieldChainSMT>
- data processing.
- secondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse
- secondary analysis.
- secondaryAnalyseJsonResult String
- Secondary parsing results in JSON format.
- secondaryAnalyseResults List<CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult>
- Secondary Analysis Results.
- analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse
- analyze.
- analyseJsonResult string
- Parsing results in JSON format.
- analyseResults CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult[]
- Analysis result.
- result string
- Test Results.
- sMTs CkafkaDatahubTaskTransformsParamFieldChainSMT[]
- data processing.
- secondaryAnalyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse
- secondary analysis.
- secondaryAnalyseJsonResult string
- Secondary parsing results in JSON format.
- secondaryAnalyseResults CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult[]
- Secondary Analysis Results.
- analyse CkafkaDatahubTaskTransformsParamFieldChainAnalyse
- analyze.
- analyse_json_result str
- Parsing results in JSON format.
- analyse_results Sequence[CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult]
- Analysis result.
- result str
- Test Results.
- s_mts Sequence[CkafkaDatahubTaskTransformsParamFieldChainSMT]
- data processing.
- secondary_analyse CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse
- secondary analysis.
- secondary_analyse_json_result str
- Secondary parsing results in JSON format.
- secondary_analyse_results Sequence[CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult]
- Secondary Analysis Results.
- analyse Property Map
- analyze.
- analyseJsonResult String
- Parsing results in JSON format.
- analyseResults List<Property Map>
- Analysis result.
- result String
- Test Results.
- sMTs List<Property Map>
- data processing.
- secondaryAnalyse Property Map
- secondary analysis.
- secondaryAnalyseJsonResult String
- Secondary parsing results in JSON format.
- secondaryAnalyseResults List<Property Map>
- Secondary Analysis Results.
CkafkaDatahubTaskTransformsParamFieldChainAnalyse, CkafkaDatahubTaskTransformsParamFieldChainAnalyseArgs                
- Format string
- Parsing format, JSON, DELIMITER delimiter, REGULAR regular extraction, SOURCE processing all results of the upper layer.
- InputValue string
- KEY to be processed again - KEY expression.
- InputValueType string
- KEY to be processed again - mode.
- Regex string
- delimiter, regular expression.
- Format string
- Parsing format, JSON, DELIMITER delimiter, REGULAR regular extraction, SOURCE processing all results of the upper layer.
- InputValue string
- KEY to be processed again - KEY expression.
- InputValueType string
- KEY to be processed again - mode.
- Regex string
- delimiter, regular expression.
- format String
- Parsing format, JSON, DELIMITER delimiter, REGULAR regular extraction, SOURCE processing all results of the upper layer.
- inputValue String
- KEY to be processed again - KEY expression.
- inputValueType String
- KEY to be processed again - mode.
- regex String
- delimiter, regular expression.
- format string
- Parsing format, JSON, DELIMITER delimiter, REGULAR regular extraction, SOURCE processing all results of the upper layer.
- inputValue string
- KEY to be processed again - KEY expression.
- inputValueType string
- KEY to be processed again - mode.
- regex string
- delimiter, regular expression.
- format str
- Parsing format, JSON, DELIMITER delimiter, REGULAR regular extraction, SOURCE processing all results of the upper layer.
- input_value str
- KEY to be processed again - KEY expression.
- input_value_type str
- KEY to be processed again - mode.
- regex str
- delimiter, regular expression.
- format String
- Parsing format, JSON, DELIMITER delimiter, REGULAR regular extraction, SOURCE processing all results of the upper layer.
- inputValue String
- KEY to be processed again - KEY expression.
- inputValueType String
- KEY to be processed again - mode.
- regex String
- delimiter, regular expression.
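For orientation, a minimal TypeScript sketch of an analyse step that splits incoming lines on a delimiter; the delimiter choice is an assumption.
// Hypothetical analyse step: DELIMITER mode uses regex as the delimiter.
const analyse = {
    format: "DELIMITER", // JSON | DELIMITER | REGULAR | SOURCE
    regex: ",",          // delimiter (a regular expression in REGULAR mode)
};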
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResult, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultArgs                  
- Key string
- key.
- Operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- SchemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- OriginalValue.
- Value string
- value.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
- VALUE process.
- ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate>
- VALUE process chain.
- Key string
- key.
- Operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- SchemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- OriginalValue.
- Value string
- value.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
- VALUE process.
- ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
- VALUE process chain.
- key String
- key.
- operate String
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType String
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- OriginalValue.
- value String
- value.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
- VALUE process.
- valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate>
- VALUE process chain.
- key string
- key.
- operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue string
- OriginalValue.
- value string
- value.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
- VALUE process.
- valueOperates CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate[]
- VALUE process chain.
- key str
- key.
- operate str
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- scheme_type str
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- original_value str
- OriginalValue.
- value str
- value.
- value_operate CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate
- VALUE process.
- value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate]
- VALUE process chain.
- key String
- key.
- operate String
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType String
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- OriginalValue.
- value String
- value.
- valueOperate Property Map
- VALUE process.
- valueOperates List<Property Map>
- VALUE process chain.
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperate, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateArgs                      
- Type string
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- Date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- Result string
- result.
- Split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
- URL decoding.
- Type string
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- Date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- Result string
- result.
- Split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
- URL decoding.
- type String
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result String
- result.
- split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
- URL decoding.
- type string
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result string
- result.
- split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
- URL decoding.
- type str
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate
- Time conversion; required when TYPE=DATE.
- json_path_replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- k_v CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- regex_replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result str
- result.
- split CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- url_decode CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode
- URL decoding.
- type String
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date Property Map
- Time conversion; required when TYPE=DATE.
- jsonPathReplace Property Map
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV Property Map
- Key-value secondary analysis; required when TYPE=KV.
- regexReplace Property Map
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace Property Map
- Replacement; required when TYPE=REPLACE.
- result String
- result.
- split Property Map
- Splits one value into multiple values; required when TYPE=SPLIT.
- substr Property Map
- Substring extraction; required when TYPE=SUBSTR.
- urlDecode Property Map
- URL decoding.
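A minimal TypeScript sketch of a value-operate entry that converts a string date into a unix timestamp; the format string is an assumption.
// Hypothetical DATE value operate.
const valueOperate = {
    type: "DATE",                      // one of the processing modes listed above
    date: {
        format: "yyyy-MM-dd HH:mm:ss", // assumed time format
        targetType: "unix",            // string | unix
        timeZone: "GMT+8",             // default GMT+8
    },
};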
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateDateArgs                        
- Format string
- Time format.
- TargetType string
- input type, string|unix.
- TimeZone string
- default GMT+8.
- Format string
- Time format.
- TargetType string
- input type, string|unix.
- TimeZone string
- default GMT+8.
- format String
- Time format.
- targetType String
- input type, string|unix.
- timeZone String
- default GMT+8.
- format string
- Time format.
- targetType string
- input type, string|unix.
- timeZone string
- default GMT+8.
- format str
- Time format.
- target_type str
- input type, string|unix.
- time_zone str
- default GMT+8.
- format String
- Time format.
- targetType String
- input type, string|unix.
- timeZone String
- default GMT+8.
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateJsonPathReplaceArgs                            
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateKVArgs                        
- Delimiter string
- delimiter.
- Regex string
- Key-value secondary analysis delimiter.
- KeepOriginalKey string
- Whether to keep the source key; defaults to false (do not keep).
- Delimiter string
- delimiter.
- Regex string
- Key-value secondary analysis delimiter.
- KeepOriginalKey string
- Whether to keep the source key; defaults to false (do not keep).
- delimiter String
- delimiter.
- regex String
- Key-value secondary analysis delimiter.
- keepOriginalKey String
- Whether to keep the source key; defaults to false (do not keep).
- delimiter string
- delimiter.
- regex string
- Key-value secondary analysis delimiter.
- keepOriginalKey string
- Whether to keep the source key; defaults to false (do not keep).
- delimiter str
- delimiter.
- regex str
- Key-value secondary analysis delimiter.
- keep_original_key str
- Whether to keep the source key; defaults to false (do not keep).
- delimiter String
- delimiter.
- regex String
- Key-value secondary analysis delimiter.
- keepOriginalKey String
- Whether to keep the source key; defaults to false (do not keep).
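A minimal TypeScript sketch of a KV secondary-analysis operate that splits "k1=v1&k2=v2"-style values into keys; the delimiters are illustrative.
// Hypothetical KV value operate.
const kvOperate = {
    type: "KV",
    kV: {
        delimiter: "&",           // separates key-value pairs
        regex: "=",               // separates key from value within a pair
        keepOriginalKey: "false", // note: a string flag per the listing, not a boolean
    },
};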
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateRegexReplaceArgs                          
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateReplaceArgs                        
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSplitArgs                        
- Regex string
- delimiter.
- Regex string
- delimiter.
- regex String
- delimiter.
- regex string
- delimiter.
- regex str
- delimiter.
- regex String
- delimiter.
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateSubstrArgs                        
CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainAnalyseResultValueOperateUrlDecodeArgs                          
- CharsetName string
- encoding (character set name).
- CharsetName string
- encoding (character set name).
- charsetName String
- encoding (character set name).
- charsetName string
- encoding (character set name).
- charset_name str
- encoding (character set name).
- charsetName String
- encoding (character set name).
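A minimal TypeScript sketch of a URL-decode operate; the charset value is an assumption, and the type spelling follows the processing-mode list above.
// Hypothetical URL DECODE value operate.
const urlDecodeOperate = {
    type: "URL DECODE",
    urlDecode: { charsetName: "UTF-8" }, // assumed character set
};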
CkafkaDatahubTaskTransformsParamFieldChainSMT, CkafkaDatahubTaskTransformsParamFieldChainSMTArgs                
- Key string
- KEY.
- Operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- SchemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- OriginalValue.
- Value string
- VALUE.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
- VALUE process.
- ValueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate>
- VALUE process chain.
- Key string
- KEY.
- Operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- SchemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- OriginalValue.
- Value string
- VALUE.
- ValueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
- VALUE process.
- ValueOperates []CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
- VALUE process chain.
- key String
- KEY.
- operate String
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType String
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- OriginalValue.
- value String
- VALUE.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
- VALUE process.
- valueOperates List<CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate>
- VALUE process chain.
- key string
- KEY.
- operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue string
- OriginalValue.
- value string
- VALUE.
- valueOperate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
- VALUE process.
- valueOperates CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate[]
- VALUE process chain.
- key str
- KEY.
- operate str
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- scheme_type str
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- original_value str
- OriginalValue.
- value str
- VALUE.
- value_operate CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate
- VALUE process.
- value_operates Sequence[CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate]
- VALUE process chain.
- key String
- KEY.
- operate String
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType String
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- OriginalValue.
- value String
- VALUE.
- valueOperate Property Map
- VALUE process.
- valueOperates List<Property Map>
- VALUE process chain.
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperate, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateArgs                  
- Type string
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- Date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
- Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- Result string
- result.
- Split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
- URL decoding.
- Type string
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- Date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
- Time conversion; required when TYPE=DATE.
- JsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- RegexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- Result string
- result.
- Split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- Substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- UrlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
- URL decoding.
- type String
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
- Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result String
- result.
- split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
- URL decoding.
- type string
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
- Time conversion; required when TYPE=DATE.
- jsonPathReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- regexReplace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result string
- result.
- split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- urlDecode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
- URL decoding.
- type str
- Processing mode: REPLACE replacement, SUBSTR substring extraction, DATE date conversion, TRIM removal of leading and trailing whitespace, REGEX REPLACE regular-expression replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate
- Time conversion; required when TYPE=DATE.
- json_path_replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace
- JSON Path replacement; required when TYPE=JSON PATH REPLACE.
- k_v CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV
- Key-value secondary analysis; required when TYPE=KV.
- regex_replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace
- Regular-expression replacement; required when TYPE=REGEX REPLACE.
- replace CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace
- Replacement; required when TYPE=REPLACE.
- result str
- result.
- split CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit
- Splits one value into multiple values; required when TYPE=SPLIT.
- substr CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr
- Substring extraction; required when TYPE=SUBSTR.
- url_decode CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode
- URL decoding.
- type String
- Processing mode, REPLACE replacement, SUBSTR interception, DATE date conversion, TRIM removal of leading and trailing spaces, REGEX REPLACE regular replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date Property Map
- Time conversion, required when TYPE=DATE.
- jsonPathReplace Property Map
- Json Path replacement, must pass when TYPE=JSON PATH REPLACE.
- kV Property Map
- Key-value secondary analysis, must be passed when TYPE=KV.
- regexReplace Property Map
- Regular replacement, required when TYPE=REGEX REPLACE.
- replace Property Map
- replace, TYPE=REPLACE is required.
- result String
- result.
- split Property Map
- The value supports one split and multiple values, required when TYPE=SPLIT.
- substr Property Map
- Substr, TYPE=SUBSTR is required.
- urlDecode Property Map
- Url parsing.
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateDateArgs                    
- Format string
- Time format.
- TargetType string
- input type, string|unix.
- TimeZone string
- default GMT+8.
- Format string
- Time format.
- TargetType string
- input type, string|unix.
- TimeZone string
- default GMT+8.
- format String
- Time format.
- targetType String
- input type, string|unix.
- timeZone String
- default GMT+8.
- format string
- Time format.
- targetType string
- input type, string|unix.
- timeZone string
- default GMT+8.
- format str
- Time format.
- target_type str
- input type, string|unix.
- time_zone str
- default GMT+8.
- format String
- Time format.
- targetType String
- input type, string|unix.
- timeZone String
- default GMT+8.
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateJsonPathReplaceArgs                        
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateKVArgs                    
- Delimiter string
- delimiter.
- Regex string
- Key-value secondary analysis delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- Delimiter string
- delimiter.
- Regex string
- Key-value secondary analysis delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- delimiter.
- regex String
- Key-value secondary analysis delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
- delimiter string
- delimiter.
- regex string
- Key-value secondary analysis delimiter.
- keepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter str
- delimiter.
- regex str
- Key-value secondary analysis delimiter.
- keep_original_key str
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- delimiter.
- regex String
- Key-value secondary analysis delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
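For example, a hedged TypeScript fragment for a KV operate that parses `a=1&b=2` style payloads might look like this (the delimiter choices are assumptions):
// Hypothetical KV parameters for an operate with type = "KV".
const kvParams = {
    delimiter: "&",           // separates key-value pairs
    regex: "=",               // key-value secondary analysis delimiter
    keepOriginalKey: "false", // keep the source Key; defaults to "false"
};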
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateRegexReplaceArgs                      
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateReplaceArgs                    
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSplitArgs                    
- Regex string
- delimiter.
- Regex string
- delimiter.
- regex String
- delimiter.
- regex string
- delimiter.
- regex str
- delimiter.
- regex String
- delimiter.
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateSubstrArgs                    
CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainSMTValueOperateUrlDecodeArgs                      
- CharsetName string
- Encoding.
- CharsetName string
- Encoding.
- charsetName String
- Encoding.
- charsetName string
- Encoding.
- charset_name str
- Encoding.
- charsetName String
- Encoding.
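A minimal TypeScript sketch for a URL DECODE operate, assuming a UTF-8 payload:
// Hypothetical parameters for an operate with type = "URL DECODE".
const urlDecodeParams = {
    charsetName: "UTF-8", // encoding applied while decoding
};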
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyse, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseArgs                  
- Regex string
- delimiter.
- Regex string
- delimiter.
- regex String
- delimiter.
- regex string
- delimiter.
- regex str
- delimiter.
- regex String
- delimiter.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResult, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultArgs                    
- Key string
- KEY.
- Operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- SchemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- OriginalValue.
- Value string
- VALUE.
- ValueOperate CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate 
- VALUE process.
- ValueOperates List<CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate> 
- VALUE process chain.
- Key string
- KEY.
- Operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- SchemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- OriginalValue string
- OriginalValue.
- Value string
- VALUE.
- ValueOperate CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate 
- VALUE process.
- ValueOperates []CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate 
- VALUE process chain.
- key String
- KEY.
- operate String
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType String
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- OriginalValue.
- value String
- VALUE.
- valueOperate CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate 
- VALUE process.
- valueOperates List<CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate> 
- VALUE process chain.
- key string
- KEY.
- operate string
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType string
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue string
- OriginalValue.
- value string
- VALUE.
- valueOperate CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate 
- VALUE process.
- valueOperates CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate[] 
- VALUE process chain.
- key str
- KEY.
- operate str
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- scheme_type str
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- original_value str
- OriginalValue.
- value str
- VALUE.
- value_operate CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate 
- VALUE process.
- value_operates Sequence[CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate] 
- VALUE process chain.
- key String
- KEY.
- operate String
- Operation, DATE system preset - timestamp, CUSTOMIZE customization, MAPPING mapping, JSONPATH.
- schemeType String
- data type, ORIGINAL, STRING, INT64, FLOAT64, BOOLEAN, MAP, ARRAY.
- originalValue String
- OriginalValue.
- value String
- VALUE.
- valueOperate Property Map
- VALUE process.
- valueOperates List<Property Map>
- VALUE process chain.
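Putting the fields above together, a hedged TypeScript sketch of one secondary-analyse result entry might read as follows; the key names come from the listing, while the values and the assumption that `value` applies when `operate` is CUSTOMIZE are illustrative:
// Hypothetical secondary-analyse result: map a parsed key to a custom
// value, then post-process it with a SPLIT value operate.
const analyseResult = {
    key: "status",        // KEY
    operate: "CUSTOMIZE", // DATE | CUSTOMIZE | MAPPING | JSONPATH
    schemeType: "STRING", // ORIGINAL | STRING | INT64 | FLOAT64 | BOOLEAN | MAP | ARRAY
    value: "ok",          // VALUE (assumed to apply with CUSTOMIZE)
    valueOperates: [      // optional VALUE process chain
        { type: "SPLIT", split: { regex: "," } },
    ],
};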
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperate, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateArgs                        
- Type string
- Processing mode, REPLACE replacement, SUBSTR interception, DATE date conversion, TRIM removal of leading and trailing spaces, REGEX REPLACE regular replacement, URL DECODE, LOWERCASE conversion to lowercase.
- Date CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Date
- Time conversion, required when TYPE=DATE.
- JsonPathReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Json Path Replace
- Json Path replacement, must pass when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate KV
- Key-value secondary analysis, must be passed when TYPE=KV.
- RegexReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Regex Replace
- Regular replacement, required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Replace
- replace, TYPE=REPLACE is required.
- Result string
- result.
- Split CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Split
- The value supports one split and multiple values, required when TYPE=SPLIT.
- Substr CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Substr
- Substr, TYPE=SUBSTR is required.
- UrlDecode CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Url Decode 
- Url parsing.
- Type string
- Processing mode, REPLACE replacement, SUBSTR interception, DATE date conversion, TRIM removal of leading and trailing spaces, REGEX REPLACE regular replacement, URL DECODE, LOWERCASE conversion to lowercase.
- Date CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Date
- Time conversion, required when TYPE=DATE.
- JsonPathReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Json Path Replace
- Json Path replacement, must pass when TYPE=JSON PATH REPLACE.
- KV CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate KV
- Key-value secondary analysis, must be passed when TYPE=KV.
- RegexReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Regex Replace
- Regular replacement, required when TYPE=REGEX REPLACE.
- Replace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Replace
- replace, TYPE=REPLACE is required.
- Result string
- result.
- Split CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Split
- The value supports one split and multiple values, required when TYPE=SPLIT.
- Substr CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Substr
- Substr, TYPE=SUBSTR is required.
- UrlDecode CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Url Decode 
- Url parsing.
- type String
- Processing mode, REPLACE replacement, SUBSTR interception, DATE date conversion, TRIM removal of leading and trailing spaces, REGEX REPLACE regular replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Date
- Time conversion, required when TYPE=DATE.
- jsonPathReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Json Path Replace
- Json Path replacement, must pass when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate KV
- Key-value secondary analysis, must be passed when TYPE=KV.
- regexReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Regex Replace
- Regular replacement, required when TYPE=REGEX REPLACE.
- replace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Replace
- replace, TYPE=REPLACE is required.
- result String
- result.
- split CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Split
- The value supports one split and multiple values, required when TYPE=SPLIT.
- substr CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Substr
- Substr, TYPE=SUBSTR is required.
- urlDecode CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Url Decode 
- Url parsing.
- type string
- Processing mode, REPLACE replacement, SUBSTR interception, DATE date conversion, TRIM removal of leading and trailing spaces, REGEX REPLACE regular replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Date
- Time conversion, required when TYPE=DATE.
- jsonPathReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Json Path Replace
- Json Path replacement, must pass when TYPE=JSON PATH REPLACE.
- kV CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate KV
- Key-value secondary analysis, must be passed when TYPE=KV.
- regexReplace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Regex Replace
- Regular replacement, required when TYPE=REGEX REPLACE.
- replace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Replace
- replace, TYPE=REPLACE is required.
- result string
- result.
- split CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Split
- The value supports one split and multiple values, required when TYPE=SPLIT.
- substr CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Substr
- Substr, TYPE=SUBSTR is required.
- urlDecode CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Url Decode 
- Url parsing.
- type str
- Processing mode, REPLACE replacement, SUBSTR interception, DATE date conversion, TRIM removal of leading and trailing spaces, REGEX REPLACE regular replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Date
- Time conversion, required when TYPE=DATE.
- json_path_replace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Json Path Replace
- Json Path replacement, must pass when TYPE=JSON PATH REPLACE.
- k_v CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate KV
- Key-value secondary analysis, must be passed when TYPE=KV.
- regex_replace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Regex Replace
- Regular replacement, required when TYPE=REGEX REPLACE.
- replace CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Replace
- replace, TYPE=REPLACE is required.
- result str
- result.
- split CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Split
- The value supports one split and multiple values, required when TYPE=SPLIT.
- substr CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Substr
- Substr, TYPE=SUBSTR is required.
- url_decode CkafkaDatahub Task Transforms Param Field Chain Secondary Analyse Result Value Operate Url Decode 
- Url parsing.
- type String
- Processing mode, REPLACE replacement, SUBSTR interception, DATE date conversion, TRIM removal of leading and trailing spaces, REGEX REPLACE regular replacement, URL DECODE, LOWERCASE conversion to lowercase.
- date Property Map
- Time conversion, required when TYPE=DATE.
- jsonPathReplace Property Map
- Json Path replacement, must pass when TYPE=JSON PATH REPLACE.
- kV Property Map
- Key-value secondary analysis, must be passed when TYPE=KV.
- regexReplace Property Map
- Regular replacement, required when TYPE=REGEX REPLACE.
- replace Property Map
- replace, TYPE=REPLACE is required.
- result String
- result.
- split Property Map
- The value supports one split and multiple values, required when TYPE=SPLIT.
- substr Property Map
- Substr, TYPE=SUBSTR is required.
- urlDecode Property Map
- Url parsing.
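As a sketch, a two-step VALUE process chain in TypeScript could combine a SPLIT and a TRIM step (TRIM carries no nested parameters in this listing; the regex value is an assumption):
// Hypothetical VALUE process chain: split on commas, then trim spaces.
const valueOperates = [
    { type: "SPLIT", split: { regex: "," } },
    { type: "TRIM" },
];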
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDate, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateDateArgs                          
- Format string
- Time format.
- TargetType string
- input type, string|unix.
- TimeZone string
- default GMT+8.
- Format string
- Time format.
- TargetType string
- input type, string|unix.
- TimeZone string
- default GMT+8.
- format String
- Time format.
- targetType String
- input type, string|unix.
- timeZone String
- default GMT+8.
- format string
- Time format.
- targetType string
- input type, string|unix.
- timeZone string
- default GMT+8.
- format str
- Time format.
- target_type str
- input type, string|unix.
- time_zone str
- default GMT+8.
- format String
- Time format.
- targetType String
- input type, string|unix.
- timeZone String
- default GMT+8.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateJsonPathReplaceArgs                              
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKV, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateKVArgs                          
- Delimiter string
- delimiter.
- Regex string
- Key-value secondary analysis delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- Delimiter string
- delimiter.
- Regex string
- Key-value secondary analysis delimiter.
- KeepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- delimiter.
- regex String
- Key-value secondary analysis delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
- delimiter string
- delimiter.
- regex string
- Key-value secondary analysis delimiter.
- keepOriginalKey string
- Whether to keep the source Key; defaults to false (not kept).
- delimiter str
- delimiter.
- regex str
- Key-value secondary analysis delimiter.
- keep_original_key str
- Whether to keep the source Key; defaults to false (not kept).
- delimiter String
- delimiter.
- regex String
- Key-value secondary analysis delimiter.
- keepOriginalKey String
- Whether to keep the source Key; defaults to false (not kept).
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateRegexReplaceArgs                            
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplace, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateReplaceArgs                          
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplit, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSplitArgs                          
- Regex string
- delimiter.
- Regex string
- delimiter.
- regex String
- delimiter.
- regex string
- delimiter.
- regex str
- delimiter.
- regex String
- delimiter.
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstr, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateSubstrArgs                          
CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecode, CkafkaDatahubTaskTransformsParamFieldChainSecondaryAnalyseResultValueOperateUrlDecodeArgs                            
- CharsetName string
- Encoding.
- CharsetName string
- Encoding.
- charsetName String
- Encoding.
- charsetName string
- Encoding.
- charset_name str
- Encoding.
- charsetName String
- Encoding.
CkafkaDatahubTaskTransformsParamFilterParam, CkafkaDatahubTaskTransformsParamFilterParamArgs              
- key str
- Key.
- match_mode str
- Matching mode, prefix matches PREFIX, suffix matches SUFFIX, contains matches CONTAINS, except matches EXCEPT, value matches NUMBER, IP matches IP.
- value str
- Value.
- type str
- REGULAR.
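A hedged TypeScript fragment for a filter param (the TypeScript SDK uses camelCase keys; the field values are illustrative):
// Hypothetical filter: match records whose `level` key starts with "ERROR".
const filterParam = {
    key: "level",        // key to filter on
    matchMode: "PREFIX", // PREFIX | SUFFIX | CONTAINS | EXCEPT | NUMBER | IP
    value: "ERROR",      // value to match
    type: "REGULAR",     // REGULAR, per the listing above
};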
CkafkaDatahubTaskTransformsParamRowParam, CkafkaDatahubTaskTransformsParamRowParamArgs              
- RowContent string
- row content, KEY_VALUE, VALUE.
- EntryDelimiter string
- delimiter.
- KeyValueDelimiter string
- key, value delimiter.
- RowContent string
- row content, KEY_VALUE, VALUE.
- EntryDelimiter string
- delimiter.
- KeyValueDelimiter string
- key, value delimiter.
- rowContent String
- row content, KEY_VALUE, VALUE.
- entryDelimiter String
- delimiter.
- keyValueDelimiter String
- key, value delimiter.
- rowContent string
- row content, KEY_VALUE, VALUE.
- entryDelimiter string
- delimiter.
- keyValueDelimiter string
- key, value delimiter.
- row_content str
- row content, KEY_VALUE, VALUE.
- entry_delimiter str
- delimiter.
- key_value_delimiter str
- key, value delimiter.
- rowContent String
- row content, KEY_VALUE, VALUE.
- entryDelimiter String
- delimiter.
- keyValueDelimiter String
- key, value delimiter.
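For example, a hedged TypeScript sketch of a row param that emits `k1=v1&k2=v2` style rows (the delimiter choices are assumptions):
// Hypothetical row formatting parameters.
const rowParam = {
    rowContent: "KEY_VALUE", // KEY_VALUE or VALUE
    entryDelimiter: "&",     // delimiter between entries
    keyValueDelimiter: "=",  // delimiter between key and value
};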
Import
ckafka datahub_task can be imported using the id, e.g.
$ pulumi import tencentcloud:index/ckafkaDatahubTask:CkafkaDatahubTask datahub_task datahub_task_id
To learn more about importing existing cloud resources, see Importing resources.
Package Details
- Repository
- tencentcloud tencentcloudstack/terraform-provider-tencentcloud
- License
- Notes
- This Pulumi package is based on the tencentcloud Terraform Provider.