I have the following schema (.avsc file):
{
"namespace": "com.avaya.mts",
"type": "record",
"name": "MultiTenancyOrgDataMessage",
"fields": [
{
"name": "eventType",
"type": [
"null",
{
"type": "enum",
"name": "TenantMessageType",
"symbols": [
"TENANT_ADDED",
"TENANT_UPDATED",
"TENANT_DELETED"
]
}
],
"doc": "Event type"
},
{
"name": "timestamp",
"type": "long",
"doc": "Timestamp of the operation"
},
{
"name": "tenantId",
"type": "string",
"doc": "Six digit unique tenant identifier"
},
{
"name": "data",
"type": [
"null",
{
"type": "record",
"name": "TenantData",
"fields": [
{
"name": "name",
"type": "string",
"doc": "Tenant name"
},
{
"name": "tenantId",
"type": "string",
"doc": "Six digit unique tenant identifier"
},
{
"name": "loginDomain",
"type": "string",
"doc": "Login domain name"
},
{
"name": "level",
"type": [
"null",
"string"
],
"doc": "OrganizationNode Level"
},
{
"name": "key",
"type": [
"null",
"string"
],
"doc": "String tenant identifier"
},
{
"name": "organizationNodeId",
"type": [
"null",
"string"
],
"doc": "Six digit unique orgnaization node identifier. Same as tenantId"
},
{
"name": "organizationHierarchy",
"type": [
"null",
"string"
],
"doc": "Full hierarchy path of the orgnaization node"
},
{
"name": "features",
"type": [
"null",
"string"
]
},
{
"name": "type",
"type": [
"null",
{
"type": "enum",
"name": "OrganizationNodeType",
"symbols": [
"TENANT",
"DEFAULT"
]
}
],
"doc": "Organization node type to differentiate between tenant and tenant hierarchy node"
},
{
"name": "orgLevels",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "OrgLevel",
"fields": [
{
"name": "name",
"type": "string"
},
{
"name": "position",
"type": "int"
}
]
}
}
],
"doc": "Name of the organization node level"
},
{
"name": "solutionTemplates",
"type": [
"null",
{
"type": "record",
"name": "SolutionTemplate",
"fields": [
{
"name": "templateId",
"type": "string",
"doc": "Six digit unique solution template identifier"
},
{
"name": "name",
"type": "string",
"doc": "Solution template name"
},
{
"name": "description",
"type": [
"null",
"string"
],
"doc": "Solution template description"
}
]
}
],
"doc": "Solution template associated with the organization node"
},
{
"name": "profiles",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "Profile",
"fields": [
{
"name": "profileId",
"type": "string",
"doc": "Six digit unique profile identifier"
},
{
"name": "name",
"type": "string",
"doc": "Profile name"
},
{
"name": "description",
"type": [
"null",
"string"
],
"doc": "Profile description"
}
]
}
}
],
"doc": "Array of the profiles associated with organization node"
},
{
"name": "tenantAttributes",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "TenantAttribute",
"fields": [
{
"name": "name",
"type": "string",
"doc": "Tenant attribute name"
},
{
"name": "value",
"type": "string",
"doc": "Tenant attribute value"
}
]
}
}
],
"doc": "Array of tenant attributes"
},
{
"name": "children",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "OrganizationNode",
"fields": [
{
"name": "name",
"type": [
"null",
"string"
],
"doc": "OrganizationNode name"
},
{
"name": "type",
"type": [
"null",
"OrganizationNodeType"
],
"doc": "OrganizationNode Type"
},
{
"name": "level",
"type": [
"null",
"string"
],
"doc": "OrganizationNode Level"
},
{
"name": "key",
"type": [
"null",
"string"
],
"doc": "Unique string identifier for the organization node"
},
{
"name": "organizationNodeId",
"type": [
"null",
"string"
],
"doc": "Six digit unique identifier for the ornagization node"
},
{
"name": "organizationHierarchy",
"type": [
"null",
"string"
],
"doc": "Full organizationNode hierarchy path"
},
{
"name": "features",
"type": [
"null",
"string"
],
"doc": "This contains the feature details associated with tenants."
},
{
"name": "solutionTemplates",
"type": [
"null",
"SolutionTemplate"
],
"doc": "Solution Template associated with the organization node"
},
{
"name": "profiles",
"type": [
"null",
{
"type": "array",
"items": "Profile"
}
],
"doc": "Array of profiles associated with the organization node"
},
{
"name": "children",
"type": [
"null",
{
"type": "array",
"items": "OrganizationNode"
}
],
"doc": "Child organization nodes"
}
]
}
}
]
},
{
"name": "isDefault",
"type": "boolean",
"doc": "Flag only set to true for the default tenant"
},
{
"name": "parent",
"type": {
"type": "record",
"name": "Parent",
"fields": [
{
"name": "tenantId",
"type": "string",
"doc": "Six digit unique tenant identifier for the parent node"
},
{
"name": "name",
"type": "string",
"doc": "Name of the parent node"
}
]
},
"doc": "Parent Organization Node"
}
]
}
]
}
]
}
And the following is my sample JSON data:
{
"eventType": "TENANT_ADDED",
"timestamp": 1442921557056,
"tenantId": "GHIJKL",
"data": {
"name": "bmc",
"type": "TENANT",
"level": "Organization",
"key": "bmc",
"organizationNodeId": "VEKCPC",
"organizationHierarchy": "bmc",
"profiles": [],
"children": [{
"name": "Bangalore",
"type": "DEFAULT",
"level": "Site",
"key": "Bangalore",
"organizationNodeId": "OCNGVJ",
"organizationHierarchy": "bmc/Bangalore",
"features": "Test",
"profiles": [{
"description": "",
"profileId": "MH985X",
"name": "VoiceProfile"
}],
"children": [],
"solutionTemplate": {
"templateId": "FDASGG",
"description": "Solution Template for site Banglore",
"name": "TemplateSiteBanglore"
}
}, {
"name": "Site_Pune",
"type": "DEFAULT",
"level": "Site",
"key": "Site_Pune",
"organizationNodeId": "DUQICJ",
"organizationHierarchy": "bmc/Site_Pune",
"profiles": [],
"children": [],
"solutionTemplate": {
"templateId": "FDASWE",
"description": "Template for site Pune",
"templateName": "Template_Site_Pune"
}
}],
"tenantAttributes": [],
"tenantId": "VEKCPC",
"orgLevels": [{
"name": "Organization",
"position": 1
}, {
"name": "Site",
"position": 2
}, {
"name": "Department",
"position": 3
}, {
"name": "Team",
"position": 4
}],
"loginDomain": "bmc.com",
"parent": {
"tenantId": "HQAYQU",
"name": "Default"
},
"solutionTemplates": {
"templateId": "FDAGSA",
"description": "Template for Tenant",
"templateName": "Template_BMC"
},
"isDefault": false
}
}
While executing my JMeter test to generate the Avro event on a Kafka topic, I am getting an error:
** Error Section **
2020-03-19 09:23:24,965 INFO o.a.j.e.StandardJMeterEngine: Running the test!
2020-03-19 09:23:24,965 INFO o.a.j.s.SampleEvent: List of sample_variables: []
2020-03-19 09:23:24,966 INFO o.a.j.p.j.s.JavaSampler: Created class: com.gslab.pepper.sampler.PepperBoxKafkaSampler. Uses tearDownTest:
2020-03-19 09:23:24,966 INFO o.a.j.g.u.JMeterMenuBar: setRunning(true, local)
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: No enabled thread groups found
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Starting tearDown thread groups
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Starting tearDown ThreadGroup: 1 : CreateTenant
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Starting 1 threads for group CreateTenant.
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Thread will continue on error
2020-03-19 09:23:25,115 INFO o.a.j.t.ThreadGroup: Starting thread group... number=1 threads=1 ramp-up=1 delayedStart=false
2020-03-19 09:23:25,115 INFO o.a.j.t.ThreadGroup: Started thread group number 1
2020-03-19 09:23:25,116 INFO o.a.j.t.JMeterThread: Thread started: CreateTenant 1-1
2020-03-19 09:23:25,530 ERROR o.a.j.t.JMeterThread: Test failed!
org.apache.avro.AvroTypeException: Expected start-union. Got VALUE_STRING
at org.apache.avro.io.JsonDecoder.error(JsonDecoder.java:514) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.io.JsonDecoder.readIndex(JsonDecoder.java:433) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.io.ResolvingDecoder.readIndex(ResolvingDecoder.java:282) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readWithoutConversion(GenericDatumReader.java:178) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.read(GenericDatumReader.java:151) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readField(GenericDatumReader.java:248) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readRecord(GenericDatumReader.java:237) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readWithoutConversion(GenericDatumReader.java:170) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.read(GenericDatumReader.java:151) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.read(GenericDatumReader.java:144) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at com.gslab.pepper.loadgen.impl.AvroSchemaLoadGenerator.nextMessage(AvroSchemaLoadGenerator.java:79) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at com.gslab.pepper.config.avroschema.AvroSchemaConfigElement.iterationStart(AvroSchemaConfigElement.java:67) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.jmeter.control.GenericController.fireIterationStart(GenericController.java:399) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.control.GenericController.fireIterEvents(GenericController.java:391) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.control.GenericController.next(GenericController.java:160) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.control.LoopController.next(LoopController.java:135) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.threads.AbstractThreadGroup.next(AbstractThreadGroup.java:92) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.threads.JMeterThread.run(JMeterThread.java:255) [ApacheJMeter_core.jar:5.2.1]
at java.lang.Thread.run(Thread.java:834) [?:?]
2020-03-19 09:23:25,530 INFO o.a.j.t.JMeterThread: Thread finished: CreateTenant 1-1
2020-03-19 09:23:25,531 INFO o.a.j.e.StandardJMeterEngine: Notifying test listeners of end of test
2020-03-19 09:23:25,531 INFO o.a.j.g.u.JMeterMenuBar: setRunning(false, local)
Can someone please explain why exactly I am getting this error?
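(A note on what the decoder is complaining about: under Avro's JSON encoding, every non-null union value must be wrapped in a single-key JSON object naming the branch type, as the "Unknown union branch" answer further down explains for a similar error. A minimal sketch of how the start of this payload would have to look under that encoding, assuming the fully qualified branch names the Java decoder expects:
{
"eventType": {"com.avaya.mts.TenantMessageType": "TENANT_ADDED"},
"timestamp": 1442921557056,
"tenantId": "GHIJKL",
"data": null
}
A non-null data value would likewise be wrapped as {"com.avaya.mts.TenantData": {...}}, with each nullable string field inside it wrapped as {"string": "..."}.)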
Related
I'm using AWS Schema Registry for Debezium.
In Debezium I set the server name to mysql-db01, so Debezium creates a topic with this server name to record metadata about the server and schema changes.
When I deployed the connector, the following schema appeared in the schema registry:
{
"type": "record",
"name": "SchemaChangeKey",
"namespace": "io.debezium.connector.mysql",
"fields": [
{
"name": "databaseName",
"type": "string"
}
],
"connect.name": "io.debezium.connector.mysql.SchemaChangeKey"
}
Then, immediately afterwards, another version was created:
{
"type": "record",
"name": "SchemaChangeValue",
"namespace": "io.debezium.connector.mysql",
"fields": [
{
"name": "source",
"type": {
"type": "record",
"name": "Source",
"fields": [
{
"name": "version",
"type": "string"
},
{
"name": "connector",
"type": "string"
},
{
"name": "name",
"type": "string"
},
{
"name": "ts_ms",
"type": "long"
},
{
"name": "snapshot",
"type": [
{
"type": "string",
"connect.version": 1,
"connect.parameters": {
"allowed": "true,last,false"
},
"connect.default": "false",
"connect.name": "io.debezium.data.Enum"
},
"null"
],
"default": "false"
},
{
"name": "db",
"type": "string"
},
{
"name": "sequence",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "table",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "server_id",
"type": "long"
},
{
"name": "gtid",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "file",
"type": "string"
},
{
"name": "pos",
"type": "long"
},
{
"name": "row",
"type": "int"
},
{
"name": "thread",
"type": [
"null",
"long"
],
"default": null
},
{
"name": "query",
"type": [
"null",
"string"
],
"default": null
}
],
"connect.name": "io.debezium.connector.mysql.Source"
}
},
{
"name": "databaseName",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "schemaName",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "ddl",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "tableChanges",
"type": {
"type": "array",
"items": {
"type": "record",
"name": "Change",
"namespace": "io.debezium.connector.schema",
"fields": [
{
"name": "type",
"type": "string"
},
{
"name": "id",
"type": "string"
},
{
"name": "table",
"type": {
"type": "record",
"name": "Table",
"fields": [
{
"name": "defaultCharsetName",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "primaryKeyColumnNames",
"type": [
"null",
{
"type": "array",
"items": "string"
}
],
"default": null
},
{
"name": "columns",
"type": {
"type": "array",
"items": {
"type": "record",
"name": "Column",
"fields": [
{
"name": "name",
"type": "string"
},
{
"name": "jdbcType",
"type": "int"
},
{
"name": "nativeType",
"type": [
"null",
"int"
],
"default": null
},
{
"name": "typeName",
"type": "string"
},
{
"name": "typeExpression",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "charsetName",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "length",
"type": [
"null",
"int"
],
"default": null
},
{
"name": "scale",
"type": [
"null",
"int"
],
"default": null
},
{
"name": "position",
"type": "int"
},
{
"name": "optional",
"type": [
"null",
"boolean"
],
"default": null
},
{
"name": "autoIncremented",
"type": [
"null",
"boolean"
],
"default": null
},
{
"name": "generated",
"type": [
"null",
"boolean"
],
"default": null
}
],
"connect.name": "io.debezium.connector.schema.Column"
}
}
}
],
"connect.name": "io.debezium.connector.schema.Table"
}
}
],
"connect.name": "io.debezium.connector.schema.Change"
}
}
}
],
"connect.name": "io.debezium.connector.mysql.SchemaChangeValue"
These two schemas don't match, so the AWS schema registry does not allow the connector to register the second version, even though the second version is the actual schema for the connector.
To work around this, I deleted the schema (in the schema registry), deleted the connector, and re-deployed the connector; then it worked. (A rough sketch of those steps is below.)
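(For anyone reproducing the workaround, a sketch of the steps, assuming the AWS CLI for the Glue Schema Registry and the Kafka Connect REST API on localhost:8083; the schema and connector names here are placeholders, not necessarily what your setup registers:
aws glue delete-schema --schema-id SchemaName=mysql-db01,RegistryName=bhuvi-debezium
curl -X DELETE http://localhost:8083/connectors/mysql-connector
curl -X POST -H "Content-Type: application/json" --data @connector.json http://localhost:8083/connectors
)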
But I'm trying to understand why the schema gets two different versions the very first time.
I have used the following key/value converters on the source and sink connectors to make it work.
"key.converter": "org.apache.kafka.connect.storage.StringConverter",
"key.converter.schemas.enable": "false",
"internal.key.converter": "com.amazonaws.services.schemaregistry.kafkaconnect.AWSKafkaAvroConverter",
"internal.key.converter.schemas.enable": "false",
"internal.value.converter": "com.amazonaws.services.schemaregistry.kafkaconnect.AWSKafkaAvroConverter",
"internal.value.converter.schemas.enable": "false",
"value.converter": "com.amazonaws.services.schemaregistry.kafkaconnect.AWSKafkaAvroConverter",
"value.converter.schemas.enable": "true",
"value.converter.region": "ap-south-1",
"key.converter.schemaAutoRegistrationEnabled": "true",
"value.converter.schemaAutoRegistrationEnabled": "true",
"key.converter.avroRecordType": "GENERIC_RECORD",
"value.converter.avroRecordType": "GENERIC_RECORD",
"key.converter.registry.name": "bhuvi-debezium",
"value.converter.registry.name": "bhuvi-debezium",
I am trying to convert JSON to Avro using 'kafka-avro-console-producer' and publish it to a Kafka topic.
I am able to do that with flat JSON/schemas, but for the schema and JSON given below I am getting an
"org.apache.avro.AvroTypeException: Unknown union branch EventId" error.
Any help would be appreciated.
Schema :
{
"type": "record",
"name": "Envelope",
"namespace": "CoreOLTPEvents.dbo.Event",
"fields": [{
"name": "before",
"type": ["null", {
"type": "record",
"name": "Value",
"fields": [{
"name": "EventId",
"type": "long"
}, {
"name": "CameraId",
"type": ["null", "long"],
"default": null
}, {
"name": "SiteId",
"type": ["null", "long"],
"default": null
}],
"connect.name": "CoreOLTPEvents.dbo.Event.Value"
}],
"default": null
}, {
"name": "after",
"type": ["null", "Value"],
"default": null
}, {
"name": "op",
"type": "string"
}, {
"name": "ts_ms",
"type": ["null", "long"],
"default": null
}],
"connect.name": "CoreOLTPEvents.dbo.Event.Envelope"
}
And Json input is like below :
{
"before": null,
"after": {
"EventId": 12,
"CameraId": 10,
"SiteId": 11974
},
"op": "C",
"ts_ms": null
}
And in my case I can't alter the schema; I can only alter the JSON in such a way that it works.
If you are using the Avro JSON format, the input you have is slightly off. For unions, non-null values need to be wrapped in an object that names the branch type: https://avro.apache.org/docs/current/spec.html#json_encoding
See below for an example which I think should work.
{
"before": null,
"after": {
"CoreOLTPEvents.dbo.Event.Value": {
"EventId": 12,
"CameraId": {
"long": 10
},
"SiteId": {
"long": 11974
}
}
},
"op": "C",
"ts_ms": null
}
Removing "connect.name": "CoreOLTPEvents.dbo.Event.Value" and "connect.name": "CoreOLTPEvents.dbo.Event.Envelope" as The RecordType can only contains {'namespace', 'aliases', 'fields', 'name', 'type', 'doc'} keys.
Could you try with below schema and see if you are able to produce the msg?
{
"type": "record",
"name": "Envelope",
"namespace": "CoreOLTPEvents.dbo.Event",
"fields": [
{
"name": "before",
"type": [
"null",
{
"type": "record",
"name": "Value",
"fields": [
{
"name": "EventId",
"type": "long"
},
{
"name": "CameraId",
"type": [
"null",
"long"
],
"default": "null"
},
{
"name": "SiteId",
"type": [
"null",
"long"
],
"default": "null"
}
]
}
],
"default": null
},
{
"name": "after",
"type": [
"null",
"Value"
],
"default": null
},
{
"name": "op",
"type": "string"
},
{
"name": "ts_ms",
"type": [
"null",
"long"
],
"default": null
}
]
}
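A minimal sketch of how the message could then be produced, assuming a local broker and Schema Registry (the addresses and topic name are placeholders) and the schema saved to envelope.avsc:
kafka-avro-console-producer \
--broker-list localhost:9092 \
--topic CoreOLTPEvents.dbo.Event \
--property schema.registry.url=http://localhost:8081 \
--property value.schema="$(cat envelope.avsc)"
Then paste the union-wrapped JSON shown above as a single line. Note that removing "connect.name" does not change the union branch key: the record's namespace still makes the full name "CoreOLTPEvents.dbo.Event.Value".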
I am creating a Data Fusion pipeline to ingest a CSV file from an S3 bucket, apply Wrangler directives, and store the result in a GCS bucket. The input CSV file has 18 columns; however, the output CSV file has only 8. I suspect this could be due to the CSV encoding format, but I am not sure. What could be the reason here?
Pipeline JSON
{
"name": "aws_fusion_v1",
"description": "Data Pipeline Application",
"artifact": {
"name": "cdap-data-pipeline",
"version": "6.1.2",
"scope": "SYSTEM"
},
"config": {
"resources": {
"memoryMB": 2048,
"virtualCores": 1
},
"driverResources": {
"memoryMB": 2048,
"virtualCores": 1
},
"connections": [
{
"from": "Amazon S3",
"to": "Wrangler"
},
{
"from": "Wrangler",
"to": "GCS2"
},
{
"from": "Argument Setter",
"to": "Amazon S3"
}
],
"comments": [],
"postActions": [],
"properties": {},
"processTimingEnabled": true,
"stageLoggingEnabled": true,
"stages": [
{
"name": "Amazon S3",
"plugin": {
"name": "S3",
"type": "batchsource",
"label": "Amazon S3",
"artifact": {
"name": "amazon-s3-plugins",
"version": "1.11.0",
"scope": "SYSTEM"
},
"properties": {
"format": "text",
"authenticationMethod": "Access Credentials",
"filenameOnly": "false",
"recursive": "false",
"ignoreNonExistingFolders": "false",
"schema": "{\"type\":\"record\",\"name\":\"etlSchemaBody\",\"fields\":[{\"name\":\"body\",\"type\":\"string\"}]}",
"referenceName": "aws_source",
"path": "${input.bucket}",
"accessID": "${input.access_id}",
"accessKey": "${input.access_key}"
}
},
"outputSchema": [
{
"name": "etlSchemaBody",
"schema": "{\"type\":\"record\",\"name\":\"etlSchemaBody\",\"fields\":[{\"name\":\"body\",\"type\":\"string\"}]}"
}
],
"type": "batchsource",
"label": "Amazon S3",
"icon": "icon-s3"
},
{
"name": "Wrangler",
"plugin": {
"name": "Wrangler",
"type": "transform",
"label": "Wrangler",
"artifact": {
"name": "wrangler-transform",
"version": "4.1.5",
"scope": "SYSTEM"
},
"properties": {
"field": "*",
"precondition": "false",
"threshold": "1",
"workspaceId": "804a2995-7c06-4ab2-b342-a9a01aa03a3d",
"schema": "${output.schema}",
"directives": "${directive}"
}
},
"outputSchema": [
{
"name": "etlSchemaBody",
"schema": "${output.schema}"
}
],
"inputSchema": [
{
"name": "Amazon S3",
"schema": "{\"type\":\"record\",\"name\":\"etlSchemaBody\",\"fields\":[{\"name\":\"body\",\"type\":\"string\"}]}"
}
],
"type": "transform",
"label": "Wrangler",
"icon": "icon-DataPreparation"
},
{
"name": "GCS2",
"plugin": {
"name": "GCS",
"type": "batchsink",
"label": "GCS2",
"artifact": {
"name": "google-cloud",
"version": "0.14.2",
"scope": "SYSTEM"
},
"properties": {
"project": "auto-detect",
"suffix": "yyyy-MM-dd-HH-mm",
"format": "csv",
"serviceFilePath": "auto-detect",
"location": "us",
"referenceName": "gcs_sink",
"path": "${output.path}",
"schema": "${output.schema}"
}
},
"outputSchema": [
{
"name": "etlSchemaBody",
"schema": "${output.schema}"
}
],
"inputSchema": [
{
"name": "Wrangler",
"schema": ""
}
],
"type": "batchsink",
"label": "GCS2",
"icon": "fa-plug"
},
{
"name": "Argument Setter",
"plugin": {
"name": "ArgumentSetter",
"type": "action",
"label": "Argument Setter",
"artifact": {
"name": "argument-setter-plugins",
"version": "1.1.1",
"scope": "USER"
},
"properties": {
"method": "GET",
"connectTimeout": "60000",
"readTimeout": "60000",
"numRetries": "0",
"followRedirects": "true",
"url": "${argfile}"
}
},
"outputSchema": [
{
"name": "etlSchemaBody",
"schema": ""
}
],
"type": "action",
"label": "Argument Setter",
"icon": "fa-plug"
}
],
"schedule": "0 * * * *",
"engine": "spark",
"numOfRecordsPreview": 100,
"description": "Data Pipeline Application",
"maxConcurrentRuns": 1
}
}
Edit:
The missing columns in the output file were due to spaces in the column names. But I am facing another issue: in Wrangler, when I pass the directive "parse-as-csv :body ',' false", the output file is empty, but when I pass something like "parse-as-csv :body ',' true", the output file has all the data, without the header, as expected. (See the directive sketch below.)
I want to create a stream from a Kafka topic that monitors a MySQL table. The MySQL table has columns of type decimal(16,4), and I create the stream with this command:
create stream test with (KAFKA_TOPIC='dbServer.Kafka.DailyUdr',VALUE_FORMAT='AVRO');
The stream is created and runs, but the columns of type decimal(16,4) don't appear in the resulting stream.
Source topic value schema:
{
"type": "record",
"name": "Envelope",
"namespace": "dbServer.Kafka.DailyUdr",
"fields": [
{
"name": "before",
"type": [
"null",
{
"type": "record",
"name": "Value",
"fields": [
{
"name": "UserId",
"type": "int"
},
{
"name": "NationalCode",
"type": "string"
},
{
"name": "TotalInputOcted",
"type": "int"
},
{
"name": "TotalOutputOcted",
"type": "int"
},
{
"name": "Date",
"type": "string"
},
{
"name": "Service",
"type": "string"
},
{
"name": "decimalCol",
"type": [
"null",
{
"type": "bytes",
"scale": 4,
"precision": 16,
"connect.version": 1,
"connect.parameters": {
"scale": "4",
"connect.decimal.precision": "16"
},
"connect.name": "org.apache.kafka.connect.data.Decimal",
"logicalType": "decimal"
}
],
"default": null
}
],
"connect.name": "dbServer.Kafka.DailyUdr.Value"
}
],
"default": null
},
{
"name": "after",
"type": [
"null",
"Value"
],
"default": null
},
{
"name": "source",
"type": {
"type": "record",
"name": "Source",
"namespace": "io.debezium.connector.mysql",
"fields": [
{
"name": "version",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "connector",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "name",
"type": "string"
},
{
"name": "server_id",
"type": "long"
},
{
"name": "ts_sec",
"type": "long"
},
{
"name": "gtid",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "file",
"type": "string"
},
{
"name": "pos",
"type": "long"
},
{
"name": "row",
"type": "int"
},
{
"name": "snapshot",
"type": [
{
"type": "boolean",
"connect.default": false
},
"null"
],
"default": false
},
{
"name": "thread",
"type": [
"null",
"long"
],
"default": null
},
{
"name": "db",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "table",
"type": [
"null",
"string"
],
"default": null
},
{
"name": "query",
"type": [
"null",
"string"
],
"default": null
}
],
"connect.name": "io.debezium.connector.mysql.Source"
}
},
{
"name": "op",
"type": "string"
},
{
"name": "ts_ms",
"type": [
"null",
"long"
],
"default": null
}
],
"connect.name": "dbServer.Kafka.DailyUdr.Envelope"
}
My problem is with the decimalCol column.
KSQL does not yet support the DECIMAL data type.
There is an issue here that you can track and upvote if you think it would be useful.
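In the meantime, one possible workaround (my suggestion, not something from that issue thread) is to have Debezium emit decimals as doubles or strings instead of Connect's binary Decimal type, so the column arrives as a type KSQL can handle. In the MySQL connector config:
"decimal.handling.mode": "double"
Note this changes the value schema registered for the topic, and the "double" mode loses exact decimal precision, so it is only an option if you can re-register the schema.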
I am using the JIRA REST APIs in my application.
I have found the API for getting the metadata for creating a JIRA issue, but that API doesn't return the default values of the fields. For example:
This is the request:
http://kelpie9:8081/rest/api/latest/issue/createmeta?projectKeys=QA&issuetypeNames=Bug&expand=project.issuetypes.fields
The default value of the priority field is set to "Major" and the description of priority is also customized, but the response from the API is:
{
"expand": "projects",
"projects": [
{
"expand": "issuetypes",
"self": "http://kelpie9:8081/rest/api/2/project/QA",
"id": "10010",
"key": "QA",
"name": "QA",
"avatarUrls": {
"16x16": "http://kelpie9:8081/secure/projectavatar?size=small&pid=10010&avatarId=10011",
"48x48": "http://kelpie9:8081/secure/projectavatar?pid=10010&avatarId=10011"
},
"issuetypes": [
{
"expand": "fields",
"self": "http://kelpie9:8081/rest/api/2/issuetype/1",
"id": 1,
"name": "Bug",
"iconUrl": "http://kelpie9:8081/images/icons/bug.gif",
"fields": {
"summary": {
"required": true,
"schema": {
"type": "string",
"system": "summary"
},
"operations": [
"set"
]
},
"timetracking": {
"required": false,
"operations": [ ]
},
"issuetype": {
"required": true,
"schema": {
"type": "issuetype",
"system": "issuetype"
},
"operations": [ ],
"allowedValues": [
{
"id": "1",
"name": "Bug",
"description": "A problem which impairs or prevents the functions of the product.",
"iconUrl": "http://kelpie9:8081/images/icons/bug.gif"
}
]
},
"priority": {
"required": false,
"schema": {
"type": "priority",
"system": "priority"
},
"name": "Priority",
"operations": [
"set"
],
"allowedValues": [
{
"self": "http://172.19.30.101:18080/rest/api/2/priority/1",
"iconUrl": "http://172.19.30.101:18080/images/icons/priority_blocker.gif",
"name": "Blocker",
"id": "1"
},
{
"self": "http://172.19.30.101:18080/rest/api/2/priority/2",
"iconUrl": "http://172.19.30.101:18080/images/icons/priority_critical.gif",
"name": "Critical",
"id": "2"
},
{
"self": "http://172.19.30.101:18080/rest/api/2/priority/3",
"iconUrl": "http://172.19.30.101:18080/images/icons/priority_major.gif",
"name": "Major",
"id": "3"
},
{
"self": "http://172.19.30.101:18080/rest/api/2/priority/4",
"iconUrl": "http://172.19.30.101:18080/images/icons/priority_minor.gif",
"name": "Minor",
"id": "4"
},
{
"self": "http://172.19.30.101:18080/rest/api/2/priority/5",
"iconUrl": "http://172.19.30.101:18080/images/icons/priority_trivial.gif",
"name": "Trivial",
"id": "5"
}
]
},
"customfield_10080": {
"required": false,
"schema": {
"type": "array",
"items": "string",
"custom": "com.atlassian.jira.plugin.system.customfieldtypes:labels",
"customId": 10080
},
"operations": [ ]
},
"customfield_10010": {
"required": false,
"schema": {
"type": "array",
"items": "string",
"custom": "com.atlassian.jira.plugin.system.customfieldtypes:labels",
"customId": 10010
},
"operations": [ ]
},
"customfield_10071": {
"required": false,
"schema": {
"type": "array",
"items": "string",
"custom": "com.atlassian.jira.plugin.system.customfieldtypes:textfield",
"customId": 10071
},
"operations": [ ]
}
}
}
]
}
]
}
There is nothing like a default value or a description in the priority field. How can I get those values?