We are using ARM templates to deploy function apps, but the slotSetting: true property is not respected, and I cannot find any modern documentation on how to make app settings slot specific.
This is my app settings snippet in my ARM template:
{
"name": "AzureWebJobs.HandleFiscalFrResponse.Disabled",
"value": "1",
"slotSetting": true
}
The setting and the value work, but the slotSetting attribute is silently ignored: no error is shown, it is simply not applied.
What is the correct way to make a function app setting slot specific?
I have reproduced the issue and was able to resolve it; please follow the steps below.
Open VS Code, create a file with a .json extension, and use the code below.
Thanks #patelchandni for the ARM Template code.
My Filename.json
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"functionAppName": {
"type": "string",
"defaultValue": "[format('tar-{0}', uniqueString(resourceGroup().id))]"
},
"storageAccountType": {
"type": "string",
"defaultValue": "Standard_LRS",
"allowedValues": [
"Standard_LRS",
"Standard_GRS",
"Standard_RAGRS"
]
},
"location": {
"type": "string",
"defaultValue": "[resourceGroup().location]"
},
"appInsightsLocation": {
"type": "string",
"defaultValue": "[resourceGroup().location]"
},
"functionWorkerRuntime": {
"type": "string",
"defaultValue": "node",
"allowedValues": [
"dotnet",
"node",
"python",
"java"
]
},
"functionPlanOS": {
"type": "string",
"defaultValue": "Windows",
"allowedValues": [
"Windows",
"Linux"
]
},
"functionAppPlanSku": {
"type": "string",
"defaultValue": "EP1",
"allowedValues": [
"EP1",
"EP2",
"EP3"
]
},
"linuxFxVersion": {
"type": "string",
"defaultValue": "",
"metadata": {
"description": "Only required for Linux app to represent runtime stack in the format of 'runtime|runtimeVersion'. For example: 'python|3.9'"
}
}
},
"variables": {
"hostingPlanName": "[parameters('functionAppName')]",
"applicationInsightsName": "[parameters('functionAppName')]",
"storageAccountName": "[concat(uniquestring(resourceGroup().id), 'azfunctions')]",
"isReserved": "[if(equals(parameters('functionPlanOS'), 'Linux'), true(), false())]",
"slotContentShareName": "[concat(parameters('functionAppName'), '-deployment')]"
},
"resources": [
{
"type": "Microsoft.Storage/storageAccounts",
"apiVersion": "2021-02-01",
"name": "[variables('storageAccountName')]",
"location": "[parameters('location')]",
"sku": {
"name": "[parameters('storageAccountType')]"
},
"kind": "Storage"
},
{
"type": "Microsoft.Web/serverfarms",
"apiVersion": "2021-02-01",
"name": "[variables('hostingPlanName')]",
"location": "[parameters('location')]",
"sku": {
"tier": "ElasticPremium",
"name": "[parameters('functionAppPlanSku')]",
"family": "EP"
},
"properties": {
"maximumElasticWorkerCount": 20,
"reserved": "[variables('isReserved')]"
},
"kind": "elastic"
},
{
"type": "microsoft.insights/components",
"apiVersion": "2020-02-02",
"name": "[variables('applicationInsightsName')]",
"location": "[parameters('appInsightsLocation')]",
"tags": {
"[concat('hidden-link:', resourceId('Microsoft.Web/sites', variables('applicationInsightsName')))]": "Resource"
},
"properties": {
"Application_Type": "web"
},
"kind": "web"
},
{
"type": "Microsoft.Web/sites",
"apiVersion": "2021-02-01",
"name": "[parameters('functionAppName')]",
"location": "[parameters('location')]",
"kind": "[if(variables('isReserved'), 'functionapp,linux', 'functionapp')]",
"dependsOn": [
"[resourceId('Microsoft.Web/serverfarms', variables('hostingPlanName'))]",
"[resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName'))]",
"[resourceId('Microsoft.Insights/components', variables('applicationInsightsName'))]"
],
"properties": {
"reserved": "[variables('isReserved')]",
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('hostingPlanName'))]",
"siteConfig": {
"linuxFxVersion": "[if(variables('isReserved'), parameters('linuxFxVersion'), json('null'))]",
"appSettings": [
{
"name": "APPINSIGHTS_INSTRUMENTATIONKEY",
"value": "[reference(resourceId('microsoft.insights/components', variables('applicationInsightsName')), '2015-05-01').InstrumentationKey]"
},
{
"name": "AzureWebJobsStorage",
"value": "[concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';EndpointSuffix=', environment().suffixes.storage, ';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName')), '2019-06-01').keys[0].value)]"
},
{
"name": "WEBSITE_CONTENTAZUREFILECONNECTIONSTRING",
"value": "[concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';EndpointSuffix=', environment().suffixes.storage, ';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName')), '2019-06-01').keys[0].value)]"
},
{
"name": "WEBSITE_CONTENTSHARE",
"value": "[toLower(parameters('functionAppName'))]"
},
{
"name": "FUNCTIONS_EXTENSION_VERSION",
"value": "~4"
},
{
"name": "FUNCTIONS_WORKER_RUNTIME",
"value": "[parameters('functionWorkerRuntime')]"
},
{
"name": "WEBSITE_NODE_DEFAULT_VERSION",
"value": "~14"
}
]
}
}
},
{
"type": "Microsoft.Web/sites/slots",
"apiVersion": "2021-02-01",
"name": "[concat(parameters('functionAppName'), '/deployment')]",
"kind": "[if(variables('isReserved'), 'functionapp,linux', 'functionapp')]",
"location": "[parameters('location')]",
"dependsOn": [
"[resourceId('Microsoft.Web/sites', parameters('functionAppName'))]"
],
"properties": {
"reserved": "[variables('isReserved')]",
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('hostingPlanName'))]",
"siteConfig": {
"linuxFxVersion": "[if(variables('isReserved'), parameters('linuxFxVersion'), json('null'))]",
"appSettings": [
{
"name": "APPINSIGHTS_INSTRUMENTATIONKEY",
"value": "[reference(resourceId('microsoft.insights/components', variables('applicationInsightsName')), '2015-05-01').InstrumentationKey]"
},
{
"name": "AzureWebJobsStorage",
"value": "[concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';EndpointSuffix=', environment().suffixes.storage, ';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName')), '2019-06-01').keys[0].value)]"
},
{
"name": "WEBSITE_CONTENTAZUREFILECONNECTIONSTRING",
"value": "[concat('DefaultEndpointsProtocol=https;AccountName=', variables('storageAccountName'), ';EndpointSuffix=', environment().suffixes.storage, ';AccountKey=',listKeys(resourceId('Microsoft.Storage/storageAccounts', variables('storageAccountName')), '2019-06-01').keys[0].value)]"
},
{
"name": "WEBSITE_CONTENTSHARE",
"value": "[variables('slotContentShareName')]"
},
{
"name": "FUNCTIONS_EXTENSION_VERSION",
"value": "~4"
},
{
"name": "FUNCTIONS_WORKER_RUNTIME",
"value": "[parameters('functionWorkerRuntime')]"
},
{
"name": "WEBSITE_NODE_DEFAULT_VERSION",
"value": "~14"
}
]
}
}
}
]
}
Next, create the template parameter file.
Use the below code in FileName.parameters.json:
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentParameters.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"functionAppName": {
"value": "zapper01"
},
"storageAccountType": {
"value": "Standard_LRS"
},
"location": {
"value": "EastUS"
},
"appInsightsLocation": {
"value": "EastUS"
},
"functionWorkerRuntime": {
"value": "node"
},
"functionPlanOS": {
"value": "Windows"
},
"functionAppPlanSku": {
"value": "EP1"
},
"linuxFxVersion": {
"value": "3.9"
}
}
}
To sign in to Azure, run the Azure CLI command:
az login
Set the subscription:
az account set --subscription "Subscription ID xxxxxx-xxxxxxx-xxxxxxx-xxxxx"
Deploy the template. Note that New-AzResourceGroupDeployment is an Az PowerShell cmdlet, so you also need to be signed in with the Az PowerShell module (for example with Connect-AzAccount):
New-AzResourceGroupDeployment -ResourceGroupName "ResourceGroupName" -TemplateFile "FileName.json" -TemplateParameterFile "Filename.parameters.json"
After execution you will see the deployment result in the PowerShell console.
After the deployment completes, open the function app in the Azure portal and select Deployment slots to see the new slot.
Update
I have deployed the below code via Custom deployment in the Azure portal.
Thanks, #seligj95 for the ARM Template code.
{
"$schema": "https://schema.management.azure.com/schemas/2019-04-01/deploymentTemplate.json#",
"contentVersion": "1.0.0.0",
"parameters": {
"baseResourceName": {
"type": "string",
"metadata": {
"description": "Name of the resource"
},
"maxLength": 15
},
"appSettingName": {
"type": "string",
"metadata": {
"description": "Name of the app setting"
},
"maxLength": 24
},
"environments": {
"defaultValue": [
"Dev",
"QA",
"UAT",
"Preview"
],
"type": "array",
"metadata": {
"description": "Array with the names for the environment slots"
},
"maxLength": 19
},
"location": {
"type": "string",
"defaultValue": "[resourceGroup().location]",
"metadata": {
"description": "Location for all resources."
}
}
},
"variables": {
"standardPlanMaxAdditionalSlots": 4,
"webAppPortalName": "[concat(parameters('baseResourceName'), 'Portal')]",
"appServicePlanName": "[concat('AppServicePlan-', parameters('baseResourceName'))]",
"stickyAppSettingName": "[concat(parameters('appSettingName'), '-sticky')]"
},
"resources": [
{
"apiVersion": "2020-06-01",
"type": "Microsoft.Web/serverfarms",
"kind": "app",
"name": "[variables('appServicePlanName')]",
"location": "[parameters('location')]",
"comments": "This app service plan is used for the web app and slots.",
"tags": {
"displayName": "AppServicePlan"
},
"properties": { },
"sku": {
"name": "[if(lessOrEquals(length(parameters('environments')), variables('standardPlanMaxAdditionalSlots')), 'S1', 'P1')]"
}
},
{
"apiVersion": "2020-06-01",
"type": "Microsoft.Web/sites",
"kind": "app",
"name": "[variables('webAppPortalName')]",
"location": "[parameters('location')]",
"comments": "This is the web app, also the default 'nameless' slot.",
"tags": {
"displayName": "WebApp"
},
"properties": {
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('appServicePlanName'))]",
"siteConfig": {
"appSettings": [
{
"name": "[parameters('appSettingName')]",
"value": "value"
},
{
"name": "[variables('stickyAppSettingName')]",
"value": "value"
}
]
}
},
"dependsOn": [
"[resourceId('Microsoft.Web/serverfarms', variables('appServicePlanName'))]"
]
},
{
"apiVersion": "2020-06-01",
"type": "Microsoft.Web/sites/slots",
"name": "[concat(variables('webAppPortalName'), '/', parameters('environments')[copyIndex()])]",
"kind": "app",
"location": "[parameters('location')]",
"comments": "This specifies the web app slots.",
"tags": {
"displayName": "WebAppSlots"
},
"properties": {
"serverFarmId": "[resourceId('Microsoft.Web/serverfarms', variables('appServicePlanName'))]"
},
"dependsOn": [
"[resourceId('Microsoft.Web/Sites', variables('webAppPortalName'))]"
],
"copy": {
"name": "webPortalSlot",
"count": "[length(parameters('environments'))]"
}
},
{
"apiVersion": "2020-06-01",
"name": "[concat(variables('webAppPortalName'), '/slotconfignames')]",
"type": "Microsoft.Web/sites/config",
"comments": "This specifies the sticky (slot setting) application settings.",
"dependsOn": [
"[resourceId('Microsoft.Web/Sites', variables('webAppPortalName'))]"
],
"properties": {
"appSettingNames": [
"[variables('stickyAppSettingName')]"
]
}
}
]
}
In this ARM template, the second app setting (the one with the '-sticky' suffix) is listed under the slotConfigNames config resource, so after deployment it is a slot-specific (sticky) setting.
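To tie this back to the original question: the slotSetting: true flag inside siteConfig.appSettings is not part of the Microsoft.Web/sites ARM schema (which is why it is silently ignored); the sticky flag is instead declared through a separate slotConfigNames config resource, as in the template above. Below is a minimal sketch of that resource for the setting from the question, assuming the function app from the first template (parameters('functionAppName')); the API version shown is only illustrative.
{
  "type": "Microsoft.Web/sites/config",
  "apiVersion": "2021-02-01",
  "name": "[concat(parameters('functionAppName'), '/slotconfignames')]",
  "dependsOn": [
    "[resourceId('Microsoft.Web/sites', parameters('functionAppName'))]"
  ],
  "properties": {
    "appSettingNames": [
      "AzureWebJobs.HandleFiscalFrResponse.Disabled"
    ]
  }
}
The setting itself is still defined as a plain name/value pair in the app's (or slot's) appSettings; listing its name under appSettingNames is what pins it to the slot so it does not move during a swap.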
I have the following schema (.avsc file):
{
"namespace": "com.avaya.mts",
"type": "record",
"name": "MultiTenancyOrgDataMessage",
"fields": [
{
"name": "eventType",
"type": [
"null",
{
"type": "enum",
"name": "TenantMessageType",
"symbols": [
"TENANT_ADDED",
"TENANT_UPDATED",
"TENANT_DELETED"
]
}
],
"doc": "Event type"
},
{
"name": "timestamp",
"type": "long",
"doc": "Timestamp of the operation"
},
{
"name": "tenantId",
"type": "string",
"doc": "Six digit unique tenant identifier"
},
{
"name": "data",
"type": [
"null",
{
"type": "record",
"name": "TenantData",
"fields": [
{
"name": "name",
"type": "string",
"doc": "Tenant name"
},
{
"name": "tenantId",
"type": "string",
"doc": "Six digit unique tenant identifier"
},
{
"name": "loginDomain",
"type": "string",
"doc": "Login domain name"
},
{
"name": "level",
"type": [
"null",
"string"
],
"doc": "OrganizationNode Level"
},
{
"name": "key",
"type": [
"null",
"string"
],
"doc": "String tenant identifier"
},
{
"name": "organizationNodeId",
"type": [
"null",
"string"
],
"doc": "Six digit unique orgnaization node identifier. Same as tenantId"
},
{
"name": "organizationHierarchy",
"type": [
"null",
"string"
],
"doc": "Full hierarchy path of the orgnaization node"
},
{
"name": "features",
"type": [
"null",
"string"
]
},
{
"name": "type",
"type": [
"null",
{
"type": "enum",
"name": "OrganizationNodeType",
"symbols": [
"TENANT",
"DEFAULT"
]
}
],
"doc": "Organization node type to differentiate between tenant and tenant hierarchy node"
},
{
"name": "orgLevels",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "OrgLevel",
"fields": [
{
"name": "name",
"type": "string"
},
{
"name": "position",
"type": "int"
}
]
}
}
],
"doc": "Name of the organization node level"
},
{
"name": "solutionTemplates",
"type": [
"null",
{
"type": "record",
"name": "SolutionTemplate",
"fields": [
{
"name": "templateId",
"type": "string",
"doc": "Six digit unique solution template identifier"
},
{
"name": "name",
"type": "string",
"doc": "Solution template name"
},
{
"name": "description",
"type": [
"null",
"string"
],
"doc": "Solution template description"
}
]
}
],
"doc": "Solution template associated with the organization node"
},
{
"name": "profiles",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "Profile",
"fields": [
{
"name": "profileId",
"type": "string",
"doc": "Six digit unique profile identifier"
},
{
"name": "name",
"type": "string",
"doc": "Profile name"
},
{
"name": "description",
"type": [
"null",
"string"
],
"doc": "Profile description"
}
]
}
}
],
"doc": "Array of the profiles associated with organization node"
},
{
"name": "tenantAttributes",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "TenantAttribute",
"fields": [
{
"name": "name",
"type": "string",
"doc": "Tenant attribute name"
},
{
"name": "value",
"type": "string",
"doc": "Tenant attribute value"
}
]
}
}
],
"doc": "Array of tenant attributes"
},
{
"name": "children",
"type": [
"null",
{
"type": "array",
"items": {
"type": "record",
"name": "OrganizationNode",
"fields": [
{
"name": "name",
"type": [
"null",
"string"
],
"doc": "OrganizationNode name"
},
{
"name": "type",
"type": [
"null",
"OrganizationNodeType"
],
"doc": "OrganizationNode Type"
},
{
"name": "level",
"type": [
"null",
"string"
],
"doc": "OrganizationNode Level"
},
{
"name": "key",
"type": [
"null",
"string"
],
"doc": "Unique string identifier for the organization node"
},
{
"name": "organizationNodeId",
"type": [
"null",
"string"
],
"doc": "Six digit unique identifier for the ornagization node"
},
{
"name": "organizationHierarchy",
"type": [
"null",
"string"
],
"doc": "Full organizationNode hierarchy path"
},
{
"name": "features",
"type": [
"null",
"string"
],
"doc": "This contains the feature details associated with tenants."
},
{
"name": "solutionTemplates",
"type": [
"null",
"SolutionTemplate"
],
"doc": "Solution Template associated with the organization node"
},
{
"name": "profiles",
"type": [
"null",
{
"type": "array",
"items": "Profile"
}
],
"doc": "Array of profiles associated with the organization node"
},
{
"name": "children",
"type": [
"null",
{
"type": "array",
"items": "OrganizationNode"
}
],
"doc": "Child organization nodes"
}
]
}
}
]
},
{
"name": "isDefault",
"type": "boolean",
"doc": "Flag only set to true for the default tenant"
},
{
"name": "parent",
"type": {
"type": "record",
"name": "Parent",
"fields": [
{
"name": "tenantId",
"type": "string",
"doc": "Six digit unique tenant identifier for the parent node"
},
{
"name": "name",
"type": "string",
"doc": "Name of the parent node"
}
]
},
"doc": "Parent Organization Node"
}
]
}
]
}
]
}
And the following is my sample JSON data:
{
"eventType": "TENANT_ADDED",
"timestamp": 1442921557056,
"tenantId": "GHIJKL",
"data": {
"name": "bmc",
"type": "TENANT",
"level": "Organization",
"key": "bmc",
"organizationNodeId": "VEKCPC",
"organizationHierarchy": "bmc",
"profiles": [],
"children": [{
"name": "Bangalore",
"type": "DEFAULT",
"level": "Site",
"key": "Bangalore",
"organizationNodeId": "OCNGVJ",
"organizationHierarchy": "bmc/Bangalore",
"features": "Test",
"profiles": [{
"description": "",
"profileId": "MH985X",
"name": "VoiceProfile"
}],
"children": [],
"solutionTemplate": {
"templateId": "FDASGG",
"description": "Solution Template for site Banglore",
"name": "TemplateSiteBanglore"
}
}, {
"name": "Site_Pune",
"type": "DEFAULT",
"level": "Site",
"key": "Site_Pune",
"organizationNodeId": "DUQICJ",
"organizationHierarchy": "bmc/Site_Pune",
"profiles": [],
"children": [],
"solutionTemplate": {
"templateId": "FDASWE",
"description": "Template for site Pune",
"templateName": "Template_Site_Pune"
}
}],
"tenantAttributes": [],
"tenantId": "VEKCPC",
"orgLevels": [{
"name": "Organization",
"position": 1
}, {
"name": "Site",
"position": 2
}, {
"name": "Department",
"position": 3
}, {
"name": "Team",
"position": 4
}],
"loginDomain": "bmc.com",
"parent": {
"tenantId": "HQAYQU",
"name": "Default"
},
"solutionTemplates": {
"templateId": "FDAGSA",
"description": "Template for Tenant",
"templateName": "Template_BMC"
},
"isDefault": false
}
}
While executing my JMeter test to generate the Avro event on the Kafka topic, I am getting an error.
** Error Section **
2020-03-19 09:23:24,965 INFO o.a.j.e.StandardJMeterEngine: Running the test!
2020-03-19 09:23:24,965 INFO o.a.j.s.SampleEvent: List of sample_variables: []
2020-03-19 09:23:24,966 INFO o.a.j.p.j.s.JavaSampler: Created class: com.gslab.pepper.sampler.PepperBoxKafkaSampler. Uses tearDownTest:
2020-03-19 09:23:24,966 INFO o.a.j.g.u.JMeterMenuBar: setRunning(true, local)
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: No enabled thread groups found
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Starting tearDown thread groups
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Starting tearDown ThreadGroup: 1 : CreateTenant
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Starting 1 threads for group CreateTenant.
2020-03-19 09:23:25,114 INFO o.a.j.e.StandardJMeterEngine: Thread will continue on error
2020-03-19 09:23:25,115 INFO o.a.j.t.ThreadGroup: Starting thread group... number=1 threads=1 ramp-up=1 delayedStart=false
2020-03-19 09:23:25,115 INFO o.a.j.t.ThreadGroup: Started thread group number 1
2020-03-19 09:23:25,116 INFO o.a.j.t.JMeterThread: Thread started: CreateTenant 1-1
2020-03-19 09:23:25,530 ERROR o.a.j.t.JMeterThread: Test failed!
org.apache.avro.AvroTypeException: Expected start-union. Got VALUE_STRING
at org.apache.avro.io.JsonDecoder.error(JsonDecoder.java:514) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.io.JsonDecoder.readIndex(JsonDecoder.java:433) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.io.ResolvingDecoder.readIndex(ResolvingDecoder.java:282) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readWithoutConversion(GenericDatumReader.java:178) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.read(GenericDatumReader.java:151) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readField(GenericDatumReader.java:248) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readRecord(GenericDatumReader.java:237) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.readWithoutConversion(GenericDatumReader.java:170) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.read(GenericDatumReader.java:151) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.avro.generic.GenericDatumReader.read(GenericDatumReader.java:144) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at com.gslab.pepper.loadgen.impl.AvroSchemaLoadGenerator.nextMessage(AvroSchemaLoadGenerator.java:79) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at com.gslab.pepper.config.avroschema.AvroSchemaConfigElement.iterationStart(AvroSchemaConfigElement.java:67) ~[pepper-box-0.0.1-SNAPSHOT.jar:?]
at org.apache.jmeter.control.GenericController.fireIterationStart(GenericController.java:399) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.control.GenericController.fireIterEvents(GenericController.java:391) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.control.GenericController.next(GenericController.java:160) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.control.LoopController.next(LoopController.java:135) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.threads.AbstractThreadGroup.next(AbstractThreadGroup.java:92) ~[ApacheJMeter_core.jar:5.2.1]
at org.apache.jmeter.threads.JMeterThread.run(JMeterThread.java:255) [ApacheJMeter_core.jar:5.2.1]
at java.lang.Thread.run(Thread.java:834) [?:?]
2020-03-19 09:23:25,530 INFO o.a.j.t.JMeterThread: Thread finished: CreateTenant 1-1
2020-03-19 09:23:25,531 INFO o.a.j.e.StandardJMeterEngine: Notifying test listeners of end of test
2020-03-19 09:23:25,531 INFO o.a.j.g.u.JMeterMenuBar: setRunning(false, local)
Can someone please explain why exactly I am getting this error?
I have tried every combination of data types for my data, but each time my Data Factory pipeline gives me this error:
{
"errorCode": "2200",
"message": "ErrorCode=UserErrorColumnNameNotAllowNull,'Type=Microsoft.DataTransfer.Common.Shared.HybridDeliveryException,Message=Empty or Null string found in Column Name 2. Please make sure column name not null and try again.,Source=Microsoft.DataTransfer.Common,'",
"failureType": "UserError",
"target": "xxx",
"details": []
}
My Copy data source code is something like this:
{
"name": "xxx",
"description": "uuu",
"type": "Copy",
"dependsOn": [],
"policy": {
"timeout": "7.00:00:00",
"retry": 0,
"retryIntervalInSeconds": 30,
"secureOutput": false,
"secureInput": false
},
"userProperties": [],
"typeProperties": {
"source": {
"type": "DelimitedTextSource",
"storeSettings": {
"type": "AzureBlobStorageReadSettings",
"recursive": true,
"wildcardFileName": "*"
},
"formatSettings": {
"type": "DelimitedTextReadSettings"
}
},
"sink": {
"type": "AzureSqlSink"
},
"enableStaging": false,
"translator": {
"type": "TabularTranslator",
"mappings": [
{
"source": {
"name": "populationId",
"type": "Guid"
},
"sink": {
"name": "PopulationID",
"type": "String"
}
},
{
"source": {
"name": "inputTime",
"type": "DateTime"
},
"sink": {
"name": "inputTime",
"type": "DateTime"
}
},
{
"source": {
"name": "inputCount",
"type": "Decimal"
},
"sink": {
"name": "inputCount",
"type": "Decimal"
}
},
{
"source": {
"name": "inputBiomass",
"type": "Decimal"
},
"sink": {
"name": "inputBiomass",
"type": "Decimal"
}
},
{
"source": {
"name": "inputNumber",
"type": "Decimal"
},
"sink": {
"name": "inputNumber",
"type": "Decimal"
}
},
{
"source": {
"name": "utcOffset",
"type": "String"
},
"sink": {
"name": "utcOffset",
"type": "Int32"
}
},
{
"source": {
"name": "fishGroupName",
"type": "String"
},
"sink": {
"name": "fishgroupname",
"type": "String"
}
},
{
"source": {
"name": "yearClass",
"type": "String"
},
"sink": {
"name": "yearclass",
"type": "String"
}
}
]
}
},
"inputs": [
{
"referenceName": "DelimitedTextFTDimensions",
"type": "DatasetReference"
}
],
"outputs": [
{
"referenceName": "AzureSqlTable1",
"type": "DatasetReference"
}
]
}
Can anyone please help me understand the issue? Some blogs suggest using treatnullasempty, but I am not allowed to modify the JSON. Is there a way to do that?
I suggest using a Data Flow with a Derived Column transformation; a Derived Column lets you build an expression that replaces the null column.
For example, a Derived Column that returns 'dd' when Column_2 is null:
iifNull(Column_2,'dd')
Then map the column in the sink.
Reference: Data transformation expressions in mapping data flow
Hope this helps.
Fixed it. It was an easy fix: one of my columns in the destination table was marked as NOT NULL; I changed it to allow NULL and it worked.
I am trying to insert data with the Kafka JDBC Sink connector, but it is returning this exception:
org.apache.kafka.connect.errors.DataException: Invalid null value for required INT64 field
The records have the following schema:
[
{
"schema": {
"type": "struct",
"fields": [
{
"type": "int64",
"field": "ID"
},
{
"type": "int64",
"field": "TENANT_ID"
},
{
"type": "string",
"field": "ITEM"
},
{
"type": "int64",
"field": "TIPO"
},
{
"type": "int64",
"field": "BUSINESS_CONCEPT"
},
{
"type": "string",
"field": "ETIQUETA"
},
{
"type": "string",
"field": "VALOR"
},
{
"type": "string",
"field": "GG_T_TYPE"
},
{
"type": "string",
"field": "GG_T_TIMESTAMP"
},
{
"type": "string",
"field": "TD_T_TIMESTAMP"
},
{
"type": "string",
"field": "POS"
}
]
},
"payload": {
"ID": 298457,
"TENANT_ID": 83,
"ITEM": "0-0-0",
"TIPO": 4,
"BUSINESS_CONCEPT": null,
"ETIQUETA": "¿Cuándo ha ocurrido?",
"VALOR": "2019-05-31T10:33:00Z",
"GG_T_TYPE": "I",
"GG_T_TIMESTAMP": "2019-05-31 14:35:19.002221",
"TD_T_TIMESTAMP": "2019-06-05T10:46:55.0106",
"POS": "00000000530096832544"
}
}
]
As you can see, the value BUSINESS_CONCEPT can be null. It is the only null value, so I suppose the exception is due to that field. How could I make the sink insert the value as null?
You need to change the definition of
{
"type": "int64",
"field": "BUSINESS_CONCEPT"
}
to
{
"type": "int64",
"optional": true,
"field": "BUSINESS_CONCEPT"
}
so that BUSINESS_CONCEPT is treated as an optional (nullable) field. The records here use Kafka Connect's JSON schema envelope, and in that format nullability is declared with "optional": true; an Avro-style union such as ["null", "int64"] is not valid in this schema format.
I have been struggling with this issue for quite some time. I am working with AvroProducer (confluent-kafka) and getting an error related to the schema I defined.
Here is the complete stack trace of the issue I am getting:
raise AvroTypeException(self.writer_schema, datum)
avro.io.AvroTypeException: The datum {'totalDifficulty': 2726165051, 'stateRoot': '0xf09bd6730b3ae7f5728836564837d7f776a8f7333628c8b84cb57d7c6d48ebba', 'sha3Uncles': '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347', 'size': 538, 'logs': [], 'gasLimit': 8000000, 'mixHash': '0x410b2b19519be16496727c93515f399072ffecf06defe4913d00eb4d10bb7351', 'logsBloom': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000', 'nonce': '0x18dc6c0d30839c91', 'proofOfAuthorityData': '0xd883010817846765746888676f312e31302e34856c696e7578', 'number': 5414, 'timestamp': 1552577641, 'difficulty': 589091, 'gasUsed': 0, 'miner': '0x48FA5EBc2f0D82B5D52faAe624Fa2426998ab492', 'hash': '0x71259991acb407a85befa8b3c5df26a94a11a6c08f92f3e3b7c9c0e8e1f5916d', 'transactionsRoot': '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', 'receiptsRoot': '0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421', 'transactions': [], 'parentHash': '0x9f0c25eeab86fc144296cb034c94857beed331936016d60c0986a35ac07d9c68', 'uncles': []} is not an example of the schema {
"type": "record",
"name": "value",
"namespace": "exporter.value.opsnetBlock",
"fields": [
{
"type": "int",
"name": "difficulty"
},
{
"type": "string",
"name": "proofOfAuthorityData"
},
{
"type": "int",
"name": "gasLimit"
},
{
"type": "int",
"name": "gasUsed"
},
{
"type": "string",
"name": "hash"
},
{
"type": "string",
"name": "logsBloom"
},
{
"type": "int",
"name": "size"
},
{
"type": "string",
"name": "miner"
},
{
"type": "string",
"name": "mixHash"
},
{
"type": "string",
"name": "nonce"
},
{
"type": "int",
"name": "number"
},
{
"type": "string",
"name": "parentHash"
},
{
"type": "string",
"name": "receiptsRoot"
},
{
"type": "string",
"name": "sha3Uncles"
},
{
"type": "string",
"name": "stateRoot"
},
{
"type": "int",
"name": "timestamp"
},
{
"type": "int",
"name": "totalDifficulty"
},
{
"type": "string",
"name": "transactionsRoot"
},
{
"type": {
"type": "array",
"items": "string"
},
"name": "transactions"
},
{
"type": {
"type": "array",
"items": "string"
},
"name": "uncles"
},
{
"type": {
"type": "array",
"items": {
"type": "record",
"name": "Child",
"namespace": "exporter.value.opsnetBlock",
"fields": [
{
"type": "string",
"name": "address"
},
{
"type": "string",
"name": "blockHash"
},
{
"type": "int",
"name": "blockNumber"
},
{
"type": "string",
"name": "data"
},
{
"type": "int",
"name": "logIndex"
},
{
"type": "boolean",
"name": "removed"
},
{
"type": {
"type": "array",
"items": "string"
},
"name": "topics"
},
{
"type": "string",
"name": "transactionHash"
},
{
"type": "int",
"name": "transactionIndex"
}
]
}
},
"name": "logs"
}
]
}
Can anybody please tell me where I am going wrong here?
Thanks in advance.