Search code examples
sql, sql-server, azure, azure-data-factory

How to set Datatype in Additional Column in ADF


I need to set datatype for Additional Column with Dynamic Content in Sink in ADF

By default it's taking nvarchar(max) from the JSON object, but I need bigint

Below is the JSON object which creates the table with the additional column

{
    "source": {
        "type": "SqlServerSource",
         "additionalColumns": [
            {
                "name": "ApplicationId",
                "value": 3604509277250831000
            }
        ],
        "sqlReaderQuery": "SELECT * from Table A",
        "queryTimeout": "02:00:00",
        "isolationLevel": "ReadUncommitted",
        "partitionOption": "None"
    },
    "sink": {
        "type": "AzureSqlSink",
        "writeBehavior": "insert",
        "sqlWriterUseTableLock": false,
        "tableOption": "autoCreate",
        "disableMetricsCollection": false
    },
    "enableStaging": false,
    "translator": {
        "type": "TabularTranslator",
        "typeConversion": true,
        "typeConversionSettings": {
            "allowDataTruncation": true,
            "treatBooleanAsNumber": false
        }
    }
}

ADF Configuration

enter image description here

After create table Database - column with datatype

enter image description here

If I convert Dynamic content into Int

@int(pipeline().parameters.application.applicationId)

Then getting below warning

enter image description here

Please let me know how can I set Datatype in ADF


Solution

  • I also tried the same and getting same result.

    enter image description here

    By default its taking nvarchar(max) from Json obj but I need bigInt

    To resolve this, after you add the additional column in your source dataset, go to the Mapping tab and click on Import schema. It will import the schema of the source and also show the additional column in the schema. You then have to change the type of that column to Int64, as shown in the image below. In the image you can see "additional" after the name, which means it is an additional column.

    enter image description here

    After this, run your pipeline. It will create the additional column with the data type bigint.

    {
        "name": "pipeline2",
        "properties": {
        "activities": [
        {
        "name": "Copy data1",
        "type": "Copy",
        "dependsOn": [],
        "policy": {
        "timeout": "0.12:00:00",
        "retry": 0,
        "retryIntervalInSeconds": 30,
        "secureOutput": false,
        "secureInput": false
        },
        "userProperties": [],
        "typeProperties": {
        "source": {
        "type": "JsonSource",
        "additionalColumns": [
        {
        "name": "name",
        "value": {
        "value": "@pipeline().parameters.demo.age",
        "type": "Expression"
        }
        }
        ],
        "storeSettings": {
        "type": "AzureBlobFSReadSettings",
        "recursive": true,
        "enablePartitionDiscovery": false
        },
        "formatSettings": {
        "type": "JsonReadSettings"
        }
        },
        "sink": {
        "type": "AzureSqlSink",
        "writeBehavior": "insert",
        "sqlWriterUseTableLock": false,
        "tableOption": "autoCreate",
        "disableMetricsCollection": false
        },
        "enableStaging": false,
        "translator": {
        "type": "TabularTranslator",
        "mappings": [
        {
        "source": {
        "path": "$['taskId']"
        },
        "sink": {
        "name": "taskId",
        "type": "String"
        }
        },
        {
        "source": {
        "path": "$['taskObtainedScore']"
        },
        "sink": {
        "name": "taskObtainedScore",
        "type": "String"
        }
        },
        {
        "source": {
        "path": "$['multiInstance']"
        },
        "sink": {
        "name": "multiInstance",
        "type": "String"
        }
        },
        {
        "source": {
        "path": "$['name']"
        },
        "sink": {
        "name": "name",
        "type": "Int64"
        }
        }
        ],
        "collectionReference": ""
        }
        },
        "inputs": [
        {
        "referenceName": "Json1",
        "type": "DatasetReference"
        }
        ],
        "outputs": [
        {
        "referenceName": "AzureSqlTable1",
        "type": "DatasetReference"
        }
        ]
        }
        ],
        "parameters": {
        "demo": {
        "type": "object",
        "defaultValue": {
        "name": "John",
        "age": 30,
        "isStudent": true
        }
        }
        },
        "annotations": []
        }
    }
    

    OUTPUT:

    enter image description here