SQL Server: How to set the data type of an additional column in ADF

Asked by r7xajy2e on 2023-02-18

I need to set the data type of an additional column (populated with dynamic content) in the sink of an ADF copy activity.

By default it is created as nvarchar(max) from the JSON object, but I need bigint.

Below is the copy activity JSON that creates the table with the additional column:

{
    "source": {
        "type": "SqlServerSource",
         "additionalColumns": [
            {
                "name": "ApplicationId",
                "value": 3604509277250831000
            }
        ],
        "sqlReaderQuery": "SELECT * from Table A",
        "queryTimeout": "02:00:00",
        "isolationLevel": "ReadUncommitted",
        "partitionOption": "None"
    },
    "sink": {
        "type": "AzureSqlSink",
        "writeBehavior": "insert",
        "sqlWriterUseTableLock": false,
        "tableOption": "autoCreate",
        "disableMetricsCollection": false
    },
    "enableStaging": false,
    "translator": {
        "type": "TabularTranslator",
        "typeConversion": true,
        "typeConversionSettings": {
            "allowDataTruncation": true,
            "treatBooleanAsNumber": false
        }
    }
}

ADF Configuration

After the table is created, the database shows the column with data type nvarchar(max):

If I convert the dynamic content to an int:

@int(pipeline().parameters.application.applicationId)
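
For reference, in the copy activity source JSON this attempt corresponds to an additionalColumns entry roughly like the sketch below (dynamic content is written as a value/Expression pair):

    "additionalColumns": [
        {
            "name": "ApplicationId",
            "value": {
                "value": "@int(pipeline().parameters.application.applicationId)",
                "type": "Expression"
            }
        }
    ]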

Then I get a warning.

Please let me know how I can set the data type in ADF.

Answer from sd2nnvve:

I also tried the same thing and got the same result.

"By default it's taking nvarchar(max) from the JSON object, but I need bigint."

To resolve this, after adding the additional column to your source, go to the Mapping tab of the copy activity and click Import schemas. This imports the source schema and also lists the additional column; change that column's type to Int64, as shown in the image below. In the image, the word "additional" after the column name indicates that it is an additional column.

After this, run your pipeline; it will create the additional column with data type bigint. The key piece is the translator mapping for the additional column, sketched below, followed by the full pipeline JSON I used.
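
A minimal sketch of just that translator section, assuming the additional column is named name as in my test pipeline (the other column mappings are omitted here):

    "translator": {
        "type": "TabularTranslator",
        "mappings": [
            {
                "source": {
                    "path": "$['name']"
                },
                "sink": {
                    "name": "name",
                    "type": "Int64"
                }
            }
        ]
    }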

{
    "name": "pipeline2",
    "properties": {
        "activities": [
            {
                "name": "Copy data1",
                "type": "Copy",
                "dependsOn": [],
                "policy": {
                    "timeout": "0.12:00:00",
                    "retry": 0,
                    "retryIntervalInSeconds": 30,
                    "secureOutput": false,
                    "secureInput": false
                },
                "userProperties": [],
                "typeProperties": {
                    "source": {
                        "type": "JsonSource",
                        "additionalColumns": [
                            {
                                "name": "name",
                                "value": {
                                    "value": "@pipeline().parameters.demo.age",
                                    "type": "Expression"
                                }
                            }
                        ],
                        "storeSettings": {
                            "type": "AzureBlobFSReadSettings",
                            "recursive": true,
                            "enablePartitionDiscovery": false
                        },
                        "formatSettings": {
                            "type": "JsonReadSettings"
                        }
                    },
                    "sink": {
                        "type": "AzureSqlSink",
                        "writeBehavior": "insert",
                        "sqlWriterUseTableLock": false,
                        "tableOption": "autoCreate",
                        "disableMetricsCollection": false
                    },
                    "enableStaging": false,
                    "translator": {
                        "type": "TabularTranslator",
                        "mappings": [
                            {
                                "source": {
                                    "path": "$['taskId']"
                                },
                                "sink": {
                                    "name": "taskId",
                                    "type": "String"
                                }
                            },
                            {
                                "source": {
                                    "path": "$['taskObtainedScore']"
                                },
                                "sink": {
                                    "name": "taskObtainedScore",
                                    "type": "String"
                                }
                            },
                            {
                                "source": {
                                    "path": "$['multiInstance']"
                                },
                                "sink": {
                                    "name": "multiInstance",
                                    "type": "String"
                                }
                            },
                            {
                                "source": {
                                    "path": "$['name']"
                                },
                                "sink": {
                                    "name": "name",
                                    "type": "Int64"
                                }
                            }
                        ],
                        "collectionReference": ""
                    }
                },
                "inputs": [
                    {
                        "referenceName": "Json1",
                        "type": "DatasetReference"
                    }
                ],
                "outputs": [
                    {
                        "referenceName": "AzureSqlTable1",
                        "type": "DatasetReference"
                    }
                ]
            }
        ],
        "parameters": {
            "demo": {
                "type": "object",
                "defaultValue": {
                    "name": "John",
                    "age": 30,
                    "isStudent": true
                }
            }
        },
        "annotations": []
    }
}

OUTPUT:
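
Applied to the pipeline in the question, the same idea would be to give the translator an explicit mapping that assigns the additional column ApplicationId the sink type Int64. A rough sketch (with explicit mappings, any other columns you want copied from the source query also need their own entries, omitted here):

    "translator": {
        "type": "TabularTranslator",
        "typeConversion": true,
        "mappings": [
            {
                "source": {
                    "name": "ApplicationId"
                },
                "sink": {
                    "name": "ApplicationId",
                    "type": "Int64"
                }
            }
        ],
        "typeConversionSettings": {
            "allowDataTruncation": true,
            "treatBooleanAsNumber": false
        }
    }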
