I have a pipeline in Synapse for which I want to generate parameters using the parameter-definition.json file, but when I publish I get the error: `{"stack":"TypeError: me.split is not a function\n at https://web.azuresynapse.net at z.parameterizeResource"}`.
My pipeline Json:
{ "name": "PL_DS_SQL_SHIR_To_source_ADLS_raw_acp_main", "properties": { "activities": [ { "name": "For Each Entity", "type": "ForEach", "state": "Active", "onInactiveMarkAs": "Succeeded", "dependsOn": [ { "activity": "Set CopyDatetime", "dependencyConditions": [ "Succeeded" ] } ], "userProperties": [], "typeProperties": { "items": { "value": "@activity('Get SQL Source Entities to Load').output.value", "type": "Expression" }, "activities": [ { "name": "Copy data - sourceSQL_to_raw", "type": "Copy", "dependsOn": [ { "activity": "SP_Log_Copy", "dependencyConditions": [ "Succeeded" ] } ], "policy": { "timeout": "0.12:00:00", "retry": 0, "retryIntervalInSeconds": 30, "secureOutput": false, "secureInput": false }, "userProperties": [], "typeProperties": { "source": { "type": "SqlDWSource", "sqlReaderQuery": { "value": "@item().sql_query", "type": "Expression" }, "queryTimeout": "02:00:00", "partitionOption": "None" }, "sink": { "type": "ParquetSink", "storeSettings": { "type": "AzureBlobFSWriteSettings" }, "formatSettings": { "type": "ParquetWriteSettings" } }, "enableStaging": false, "translator": { "type": "TabularTranslator", "typeConversion": true, "typeConversionSettings": { "allowDataTruncation": true, "treatBooleanAsNumber": false } } }, "inputs": [ { "referenceName": "DS_DV_GenericDataversePool", "type": "DatasetReference" } ], "outputs": [ { "referenceName": "DS_ADLS_ZoneParquetFile", "type": "DatasetReference", "parameters": { "ContainerName": "raw/main", "GroupName": { "value": "@item().source_code", "type": "Expression" }, "EntityName": { "value": "@item().entity_name", "type": "Expression" }, "DateTime": { "value": "@formatDateTime(variables('CopyDateTime'), 'yyyy/MM/dd HH:mm:ss')", "type": "Expression" }, "FileExtension": "parquet", "FileNumber": "1" } } ] }, { "name": "Update WatermarkValue is Incremental and copied rows", "type": "IfCondition", "dependsOn": [ { "activity": "Copy data - sourceSQL_to_raw", "dependencyConditions": [ "Succeeded" ] } ], 
"userProperties": [], "typeProperties": { "expression": { "value": "@or(\n empty(item().watermark_attribute_name)\n ,equals(activity('Copy data - sourceSQL_to_raw').output.rowsCopied, 0)\n)", "type": "Expression" }, "ifFalseActivities": [ { "name": "Get New Watermark value", "type": "Lookup", "dependsOn": [], "policy": { "timeout": "0.12:00:00", "retry": 0, "retryIntervalInSeconds": 30, "secureOutput": false, "secureInput": false }, "userProperties": [], "typeProperties": { "source": { "type": "SqlDWSource", "sqlReaderQuery": { "value": "SELECT \n CAST(MAX(@{item().watermark_attribute_name}) AS NVARCHAR(40)) AS watermark_attribute_value\nFROM \n OPENROWSET( \n BULK 'main/@{item().source_code}/@{item().entity_name}/@{formatDateTime(variables('CopyDateTime'), 'yyyy/MM/dd')}/@{item().entity_name}@{formatDateTime(variables('CopyDateTime'), 'yyyyMMdd')}@{formatDateTime(variables('CopyDateTime'), 'HHmmss')}_1.parquet',\n DATA_SOURCE = 'ds_data_lake_raw',\n FORMAT = 'PARQUET'\n ) AS [result]", "type": "Expression" }, "queryTimeout": "02:00:00", "partitionOption": "None" }, "dataset": { "referenceName": "DS_SYN_ServerlessSqlPoolGeneric", "type": "DatasetReference", "parameters": { "DatabaseName": "data-lake-raw" } } } }, { "name": "Update Watermark Value", "type": "SqlPoolStoredProcedure", "dependsOn": [ { "activity": "Get New Watermark value", "dependencyConditions": [ "Succeeded" ] } ], "policy": { "timeout": "0.12:00:00", "retry": 0, "retryIntervalInSeconds": 30, "secureOutput": false, "secureInput": false }, "userProperties": [], "sqlPool": { "referenceName": "datapool", "type": "SqlPoolReference" }, "typeProperties": { "storedProcedureName": "[metadata].[update_source_entity_watermark_value_main]", "storedProcedureParameters": { "entity_id": { "value": { "value": "@item().entity_id", "type": "Expression" }, "type": "Int32" }, "watermark_value": { "value": { "value": "@activity('Get New Watermark value').output.firstRow.watermark_attribute_value", "type": "Expression" 
}, "type": "String" } } } } ] } }, { "name": "SP_Log_Copy", "type": "SqlPoolStoredProcedure", "dependsOn": [], "policy": { "timeout": "0.12:00:00", "retry": 0, "retryIntervalInSeconds": 30, "secureOutput": false, "secureInput": false }, "userProperties": [], "sqlPool": { "referenceName": "datapool", "type": "SqlPoolReference" }, "typeProperties": { "storedProcedureName": "metadata.Etl_log_main", "storedProcedureParameters": { "entity_id": { "value": { "value": "@item().entity_id", "type": "Expression" }, "type": "Int32" }, "type": { "value": "1", "type": "Int32" }, "pipeline_id": { "value": { "value": "@pipeline().RunId", "type": "Expression" }, "type": "String" }, "rows": { "value": "0", "type": "Int32" } } } } ] } } }
This is my definition file:
"Microsoft.Synapse/workspaces/pipelines": { "properties": { "activities": [ { "typeProperties": { "activities": [ { "typeProperties": { "ifFalseActivities": [ { "name": "=", "type": "=", "sqlPool": { "referenceName": "=", "type": "=" } } ] } } ] } } ] } }