Logic App Standard Blob Storage Built-In Connector trigger not working properly
Describe the Bug
The trigger does not fire when new files arrive in the corresponding folder. Files are being dropped into the container/folder and the Logic App simply does not trigger. Once I go to the app, open the code view, change nothing, and save the app, it starts picking up the files as if by magic :/
This feels like a bug to me.
Plan Type
Standard
Steps to Reproduce the Bug or Issue
- A file gets dropped in the container/folder (a minimal upload sketch follows this list)
- The trigger does not fire
- Open the workflow
- Save the workflow (nothing changed)
- The trigger starts firing
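To make the repro concrete, this is roughly how a file lands in the monitored path (a minimal sketch using the azure-storage-blob Python SDK; the connection string and blob name are placeholders, not our real values):

from datetime import datetime, timezone
from azure.storage.blob import BlobServiceClient

# Upload a small test blob into the path the trigger below is watching.
service = BlobServiceClient.from_connection_string("<storage-account-connection-string>")
blob_path = f"DPGdatalab/JSON/{datetime.now(timezone.utc):%Y%m%d}/test_audience_001.json"  # hypothetical file name
blob = service.get_blob_client(container="dap-out", blob=blob_path)
blob.upload_blob(b'{"test": true}', overwrite=True)
print(f"Uploaded dap-out/{blob_path} - the workflow should fire, but nothing happens until the app is re-saved.")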
Workflow JSON
{
  "definition": {
    "$schema": "https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#",
    "actions": {
      "Check_file_size": {
        "actions": {
          "Check_on_status_code_201": {
            "actions": {
              "Upload_blob_with_audience_id": {
                "inputs": {
                  "parameters": {
                    "blobName": "@concat('audienceCreated-',triggerBody()?['properties']?['blobName'])",
                    "containerName": "landing/DPGdatalab/in/",
                    "content": "@body('Create_audience_with_JSON')"
                  },
                  "serviceProviderConfiguration": {
                    "connectionName": "AzureBlob",
                    "operationId": "uploadBlob",
                    "serviceProviderId": "/serviceProviders/AzureBlob"
                  }
                },
                "type": "ServiceProvider"
              }
            },
            "else": {
              "actions": {
                "Compose_JSON": {
                  "inputs": {
                    "id": "@{split(triggerBody()?['properties']?['blobname'], '_')[0]}"
                  },
                  "runAfter": {
                    "Refresh_audience_with_JSON": [
                      "SUCCEEDED"
                    ]
                  },
                  "type": "Compose"
                },
                "Refresh_audience_with_JSON": {
                  "inputs": {
                    "body": "@body('Read_blob_content')?['content']",
                    "headers": {
                      "Content-Type": "application/json",
                      "x-api-key": "@{parameters('datalab_x_api_key')}"
                    },
                    "method": "POST",
                    "uri": "@{parameters('datalab_endpoint')}/v1/audiences/crm/@{split(triggerBody()?['properties']?['blobname'], '_')[0]}/refresh"
                  },
                  "runtimeConfiguration": {
                    "contentTransfer": {
                      "transferMode": "Chunked"
                    }
                  },
                  "type": "Http"
                },
                "Upload_blob_to_storage_audience_refreshed": {
                  "inputs": {
                    "parameters": {
                      "blobName": "@concat('audienceRefreshed-',triggerBody()?['properties']?['blobName'])",
                      "containerName": "landing/DPGdatalab/in/",
                      "content": "@outputs('Compose_JSON')",
                      "overrideIfExists": "true"
                    },
                    "serviceProviderConfiguration": {
                      "connectionName": "AzureBlob",
                      "operationId": "uploadBlob",
                      "serviceProviderId": "/serviceProviders/AzureBlob"
                    }
                  },
                  "runAfter": {
                    "Compose_JSON": [
                      "SUCCEEDED"
                    ]
                  },
                  "type": "ServiceProvider"
                }
              }
            },
            "expression": {
              "and": [
                {
                  "equals": [
                    "@outputs('Create_audience_with_JSON')?['statusCode']",
                    201
                  ]
                }
              ]
            },
            "runAfter": {
              "Create_audience_with_JSON": [
                "SUCCEEDED",
                "TIMEDOUT",
                "SKIPPED",
                "FAILED"
              ]
            },
            "type": "If"
          },
          "Create_audience_with_JSON": {
            "inputs": {
              "body": "@body('Read_blob_content')?['content']",
              "headers": {
                "Content-Type": "application/json",
                "x-api-key": "@{parameters('datalab_x_api_key')}"
              },
              "method": "POST",
              "uri": "@{parameters('datalab_endpoint')}/v1/audiences/crm"
            },
            "runAfter": {
              "Read_blob_content": [
                "SUCCEEDED"
              ]
            },
            "runtimeConfiguration": {
              "contentTransfer": {
                "transferMode": "Chunked"
              }
            },
            "type": "Http"
          },
          "Read_blob_content": {
            "inputs": {
              "parameters": {
                "blobName": "@triggerBody()?['name']",
                "containerName": "dap-out"
              },
              "serviceProviderConfiguration": {
                "connectionName": "AzureBlob",
                "operationId": "readBlob",
                "serviceProviderId": "/serviceProviders/AzureBlob"
              }
            },
            "type": "ServiceProvider"
          }
        },
        "else": {
          "actions": {}
        },
        "expression": {
          "and": [
            {
              "greater": [
                "@triggerBody()?['properties']?['length']",
                0
              ]
            }
          ]
        },
        "runAfter": {},
        "type": "If"
      },
      "Copy_to_dataLab_archive": {
        "inputs": {
          "parameters": {
            "destinationBlobName": "/archives/@{triggerBody()?['name']}",
            "destinationContainerName": "dap-out",
            "overrideIfExists": true,
            "sourceBlobName": "@triggerBody()?['name']",
            "sourceContainerName": "dap-out"
          },
          "serviceProviderConfiguration": {
            "connectionName": "AzureBlob",
            "operationId": "copyBlob",
            "serviceProviderId": "/serviceProviders/AzureBlob"
          }
        },
        "runAfter": {
          "Check_file_size": [
            "SUCCEEDED"
          ]
        },
        "type": "ServiceProvider"
      },
      "Delete_from_main_folder": {
        "inputs": {
          "parameters": {
            "blobName": "@triggerBody()?['name']",
            "containerName": "dap-out"
          },
          "serviceProviderConfiguration": {
            "connectionName": "AzureBlob",
            "operationId": "deleteBlob",
            "serviceProviderId": "/serviceProviders/AzureBlob"
          }
        },
        "runAfter": {
          "Copy_to_dataLab_archive": [
            "SUCCEEDED"
          ]
        },
        "type": "ServiceProvider"
      }
    },
    "contentVersion": "1.0.0.0",
    "outputs": {},
    "triggers": {
      "When_a_blob_is_added_or_updated": {
        "inputs": {
          "parameters": {
            "path": "dap-out/DPGdatalab/JSON/@{utcNow('yyyyMMdd')}/{blobname}"
          },
          "serviceProviderConfiguration": {
            "connectionName": "AzureBlob",
            "operationId": "whenABlobIsAddedOrModified",
            "serviceProviderId": "/serviceProviders/AzureBlob"
          }
        },
        "type": "ServiceProvider"
      }
    }
  },
  "kind": "Stateful"
}
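Note on the trigger path: it embeds @{utcNow('yyyyMMdd')}, so the monitored prefix changes every day. As a sanity check we list the blobs under today's prefix to confirm the files really are where the trigger is looking (sketch only; the connection string is a placeholder):

from datetime import datetime, timezone
from azure.storage.blob import BlobServiceClient

# List everything under today's prefix of the trigger path
# "dap-out/DPGdatalab/JSON/@{utcNow('yyyyMMdd')}/{blobname}".
service = BlobServiceClient.from_connection_string("<storage-account-connection-string>")
prefix = f"DPGdatalab/JSON/{datetime.now(timezone.utc):%Y%m%d}/"
for item in service.get_container_client("dap-out").list_blobs(name_starts_with=prefix):
    print(item.name)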
Screenshots or Videos
No response
Additional context
No response
I really hope this does not fall into the pit of being forgotten, because this is quite a serious issue and I've seen lots of users complaining about it. If you really want us to use Logic Apps Standard more, these things should be looked at.
Sharing this issue with Milin Joshi; I'm not sure what his GitHub handle is.
I faced a similar behaviour. Unsure whether this is related. After scratching my head so many times and trying so many different combinations, this was my finding: https://github.com/Azure/logicapps/issues/1143
This issue is stale because it has been open for 45 days with no activity.
This issue was closed because it has been inactive for 14 days since being marked as stale.
We have the same kind of issue when calling a sub-workflow from the main workflow. The main workflow says that the sub-workflow does not exist when trying to save it. Then we need to save the sub-workflow (no changes!) and suddenly we can save the main workflow again.