I am using the Python SDK and a Cosmos DB trigger in an Azure Function to monitor the change feed and write the results to a blob. In between, I am also updating the Stats.json blob (which has a key Stats with a value of 1 or 0).
I am using Visual Studio Code to push changes to the Azure Function. While testing locally, I am getting the error Unexpected status code: 401, and in the Azure portal the function is not getting triggered. I am not using any HTTP trigger.
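For context, the Stats.json blob described above is assumed to hold a small JSON document along these lines (a sketch based on the description; the code below also stamps a Time Updated field into it):

{
    "Stats": 0,
    "Time Updated": "2024-01-01T00:00:00"
}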
__init__.py
import azure.functions as func
from azure.storage.blob import BlobServiceClient, BlobClient
from datetime import datetime
import json
import logging
import os
import time


def get_recent_state_from_blob() -> int:
    try:
        blob_client = connect_storage()
        logging.info("Connected to blob storage")
        state_data = blob_client.download_blob().readall().decode("utf-8")
        state_json = json.loads(state_data)
        stats = state_json.get("Stats", 0)
        return stats
    except Exception as e:
        logging.error(f"Error while getting recent state from blob: {e}")
        return 0


def connect_storage() -> BlobClient:
    container_name = 'changedata'
    connection_string = os.environ['AzureWebJobsStorage']
    blob_name = 'Stats.json'
    try:
        blob_service_client = BlobServiceClient.from_connection_string(connection_string)
        container_client = blob_service_client.get_container_client(container_name)
        blob_client = container_client.get_blob_client(blob_name)
        return blob_client
    except Exception as e:
        raise e


def update_state_in_blob(new_state: int) -> None:
    try:
        blob_client = connect_storage()
        logging.info("Connected to blob storage")
        blob_data = blob_client.download_blob()
        blob_content = blob_data.readall()
        # Decode the existing JSON data
        data = json.loads(blob_content)
        # Update the 'Stats' key with the new state
        data['Stats'] = new_state
        # Add/update the 'Time Updated' key with the current timestamp
        data['Time Updated'] = datetime.utcnow().isoformat()
        # Encode the data back to JSON format
        updated_content = json.dumps(data)
        # Upload the updated content to the blob
        blob_client.upload_blob(updated_content, overwrite=True)
        logging.info("Blob updated successfully.")
    except Exception as e:
        raise e


def main(
    documents: func.DocumentList, outputBlob: func.Out[str]
) -> None:
    consolidated_data = []
    if documents:
        for document in documents:
            logging.info("id: %s", document["id"])
            logging.info("SwitchNum: %s", document["SwitchNum"])
            logging.info("FileNum: %s", document["FileNum"])
            logging.info("CallingNum: %s", document["CallingNum"])
            consolidated_data.append(
                {
                    "id": document["id"],
                    "SwitchNum": document["SwitchNum"],
                    "FileNum": document["FileNum"],
                    "CallingNum": document["CallingNum"],
                }
            )
        data = {"consolidated_data": consolidated_data}
        json_data = json.dumps(data, indent=4)
        logging.info(json_data)
        outputBlob.set(json_data)
        state = get_recent_state_from_blob()
        if state == 2:
            update_state_in_blob(1)
            time.sleep(300)
            update_state_in_blob(0)
            logging.info("Record written successfully")
        else:
            logging.info("State is Active. Skipping...")
function.json
{
    "scriptFile": "__init__.py",
    "bindings": [
        {
            "name": "documents",
            "connectionStringSetting": "CosmosChangeFeedTrigger_ConnectionString",
            "databaseName": "UI_Trigger",
            "collectionName": "AF_Changefeed",
            "leaseCollectionName": "leases",
            "createLeaseCollectionIfNotExists": true,
            "direction": "in",
            "type": "cosmosDBTrigger"
        },
        {
            "connection": "AzureStorageConnection",
            "name": "outputBlob",
            "path": "changedata/files",
            "direction": "out",
            "type": "blob"
        }
    ]
}
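For local testing, the connection settings referenced by the code and bindings above (AzureWebJobsStorage, AzureStorageConnection, CosmosChangeFeedTrigger_ConnectionString) have to resolve to valid connection strings; a missing or stale key is a common cause of authentication errors such as the Unexpected status code: 401 seen here. A minimal local.settings.json along these lines is assumed (placeholder values, not real keys):

{
    "IsEncrypted": false,
    "Values": {
        "FUNCTIONS_WORKER_RUNTIME": "python",
        "AzureWebJobsStorage": "<storage-account-connection-string>",
        "AzureStorageConnection": "<storage-account-connection-string>",
        "CosmosChangeFeedTrigger_ConnectionString": "<cosmos-db-connection-string>"
    }
}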
My folder structure below:
I was able to resolve the issue by making changes in host.json. Initially, when the changes were pushed from VS Code, it somehow got changed to the below.
After updating to the below, it started working.
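The before/after host.json snippets referenced above are not included here. For reference only, a minimal host.json for a Python Functions app using extension bundles looks like the sketch below; the exact change the poster made is not shown, so the pinned extensionBundle version range is an assumption. It does, however, match the v3-style binding properties used in function.json (collectionName, connectionStringSetting), which the 4.x Cosmos DB extension renamed to containerName and connection.

{
    "version": "2.0",
    "extensionBundle": {
        "id": "Microsoft.Azure.Functions.ExtensionBundle",
        "version": "[3.*, 4.0.0)"
    }
}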