
If you want to automatically import a CSV into Cosmos DB to update data, an Azure Function is your best option.

My script is written in Python. First, create a function.json to define the blob trigger:
{
  "scriptFile": "__init__.py",
  "bindings": [
    {
      "name": "myblob",
      "type": "blobTrigger",
      "direction": "in",
      "path": "transferin/Erweiterung_testdaten.csv",
      "connection": "sausstoraget_STORAGE"
    }
  ]
}
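The connection property refers to an application setting that holds the storage account connection string, and the script below also expects ENDPOINT, PRIMARYKEY and DBLink in the environment. For local testing these would go into local.settings.json; this is only a minimal sketch with placeholder values, and the DBLink format follows the collection-link convention of the 3.x azure-cosmos SDK used below:

{
  "IsEncrypted": false,
  "Values": {
    "AzureWebJobsStorage": "<storage connection string>",
    "FUNCTIONS_WORKER_RUNTIME": "python",
    "sausstoraget_STORAGE": "<storage connection string>",
    "ENDPOINT": "https://<your-account>.documents.azure.com:443/",
    "PRIMARYKEY": "<your primary key>",
    "DBLink": "dbs/<database>/colls/<collection>"
  }
}

When the function runs in Azure, the same names are set as Application Settings on the Function App instead.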
Next comes the __init__.py referenced by scriptFile. It reads the uploaded blob, parses the CSV with pandas and upserts every row into Cosmos DB:

import logging
import azure.functions as func
import os, io
import pandas as pd
import json
# azure-cosmos 3.x SDK (CreateItem/QueryItems/ReplaceItem style API)
import azure.cosmos.cosmos_client as cosmos_client
import azure.cosmos.errors as errors

# Cosmos DB endpoint, key and collection link come from application settings
config = {
    'ENDPOINT': os.environ['ENDPOINT'],
    'PRIMARYKEY': os.environ['PRIMARYKEY'],
    'DBLink': os.environ['DBLink']
}

def main(myblob: func.InputStream):
    logging.info(f"Python blob trigger function processed blob\n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes")

    # Read the blob into memory and parse the semicolon-separated CSV as strings
    file = myblob.read()
    logging.info(type(file))
    df = pd.read_csv(io.BytesIO(file), sep=';', dtype=str)
    logging.info(df)

    # Convert the DataFrame into a list of dictionaries, one per CSV row
    results = json.loads(df.to_json(orient='records'))
    logging.info(len(results))

    client = cosmos_client.CosmosClient(url_connection=config['ENDPOINT'], auth={'masterKey': config['PRIMARYKEY']})
    for item in results:
        logging.info("Import")
        # Cosmos DB requires an 'id' property; reuse the CONTRACT_ID column
        item['id'] = item['CONTRACT_ID']
        logging.info(json.dumps(item, indent=2))
        try:
            # Try to create the document first
            client.CreateItem(config['DBLink'], item)
        except errors.HTTPFailure as e:
            # 409 Conflict means the document already exists, so replace it instead
            if e.status_code == 409:
                query = {'query': 'SELECT * FROM c where c.id="%s"' % item['id']}
                options = {}
                docs = client.QueryItems(config['DBLink'], query, options)
                doc = list(docs)[0]
                # Get the document link from attribute `_self`
                doc_link = doc['_self']
                client.ReplaceItem(doc_link, item)
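For deployment, the imported packages have to be listed in requirements.txt. A sketch that matches the imports above; the CreateItem/QueryItems/ReplaceItem calls belong to the 3.x version of the azure-cosmos SDK, so I pin it below, and you may want to pin exact versions for your environment:

azure-functions
pandas
azure-cosmos<4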
You can download the files from here: https://github.com/dajor/azurefunction-cosmos