c#, azure-functions, azure-data-lake, azure-blob-storage

How to generate/retrieve a file in the data lake using a message queue?


I have an Azure function, QueueTrigger1, that executes a function, executeTemplateProcess, to upload a TSV file to Google Drive and update a Jira ticket.

I need to create a message queue flow that generates a TSV file on the data lake, runs a Python script against it, and then retrieves the TSV file's location from the data lake and adds it to the queue.

I have a basic foundation for the queue today, but I am not sure how to generate the file on the data lake or how to retrieve its location. We need to pass the file into the Python code as input, which is why I think the file's data lake location needs to be enqueued, but I am not sure how to perform this.
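Not knowing your exact storage setup, here is a minimal sketch of that middle step in C#: it writes the TSV into ADLS Gen2 with the Azure.Storage.Files.DataLake SDK and then drops the file's path onto a queue with Azure.Storage.Queues. The file system, folder, and queue names are placeholders, as is the WriteTsvAndEnqueueAsync helper name:

using System;
using System.IO;
using System.Text;
using System.Threading.Tasks;
using Azure.Storage.Files.DataLake;
using Azure.Storage.Queues;

public static class DataLakeEnqueue
{
    // Hypothetical helper: write a TSV to the data lake, then enqueue its path.
    public static async Task<string> WriteTsvAndEnqueueAsync(string connectionString, string tsvContent)
    {
        var serviceClient = new DataLakeServiceClient(connectionString);
        var fileSystem = serviceClient.GetFileSystemClient("datalake");  // placeholder
        var directory = fileSystem.GetDirectoryClient("input");          // placeholder
        var fileClient = directory.GetFileClient($"template-{Guid.NewGuid():N}.tsv");

        using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(tsvContent)))
        {
            await fileClient.UploadAsync(stream, overwrite: true);
        }

        // Enqueue the file's location so the Python step knows what to read.
        // Base64 encoding matches the queue trigger's default expectation.
        var queue = new QueueClient(connectionString, "companyxyz-jira-dev-pm-mapping-done",
            new QueueClientOptions { MessageEncoding = QueueMessageEncoding.Base64 });
        await queue.CreateIfNotExistsAsync();
        await queue.SendMessageAsync(fileClient.Path); // e.g. "input/template-<guid>.tsv"
        return fileClient.Path;
    }
}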

These are the namespaces for both QueueTrigger1 and executeTemplateProcess():

namespace DI
{
    public class DIProcess
    {
        public static void executeTemplateProcess(string jiraKey, string jiraIssueType, string jiraSummary, Component component, string jiraDescription)
        {
            // rowCount, sheetName, jira, and webLink are class members elided from this excerpt
            if (rowCount > 0)
            {   // python code would run somewhere here, following the queue process, before the Jira code below executes
                string dfileId = CopyTemplate(component.FileId, sheetName);

                // stop the process if the template copy was not successful
                if (string.IsNullOrEmpty(dfileId))
                    return;

                jira.AddComment("Google File copied.");

                // Update JIRA with the web link
                webLink = $"https://docs.google.com/spreadsheets/d/{dfileId}";
                jira.AddWebLink(webLink, sheetName);
                jira.AddComment("Jira weblink added.");
            }
            else
            {
                jira.UpdateStatus("Abandoned");
                jira.AddComment("Jira status updated to Abandoned.");
            }
        }
    }
}   
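If a helper like the WriteTsvAndEnqueueAsync sketch above existed, the call site would be the spot that comment marks; connectionString and tsvContent are stand-ins for values the class would already have:

// Hypothetical call at the point the "python code would run somewhere here" comment marks.
string tsvPath = DataLakeEnqueue.WriteTsvAndEnqueueAsync(connectionString, tsvContent)
    .GetAwaiter().GetResult(); // bridge because executeTemplateProcess is synchronous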

namespace companyxyzjira.QueueTrigger1
{
    public static class JiraQueueTrigger
    {
        [FunctionName("QueueTrigger1")]
        public static void Run([QueueTrigger("companyxyz-jira-dev-am", Connection = "storageaccountcompanyxyzji42f6_STORAGE")] string myQueueItem,
            ILogger log, ExecutionContext context)
        {
            dynamic jira;
            string jiraKey;
            string jiraIssueType;
            string jiraSummary;
            string jiraDescription;
            string[] jiraComponentNames;
            Component jiraComponent = null; // component lookup elided from this excerpt

            log.LogInformation("Queue trigger function processing");

            jira = JsonConvert.DeserializeObject(myQueueItem);

            jiraKey = jira.issue.key;
            jiraIssueType = jira.issue.fields.issuetype.name;
            jiraSummary = jira.issue.fields.summary;
            jiraDescription = jira.issue.fields.description;

            try
            {
                DIProcess.executeTemplateProcess(jiraKey, jiraIssueType, jiraSummary, jiraComponent, jiraDescription);
            }
            catch (System.Exception e)
            {
                log.LogError(e.ToString());
                log.LogError(e.Message);
                log.LogError(e.StackTrace);
            }
        }
    }
}

I suppose this is my line of thinking, but I am not sure how to communicate with the data lake...

[FunctionName("HttpTriggerCSharp")]
public static async Task<IActionResult> Run(
    [HttpTrigger(AuthorizationLevel.Function, "get", "post", Route = null)]
    HttpRequest req, [Queue("companyxyz-jira-dev-pm-mapping-done")] ICollector<string> QueueItem, ILogger log)
{
    log.LogInformation("HTTP trigger function processed a request.");

    string name = req.Query["name"];
    
    string requestBody = string.Empty;
    using (StreamReader streamReader = new StreamReader(req.Body))
    {
        requestBody = await streamReader.ReadToEndAsync();
        QueueItem.Add(requestBody); // i think?
    }
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    name = name ?? data?.name;
    
    return name != null
        ? (ActionResult)new OkObjectResult($"{name}")
        : new BadRequestObjectResult("Please pass a name on the query string or in the request body");
}
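For the retrieval side of that line of thinking, a minimal sketch of a queue-triggered function that treats the message body as the data lake path and downloads the file (queue, file system, and setting names are again placeholders):

using System;
using System.IO;
using System.Threading.Tasks;
using Azure.Storage.Files.DataLake;
using Microsoft.Azure.WebJobs;
using Microsoft.Extensions.Logging;

public static class MappingDoneTrigger
{
    [FunctionName("MappingDoneTrigger")]
    public static async Task Run(
        [QueueTrigger("companyxyz-jira-dev-pm-mapping-done")] string filePath,
        ILogger log)
    {
        // The message body is assumed to be the file's path within the
        // file system, e.g. "input/template-<guid>.tsv".
        var serviceClient = new DataLakeServiceClient(
            Environment.GetEnvironmentVariable("AZURE_STORAGE_CONNECTION_STRING"));
        var fileClient = serviceClient
            .GetFileSystemClient("datalake")   // placeholder file system name
            .GetFileClient(filePath);

        using var stream = await fileClient.OpenReadAsync();
        using var reader = new StreamReader(stream);
        string tsv = await reader.ReadToEndAsync();
        log.LogInformation($"Downloaded {filePath}: {tsv.Length} characters");
    }
}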

Data lake snapshot with input/output files (uploaded manually, but that's what we want to automate from now on, so we need to generate/retrieve these artifacts via the message queue as described above):

[screenshot: data lake file listing]


Solution

from azure.storage.filedatalake import DataLakeServiceClient
import pandas as pd
import os

connect_str = os.getenv('AZURE_STORAGE_CONNECTION_STRING')

# Create the service client once, outside the function body, so it is
# reused across invocations.
service_client = DataLakeServiceClient.from_connection_string(connect_str)

file_system_client = service_client.get_file_system_client(file_system="test")
directory_client = file_system_client.get_directory_client("test")
file_client = directory_client.create_file("uploaded-file0316.txt")

# Upload to the data lake. Writing with sep="\t" produces TSV directly;
# a blanket replace of ',' with '\t' would corrupt fields that contain commas.
head = ["col1", "col2", "col3"]
rows = [[1, 2, 3], [4, 5, 6], [8, 7, 9]]
df = pd.DataFrame(rows, columns=head)
output = df.to_csv(index_label="idx", encoding="utf-8", sep="\t")
print(output)
file_client.append_data(data=output, offset=0, length=len(output))
file_client.flush_data(len(output))

# Download from the data lake
download = file_client.download_file()
content = download.readall()
print(content)
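Two details worth noting. append_data only stages bytes at the given offset; the write becomes visible to readers once flush_data commits it. And since the file system, directory, and file name are chosen up front, the file's location (file system test, path test/uploaded-file0316.txt) is known before the upload finishes, so the same function can enqueue that path for the next stage to pick up.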