Upload large files to an Azure Container via K6

I am writing a perf test that uploads large XML files to Azure blob containers. I am having an issue where uploading one file can take up to 20 minutes for a 50 MB XML. I have checked and there are no network-related issues, and I can upload large files manually in seconds.

Is there a way to do this via K6? I am running this via Docker.

My current solution is below:

K6 Script:

import http from 'k6/http';
import { check, sleep } from 'k6';


const accountName = __ENV.STORAGE_ACCOUNT;
const containerNames = __ENV.CONTAINER_NAMES;
const accessToken = __ENV.ACCESS_TOKEN;

if (!accountName) throw new Error("STORAGE_ACCOUNT environment variable is required.");
if (!containerNames) throw new Error("CONTAINER_NAMES environment variable is required.");
if (!accessToken) throw new Error("ACCESS_TOKEN environment variable is required.");

const containerNameList = containerNames.split(',');
const baseFileName = __ENV.BASE_FILE_NAME || 'TestFile';
const fileSizeMb = parseInt(__ENV.FILE_SIZE_MB, 10) || 10;
const numberOfFiles = parseInt(__ENV.NUMBER_OF_FILES, 10) || 1;


const filler = `<Test><record_type>Testing</record_type></Test>`;
const charPerMb = 1024 * 1024;


// Build an XML payload of roughly sizeMb megabytes by repeating the filler element.
function generateXmlContent(sizeMb) {
  const xmlTemplateStart = `<?xml version="1.0" encoding="UTF-8"?><data><dataheaders>`;
  const xmlTemplateEnd = `</dataheaders></data>`;
  const fillerCount = Math.floor((charPerMb * sizeMb - xmlTemplateStart.length - xmlTemplateEnd.length) / filler.length);
  return `${xmlTemplateStart}${Array(fillerCount).fill(filler).join('')}${xmlTemplateEnd}`;
}


// Timestamp string (yyyyMMddHHmmss + milliseconds) used to keep file names unique.
function getTimestamp() {
  const now = new Date();
  return now
    .toISOString()
    .replace(/[-:.TZ]/g, '')
    .slice(0, 14) + now.getMilliseconds().toString().padStart(3, '0');
}

export function setup() {
  console.log("Setting up the test...");
  const fileList = [];
  for (let i = 0; i < numberOfFiles; i++) {
    const fileName = `${baseFileName}.${getTimestamp()}.XML`;
    fileList.push(fileName);
    if (i < numberOfFiles - 1) {
      sleep(1);
    }
  }
  console.log(`Prepared ${fileList.length} files.`);
  return { fileList, accountName, containerNameList, accessToken };
}

export default function (data) {
  const { fileList, accountName, containerNameList, accessToken } = data;

  console.log(`Starting upload of ${fileList.length} files to containers: ${containerNameList.join(', ')}`);

  fileList.forEach((fileName, index) => {
    const fileContent = generateXmlContent(fileSizeMb);

    console.log(`Uploading file ${index + 1}/${fileList.length}: ${fileName}`);

    
    // Build one PUT request per target container; http.batch sends them in parallel.
    const requests = containerNameList.map((containerName) => {
      const blobUrl = `https://${accountName}.blob.core.windows.net/${containerName}/${fileName}`;
      const headers = {
        Authorization: `Bearer ${accessToken}`,
        'x-ms-date': new Date().toUTCString(),
        'x-ms-version': '2021-04-10',
        'x-ms-blob-type': 'BlockBlob',
        'Content-Type': 'application/octet-stream',
      };

      return {
        method: 'PUT',
        url: blobUrl,
        body: fileContent,
        params: { headers, timeout: '800s' },
      };
    });

    
    const responses = http.batch(requests);

    responses.forEach((res, idx) => {
      const containerName = containerNameList[idx];
      check(res, {
        [`is status 201 for ${fileName} in ${containerName}`]: (r) => r.status === 201,
      });

      if (res.status === 201) {
        console.log(`Successfully uploaded ${fileName} to ${containerName}`);
      } else {
        console.error(`Failed to upload ${fileName} to ${containerName}. Status: ${res.status}`);
      }
    });

    
    sleep(1);
  });
}

export function teardown() {
  console.log("All files uploaded. Cleaning up...");
}

My Taurus YAML:

execution:
  - executor: k6
    concurrency: 20
    iterations: 100
    hold-for: 30m
    scenario: load_test
    env:
      STORAGE_ACCOUNT: "accountname"
      CONTAINER_NAMES: "container1,containers"
      ACCESS_TOKEN: "token"
      BASE_FILE_NAME: "Filename"
      FILE_SIZE_MB: 50
      NUMBER_OF_FILES: 10

scenarios:
  load_test:
    script: Test.js

modules:
  k6:
    image: grafana/k6:latest
    arguments:
      - "--console-output=stdout"
      - "--http-timeout=800s"

reporting:
  - module: console
  - module: final-stats

settings:
  logging:
    level: debug

Hi @mayamiko, welcome to the community forum!

Given that you seem to generate the XML files in the script, I would expect that this might be what takes a while.

Can you use Date.now() and console.log (or custom metrics) to try to narrow down which part takes 20 minutes?
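
For example, something along these lines (just a rough sketch: generateXmlContent, fileSizeMb and the requests array are the ones from your script, and the custom metric names are placeholders):

import http from 'k6/http';
import { Trend } from 'k6/metrics';

// Custom metrics (placeholder names) so the timings also show up in the end-of-test summary.
const xmlGenDuration = new Trend('xml_generation_duration_ms');
const uploadDuration = new Trend('blob_upload_duration_ms');

export default function () {
  // Time how long it takes to build the payload.
  const genStart = Date.now();
  const fileContent = generateXmlContent(fileSizeMb); // from your script
  const genElapsed = Date.now() - genStart;
  xmlGenDuration.add(genElapsed);
  console.log(`XML generation took ${genElapsed} ms`);

  // ...build the requests array from fileContent as in your script...

  // Time how long the actual PUT requests take.
  const uploadStart = Date.now();
  http.batch(requests);
  const uploadElapsed = Date.now() - uploadStart;
  uploadDuration.add(uploadElapsed);
  console.log(`Blob upload took ${uploadElapsed} ms`);
}

That should tell you whether the 20 minutes is spent building the 50 MB string or in the PUT requests themselves.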