Data Loading

Load test data from files and generate dynamic data

Loading Data Files

open(path)

Read file contents as string (init phase only)

new SharedArray(name, fn)

Share large read-only data across workers

new SharedCSV(path)

Load CSV with automatic header parsing

SharedArray

Efficiently share large read-only datasets across all workers without memory duplication.

// Initialize the dataset a single time; every worker reads the same copy
const users = new SharedArray('users', () => JSON.parse(open('./data/users.json')));

export default function() {
  // Pick this worker's row from the shared, read-only data
  const account = users[__WORKER_ID % users.length];
  http.post('/login', JSON.stringify({ email: account.email, password: account.password }));
}

SharedCSV

Load CSV files efficiently with automatic header parsing. Data is shared across all workers.

# users.csv

email,password,name
user1@example.com,pass123,John
user2@example.com,pass456,Jane

# Loading CSV

const users = new SharedCSV('./data/users.csv');

export default function() {
  // Rows come back as objects keyed by the CSV header columns
  const rowIndex = __WORKER_ID % users.length;
  const user = users.get(rowIndex);
  http.post('/login', JSON.stringify({ email: user.email, password: user.password }));

  // A uniformly random row is also available
  const randomUser = users.random();
}

SharedCSV Properties & Methods

length: Number of rows (excluding header)

headers: Array of column names

get(index): Get row by index as object

random(): Get random row as object

JSON Files

# users.json

[
  { "id": 1, "email": "user1@example.com", "password": "pass1" },
  { "id": 2, "email": "user2@example.com", "password": "pass2" },
  { "id": 3, "email": "user3@example.com", "password": "pass3" }
]

# Loading JSON

// SharedArray keeps a single in-memory copy, which suits large files
const users = new SharedArray('users', () => JSON.parse(open('./data/users.json')));

// Small files can simply be parsed at the top level
const config = JSON.parse(open('./config.json'));

export default function() {
  // Choose a different user on every iteration
  const pick = Math.floor(Math.random() * users.length);
  const user = users[pick];

  const body = JSON.stringify({ email: user.email, password: user.password });
  http.post('https://api.example.com/login', body, {
    headers: { 'Content-Type': 'application/json' }
  });
}

Sharing Data Across Workers

Data loaded at the top level is shared across all workers. Use setup() to prepare shared state.

// Top-level data: initialized once and visible to every worker
const users = new SharedArray('users', () => JSON.parse(open('./data/users.json')));
const products = new SharedCSV('./data/products.csv');

export function setup() {
  // Log in a single time; the returned object is handed to every worker
  const credentials = JSON.stringify({
    email: 'admin@example.com',
    password: 'admin'
  });
  const res = http.post('https://api.example.com/login', credentials);
  return { token: res.json().token };
}

export default function(data) {
  // `data` is whatever setup() returned
  const user = users[__WORKER_ID % users.length];

  http.get(`https://api.example.com/users/${user.id}`, {
    headers: { 'Authorization': 'Bearer ' + data.token }
  });
}

Dynamic Data Generation

utils.uuid()

Generate UUID v4

utils.randomString(length)

Random alphanumeric string

utils.randomInt(min, max)

Random integer in range

utils.randomItem(array)

Random element from array

utils.randomEmail()

Generate random email address

utils.randomPhone()

Generate random US phone number

utils.randomName()

Random name {first, last, full}

utils.randomAddress()

Random US address {street, city, state, zip, full}

utils.randomDate(startYear, endYear)

Random date (YYYY-MM-DD)

utils.sequentialId()

Unique sequential ID across workers

export default function() {
  const identity = utils.randomName();

  // Every field is freshly generated on each iteration
  const payload = {
    id: utils.uuid(),
    username: `user_${utils.randomString(8)}`,
    age: utils.randomInt(18, 65),
    tier: utils.randomItem(['free', 'pro', 'enterprise']),
    email: utils.randomEmail(),           // "xk7f2b1q@test.com"
    phone: utils.randomPhone(),           // "+1-555-847-2901"
    name: identity.full,                  // "John Smith"
    dob: utils.randomDate(1970, 2000),    // "1985-07-23"
    orderId: utils.sequentialId(),        // unique sequential ID across workers
  };

  http.post('https://api.example.com/users', JSON.stringify(payload));
}

Weighted Random Selection

// Pick `value` from `items` with probability proportional to `weight`.
// Each item is { value, weight }; weights need not sum to any particular total.
// Throws if `items` is empty.
function weightedRandom(items) {
  if (items.length === 0) {
    throw new Error('weightedRandom: items must be non-empty');
  }

  const total = items.reduce((sum, item) => sum + item.weight, 0);
  let random = Math.random() * total;

  // Strict `<` (not `<=`) so that zero-weight items are never selected and a
  // draw landing exactly on a cumulative boundary maps to the next bucket.
  for (const item of items) {
    random -= item.weight;
    if (random < 0) return item.value;
  }
  // Floating-point drift can leave `random` at exactly 0; fall back to last.
  return items[items.length - 1].value;
}

// Relative frequencies for each simulated user action
const actions = [
  { value: 'browse', weight: 70 },   // 70% browse
  { value: 'search', weight: 20 },   // 20% search
  { value: 'purchase', weight: 10 }, // 10% purchase
];

export default function() {
  const action = weightedRandom(actions);

  if (action === 'browse') {
    http.get('https://api.example.com/products');
  } else if (action === 'search') {
    http.get('https://api.example.com/search?q=' + utils.randomString(5));
  } else if (action === 'purchase') {
    http.post('https://api.example.com/orders', JSON.stringify({ product: 1 }));
  }
}

Unique Data per Worker

const users = new SharedCSV('./data/users.csv');

export default function() {
  // Deterministic per-worker row; wraps around when workers outnumber rows
  const workerRow = __WORKER_ID % users.length;
  const user = users.get(workerRow);

  // Alternatively, rotate through the rows on every iteration
  const item = users.get(__ITERATION % users.length);

  print(`Worker ${__WORKER_ID} iteration ${__ITERATION} using ${user.email}`);
}

Per-Worker State

Use __WORKER_STATE to persist data across iterations within each worker.

export default function() {
  // __WORKER_STATE is a per-worker object that persists across iterations;
  // count how many times this worker has run the login flow.
  __WORKER_STATE.loginCount = (__WORKER_STATE.loginCount || 0) + 1;
  console.log(`Worker ${__WORKER_ID} iteration ${__ITERATION}, logins: ${__WORKER_STATE.loginCount}`);

  // Authenticate only on the first iteration; later iterations reuse the token.
  // NOTE(review): `credentials` is assumed to be declared elsewhere in the
  // script (e.g. at the top level) — confirm before copying this snippet.
  if (!__WORKER_STATE.token) {
    const res = http.post('/login', JSON.stringify(credentials));
    __WORKER_STATE.token = res.json().token;
  }

  // Use cached token in subsequent iterations
  http.get('/api/data', {
    headers: { 'Authorization': `Bearer ${__WORKER_STATE.token}` }
  });
}

Environment Variables

Access environment variables via the __ENV global object.

// Environment variables are exposed on the __ENV global
const apiKey = __ENV.API_KEY;
const baseUrl = __ENV.BASE_URL || 'https://api.example.com';

export default function() {
  const headers = { 'X-API-Key': apiKey };
  http.get(baseUrl + '/data', { headers });
}

// Run with:
// API_KEY=secret123 BASE_URL=https://staging.api.com fusillade run test.js

Automatic .env File Loading

Fusillade automatically loads .env files when running tests. It checks:

1. The script's directory first

2. The current working directory

# .env file format

API_KEY=your-secret-key
BASE_URL=https://api.example.com
DEBUG=true

Environment variables set in the shell take precedence over .env file values.

Binary Files

// Read the binary fixture once at init; every iteration reuses it
const imageFile = http.file('./data/test-image.png', 'upload.png', 'image/png');

export default function() {
  const upload = new FormData();
  upload.append('file', imageFile);
  upload.append('description', 'Test upload');

  // body() is serialized first, then headers() supplies the multipart boundary
  const payload = upload.body();
  http.post('https://api.example.com/upload', payload, {
    headers: upload.headers()
  });
}

Working with Large Datasets

1. Data is loaded once at script initialization, not per iteration

2. SharedArray/SharedCSV share data across workers without duplication

3. For very large datasets, consider loading only what you need

4. Use __WORKER_ID and __ITERATION to partition data across workers

// Partition large dataset across workers
const allUsers = new SharedCSV('./data/million_users.csv');
// NOTE(review): assumes `options.workers` is declared in this script's options
// export and is in scope at init time — confirm before copying this snippet.
const chunkSize = Math.ceil(allUsers.length / options.workers);

export default function() {
  // This worker's half-open slice [start, end) of the dataset
  const start = __WORKER_ID * chunkSize;
  const end = Math.min(start + chunkSize, allUsers.length);

  // Cycle through this worker's own subset by iteration number.
  // NOTE(review): if end === start (more workers than rows), the modulo below
  // is a division by zero and yields NaN — guard if that config is possible.
  const idx = start + (__ITERATION % (end - start));
  const user = allUsers.get(idx);
}