JobNimbus API Google Sheets Connection Script

JobNimbus to Google Sheets Automation

This Google Apps Script connects a Google Sheet directly to your JobNimbus account using the JobNimbus public API. When you run it, the script retrieves every job record updated in the past 365 days and automatically fills a sheet with key details such as Job Type, Address, City, Zip Code, Job Description, and Job Status. The time window is fully customizable, so you can easily adjust it to pull 90 days, 6 months, or all historical data, depending on your reporting needs.
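The window is controlled by a single updatedAfterISO line inside Load_Jobs_Address_JN (shown in the full script below). A minimal sketch of a few common settings, using the script's own variable, could look like this; the variable names here are purely illustrative:

// Pick one window for updatedAfterISO inside Load_Jobs_Address_JN:
const last90Days  = new Date(Date.now() -  90*24*60*60*1000).toISOString(); // roughly 3 months
const last6Months = new Date(Date.now() - 182*24*60*60*1000).toISOString(); // roughly 6 months
// For all historical data, call fetchJobsAll_({}) with no updatedAfterISO at all;
// the updatedAfter parameter is then simply left out of the request.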

What the script does

  • Authenticates securely using your JobNimbus API key.
  • Queries all jobs updated within the past year (customizable time frame).
  • Fetches job data page by page through the JobNimbus API to avoid timeout errors.
  • Extracts key fields like job type, address, and status for quick reporting.
  • Writes all data into a clean, structured Google Sheet tab called “Jobs (Address)”.
  • Refreshes data on demand—simply re-run the script to update your sheet.
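If you would rather not re-run it by hand, a time-driven Apps Script trigger can call the same function on a schedule. This optional sketch is not part of the main script; it uses the standard ScriptApp trigger builder to refresh the sheet once a day (run it once from the editor to install the trigger):

// Optional: schedule Load_Jobs_Address_JN to run automatically every day.
function Create_Daily_Refresh_Trigger() {
  ScriptApp.newTrigger('Load_Jobs_Address_JN')
    .timeBased()
    .everyDays(1)  // adjust the frequency to taste
    .atHour(6)     // roughly 6 AM in the script's time zone
    .create();
}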

How to set it up

  1. Open a Google Sheet and go to Extensions → Apps Script.
  2. Paste the full code into the editor.
  3. Replace the placeholder with your JobNimbus API key.
  4. Save the project and select Load_Jobs_Address_JN from the function dropdown.
  5. Click Run, approve permissions, and watch the script build a Jobs (Address) tab with your synced JobNimbus data.

This setup is perfect for teams who want an easy, automated way to review or report on their JobNimbus jobs directly inside Google Sheets—no manual exports required.
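One optional hardening tweak to step 3: instead of pasting the key straight into the code, you can store it in Script Properties (Project Settings → Script Properties in the Apps Script editor) and read it at runtime with the standard PropertiesService API. The property name JOBNIMBUS_API_KEY below is just an example; if you go this route, swap the API_KEY line in the CONFIG block for something like:

// Reads the key from a Script Property named JOBNIMBUS_API_KEY (example name).
const API_KEY = PropertiesService.getScriptProperties().getProperty('JOBNIMBUS_API_KEY');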

/********* CONFIG *********/
const API_KEY   = 'PUT_YOUR_JOBNIMBUS_API_KEY_HERE';
const BASE_URL  = 'https://app.jobnimbus.com/api1'; // JobNimbus API base
const PAGE_SIZE = 100;
const MAX_PAGES = 200;


/********* QUERY STRING HELPER *********/
function toQuery_(obj) {
  return Object.keys(obj)
    .filter(k => obj[k] !== undefined && obj[k] !== null && obj[k] !== '')
    .map(k => Array.isArray(obj[k])
      ? obj[k].map(v => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`).join('&')
      : `${encodeURIComponent(k)}=${encodeURIComponent(obj[k])}`
    )
    .join('&');
}
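// Example (illustrative values): for a typical jobs request,
//   toQuery_({ page: '1', pageSize: '100', updatedAfter: '2024-01-01T00:00:00.000Z' })
// returns 'page=1&pageSize=100&updatedAfter=2024-01-01T00%3A00%3A00.000Z'
// (encodeURIComponent escapes the colons in the ISO timestamp).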

/********* CORE FETCHER *********/
function fetchOnce_(url, headers) {
  const res = UrlFetchApp.fetch(url, { method: 'get', headers, muteHttpExceptions: true });
  const code = res.getResponseCode();
  if (code < 200 || code >= 300) {
    throw new Error(`JobNimbus API error ${code}: ${res.getContentText()}`);
  }
  const text = res.getContentText();
  const data = text ? JSON.parse(text) : {};
  const rows = Array.isArray(data)
    ? data
    : (data.results || data.items || data.data || []);
  let nextUrl = null;
  const candidate = (data && (data.next || (data.links && data.links.next))) || null;
  if (candidate) {
    nextUrl = candidate.startsWith('/') ? `${BASE_URL}${candidate}` : candidate;
  }
  return { rows, nextUrl, raw: data };
}

function fetchByNextLink_(nextUrl, headers) {
  let all = [];
  for (let i = 0; i < MAX_PAGES && nextUrl; i++) {
    const res = UrlFetchApp.fetch(nextUrl, { method: 'get', headers, muteHttpExceptions: true });
    if (res.getResponseCode() < 200 || res.getResponseCode() >= 300) break;
    const data = JSON.parse(res.getContentText() || '{}');
    const rows = Array.isArray(data)
      ? data
      : (data.results || data.items || data.data || []);
    if (!rows || rows.length === 0) break;
    all = all.concat(rows);
    let nxt = data.next || (data.links && data.links.next);
    if (nxt && nxt.startsWith('/')) nxt = `${BASE_URL}${nxt}`;
    nextUrl = nxt || null;
  }
  return all;
}

function fetchJobsAll_({ updatedAfterISO } = {}) {
  const headers = { Authorization: `Bearer ${API_KEY}` };
  let all = [];
  let page = 1;

  for (; page <= MAX_PAGES; page++) {
    const q = {
      page: String(page),
      pageSize: String(PAGE_SIZE),
      ...(updatedAfterISO ? { updatedAfter: updatedAfterISO } : {})
    };
    const url = `${BASE_URL}/jobs?${toQuery_(q)}`;
    const { rows, nextUrl } = fetchOnce_(url, headers);

    if (!rows || rows.length === 0) break;
    all = all.concat(rows);
    if (nextUrl) return all.concat(fetchByNextLink_(nextUrl, headers));
    if (rows.length < PAGE_SIZE) break;
  }
  return all;
}

/********* SHEET WRITER *********/
function writeToSheet_(sheetName, header, rows) {
  const ss = SpreadsheetApp.getActiveSpreadsheet();
  let sh = ss.getSheetByName(sheetName);
  if (!sh) sh = ss.insertSheet(sheetName);
  sh.clearContents();
  sh.getRange(1, 1, 1, header.length).setValues([header]);
  if (rows.length === 0) return;
  sh.getRange(2, 1, rows.length, header.length).setValues(rows);
}

/********* FIELD EXTRACTORS (tailored to your JSON) *********/
function extractJobType_(j) {
  return j.record_type_name || '';
}
function extractJobAddress_(j) {
  const line1 = j.address_line1 || '';
  const line2 = j.address_line2 || '';
  const street = [line1, line2].filter(Boolean).join(', ');
  const city = j.city || '';
  const zip  = j.zip || '';
  return { street, city, zip };
}
function extractJobDescription_(j) {
  return j.description || '';
}
function extractJobStatus_(j) {
  return j.status_name || ''; // human-readable status (e.g., "Lead", "Completed")
}

/********* PUBLIC: LOAD JOBS WITH ADDRESS, DESCRIPTION, STATUS *********/
function Load_Jobs_Address_JN() {
  // const updatedAfterISO = new Date(Date.now() - 90*24*60*60*1000).toISOString(); // last 90 days
  const updatedAfterISO = new Date(Date.now() - 365*24*60*60*1000).toISOString(); // last 365 days

  const jobs = fetchJobsAll_({ updatedAfterISO });
  if (jobs.length > 0) Logger.log(JSON.stringify(jobs[0], null, 2));

  const header = ['Job Type', 'Address', 'City', 'Zip', 'Job Description', 'Job Status'];
  const rows = jobs.map(j => {
    const type = extractJobType_(j);
    const addr = extractJobAddress_(j);
    const desc = extractJobDescription_(j);
    const status = extractJobStatus_(j);
    return [type, addr.street, addr.city, addr.zip, desc, status];
  });

  writeToSheet_('Jobs (Address)', header, rows);
}

/********* QUICK DIAGNOSTIC *********/
function Test_Jobs_One_Page() {
  const headers = { Authorization: `Bearer ${API_KEY}` };
  const url = `${BASE_URL}/jobs?${toQuery_({ page: '1', pageSize: '5' })}`;
  const res = UrlFetchApp.fetch(url, { method: 'get', headers, muteHttpExceptions: true });
  Logger.log(res.getResponseCode());
  Logger.log(res.getContentText());
}
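Before running the full load, it is worth running Test_Jobs_One_Page once from the function dropdown: it requests a single five-row page and logs the HTTP status code plus the raw JSON, which quickly confirms that your API key and base URL are correct and shows the exact field names your account returns.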

Here’s a paste-ready debug add-on that ignores the date filter entirely and logs exactly what the API is returning. It is self-contained, so the simplest way to run it is in its own Apps Script project (or a copy of your sheet’s script). If you add it to the bottom of the script above instead, delete the duplicated CONFIG constants and the duplicate toQuery_/fetchOnce_ helpers first, since Apps Script will not run a project that declares the same const values twice.


What this does

  • Fetches all jobs (no date filter).
  • Logs each page request and row counts.
  • Writes an All-time status summary sheet.
  • Writes a small debug sample sheet so you can eyeball rows.

/********* CONFIG (edit your key & base URL) *********/
const API_KEY   = 'PUT_YOUR_JOBNIMBUS_API_KEY_HERE';
const BASE_URL  = 'https://app.jobnimbus.com/api1';
const PAGE_SIZE = 100;
const MAX_PAGES = 200;

/********* INLINE HELPERS (self-contained) *********/
function toQuery_(obj) {
  return Object.keys(obj)
    .filter(k => obj[k] !== undefined && obj[k] !== null && obj[k] !== '')
    .map(k => Array.isArray(obj[k])
      ? obj[k].map(v => `${encodeURIComponent(k)}=${encodeURIComponent(v)}`).join('&')
      : `${encodeURIComponent(k)}=${encodeURIComponent(obj[k])}`
    )
    .join('&');
}

function fetchOnce_(url, headers) {
  const res = UrlFetchApp.fetch(url, { method:'get', headers, muteHttpExceptions:true });
  const code = res.getResponseCode();
  if (code < 200 || code >= 300) throw new Error(`API error ${code}: ${res.getContentText()}`);
  const data = JSON.parse(res.getContentText() || '{}');
  const rows = Array.isArray(data) ? data : (data.results || data.items || data.data || []);
  let nextUrl = null;
  const cand = (data.next || (data.links && data.links.next)) || null;
  if (cand) nextUrl = cand.startsWith('/') ? `${BASE_URL}${cand}` : cand;
  return { rows, nextUrl };
}

/********* SELF-CONTAINED DEBUG *********/
function Write_Job_Status_Summary_All_Debug_Solo() {
  const headers = { Authorization: `Bearer ${API_KEY}` };
  let all = [];
  let page = 1;

  for (; page <= MAX_PAGES; page++) {
    const q = { page: String(page), pageSize: String(PAGE_SIZE) };
    const url = `${BASE_URL}/jobs?${toQuery_(q)}`;
    Logger.log(`Fetching page ${page}`);
    const { rows, nextUrl } = fetchOnce_(url, headers);
    if (!rows || rows.length === 0) break;
    all = all.concat(rows);
    Logger.log(`Page ${page}: ${rows.length} rows (running total: ${all.length})`);
    if (nextUrl) {
      Logger.log('Following next links supplied by the API');
      // Walk the API-provided next links until they run out
      let nxt = nextUrl;
      for (let i = 0; i < MAX_PAGES && nxt; i++) {
        const more = fetchOnce_(nxt, headers);
        if (!more.rows || more.rows.length === 0) break;
        all = all.concat(more.rows);
        nxt = more.nextUrl;
      }
      break;
    }
    if (rows.length < PAGE_SIZE) break;
  }

  Logger.log(`Total jobs fetched (all time): ${all.length}`);

  // Tally jobs by human-readable status name
  const counts = {};
  all.forEach(j => {
    const s = j.status_name || '(none)';
    counts[s] = (counts[s] || 0) + 1;
  });

  const ss = SpreadsheetApp.getActiveSpreadsheet();

  // All-time status summary sheet (sheet names here are illustrative)
  const summaryRows = Object.keys(counts).sort().map(k => [k, counts[k]]);
  writeDebugSheet_(ss, 'Job Status Summary (All, Debug)', ['Job Status', 'Count'], summaryRows);

  // Small sample of raw rows so you can eyeball the data
  const sampleRows = all.slice(0, 20).map(j =>
    [j.record_type_name || '', j.status_name || '', j.city || '', j.zip || '']);
  writeDebugSheet_(ss, 'Jobs Debug Sample', ['Job Type', 'Job Status', 'City', 'Zip'], sampleRows);
}

function writeDebugSheet_(ss, name, header, rows) {
  let sh = ss.getSheetByName(name);
  if (!sh) sh = ss.insertSheet(name);
  sh.clearContents();
  sh.getRange(1, 1, 1, header.length).setValues([header]);
  if (rows.length > 0) sh.getRange(2, 1, rows.length, header.length).setValues(rows);
}
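After Write_Job_Status_Summary_All_Debug_Solo finishes, open the execution log in the Apps Script editor to review the per-page messages, then compare the two new tabs (the all-time status summary and the small raw-row sample) against what Load_Jobs_Address_JN writes. If the all-time counts are higher, the difference most likely comes from the date filter or a pagination limit rather than the field extractors.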
