The use-case for this blog post is creating a static site using Shopify as a headless CMS, and downloading the images so they can be served from the same CDN as your static files.

Static sites with Next.js

Next.js allows you to export a standalone static build that can be deployed on any webserver or hosting control panel, such as cPanel or Plesk.

It generates all the pages you need when you use functions like getStaticProps(). These run at build time, which allows you to query an API, fetch the data and bake it into a static copy of all the files you need.
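As a minimal sketch, a page that fetches data at build time might look like this (the endpoint and data shape here are placeholders, not from the original post):

JavaScript
// pages/index.js
export async function getStaticProps() {
  // Runs once at build time; the API endpoint below is a hypothetical example
  const res = await fetch('https://example.com/api/products')
  const products = await res.json()
  // Whatever is returned as props gets baked into the static page
  return { props: { products } }
}

export default function Home({ products }) {
  return (
    <ul>
      {products.map((product) => (
        <li key={product.id}>{product.title}</li>
      ))}
    </ul>
  )
}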

Using Next export

To create a static copy of your website, run next export.

The default directory where your files will be generated is /out.

To preview this directory with a local server, you can run npx http-server out.

Note: In out/index.html, you'll see the script and CSS links in the header are not relative. To fix this, edit the next.config.js file and add assetPrefix: './'.
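Assuming you have no other options set, your next.config.js would then look something like this:

JavaScript
// next.config.js
module.exports = {
  // Make exported asset URLs relative so the site works from any directory
  assetPrefix: './',
}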

Saving static images from urls

The post Asset download during nextjs export describes a use-case where a headless CMS stores the images and returns only their URLs to Next.js.

When exporting a static site, we want to download these images and serve them from the same CDN where our site will be deployed. Unfortunately, that's not the default behaviour of Next.js.

In that same post a rough example of how to achieve this is supplied.

Script to download URLs

GitHub user MartinVandersteen created this script fetchImages to do the job.

Create a js file in your root directory called fetchImages.js and paste the following code:

Note: I've adapted the supplied code to work with Shopify URLs, since they are not in a root directory.

JavaScript
// Dependencies :
// npm i download jsdom walk replaceall

const path = require('path')
const fsp = require('fs').promises
const download = require('download')
const walk = require('walk')
const replaceAll = require('replaceall')
const { JSDOM } = require('jsdom')

// usage :
// node fetchImages.js nextjsOutDirectory imagesPrefix1 imagesPrefix2 imagesPrefix3

// example :
// node fetchImages.js ./out http://localhost:1337

// What I did :
// - Put the file at the root of my project
// - Edit my "export" npm script and added "&& npm run fetch-images" after "next build && next export"

let startPath = process.argv[2] // e.g. ./out
let urls = process.argv.slice(3) // image URL prefixes to search for
let completeUrls = [] // remote URLs found in the exported files
let completeUrlsTrimmed = [] // corresponding local file names
const assetFolder = 'images/'

async function fetchImages() {
  let walker = walk.walk(startPath)
  // Walk every exported file and rewrite image URLs in .html and .json files
  walker.on('file', async (root, fileStats, next) => {
    if (fileStats.name.indexOf('.html') > 0) {
      const filePath = path.join(root, fileStats.name)
      const file = await fsp.readFile(filePath, { encoding: 'utf-8' })
      await handlePage(file)
      await updatePage(file, filePath)
    } else if (fileStats.name.indexOf('.json') > 0) {
      const filePath = path.join(root, fileStats.name)
      const file = await fsp.readFile(filePath, { encoding: 'utf-8' })
      await handleJson(file)
      await updatePage(file, filePath)
    }
    next()
  })
  walker.on('errors', console.log)
  walker.on('end', function () {
    console.log('all done');
  });
}

async function handlePage(data) {
  // Parse the exported HTML and collect image URLs from src and srcset attributes
  let dom = new JSDOM(data, { resources: 'usable' })
  let srcs = dom.window.document.querySelectorAll('img[src]')
  let srcSets = dom.window.document.querySelectorAll('img[srcset]')

  function setUrls(src) {
    if (src && src !== '') {
      if (src.indexOf('http') === 0) {
        downloadSrc(src)
      }
    }
  }

  srcs.forEach((e) => setUrls(e.getAttribute('src')))

  srcSets.forEach((srcSet) => {
    const imgSrcSet = srcSet.getAttribute('srcset')
    imgSrcSet
      .split(',')
      .forEach((entry) => {
        // Each srcset entry is "url descriptor", e.g. "https://... 640w",
        // so keep only the URL part
        setUrls(entry.trim().split(' ')[0])
      })
  })
}

// Update the pages' content
async function updatePage(data, filePath) {
  // Replace each remote URL with its local path under images/
  for (let i = 0; i < completeUrls.length; i++) {
    data = replaceAll(completeUrls[i], assetFolder + completeUrlsTrimmed[i], data)
  }

  try {
    console.log('updating file', filePath)
    await fsp.writeFile(filePath, data, { encoding: 'utf8' })
  } catch (e) {
    console.log(e)
  }
}

async function handleJson(data) {
  let srcs = []
  urls.forEach((url) => {
    console.log('looping through URL: ', url)
    // Match quoted URLs starting with this prefix, e.g. "https://cdn.shopify.com/...jpg?v=1"
    let regex = new RegExp('"' + url + '/[^"]*"', 'g')
    srcs = [...srcs, ...data.matchAll(regex)]
  })
  // Flatten the match arrays to strings and strip the surrounding quotes
  srcs = srcs.flat().map((e) => e.replace(/"/g, ''))

  console.log('srcs = ', srcs)

  function setUrls(src) {
    if (src && src !== '') {
      if (src.indexOf('http') === 0) {
        downloadSrc(src)
      }
    }
  }

  srcs.forEach((e) => setUrls(e))
}


// Download src images
async function downloadSrc(src) {
  console.log('src = ' + src)

  // Strip the query string (e.g. Shopify's ?v=...) and keep the file name
  const urlTrimmed = src.split('?')[0]
  const filename = urlTrimmed.split('/').slice(-1)[0]

  // Remember the remote URL and its local file name for updatePage()
  if (completeUrls.indexOf(src) === -1) {
    completeUrls.push(src)
    completeUrlsTrimmed.push(filename)
  }

  try {
    await fsp.stat(path.join(startPath, assetFolder, filename))
    console.log('skipping asset named', filename, 'already exists')
  } catch (e) {
    console.log('downloading to assets', filename)
    await download(urlTrimmed, path.join(startPath, assetFolder))
  }
}

fetchImages()

Install the script dependencies with yarn add download jsdom walk replaceall.

To use the script, run node fetchImages.js ./out http://website.com, where the URL is the base URL where your images are stored.

Note: all images will need unique file names, since they are all downloaded into a single images/ folder.

The last step is adding fetchImages.js into your export step.

In your package.json file, create a new line under scripts:

"fetch-images": "node fetchImages.js ./out https://cdn.shopify.com"

Then create an export command:

"export": "next build && next export && npm run fetch-images",

If you now run yarn export, it will build your site into the out directory, download the images from your API into the out/images folder, and rewrite the pages to reference the local copies.

Note: results may depend on your hosting platform; Netlify worked for me but Cloudflare didn't.


Further reading