98 changes: 72 additions & 26 deletions src/commands/deploy.mjs
@@ -8,7 +8,7 @@ import { join, resolve, relative } from 'pathe'
import { execa } from 'execa'
import { setupDotenv } from 'c12'
import { $api, fetchUser, selectTeam, selectProject, projectPath, fetchProject, linkProject, gitInfo } from '../utils/index.mjs'
import { getStorage, getPathsToDeploy, getFile, uploadAssetsToCloudflare, isMetaPath, isServerPath, getPublicFiles } from '../utils/deploy.mjs'
import { getStorage, getPathsToDeploy, getFile, uploadAssetsToCloudflare, uploadWorkersAssetsToCloudflare, isMetaPath, isWorkerMetaPath, isServerPath, isWorkerServerPath, getPublicFiles, getWorkerPublicFiles } from '../utils/deploy.mjs'
import { createMigrationsTable, fetchRemoteMigrations, queryDatabase } from '../utils/database.mjs'
import login from './login.mjs'
import ensure from './ensure.mjs'
@@ -105,6 +105,16 @@ export default defineCommand({
consola.success(`Connected to ${colors.blueBright(linkedProject.teamSlug)} team.`)
consola.success(`Linked to ${colors.blueBright(linkedProject.slug)} project.`)

if (linkedProject.type === 'worker' && deployEnv === 'preview') {
consola.warn('Currently NuxtHub on Workers (BETA) does not support preview environments.')
const shouldDeploy = await confirm({
message: `Deploy ${colors.blueBright(projectPath())} to production instead?`
})
if (!shouldDeploy || isCancel(shouldDeploy)) {
return consola.log('Cancelled.')
}
}

// #region Build
if (args.build) {
consola.info('Building the Nuxt project...')
@@ -135,6 +145,11 @@ export default defineCommand({
const fileKeys = await storage.getKeys()
const pathsToDeploy = getPathsToDeploy(fileKeys)
const config = await storage.getItem('hub.config.json')
if (!config.nitroPreset && linkedProject.type === 'worker') {
consola.error('Please upgrade `@nuxthub/core` to the latest version to deploy to a worker project.')
process.exit(1)
}
const isWorkerPreset = ['cloudflare_module', 'cloudflare_durable', 'cloudflare-module', 'cloudflare-durable'].includes(config.nitroPreset)
const { format: formatNumber } = new Intl.NumberFormat('en-US')

let spinner = ora(`Preparing ${colors.blueBright(linkedProject.slug)} deployment for ${deployEnvColored}...`).start()
@@ -145,40 +160,64 @@
spinnerColorIndex = (spinnerColorIndex + 1) % spinnerColors.length
}, 2500)

let deploymentKey, serverFiles, metaFiles
let deploymentKey, serverFiles, metaFiles, completionToken
try {
const publicFiles = await getPublicFiles(storage, pathsToDeploy)

const deploymentInfo = await $api(`/teams/${linkedProject.teamSlug}/projects/${linkedProject.slug}/${deployEnv}/deploy/prepare`, {
let url = `/teams/${linkedProject.teamSlug}/projects/${linkedProject.slug}/${deployEnv}/deploy/prepare`
let publicFiles, publicManifest

if (isWorkerPreset) {
url = `/teams/${linkedProject.teamSlug}/projects/${linkedProject.slug}/${deployEnv}/deploy/worker/prepare`
publicFiles = await getWorkerPublicFiles(storage, pathsToDeploy)
/**
* { "/index.html": { hash: "hash", size: 30 }
*/
publicManifest = publicFiles.reduce((acc, file) => {
acc[file.path] = {
hash: file.hash,
size: file.size
}
return acc
}, {})
} else {
publicFiles = await getPublicFiles(storage, pathsToDeploy)
/**
* { "/index.html": "hash" }
*/
publicManifest = publicFiles.reduce((acc, file) => {
acc[file.path] = file.hash
return acc
}, {})
}
// Get deployment info by preparing the deployment
const deploymentInfo = await $api(url, {
method: 'POST',
body: {
config,
/**
* Public manifest is a map of file paths to their unique hash (SHA256 sliced to 32 characters).
* @example
* {
* "/index.html": "hash",
* "/assets/image.png": "hash"
* }
*/
publicManifest: publicFiles.reduce((acc, file) => {
acc[file.path] = file.hash
return acc
}, {})
publicManifest
}
})
spinner.succeed(`${colors.blueBright(linkedProject.slug)} ready to deploy.`)
const { missingPublicHashes, cloudflareUploadJwt } = deploymentInfo
deploymentKey = deploymentInfo.deploymentKey

const { cloudflareUploadJwt, buckets, accountId } = deploymentInfo
// missingPublicHashes is returned for Pages projects, buckets (of missing hashes) for Worker projects
let missingPublicHashes = deploymentInfo.missingPublicHashes || buckets.flat()
const publicFilesToUpload = publicFiles.filter(file => missingPublicHashes.includes(file.hash))

if (publicFilesToUpload.length) {
const totalSizeToUpload = publicFilesToUpload.reduce((acc, file) => acc + file.size, 0)
spinner = ora(`Uploading ${colors.blueBright(formatNumber(publicFilesToUpload.length))} new static assets (${colors.blueBright(prettyBytes(totalSizeToUpload))})...`).start()
await uploadAssetsToCloudflare(publicFilesToUpload, cloudflareUploadJwt, ({ progressSize, totalSize }) => {
const percentage = Math.round((progressSize / totalSize) * 100)
spinner.text = `${percentage}% uploaded (${prettyBytes(progressSize)}/${prettyBytes(totalSize)})...`
})
if (linkedProject.type === 'pages') {
await uploadAssetsToCloudflare(publicFilesToUpload, cloudflareUploadJwt, ({ progressSize, totalSize }) => {
const percentage = Math.round((progressSize / totalSize) * 100)
spinner.text = `${percentage}% uploaded (${prettyBytes(progressSize)}/${prettyBytes(totalSize)})...`
})
} else {
completionToken = await uploadWorkersAssetsToCloudflare(accountId, publicFilesToUpload, cloudflareUploadJwt, ({ progressSize, totalSize }) => {
const percentage = Math.round((progressSize / totalSize) * 100)
spinner.text = `${percentage}% uploaded (${prettyBytes(progressSize)}/${prettyBytes(totalSize)})...`
})
}
spinner.succeed(`${colors.blueBright(formatNumber(publicFilesToUpload.length))} new static assets uploaded (${colors.blueBright(prettyBytes(totalSizeToUpload))})`)
}

Expand All @@ -188,8 +227,14 @@ export default defineCommand({
consola.info(`${colors.blueBright(formatNumber(publicFiles.length))} static assets (${colors.blueBright(prettyBytes(totalSize))} / ${colors.blueBright(prettyBytes(totalGzipSize))} gzip)`)
}

metaFiles = await Promise.all(pathsToDeploy.filter(isMetaPath).map(p => getFile(storage, p, 'base64')))
serverFiles = await Promise.all(pathsToDeploy.filter(isServerPath).map(p => getFile(storage, p, 'base64')))
metaFiles = await Promise.all(pathsToDeploy.filter(isWorkerPreset ? isWorkerMetaPath : isMetaPath).map(p => getFile(storage, p, 'base64')))
serverFiles = await Promise.all(pathsToDeploy.filter(isWorkerPreset ? isWorkerServerPath : isServerPath).map(p => getFile(storage, p, 'base64')))
if (isWorkerPreset) {
serverFiles = serverFiles.map(file => ({
...file,
path: file.path.replace('/server/', '/')
}))
}
const serverFilesSize = serverFiles.reduce((acc, file) => acc + file.size, 0)
const serverFilesGzipSize = serverFiles.reduce((acc, file) => acc + file.gzipSize, 0)
consola.info(`${colors.blueBright(formatNumber(serverFiles.length))} server files (${colors.blueBright(prettyBytes(serverFilesSize))} / ${colors.blueBright(prettyBytes(serverFilesGzipSize))} gzip)...`)
@@ -284,13 +329,14 @@ export default defineCommand({

// #region Complete deployment
spinner = ora(`Deploying ${colors.blueBright(linkedProject.slug)} to ${deployEnvColored}...`).start()
const deployment = await $api(`/teams/${linkedProject.teamSlug}/projects/${linkedProject.slug}/${deployEnv}/deploy/complete`, {
const deployment = await $api(`/teams/${linkedProject.teamSlug}/projects/${linkedProject.slug}/${deployEnv}/deploy/${isWorkerPreset ? 'worker/complete' : 'complete'}`, {
method: 'POST',
body: {
deploymentKey,
git,
serverFiles,
metaFiles
metaFiles,
completionToken
},
}).catch((err) => {
spinner.fail(`Failed to deploy ${colors.blueBright(linkedProject.slug)} to ${deployEnvColored}.`)
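For reference, a minimal sketch of the two manifest shapes this command now sends to the prepare endpoints; the paths and hash values are illustrative, the shapes follow the comments above:

// Pages projects (`deploy/prepare`): path -> hash
const pagesManifest = { '/index.html': 'a1b2c3d4' }
// Worker projects (`deploy/worker/prepare`): path -> { hash, size }
const workerManifest = { '/index.html': { hash: 'a1b2c3d4', size: 30 } }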
15 changes: 13 additions & 2 deletions src/commands/preview.mjs
@@ -19,6 +19,13 @@ export default defineCommand({
required: false,
default: '.'
},
'log-level': {
type: 'string',
description: 'The log level to use.',
required: false,
default: 'log',
valueHint: 'debug, info, log, warn, error, none'
}
},
async run({ args }) {
const cmdCwd = process.cwd()
@@ -75,13 +82,17 @@
}
throw err
}
const wranglerArgs = []
if (args['log-level']) {
wranglerArgs.push(`--log-level=${args['log-level']}`)
}
if (nitroConfig.preset === 'cloudflare-pages') {
consola.info(`Starting \`wrangler pages dev .\` command...`)
await execa(options)`wrangler pages dev .`
await execa(options)`wrangler pages dev . ${wranglerArgs}`
.catch(cmdError)
} else {
consola.info(`Starting \`wrangler dev\` command...`)
await execa(options)`wrangler dev`
await execa(options)`wrangler dev ${wranglerArgs}`
.catch(cmdError)
}

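A minimal sketch of how the new log-level option reaches wrangler, assuming a user passes `--log-level debug`; the variable names mirror the code above:

const args = { 'log-level': 'debug' }
const wranglerArgs = []
if (args['log-level']) wranglerArgs.push(`--log-level=${args['log-level']}`)
// cloudflare-pages preset:  wrangler pages dev . --log-level=debug
// other presets:            wrangler dev --log-level=debug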
2 changes: 1 addition & 1 deletion src/internal.mjs
@@ -1,3 +1,3 @@
export { getStorage, getPathsToDeploy, getPublicFiles, uploadAssetsToCloudflare, isMetaPath, getFile, isServerPath } from './utils/deploy.mjs';
export { getStorage, getPathsToDeploy, getFile, getPublicFiles, getWorkerPublicFiles, uploadAssetsToCloudflare, uploadWorkersAssetsToCloudflare, isMetaPath, isWorkerMetaPath, isServerPath, isWorkerServerPath, } from './utils/deploy.mjs';
export { CreateDatabaseMigrationsTableQuery, ListDatabaseMigrationsQuery } from './utils/database.mjs';
export { generateWrangler } from './utils/wrangler.mjs';
10 changes: 10 additions & 0 deletions src/utils/data.mjs
@@ -131,6 +131,15 @@ export async function selectProject(team) {
})
if (isCancel(projectName)) return null
projectName = projectName || defaultProjectName
const projectType = await select({
message: 'Select your project type',
initialValue: 'pages',
options: [
{ label: 'Cloudflare Pages', value: 'pages' },
{ label: 'Cloudflare Workers (beta)', value: 'worker' },
]
})
if (isCancel(projectType)) return null
const projectLocation = await select({
message: 'Select a region for the storage',
initialValue: 'weur',
@@ -159,6 +168,7 @@ export async function selectProject(team) {
method: 'POST',
body: {
name: projectName,
type: projectType,
location: projectLocation,
productionBranch: productionBranch || defaultProductionBranch
}
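For context, a sketch of the project-creation body after this change; the field names come from the code above, the values are assumed:

const body = {
  name: 'my-app',
  type: 'worker', // or 'pages' for Cloudflare Pages
  location: 'weur',
  productionBranch: 'main'
}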
76 changes: 74 additions & 2 deletions src/utils/deploy.mjs
@@ -126,12 +126,18 @@ export const META_PATHS = [
'/nitro.json',
'/hub.config.json',
'/wrangler.toml',
'/package-lock.json',
'/package.json'
]

export const isMetaPath = (path) => META_PATHS.includes(path)
export const isServerPath = (path) => path.startsWith('/_worker.js/')
export const isPublicPath = (path) => !isMetaPath(path) && !isServerPath(path)

export const isWorkerMetaPath = (path) => META_PATHS.includes(path)
export const isWorkerPublicPath = (path) => path.startsWith('/public/')
export const isWorkerServerPath = (path) => path.startsWith('/server/')

/**
* Get all public files with their metadata
* @param {import('unstorage').Storage} storage - Storage instance
@@ -143,9 +149,18 @@ export async function getPublicFiles(storage, paths) {
paths.filter(isPublicPath).map(p => getFile(storage, p, 'base64'))
)
}
export async function getWorkerPublicFiles(storage, paths) {
const files = await Promise.all(
paths.filter(isWorkerPublicPath).map(p => getFile(storage, p, 'base64'))
)
return files.map((file) => ({
...file,
path: file.path.replace('/public/', '/')
}))
}

/**
* Upload assets to Cloudflare with concurrent uploads
* Upload assets to Cloudflare Pages with concurrent uploads
* @param {Array<{ path: string, data: string, hash: string, contentType: string }>} files - Files to upload
* @param {string} cloudflareUploadJwt - Cloudflare upload JWT
* @param {Function} onProgress - Callback function to update progress
@@ -200,4 +215,61 @@ export async function uploadAssetsToCloudflare(files, cloudflareUploadJwt, onProgress) {
}
}

// async function uploadToCloudflare(body, cloudflareUploadJwt) {

/**
* Upload assets to Cloudflare Workers with concurrent uploads
* @param {string} accountId - Cloudflare account ID
* @param {Array<{ path: string, data: string, hash: string, contentType: string }>} files - Files to upload
* @param {string} cloudflareUploadJwt - Cloudflare upload JWT
* @param {Function} onProgress - Callback function to update progress
*/
export async function uploadWorkersAssetsToCloudflare(accountId, files, cloudflareUploadJwt, onProgress) {
const chunks = await createChunks(files)
if (!chunks.length) {
return
}

let filesUploaded = 0
let progressSize = 0
let completionToken
const totalSize = files.reduce((acc, file) => acc + file.size, 0)
for (let i = 0; i < chunks.length; i += CONCURRENT_UPLOADS) {
const chunkGroup = chunks.slice(i, i + CONCURRENT_UPLOADS)

await Promise.all(chunkGroup.map(async (filesInChunk) => {
const form = new FormData()
for (const file of filesInChunk) {
form.append(file.hash, new File([file.data], file.hash, { type: file.contentType }), file.hash)
}
return ofetch(`/accounts/${accountId}/workers/assets/upload?base64=true`, {
baseURL: 'https://api.cloudflare.com/client/v4/',
method: 'POST',
headers: {
Authorization: `Bearer ${cloudflareUploadJwt}`
},
retry: MAX_UPLOAD_ATTEMPTS,
retryDelay: UPLOAD_RETRY_DELAY,
body: form
})
.then((data) => {
if (data && data.result?.jwt) {
completionToken = data.result.jwt
}
if (typeof onProgress === 'function') {
filesUploaded += filesInChunk.length
progressSize += filesInChunk.reduce((acc, file) => acc + file.size, 0)
onProgress({ progress: filesUploaded, progressSize, total: files.length, totalSize })
}
})
.catch((err) => {
if (err.data) {
throw new Error(`Error while uploading assets to Cloudflare: ${JSON.stringify(err.data)} - ${err.message}`)
}
else {
throw new Error(`Error while uploading assets to Cloudflare: ${err.message.split(' - ')[1] || err.message}`)
}
})
}))
}
return completionToken
}
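A minimal usage sketch of the new helper, assuming `accountId`, `cloudflareUploadJwt` and the list of files with missing hashes come from the `deploy/worker/prepare` response as in `deploy.mjs` above:

const completionToken = await uploadWorkersAssetsToCloudflare(
  accountId,            // Cloudflare account id from the prepare response
  publicFilesToUpload,  // [{ path, data, hash, size, contentType }, ...]
  cloudflareUploadJwt,  // upload JWT from the prepare response
  ({ progressSize, totalSize }) => console.log(`${progressSize}/${totalSize} bytes uploaded`)
)
// The returned completion token is then sent to `deploy/worker/complete`.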
2 changes: 1 addition & 1 deletion src/utils/wrangler.mjs
@@ -13,7 +13,7 @@ export function generateWrangler(hub, { preset } = {}) {
if (preset === 'cloudflare-durable') {
wrangler.durable_objects ||= {}
wrangler.durable_objects.bindings = [{ name: '$DurableObject', class_name: '$DurableObject' }]
wrangler.migrations = [{ tag: 'v1', new_classes: ['$DurableObject'] }]
wrangler.migrations = [{ tag: 'v1', new_sqlite_classes: ['$DurableObject'] }]
}
}

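For reference, a sketch of the object `generateWrangler` builds for the `cloudflare-durable` preset after this change; `new_sqlite_classes` (instead of `new_classes`) creates the Durable Object class with SQLite-backed storage:

const wrangler = {
  durable_objects: {
    bindings: [{ name: '$DurableObject', class_name: '$DurableObject' }]
  },
  migrations: [{ tag: 'v1', new_sqlite_classes: ['$DurableObject'] }]
}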