// scripts/chuckles.js
  1  // Disable pino logger before requiring any backend-core modules
  2  process.env.DISABLE_PINO_LOGGER = "1"
  3  const fs = require("fs")
  4  const dotenv = require("dotenv")
  5  const { join } = require("path")
  6  const https = require("https")
  7  const http = require("http")
  8  const { URL } = require("url")
  9  
// CLI definition. Exactly one data source is required: --dump (a saved
// CouchDB _all_docs JSON file) or --curl (a file containing a browser
// "Copy as cURL" command); the .check() below enforces mutual exclusion.
const argv = require("yargs")
  .usage("Usage: $0 [--dump <file> | --curl <curl-command>] [options]")
  .option("dump", {
    alias: "d",
    type: "string",
    describe: "Path to the JSON dump file to import",
  })
  .option("curl", {
    alias: "c",
    type: "string",
    describe: "File containing cURL command (paste from browser dev tools)",
  })
  // Defaults to the development database; pass --dev=false for production.
  .option("dev", {
    type: "boolean",
    default: true,
    describe: "Import to development database (default: true)",
  })
  .option("verbose", {
    alias: "v",
    type: "boolean",
    default: false,
    describe: "Enable verbose logging",
  })
  .option("dry-run", {
    type: "boolean",
    default: false,
    describe: "Preview what would be done without making changes",
  })
  .option("skip-assets", {
    type: "boolean",
    default: false,
    describe: "Skip uploading client assets to object store",
  })
  // One of --dump/--curl is mandatory, and they cannot be combined.
  .check(argv => {
    if (!argv.dump && !argv.curl) {
      throw new Error("Must provide either --dump or --curl option")
    }
    if (argv.dump && argv.curl) {
      throw new Error("Cannot use both --dump and --curl options together")
    }
    return true
  })
  .example("$0 --dump ./my-app-dump.json", "Import dump to dev database")
  .example(
    "$0 --dump ./my-app-dump.json --dev=false",
    "Import to production database"
  )
  .example(
    "$0 --dump ./my-app-dump.json --dry-run --verbose",
    "Preview import with detailed logging"
  )
  .example(
    "$0 --curl ./curl-command.txt",
    "Fetch data using cURL command from file"
  )
  .help().argv
 66  
// Resolve the server package and load its .env BEFORE requiring
// @budibase/backend-core — presumably backend-core reads DB/object-store
// settings from the environment at require time; confirm before reordering.
const serverDir = join(__dirname, "..", "packages", "server")
dotenv.config({ path: join(serverDir, ".env") })
const { db, objectStore } = require("@budibase/backend-core")

// Unpack CLI flags once; kebab-case options need quoted destructuring keys.
const {
  dump,
  curl,
  dev,
  verbose,
  "dry-run": dryRun,
  "skip-assets": skipAssets,
} = argv
// Set to the target database name once an import succeeds, so the final
// success message can report it.
let dbName
 80  
 81  const log = (message, force = false) => {
 82    if (verbose || force) {
 83      console.log(message)
 84    }
 85  }
 86  
 87  const logError = message => {
 88    console.error(`❌ ${message}`)
 89  }
 90  
// Parse a cURL command (as pasted from browser dev tools) into the pieces
// needed to re-issue the request: the URL, its headers, any cookies (folded
// into a Cookie header), and the CouchDB database name taken from the first
// URL path segment. Returns { url, headers, dbName, originalUrl }; throws
// when the URL or database name cannot be extracted.
const parseCurlCommand = curlCommand => {
  log("Parsing cURL command...")

  let originalUrl
  const headers = {}

  // Full cURL command - extract URL and headers
  // Try quoted URL first, then unquoted
  let urlMatch = curlCommand.match(/curl\s+['"]([^'"]+)['"]/)
  if (!urlMatch) {
    // Try unquoted URL (stops at first space followed by dash)
    urlMatch = curlCommand.match(/curl\s+([^\s]+)/)
  }

  if (!urlMatch) {
    throw new Error("Could not extract URL from cURL command")
  }

  originalUrl = urlMatch[1]
  log(`Extracted URL: ${originalUrl}`)

  // Extract headers from -H flags (both quoted and unquoted)
  const quotedHeaderRegex = /-H\s+['"]([^'"]+)['"]/g
  // NOTE(review): `[^-]+?` stops at the first "-", so unquoted headers whose
  // name or value contains a dash (e.g. Content-Type) will not be captured
  // by this fallback — confirm the quoted path covers all real inputs.
  const unquotedHeaderRegex = /-H\s+([^-]+?)(?=\s+-|$)/g

  let headerMatch

  // Try quoted headers first
  while ((headerMatch = quotedHeaderRegex.exec(curlCommand)) !== null) {
    const headerLine = headerMatch[1]
    // Split on the FIRST ":" only — header values may themselves contain ":".
    const colonIndex = headerLine.indexOf(":")
    if (colonIndex > 0) {
      const key = headerLine.substring(0, colonIndex).trim()
      const value = headerLine.substring(colonIndex + 1).trim()
      headers[key] = value
    }
  }

  // If no quoted headers found, try unquoted
  if (Object.keys(headers).length === 0) {
    while ((headerMatch = unquotedHeaderRegex.exec(curlCommand)) !== null) {
      const headerLine = headerMatch[1].trim()
      const colonIndex = headerLine.indexOf(":")
      if (colonIndex > 0) {
        const key = headerLine.substring(0, colonIndex).trim()
        const value = headerLine.substring(colonIndex + 1).trim()
        headers[key] = value
      }
    }
  }

  // Extract cookies from -b flag and convert to Cookie header
  let cookieMatch = curlCommand.match(/-b\s+['"]([^'"]+)['"]/)
  if (!cookieMatch) {
    // Try unquoted cookies
    cookieMatch = curlCommand.match(/-b\s+([^-]+?)(?=\s+-|$)/)
  }
  if (cookieMatch) {
    headers["Cookie"] = cookieMatch[1].trim()
  }

  log(`Extracted ${Object.keys(headers).length} headers`)

  // Extract database name from URL path.
  // Expected shape: http(s)://host[:port]/app_<uuid>/_all_docs...
  const url = new URL(originalUrl)
  const pathParts = url.pathname.split("/").filter(part => part.length > 0)
  const dbName = pathParts[0]

  if (!dbName || !dbName.startsWith("app_")) {
    throw new Error(
      `Could not extract database name from URL path: ${url.pathname}`
    )
  }

  log(`Extracted database name: ${dbName}`)

  // Modify URL to add include_docs=true and remove other query params
  // (assigning `search` wholesale discards any pre-existing query string).
  const modifiedUrl = new URL(originalUrl)
  modifiedUrl.search = "?include_docs=true"

  log(`Modified URL: ${modifiedUrl.toString()}`)

  return {
    url: modifiedUrl.toString(),
    headers,
    dbName,
    originalUrl,
  }
}
180  
// GET `url` with the supplied headers and resolve with the parsed JSON body.
// Rejects on non-2xx status, JSON parse failure, transport error, or a
// 30-second timeout.
// Fix: on a non-2xx status the original rejected without attaching any data
// handler, so the response body was never consumed and the socket could be
// held open — drain it with res.resume() before rejecting.
const fetchDataFromUrl = (url, headers) => {
  return new Promise((resolve, reject) => {
    log("Fetching data from URL...")

    const urlObj = new URL(url)
    const options = {
      hostname: urlObj.hostname,
      port: urlObj.port,
      path: urlObj.pathname + urlObj.search,
      method: "GET",
      headers,
    }

    // Pick the transport matching the URL scheme.
    const client = urlObj.protocol === "https:" ? https : http

    const req = client.request(options, res => {
      log(`HTTP ${res.statusCode} ${res.statusMessage}`)

      if (res.statusCode < 200 || res.statusCode >= 300) {
        // Drain the response so the socket is released, then fail.
        res.resume()
        reject(new Error(`HTTP ${res.statusCode}: ${res.statusMessage}`))
        return
      }

      let data = ""
      res.on("data", chunk => {
        data += chunk
      })

      res.on("end", () => {
        try {
          const json = JSON.parse(data)
          log(`Fetched ${json.rows ? json.rows.length : 0} rows`)
          resolve(json)
        } catch (err) {
          reject(new Error(`Failed to parse JSON response: ${err.message}`))
        }
      })
    })

    req.on("error", err => {
      reject(new Error(`Request failed: ${err.message}`))
    })

    // Abort hung requests; destroy() triggers the error handler too, but the
    // first reject() wins.
    req.setTimeout(30000, () => {
      req.destroy()
      reject(new Error("Request timeout after 30 seconds"))
    })

    req.end()
  })
}
233  
// Confirm the dump path exists and points at a regular file; throws with a
// descriptive message otherwise.
const validateDumpFile = dumpPath => {
  if (!fs.existsSync(dumpPath)) {
    throw new Error(`Dump file not found: ${dumpPath}`)
  }

  const stats = fs.statSync(dumpPath)
  if (!stats.isFile()) {
    throw new Error(`Path is not a file: ${dumpPath}`)
  }

  const sizeMb = (stats.size / 1024 / 1024).toFixed(2)
  log(`Found dump file: ${dumpPath} (${sizeMb}MB)`)
}
248  
// Read the dump file from disk, parse it as JSON, and return its non-empty
// `rows` array. Throws with a specific message for each failure mode.
const parseDumpFile = dumpPath => {
  log("Reading and parsing dump file...")

  let raw
  try {
    raw = fs.readFileSync(dumpPath, "utf8")
  } catch (err) {
    throw new Error(`Failed to read dump file: ${err.message}`)
  }

  let parsed
  try {
    parsed = JSON.parse(raw)
  } catch (err) {
    throw new Error(`Invalid JSON in dump file: ${err.message}`)
  }

  // Array.isArray also rejects a missing/null rows field.
  if (!Array.isArray(parsed.rows)) {
    throw new Error("Dump file must contain a 'rows' array")
  }
  if (parsed.rows.length === 0) {
    throw new Error("Dump file contains no rows")
  }

  log(`Parsed ${parsed.rows.length} rows from dump file`)
  return parsed.rows
}
277  
// Unwrap each _all_docs row into its bare document (stripping CouchDB _rev
// markers) and locate the app_metadata document, failing fast on malformed
// dumps. Returns { rows, metadata }.
const validateAndProcessRows = rawRows => {
  log("Processing and validating rows...")

  const rows = []
  for (const row of rawRows) {
    if (!row.doc) {
      throw new Error("Row missing 'doc' property")
    }
    // Drop the revision so importing into a fresh database cannot conflict.
    delete row.doc._rev
    rows.push(row.doc)
  }

  const metadata = rows.find(doc => doc._id === "app_metadata")
  if (!metadata) {
    throw new Error("No app_metadata document found in dump")
  }
  if (!metadata.appId) {
    throw new Error("app_metadata missing appId field")
  }
  if (!metadata.instance) {
    throw new Error("app_metadata missing instance field")
  }

  log(`Found app metadata for app: ${metadata.appId}`)
  return { rows, metadata }
}
306  
// Derive the dev/prod database names from the app's UUID and repoint the
// metadata document at whichever database this run targets.
// Returns { targetDbName, prodDbName }; throws on an unparseable appId.
const setupDatabase = metadata => {
  // The UUID is the final underscore-separated segment of the appId.
  const uuid = metadata.appId.split("_").pop()
  if (!uuid) {
    throw new Error(`Invalid appId format: ${metadata.appId}`)
  }

  const prodDbName = db.APP_PREFIX + uuid
  const targetDbName = dev ? db.APP_DEV_PREFIX + uuid : prodDbName

  const mode = dev ? "development" : "production"
  log(`Target database: ${targetDbName} (${mode})`)

  // Mutates the caller's metadata so the imported doc matches its new home.
  metadata.appId = targetDbName
  metadata.instance._id = targetDbName

  return { targetDbName, prodDbName }
}
327  
// Upload the built client bundle and component manifest to the object store
// under the production database's prefix. No-op when --skip-assets is set;
// throws when either built file is missing on disk.
// Improvement: the two uploads are independent, so they now run in parallel
// via Promise.all instead of sequentially awaited calls.
const uploadAssets = async prodDbName => {
  if (skipAssets) {
    log("Skipping asset uploads")
    return
  }

  log("Uploading client assets to object store...")

  const clientJsPath = join(serverDir, "client", "budibase-client.js")
  const manifestPath = join(serverDir, "client", "manifest.json")

  // Fail early with a clear message rather than a mid-upload error.
  if (!fs.existsSync(clientJsPath)) {
    throw new Error(`Client JS file not found: ${clientJsPath}`)
  }
  if (!fs.existsSync(manifestPath)) {
    throw new Error(`Manifest file not found: ${manifestPath}`)
  }

  await Promise.all([
    objectStore.upload({
      bucket: objectStore.ObjectStoreBuckets.APPS,
      filename: `${prodDbName}/budibase-client.js`,
      path: clientJsPath,
    }),
    objectStore.upload({
      bucket: objectStore.ObjectStoreBuckets.APPS,
      filename: `${prodDbName}/manifest.json`,
      path: manifestPath,
    }),
  ])

  log("✅ Assets uploaded successfully")
}
361  
// Main workflow: obtain raw rows (live fetch via --curl, or a saved --dump
// file), validate and process them, then import into the target database and
// upload client assets. All failures are logged and exit the process.
async function run() {
  try {
    let rawRows
    let sourceFile = dump

    if (curl) {
      // Handle cURL option - read from file
      if (!fs.existsSync(curl)) {
        throw new Error(`cURL file not found: ${curl}`)
      }

      const curlCommand = fs.readFileSync(curl, "utf8").trim()
      log(`Read cURL command from file: ${curl}`)

      const {
        url,
        headers,
        dbName: extractedDbName,
      } = parseCurlCommand(curlCommand)
      sourceFile = `${extractedDbName}.json`

      // cURL-path dry run returns before any network traffic happens.
      if (dryRun) {
        console.log("🔍 DRY RUN - No changes will be made")
        console.log(`Would fetch data from: ${url}`)
        console.log(`Would save to: ${sourceFile}`)
        console.log(`Headers: ${Object.keys(headers).length} headers extracted`)
        return
      }

      // Fetch data from URL
      const fetchedData = await fetchDataFromUrl(url, headers)

      // Save to file (acts as a dump for later re-use via --dump)
      log(`Saving fetched data to ${sourceFile}...`)
      fs.writeFileSync(sourceFile, JSON.stringify(fetchedData, null, 2))
      log(`✅ Data saved to ${sourceFile}`)

      // Use the fetched data
      rawRows = fetchedData.rows || []
      if (rawRows.length === 0) {
        throw new Error("Fetched data contains no rows")
      }
    } else {
      // Handle dump file option
      validateDumpFile(dump)
      rawRows = parseDumpFile(dump)
    }

    // Common processing for both paths
    const { rows, metadata } = validateAndProcessRows(rawRows)
    const { targetDbName, prodDbName } = setupDatabase(metadata)

    // Dump-path dry run; the cURL path already returned above, hence !curl.
    if (dryRun && !curl) {
      console.log("🔍 DRY RUN - No changes will be made")
      console.log(
        `Would import ${rows.length} documents to database: ${targetDbName}`
      )
      if (!skipAssets) {
        console.log("Would upload client assets to object store")
      }
      return
    }

    // Execution phase
    log("Initializing database connection...")
    db.init()

    const database = db.getDB(targetDbName)
    log(`Importing ${rows.length} documents to ${targetDbName}...`)

    // _rev fields were stripped in validateAndProcessRows, so bulkDocs
    // inserts these as fresh documents.
    await database.bulkDocs(rows)
    log("✅ Documents imported successfully")

    await uploadAssets(prodDbName)

    dbName = targetDbName // Set for final message
  } catch (err) {
    logError(err.message)
    if (verbose) {
      console.error(err.stack)
    }
    // Non-zero exit status (-1 wraps to 255) so callers can detect failure.
    process.exit(-1)
  }
}
446  
// Entry point. run() handles its own errors (logError + process.exit), so
// the trailing .catch is deliberately empty — it only prevents an
// unhandled-rejection warning if run() itself ever rejects.
run()
  .then(() => {
    // dbName is only set after a successful (non-dry-run) import.
    if (!dryRun && dbName) {
      console.log(`🎉 Successfully imported app dump to database: ${dbName}`)
    }
  })
  .catch(err => {
    // Error already handled in run() function
    // This catch is just to prevent unhandled promise rejection
  })