I have a FaunaDB project set up for a nonprofit giveaway that accepts a name and email in a form, checks the existing emails in the DB to ensure no repeats, then adds the entry to the DB if the email doesn’t exist. As part of this, I have built a serverless function to pull the data from the database so I can back up the entries weekly. Through this serverless function I discovered that the query is only returning the first 65 entries instead of the full database. Can anyone tell me what’s going on here and how to fix it?
Here’s the pull to CSV serverless function:
// FaunaDB driver setup shared by the handler below.
const faunadb = require('faunadb')
const client = new faunadb.Client({
// NOTE(review): in Gatsby, env vars prefixed with GATSBY_ are inlined into the
// client-side bundle — confirm this server secret is not exposed to the browser.
secret: process.env.GATSBY_SERVER_FAUNA_SECRET,
})
// Shorthand for the FQL query builder.
const q = faunadb.query
/**
 * Convert a FaunaDB page of documents into a CSV string.
 *
 * The header row comes from the keys of the first document's `data` object;
 * each subsequent row holds the values of one document's `data` object.
 * Assumes every document shares the first document's key set/order — TODO confirm
 * against the collection's write path.
 *
 * Fixes vs. original:
 *  - an empty result set previously crashed on `inputData.data[0].data`;
 *  - fields containing commas, quotes, or newlines previously corrupted the
 *    CSV (no quoting) — now escaped per RFC 4180.
 *
 * @param {{ data: Array<{ data: Object }> }} inputData - FaunaDB query response.
 * @returns {string} CSV text, or '' when there are no documents.
 */
function handle(inputData) {
  // Guard: nothing to export.
  if (!inputData || !Array.isArray(inputData.data) || inputData.data.length === 0) {
    return ''
  }
  // RFC 4180-style quoting: wrap a field in double quotes only when it
  // contains a comma, quote, or newline; double any embedded quotes.
  const escapeField = (value) => {
    const text = String(value)
    return /[",\n]/.test(text) ? `"${text.replace(/"/g, '""')}"` : text
  }
  const headers = Object.keys(inputData.data[0].data).map(escapeField).join(',')
  const rows = inputData.data.map((item) =>
    Object.values(item.data).map(escapeField).join(',')
  )
  return [headers, ...rows].join('\n')
}
exports.handler = async () => {
const currentDate = new Date().toISOString().split('T')[0]
const fileName = `entry-winners-${currentDate}`
try {
// Query data from FaunaDB
const response = await client.query(
q.Map(
q.Paginate(q.Match(q.Index(process.env.SUCCESS_ENTRANTS))),
q.Lambda('ref', q.Get(q.Var('ref')))
)
)
// Return CSV data
return {
statusCode: 200,
headers: {
'Content-Type': 'text/csv',
'Content-Disposition': `attachment; filename=${fileName}.csv`,
},
body: handle(response),
}
} catch (error) {
console.error('Error:', error)
return {
statusCode: 500,
body: JSON.stringify({ error: 'Failed to fetch data from FaunaDB' }),
}
}
}