Currently, I have implemented a rate limiter using lru-cache, based on the official example provided by Vercel, and it works flawlessly.
My intention was to consolidate the try-catch block into a middleware that handles all routes, instead of duplicating it in every API route. However, I ran into an issue: the CACHE_TOKEN count only increments inside the API route and stays stuck at 1 in the middleware.
Vercel's Example: https://github.com/vercel/next.js/tree/canary/examples/api-routes-rate-limit
src/helpers/rateLimit.js
import LRU from 'lru-cache'

export default function rateLimit() {
  const tokenCache = new LRU({
    max: 500, // Max 500 users per second
    ttl: 1 * 60000, // 1 minute in milliseconds
  })

  const token = 'CACHE_TOKEN'
  const limit = 3

  return {
    check: (res) =>
      new Promise((resolve, reject) => {
        const tokenCount = tokenCache.get(token) || [0]
        if (tokenCount[0] === 0) {
          tokenCache.set(token, tokenCount)
        }
        tokenCount[0] += 1

        const currentUsage = tokenCount[0]
        const isRateLimited = currentUsage >= limit

        res.headers.set('X-RateLimit-Limit', limit)
        res.headers.set('X-RateLimit-Remaining', isRateLimited ? 0 : limit - currentUsage)

        console.log(tokenCache.get(token))
        /*
          using api route: [ 1 ] [ 2 ] [ 3 ]
          using middleware: [ 1 ] [ 1 ] [ 1 ] [ 1 ] [ 1 ] ...
        */

        return isRateLimited ? reject() : resolve()
      }),
  }
}
src/pages/api/hello.js
import { NextResponse } from 'next/server'
import rateLimit from '@/helpers/rateLimit'

const limiter = rateLimit()

export default async function handler(req, res) {
  const response = NextResponse.next()

  try {
    await limiter.check(response) // limit of 3 requests per minute
  } catch (error) {
    console.log(error)
    return res.status(429).json({ error: 'Rate limit exceeded' })
  }

  return res.status(200).json({ message: 'Hello World' })
}
src/middleware.js
import { NextResponse } from 'next/server'
import rateLimit from '@/helpers/rateLimit'

const limiter = rateLimit()

export async function middleware(req) {
  const response = NextResponse.next()

  try {
    await limiter.check(response) // limit of 3 requests per minute
  } catch (error) {
    console.log(error)
    return NextResponse.json({ error: 'Rate limit exceeded' }, { status: 429 })
  }

  // Return the response so the X-RateLimit headers set in check() are kept
  return response
}
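As written, the middleware runs on every route. If it should only guard the API routes, a matcher could scope it; this is a sketch (the matcher pattern is an assumption, not part of my current setup):

// Optional: restrict the middleware to API routes only
export const config = {
  matcher: '/api/:path*',
}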