Yes, this is a temporary issue. I experienced it at some point in my own Fwitter example. Our uniqueness detection doesn't play well with code that creates/updates/deletes things within one complex FQL flow while that flow is executing :). I filed a ticket, so it should be fixed in the meantime.
Good to know! The rate limiting there was me experimenting a bit, and it could also be written much more simply; to be fair, I think I went a bit too deep, since I had only just joined FaunaDB at the time. I'm working on a skeleton application that includes that simpler version. In the meantime, here is the code:
Simpler rate limiting
import { rateLimiting } from '../../fauna-queries/helpers/errors'
import faunadb from 'faunadb'
/*
* Ideally we limit the amount of calls that come to Login.
*/
const q = faunadb.query
const {
  If,
  Epoch,
  Match,
  Index,
  Collection,
  Let,
  Var,
  Paginate,
  Select,
  TimeDiff,
  Or,
  GTE,
  Abort,
  Create,
  IsEmpty,
  Count,
  LT,
  Do,
  Now,
  Subtract
} = q
function AddRateLimiting(action, FqlQueryToExecute, Identifier, calls, perMilliseconds) {
  const ExecuteAndCreateLog = Do(
    Create(Collection('logs'), {
      data: {
        action: action,
        identity: Identifier
      }
    }),
    FqlQueryToExecute
  )

  return Let(
    {
      logsPage: Paginate(Match(Index('logs_by_action_and_identity_ordered_by_ts'), action, Identifier), {
        size: calls
      })
    },
    If(
      Or(IsEmpty(Var('logsPage')), LT(Count(Select(['data'], Var('logsPage'))), calls)),
      // If there are fewer than 'calls' logs so far (or none at all), just execute and add a log.
      ExecuteAndCreateLog,
      Let(
        {
          // The page looks like { data: [timestamp1, timestamp2, ...] }.
          // We retrieve the last timestamp of that page. If the page size were 3, it would be
          // the oldest of these 3 events, since the index is ordered from new to old.
          timestamp: Select(['data', Subtract(calls, 1)], Var('logsPage')),
          // Transform the Fauna timestamp to a Time object.
          time: Epoch(Var('timestamp'), 'microseconds'),
          // How long ago was that event, in ms?
          ageInMs: TimeDiff(Var('time'), Now(), 'milliseconds')
        },
        If(
          GTE(Var('ageInMs'), perMilliseconds),
          // Then: the oldest of the last 'calls' events is old enough, so we execute.
          ExecuteAndCreateLog,
          // Else... Abort! Rate limiting in action.
          Abort(rateLimiting)
        )
      )
    )
  )
}
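For context, the index used above could look roughly like the definition below, and the helper is then just wrapped around whatever query you want to protect. This is a minimal sketch under my own assumptions: the exact index definition, the 'accounts_by_email' index, and the placeholder credentials are illustrations, not the skeleton code.

const { CreateIndex, Login } = q

// Assumed definition of the logs index: terms on action + identity,
// ts as value in reverse order so the newest log comes first.
const CreateLogsIndex = CreateIndex({
  name: 'logs_by_action_and_identity_ordered_by_ts',
  source: Collection('logs'),
  terms: [{ field: ['data', 'action'] }, { field: ['data', 'identity'] }],
  values: [{ field: ['ts'], reverse: true }]
})

// Hypothetical usage: allow at most 3 'login' calls per identity every 10 seconds.
const email = 'user@example.com' // placeholder
const password = 'hunter2' // placeholder
const RateLimitedLogin = AddRateLimiting(
  'login',
  Login(Match(Index('accounts_by_email'), email), { password: password }),
  email,
  3,
  10000
)

RateLimitedLogin is then just an FQL expression like any other, so you hand it to client.query() as usual.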
Blocking after faulty logins
I also block after three faulty logins separately, since using the rate-limiting system for that felt like a bit of an abuse of it. Of course, there are a few undefined things in this code; it's just to give you an idea of what it looks like (a rough sketch of those missing pieces follows the snippet). For more info, keep an eye out for the skeleton app + blog post.
// Let's wrap some other functionality around the login.
const BlockThreeFaultyLogins = Do(
  If(
    GTE(Count(Match(Index('logs_by_action_and_identity'), 'faulty_login', email)), MAX_LOGIN_ATTEMPTS),
    // Abort if the maximum number of faulty logins was exceeded.
    Abort(tooManyFaultyLogins),
    // Else, just continue as usual!
    Let(
      {
        login: LoginFQL
      },
      Do(
        If(
          Equals(false, Var('login')),
          // If the login is faulty, we'll add a log entry.
          Create(Collection('logs'), {
            data: {
              action: 'faulty_login',
              identity: email
            }
          }),
          // Else, we clean up the faulty_login logs again.
          q.Map(
            Paginate(Match(Index('logs_by_action_and_identity'), 'faulty_login', email)),
            Lambda(['logRef'], Delete(Var('logRef')))
          )
        ),
        // Finally, return the login result itself.
        Var('login')
      )
    )
  )
)
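As a rough idea of what those undefined pieces could look like (the names and values here are assumptions on my side, not the final skeleton code):

// FQL functions used above that weren't destructured earlier.
const { Equals, Lambda, Delete } = q

// Assumed constants.
const MAX_LOGIN_ATTEMPTS = 3
const tooManyFaultyLogins = 'ERROR_TOO_MANY_FAULTY_LOGINS'

// 'email' would come from the login request, and 'LoginFQL' stands for whatever FQL
// expression performs the actual login and evaluates to false on a faulty attempt.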