DocumentDB 5 second stored procedure execution limit
I have a stored procedure that returns about 4000 documents. Because of the 5 second execution limit, the stored procedure is not returning any data. I tried to handle the collection's accepted value, but it is not working as expected.
I tried setting the response to "return" in case the 5 second limit is reached, but the stored procedure never sets the response.
function getRequirementNodes()
{
    var context = getContext();
    var response = context.getResponse();
    var collection = context.getCollection();
    var collectionLink = collection.getSelfLink();
    var nodesBatch = [];
    var continueToken = true;
    var query = { query: 'SELECT * from root ' };

    getNodes(null);

    function getNodes(continuation)
    {
        var requestOptions = { continuation: continuation };

        var accepted = collection.queryDocuments(collectionLink, query, requestOptions,
            function (err, documentsRead, responseOptions)
            {
                if (documentsRead.length > 0)
                {
                    nodesBatch = nodesBatch.concat(documentsRead);
                }
                else if (responseOptions.continuation)
                {
                    continueToken = responseOptions.continuation;
                    nodesBatch = nodesBatch.concat(documentsRead);
                    getNodes(responseOptions.continuation);
                }
                else
                {
                    continueToken = false;
                    response.setBody(nodesBatch);
                }
            });

        if (!accepted)
        {
            response.setBody("return");
        }
    }
}
The script returns an empty response because the block containing response.setBody() is never reached.
Let me explain. Let's break down this part of the queryDocuments callback:
if (documentsRead.length > 0) {
    nodesBatch = nodesBatch.concat(documentsRead);
} else if (responseOptions.continuation) {
    continueToken = responseOptions.continuation
    nodesBatch = nodesBatch.concat(documentsRead);
    getNodes(responseOptions.continuation);
} else {
    continueToken = false;
    response.setBody(nodesBatch);
}
Notice that if the query returns results within the first page (which it most likely will), the script only appends the query results to nodesBatch:
if (documentsRead.length > 0) {
    nodesBatch = nodesBatch.concat(documentsRead);
}
The script then completes. The response body is never set (so it comes back empty), and even if there is a continuation token, the script never issues the follow-up query.
Assuming the collection is not empty, this is most likely the behaviour you are experiencing.
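The core of the fix is to always append whatever the current page returned and then check for a continuation token, instead of treating the two as mutually exclusive branches. A minimal sketch of the corrected callback (error handling and the response size guard are covered in the full rewrite below):

function (err, documentsRead, responseOptions) {
    if (err) throw err;

    // Always append the current page of results (it may be empty).
    nodesBatch = nodesBatch.concat(documentsRead);

    if (responseOptions.continuation) {
        // More pages remain: query again with the continuation token.
        getNodes(responseOptions.continuation);
    } else {
        // No continuation token: all pages have been read, return the result.
        response.setBody(nodesBatch);
    }
}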
Note: if you are querying a large dataset, you may also hit the response size limit (1 MB).
I rewrote the script to address the issue above, and included a snippet that shows how to deal with the response size limit:
function getRequirementNodes(continuationToken) {
    var context = getContext();
    var response = context.getResponse();
    var collection = context.getCollection();
    var collectionLink = collection.getSelfLink();
    var nodesBatch = [];
    var lastContinuationToken;
    var responseSize = 0;
    var query = {
        query: 'SELECT * FROM root'
    };

    getNodes(continuationToken);

    function getNodes(continuationToken) {
        // Tune the pageSize to fit your dataset.
        var requestOptions = {
            continuation: continuationToken,
            pageSize: 1
        };

        var accepted = collection.queryDocuments(collectionLink, query, requestOptions,
            function (err, documentsRead, responseOptions) {
                // The size of the current query response page.
                var queryPageSize = JSON.stringify(documentsRead).length;

                // DocumentDB has a response size limit of 1 MB.
                if (responseSize + queryPageSize < 1024 * 1024) {
                    // Append the query results to nodesBatch.
                    nodesBatch = nodesBatch.concat(documentsRead);

                    // Keep track of the response size.
                    responseSize += queryPageSize;

                    if (responseOptions.continuation) {
                        // If there is a continuation token, run the query again to get the next page of results.
                        lastContinuationToken = responseOptions.continuation;
                        getNodes(responseOptions.continuation);
                    } else {
                        // If there is no continuation token, we are done. Return the response.
                        response.setBody({
                            "message": "Query completed successfully.",
                            "queryResponse": nodesBatch
                        });
                    }
                } else {
                    // If the response size limit is reached, run the script again with the lastContinuationToken as a script parameter.
                    response.setBody({
                        "message": "Response size limit reached.",
                        "lastContinuationToken": lastContinuationToken,
                        "queryResponse": nodesBatch
                    });
                }
            });

        if (!accepted) {
            // If the execution limit is reached, run the script again with the lastContinuationToken as a script parameter.
            response.setBody({
                "message": "Execution limit reached.",
                "lastContinuationToken": lastContinuationToken,
                "queryResponse": nodesBatch
            });
        }
    }
}
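On the client, the stored procedure can then be driven in a loop: execute it, and whenever the body contains a lastContinuationToken, execute it again with that token as the parameter until the query completes. A minimal sketch, assuming the Node.js documentdb SDK; the account endpoint, key and stored procedure link below are placeholders:

var DocumentClient = require('documentdb').DocumentClient;

// Placeholder account endpoint, key and stored procedure link.
var client = new DocumentClient('https://<account>.documents.azure.com:443/',
                                { masterKey: '<key>' });
var sprocLink = 'dbs/<db>/colls/<coll>/sprocs/getRequirementNodes';

var allNodes = [];

function run(continuationToken) {
    client.executeStoredProcedure(sprocLink, [continuationToken],
        function (err, result) {
            if (err) throw err;

            // Collect the documents returned by this execution.
            allNodes = allNodes.concat(result.queryResponse);

            if (result.lastContinuationToken) {
                // Execution or response size limit was hit: resume from the token.
                run(result.lastContinuationToken);
            } else {
                console.log('Done. Total documents: ' + allNodes.length);
            }
        });
}

run(null);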