我无法复现 - 我正在加载 150k 文档并在 ~0.5 至 ~0.8 秒内转换为列表。以下是 timeit 测试脚本的结果 - 以秒为单位将 150,000 个文档从数据库转换为列表。
--------------------------------------------------
Default batch size
0.530369997025
--------------------------------------------------
Batch Size 1000
0.570069074631
--------------------------------------------------
Batch Size 10000
0.686305046082
这是我的测试脚本:
#!/usr/bin/env python
import timeit
def main():
"""
Testing loading 150k documents in pymongo
"""
setup = """
import datetime
from random import randint
from pymongo import MongoClient
connection = MongoClient()
db = connection.test_load
sample = db.sample
if db.sample.count() < 150000:
connection.drop_database('test_load')
# Insert 150k sample data
for i in xrange(15000):
sample.insert([{"date": datetime.datetime.now(),
"int1": randint(0, 1000000),
"int2": randint(0, 1000000),
"int4": randint(0, 1000000)} for i in xrange(10)])
"""
stmt = """
from pymongo import MongoClient
connection = MongoClient()
db = connection.test_load
sample = db.sample
cursor = sample.find()
test = list(cursor)
assert len(test) == 150000
"""
print "-" * 100
print """Default batch size"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
stmt = """
from pymongo import MongoClient
connection = MongoClient()
db = connection.test_load
sample = db.sample
cursor = sample.find()
cursor.batch_size(1000)
test = list(cursor)
assert len(test) == 150000
"""
print "-" * 100
print """Batch Size 1000"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
stmt = """
from pymongo import MongoClient
connection = MongoClient()
db = connection.test_load
sample = db.sample
cursor = sample.find()
cursor.batch_size(10000)
test = list(cursor)
assert len(test) == 150000
"""
print "-" * 100
print """Batch Size 10000"""
t = timeit.Timer(stmt=stmt, setup=setup)
print t.timeit(1)
if __name__ == "__main__":
main()
我对您如何获得 80 秒而不是 0.8 秒感到困惑!我根据您的定义创建了示例数据 - 这与您的有何不同?