I am trying to add roughly 40-50k rows to my Django app's PostgreSQL database from a text-file dump produced by an application, for data processing.
Here is my function:
import time  # used for the timing below

def populate_backup_db(dumpfile):
    sensordata = sensorrecords()  # sensorrecords is the Model
    start_time = time.time()
    file = open(dumpfile)
    filedata = file.readlines()
    endcount = len(filedata)
    i = 0
    imagecount = 0
    while i < endcount:
        lineitem = split_entry(filedata[i])
        if lineitem[0] == "HEADER":
            imagecount = imagecount + 1
            sensordata.Sensor = lineitem[1]
            sensordata.Date1 = lineitem[2]
            sensordata.Date2 = lineitem[3]
            sensordata.Version = lineitem[4]
            sensordata.Proxyclient = lineitem[8]
            sensordata.Triggerdate = ctodatetime(lineitem[13])
            sensordata.Compression = lineitem[16]
            sensordata.Encryption = lineitem[17]
            sensordata.Fragments = lineitem[21]
            sensordata.Pbit = lineitem[37]
            sensordata.BlockIntFT = lineitem[38]
            sensordata.OriginServer = lineitem[56]
            sensordata.save()
        i = i + 1
    elapsed_time = time.time() - start_time
    print(imagecount, 'entries saved to database from ', dumpfile, '. Time Taken is ', elapsed_time, ' seconds.')
    file.close()
Saving all the data to the database takes about 2-3 minutes.
The dump file is likely to grow, and with this function it could end up taking several minutes to save everything to the database.
How can I extract all the data from the dump file first and then save it all to the database in one go?
I saw a Django method called bulk_create():
bulk_create()
bulk_create(objs, batch_size=None, ignore_conflicts=False)
This method inserts the provided list of objects into the database in an efficient manner (generally only 1 query, no matter how many objects there are):
>>> Entry.objects.bulk_create([
... Entry(headline='This is a test'),
... Entry(headline='This is only a test'),
... ])
This example seems to add the entries manually, whereas my function runs a loop until it has gone through all the entries, saving as it goes.
How do I do this from a loop? Should I replace
sensordata.save()
with
some_list.append(sensordata)
and then, once the loop finishes, do a single
sensordata.objects.bulk_create(some_list)
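To make sure I'm reading the docs right, this is roughly the pattern I have in mind, reusing the hypothetical Entry model from the documentation example above (untested sketch):

entries = []
for headline in ['This is a test', 'This is only a test', 'This is a third test']:
    # build the objects inside the loop, insert them all with one query at the end
    entries.append(Entry(headline=headline))
Entry.objects.bulk_create(entries)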
I edited the code to append the objects to a list and then do a single bulk_create at the end, as shown below:
def populate_backup_db(dumpfile):
    sensordata = sensorrecords()  # sensorrecords is the Model
    datalist = []
    start_time = time.time()
    file = open(dumpfile)
    filedata = file.readlines()
    endcount = len(filedata)
    i = 0
    imagecount = 0
    while i < endcount:
        lineitem = split_entry(filedata[i])
        if lineitem[0] == "HEADER":
            imagecount = imagecount + 1
            sensordata.Sensor = lineitem[1]
            sensordata.Date1 = lineitem[2]
            sensordata.Date2 = lineitem[3]
            sensordata.Version = lineitem[4]
            sensordata.Proxyclient = lineitem[8]
            sensordata.Triggerdate = ctodatetime(lineitem[13])
            sensordata.Compression = lineitem[16]
            sensordata.Encryption = lineitem[17]
            sensordata.Fragments = lineitem[21]
            sensordata.Pbit = lineitem[37]
            sensordata.BlockIntFT = lineitem[38]
            sensordata.OriginServer = lineitem[56]
            datalist.append(sensordata)
        i = i + 1
    elapsed_time = time.time() - start_time
    print(imagecount, 'entries saved to database from ', dumpfile, '. Time Taken is ', elapsed_time, ' seconds.')
    sensordata.objects.bulk_create(datalist)
    file.close()
This raises the error below:
Traceback:
File "C:\Python\Python36\lib\site-packages\django\core\handlers\exception.py" in inner
34. response = get_response(request)
File "C:\Python\Python36\lib\site-packages\django\core\handlers\base.py" in _get_response
126. response = self.process_exception_by_middleware(e, request)
File "C:\Python\Python36\lib\site-packages\django\core\handlers\base.py" in _get_response
124. response = wrapped_callback(request, *callback_args, **callback_kwargs)
File "C:\Python\Python36\lib\site-packages\django\contrib\auth\decorators.py" in _wrapped_view
21. return view_func(request, *args, **kwargs)
File "C:\Users\va\eclipse-workspace\prod\home\views.py" in process_data
68. get_backup_data()
File "C:\Users\va\eclipse-workspace\prod\home\process.py" in get_backup_data
8. populate_backup_db('c:\\users\\va\\desktop\\vsp\\backupdata_server.txt')
File "C:\Users\va\eclipse-workspace\prod\home\process.py" in populate_backup_db
122. sensordata.objects.bulk_create(datalist)
File "C:\Python\Python36\lib\site-packages\django\db\models\manager.py" in __get__
176. raise AttributeError("Manager isn't accessible via %s instances" % cls.__name__)
Exception Type: AttributeError at /process_data/
Exception Value: Manager isn't accessible via sensorrecords instances
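If I'm reading the error right, .objects is only accessible on the model class, not on an instance, so I'm guessing the call would have to look something like the untested sketch below, with a fresh sensorrecords object built for each HEADER line instead of reusing the single sensordata instance (field list shortened here):

# inside the loop: build a new object per HEADER line
record = sensorrecords(
    Sensor=lineitem[1],
    Date1=lineitem[2],
    # ... remaining fields set as in the loop above ...
    OriginServer=lineitem[56],
)
datalist.append(record)

# after the loop: call the manager on the class, not on an instance
sensorrecords.objects.bulk_create(datalist)

Is that the correct way to call it?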