I wrote a script in which I read about 4 million points and 800,000 plots (polygons). The script clips the points that fall inside each plot and saves a new text file for each plot.
After a while my PC's memory fills up. I have tried digging into my script, but on every iteration of the loop for i in xrange(len(sr)): each object is replaced, and the clipped points are written to a new txt file.
Is there some strategy in this situation to reduce memory usage without losing performance (the script is already slow)? I am a beginner with Python, so I apologize if the question is a simple one.
Thanks in advance, Gianni
inFile ="C://04-las_clip_inside_area//prova//Ku_115_class_Notground_normalize.las"
poly ="C://04-las_clip_inside_area//prova//ku_115_plot_clip.shp"
chunkSize = None
MinPoints = 1
sf = shapefile.Reader(poly) #open shpfile
sr = sf.shapeRecords()
poly_filename, ext = path.splitext(poly)
inFile_filename = os.path.splitext(os.path.basename(inFile))[0]
pbar = ProgressBar(len(sr)) # set progressbar
if chunkSize == None:
    points = [(p.x, p.y) for p in lasfile.File(inFile, None, 'r')]
    for i in xrange(len(sr)):
        pbar.update(i + 1)  # progressbar
        verts = np.array(sr[i].shape.points, float)
        record = sr[i].record[0]
        index = nonzero(points_inside_poly(points, verts))[0]
        if len(index) >= MinPoints:
            file_out = open("{0}_{1}_{2}.txt".format(poly_filename, inFile_filename, record), "w")
            inside_points = [lasfile.File(inFile, None, 'r')[l] for l in index]
            for p in inside_points:
                file_out.write("%s %s %s %s %s %s %s %s %s %s %s" % (p.x, p.y, p.z, p.intensity, p.return_number, p.number_of_returns, p.scan_direction, p.flightline_edge, p.classification, p.scan_angle, record) + "\n")
            file_out.close()
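One detail that stands out to me in this branch is that a new lasfile.File object is opened for every plot that contains points and is never closed. Below is a minimal sketch (my own attempt, not a tested fix), assuming the liblas file object can be indexed repeatedly after a single open, as the list comprehension above already does per plot: open the LAS file once, reuse the handle, and let a with block close each output file.

    # Sketch only: one LAS handle for the whole loop instead of one per plot.
    las = lasfile.File(inFile, None, 'r')
    points = [(p.x, p.y) for p in las]
    for i in xrange(len(sr)):
        pbar.update(i + 1)  # progressbar
        verts = np.array(sr[i].shape.points, float)
        record = sr[i].record[0]
        index = nonzero(points_inside_poly(points, verts))[0]
        if len(index) >= MinPoints:
            out_name = "{0}_{1}_{2}.txt".format(poly_filename, inFile_filename, record)
            with open(out_name, "w") as file_out:  # closed automatically, even on error
                for l in index:
                    p = las[l]  # fetch one point at a time instead of building a list
                    fields = (p.x, p.y, p.z, p.intensity, p.return_number,
                              p.number_of_returns, p.scan_direction, p.flightline_edge,
                              p.classification, p.scan_angle, record)
                    file_out.write(' '.join(map(str, fields)) + "\n")
    las.close()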
Here is the original function:
def LAS2TXTClipSplitbyChunk(inFile,poly,chunkSize=1,MinPoints=1):
    sf = shapefile.Reader(poly)  # open shpfile
    sr = sf.shapeRecords()
    poly_filename, ext = path.splitext(poly)
    inFile_filename = os.path.splitext(os.path.basename(inFile))[0]
    pbar = ProgressBar(len(sr))  # set progressbar
    if chunkSize == None:
        points = [(p.x, p.y) for p in lasfile.File(inFile, None, 'r')]
        for i in xrange(len(sr)):
            pbar.update(i + 1)  # progressbar
            verts = np.array(sr[i].shape.points, float)
            record = sr[i].record[0]
            index = nonzero(points_inside_poly(points, verts))[0]
            if len(index) >= MinPoints:
                file_out = open("{0}_{1}_{2}.txt".format(poly_filename, inFile_filename, record), "w")
                inside_points = [lasfile.File(inFile, None, 'r')[l] for l in index]
                for p in inside_points:
                    file_out.write("%s %s %s %s %s %s %s %s %s %s %s" % (p.x, p.y, p.z, p.intensity, p.return_number, p.number_of_returns, p.scan_direction, p.flightline_edge, p.classification, p.scan_angle, record) + "\n")
                file_out.close()
    else:
        for i in xrange(len(sr)):
            pbar.update(i + 1)  # progressbar
            verts = np.array(sr[i].shape.points, float)
            record = sr[i].record[0]
            f = lasfile.File(inFile, None, 'r')
            file_out = open("{0}_{1}_{2}.txt".format(poly_filename, inFile_filename, record), "w")
            TotPoints = 0
            while True:
                chunk = list(islice(f, chunkSize))
                if not chunk:
                    break
                points = [(p.x, p.y) for p in chunk]
                index = nonzero(points_inside_poly(points, verts))[0]
                TotPoints += len(index)  # count the points inside the plot
                chunk = [chunk[l] for l in index]
                for p in chunk:
                    file_out.write("%s %s %s %s %s %s %s %s %s %s %s" % (p.x, p.y, p.z, p.intensity, p.return_number, p.number_of_returns, p.scan_direction, p.flightline_edge, p.classification, p.scan_angle, record) + "\n")
            if TotPoints >= MinPoints:
                file_out.close()
            else:
                file_out.close()
                os.remove("{0}_{1}_{2}.txt".format(poly_filename, inFile_filename, record))
            f.close()
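For reference, a call into the chunked branch would look roughly like this; the chunk size is only an illustrative value, not a recommendation:

    # Example call (illustrative chunk size): only chunkSize points are held in
    # memory at a time while clipping against each plot.
    LAS2TXTClipSplitbyChunk(inFile, poly, chunkSize=100000, MinPoints=1)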
The script suggested by unutbu is:
import shapefile
import os
import glob
from os import path
import numpy as np
from numpy import nonzero
from matplotlib.nxutils import points_inside_poly
from itertools import islice
from liblas import file as lasfile
from shapely.geometry import Polygon
from progressbar import ProgressBar
import multiprocessing as mp
inFile ="C://04-las_clip_inside_area//prova//Ku_115_class_Notground_normalize.las"
poly ="C://04-las_clip_inside_area//prova//ku_115_plot_clip.shp"
chunkSize = None
MinPoints = 1
def pointinside(record):
    verts = np.array(record.shape.points, float)
    record = record.record[0]
    index = nonzero(points_inside_poly(points, verts))[0]
    if len(index) >= MinPoints:
        outfile = "{0}_{1}_{2}.txt".format(poly_filename, inFile_filename, record)
        with open(outfile, "w") as file_out:
            inside_points = [lasfile.File(inFile, None, 'r')[l] for l in index]
            for p in inside_points:
                fields = (p.x, p.y, p.z, p.intensity, p.return_number,
                          p.number_of_returns, p.scan_direction, p.flightline_edge,
                          p.classification, p.scan_angle, record)
                file_out.write(' '.join(map(str, fields)) + "\n")
sf = shapefile.Reader(poly) #open shpfile
sr = sf.shapeRecords()
poly_filename, ext = path.splitext(poly)
inFile_filename = os.path.splitext(os.path.basename(inFile))[0]
pbar = ProgressBar(len(sr)) # set progressbar
if chunkSize == None:
    points = [(p.x, p.y) for p in lasfile.File(inFile, None, 'r')]
    for i in xrange(len(sr)):
        pbar.update(i + 1)  # progressbar
        proc = mp.Process(target=pointinside, args=(sr[i],))
        proc.start()
        proc.join()
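Note that the loop above starts one worker per plot and joins it immediately, so the subprocesses still run one at a time; the gain is that whatever memory the liblas bindings hold is released when each process exits. A possible variant (my own sketch, not part of unutbu's suggestion) is a multiprocessing.Pool with maxtasksperchild=1, which gives the same per-task cleanup while reusing the pool machinery. It assumes a fork-based start (Linux/Mac), so the module-level points list is inherited by the workers, and that the shape records can be pickled; on Windows the data would have to be rebuilt inside each worker instead.

    # Sketch only: run pointinside() through a small pool; maxtasksperchild=1
    # makes each worker exit after one record, so leaked memory goes back to the OS.
    if __name__ == '__main__':
        pool = mp.Pool(processes=2, maxtasksperchild=1)
        for done, _ in enumerate(pool.imap_unordered(pointinside, sr), 1):
            pbar.update(done)  # progressbar driven by completed records
        pool.close()
        pool.join()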