I'm building a pipeline that processes, aggregates, and transforms data from a CSV file and then writes it back out to another CSV file... I load rows from a 19-column CSV and, after some math (map-reduce style), write back a 30-column CSV.
Everything worked until I tried to upload a 25 MB file (250,000 rows) to the app, so I decided to stream all the operations instead of processing eagerly... But now, as I convert the functions one by one to use streams, I've hit something I don't understand: with only 5 of the fields created so far, when I try to write to the file the program freezes and stops writing after a few thousand rows.
Every function is streamed, so as far as I know there shouldn't be any locking, and the first few thousand writes work fine, so I'm wondering what's going on. In the Erlang Observer I can only see resource usage drop to near 0, and it never writes to the file again.
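For reference, my understanding is that streams are lazy and each stream derived from a source re-evaluates that source independently when consumed; a standard-library toy example of what I mean (not my actual code):

# Toy example (standard library only): each stream derived from `source`
# is lazy; nothing runs until something consumes it, and each consumer
# re-walks the source on its own.
source =
  Stream.map(1..5, fn n ->
    IO.puts("producing #{n}")
    n
  end)

doubled = Stream.map(source, &(&1 * 2))
squared = Stream.map(source, &(&1 * &1))

# Zipping the two derived streams pulls from both lazily, element by element;
# "producing 1..3" is printed twice, once per derived stream.
Stream.zip(doubled, squared) |> Enum.take(3)
# => [{2, 1}, {4, 4}, {6, 9}]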
Here is my stream-processing function (it takes the stream loaded from the file), followed by my write function:
def process(stream, field_longs_lats, team_settings) do
  main_stream =
    stream
    # Remove rows that don't have a timestamp
    |> Stream.filter(fn [time | _tl] -> time != "-" end)
    # Filter out rows with duplicated timestamps
    |> Stream.uniq_by(fn [time | _tl] -> time end)
    |> Stream.map(&Transform.apply_row_tranformations/1)

  cumulative_milli =
    main_stream
    |> Stream.map(fn [_time, milli | _tl] -> milli end)
    |> Statistics.cumulative_sum()

  speeds =
    main_stream
    |> Stream.map(fn [_time, _milli, _lat, _long, pace | _tl] ->
      pace
    end)
    |> Stream.map(&Statistics.get_speed/1)

  cals = Motion.calories_per_timestep(cumulative_milli, cumulative_milli)

  long_stream =
    main_stream
    |> Stream.map(fn [_time, _milli, lat | _tl] -> lat end)

  lat_stream =
    main_stream
    |> Stream.map(fn [_time, _milli, _lat, long | _tl] -> long end)

  x_y_tuples =
    RelativeCoordinates.relative_coordinates(long_stream, lat_stream, field_longs_lats)

  x = Stream.map(x_y_tuples, fn {x, _y} -> x end)
  y = Stream.map(x_y_tuples, fn {_x, y} -> y end)

  [x, y, cals, long_stream, lat_stream]
end
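For context, I call process/3 roughly like this (a simplified sketch: the file names, the comma split, and where field_longs_lats / team_settings come from are placeholders, not my real code):

# Simplified call site (paths and parsing are placeholders):
streams =
  "input_19_cols.csv"
  |> File.stream!()
  |> Stream.map(&String.split(String.trim(&1), ","))
  |> process(field_longs_lats, team_settings)

write_to_file(streams, "output.csv")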
And the write:
def write_to_file(keyword_list, file_name) do
  file = File.open!(file_name, [:write, :utf8])
  IO.write(file, V4.empty_v4_headers() <> "\n")

  keyword_list
  |> Stream.zip()
  |> Stream.each(&write_tuple_row(&1, file))
  |> Stream.run()

  File.close(file)
end
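The Stream.zip/1 step is what interleaves the five streams into one tuple per output row; a minimal standalone illustration:

# Stream.zip/1 takes an enumerable of streams and emits one tuple per
# position, stopping at the shortest stream.
cols = [Stream.map(1..3, & &1), Stream.map(1..3, &(&1 * 10))]
Stream.zip(cols) |> Enum.to_list()
# => [{1, 10}, {2, 20}, {3, 30}]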
@spec write_tuple_row(tuple(), pid()) :: :ok
def write_tuple_row(tuple, file) do
  IO.inspect("writing #{inspect(tuple)}")

  row_content =
    Tuple.to_list(tuple)
    |> Enum.map_join(",", fn value -> Transformations.to_string(value) end)

  IO.write(file, row_content <> "\n")
end