refactor: replace O(n) length/1 call with a running error counter for performance

This commit is contained in:
carla 2026-01-19 12:37:39 +01:00 committed by Simon
parent 3b5225893d
commit 22593af621
Signed by: simon
GPG key ID: 40E7A58C4AA1EDB2

View file

@ -70,7 +70,8 @@ defmodule Mv.Membership.Import.MemberCSV do
@type chunk_result :: %{ @type chunk_result :: %{
inserted: non_neg_integer(), inserted: non_neg_integer(),
failed: non_neg_integer(), failed: non_neg_integer(),
errors: list(Error.t()) errors: list(Error.t()),
errors_truncated?: boolean()
} }
alias Mv.Membership.Import.CsvParser alias Mv.Membership.Import.CsvParser
@ -269,6 +270,7 @@ defmodule Mv.Membership.Import.MemberCSV do
- No additional errors are collected in the `errors` list - No additional errors are collected in the `errors` list
- Processing continues for all rows - Processing continues for all rows
- The `failed` count continues to increment correctly for all failed rows - The `failed` count continues to increment correctly for all failed rows
- The `errors_truncated?` flag is set to `true` to indicate that additional errors were suppressed
## Returns ## Returns
@ -286,7 +288,7 @@ defmodule Mv.Membership.Import.MemberCSV do
iex> chunk = [{2, %{member: %{email: "invalid"}, custom: %{}}}] iex> chunk = [{2, %{member: %{email: "invalid"}, custom: %{}}}]
iex> opts = [existing_error_count: 25, max_errors: 50] iex> opts = [existing_error_count: 25, max_errors: 50]
iex> MemberCSV.process_chunk(chunk, %{}, %{}, opts) iex> MemberCSV.process_chunk(chunk, %{}, %{}, opts)
{:ok, %{inserted: 0, failed: 1, errors: [%Error{}]}} {:ok, %{inserted: 0, failed: 1, errors: [%Error{}], errors_truncated?: false}}
""" """
@spec process_chunk( @spec process_chunk(
list({pos_integer(), map()}), list({pos_integer(), map()}),
@ -299,31 +301,31 @@ defmodule Mv.Membership.Import.MemberCSV do
existing_error_count = Keyword.get(opts, :existing_error_count, 0) existing_error_count = Keyword.get(opts, :existing_error_count, 0)
max_errors = Keyword.get(opts, :max_errors, 50) max_errors = Keyword.get(opts, :max_errors, 50)
{inserted, failed, errors} = {inserted, failed, errors, _collected_error_count, truncated?} =
Enum.reduce(chunk_rows_with_lines, {0, 0, []}, fn {line_number, row_map}, Enum.reduce(chunk_rows_with_lines, {0, 0, [], 0, false}, fn {line_number, row_map},
{acc_inserted, acc_failed, acc_errors} -> {acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?} ->
current_error_count = existing_error_count + length(acc_errors) current_error_count = existing_error_count + acc_error_count
case process_row(row_map, line_number, custom_field_lookup) do case process_row(row_map, line_number, custom_field_lookup) do
{:ok, _member} -> {:ok, _member} ->
{acc_inserted + 1, acc_failed, acc_errors} {acc_inserted + 1, acc_failed, acc_errors, acc_error_count, acc_truncated?}
{:error, error} -> {:error, error} ->
new_acc_failed = acc_failed + 1 new_acc_failed = acc_failed + 1
# Only collect errors if under limit # Only collect errors if under limit
new_acc_errors = {new_acc_errors, new_error_count, new_truncated?} =
if current_error_count < max_errors do if current_error_count < max_errors do
[error | acc_errors] {[error | acc_errors], acc_error_count + 1, acc_truncated?}
else else
acc_errors {acc_errors, acc_error_count, true}
end end
{acc_inserted, new_acc_failed, new_acc_errors} {acc_inserted, new_acc_failed, new_acc_errors, new_error_count, new_truncated?}
end end
end) end)
{:ok, %{inserted: inserted, failed: failed, errors: Enum.reverse(errors)}} {:ok, %{inserted: inserted, failed: failed, errors: Enum.reverse(errors), errors_truncated?: truncated?}}
end end
@doc """ @doc """