feat: add error capping

This commit is contained in:
carla 2026-01-19 12:02:28 +01:00
parent c31392e4fe
commit 3cbd90ecdd
2 changed files with 161 additions and 2 deletions

View file

@ -258,7 +258,17 @@ defmodule Mv.Membership.Import.MemberCSV do
- `row_map` - Map with `:member` and `:custom` keys containing field values
- `column_map` - Map of canonical field names (atoms) to column indices (for reference)
- `custom_field_map` - Map of custom field IDs (strings) to column indices (for reference)
- `opts` - Optional keyword list for processing options
- `opts` - Optional keyword list for processing options:
- `:custom_field_lookup` - Map of custom field IDs to metadata (default: `%{}`)
- `:existing_error_count` - Number of errors already collected in previous chunks (default: `0`)
- `:max_errors` - Maximum total number of errors to collect per import (default: `50`)
## Error Capping
The total number of collected errors per import is capped at `max_errors`. When the limit is reached:
- No additional errors are collected in the `errors` list
- Processing continues for all rows
- The `failed` count continues to increment correctly for all failed rows
## Returns
@ -272,6 +282,11 @@ defmodule Mv.Membership.Import.MemberCSV do
iex> custom_field_map = %{}
iex> MemberCSV.process_chunk(chunk, column_map, custom_field_map)
{:ok, %{inserted: 1, failed: 0, errors: []}}
iex> chunk = [{2, %{member: %{email: "invalid"}, custom: %{}}}]
iex> opts = [existing_error_count: 25, max_errors: 50]
iex> MemberCSV.process_chunk(chunk, %{}, %{}, opts)
{:ok, %{inserted: 0, failed: 1, errors: [%Error{}]}}
"""
@spec process_chunk(
list({pos_integer(), map()}),
@ -281,16 +296,30 @@ defmodule Mv.Membership.Import.MemberCSV do
) :: {:ok, chunk_result()} | {:error, String.t()}
def process_chunk(chunk_rows_with_lines, _column_map, _custom_field_map, opts \\ []) do
custom_field_lookup = Keyword.get(opts, :custom_field_lookup, %{})
existing_error_count = Keyword.get(opts, :existing_error_count, 0)
max_errors = Keyword.get(opts, :max_errors, 50)
{inserted, failed, errors} =
Enum.reduce(chunk_rows_with_lines, {0, 0, []}, fn {line_number, row_map},
{acc_inserted, acc_failed, acc_errors} ->
current_error_count = existing_error_count + length(acc_errors)
case process_row(row_map, line_number, custom_field_lookup) do
{:ok, _member} ->
{acc_inserted + 1, acc_failed, acc_errors}
{:error, error} ->
{acc_inserted, acc_failed + 1, [error | acc_errors]}
new_acc_failed = acc_failed + 1
# Only collect errors if under limit
new_acc_errors =
if current_error_count < max_errors do
[error | acc_errors]
else
acc_errors
end
{acc_inserted, new_acc_failed, new_acc_errors}
end
end)