From 257739d2736dd15851d25499d59ace0c1a27b894 Mon Sep 17 00:00:00 2001
From: carla
Date: Mon, 19 Jan 2026 12:02:28 +0100
Subject: [PATCH] feat: adds error capping

---
 lib/mv/membership/import/member_csv.ex        |  33 ++++-
 test/mv/membership/import/member_csv_test.exs | 130 ++++++++++++++++++
 2 files changed, 161 insertions(+), 2 deletions(-)

diff --git a/lib/mv/membership/import/member_csv.ex b/lib/mv/membership/import/member_csv.ex
index ec729cd..5f92109 100644
--- a/lib/mv/membership/import/member_csv.ex
+++ b/lib/mv/membership/import/member_csv.ex
@@ -258,7 +258,17 @@ defmodule Mv.Membership.Import.MemberCSV do
   - `row_map` - Map with `:member` and `:custom` keys containing field values
   - `column_map` - Map of canonical field names (atoms) to column indices (for reference)
   - `custom_field_map` - Map of custom field IDs (strings) to column indices (for reference)
-  - `opts` - Optional keyword list for processing options
+  - `opts` - Optional keyword list for processing options:
+    - `:custom_field_lookup` - Map of custom field IDs to metadata (default: `%{}`)
+    - `:existing_error_count` - Number of errors already collected in previous chunks (default: `0`)
+    - `:max_errors` - Maximum number of errors to collect per import overall (default: `50`)
+
+  ## Error Capping
+
+  Errors are capped at `max_errors` per import overall. When the limit is reached:
+  - No additional errors are collected in the `errors` list
+  - Processing continues for all rows
+  - The `failed` count continues to increment correctly for all failed rows
 
   ## Returns
 
@@ -272,6 +282,11 @@ defmodule Mv.Membership.Import.MemberCSV do
       iex> custom_field_map = %{}
       iex> MemberCSV.process_chunk(chunk, column_map, custom_field_map)
       {:ok, %{inserted: 1, failed: 0, errors: []}}
+
+      iex> chunk = [{2, %{member: %{email: "invalid"}, custom: %{}}}]
+      iex> opts = [existing_error_count: 25, max_errors: 50]
+      iex> MemberCSV.process_chunk(chunk, %{}, %{}, opts)
+      {:ok, %{inserted: 0, failed: 1, errors: [%Error{}]}}
   """
   @spec process_chunk(
           list({pos_integer(), map()}),
@@ -281,16 +296,30 @@ defmodule Mv.Membership.Import.MemberCSV do
         ) :: {:ok, chunk_result()} | {:error, String.t()}
   def process_chunk(chunk_rows_with_lines, _column_map, _custom_field_map, opts \\ []) do
     custom_field_lookup = Keyword.get(opts, :custom_field_lookup, %{})
+    existing_error_count = Keyword.get(opts, :existing_error_count, 0)
+    max_errors = Keyword.get(opts, :max_errors, 50)
 
     {inserted, failed, errors} =
       Enum.reduce(chunk_rows_with_lines, {0, 0, []}, fn {line_number, row_map},
                                                         {acc_inserted, acc_failed, acc_errors} ->
+        current_error_count = existing_error_count + length(acc_errors)
+
         case process_row(row_map, line_number, custom_field_lookup) do
           {:ok, _member} ->
             {acc_inserted + 1, acc_failed, acc_errors}
 
           {:error, error} ->
-            {acc_inserted, acc_failed + 1, [error | acc_errors]}
+            new_acc_failed = acc_failed + 1
+
+            # Only collect errors if under limit
+            new_acc_errors =
+              if current_error_count < max_errors do
+                [error | acc_errors]
+              else
+                acc_errors
+              end
+
+            {acc_inserted, new_acc_failed, new_acc_errors}
         end
       end)
 
diff --git a/test/mv/membership/import/member_csv_test.exs b/test/mv/membership/import/member_csv_test.exs
index 6edc9d8..b5af238 100644
--- a/test/mv/membership/import/member_csv_test.exs
+++ b/test/mv/membership/import/member_csv_test.exs
@@ -325,6 +325,136 @@ defmodule Mv.Membership.Import.MemberCSVTest do
       # Check that @doc exists by reading the module
       assert function_exported?(MemberCSV, :process_chunk, 4)
     end
+
+    test "error capping collects exactly 50 errors" do
errors" do + # Create 50 rows with invalid emails + chunk_rows_with_lines = + 1..50 + |> Enum.map(fn i -> + {i + 1, %{member: %{email: "invalid-email-#{i}"}, custom: %{}}} + end) + + column_map = %{email: 0} + custom_field_map = %{} + opts = [existing_error_count: 0, max_errors: 50] + + assert {:ok, chunk_result} = + MemberCSV.process_chunk(chunk_rows_with_lines, column_map, custom_field_map, opts) + + assert chunk_result.inserted == 0 + assert chunk_result.failed == 50 + assert length(chunk_result.errors) == 50 + end + + test "error capping collects only first 50 errors when more than 50 errors occur" do + # Create 60 rows with invalid emails + chunk_rows_with_lines = + 1..60 + |> Enum.map(fn i -> + {i + 1, %{member: %{email: "invalid-email-#{i}"}, custom: %{}}} + end) + + column_map = %{email: 0} + custom_field_map = %{} + opts = [existing_error_count: 0, max_errors: 50] + + assert {:ok, chunk_result} = + MemberCSV.process_chunk(chunk_rows_with_lines, column_map, custom_field_map, opts) + + assert chunk_result.inserted == 0 + assert chunk_result.failed == 60 + assert length(chunk_result.errors) == 50 + end + + test "error capping respects existing_error_count" do + # Create 30 rows with invalid emails + chunk_rows_with_lines = + 1..30 + |> Enum.map(fn i -> + {i + 1, %{member: %{email: "invalid-email-#{i}"}, custom: %{}}} + end) + + column_map = %{email: 0} + custom_field_map = %{} + opts = [existing_error_count: 25, max_errors: 50] + + assert {:ok, chunk_result} = + MemberCSV.process_chunk(chunk_rows_with_lines, column_map, custom_field_map, opts) + + assert chunk_result.inserted == 0 + assert chunk_result.failed == 30 + # Should only collect 25 errors (25 existing + 25 new = 50 limit) + assert length(chunk_result.errors) == 25 + end + + test "error capping collects no errors when limit already reached" do + # Create 10 rows with invalid emails + chunk_rows_with_lines = + 1..10 + |> Enum.map(fn i -> + {i + 1, %{member: %{email: "invalid-email-#{i}"}, custom: %{}}} + end) + + column_map = %{email: 0} + custom_field_map = %{} + opts = [existing_error_count: 50, max_errors: 50] + + assert {:ok, chunk_result} = + MemberCSV.process_chunk(chunk_rows_with_lines, column_map, custom_field_map, opts) + + assert chunk_result.inserted == 0 + assert chunk_result.failed == 10 + assert length(chunk_result.errors) == 0 + end + + test "error capping with mixed success and failure" do + # Create 100 rows: 30 valid, 70 invalid + valid_rows = + 1..30 + |> Enum.map(fn i -> + {i + 1, %{member: %{email: "valid#{i}@example.com"}, custom: %{}}} + end) + + invalid_rows = + 31..100 + |> Enum.map(fn i -> + {i + 1, %{member: %{email: "invalid-email-#{i}"}, custom: %{}}} + end) + + chunk_rows_with_lines = valid_rows ++ invalid_rows + + column_map = %{email: 0} + custom_field_map = %{} + opts = [existing_error_count: 0, max_errors: 50] + + assert {:ok, chunk_result} = + MemberCSV.process_chunk(chunk_rows_with_lines, column_map, custom_field_map, opts) + + assert chunk_result.inserted == 30 + assert chunk_result.failed == 70 + # Should only collect 50 errors (limit reached) + assert length(chunk_result.errors) == 50 + end + + test "error capping with custom max_errors" do + # Create 20 rows with invalid emails + chunk_rows_with_lines = + 1..20 + |> Enum.map(fn i -> + {i + 1, %{member: %{email: "invalid-email-#{i}"}, custom: %{}}} + end) + + column_map = %{email: 0} + custom_field_map = %{} + opts = [existing_error_count: 0, max_errors: 10] + + assert {:ok, chunk_result} = + 
+               MemberCSV.process_chunk(chunk_rows_with_lines, column_map, custom_field_map, opts)
+
+      assert chunk_result.inserted == 0
+      assert chunk_result.failed == 20
+      assert length(chunk_result.errors) == 10
+    end
   end
 
   describe "validate_row/3" do