defmodule Mv.Membership.Import.MemberCSV do
  @moduledoc """
  Service module for importing members from CSV files.

  This module provides the core API for CSV member import functionality:

  - `prepare/2` - Parses and validates CSV content, returns import state
  - `process_chunk/4` - Processes a chunk of rows and creates members

  ## Error Handling

  Errors are returned as `%MemberCSV.Error{}` structs containing:

  - `csv_line_number` - The physical line number in the CSV file (or `nil` for general errors)
  - `field` - The field name (atom) or `nil` if not field-specific
  - `message` - Human-readable error message (or `nil` for general errors)

  ## Import State

  The `import_state` returned by `prepare/2` contains:

  - `chunks` - List of row chunks ready for processing
  - `column_map` - Map of canonical field names to column indices
  - `custom_field_map` - Map of custom field names to column indices
  - `warnings` - List of warning messages (e.g., unknown custom field columns)

  ## Chunk Results

  The `chunk_result` returned by `process_chunk/4` contains:

  - `inserted` - Number of successfully created members
  - `failed` - Number of failed member creations
  - `errors` - List of `%MemberCSV.Error{}` structs (capped at 50 per import)

  ## Examples

      # Prepare CSV for import
      {:ok, import_state} = MemberCSV.prepare(csv_content)

      # Process first chunk
      chunk = Enum.at(import_state.chunks, 0)

      {:ok, result} =
        MemberCSV.process_chunk(chunk, import_state.column_map, import_state.custom_field_map)
  """

  defmodule Error do
    @moduledoc """
    Error struct for CSV import errors.

    ## Fields

    - `csv_line_number` - The physical line number in the CSV file (1-based, header is line 1)
    - `field` - The field name as an atom (e.g., `:email`) or `nil` if not field-specific
    - `message` - Human-readable error message
    """
    defstruct csv_line_number: nil, field: nil, message: nil

    @type t :: %__MODULE__{
            csv_line_number: pos_integer() | nil,
            field: atom() | nil,
            message: String.t() | nil
          }
  end

  @type import_state :: %{
          chunks: list(list({pos_integer(), map()})),
          column_map: %{atom() => non_neg_integer()},
          custom_field_map: %{String.t() => non_neg_integer()},
          custom_field_lookup: %{
            String.t() => %{id: String.t(), value_type: atom(), name: String.t()}
          },
          warnings: list(String.t())
        }

  @type chunk_result :: %{
          inserted: non_neg_integer(),
          failed: non_neg_integer(),
          errors: list(Error.t()),
          errors_truncated?: boolean()
        }

  alias Mv.Membership.Import.CsvParser
  alias Mv.Membership.Import.HeaderMapper

  use Gettext, backend: MvWeb.Gettext

  alias Mv.Helpers.SystemActor

  # Import FieldTypes for human-readable type labels
  alias MvWeb.Translations.FieldTypes

  # Configuration constants
  @default_max_errors 50
  @default_chunk_size 200
  @default_max_rows 1000

  @doc """
  Prepares CSV content for import by parsing, mapping headers, and validating limits.

  This function:
  1. Strips UTF-8 BOM if present
  2. Detects CSV delimiter (semicolon or comma)
  3. Parses headers and data rows
  4. Maps headers to canonical member fields
  5. Maps custom field columns by name
  6. Validates row count limits
  7. Chunks rows for processing

  ## Parameters
  - `file_content` - The raw CSV file content as a string
  - `opts` - Optional keyword list:
    - `:max_rows` - Maximum number of data rows allowed (default: 1000)
    - `:chunk_size` - Number of rows per chunk (default: 200)
    - `:actor` - Actor for authorization (default: system actor for systemic operations)

  ## Returns
  - `{:ok, import_state}` - Successfully prepared import state
  - `{:error, reason}` - Error reason (string or error struct)

  ## Examples

      iex> MemberCSV.prepare("email\\njohn@example.com")
      {:ok, %{chunks: [...], column_map: %{email: 0}, ...}}

      iex> MemberCSV.prepare("")
      {:error, "CSV file is empty"}
  """
  @spec prepare(String.t(), keyword()) :: {:ok, import_state()} | {:error, String.t()}
  def prepare(file_content, opts \\ []) do
    max_rows = Keyword.get(opts, :max_rows, @default_max_rows)
    chunk_size = Keyword.get(opts, :chunk_size, @default_chunk_size)
    actor = Keyword.get(opts, :actor, SystemActor.get_system_actor())

    # Any failing step returns its {:error, reason} unchanged (no else clause).
    with {:ok, headers, rows} <- CsvParser.parse(file_content),
         {:ok, custom_fields} <- load_custom_fields(actor),
         {:ok, maps, warnings} <- build_header_maps(headers, custom_fields),
         :ok <- validate_row_count(rows, max_rows) do
      chunks = chunk_rows(rows, maps, chunk_size)

      # Build custom field lookup for efficient value processing
      custom_field_lookup = build_custom_field_lookup(custom_fields)

      {:ok,
       %{
         chunks: chunks,
         column_map: maps.member,
         custom_field_map: maps.custom,
         custom_field_lookup: custom_field_lookup,
         warnings: warnings
       }}
    end
  end

  # Loads all custom fields from the database.
  # Function-level rescue: Ash.read!/2 raises on failure, so this is the
  # boundary that converts that crash into an {:error, message} tuple.
  defp load_custom_fields(actor) do
    custom_fields =
      Mv.Membership.CustomField
      |> Ash.read!(actor: actor)

    {:ok, custom_fields}
  rescue
    e ->
      {:error, "Failed to load custom fields: #{Exception.message(e)}"}
  end

  # Builds custom field lookup map for efficient value processing.
  # Keys are stringified custom field ids; values carry id, value_type, and name.
  defp build_custom_field_lookup(custom_fields) do
    custom_fields
    |> Enum.reduce(%{}, fn cf, acc ->
      id_str = to_string(cf.id)
      Map.put(acc, id_str, %{id: cf.id, value_type: cf.value_type, name: cf.name})
    end)
  end

  # Builds header maps using HeaderMapper and collects warnings for unknown custom fields
  defp build_header_maps(headers, custom_fields) do
    # Convert custom fields to maps with id and name
    custom_field_maps =
      Enum.map(custom_fields, fn cf ->
        %{id: to_string(cf.id), name: cf.name}
      end)

    case HeaderMapper.build_maps(headers, custom_field_maps) do
      {:ok, %{member: member_map, custom: custom_map, unknown: unknown}} ->
        # Build warnings for unknown custom field columns
        warnings =
          unknown
          |> Enum.filter(fn header ->
            # Check if it could be a custom field (not a known member field)
            normalized = HeaderMapper.normalize_header(header)
            # If it's not empty and not a member field, it might be a custom field
            normalized != "" && not member_field?(normalized)
          end)
          |> Enum.map(fn header ->
            gettext(
              "Unknown column '%{header}' will be ignored. If this is a custom field, create it in Mila before importing.",
              header: header
            )
          end)

        {:ok, %{member: member_map, custom: custom_map}, warnings}

      {:error, reason} ->
        {:error, reason}
    end
  end

  # Checks if a normalized header matches a member field
  # Uses HeaderMapper.known_member_fields/0 as single source of truth
  defp member_field?(normalized) when is_binary(normalized) do
    MapSet.member?(HeaderMapper.known_member_fields(), normalized)
  end

  defp member_field?(_), do: false

  # Validates that row count doesn't exceed limit
  defp validate_row_count(rows, max_rows) do
    if length(rows) > max_rows do
      {:error, "CSV file exceeds maximum row limit of #{max_rows} rows"}
    else
      :ok
    end
  end

  # Chunks rows and converts them to row maps using column maps.
  # Each row arrives as {line_number, row_values} and leaves as {line_number, row_map}.
  defp chunk_rows(rows, maps, chunk_size) do
    rows
    |> Enum.chunk_every(chunk_size)
    |> Enum.map(fn chunk ->
      Enum.map(chunk, fn {line_number, row_values} ->
        row_map = build_row_map(row_values, maps)
        {line_number, row_map}
      end)
    end)
  end

  # Builds a row map from raw row values using column maps.
  # Columns referenced by a map but missing from a short row default to "".
  defp build_row_map(row_values, maps) do
    row_tuple = List.to_tuple(row_values)
    # NOTE: local binding shadows Kernel.tuple_size/1 below; intentional here.
    tuple_size = tuple_size(row_tuple)

    member_map =
      maps.member
      |> Enum.reduce(%{}, fn {field, index}, acc ->
        value = if index < tuple_size, do: elem(row_tuple, index), else: ""
        Map.put(acc, field, value)
      end)

    custom_map =
      maps.custom
      |> Enum.reduce(%{}, fn {custom_field_id, index}, acc ->
        value = if index < tuple_size, do: elem(row_tuple, index), else: ""
        Map.put(acc, custom_field_id, value)
      end)

    %{member: member_map, custom: custom_map}
  end

  @doc """
  Processes a chunk of CSV rows and creates members.

  This function:
  1. Validates each row
  2. Creates members via Ash resource
  3. Creates custom field values for each member
  4. Collects errors with correct CSV line numbers
  5. Returns chunk processing results

  ## Parameters
  - `chunk_rows_with_lines` - List of tuples `{csv_line_number, row_map}` where:
    - `csv_line_number` - Physical line number in CSV (1-based)
    - `row_map` - Map with `:member` and `:custom` keys containing field values
  - `column_map` - Map of canonical field names (atoms) to column indices (for reference)
  - `custom_field_map` - Map of custom field IDs (strings) to column indices (for reference)
  - `opts` - Optional keyword list for processing options:
    - `:custom_field_lookup` - Map of custom field IDs to metadata (default: `%{}`)
    - `:existing_error_count` - Number of errors already collected in previous chunks (default: `0`)
    - `:max_errors` - Maximum number of errors to collect per import overall (default: `50`)
    - `:actor` - Actor for authorization (default: system actor for systemic operations)

  ## Error Capping

  Errors are capped at `max_errors` per import overall. When the limit is reached:
  - No additional errors are collected in the `errors` list
  - Processing continues for all rows
  - The `failed` count continues to increment correctly for all failed rows
  - The `errors_truncated?` flag is set to `true` to indicate that additional errors were suppressed

  ## Returns
  - `{:ok, chunk_result}` - Chunk processing results
  - `{:error, reason}` - Error reason (string)

  ## Examples

      iex> chunk = [{2, %{member: %{email: "john@example.com"}, custom: %{}}}]
      iex> column_map = %{email: 0}
      iex> custom_field_map = %{}
      iex> MemberCSV.process_chunk(chunk, column_map, custom_field_map)
      {:ok, %{inserted: 1, failed: 0, errors: [], errors_truncated?: false}}

      iex> chunk = [{2, %{member: %{email: "invalid"}, custom: %{}}}]
      iex> opts = [existing_error_count: 25, max_errors: 50]
      iex> MemberCSV.process_chunk(chunk, %{}, %{}, opts)
      {:ok, %{inserted: 0, failed: 1, errors: [%Error{}], errors_truncated?: false}}
  """
  @spec process_chunk(
          list({pos_integer(), map()}),
          %{atom() => non_neg_integer()},
          %{String.t() => non_neg_integer()},
          keyword()
        ) :: {:ok, chunk_result()} | {:error, String.t()}
  def process_chunk(chunk_rows_with_lines, _column_map, _custom_field_map, opts \\ []) do
    custom_field_lookup = Keyword.get(opts, :custom_field_lookup, %{})
    existing_error_count = Keyword.get(opts, :existing_error_count, 0)
    max_errors = Keyword.get(opts, :max_errors, @default_max_errors)
    actor = Keyword.get(opts, :actor, SystemActor.get_system_actor())

    # Accumulator: {inserted, failed, errors (reversed), errors collected this
    # chunk, truncated?}. Errors are prepended and reversed once at the end.
    {inserted, failed, errors, _collected_error_count, truncated?} =
      Enum.reduce(chunk_rows_with_lines, {0, 0, [], 0, false}, fn {line_number, row_map},
                                                                  {acc_inserted, acc_failed,
                                                                   acc_errors, acc_error_count,
                                                                   acc_truncated?} ->
        # Total errors so far across the whole import, not just this chunk.
        current_error_count = existing_error_count + acc_error_count

        case process_row(row_map, line_number, custom_field_lookup, actor) do
          {:ok, _member} ->
            update_inserted(
              {acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?}
            )

          {:error, error} ->
            handle_row_error(
              {acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?},
              error,
              current_error_count,
              max_errors
            )
        end
      end)

    {:ok,
     %{
       inserted: inserted,
       failed: failed,
       errors: Enum.reverse(errors),
       errors_truncated?: truncated?
     }}
  end

  @doc """
  Validates a single CSV row before database insertion.

  This function:
  1. Trims all string values in the member map
  2. Validates that email is present and not empty after trimming
  3. Validates email format using EctoCommons.EmailValidator
  4. Returns structured errors with Gettext-backed messages

  ## Parameters
  - `row_map` - Map with `:member` and `:custom` keys containing field values
  - `csv_line_number` - Physical line number in CSV (1-based, header is line 1)
  - `opts` - Optional keyword list (for future extensions)

  ## Returns
  - `{:ok, trimmed_row_map}` - Successfully validated row with trimmed values
  - `{:error, %Error{}}` - Validation error with structured error information

  ## Examples

      iex> row_map = %{member: %{email: " john@example.com "}, custom: %{}}
      iex> MemberCSV.validate_row(row_map, 2, [])
      {:ok, %{member: %{email: "john@example.com"}, custom: %{}}}

      iex> row_map = %{member: %{}, custom: %{}}
      iex> MemberCSV.validate_row(row_map, 3, [])
      {:error, %MemberCSV.Error{csv_line_number: 3, field: :email, message: "Email is required."}}
  """
  @spec validate_row(map(), pos_integer(), keyword()) :: {:ok, map()} | {:error, Error.t()}
  def validate_row(row_map, csv_line_number, _opts \\ []) do
    # Safely get member map (handle missing key)
    member_attrs = Map.get(row_map, :member, %{})
    custom_attrs = Map.get(row_map, :custom, %{})

    # Validate email using schemaless changeset
    changeset =
      {%{}, %{email: :string}}
      |> Ecto.Changeset.cast(%{email: Map.get(member_attrs, :email)}, [:email])
      |> Ecto.Changeset.update_change(:email, &String.trim/1)
      |> Ecto.Changeset.validate_required([:email])
      |> EctoCommons.EmailValidator.validate_email(:email,
        checks: Mv.Constants.email_validator_checks()
      )

    if changeset.valid? do
      # Apply trimmed email back to member_attrs
      trimmed_email = Ecto.Changeset.get_change(changeset, :email)

      trimmed_member =
        Map.put(member_attrs, :email, trimmed_email)
        |> trim_string_values()

      {:ok, %{member: trimmed_member, custom: custom_attrs}}
    else
      # Extract first error
      error = extract_changeset_error(changeset, csv_line_number)
      {:error, error}
    end
  end

  # Extracts the first error from a changeset and converts it to a MemberCSV.Error struct.
  # Email errors take priority; otherwise the first error of any field is used.
  defp extract_changeset_error(changeset, csv_line_number) do
    errors = Ecto.Changeset.traverse_errors(changeset, &format_error_message/1)

    case errors do
      %{email: [message | _]} ->
        # Email-specific error
        %Error{
          csv_line_number: csv_line_number,
          field: :email,
          message: gettext_error_message(message)
        }

      errors when map_size(errors) > 0 ->
        # Get first error (any field)
        {field, [message | _]} = Enum.at(Enum.to_list(errors), 0)

        %Error{
          csv_line_number: csv_line_number,
          field: String.to_existing_atom(to_string(field)),
          message: gettext_error_message(message)
        }

      _ ->
        # Fallback
        %Error{
          csv_line_number: csv_line_number,
          field: :email,
          message: gettext("Email is invalid.")
        }
    end
  end

  # Helper function to update accumulator when row is successfully inserted
  defp update_inserted({acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?}) do
    {acc_inserted + 1, acc_failed, acc_errors, acc_error_count, acc_truncated?}
  end

  # Helper function to handle row error with error count limit checking.
  # `failed` is always incremented; the error struct is only collected while
  # the overall error cap has not been reached.
  defp handle_row_error(
         {acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?},
         error,
         current_error_count,
         max_errors
       ) do
    new_acc_failed = acc_failed + 1

    {new_acc_errors, new_error_count, new_truncated?} =
      collect_error_if_under_limit(
        error,
        acc_errors,
        acc_error_count,
        acc_truncated?,
        current_error_count,
        max_errors
      )

    {acc_inserted, new_acc_failed, new_acc_errors, new_error_count, new_truncated?}
  end

  # Helper function to collect error only if under limit
  defp collect_error_if_under_limit(
         error,
         acc_errors,
         acc_error_count,
         acc_truncated?,
         current_error_count,
         max_errors
       ) do
    if current_error_count < max_errors do
      {[error | acc_errors], acc_error_count + 1, acc_truncated?}
    else
      # Over the cap: drop this error and flag truncation.
      {acc_errors, acc_error_count, true}
    end
  end

  # Formats error message by replacing placeholders
  # (Ecto changeset messages carry opts like count: 3 for "%{count}" templates).
  defp format_error_message({msg, opts}) do
    Enum.reduce(opts, msg, fn {key, value}, acc ->
      String.replace(acc, "%{#{key}}", to_string(value))
    end)
  end

  # Maps changeset error messages to appropriate Gettext messages
  defp gettext_error_message(message) when is_binary(message) do
    cond do
      String.contains?(String.downcase(message), "required") or
          String.contains?(String.downcase(message), "can't be blank") ->
        gettext("Email is required.")

      String.contains?(String.downcase(message), "invalid") or
          String.contains?(String.downcase(message), "not a valid") ->
        gettext("Email is invalid.")

      true ->
        message
    end
  end

  defp gettext_error_message(_), do: gettext("Email is invalid.")

  # Processes a single row and creates member with custom field values.
  # Function-level rescue converts any unexpected crash into a row-level
  # %Error{} so one bad row cannot abort the whole chunk.
  defp process_row(
         row_map,
         line_number,
         custom_field_lookup,
         actor
       ) do
    # Validate row before database insertion
    case validate_row(row_map, line_number, []) do
      {:error, error} ->
        # Return validation error immediately, no DB insert attempted
        {:error, error}

      {:ok, %{member: trimmed_member_attrs, custom: custom_attrs}} ->
        # Prepare custom field values for Ash
        case prepare_custom_field_values(custom_attrs, custom_field_lookup) do
          {:error, validation_errors} ->
            # Custom field validation errors - return first error
            first_error = List.first(validation_errors)
            {:error, %Error{csv_line_number: line_number, field: nil, message: first_error}}

          {:ok, custom_field_values} ->
            create_member_with_custom_fields(
              trimmed_member_attrs,
              custom_field_values,
              line_number,
              actor
            )
        end
    end
  rescue
    e ->
      {:error, %Error{csv_line_number: line_number, field: nil, message: Exception.message(e)}}
  end

  # Creates a member with custom field values, handling errors appropriately
  defp create_member_with_custom_fields(
         trimmed_member_attrs,
         custom_field_values,
         line_number,
         actor
       ) do
    # Create member with custom field values
    member_attrs_with_cf =
      trimmed_member_attrs
      |> Map.put(:custom_field_values, custom_field_values)

    # Only include custom_field_values if not empty
    final_attrs =
      if Enum.empty?(custom_field_values) do
        Map.delete(member_attrs_with_cf, :custom_field_values)
      else
        member_attrs_with_cf
      end

    case Mv.Membership.create_member(final_attrs, actor: actor) do
      {:ok, member} ->
        {:ok, member}

      {:error, %Ash.Error.Invalid{} = error} ->
        # Extract email from final_attrs for better error messages
        email = Map.get(final_attrs, :email) || Map.get(trimmed_member_attrs, :email)
        {:error, format_ash_error(error, line_number, email)}

      {:error, error} ->
        {:error, %Error{csv_line_number: line_number, field: nil, message: inspect(error)}}
    end
  end

  # Prepares custom field values from row map for Ash
  # Returns {:ok, [custom_field_value_maps]} or {:error, [validation_errors]}
  defp prepare_custom_field_values(custom_attrs, custom_field_lookup)
       when is_map(custom_attrs) do
    {values, errors} =
      custom_attrs
      |> Enum.filter(fn {_id, value} -> value != nil && value != "" end)
      |> Enum.reduce({[], []}, fn {custom_field_id_str, value}, {acc_values, acc_errors} ->
        process_single_custom_field(
          custom_field_id_str,
          value,
          custom_field_lookup,
          acc_values,
          acc_errors
        )
      end)

    if Enum.empty?(errors) do
      {:ok, Enum.reverse(values)}
    else
      {:error, Enum.reverse(errors)}
    end
  end

  defp prepare_custom_field_values(_, _), do: {:ok, []}

  # Processes a single custom field value and returns updated accumulator
  defp process_single_custom_field(
         custom_field_id_str,
         value,
         custom_field_lookup,
         acc_values,
         acc_errors
       ) do
    # Trim value early and skip if empty
    trimmed_value = if is_binary(value), do: String.trim(value), else: value

    # Skip empty values (after trimming) - don't create CFV
    if trimmed_value == "" or trimmed_value == nil do
      {acc_values, acc_errors}
    else
      process_non_empty_custom_field(
        custom_field_id_str,
        trimmed_value,
        custom_field_lookup,
        acc_values,
        acc_errors
      )
    end
  end

  # Processes a non-empty custom field value
  defp process_non_empty_custom_field(
         custom_field_id_str,
         trimmed_value,
         custom_field_lookup,
         acc_values,
         acc_errors
       ) do
    case Map.get(custom_field_lookup, custom_field_id_str) do
      nil ->
        # Custom field not found, skip
        {acc_values, acc_errors}

      %{id: custom_field_id, value_type: value_type, name: custom_field_name} ->
        case format_custom_field_value(trimmed_value, value_type, custom_field_name) do
          {:ok, formatted_value} ->
            value_map = %{
              "custom_field_id" => to_string(custom_field_id),
              "value" => formatted_value
            }

            {[value_map | acc_values], acc_errors}

          {:error, reason} ->
            {acc_values, [reason | acc_errors]}
        end
    end
  end

  # Formats a custom field value according to its type
  # Uses _union_type and _union_value format as expected by Ash
  # Returns {:ok, formatted_value} or {:error, error_message}
  defp format_custom_field_value(value, :string, _custom_field_name) when is_binary(value) do
    {:ok, %{"_union_type" => "string", "_union_value" => String.trim(value)}}
  end

  defp format_custom_field_value(value, :integer, custom_field_name) when is_binary(value) do
    trimmed = String.trim(value)

    case Integer.parse(trimmed) do
      {int_value, ""} ->
        # Fully consumed - valid integer
        {:ok, %{"_union_type" => "integer", "_union_value" => int_value}}

      {_int_value, _remaining} ->
        # Not fully consumed - invalid
        {:error, format_custom_field_error(custom_field_name, :integer, trimmed)}

      :error ->
        {:error, format_custom_field_error(custom_field_name, :integer, trimmed)}
    end
  end

  defp format_custom_field_value(value, :boolean, custom_field_name) when is_binary(value) do
    trimmed = String.trim(value)

    case parse_boolean_value(trimmed) do
      {:ok, bool_value} ->
        {:ok, %{"_union_type" => "boolean", "_union_value" => bool_value}}

      :error ->
        {:error,
         format_custom_field_error_with_details(
           custom_field_name,
           :boolean,
           trimmed,
           gettext("(true/false/1/0/yes/no/ja/nein)")
         )}
    end
  end

  defp format_custom_field_value(value, :date, custom_field_name) when is_binary(value) do
    trimmed = String.trim(value)

    case Date.from_iso8601(trimmed) do
      {:ok, date} ->
        {:ok, %{"_union_type" => "date", "_union_value" => date}}

      {:error, _} ->
        {:error,
         format_custom_field_error_with_details(
           custom_field_name,
           :date,
           trimmed,
           gettext("(ISO-8601 format: YYYY-MM-DD)")
         )}
    end
  end

  defp format_custom_field_value(value, :email, custom_field_name) when is_binary(value) do
    trimmed = String.trim(value)

    # Use EctoCommons.EmailValidator for consistency with Member email validation
    changeset =
      {%{}, %{email: :string}}
      |> Ecto.Changeset.cast(%{email: trimmed}, [:email])
      |> EctoCommons.EmailValidator.validate_email(:email,
        checks: Mv.Constants.email_validator_checks()
      )

    if changeset.valid? do
      {:ok, %{"_union_type" => "email", "_union_value" => trimmed}}
    else
      {:error, format_custom_field_error(custom_field_name, :email, trimmed)}
    end
  end

  defp format_custom_field_value(value, _type, _custom_field_name) when is_binary(value) do
    # Default to string if type is unknown
    {:ok, %{"_union_type" => "string", "_union_value" => String.trim(value)}}
  end

  # Parses a boolean value from a string, supporting multiple formats
  defp parse_boolean_value(value) when is_binary(value) do
    lower = String.downcase(value)
    parse_boolean_value_lower(lower)
  end

  # Helper function with pattern matching for boolean values
  defp parse_boolean_value_lower("true"), do: {:ok, true}
  defp parse_boolean_value_lower("1"), do: {:ok, true}
  defp parse_boolean_value_lower("yes"), do: {:ok, true}
  defp parse_boolean_value_lower("ja"), do: {:ok, true}
  defp parse_boolean_value_lower("false"), do: {:ok, false}
  defp parse_boolean_value_lower("0"), do: {:ok, false}
  defp parse_boolean_value_lower("no"), do: {:ok, false}
  defp parse_boolean_value_lower("nein"), do: {:ok, false}
  defp parse_boolean_value_lower(_), do: :error

  # Generates a consistent error message for custom field validation failures
  # Uses human-readable field type labels (e.g., "Number" instead of "integer")
  defp format_custom_field_error(custom_field_name, value_type, value) do
    type_label = FieldTypes.label(value_type)

    gettext("custom_field: %{name} – expected %{type}, got: %{value}",
      name: custom_field_name,
      type: type_label,
      value: value
    )
  end

  # Generates an error message with additional details (e.g., format hints)
  defp format_custom_field_error_with_details(custom_field_name, value_type, value, details) do
    type_label = FieldTypes.label(value_type)

    gettext("custom_field: %{name} – expected %{type} %{details}, got: %{value}",
      name: custom_field_name,
      type: type_label,
      details: details,
      value: value
    )
  end

  # Trims all string values in member attributes
  defp trim_string_values(attrs) do
    Enum.reduce(attrs, %{}, fn {key, value}, acc ->
      trimmed_value =
        if is_binary(value) do
          String.trim(value)
        else
          value
        end

      Map.put(acc, key, trimmed_value)
    end)
  end

  # Formats Ash errors into MemberCSV.Error structs
  defp format_ash_error(%Ash.Error.Invalid{errors: errors}, line_number, email) do
    # Try to find email-related errors first (for better error messages)
    email_error =
      Enum.find(errors, fn error ->
        case error do
          %{field: :email} -> true
          _ -> false
        end
      end)

    case email_error || List.first(errors) do
      %{field: field, message: message} when is_atom(field) ->
        %Error{
          csv_line_number: line_number,
          field: field,
          message: format_error_message(message, field, email)
        }

      %{message: message} ->
        %Error{
          csv_line_number: line_number,
          field: nil,
          message: format_error_message(message, nil, email)
        }

      _ ->
        %Error{
          csv_line_number: line_number,
          field: nil,
          message: gettext("Validation failed")
        }
    end
  end

  # Formats error messages, handling common cases like email uniqueness
  defp format_error_message(message, field, email) when is_binary(message) do
    if email_uniqueness_error?(message, field) do
      # Include email in error message for better user feedback
      email_str = if email, do: to_string(email), else: gettext("email")
      gettext("email %{email} has already been taken", email: email_str)
    else
      message
    end
  end

  defp format_error_message(message, _field, _email), do: to_string(message)

  # Checks if error message indicates email uniqueness constraint violation
  defp email_uniqueness_error?(message, :email) do
    message_lower = String.downcase(message)

    String.contains?(message_lower, "unique") or
      String.contains?(message_lower, "constraint") or
      String.contains?(message_lower, "duplicate") or
      String.contains?(message_lower, "already been taken") or
      String.contains?(message_lower, "already exists") or
      String.contains?(message_lower, "violates unique constraint")
  end

  defp email_uniqueness_error?(_message, _field), do: false
end