mitgliederverwaltung/lib/mv/membership/import/member_csv.ex
defmodule Mv.Membership.Import.MemberCSV do
@moduledoc """
Service module for importing members from CSV files.

This module provides the core API for CSV member import functionality:

- `prepare/2` - Parses and validates CSV content, returns import state
- `process_chunk/3` - Processes a chunk of rows and creates members
- `validate_row/3` - Validates a single row before database insertion

## Error Handling

Errors are returned as `%MemberCSV.Error{}` structs containing:

- `csv_line_number` - The physical line number in the CSV file (or `nil` for general errors)
- `field` - The field name (atom) or `nil` if not field-specific
- `message` - Human-readable error message (or `nil` for general errors)
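
For example, a row whose email fails validation on CSV line 5 yields:

    %MemberCSV.Error{csv_line_number: 5, field: :email, message: "Email is invalid."}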

## Import State

The `import_state` returned by `prepare/2` contains:

- `chunks` - List of row chunks ready for processing
- `column_map` - Map of canonical field names to column indices
- `custom_field_map` - Map of custom field IDs (strings) to column indices
- `custom_field_lookup` - Map of custom field IDs (strings) to `%{id, value_type}` metadata used when processing values
- `warnings` - List of warning messages (e.g., unknown custom field columns)

## Chunk Results

The `chunk_result` returned by `process_chunk/3` contains:

- `inserted` - Number of successfully created members
- `failed` - Number of failed member creations
- `errors` - List of `%MemberCSV.Error{}` structs (capped at `:max_errors` per import, 50 by default)
- `errors_truncated?` - Whether additional errors were suppressed after the cap was reached
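
For example:

    %{inserted: 198, failed: 2, errors: [%MemberCSV.Error{...}], errors_truncated?: false}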

## Examples

    # Prepare CSV for import
    {:ok, import_state} = MemberCSV.prepare(csv_content)

    # Process first chunk
    chunk = Enum.at(import_state.chunks, 0)
    {:ok, result} = MemberCSV.process_chunk(chunk, import_state.column_map, import_state.custom_field_map)
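
A full import processes chunks sequentially and threads the running error
count through, so the error cap applies across the whole import
(illustrative sketch):

    Enum.reduce(import_state.chunks, {0, 0, [], false}, fn chunk, {ins, fail, errs, trunc?} ->
      {:ok, r} =
        MemberCSV.process_chunk(chunk, import_state.column_map, import_state.custom_field_map,
          custom_field_lookup: import_state.custom_field_lookup,
          existing_error_count: length(errs)
        )

      {ins + r.inserted, fail + r.failed, errs ++ r.errors, trunc? or r.errors_truncated?}
    end)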
"""
defmodule Error do
@moduledoc """
Error struct for CSV import errors.

## Fields

- `csv_line_number` - The physical line number in the CSV file (1-based, header is line 1)
- `field` - The field name as an atom (e.g., `:email`) or `nil` if not field-specific
- `message` - Human-readable error message
"""
defstruct csv_line_number: nil, field: nil, message: nil
@type t :: %__MODULE__{
csv_line_number: pos_integer() | nil,
field: atom() | nil,
message: String.t() | nil
}
end
@type import_state :: %{
chunks: list(list({pos_integer(), map()})),
column_map: %{atom() => non_neg_integer()},
custom_field_map: %{String.t() => non_neg_integer()},
custom_field_lookup: %{String.t() => %{id: String.t(), value_type: atom()}},
warnings: list(String.t())
}
@type chunk_result :: %{
inserted: non_neg_integer(),
failed: non_neg_integer(),
errors: list(Error.t()),
errors_truncated?: boolean()
}
use Gettext, backend: MvWeb.Gettext

require Ash.Query

alias Mv.Membership.Import.CsvParser
alias Mv.Membership.Import.HeaderMapper
# Configuration constants
@default_max_errors 50
@default_chunk_size 200
@default_max_rows 1000
@doc """
Prepares CSV content for import by parsing, mapping headers, and validating limits.

This function:

1. Strips UTF-8 BOM if present
2. Detects CSV delimiter (semicolon or comma)
3. Parses headers and data rows
4. Maps headers to canonical member fields
5. Maps custom field columns by name
6. Validates row count limits
7. Chunks rows for processing

## Parameters

- `file_content` - The raw CSV file content as a string
- `opts` - Optional keyword list:
  - `:max_rows` - Maximum number of data rows allowed (default: 1000)
  - `:chunk_size` - Number of rows per chunk (default: 200)

## Returns

- `{:ok, import_state}` - Successfully prepared import state
- `{:error, reason}` - Error reason (string or error struct)

## Examples

    iex> MemberCSV.prepare("email\\njohn@example.com")
    {:ok, %{chunks: [...], column_map: %{email: 0}, ...}}

    iex> MemberCSV.prepare("")
    {:error, "CSV file is empty"}
"""
@spec prepare(String.t(), keyword()) :: {:ok, import_state()} | {:error, String.t()}
def prepare(file_content, opts \\ []) do
max_rows = Keyword.get(opts, :max_rows, @default_max_rows)
chunk_size = Keyword.get(opts, :chunk_size, @default_chunk_size)
with {:ok, headers, rows} <- CsvParser.parse(file_content),
{:ok, custom_fields} <- load_custom_fields(),
{:ok, maps, warnings} <- build_header_maps(headers, custom_fields),
:ok <- validate_row_count(rows, max_rows) do
chunks = chunk_rows(rows, maps, chunk_size)
# Build custom field lookup for efficient value processing
custom_field_lookup = build_custom_field_lookup(custom_fields)
{:ok,
%{
chunks: chunks,
column_map: maps.member,
custom_field_map: maps.custom,
custom_field_lookup: custom_field_lookup,
warnings: warnings
}}
end
end
# Loads all custom fields from the database
defp load_custom_fields do
custom_fields =
Mv.Membership.CustomField
|> Ash.read!()
{:ok, custom_fields}
rescue
e ->
{:error, "Failed to load custom fields: #{Exception.message(e)}"}
end
# Builds custom field lookup map for efficient value processing
defp build_custom_field_lookup(custom_fields) do
custom_fields
|> Enum.reduce(%{}, fn cf, acc ->
id_str = to_string(cf.id)
Map.put(acc, id_str, %{id: cf.id, value_type: cf.value_type})
end)
end
# Builds header maps using HeaderMapper and collects warnings for unknown custom fields
defp build_header_maps(headers, custom_fields) do
# Convert custom fields to maps with id and name
custom_field_maps =
Enum.map(custom_fields, fn cf ->
%{id: to_string(cf.id), name: cf.name}
end)
case HeaderMapper.build_maps(headers, custom_field_maps) do
{:ok, %{member: member_map, custom: custom_map, unknown: unknown}} ->
# Build warnings for unknown custom field columns
warnings =
unknown
|> Enum.filter(fn header ->
# Check if it could be a custom field (not a known member field)
normalized = HeaderMapper.normalize_header(header)
# If it's not empty and not a member field, it might be a custom field
normalized != "" && not member_field?(normalized)
end)
|> Enum.map(fn header ->
"Unknown column '#{header}' will be ignored. " <>
"If this is a custom field, create it in Mila before importing."
end)
{:ok, %{member: member_map, custom: custom_map}, warnings}
{:error, reason} ->
{:error, reason}
end
end
# Checks if a normalized header matches a member field
# Uses HeaderMapper.known_member_fields/0 as single source of truth
defp member_field?(normalized) when is_binary(normalized) do
MapSet.member?(HeaderMapper.known_member_fields(), normalized)
end
defp member_field?(_), do: false
# Validates that row count doesn't exceed limit
defp validate_row_count(rows, max_rows) do
if length(rows) > max_rows do
{:error, "CSV file exceeds maximum row limit of #{max_rows} rows"}
else
:ok
end
end
# Chunks rows and converts them to row maps using column maps
defp chunk_rows(rows, maps, chunk_size) do
rows
|> Enum.chunk_every(chunk_size)
|> Enum.map(fn chunk ->
Enum.map(chunk, fn {line_number, row_values} ->
row_map = build_row_map(row_values, maps)
{line_number, row_map}
end)
end)
end
# Builds a row map from raw row values using column maps
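# e.g. row values ["a@b.c", "42"] with maps
#   %{member: %{email: 0}, custom: %{"cf-uuid" => 1}}
# produce %{member: %{email: "a@b.c"}, custom: %{"cf-uuid" => "42"}}
# ("cf-uuid" stands in for a real custom field ID)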
defp build_row_map(row_values, maps) do
member_map =
maps.member
|> Enum.reduce(%{}, fn {field, index}, acc ->
value = Enum.at(row_values, index, "")
Map.put(acc, field, value)
end)
custom_map =
maps.custom
|> Enum.reduce(%{}, fn {custom_field_id, index}, acc ->
value = Enum.at(row_values, index, "")
Map.put(acc, custom_field_id, value)
end)
%{member: member_map, custom: custom_map}
end
@doc """
Processes a chunk of CSV rows and creates members.

This function:

1. Validates each row
2. Creates members via Ash resource
3. Creates custom field values for each member
4. Collects errors with correct CSV line numbers
5. Returns chunk processing results

## Parameters

- `chunk_rows_with_lines` - List of tuples `{csv_line_number, row_map}` where:
  - `csv_line_number` - Physical line number in CSV (1-based)
  - `row_map` - Map with `:member` and `:custom` keys containing field values
- `column_map` - Map of canonical field names (atoms) to column indices (for reference)
- `custom_field_map` - Map of custom field IDs (strings) to column indices (for reference)
- `opts` - Optional keyword list for processing options:
  - `:custom_field_lookup` - Map of custom field IDs to metadata (default: `%{}`)
  - `:existing_error_count` - Number of errors already collected in previous chunks (default: `0`)
  - `:max_errors` - Maximum number of errors to collect per import overall (default: `50`)
  - `:actor` - Actor passed to Ash when creating members (default: `nil`)

## Error Capping

Errors are capped at `max_errors` per import overall. When the limit is reached:

- No additional errors are collected in the `errors` list
- Processing continues for all rows
- The `failed` count continues to increment correctly for all failed rows
- The `errors_truncated?` flag is set to `true` to indicate that additional errors were suppressed

## Returns

- `{:ok, chunk_result}` - Chunk processing results
- `{:error, reason}` - Error reason (string)

## Examples

    iex> chunk = [{2, %{member: %{email: "john@example.com"}, custom: %{}}}]
    iex> column_map = %{email: 0}
    iex> custom_field_map = %{}
    iex> MemberCSV.process_chunk(chunk, column_map, custom_field_map)
    {:ok, %{inserted: 1, failed: 0, errors: [], errors_truncated?: false}}

    iex> chunk = [{2, %{member: %{email: "invalid"}, custom: %{}}}]
    iex> opts = [existing_error_count: 25, max_errors: 50]
    iex> MemberCSV.process_chunk(chunk, %{}, %{}, opts)
    {:ok, %{inserted: 0, failed: 1, errors: [%Error{}], errors_truncated?: false}}
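
    # Once the overall cap is reached, failures still count but no new
    # errors are collected (illustrative):
    iex> chunk = [{2, %{member: %{email: "invalid"}, custom: %{}}}]
    iex> opts = [existing_error_count: 50, max_errors: 50]
    iex> MemberCSV.process_chunk(chunk, %{}, %{}, opts)
    {:ok, %{inserted: 0, failed: 1, errors: [], errors_truncated?: true}}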
"""
@spec process_chunk(
list({pos_integer(), map()}),
%{atom() => non_neg_integer()},
%{String.t() => non_neg_integer()},
keyword()
) :: {:ok, chunk_result()} | {:error, String.t()}
def process_chunk(chunk_rows_with_lines, _column_map, _custom_field_map, opts \\ []) do
custom_field_lookup = Keyword.get(opts, :custom_field_lookup, %{})
existing_error_count = Keyword.get(opts, :existing_error_count, 0)
max_errors = Keyword.get(opts, :max_errors, @default_max_errors)
actor = Keyword.get(opts, :actor)
{inserted, failed, errors, _collected_error_count, truncated?} =
Enum.reduce(chunk_rows_with_lines, {0, 0, [], 0, false}, fn {line_number, row_map},
{acc_inserted, acc_failed,
acc_errors, acc_error_count,
acc_truncated?} ->
current_error_count = existing_error_count + acc_error_count
case process_row(row_map, line_number, custom_field_lookup, actor) do
{:ok, _member} ->
update_inserted(
{acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?}
)
{:error, error} ->
handle_row_error(
{acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?},
error,
current_error_count,
max_errors
)
end
end)
{:ok,
%{
inserted: inserted,
failed: failed,
errors: Enum.reverse(errors),
errors_truncated?: truncated?
}}
end
@doc """
Validates a single CSV row before database insertion.

This function:

1. Trims all string values in the member map
2. Validates that email is present and not empty after trimming
3. Validates email format using EctoCommons.EmailValidator
4. Returns structured errors with Gettext-backed messages

## Parameters

- `row_map` - Map with `:member` and `:custom` keys containing field values
- `csv_line_number` - Physical line number in CSV (1-based, header is line 1)
- `opts` - Optional keyword list (for future extensions)

## Returns

- `{:ok, trimmed_row_map}` - Successfully validated row with trimmed values
- `{:error, %Error{}}` - Validation error with structured error information

## Examples

    iex> row_map = %{member: %{email: " john@example.com "}, custom: %{}}
    iex> MemberCSV.validate_row(row_map, 2, [])
    {:ok, %{member: %{email: "john@example.com"}, custom: %{}}}

    iex> row_map = %{member: %{}, custom: %{}}
    iex> MemberCSV.validate_row(row_map, 3, [])
    {:error, %MemberCSV.Error{csv_line_number: 3, field: :email, message: "Email is required."}}
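
    # A malformed address is rejected before any insert is attempted
    # (illustrative):
    iex> row_map = %{member: %{email: "not-an-email"}, custom: %{}}
    iex> MemberCSV.validate_row(row_map, 4, [])
    {:error, %MemberCSV.Error{csv_line_number: 4, field: :email, message: "Email is invalid."}}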
"""
@spec validate_row(map(), pos_integer(), keyword()) ::
{:ok, map()} | {:error, Error.t()}
def validate_row(row_map, csv_line_number, _opts \\ []) do
# Safely get member map (handle missing key)
member_attrs = Map.get(row_map, :member, %{})
custom_attrs = Map.get(row_map, :custom, %{})
# Validate email using schemaless changeset
changeset =
{%{}, %{email: :string}}
|> Ecto.Changeset.cast(%{email: Map.get(member_attrs, :email)}, [:email])
|> Ecto.Changeset.update_change(:email, &String.trim/1)
|> Ecto.Changeset.validate_required([:email])
|> EctoCommons.EmailValidator.validate_email(:email,
checks: Mv.Constants.email_validator_checks()
)
if changeset.valid? do
# Apply trimmed email back to member_attrs
trimmed_email = Ecto.Changeset.get_change(changeset, :email)
trimmed_member = Map.put(member_attrs, :email, trimmed_email) |> trim_string_values()
{:ok, %{member: trimmed_member, custom: custom_attrs}}
else
# Extract first error
error = extract_changeset_error(changeset, csv_line_number)
{:error, error}
end
end
# Extracts the first error from a changeset and converts it to a MemberCSV.Error struct
defp extract_changeset_error(changeset, csv_line_number) do
errors = Ecto.Changeset.traverse_errors(changeset, &format_error_message/1)
case errors do
%{email: [message | _]} ->
# Email-specific error
%Error{
csv_line_number: csv_line_number,
field: :email,
message: gettext_error_message(message)
}
errors when map_size(errors) > 0 ->
  # Take the first error from any field (keys from traverse_errors are already atoms)
  {field, [message | _]} = Enum.at(Enum.to_list(errors), 0)

  %Error{
    csv_line_number: csv_line_number,
    field: field,
    message: gettext_error_message(message)
  }
_ ->
# Fallback
%Error{
csv_line_number: csv_line_number,
field: :email,
message: gettext("Email is invalid.")
}
end
end
# Helper function to update accumulator when row is successfully inserted
defp update_inserted({acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?}) do
{acc_inserted + 1, acc_failed, acc_errors, acc_error_count, acc_truncated?}
end
# Helper function to handle row error with error count limit checking
defp handle_row_error(
{acc_inserted, acc_failed, acc_errors, acc_error_count, acc_truncated?},
error,
current_error_count,
max_errors
) do
new_acc_failed = acc_failed + 1
{new_acc_errors, new_error_count, new_truncated?} =
collect_error_if_under_limit(
error,
acc_errors,
acc_error_count,
acc_truncated?,
current_error_count,
max_errors
)
{acc_inserted, new_acc_failed, new_acc_errors, new_error_count, new_truncated?}
end
# Helper function to collect error only if under limit
defp collect_error_if_under_limit(
error,
acc_errors,
acc_error_count,
acc_truncated?,
current_error_count,
max_errors
) do
if current_error_count < max_errors do
{[error | acc_errors], acc_error_count + 1, acc_truncated?}
else
{acc_errors, acc_error_count, true}
end
end
# Formats error message by replacing placeholders
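# e.g. {"must be less than %{number}", [number: 10]} => "must be less than 10"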
defp format_error_message({msg, opts}) do
Enum.reduce(opts, msg, fn {key, value}, acc ->
String.replace(acc, "%{#{key}}", to_string(value))
end)
end
# Maps changeset error messages to appropriate Gettext messages
defp gettext_error_message(message) when is_binary(message) do
  downcased = String.downcase(message)

  cond do
    String.contains?(downcased, "required") or String.contains?(downcased, "can't be blank") ->
      gettext("Email is required.")

    String.contains?(downcased, "invalid") or String.contains?(downcased, "not a valid") ->
      gettext("Email is invalid.")

    true ->
      message
  end
end
defp gettext_error_message(_), do: gettext("Email is invalid.")
# Processes a single row and creates member with custom field values
defp process_row(
row_map,
line_number,
custom_field_lookup,
actor
) do
# Validate row before database insertion
case validate_row(row_map, line_number, []) do
{:error, error} ->
# Return validation error immediately, no DB insert attempted
{:error, error}
{:ok, %{member: trimmed_member_attrs, custom: custom_attrs}} ->
# Prepare custom field values for Ash
custom_field_values = prepare_custom_field_values(custom_attrs, custom_field_lookup)
# Create member with custom field values
member_attrs_with_cf =
trimmed_member_attrs
|> Map.put(:custom_field_values, custom_field_values)
# Only include custom_field_values if not empty
final_attrs =
if Enum.empty?(custom_field_values) do
Map.delete(member_attrs_with_cf, :custom_field_values)
else
member_attrs_with_cf
end
case Mv.Membership.create_member(final_attrs, actor: actor) do
{:ok, member} ->
{:ok, member}
{:error, %Ash.Error.Invalid{} = error} ->
{:error, format_ash_error(error, line_number)}
{:error, error} ->
{:error, %Error{csv_line_number: line_number, field: nil, message: inspect(error)}}
end
end
rescue
e ->
{:error, %Error{csv_line_number: line_number, field: nil, message: Exception.message(e)}}
end
# Prepares custom field values from row map for Ash
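# e.g. %{"cf-uuid" => "42"} with lookup %{"cf-uuid" => %{id: "cf-uuid", value_type: :integer}}
# yields [%{"custom_field_id" => "cf-uuid", "value" => %{"_union_type" => "integer", "_union_value" => 42}}]
# ("cf-uuid" stands in for a real custom field UUID)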
defp prepare_custom_field_values(custom_attrs, custom_field_lookup) when is_map(custom_attrs) do
custom_attrs
|> Enum.filter(fn {_id, value} -> value != nil && value != "" end)
|> Enum.map(fn {custom_field_id_str, value} ->
case Map.get(custom_field_lookup, custom_field_id_str) do
nil ->
# Custom field not found, skip
nil
%{id: custom_field_id, value_type: value_type} ->
%{
"custom_field_id" => to_string(custom_field_id),
"value" => format_custom_field_value(value, value_type)
}
end
end)
|> Enum.filter(&(&1 != nil))
end
defp prepare_custom_field_values(_, _), do: []
# Formats a custom field value according to its type
# Uses _union_type and _union_value format as expected by Ash
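#
# e.g. "42" with :integer      => %{"_union_type" => "integer", "_union_value" => 42}
#      "ja" with :boolean      => %{"_union_type" => "boolean", "_union_value" => true}
#      "2024-05-01" with :date => %{"_union_type" => "date", "_union_value" => ~D[2024-05-01]}
# Values that fail to parse fall back to the trimmed string variant.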
defp format_custom_field_value(value, :string) when is_binary(value) do
%{"_union_type" => "string", "_union_value" => String.trim(value)}
end
defp format_custom_field_value(value, :integer) when is_binary(value) do
  # Trim before parsing (consistent with the other clauses) and require the
  # whole value to be numeric; otherwise fall back to the string variant.
  case Integer.parse(String.trim(value)) do
    {int_value, ""} -> %{"_union_type" => "integer", "_union_value" => int_value}
    _ -> %{"_union_type" => "string", "_union_value" => String.trim(value)}
  end
end
defp format_custom_field_value(value, :boolean) when is_binary(value) do
bool_value =
value
|> String.trim()
|> String.downcase()
|> case do
"true" -> true
"1" -> true
"yes" -> true
"ja" -> true
_ -> false
end
%{"_union_type" => "boolean", "_union_value" => bool_value}
end
defp format_custom_field_value(value, :date) when is_binary(value) do
case Date.from_iso8601(String.trim(value)) do
{:ok, date} -> %{"_union_type" => "date", "_union_value" => date}
{:error, _} -> %{"_union_type" => "string", "_union_value" => String.trim(value)}
end
end
defp format_custom_field_value(value, :email) when is_binary(value) do
%{"_union_type" => "email", "_union_value" => String.trim(value)}
end
defp format_custom_field_value(value, _type) when is_binary(value) do
# Default to string if type is unknown
%{"_union_type" => "string", "_union_value" => String.trim(value)}
end
# Trims all string values in member attributes
defp trim_string_values(attrs) do
Enum.reduce(attrs, %{}, fn {key, value}, acc ->
trimmed_value =
if is_binary(value) do
String.trim(value)
else
value
end
Map.put(acc, key, trimmed_value)
end)
end
# Formats Ash errors into MemberCSV.Error structs
defp format_ash_error(%Ash.Error.Invalid{errors: errors}, line_number) do
# Try to find email-related errors first (for better error messages)
email_error =
Enum.find(errors, fn error ->
case error do
%{field: :email} -> true
_ -> false
end
end)
case email_error || List.first(errors) do
%{field: field, message: message} when is_atom(field) ->
%Error{
csv_line_number: line_number,
field: field,
message: format_error_message(message, field)
}
%{message: message} ->
%Error{
csv_line_number: line_number,
field: nil,
message: format_error_message(message, nil)
}
_ ->
%Error{
csv_line_number: line_number,
field: nil,
message: "Validation failed"
}
end
end
# Formats error messages, handling common cases like email uniqueness
defp format_error_message(message, field) when is_binary(message) do
if email_uniqueness_error?(message, field) do
"email has already been taken"
else
message
end
end
defp format_error_message(message, _field), do: to_string(message)
# Checks if error message indicates email uniqueness constraint violation
defp email_uniqueness_error?(message, :email) do
message_lower = String.downcase(message)
String.contains?(message_lower, "unique") or
String.contains?(message_lower, "constraint") or
String.contains?(message_lower, "duplicate") or
String.contains?(message_lower, "already been taken") or
String.contains?(message_lower, "already exists") or
String.contains?(message_lower, "violates unique constraint")
end
defp email_uniqueness_error?(_message, _field), do: false
end