defmodule MvWeb.ImportExportLive do
  @moduledoc """
  LiveView for importing and exporting members via CSV.

  ## Features

  - CSV member import (admin only)
  - Real-time import progress tracking
  - Error and warning reporting
  - Custom fields support

  ## CSV Import

  The CSV import feature allows administrators to upload CSV files and import members.

  ### File Upload

  Files are uploaded automatically when selected (`auto_upload: true`).
  No manual upload trigger is required.

  ### Rate Limiting

  Currently, there is no rate limiting for CSV imports. Administrators can start
  multiple imports in quick succession. This is intentional for bulk data migration
  scenarios, but should be monitored in production.

  ### Limits

  - Maximum file size: configurable via `config :mv, csv_import: [max_file_size_mb: ...]`
  - Maximum rows: configurable via `config :mv, csv_import: [max_rows: ...]` (excluding header)
  - Processing: chunks of 200 rows
  - Errors: capped at 50 per import
  """

  use MvWeb, :live_view

  alias Mv.Authorization.Actor
  alias Mv.Config
  alias Mv.Membership
  alias Mv.Membership.Import.MemberCSV
  alias MvWeb.Authorization

  # Loads the current user's role before mount so authorization checks work.
  on_mount {MvWeb.LiveHelpers, :ensure_user_role_loaded}

  # Maximum number of errors to collect per import to prevent memory issues
  # and keep error display manageable. Additional errors are silently dropped
  # after this limit is reached.
  @max_errors 50

  # Maximum length for error messages before truncation
  @max_error_message_length 200

  @impl true
  def mount(_params, session, socket) do
    # Get locale from session for translations; defaults to German.
    locale = session["locale"] || "de"
    Gettext.put_locale(MvWeb.Gettext, locale)

    # Get club name from settings; fall back to the default application
    # name when settings cannot be loaded.
    club_name =
      case Membership.get_settings() do
        {:ok, settings} -> settings.club_name
        _ -> "Mitgliederverwaltung"
      end

    socket =
      socket
      |> assign(:page_title, gettext("Import/Export"))
      |> assign(:club_name, club_name)
      |> assign(:import_state, nil)
      |> assign(:import_progress, nil)
      |> assign(:import_status, :idle)
      |> assign(:locale, locale)
      |> assign(:max_errors, @max_errors)
      |> assign(:csv_import_max_rows, Config.csv_import_max_rows())
      |> assign(:csv_import_max_file_size_mb, Config.csv_import_max_file_size_mb())
      # Configure file upload with auto-upload enabled.
      # Files are uploaded automatically when selected, no need for manual trigger.
      |> allow_upload(:csv_file,
        accept: ~w(.csv),
        max_entries: 1,
        max_file_size: Config.csv_import_max_file_size_bytes(),
        auto_upload: true
      )

    {:ok, socket}
  end

  @impl true
  def render(assigns) do
    ~H"""
    <.header>
      {gettext("Import/Export")}
      <:subtitle>
        {gettext("Import members from CSV files or export member data.")}
    <%= if Authorization.can?(@current_user, :create, Mv.Membership.Member) do %>
      <%!-- CSV Import Section --%>
      <.form_section title={gettext("Import Members (CSV)")}>
        {import_info_box(assigns)}
        {template_links(assigns)}
        {import_form(assigns)}
        <%= if @import_status == :running or @import_status == :done do %>
          {import_progress(assigns)}
        <% end %>
      <%!-- Export Section (Placeholder) --%>
      <.form_section title={gettext("Export Members (CSV)")}>
        <.icon name="hero-information-circle" class="size-5" aria-hidden="true" />

        {gettext("Export functionality will be available in a future release.")}

    <% else %>
    <% end %>
    """
  end

  # Renders the info box explaining CSV import requirements.
  defp import_info_box(assigns) do
    ~H"""
    <.icon name="hero-information-circle" class="size-5" aria-hidden="true" />

    {gettext(
      "Use the data field name as the CSV column header in your file. Data fields must exist in Mila before importing, so they must be listed in the list of member data (like e-mail or first name). Unknown data field columns will be ignored with a warning."
    )}

    <.link
      href={~p"/settings#custom_fields"}
      class="link"
      data-testid="custom-fields-link"
    >
      {gettext("Manage Member Data")}

    """
  end

  # Renders template download links.
  defp template_links(assigns) do
    ~H"""

    {gettext("Download CSV templates:")}

    """
  end

  # Renders the CSV upload form. Submitting triggers "start_import";
  # "validate_csv_upload" fires on every change (upload validation itself is
  # handled by the allow_upload constraints configured in mount/3).
  defp import_form(assigns) do
    ~H"""
    <.form
      id="csv-upload-form"
      for={%{}}
      multipart={true}
      phx-change="validate_csv_upload"
      phx-submit="start_import"
      data-testid="csv-upload-form"
    >
      <.live_file_input
        upload={@uploads.csv_file}
        id="csv_file"
        class="file-input file-input-bordered w-full"
        aria-describedby="csv_file_help"
      />

      {gettext("CSV files only, maximum %{size} MB", size: @csv_import_max_file_size_mb)}

      <.button
        type="submit"
        phx-disable-with={gettext("Starting import...")}
        variant="primary"
        disabled={import_button_disabled?(@import_status, @uploads.csv_file.entries)}
        data-testid="start-import-button"
      >
        {gettext("Start Import")}
    """
  end

  # Renders import progress (while :running) and results (when :done).
  defp import_progress(assigns) do
    ~H"""
    <%= if @import_progress do %>
      <%= if @import_progress.status == :running do %>

        {gettext("Processing chunk %{current} of %{total}...",
          current: @import_progress.current_chunk,
          total: @import_progress.total_chunks
        )}

      <% end %>
      <%= if @import_progress.status == :done do %>
        {import_results(assigns)}
      <% end %>
    <% end %>
    """
  end

  # Renders import results summary, errors, and warnings.
  defp import_results(assigns) do
    ~H"""

    {gettext("Import Results")}

    {gettext("Summary")}

    <.icon name="hero-check-circle" class="size-4 inline mr-1" aria-hidden="true" />
    {gettext("Successfully inserted: %{count} member(s)",
      count: @import_progress.inserted
    )}

    <%= if @import_progress.failed > 0 do %>

      <.icon name="hero-exclamation-circle" class="size-4 inline mr-1" aria-hidden="true" />
      {gettext("Failed: %{count} row(s)", count: @import_progress.failed)}

    <% end %>
    <%= if @import_progress.errors_truncated? do %>

      <.icon name="hero-information-circle" class="size-4 inline mr-1" aria-hidden="true" />
      {gettext("Error list truncated to %{count} entries", count: @max_errors)}

    <% end %>
    <%= if length(@import_progress.errors) > 0 do %>

      <.icon name="hero-exclamation-circle" class="size-4 inline mr-1" aria-hidden="true" />
      {gettext("Errors")}

      <%= for error <- @import_progress.errors do %>
        • {gettext("Line %{line}: %{message}",
          line: error.csv_line_number || "?",
          message: error.message || gettext("Unknown error")
        )}
        <%= if error.field do %>
          {gettext(" (Field: %{field})", field: error.field)}
        <% end %>
        •
      <% end %>
    <% end %>
    <%= if length(@import_progress.warnings) > 0 do %>
    <% end %>
    """
  end

  @impl true
  def handle_event("validate_csv_upload", _params, socket) do
    # Nothing to do here: file type and size constraints are enforced by
    # allow_upload/3; this event only exists so phx-change has a handler.
    {:noreply, socket}
  end

  @impl true
  def handle_event("start_import", _params, socket) do
    case check_import_prerequisites(socket) do
      {:error, message} -> {:noreply, put_flash(socket, :error, message)}
      :ok -> process_csv_upload(socket)
    end
  end

  # Checks if all prerequisites for starting an import are met.
  #
  # Validates:
  # - User has admin permissions
  # - No import is currently running
  # - CSV file is uploaded and ready
  #
  # Returns `:ok` if all checks pass, `{:error, message}` otherwise.
  #
  # Note: on_mount :ensure_user_role_loaded already guarantees the role is loaded,
  # so ensure_actor_loaded is primarily for clarity.
  @spec check_import_prerequisites(Phoenix.LiveView.Socket.t()) :: :ok | {:error, String.t()}
  defp check_import_prerequisites(socket) do
    # on_mount already ensures role is loaded, but we keep this for clarity
    user_with_role = ensure_actor_loaded(socket)

    cond do
      not Authorization.can?(user_with_role, :create, Mv.Membership.Member) ->
        {:error, gettext("Only administrators can import members from CSV files.")}

      socket.assigns.import_status == :running ->
        {:error, gettext("Import is already running. Please wait for it to complete.")}

      Enum.empty?(socket.assigns.uploads.csv_file.entries) ->
        {:error, gettext("Please select a CSV file to import.")}

      # auto_upload: true means the upload may still be in flight when the
      # user clicks "Start Import"; done? guards against reading a partial file.
      not List.first(socket.assigns.uploads.csv_file.entries).done? ->
        {:error,
         gettext("Please wait for the file upload to complete before starting the import.")}

      true ->
        :ok
    end
  end

  # Processes CSV upload and starts import process.
  #
  # Reads the uploaded CSV file, prepares it for import, and initiates
  # the chunked processing workflow.
@spec process_csv_upload(Phoenix.LiveView.Socket.t()) :: {:noreply, Phoenix.LiveView.Socket.t()} defp process_csv_upload(socket) do actor = MvWeb.LiveHelpers.current_actor(socket) with {:ok, content} <- consume_and_read_csv(socket), {:ok, import_state} <- MemberCSV.prepare(content, max_rows: Config.csv_import_max_rows(), actor: actor) do start_import(socket, import_state) else {:error, reason} when is_binary(reason) -> {:noreply, put_flash( socket, :error, gettext("Failed to prepare CSV import: %{reason}", reason: reason) )} {:error, error} -> error_message = format_error_message(error) {:noreply, put_flash( socket, :error, gettext("Failed to prepare CSV import: %{reason}", reason: error_message) )} end end # Starts the import process by initializing progress tracking and scheduling the first chunk. @spec start_import(Phoenix.LiveView.Socket.t(), map()) :: {:noreply, Phoenix.LiveView.Socket.t()} defp start_import(socket, import_state) do progress = initialize_import_progress(import_state) socket = socket |> assign(:import_state, import_state) |> assign(:import_progress, progress) |> assign(:import_status, :running) send(self(), {:process_chunk, 0}) {:noreply, socket} end # Initializes the import progress tracking structure with default values. @spec initialize_import_progress(map()) :: map() defp initialize_import_progress(import_state) do %{ inserted: 0, failed: 0, errors: [], warnings: import_state.warnings || [], status: :running, current_chunk: 0, total_chunks: length(import_state.chunks), errors_truncated?: false } end # Formats error messages for user-friendly display. # # Handles various error types including Ash errors, maps with message fields, # lists of errors, and fallback formatting for unknown types. 
  # Formats an arbitrary error term into a short, user-facing string.
  #
  # Recognised shapes, in order: Ash.Error.Invalid structs, maps with a
  # binary :message, maps with an :errors list, plain binaries, and a
  # catch-all that inspects (and truncates) anything else.
  @spec format_error_message(any()) :: String.t()
  defp format_error_message(error) do
    case error do
      %Ash.Error.Invalid{} = ash_error ->
        format_ash_error(ash_error)

      %{message: msg} when is_binary(msg) ->
        msg

      %{errors: errors} when is_list(errors) ->
        format_error_list(errors)

      reason when is_binary(reason) ->
        reason

      other ->
        format_unknown_error(other)
    end
  end

  # Formats Ash validation errors for display (comma-separated messages).
  defp format_ash_error(%Ash.Error.Invalid{errors: errors}) when is_list(errors) do
    Enum.map_join(errors, ", ", &format_single_error/1)
  end

  # Fallback when the Ash error carries no usable error list.
  defp format_ash_error(error) do
    format_unknown_error(error)
  end

  # Formats a list of errors into a readable comma-separated string.
  defp format_error_list(errors) do
    Enum.map_join(errors, ", ", &format_single_error/1)
  end

  # Formats a single error item: prefer :message, fall back to :field,
  # then to a full inspect of the map.
  defp format_single_error(error) when is_map(error) do
    Map.get(error, :message) || Map.get(error, :field) || inspect(error, limit: :infinity)
  end

  # NOTE(review): to_string/1 raises for terms without a String.Chars
  # implementation (e.g. tuples) — assumed not to occur in practice; confirm
  # against what MemberCSV / Ash can emit here.
  defp format_single_error(error) do
    to_string(error)
  end

  # Formats unknown error types, truncating very long inspect output to
  # @max_error_message_length characters (including the "..." suffix).
  defp format_unknown_error(other) do
    error_str = inspect(other, limit: :infinity, pretty: true)

    if String.length(error_str) > @max_error_message_length do
      String.slice(error_str, 0, @max_error_message_length - 3) <> "..."
    else
      error_str
    end
  end

  # Drives the chunk loop: validates the stored import state and chunk index,
  # then starts processing of chunk `idx` (async in prod, inline in tests).
  @impl true
  def handle_info({:process_chunk, idx}, socket) do
    case socket.assigns do
      %{import_state: import_state, import_progress: progress}
      when is_map(import_state) and is_map(progress) ->
        if idx < length(import_state.chunks) do
          start_chunk_processing_task(socket, import_state, progress, idx)
        else
          handle_chunk_error(socket, :invalid_index, idx)
        end

      _ ->
        # Missing required assigns - mark as error
        handle_chunk_error(socket, :missing_state, idx)
    end
  end

  # Receives the result of a successfully processed chunk from the worker.
  @impl true
  def handle_info({:chunk_done, idx, result}, socket) do
    case socket.assigns do
      %{import_state: import_state, import_progress: progress}
      when is_map(import_state) and is_map(progress) ->
        handle_chunk_result(socket, import_state, progress, idx, result)

      _ ->
        # Missing required assigns - mark as error
        handle_chunk_error(socket, :missing_state, idx)
    end
  end

  # Receives a failed chunk from the worker and surfaces the failure.
  @impl true
  def handle_info({:chunk_error, idx, reason}, socket) do
    handle_chunk_error(socket, :processing_failed, idx, reason)
  end

  # Processes a chunk with error handling and sends result message to LiveView.
  #
  # Handles errors from MemberCSV.process_chunk and sends appropriate messages
  # to the LiveView process for progress tracking. The broad rescue/catch is a
  # deliberate process boundary: any crash in the worker must surface as a
  # :chunk_error message instead of silently killing the task.
  @spec process_chunk_with_error_handling(
          list(),
          map(),
          map(),
          keyword(),
          pid(),
          non_neg_integer()
        ) :: :ok
  defp process_chunk_with_error_handling(
         chunk,
         column_map,
         custom_field_map,
         opts,
         live_view_pid,
         idx
       ) do
    result =
      try do
        MemberCSV.process_chunk(chunk, column_map, custom_field_map, opts)
      rescue
        e -> {:error, Exception.message(e)}
      catch
        :exit, reason -> {:error, inspect(reason)}
        :throw, reason -> {:error, inspect(reason)}
      end

    case result do
      {:ok, chunk_result} -> send(live_view_pid, {:chunk_done, idx, chunk_result})
      {:error, reason} -> send(live_view_pid, {:chunk_error, idx, reason})
    end
  end

  # Starts async task to process a chunk of CSV rows.
  #
  # In tests (SQL sandbox mode), runs synchronously to avoid Ecto Sandbox issues.
  @spec start_chunk_processing_task(
          Phoenix.LiveView.Socket.t(),
          map(),
          map(),
          non_neg_integer()
        ) :: {:noreply, Phoenix.LiveView.Socket.t()}
  defp start_chunk_processing_task(socket, import_state, progress, idx) do
    chunk = Enum.at(import_state.chunks, idx)
    actor = ensure_actor_loaded(socket)
    live_view_pid = self()

    # Pass the existing error count so the worker can cap new errors at
    # @max_errors across the whole import, not just per chunk.
    opts = [
      custom_field_lookup: import_state.custom_field_lookup,
      existing_error_count: length(progress.errors),
      max_errors: @max_errors,
      actor: actor
    ]

    # Get locale from socket for translations in background tasks.
    # NOTE(review): this put_locale also affects the LiveView process itself,
    # not only the spawned task — presumably intentional; confirm.
    locale = socket.assigns[:locale] || "de"
    Gettext.put_locale(MvWeb.Gettext, locale)

    if Config.sql_sandbox?() do
      # Run synchronously in tests to avoid Ecto Sandbox issues with async tasks.
      # In test mode, send the message - it will be processed when render() is
      # called in the test. The test helper wait_for_import_completion() handles
      # message processing.
      process_chunk_with_error_handling(
        chunk,
        import_state.column_map,
        import_state.custom_field_map,
        opts,
        live_view_pid,
        idx
      )
    else
      # Start async task to process chunk in production.
      # Use start_child for fire-and-forget: no monitor, no Task messages.
      # We only use our own send/2 messages for communication.
      Task.Supervisor.start_child(Mv.TaskSupervisor, fn ->
        # Set locale in task process for translations
        Gettext.put_locale(MvWeb.Gettext, locale)

        process_chunk_with_error_handling(
          chunk,
          import_state.column_map,
          import_state.custom_field_map,
          opts,
          live_view_pid,
          idx
        )
      end)
    end

    {:noreply, socket}
  end

  # Handles chunk processing result from async task and schedules the next chunk.
@spec handle_chunk_result( Phoenix.LiveView.Socket.t(), map(), map(), non_neg_integer(), map() ) :: {:noreply, Phoenix.LiveView.Socket.t()} defp handle_chunk_result(socket, import_state, progress, idx, chunk_result) do # Merge progress new_progress = merge_progress(progress, chunk_result, idx) socket = socket |> assign(:import_progress, new_progress) |> assign(:import_status, new_progress.status) # Schedule next chunk or mark as done socket = schedule_next_chunk(socket, idx, length(import_state.chunks)) {:noreply, socket} end # Handles chunk processing errors and updates socket with error status. @spec handle_chunk_error( Phoenix.LiveView.Socket.t(), :invalid_index | :missing_state | :processing_failed, non_neg_integer(), any() ) :: {:noreply, Phoenix.LiveView.Socket.t()} defp handle_chunk_error(socket, error_type, idx, reason \\ nil) do error_message = case error_type do :invalid_index -> gettext("Invalid chunk index: %{idx}", idx: idx) :missing_state -> gettext("Import state is missing. Cannot process chunk %{idx}.", idx: idx) :processing_failed -> gettext("Failed to process chunk %{idx}: %{reason}", idx: idx, reason: inspect(reason) ) end socket = socket |> assign(:import_status, :error) |> put_flash(:error, error_message) {:noreply, socket} end # Consumes uploaded CSV file entries and reads the file content. # # Returns the file content as a binary string or an error tuple. 
@spec consume_and_read_csv(Phoenix.LiveView.Socket.t()) :: {:ok, String.t()} | {:error, String.t()} defp consume_and_read_csv(socket) do raw = consume_uploaded_entries(socket, :csv_file, &read_file_entry/2) case raw do [{:ok, content}] when is_binary(content) -> {:ok, content} # Phoenix LiveView test (render_upload) can return raw content list when callback return is treated as value [content] when is_binary(content) -> {:ok, content} [{:error, reason}] -> {:error, gettext("Failed to read file: %{reason}", reason: reason)} [] -> {:error, gettext("No file was uploaded")} _other -> {:error, gettext("Failed to read uploaded file: unexpected format")} end end # Reads a single file entry from the uploaded path @spec read_file_entry(map(), map()) :: {:ok, String.t()} | {:error, String.t()} defp read_file_entry(%{path: path}, _entry) do case File.read(path) do {:ok, content} -> {:ok, content} {:error, reason} when is_atom(reason) -> # POSIX error atoms (e.g., :enoent) need to be formatted {:error, :file.format_error(reason)} {:error, %File.Error{reason: reason}} -> # File.Error struct with reason atom {:error, :file.format_error(reason)} {:error, reason} -> # Fallback for other error types {:error, Exception.message(reason)} end end # Merges chunk processing results into the overall import progress. # # Handles error capping, warning merging, and status updates. @spec merge_progress(map(), map(), non_neg_integer()) :: map() defp merge_progress(progress, chunk_result, current_chunk_idx) do # Merge errors with cap of @max_errors overall all_errors = progress.errors ++ chunk_result.errors new_errors = Enum.take(all_errors, @max_errors) errors_truncated? 
= length(all_errors) > @max_errors # Merge warnings (optional dedupe - simple append for now) new_warnings = progress.warnings ++ Map.get(chunk_result, :warnings, []) # Update status based on whether we're done # current_chunk_idx is 0-based, so after processing chunk 0, we've processed 1 chunk chunks_processed = current_chunk_idx + 1 new_status = if chunks_processed >= progress.total_chunks, do: :done, else: :running %{ inserted: progress.inserted + chunk_result.inserted, failed: progress.failed + chunk_result.failed, errors: new_errors, warnings: new_warnings, status: new_status, current_chunk: chunks_processed, total_chunks: progress.total_chunks, errors_truncated?: errors_truncated? || chunk_result.errors_truncated? } end # Schedules the next chunk for processing or marks import as complete. @spec schedule_next_chunk(Phoenix.LiveView.Socket.t(), non_neg_integer(), non_neg_integer()) :: Phoenix.LiveView.Socket.t() defp schedule_next_chunk(socket, current_idx, total_chunks) do next_idx = current_idx + 1 if next_idx < total_chunks do # Schedule next chunk send(self(), {:process_chunk, next_idx}) socket else # All chunks processed - status already set to :done in merge_progress socket end end # Determines if the import button should be disabled based on import status and upload state @spec import_button_disabled?(:idle | :running | :done | :error, [map()]) :: boolean() defp import_button_disabled?(:running, _entries), do: true defp import_button_disabled?(_status, []), do: true defp import_button_disabled?(_status, [entry | _]) when not entry.done?, do: true defp import_button_disabled?(_status, _entries), do: false # Ensures the actor (user with role) is loaded from socket assigns. # # Note: on_mount :ensure_user_role_loaded already guarantees the role is loaded, # so this is primarily for clarity and defensive programming. 
@spec ensure_actor_loaded(Phoenix.LiveView.Socket.t()) :: Mv.Accounts.User.t() | nil defp ensure_actor_loaded(socket) do user = socket.assigns[:current_user] # on_mount already ensures role is loaded, but we keep this for clarity Actor.ensure_loaded(user) end end