defmodule MvWeb.ImportExportLive do
  @moduledoc """
  LiveView for importing and exporting members via CSV.

  ## Features

  - CSV member import (admin only)
  - Real-time import progress tracking
  - Error and warning reporting
  - Custom fields support

  ## CSV Import

  The CSV import feature allows administrators to upload CSV files and import members.

  ### File Upload

  Files are uploaded automatically when selected (`auto_upload: true`).
  No manual upload trigger is required.

  ### Rate Limiting

  Currently, there is no rate limiting for CSV imports. Administrators can start multiple
  imports in quick succession. This is intentional for bulk data migration scenarios, but
  should be monitored in production.

  ### Limits

  - Maximum file size: configurable via `config :mv, csv_import: [max_file_size_mb: ...]`
  - Maximum rows: configurable via `config :mv, csv_import: [max_rows: ...]` (excluding header)
  - Processing: chunks of 200 rows
  - Errors: capped at 50 per import
  """
  use MvWeb, :live_view

  alias Mv.Authorization.Actor
  alias Mv.Config
  alias Mv.Membership
  alias Mv.Membership.Import.MemberCSV
  alias MvWeb.Authorization

  on_mount {MvWeb.LiveHelpers, :ensure_user_role_loaded}

  # CSV Import configuration constants
  @max_errors 50

  @impl true
  def mount(_params, session, socket) do
    # Get locale from session for translations
    locale = session["locale"] || "de"
    Gettext.put_locale(MvWeb.Gettext, locale)

    # Get club name from settings
    club_name =
      case Membership.get_settings() do
        {:ok, settings} -> settings.club_name
        _ -> "Mitgliederverwaltung"
      end

    socket =
      socket
      |> assign(:page_title, gettext("Import/Export"))
      |> assign(:club_name, club_name)
      |> assign(:import_state, nil)
      |> assign(:import_progress, nil)
      |> assign(:import_status, :idle)
      |> assign(:locale, locale)
      |> assign(:max_errors, @max_errors)
      |> assign(:csv_import_max_rows, Config.csv_import_max_rows())
      |> assign(:csv_import_max_file_size_mb, Config.csv_import_max_file_size_mb())
      # Configure file upload with auto-upload enabled:
      # files are uploaded automatically when selected, no manual trigger needed.
      |> allow_upload(:csv_file,
        accept: ~w(.csv),
        max_entries: 1,
        max_file_size: Config.csv_import_max_file_size_bytes(),
        auto_upload: true
      )

    {:ok, socket}
  end

  @impl true
  def render(assigns) do
    ~H"""
    <.header>
      {gettext("Import/Export")}
      <:subtitle>
        {gettext("Import members from CSV files or export member data.")}
      </:subtitle>
    </.header>

    <%= if Authorization.can?(@current_user, :create, Mv.Membership.Member) do %>
      <%!-- CSV Import Section --%>
      <.form_section title={gettext("Import Members (CSV)")}>
<.icon name="hero-information-circle" class="size-5" aria-hidden="true" />

{gettext( "Use the data field name as the CSV column header in your file. Data fields must exist in Mila before importing, so they must be listed in the list of memberdate (like e-mail or first name). Unknown data field columns will be ignored with a warning." )}

<.link href={~p"/settings#custom_fields"} class="link" data-testid="custom-fields-link" > {gettext("Manage Memberdata")}

{gettext("Download CSV templates:")}

<.form id="csv-upload-form" for={%{}} multipart={true} phx-change="validate_csv_upload" phx-submit="start_import" data-testid="csv-upload-form" >
<.live_file_input upload={@uploads.csv_file} id="csv_file" class="file-input file-input-bordered w-full" aria-describedby="csv_file_help" />
<.button type="submit" phx-disable-with={gettext("Starting import...")} variant="primary" disabled={ @import_status == :running or Enum.empty?(@uploads.csv_file.entries) or @uploads.csv_file.entries |> List.first() |> then(&(&1 && not &1.done?)) } data-testid="start-import-button" > {gettext("Start Import")} <%= if @import_status == :running or @import_status == :done do %> <%= if @import_progress do %>
<%= if @import_progress.status == :running do %>

{gettext("Processing chunk %{current} of %{total}...", current: @import_progress.current_chunk, total: @import_progress.total_chunks )}

<% end %> <%= if @import_progress.status == :done do %>

{gettext("Import Results")}

{gettext("Summary")}

<.icon name="hero-check-circle" class="size-4 inline mr-1" aria-hidden="true" /> {gettext("Successfully inserted: %{count} member(s)", count: @import_progress.inserted )}

<%= if @import_progress.failed > 0 do %>

<.icon name="hero-exclamation-circle" class="size-4 inline mr-1" aria-hidden="true" /> {gettext("Failed: %{count} row(s)", count: @import_progress.failed)}

<% end %> <%= if @import_progress.errors_truncated? do %>

<.icon name="hero-information-circle" class="size-4 inline mr-1" aria-hidden="true" /> {gettext("Error list truncated to %{count} entries", count: @max_errors )}

<% end %>
<%= if length(@import_progress.errors) > 0 do %>

<.icon name="hero-exclamation-circle" class="size-4 inline mr-1" aria-hidden="true" /> {gettext("Errors")}

<ul>
  <%= for error <- @import_progress.errors do %>
    <li>
      {gettext("Line %{line}: %{message}",
        line: error.csv_line_number || "?",
        message: error.message || gettext("Unknown error")
      )}
      <%= if error.field do %>
        {gettext(" (Field: %{field})", field: error.field)}
      <% end %>
    </li>
  <% end %>
</ul>
<% end %> <%= if length(@import_progress.warnings) > 0 do %>
<.icon name="hero-information-circle" class="size-5" aria-hidden="true" />

{gettext("Warnings")}

<ul>
  <%= for warning <- @import_progress.warnings do %>
    <li>{warning}</li>
  <% end %>
</ul>
<% end %>
<% end %>
<% end %> <% end %> <%!-- Export Section (Placeholder) --%> <.form_section title={gettext("Export Members (CSV)")}>
<.icon name="hero-information-circle" class="size-5" aria-hidden="true" />

{gettext("Export functionality will be available in a future release.")}

<% else %> <% end %>
""" end @impl true def handle_event("validate_csv_upload", _params, socket) do {:noreply, socket} end @impl true def handle_event("start_import", _params, socket) do case check_import_prerequisites(socket) do {:error, message} -> {:noreply, put_flash(socket, :error, message)} :ok -> process_csv_upload(socket) end end # Checks if import can be started (admin permission, status, upload ready) defp check_import_prerequisites(socket) do # Ensure user role is loaded before authorization check user = socket.assigns[:current_user] user_with_role = Actor.ensure_loaded(user) cond do not Authorization.can?(user_with_role, :create, Mv.Membership.Member) -> {:error, gettext("Only administrators can import members from CSV files.")} socket.assigns.import_status == :running -> {:error, gettext("Import is already running. Please wait for it to complete.")} Enum.empty?(socket.assigns.uploads.csv_file.entries) -> {:error, gettext("Please select a CSV file to import.")} not List.first(socket.assigns.uploads.csv_file.entries).done? -> {:error, gettext("Please wait for the file upload to complete before starting the import.")} true -> :ok end end # Processes CSV upload and starts import defp process_csv_upload(socket) do actor = MvWeb.LiveHelpers.current_actor(socket) with {:ok, content} <- consume_and_read_csv(socket), {:ok, import_state} <- MemberCSV.prepare(content, max_rows: Config.csv_import_max_rows(), actor: actor) do start_import(socket, import_state) else {:error, reason} when is_binary(reason) -> {:noreply, put_flash( socket, :error, gettext("Failed to prepare CSV import: %{reason}", reason: reason) )} {:error, error} -> error_message = format_error_message(error) {:noreply, put_flash( socket, :error, gettext("Failed to prepare CSV import: %{error}", error: error_message) )} end end # Starts the import process defp start_import(socket, import_state) do progress = initialize_import_progress(import_state) socket = socket |> assign(:import_state, import_state) |> assign(:import_progress, progress) |> assign(:import_status, :running) send(self(), {:process_chunk, 0}) {:noreply, socket} end # Initializes import progress structure defp initialize_import_progress(import_state) do %{ inserted: 0, failed: 0, errors: [], warnings: import_state.warnings || [], status: :running, current_chunk: 0, total_chunks: length(import_state.chunks), errors_truncated?: false } end # Formats error messages for display defp format_error_message(error) do case error do %{message: msg} when is_binary(msg) -> msg %{errors: errors} when is_list(errors) -> inspect(errors) reason when is_binary(reason) -> reason other -> inspect(other) end end @impl true def handle_info({:process_chunk, idx}, socket) do case socket.assigns do %{import_state: import_state, import_progress: progress} when is_map(import_state) and is_map(progress) -> if idx >= 0 and idx < length(import_state.chunks) do start_chunk_processing_task(socket, import_state, progress, idx) else handle_chunk_error(socket, :invalid_index, idx) end _ -> # Missing required assigns - mark as error handle_chunk_error(socket, :missing_state, idx) end end @impl true def handle_info({:chunk_done, idx, result}, socket) do case socket.assigns do %{import_state: import_state, import_progress: progress} when is_map(import_state) and is_map(progress) -> handle_chunk_result(socket, import_state, progress, idx, result) _ -> # Missing required assigns - mark as error handle_chunk_error(socket, :missing_state, idx) end end @impl true def handle_info({:chunk_error, idx, reason}, socket) do 
    handle_chunk_error(socket, :processing_failed, idx, reason)
  end

  # Starts async task to process a chunk
  # In tests (SQL sandbox mode), runs synchronously to avoid Ecto Sandbox issues
  defp start_chunk_processing_task(socket, import_state, progress, idx) do
    chunk = Enum.at(import_state.chunks, idx)

    # Ensure user role is loaded before using as actor
    user = socket.assigns[:current_user]
    actor = Actor.ensure_loaded(user)
    live_view_pid = self()

    # Process chunk with existing error count for capping
    opts = [
      custom_field_lookup: import_state.custom_field_lookup,
      existing_error_count: length(progress.errors),
      max_errors: @max_errors,
      actor: actor
    ]

    # Get locale from socket for translations in background tasks
    locale = socket.assigns[:locale] || "de"
    Gettext.put_locale(MvWeb.Gettext, locale)

    if Config.sql_sandbox?() do
      # Run synchronously in tests to avoid Ecto Sandbox issues with async tasks
      {:ok, chunk_result} =
        MemberCSV.process_chunk(
          chunk,
          import_state.column_map,
          import_state.custom_field_map,
          opts
        )

      # In test mode, send the message - it will be processed when render() is called
      # in the test. The test helper wait_for_import_completion() handles message processing.
      send(live_view_pid, {:chunk_done, idx, chunk_result})
    else
      # Start async task to process chunk in production.
      # Use start_child for fire-and-forget: no monitor, no Task messages.
      # We only use our own send/2 messages for communication.
      Task.Supervisor.start_child(Mv.TaskSupervisor, fn ->
        # Set locale in task process for translations
        Gettext.put_locale(MvWeb.Gettext, locale)

        {:ok, chunk_result} =
          MemberCSV.process_chunk(
            chunk,
            import_state.column_map,
            import_state.custom_field_map,
            opts
          )

        send(live_view_pid, {:chunk_done, idx, chunk_result})
      end)
    end

    {:noreply, socket}
  end

  # Handles chunk processing result from async task
  defp handle_chunk_result(socket, import_state, progress, idx, chunk_result) do
    # Merge progress
    new_progress = merge_progress(progress, chunk_result, idx)

    socket =
      socket
      |> assign(:import_progress, new_progress)
      |> assign(:import_status, new_progress.status)

    # Schedule next chunk or mark as done
    socket = schedule_next_chunk(socket, idx, length(import_state.chunks))

    {:noreply, socket}
  end

  # Handles chunk processing errors
  defp handle_chunk_error(socket, error_type, idx, reason \\ nil) do
    error_message =
      case error_type do
        :invalid_index ->
          gettext("Invalid chunk index: %{idx}", idx: idx)

        :missing_state ->
          gettext("Import state is missing. Cannot process chunk %{idx}.", idx: idx)

        :processing_failed ->
          gettext("Failed to process chunk %{idx}: %{reason}",
            idx: idx,
            reason: inspect(reason)
          )
      end

    socket =
      socket
      |> assign(:import_status, :error)
      |> put_flash(:error, error_message)

    {:noreply, socket}
  end

  defp consume_and_read_csv(socket) do
    result =
      consume_uploaded_entries(socket, :csv_file, fn %{path: path}, _entry ->
        case File.read(path) do
          {:ok, content} ->
            {:ok, content}

          {:error, reason} ->
            # File.read/1 returns a POSIX atom (e.g. :enoent), not an exception struct,
            # so format it with inspect/1 rather than Exception.message/1.
            {:error, inspect(reason)}
        end
      end)

    result
    |> case do
      [content] when is_binary(content) -> {:ok, content}
      [{:ok, content}] when is_binary(content) -> {:ok, content}
      [{:error, reason}] -> {:error, gettext("Failed to read file: %{reason}", reason: reason)}
      [] -> {:error, gettext("No file was uploaded")}
      _other -> {:error, gettext("Failed to read uploaded file")}
    end
  end

  defp merge_progress(progress, chunk_result, current_chunk_idx) do
    # Merge errors with cap of @max_errors overall
    all_errors = progress.errors ++ chunk_result.errors
    new_errors = Enum.take(all_errors, @max_errors)
    errors_truncated? = length(all_errors) > @max_errors

    # Merge warnings (optional dedupe - simple append for now)
    new_warnings = progress.warnings ++ Map.get(chunk_result, :warnings, [])

    # Update status based on whether we're done.
    # current_chunk_idx is 0-based, so after processing chunk 0, we've processed 1 chunk.
    chunks_processed = current_chunk_idx + 1
    new_status = if chunks_processed >= progress.total_chunks, do: :done, else: :running

    %{
      inserted: progress.inserted + chunk_result.inserted,
      failed: progress.failed + chunk_result.failed,
      errors: new_errors,
      warnings: new_warnings,
      status: new_status,
      current_chunk: chunks_processed,
      total_chunks: progress.total_chunks,
      errors_truncated?: errors_truncated? || chunk_result.errors_truncated?
    }
  end

  defp schedule_next_chunk(socket, current_idx, total_chunks) do
    next_idx = current_idx + 1

    if next_idx < total_chunks do
      # Schedule next chunk
      send(self(), {:process_chunk, next_idx})
      socket
    else
      # All chunks processed - status already set to :done in merge_progress
      socket
    end
  end
end
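
# Illustrative configuration sketch for the CSV import limits referenced in the
# moduledoc above. The `:mv` / `csv_import:` keys and the `Config.csv_import_*`
# accessors come from this module; the config file location and the concrete
# values below are only example assumptions, not project defaults.
#
#     # e.g. in config/config.exs or config/runtime.exs
#     config :mv,
#       csv_import: [
#         max_file_size_mb: 5,
#         max_rows: 1_000
#       ]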