+
<.icon
name="hero-exclamation-circle"
@@ -402,8 +411,21 @@ defmodule MvWeb.GlobalSettingsLive do
{:noreply, socket}
- {:error, _error} = error_result ->
- error_result
+ {:error, error} ->
+ error_message =
+ case error do
+ %{message: msg} when is_binary(msg) -> msg
+ %{errors: errors} when is_list(errors) -> inspect(errors)
+ reason when is_binary(reason) -> reason
+ other -> inspect(other)
+ end
+
+ {:noreply,
+ put_flash(
+ socket,
+ :error,
+ gettext("Failed to prepare CSV import: %{error}", error: error_message)
+ )}
end
else
{:error, reason} when is_binary(reason) ->
@@ -524,7 +546,7 @@ defmodule MvWeb.GlobalSettingsLive do
%{import_state: import_state, import_progress: progress}
when is_map(import_state) and is_map(progress) ->
if idx >= 0 and idx < length(import_state.chunks) do
- process_chunk_and_schedule_next(socket, import_state, progress, idx)
+ start_chunk_processing_task(socket, import_state, progress, idx)
else
handle_chunk_error(socket, :invalid_index, idx)
end
@@ -535,44 +557,73 @@ defmodule MvWeb.GlobalSettingsLive do
end
end
- # Processes a chunk and schedules the next one
- defp process_chunk_and_schedule_next(socket, import_state, progress, idx) do
- chunk = Enum.at(import_state.chunks, idx)
+ @impl true
+ def handle_info({:chunk_done, idx, result}, socket) do
+ case socket.assigns do
+ %{import_state: import_state, import_progress: progress}
+ when is_map(import_state) and is_map(progress) ->
+ handle_chunk_result(socket, import_state, progress, idx, result)
- # Process chunk with existing error count for capping
- opts = [
- custom_field_lookup: import_state.custom_field_lookup,
- existing_error_count: length(progress.errors),
- max_errors: @max_errors,
- actor: socket.assigns[:current_user]
- ]
-
- case MemberCSV.process_chunk(
- chunk,
- import_state.column_map,
- import_state.custom_field_map,
- opts
- ) do
- {:ok, chunk_result} ->
- # Merge progress
- new_progress = merge_progress(progress, chunk_result, idx)
-
- socket =
- socket
- |> assign(:import_progress, new_progress)
- |> assign(:import_status, new_progress.status)
-
- # Schedule next chunk or mark as done
- socket = schedule_next_chunk(socket, idx, length(import_state.chunks))
-
- {:noreply, socket}
-
- {:error, reason} ->
- # Chunk processing failed - mark as error
- handle_chunk_error(socket, :processing_failed, idx, reason)
+ _ ->
+ # Missing required assigns - mark as error
+ handle_chunk_error(socket, :missing_state, idx)
end
end
+ @impl true
+ def handle_info({:chunk_error, idx, reason}, socket) do
+ handle_chunk_error(socket, :processing_failed, idx, reason)
+ end
+
+ # Starts async task to process a chunk
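+ # The task reports back to this LiveView via {:chunk_done, idx, result}
+ # or {:chunk_error, idx, reason}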
+ defp start_chunk_processing_task(socket, import_state, progress, idx) do
+ chunk = Enum.at(import_state.chunks, idx)
+ actor = socket.assigns[:current_user]
+ live_view_pid = self()
+
+ # Process chunk with existing error count for capping
+ opts = [
+ custom_field_lookup: import_state.custom_field_lookup,
+ existing_error_count: length(progress.errors),
+ max_errors: @max_errors,
+ actor: actor
+ ]
+
+ # Start async task to process chunk
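+ # Note: async_nolink/2 also delivers the task's {ref, result} reply and a
+ # :DOWN message to this LiveView, so handle_info/2 should tolerate those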
+ Task.Supervisor.async_nolink(Mv.TaskSupervisor, fn ->
+ case MemberCSV.process_chunk(
+ chunk,
+ import_state.column_map,
+ import_state.custom_field_map,
+ opts
+ ) do
+ {:ok, chunk_result} ->
+ send(live_view_pid, {:chunk_done, idx, chunk_result})
+
+ {:error, reason} ->
+ send(live_view_pid, {:chunk_error, idx, reason})
+ end
+ end)
+
+ {:noreply, socket}
+ end
+
+ # Handles chunk processing result from async task
+ defp handle_chunk_result(socket, import_state, progress, idx, chunk_result) do
+ # Merge progress
+ new_progress = merge_progress(progress, chunk_result, idx)
+
+ socket =
+ socket
+ |> assign(:import_progress, new_progress)
+ |> assign(:import_status, new_progress.status)
+
+ # Schedule next chunk or mark as done
+ socket = schedule_next_chunk(socket, idx, length(import_state.chunks))
+
+ {:noreply, socket}
+ end
+
# Handles chunk processing errors
defp handle_chunk_error(socket, error_type, idx, reason \\ nil) do
error_message =
@@ -612,39 +663,32 @@ defmodule MvWeb.GlobalSettingsLive do
end
defp consume_and_read_csv(socket) do
- result =
- case consume_uploaded_entries(socket, :csv_file, fn %{path: path}, _entry ->
- File.read!(path)
- end) do
- [{_name, content}] when is_binary(content) ->
- {:ok, content}
-
- [] ->
- {:error, gettext("No file was uploaded")}
-
- [{_name, {:ok, content}}] when is_binary(content) ->
- # Handle case where callback returns {:ok, content}
- {:ok, content}
-
- [content] when is_binary(content) ->
- # Handle case where consume_uploaded_entries returns a list with the content directly
- {:ok, content}
-
- _other ->
- {:error, gettext("Failed to read uploaded file")}
+ consume_uploaded_entries(socket, :csv_file, fn %{path: path}, _entry ->
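+ # consume_uploaded_entries/3 expects the callback to return {:ok, value};
+ # wrap both read outcomes so errors are reported back instead of raising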
+ case File.read(path) do
+ {:ok, content} -> {:ok, {:ok, content}}
+ {:error, posix} -> {:ok, {:error, posix |> :file.format_error() |> List.to_string()}}
end
+ end)
+ |> case do
+ [{:ok, content}] when is_binary(content) ->
+ {:ok, content}
- result
- rescue
- e in File.Error ->
- {:error, gettext("Failed to read file: %{reason}", reason: Exception.message(e))}
+ [{:error, reason}] ->
+ {:error, gettext("Failed to read file: %{reason}", reason: reason)}
+
+ [] ->
+ {:error, gettext("No file was uploaded")}
+
+ _other ->
+ {:error, gettext("Failed to read uploaded file")}
+ end
end
defp merge_progress(progress, chunk_result, current_chunk_idx) do
- # Merge errors with cap of 50 overall
+ # Merge errors with cap of @max_errors overall
all_errors = progress.errors ++ chunk_result.errors
- new_errors = Enum.take(all_errors, 50)
- errors_truncated? = length(all_errors) > 50
+ new_errors = Enum.take(all_errors, @max_errors)
+ errors_truncated? = length(all_errors) > @max_errors
# Merge warnings (optional dedupe - simple append for now)
new_warnings = progress.warnings ++ Map.get(chunk_result, :warnings, [])
diff --git a/test/mv_web/live/global_settings_live_test.exs b/test/mv_web/live/global_settings_live_test.exs
index f403c6d..1756dc1 100644
--- a/test/mv_web/live/global_settings_live_test.exs
+++ b/test/mv_web/live/global_settings_live_test.exs
@@ -19,6 +19,26 @@ defmodule MvWeb.GlobalSettingsLiveTest do
|> render_upload(filename)
end
+ # Helper function that waits for import completion by polling for the results panel
+ # Re-renders between short sleeps instead of relying on a single long Process.sleep/1
+ defp wait_for_import_completion(view, max_attempts \\ 10) do
+ Enum.reduce_while(1..max_attempts, nil, fn attempt, _acc ->
+ html = render(view)
+
+ if has_element?(view, "[data-testid='import-results-panel']") do
+ {:halt, html}
+ else
+ if attempt < max_attempts do
+ # Brief pause so the async chunk task can report back before re-rendering
+ :timer.sleep(50)
+ {:cont, nil}
+ else
+ {:halt, html}
+ end
+ end
+ end)
+ end
+
describe "Global Settings LiveView" do
setup %{conn: conn} do
user = create_test_user(%{email: "admin@example.com"})
@@ -333,18 +353,13 @@ defmodule MvWeb.GlobalSettingsLiveTest do
|> form("#csv-upload-form", %{})
|> render_submit()
- # Wait for chunk processing to complete
- # Process all chunks by waiting for final state
- Process.sleep(500)
+ # Poll until the import results panel appears
+ html = wait_for_import_completion(view)
- # Check final status
- html = render(view)
- # Should show done status or success message
- assert html =~ "done" or html =~ "complete" or html =~ "success" or
- html =~ "finished" or html =~ "imported" or html =~ "Inserted"
-
- # Should show success count > 0
- assert html =~ "2" or html =~ "inserted" or html =~ "success"
+ # Check final status using data-testid
+ assert has_element?(view, "[data-testid='import-results-panel']")
+ # Should show success count
+ assert html =~ "Successfully inserted"
end
test "error handling: invalid CSV shows errors with line numbers", %{