refactor

parent 092fd99d48
commit 04b0916c1e

4 changed files with 182 additions and 95 deletions; only the hunks touching MvWeb.GlobalSettingsLive are shown below.
@@ -52,7 +52,8 @@ defmodule MvWeb.GlobalSettingsLive do
   on_mount {MvWeb.LiveHelpers, :ensure_user_role_loaded}

   # CSV Import configuration constants
-  @max_file_size_bytes 10_485_760 # 10 MB
+  # 10 MB
+  @max_file_size_bytes 10_485_760
   @max_errors 50

   @impl true
@@ -182,6 +183,7 @@ defmodule MvWeb.GlobalSettingsLive do
         multipart={true}
         phx-change="validate_csv_upload"
         phx-submit="start_import"
+        data-testid="csv-upload-form"
       >
         <div class="form-control">
           <label for="csv_file" class="label">
@@ -204,13 +206,13 @@ defmodule MvWeb.GlobalSettingsLive do

         <.button
           type="submit"
-          phx-click="start_import"
           phx-disable-with={gettext("Starting import...")}
           variant="primary"
           disabled={
             Enum.empty?(@uploads.csv_file.entries) or
               @uploads.csv_file.entries |> List.first() |> then(&(&1 && not &1.done?))
           }
+          data-testid="start-import-button"
         >
           {gettext("Start Import")}
         </.button>
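Two things change around the submit button: the redundant phx-click="start_import" is dropped (the form's phx-submit already dispatches that event, so a click would have fired it twice), and a data-testid hook is added, matching the hooks added to the form, progress container, results panel, and error list below. A minimal LiveViewTest sketch of how tests can target these hooks; the test module, route, and setup are assumptions, not part of this commit:

defmodule MvWeb.GlobalSettingsLiveTest do
  use MvWeb.ConnCase, async: true

  import Phoenix.LiveViewTest

  test "renders the CSV upload form with its test hooks", %{conn: conn} do
    # "/global-settings" is an assumed route; adjust to the app's router.
    {:ok, view, _html} = live(conn, "/global-settings")

    # data-testid attributes are plain CSS attribute selectors.
    assert has_element?(view, ~s([data-testid="csv-upload-form"]))
    assert has_element?(view, ~s([data-testid="start-import-button"]))
  end
end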
@@ -218,9 +220,14 @@ defmodule MvWeb.GlobalSettingsLive do

     <%= if @import_status == :running or @import_status == :done do %>
       <%= if @import_progress do %>
-        <div role="status" aria-live="polite" class="mt-4">
+        <div
+          role="status"
+          aria-live="polite"
+          class="mt-4"
+          data-testid="import-progress-container"
+        >
           <%= if @import_status == :running do %>
-            <p class="text-sm">
+            <p class="text-sm" data-testid="import-progress-text">
               {gettext("Processing chunk %{current} of %{total}...",
                 current: @import_progress.current_chunk,
                 total: @import_progress.total_chunks
@@ -229,7 +236,7 @@ defmodule MvWeb.GlobalSettingsLive do
       <% end %>

       <%= if @import_status == :done do %>
-        <section class="space-y-4">
+        <section class="space-y-4" data-testid="import-results-panel">
           <h2 class="text-lg font-semibold">
             {gettext("Import Results")}
           </h2>
@@ -267,14 +274,16 @@ defmodule MvWeb.GlobalSettingsLive do
                 class="size-4 inline mr-1"
                 aria-hidden="true"
               />
-              {gettext("Error list truncated to 50 entries")}
+              {gettext("Error list truncated to %{count} entries",
+                count: @max_errors
+              )}
             </p>
           <% end %>
         </div>
       </div>

       <%= if length(@import_progress.errors) > 0 do %>
-        <div>
+        <div data-testid="import-error-list">
           <h3 class="text-sm font-semibold mb-2">
             <.icon
               name="hero-exclamation-circle"
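Swapping the hard-coded "50" for a %{count} binding keeps the message in lockstep with the @max_errors attribute that actually enforces the cap, and gives translators a real placeholder instead of a number frozen into the msgid. A one-line illustration of the interpolation, assuming the app's Gettext backend is in scope:

gettext("Error list truncated to %{count} entries", count: 50)
#=> "Error list truncated to 50 entries"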
@@ -402,8 +411,21 @@ defmodule MvWeb.GlobalSettingsLive do

         {:noreply, socket}

-      {:error, _error} = error_result ->
-        error_result
+      {:error, error} ->
+        error_message =
+          case error do
+            %{message: msg} when is_binary(msg) -> msg
+            %{errors: errors} when is_list(errors) -> inspect(errors)
+            reason when is_binary(reason) -> reason
+            other -> inspect(other)
+          end
+
+        {:noreply,
+         put_flash(
+           socket,
+           :error,
+           gettext("Failed to prepare CSV import: %{error}", error: error_message)
+         )}
       end
     else
       {:error, reason} when is_binary(reason) ->
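Instead of passing the raw {:error, _} tuple through unchanged, the new clause normalizes the assorted error shapes this pipeline can produce (exception-like maps, keyword error lists, bare strings, anything else) into a single string for the flash. The normalization reads well in isolation; an illustrative snippet, not part of the commit:

# Same clause order as the hunk above: first match wins.
normalize = fn
  %{message: msg} when is_binary(msg) -> msg
  %{errors: errors} when is_list(errors) -> inspect(errors)
  reason when is_binary(reason) -> reason
  other -> inspect(other)
end

normalize.(%{message: "missing header row"})  #=> "missing header row"
normalize.(%{errors: [row: 3]})               #=> "[row: 3]"
normalize.("file exceeds 10 MB")              #=> "file exceeds 10 MB"
normalize.(:timeout)                          #=> ":timeout"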
@@ -524,7 +546,7 @@ defmodule MvWeb.GlobalSettingsLive do
       %{import_state: import_state, import_progress: progress}
       when is_map(import_state) and is_map(progress) ->
         if idx >= 0 and idx < length(import_state.chunks) do
-          process_chunk_and_schedule_next(socket, import_state, progress, idx)
+          start_chunk_processing_task(socket, import_state, progress, idx)
         else
           handle_chunk_error(socket, :invalid_index, idx)
         end
@@ -535,44 +557,73 @@ defmodule MvWeb.GlobalSettingsLive do
     end
   end

-  # Processes a chunk and schedules the next one
-  defp process_chunk_and_schedule_next(socket, import_state, progress, idx) do
-    chunk = Enum.at(import_state.chunks, idx)
-
-    # Process chunk with existing error count for capping
-    opts = [
-      custom_field_lookup: import_state.custom_field_lookup,
-      existing_error_count: length(progress.errors),
-      max_errors: @max_errors,
-      actor: socket.assigns[:current_user]
-    ]
-
-    case MemberCSV.process_chunk(
-           chunk,
-           import_state.column_map,
-           import_state.custom_field_map,
-           opts
-         ) do
-      {:ok, chunk_result} ->
-        # Merge progress
-        new_progress = merge_progress(progress, chunk_result, idx)
-
-        socket =
-          socket
-          |> assign(:import_progress, new_progress)
-          |> assign(:import_status, new_progress.status)
-
-        # Schedule next chunk or mark as done
-        socket = schedule_next_chunk(socket, idx, length(import_state.chunks))
-
-        {:noreply, socket}
-
-      {:error, reason} ->
-        # Chunk processing failed - mark as error
-        handle_chunk_error(socket, :processing_failed, idx, reason)
-    end
-  end
+  @impl true
+  def handle_info({:chunk_done, idx, result}, socket) do
+    case socket.assigns do
+      %{import_state: import_state, import_progress: progress}
+      when is_map(import_state) and is_map(progress) ->
+        handle_chunk_result(socket, import_state, progress, idx, result)
+
+      _ ->
+        # Missing required assigns - mark as error
+        handle_chunk_error(socket, :missing_state, idx)
+    end
+  end
+
+  @impl true
+  def handle_info({:chunk_error, idx, reason}, socket) do
+    handle_chunk_error(socket, :processing_failed, idx, reason)
+  end
+
+  # Starts async task to process a chunk
+  defp start_chunk_processing_task(socket, import_state, progress, idx) do
+    chunk = Enum.at(import_state.chunks, idx)
+    actor = socket.assigns[:current_user]
+    live_view_pid = self()
+
+    # Process chunk with existing error count for capping
+    opts = [
+      custom_field_lookup: import_state.custom_field_lookup,
+      existing_error_count: length(progress.errors),
+      max_errors: @max_errors,
+      actor: actor
+    ]
+
+    # Start async task to process chunk
+    Task.Supervisor.async_nolink(Mv.TaskSupervisor, fn ->
+      case MemberCSV.process_chunk(
+             chunk,
+             import_state.column_map,
+             import_state.custom_field_map,
+             opts
+           ) do
+        {:ok, chunk_result} ->
+          send(live_view_pid, {:chunk_done, idx, chunk_result})
+
+        {:error, reason} ->
+          send(live_view_pid, {:chunk_error, idx, reason})
+      end
+    end)
+
+    {:noreply, socket}
+  end
+
+  # Handles chunk processing result from async task
+  defp handle_chunk_result(socket, import_state, progress, idx, chunk_result) do
+    # Merge progress
+    new_progress = merge_progress(progress, chunk_result, idx)
+
+    socket =
+      socket
+      |> assign(:import_progress, new_progress)
+      |> assign(:import_status, new_progress.status)
+
+    # Schedule next chunk or mark as done
+    socket = schedule_next_chunk(socket, idx, length(import_state.chunks))
+
+    {:noreply, socket}
+  end

   # Handles chunk processing errors
   defp handle_chunk_error(socket, error_type, idx, reason \\ nil) do
     error_message =
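One consequence of this design worth keeping in mind: Task.Supervisor.async_nolink/2 monitors the task it spawns, so in addition to the explicit {:chunk_done, ...} and {:chunk_error, ...} sends above, the LiveView process also receives the task's own reply as {ref, reply} and a {:DOWN, ...} message when the task exits. If no other clause in the module matches those messages, clauses like the following sketch (not part of this commit) prevent a FunctionClauseError:

@impl true
def handle_info({ref, _reply}, socket) when is_reference(ref) do
  # The result already arrived via {:chunk_done, ...}; drop the duplicate
  # reply and clear the monitor so the :DOWN message is flushed too.
  Process.demonitor(ref, [:flush])
  {:noreply, socket}
end

def handle_info({:DOWN, _ref, :process, _pid, _reason}, socket) do
  # The task exited before reporting; the :chunk_error path is expected
  # to surface the failure.
  {:noreply, socket}
end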
@@ -612,39 +663,32 @@ defmodule MvWeb.GlobalSettingsLive do
   end

   defp consume_and_read_csv(socket) do
-    result =
-      case consume_uploaded_entries(socket, :csv_file, fn %{path: path}, _entry ->
-             File.read!(path)
-           end) do
-        [{_name, content}] when is_binary(content) ->
-          {:ok, content}
-
-        [] ->
-          {:error, gettext("No file was uploaded")}
-
-        [{_name, {:ok, content}}] when is_binary(content) ->
-          # Handle case where callback returns {:ok, content}
-          {:ok, content}
-
-        [content] when is_binary(content) ->
-          # Handle case where consume_uploaded_entries returns a list with the content directly
-          {:ok, content}
-
-        _other ->
-          {:error, gettext("Failed to read uploaded file")}
-      end
-
-    result
-  rescue
-    e in File.Error ->
-      {:error, gettext("Failed to read file: %{reason}", reason: Exception.message(e))}
+    consume_uploaded_entries(socket, :csv_file, fn %{path: path}, _entry ->
+      case File.read(path) do
+        {:ok, content} -> {:ok, content}
+        {:error, reason} -> {:error, Exception.message(reason)}
+      end
+    end)
+    |> case do
+      [{_name, {:ok, content}}] when is_binary(content) ->
+        {:ok, content}
+
+      [{_name, {:error, reason}}] ->
+        {:error, gettext("Failed to read file: %{reason}", reason: reason)}
+
+      [] ->
+        {:error, gettext("No file was uploaded")}
+
+      _other ->
+        {:error, gettext("Failed to read uploaded file")}
+    end
   end

   defp merge_progress(progress, chunk_result, current_chunk_idx) do
-    # Merge errors with cap of 50 overall
+    # Merge errors with cap of @max_errors overall
     all_errors = progress.errors ++ chunk_result.errors
-    new_errors = Enum.take(all_errors, 50)
-    errors_truncated? = length(all_errors) > 50
+    new_errors = Enum.take(all_errors, @max_errors)
+    errors_truncated? = length(all_errors) > @max_errors

     # Merge warnings (optional dedupe - simple append for now)
     new_warnings = progress.warnings ++ Map.get(chunk_result, :warnings, [])
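merge_progress/3 now takes its cap from the same @max_errors attribute the UI reports, removing the last hard-coded 50. The boundary behavior of the cap, with illustrative numbers (not part of the commit):

# Illustrative only; mirrors the capping logic in merge_progress/3.
max_errors = 50
existing = List.duplicate("row 7: invalid email", 48)
incoming = List.duplicate("row 91: missing name", 5)

all_errors = existing ++ incoming
new_errors = Enum.take(all_errors, max_errors)

length(new_errors)              #=> 50 (the stored list never grows past the cap)
length(all_errors) > max_errors #=> true (sets errors_truncated? for the UI notice)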