diff --git a/apps/transport/lib/transport_web/controllers/dataset_controller.ex b/apps/transport/lib/transport_web/controllers/dataset_controller.ex
index 9807246a51..036ac3c4b1 100644
--- a/apps/transport/lib/transport_web/controllers/dataset_controller.ex
+++ b/apps/transport/lib/transport_web/controllers/dataset_controller.ex
@@ -307,6 +307,67 @@ defmodule TransportWeb.DatasetController do
|> Enum.reject(fn t -> is_nil(t.msg) end)
end
+ def resources_history_csv(%Plug.Conn{} = conn, %{"dataset_id" => dataset_id}) do
+ filename = "historisation-dataset-#{dataset_id}-#{Date.utc_today() |> Date.to_iso8601()}.csv"
+ # We define CSV columns explicitly because ordering matters for humans
+ columns = [
+ "resource_history_id",
+ "resource_id",
+ "permanent_url",
+ "payload",
+ "validation_validator",
+ "validation_result",
+ "metadata",
+ "inserted_at"
+ ]
+
+ content =
+ Transport.History.Fetcher.history_resources(%DB.Dataset{id: String.to_integer(dataset_id)})
+ |> Enum.map(fn row -> build_history_csv_row(columns, row) end)
+ |> CSV.encode(headers: columns)
+ |> Enum.to_list()
+ |> to_string()
+
+ conn
+ |> put_resp_content_type("text/csv")
+ |> put_resp_header("content-disposition", ~s|attachment; filename="#{filename}"|)
+ |> send_resp(200, content)
+ end
+
+ defp build_history_csv_row(
+ columns,
+ %DB.ResourceHistory{id: rh_id, resource_id: resource_id, payload: payload, inserted_at: inserted_at} = rh
+ ) do
+ {validation, metadata} = validation_and_metadata(rh)
+
+ row =
+ %{
+ "resource_history_id" => rh_id,
+ "resource_id" => resource_id,
+ "permanent_url" => Map.fetch!(payload, "permanent_url"),
+ "payload" => Jason.encode!(payload),
+ "validation_validator" => Map.get(validation, :validator),
+ "validation_result" => Map.get(validation, :result) |> Jason.encode!(),
+ "metadata" => Jason.encode!(metadata),
+ "inserted_at" => inserted_at
+ }
+
+ # Make sure CSV columns match what we're building
+ if MapSet.new(columns) == MapSet.new(Map.keys(row)) do
+ row
+ else
+ raise "Unexpected columns: #{inspect(Map.keys(row))} != #{inspect(columns)}"
+ end
+ end
+
+ defp validation_and_metadata(%DB.ResourceHistory{
+ validations: [%{metadata: %DB.ResourceMetadata{metadata: metadata}} = validation]
+ }) do
+ {validation, metadata}
+ end
+
+ defp validation_and_metadata(_), do: {%{}, nil}
+
defp add_current_type(results, type) do
case Enum.any?(results, &(&1.type == type)) do
true -> results
diff --git a/apps/transport/lib/transport_web/router.ex b/apps/transport/lib/transport_web/router.ex
index c10e5ee8ff..742a8a7ab7 100644
--- a/apps/transport/lib/transport_web/router.ex
+++ b/apps/transport/lib/transport_web/router.ex
@@ -144,6 +144,7 @@ defmodule TransportWeb.Router do
scope "/datasets" do
get("/", DatasetController, :index)
get("/:slug/", DatasetController, :details)
+ get("/:dataset_id/resources_history_csv", DatasetController, :resources_history_csv)
get("/aom/:aom", DatasetController, :by_aom)
get("/region/:region", DatasetController, :by_region)
get("/commune/:insee_commune", DatasetController, :by_commune_insee)
diff --git a/apps/transport/lib/transport_web/templates/dataset/_dataset_resources_history.html.heex b/apps/transport/lib/transport_web/templates/dataset/_dataset_resources_history.html.heex
index d0231cbc4f..7f63929ec7 100644
--- a/apps/transport/lib/transport_web/templates/dataset/_dataset_resources_history.html.heex
+++ b/apps/transport/lib/transport_web/templates/dataset/_dataset_resources_history.html.heex
@@ -56,16 +56,25 @@
<%= if Enum.count(@history_resources) == max_nb_history_resources() do %>
- <%= raw(
- dgettext(
- "page-dataset-details",
- ~s|Displaying the last %{nb} backed up resources. Contact us if you want to access previous data.|,
- nb: max_nb_history_resources()
- )
+ <%= dgettext("page-dataset-details", "Displaying the last %{nb} backed up resources.",
+ nb: max_nb_history_resources()
) %>
<% end %>
+
diff --git a/apps/transport/lib/transport_web/templates/dataset/details.html.heex b/apps/transport/lib/transport_web/templates/dataset/details.html.heex
index 1c82d33000..65150acaaa 100644
--- a/apps/transport/lib/transport_web/templates/dataset/details.html.heex
+++ b/apps/transport/lib/transport_web/templates/dataset/details.html.heex
@@ -213,7 +213,12 @@
<% end %>
- <%= render("_dataset_resources_history.html", history_resources: @history_resources, locale: locale, conn: @conn) %>
+ <%= render("_dataset_resources_history.html",
+ history_resources: @history_resources,
+ locale: locale,
+ conn: @conn,
+ dataset_id: @dataset.id
+ ) %>
<%= unless is_nil(@other_datasets) or @other_datasets == [] do %>
<%= dgettext("page-dataset-details", "Other datasets of %{name}", name: @territory) %>
diff --git a/apps/transport/priv/gettext/en/LC_MESSAGES/page-dataset-details.po b/apps/transport/priv/gettext/en/LC_MESSAGES/page-dataset-details.po
index 279ffdd78f..ada54693a4 100644
--- a/apps/transport/priv/gettext/en/LC_MESSAGES/page-dataset-details.po
+++ b/apps/transport/priv/gettext/en/LC_MESSAGES/page-dataset-details.po
@@ -495,7 +495,11 @@ msgid "The timestamp field appears to
msgstr ""
#, elixir-autogen, elixir-format
-msgid "Displaying the last %{nb} backed up resources. Contact us if you want to access previous data."
+msgid "Displaying the last %{nb} backed up resources."
+msgstr ""
+
+#, elixir-autogen, elixir-format
+msgid "Download history details"
msgstr ""
#, elixir-autogen, elixir-format
diff --git a/apps/transport/priv/gettext/fr/LC_MESSAGES/page-dataset-details.po b/apps/transport/priv/gettext/fr/LC_MESSAGES/page-dataset-details.po
index 9170a8c3fb..db5bec573a 100644
--- a/apps/transport/priv/gettext/fr/LC_MESSAGES/page-dataset-details.po
+++ b/apps/transport/priv/gettext/fr/LC_MESSAGES/page-dataset-details.po
@@ -495,8 +495,12 @@ msgid "The timestamp field appears to
msgstr "Le champ timestamp contient une valeur ancienne par rapport à la date courante : l'écart est de %{seconds} secondes. Essayez de mettre à jour le flux toutes les 30 secondes au plus."
#, elixir-autogen, elixir-format
-msgid "Displaying the last %{nb} backed up resources. Contact us if you want to access previous data."
-msgstr "Affiche les %{nb} dernières ressources historisées. Contactez-nous si vous souhaitez accéder aux données passées."
+msgid "Displaying the last %{nb} backed up resources."
+msgstr "Affiche les %{nb} dernières ressources historisées."
+
+#, elixir-autogen, elixir-format
+msgid "Download history details"
+msgstr "Télécharger les ressources historisées"
#, elixir-autogen, elixir-format
msgid "Data published by"
diff --git a/apps/transport/priv/gettext/page-dataset-details.pot b/apps/transport/priv/gettext/page-dataset-details.pot
index 860753be4d..c3d96ad6f6 100644
--- a/apps/transport/priv/gettext/page-dataset-details.pot
+++ b/apps/transport/priv/gettext/page-dataset-details.pot
@@ -495,7 +495,11 @@ msgid "The timestamp field appears to
msgstr ""
#, elixir-autogen, elixir-format
-msgid "Displaying the last %{nb} backed up resources. Contact us if you want to access previous data."
+msgid "Displaying the last %{nb} backed up resources."
+msgstr ""
+
+#, elixir-autogen, elixir-format
+msgid "Download history details"
msgstr ""
#, elixir-autogen, elixir-format
diff --git a/apps/transport/test/transport_web/controllers/dataset_controller_test.exs b/apps/transport/test/transport_web/controllers/dataset_controller_test.exs
index 98debde22d..4aadaf8299 100644
--- a/apps/transport/test/transport_web/controllers/dataset_controller_test.exs
+++ b/apps/transport/test/transport_web/controllers/dataset_controller_test.exs
@@ -812,6 +812,95 @@ defmodule TransportWeb.DatasetControllerTest do
assert title == "Autocars longue distance"
end
+ test "resources_history_csv", %{conn: conn} do
+ # Using the real implementation to test end-to-end
+ Mox.stub_with(Transport.History.Fetcher.Mock, Transport.History.Fetcher.Database)
+
+ dataset = insert(:dataset)
+ resource = insert(:resource, dataset: dataset)
+ other_resource = insert(:resource, dataset: dataset)
+ # another resource, no history for this one
+ insert(:resource, dataset: dataset, format: "gtfs-rt")
+
+ rh1 =
+ insert(:resource_history,
+ resource_id: resource.id,
+ payload: %{"foo" => "bar", "permanent_url" => "https://example.com/1"}
+ )
+
+ mv =
+ insert(:multi_validation,
+ resource_history_id: rh1.id,
+ validator: "validator_name",
+ result: %{"validation_details" => 42}
+ )
+
+ insert(:resource_metadata, multi_validation_id: mv.id, metadata: %{"metadata" => 1337})
+
+ # resource_id is nil, but dataset_id is filled in the payload
+ # no resource_metadata/multi_validation associated
+ rh2 =
+ insert(:resource_history,
+ resource_id: nil,
+ payload: %{"dataset_id" => dataset.id, "bar" => "baz", "permanent_url" => "https://example.com/2"}
+ )
+
+ # another resource for this dataset
+ # no resource_metadata/multi_validation associated
+ rh3 =
+ insert(:resource_history,
+ resource_id: other_resource.id,
+ payload: %{"dataset_id" => dataset.id, "permanent_url" => "https://example.com/3"}
+ )
+
+ response = conn |> get(dataset_path(conn, :resources_history_csv, dataset.id))
+ content = response(response, 200)
+
+ # Check CSV header
+ assert content |> String.split("\r\n") |> hd() ==
+ "resource_history_id,resource_id,permanent_url,payload,validation_validator,validation_result,metadata,inserted_at"
+
+ # Check CSV content
+ assert [content] |> CSV.decode!(headers: true) |> Enum.to_list() == [
+ %{
+ "inserted_at" => to_string(rh3.inserted_at),
+ "metadata" => "null",
+ "payload" => Jason.encode!(rh3.payload),
+ "permanent_url" => "https://example.com/3",
+ "resource_history_id" => to_string(rh3.id),
+ "resource_id" => to_string(rh3.resource_id),
+ "validation_result" => "null",
+ "validation_validator" => ""
+ },
+ %{
+ "inserted_at" => to_string(rh2.inserted_at),
+ "metadata" => "null",
+ "payload" => Jason.encode!(rh2.payload),
+ "permanent_url" => "https://example.com/2",
+ "resource_history_id" => to_string(rh2.id),
+ "resource_id" => to_string(rh2.resource_id),
+ "validation_result" => "null",
+ "validation_validator" => ""
+ },
+ %{
+ "inserted_at" => to_string(rh1.inserted_at),
+ "metadata" => ~s|{"metadata":1337}|,
+ "payload" => Jason.encode!(rh1.payload),
+ "permanent_url" => "https://example.com/1",
+ "resource_history_id" => to_string(rh1.id),
+ "resource_id" => to_string(rh1.resource_id),
+ "validation_result" => ~s|{"validation_details":42}|,
+ "validation_validator" => "validator_name"
+ }
+ ]
+
+ assert response_content_type(response, :csv) == "text/csv; charset=utf-8"
+
+ assert Plug.Conn.get_resp_header(response, "content-disposition") == [
+ ~s(attachment; filename="historisation-dataset-#{dataset.id}-#{Date.utc_today() |> Date.to_iso8601()}.csv")
+ ]
+ end
+
defp dataset_page_title(content) do
content
|> Floki.parse_document!()