From 14cf4223ae7be7921e5f7505cce06a3be53df427 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Martin=20Wo=CC=88ginger?= Date: Sun, 1 Sep 2024 19:49:08 +0200 Subject: [PATCH] extract urls in async task --- lib/radiator/outline/node_change_listener.ex | 38 +++++++++++++++++--- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/lib/radiator/outline/node_change_listener.ex b/lib/radiator/outline/node_change_listener.ex index bf159821..676b2853 100644 --- a/lib/radiator/outline/node_change_listener.ex +++ b/lib/radiator/outline/node_change_listener.ex @@ -4,7 +4,18 @@ defmodule Radiator.Outline.NodeChangeListener do It is an eventconsumer that listens to changes in the outline and starts workers """ use GenServer + + alias Radiator.Outline.Event.{ + NodeContentChangedEvent, + NodeDeletedEvent, + NodeInsertedEvent, + NodeMovedEvent + } + alias Radiator.Outline.Dispatch + alias Radiator.UrlExtractor + + require Logger def start_link(_) do GenServer.start_link(__MODULE__, :ok, []) @@ -15,19 +26,38 @@ defmodule Radiator.Outline.NodeChangeListener do {:ok, []} end - def handle_info(%Radiator.Outline.Event.NodeContentChangedEvent{} = _event, state) do + def handle_info(%NodeContentChangedEvent{node_id: node_id, content: content}, state) do + scan_content_for_urls(node_id, content) + {:noreply, state} + end + + def handle_info(%NodeInsertedEvent{} = _event, state) do {:noreply, state} end - def handle_info(%Radiator.Outline.Event.NodeInsertedEvent{} = _event, state) do + def handle_info(%NodeMovedEvent{} = _event, state) do {:noreply, state} end - def handle_info(%Radiator.Outline.Event.NodeMovedEvent{} = _event, state) do + def handle_info(%NodeDeletedEvent{} = _event, state) do {:noreply, state} end - def handle_info(%Radiator.Outline.Event.NodeDeletedEvent{} = _event, state) do + def handle_info(_reference, state) do + Logger.warning("Unknown event type") {:noreply, state} end + + defp scan_content_for_urls(_node_id, nil), do: nil + + defp 
scan_content_for_urls(_node_id, content) do
  # Fire-and-forget background extraction of URLs from the node's content.
  #
  # FIX: the original used Task.async/1, but this GenServer never awaits the
  # task. Task.async/1 links and monitors the caller, so the task's
  # {ref, result} reply and :DOWN message arrive in handle_info/2 and hit the
  # catch-all clause, logging a spurious "Unknown event type" warning for
  # every content change — and a crash in the task would take the listener
  # down via the link. Task.start/1 is the side-effect-only primitive: no
  # link to the caller, no reply message to drain.
  Task.start(fn ->
    result = UrlExtractor.extract_urls(content)
    Logger.debug("Extracted #{Enum.count(result)} Urls!!")

    Enum.each(result, fn info ->
      Logger.debug("URL: #{info.url}")
    end)
  end)
end
end