# Portions of this file are derived from Pleroma:
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social>
# SPDX-License-Identifier: AGPL-3.0-only
# Upstream: https://git.pleroma.social/pleroma/pleroma/blob/develop/lib/pleroma/web/activity_pub/transmogrifier.ex

defmodule Mobilizon.Federation.ActivityPub.Transmogrifier do
  @moduledoc """
  A module to handle the conversion from internal data to wire ActivityPub format and back.
  """

  alias Mobilizon.{Actors, Discussions, Events, Posts, Resources, Todos}
  alias Mobilizon.Actors.{Actor, Follower, Member}
  alias Mobilizon.Discussions.Comment
  alias Mobilizon.Events.{Event, Participant}
  alias Mobilizon.Posts.Post
  alias Mobilizon.Resources.Resource
  alias Mobilizon.Todos.{Todo, TodoList}

  alias Mobilizon.Federation.ActivityPub
  alias Mobilizon.Federation.ActivityPub.{Activity, Relay, Utils}
  alias Mobilizon.Federation.ActivityPub.Types.Ownable
  alias Mobilizon.Federation.ActivityStream.{Converter, Convertible}
  alias Mobilizon.Tombstone
  alias Mobilizon.Web.Email.{Group, Participation}

  require Logger

  def handle_incoming(%{"id" => nil}), do: :error
  def handle_incoming(%{"id" => ""}), do: :error

  def handle_incoming(%{"type" => "Flag"} = data) do
    with params <- Converter.Flag.as_to_model(data) do
      params = %{
        reporter_id: params["reporter"].id,
        reported_id: params["reported"].id,
        comments_ids: params["comments"] |> Enum.map(& &1.id),
        content: params["content"] || "",
        additional: %{
          "cc" => [params["reported"].url]
        },
        event_id: if(is_nil(params["event"]), do: nil, else: params["event"].id || nil),
        local: false
      }

      ActivityPub.flag(params, false)
    end
  end
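
  # Illustrative example (assumed payload shape, not taken from a fixture): a
  # minimal incoming `Flag` activity as this clause expects it. All URLs are
  # made up.
  #
  #     handle_incoming(%{
  #       "type" => "Flag",
  #       "actor" => "https://remote.tld/@alice",
  #       "object" => ["https://instance.tld/@bob", "https://instance.tld/comments/1"],
  #       "content" => "Spam"
  #     })
  #
  # `Converter.Flag.as_to_model/1` resolves those URLs into the reporter, the
  # reported actor, the flagged comments and (optionally) the event, and
  # `ActivityPub.flag/2` then persists the report as a non-local one.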

  @doc """
  Handles a `Create` activity for `Note` (comment) objects.

  The following actions are performed:
  * Fetch the author of the activity
  * Convert the ActivityStream data to the comment model format (this also finds and inserts tags)
  * Get (by its URL) or create the comment with this data
  * Insert any mentions in the database
  * Convert the comment back into ActivityStreams data
  * Wrap this data back into a `Create` activity
  * Return the activity and the comment object

  An illustrative payload is sketched in the comment following this clause.
  """
  def handle_incoming(%{"type" => "Create", "object" => %{"type" => "Note"} = object}) do
    Logger.info("Handle incoming to create notes")

    with object_data when is_map(object_data) <-
           object |> Converter.Comment.as_to_model_data(),
         {:existing_comment, {:error, :comment_not_found}} <-
           {:existing_comment, Discussions.get_comment_from_url_with_preload(object_data.url)},
         object_data <- transform_object_data_for_discussion(object_data) do
      # TODO: this check should be more robust
      {:ok, %Activity{} = activity, entity} =
        if is_data_for_comment_or_discussion?(object_data) do
          Logger.debug("Choosing to create a regular comment")
          ActivityPub.create(:comment, object_data, false)
        else
          Logger.debug("Choosing to initialize or add a comment to a conversation")
          ActivityPub.create(:discussion, object_data, false)
        end

      {:ok, activity, entity}
    else
      {:existing_comment, {:ok, %Comment{} = comment}} ->
        {:ok, nil, comment}
    end
  end
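
  # Illustrative example (assumed payload shape): a remote comment wrapped in a
  # `Create` activity, as matched above. Only the "type" keys are required by
  # the pattern; the other fields are typical ActivityStreams properties.
  #
  #     handle_incoming(%{
  #       "type" => "Create",
  #       "actor" => "https://remote.tld/@alice",
  #       "object" => %{
  #         "type" => "Note",
  #         "id" => "https://remote.tld/comments/42",
  #         "attributedTo" => "https://remote.tld/@alice",
  #         "content" => "<p>Hello!</p>",
  #         "inReplyTo" => "https://instance.tld/events/1"
  #       }
  #     })
  #
  # If the comment URL is unknown locally it is converted and created; if it
  # already exists, the existing %Comment{} is returned with a nil activity.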

  @doc """
  Handles a `Create` activity for `Event` objects.

  The following actions are performed:
  * Fetch the author of the activity
  * Convert the ActivityStream data to the event model format (this also finds and inserts tags)
  * Get (by its URL) or create the event with this data
  * Insert any mentions in the database
  * Convert the event back into ActivityStreams data
  * Wrap this data back into a `Create` activity
  * Return the activity and the event object

  An illustrative payload is sketched in the comment following this clause.
  """
  def handle_incoming(%{"type" => "Create", "object" => %{"type" => "Event"} = object}) do
    Logger.info("Handle incoming to create event")

    with object_data when is_map(object_data) <-
           object |> Converter.Event.as_to_model_data(),
         {:existing_event, nil} <- {:existing_event, Events.get_event_by_url(object_data.url)},
         {:ok, %Activity{} = activity, %Event{} = event} <-
           ActivityPub.create(:event, object_data, false) do
      {:ok, activity, event}
    else
      {:existing_event, %Event{} = event} -> {:ok, nil, event}
      {:error, _, _} -> :error
      {:error, _} -> :error
    end
  end
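
  # Illustrative example (assumed payload shape): a remote event wrapped in a
  # `Create` activity. The URLs and fields are made up; only the "type" keys
  # are required by the pattern above.
  #
  #     handle_incoming(%{
  #       "type" => "Create",
  #       "actor" => "https://remote.tld/@organizer",
  #       "object" => %{
  #         "type" => "Event",
  #         "id" => "https://remote.tld/events/1",
  #         "name" => "Picnic in the park",
  #         "startTime" => "2020-08-01T10:00:00Z"
  #       }
  #     })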

  def handle_incoming(%{
        "type" => "Create",
        "object" => %{"type" => "Group", "id" => group_url} = _object
      }) do
    Logger.info("Handle incoming to create a group")

    with {:ok, %Actor{} = group} <- ActivityPub.get_or_fetch_actor_by_url(group_url) do
      {:ok, nil, group}
    end
  end

  def handle_incoming(%{
        "type" => "Create",
        "object" => %{"type" => "Member"} = object
      }) do
    Logger.info("Handle incoming to create a member")

    with object_data when is_map(object_data) <-
           object |> Converter.Member.as_to_model_data(),
         {:existing_member, nil} <-
           {:existing_member, Actors.get_member_by_url(object_data.url)},
         {:ok, %Activity{} = activity, %Member{} = member} <-
           ActivityPub.join_group(object_data, false) do
      {:ok, activity, member}
    else
      {:existing_member, %Member{} = member} ->
        {:ok, nil, member}
    end
  end

  def handle_incoming(%{
        "type" => "Create",
        "object" =>
          %{"type" => "Article", "actor" => _actor, "attributedTo" => _attributed_to} = object
      }) do
    Logger.info("Handle incoming to create articles")

    with object_data when is_map(object_data) <-
           object |> Converter.Post.as_to_model_data(),
         {:existing_post, nil} <-
           {:existing_post, Posts.get_post_by_url(object_data.url)},
         {:ok, %Activity{} = activity, %Post{} = post} <-
           ActivityPub.create(:post, object_data, false) do
      {:ok, activity, post}
    else
      {:existing_post, %Post{} = post} ->
        {:ok, nil, post}
    end
  end

  # This is a hack to handle Tombstones fetched by AP
  def handle_incoming(%{
        "type" => "Create",
        "object" => %{"type" => "Tombstone", "id" => object_url} = _object
      }) do
    Logger.info("Handle incoming to create a tombstone")

    case ActivityPub.fetch_object_from_url(object_url, force: true) do
      # We already have the tombstone, object is probably already deleted
      {:ok, %Tombstone{} = tombstone} ->
        {:ok, nil, tombstone}

      # Hack because deleted comments are kept as %Comment{} with deleted_at set
      {:ok, %Comment{deleted_at: deleted_at} = comment} when not is_nil(deleted_at) ->
        {:ok, nil, comment}

      {:ok, entity} ->
        ActivityPub.delete(entity, Relay.get_actor(), false)
    end
  end

  def handle_incoming(
        %{"type" => "Follow", "object" => followed, "actor" => follower, "id" => id} = _data
      ) do
    with {:ok, %Actor{} = followed} <- ActivityPub.get_or_fetch_actor_by_url(followed, true),
         {:ok, %Actor{} = follower} <- ActivityPub.get_or_fetch_actor_by_url(follower),
         {:ok, activity, object} <- ActivityPub.follow(follower, followed, id, false) do
      {:ok, activity, object}
    else
      e ->
        Logger.warn("Unable to handle Follow activity #{inspect(e)}")
        :error
    end
  end
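
  # Illustrative example (assumed payload shape): a remote actor following a
  # local actor (for instance a group or the instance relay).
  #
  #     handle_incoming(%{
  #       "type" => "Follow",
  #       "actor" => "https://remote.tld/@alice",
  #       "object" => "https://instance.tld/@some_group",
  #       "id" => "https://remote.tld/follows/1"
  #     })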

  def handle_incoming(%{
        "type" => "Create",
        "object" => %{"type" => "TodoList", "id" => object_url} = object,
        "actor" => actor_url
      }) do
    Logger.info("Handle incoming to create a todo list")

    with {:existing_todo_list, nil} <-
           {:existing_todo_list, Todos.get_todo_list_by_url(object_url)},
         {:ok, %Actor{url: actor_url}} <- ActivityPub.get_or_fetch_actor_by_url(actor_url),
         object_data when is_map(object_data) <-
           object |> Converter.TodoList.as_to_model_data(),
         {:ok, %Activity{} = activity, %TodoList{} = todo_list} <-
           ActivityPub.create(:todo_list, object_data, false, %{"actor" => actor_url}) do
      {:ok, activity, todo_list}
    else
      {:error, :group_not_found} -> :error
      {:existing_todo_list, %TodoList{} = todo_list} -> {:ok, nil, todo_list}
    end
  end

  def handle_incoming(%{
        "type" => "Create",
        "object" => %{"type" => "Todo", "id" => object_url} = object
      }) do
    Logger.info("Handle incoming to create a todo")

    with {:existing_todo, nil} <-
           {:existing_todo, Todos.get_todo_by_url(object_url)},
         object_data <-
           object |> Converter.Todo.as_to_model_data(),
         {:ok, %Activity{} = activity, %Todo{} = todo} <-
           ActivityPub.create(:todo, object_data, false) do
      {:ok, activity, todo}
    else
      {:existing_todo, %Todo{} = todo} -> {:ok, nil, todo}
    end
  end

  def handle_incoming(
        %{
          "type" => activity_type,
          "object" => %{"type" => object_type, "id" => object_url} = object,
          "to" => to
        } = data
      )
      when activity_type in ["Create", "Add"] and
             object_type in ["Document", "ResourceCollection"] do
    Logger.info("Handle incoming to create a resource")
    Logger.debug(inspect(data))

    with {:existing_resource, nil} <-
           {:existing_resource, Resources.get_resource_by_url(object_url)},
         object_data when is_map(object_data) <-
           object |> Converter.Resource.as_to_model_data(),
         {:member, true} <-
           {:member, Actors.is_member?(object_data.creator_id, object_data.actor_id)},
         {:ok, %Activity{} = activity, %Resource{} = resource} <-
           ActivityPub.create(:resource, object_data, false) do
      {:ok, activity, resource}
    else
      {:existing_resource, %Resource{} = resource} ->
        {:ok, nil, resource}

      {:member, false} ->
        # At some point this should refresh the list of group members
        # if the group is not local before simply returning an error
        :error

      {:error, e} ->
        Logger.debug(inspect(e))
        :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Accept",
          "object" => accepted_object,
          "actor" => _actor,
          "id" => id
        } = data
      ) do
    with actor_url <- Utils.get_actor(data),
         {:ok, %Actor{} = actor} <- ActivityPub.get_or_fetch_actor_by_url(actor_url),
         {:object_not_found, {:ok, %Activity{} = activity, object}} <-
           {:object_not_found,
            do_handle_incoming_accept_following(accepted_object, actor) ||
              do_handle_incoming_accept_join(accepted_object, actor)} do
      {:ok, activity, object}
    else
      {:object_not_found, nil} ->
        Logger.warn(
          "Unable to process Accept activity #{inspect(id)}. Object #{inspect(accepted_object)} wasn't found."
        )

        :error

      e ->
        Logger.warn(
          "Unable to process Accept activity #{inspect(id)} for object #{inspect(accepted_object)} only returned #{
            inspect(e)
          }"
        )

        :error
    end
  end
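
  # Illustrative example (assumed payload shape): a remote instance accepting a
  # follow request previously sent by this instance.
  #
  #     handle_incoming(%{
  #       "type" => "Accept",
  #       "actor" => "https://remote.tld/@bob",
  #       "object" => "https://instance.tld/follows/1",
  #       "id" => "https://remote.tld/accepts/1"
  #     })
  #
  # The wrapped object may be either a `Follow` or a `Join`;
  # `do_handle_incoming_accept_following/2` is tried first, then
  # `do_handle_incoming_accept_join/2`.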

  def handle_incoming(
        %{"type" => "Reject", "object" => rejected_object, "actor" => _actor, "id" => id} = data
      ) do
    with actor_url <- Utils.get_actor(data),
         {:ok, %Actor{} = actor} <- ActivityPub.get_or_fetch_actor_by_url(actor_url),
         {:object_not_found, {:ok, activity, object}} <-
           {:object_not_found,
            do_handle_incoming_reject_following(rejected_object, actor) ||
              do_handle_incoming_reject_join(rejected_object, actor)} do
      {:ok, activity, object}
    else
      {:object_not_found, nil} ->
        Logger.warn(
          "Unable to process Reject activity #{inspect(id)}. Object #{inspect(rejected_object)} wasn't found."
        )

        :error

      e ->
        Logger.warn(
          "Unable to process Reject activity #{inspect(id)} for object #{inspect(rejected_object)} only returned #{
            inspect(e)
          }"
        )

        :error
    end
  end

  def handle_incoming(
        %{"type" => "Announce", "object" => object, "actor" => _actor, "id" => _id} = data
      ) do
    with actor_url <- Utils.get_actor(data),
         {:ok, %Actor{id: actor_id, suspended: false} = actor} <-
           ActivityPub.get_or_fetch_actor_by_url(actor_url),
         :ok <- Logger.debug("Fetching contained object"),
         {:ok, entity} <-
           object |> Utils.get_url() |> fetch_object_optionnally_authenticated(actor),
         :ok <- eventually_create_share(object, entity, actor_id) do
      {:ok, nil, entity}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end
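
  # Illustrative example (assumed payload shape): a remote group or relay
  # announcing (boosting) an object. The object is fetched (through a group
  # member when the announcer is a group) and a share record is stored.
  #
  #     handle_incoming(%{
  #       "type" => "Announce",
  #       "actor" => "https://remote.tld/@somegroup",
  #       "object" => "https://remote.tld/events/1",
  #       "id" => "https://remote.tld/announces/1"
  #     })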

  def handle_incoming(%{
        "type" => "Update",
        "object" => %{"type" => object_type} = object,
        "actor" => _actor_id
      })
      when object_type in ["Person", "Group", "Application", "Service", "Organization"] do
    with {:ok, %Actor{suspended: false} = old_actor} <-
           ActivityPub.get_or_fetch_actor_by_url(object["id"]),
         object_data <-
           object |> Converter.Actor.as_to_model_data(),
         {:ok, %Activity{} = activity, %Actor{} = new_actor} <-
           ActivityPub.update(old_actor, object_data, false) do
      {:ok, activity, new_actor}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end
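
  # Illustrative example (assumed payload shape): a remote profile update. The
  # "id" of the embedded object is used to look up the known actor before the
  # converted data is applied.
  #
  #     handle_incoming(%{
  #       "type" => "Update",
  #       "actor" => "https://remote.tld/@alice",
  #       "object" => %{
  #         "type" => "Person",
  #         "id" => "https://remote.tld/@alice",
  #         "name" => "Alice (new display name)"
  #       }
  #     })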

  def handle_incoming(
        %{"type" => "Update", "object" => %{"type" => "Event"} = object, "actor" => _actor} =
          update_data
      ) do
    with actor <- Utils.get_actor(update_data),
         {:ok, %Actor{url: actor_url, suspended: false}} <-
           ActivityPub.get_or_fetch_actor_by_url(actor),
         {:ok, %Event{} = old_event} <-
           object |> Utils.get_url() |> ActivityPub.fetch_object_from_url(),
         object_data <- Converter.Event.as_to_model_data(object),
         {:origin_check, true} <- {:origin_check, Utils.origin_check?(actor_url, update_data)},
         {:ok, %Activity{} = activity, %Event{} = new_event} <-
           ActivityPub.update(old_event, object_data, false) do
      {:ok, activity, new_event}
    else
      _e ->
        :error
    end
  end

  def handle_incoming(
        %{"type" => "Update", "object" => %{"type" => "Note"} = object, "actor" => _actor} =
          update_data
      ) do
    with actor <- Utils.get_actor(update_data),
         {:ok, %Actor{url: actor_url, suspended: false}} <-
           ActivityPub.get_or_fetch_actor_by_url(actor),
         {:origin_check, true} <- {:origin_check, Utils.origin_check?(actor_url, update_data)},
         object_data <- Converter.Comment.as_to_model_data(object),
         {:ok, old_entity} <- object |> Utils.get_url() |> ActivityPub.fetch_object_from_url(),
         object_data <- transform_object_data_for_discussion(object_data),
         {:ok, %Activity{} = activity, new_entity} <-
           ActivityPub.update(old_entity, object_data, false) do
      {:ok, activity, new_entity}
    else
      _e ->
        :error
    end
  end

  def handle_incoming(%{
        "type" => "Update",
        "object" => %{"type" => "Tombstone"} = object,
        "actor" => _actor
      }) do
    Logger.info("Handle incoming to update a tombstone")

    with object_url <- Utils.get_url(object),
         {:ok, entity} <- ActivityPub.fetch_object_from_url(object_url) do
      ActivityPub.delete(entity, Relay.get_actor(), false)
    else
      {:ok, %Tombstone{} = tombstone} ->
        {:ok, nil, tombstone}
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{
            "type" => "Announce",
            "object" => object_id,
            "id" => cancelled_activity_id
          },
          "actor" => _actor,
          "id" => id
        } = data
      ) do
    with actor <- Utils.get_actor(data),
         {:ok, %Actor{} = actor} <- ActivityPub.get_or_fetch_actor_by_url(actor),
         {:ok, object} <- fetch_obj_helper_as_activity_streams(object_id),
         {:ok, activity, object} <-
           ActivityPub.unannounce(actor, object, id, cancelled_activity_id, false) do
      {:ok, activity, object}
    else
      _e -> :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Undo",
          "object" => %{"type" => "Follow", "object" => followed},
          "actor" => follower,
          "id" => id
        } = _data
      ) do
    with {:ok, %Actor{domain: nil} = followed} <- ActivityPub.get_or_fetch_actor_by_url(followed),
         {:ok, %Actor{} = follower} <- ActivityPub.get_or_fetch_actor_by_url(follower),
         {:ok, activity, object} <- ActivityPub.unfollow(follower, followed, id, false) do
      {:ok, activity, object}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end

  # We assume everyone on the same instance as the object
  # or who is a member of a group has the right to delete the object
  def handle_incoming(
        %{"type" => "Delete", "object" => object, "actor" => _actor, "id" => _id} = data
      ) do
    with actor_url <- Utils.get_actor(data),
         {:ok, %Actor{} = actor} <- ActivityPub.get_or_fetch_actor_by_url(actor_url),
         object_id <- Utils.get_url(object),
         {:ok, object} <- ActivityPub.fetch_object_from_url(object_id),
         {:origin_check, true} <-
           {:origin_check,
            Utils.origin_check_from_id?(actor_url, object_id) ||
              Utils.activity_actor_is_group_member?(actor, object)},
         {:ok, activity, object} <- ActivityPub.delete(object, actor, false) do
      {:ok, activity, object}
    else
      {:origin_check, false} ->
        Logger.warn("Object origin check failed")
        :error

      e ->
        Logger.debug(inspect(e))
        :error
    end
  end
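
  # Illustrative example (assumed payload shape): a remote deletion of an
  # object this instance knows about. The origin check above only lets the
  # deletion through if the actor shares the object's origin or is a member of
  # the group owning it.
  #
  #     handle_incoming(%{
  #       "type" => "Delete",
  #       "actor" => "https://remote.tld/@alice",
  #       "object" => "https://remote.tld/comments/42",
  #       "id" => "https://remote.tld/deletes/1"
  #     })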

  def handle_incoming(
        %{
          "type" => "Join",
          "object" => object,
          "actor" => _actor,
          "id" => id,
          "participationMessage" => note
        } = data
      ) do
    with actor <- Utils.get_actor(data),
         {:ok, %Actor{url: _actor_url, suspended: false} = actor} <-
           ActivityPub.get_or_fetch_actor_by_url(actor),
         object <- Utils.get_url(object),
         {:ok, object} <- ActivityPub.fetch_object_from_url(object),
         {:ok, activity, object} <-
           ActivityPub.join(object, actor, false, %{url: id, metadata: %{message: note}}) do
      {:ok, activity, object}
    else
      e ->
        Logger.debug(inspect(e))
        :error
    end
  end
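
  # Illustrative example (assumed payload shape): a remote participant joining
  # a local event, with the optional note matched above as
  # "participationMessage" (not a standard ActivityStreams property).
  #
  #     handle_incoming(%{
  #       "type" => "Join",
  #       "actor" => "https://remote.tld/@alice",
  #       "object" => "https://instance.tld/events/1",
  #       "id" => "https://remote.tld/joins/1",
  #       "participationMessage" => "Hi, I'd like to come!"
  #     })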

  def handle_incoming(
        %{"type" => "Leave", "object" => object, "actor" => actor, "id" => _id} = data
      ) do
    with actor <- Utils.get_actor(data),
         {:ok, %Actor{} = actor} <- ActivityPub.get_or_fetch_actor_by_url(actor),
         object <- Utils.get_url(object),
         {:ok, object} <- ActivityPub.fetch_object_from_url(object),
         {:ok, activity, object} <- ActivityPub.leave(object, actor, false) do
      {:ok, activity, object}
    else
      {:only_organizer, true} ->
        Logger.warn(
          "Actor #{inspect(actor)} tried to leave event #{inspect(object)} but it was the only organizer so we didn't detach it"
        )

        :error

      _e ->
        :error
    end
  end

  def handle_incoming(
        %{
          "type" => "Invite",
          "object" => object,
          "actor" => _actor,
          "id" => id,
          "target" => target
        } = data
      ) do
    Logger.info("Handle incoming to invite someone")

    with {:ok, %Actor{} = actor} <-
           data |> Utils.get_actor() |> ActivityPub.get_or_fetch_actor_by_url(),
         {:ok, object} <- object |> Utils.get_url() |> ActivityPub.fetch_object_from_url(),
         {:ok, %Actor{} = target} <-
           target |> Utils.get_url() |> ActivityPub.get_or_fetch_actor_by_url(),
         {:ok, activity, %Member{} = member} <-
           ActivityPub.invite(object, actor, target, false, %{url: id}),
         :ok <- Group.send_invite_to_user(member) do
      {:ok, activity, member}
    end
  end

  #
  # # TODO
  # # Accept
  # # Undo
  #
  # def handle_incoming(
  #       %{
  #         "type" => "Undo",
  #         "object" => %{"type" => "Like", "object" => object_id},
  #         "actor" => _actor,
  #         "id" => id
  #       } = data
  #     ) do
  #   with actor <- Utils.get_actor(data),
  #        %Actor{} = actor <- ActivityPub.get_or_fetch_actor_by_url(actor),
  #        {:ok, object} <- fetch_obj_helper(object_id) || fetch_obj_helper(object_id),
  #        {:ok, activity, _, _} <- ActivityPub.unlike(actor, object, id, false) do
  #     {:ok, activity}
  #   else
  #     _e -> :error
  #   end
  # end

  def handle_incoming(object) do
    Logger.info("Handling something with type #{object["type"]} not supported")
    Logger.debug(inspect(object))
    {:error, :not_supported}
  end

  @doc """
  Handle incoming `Accept` activities wrapping a `Follow` activity
  """
  def do_handle_incoming_accept_following(follow_object, %Actor{} = actor) do
    with {:follow, {:ok, %Follower{approved: false, target_actor: followed} = follow}} <-
           {:follow, get_follow(follow_object)},
         {:same_actor, true} <- {:same_actor, actor.id == followed.id},
         {:ok, %Activity{} = activity, %Follower{approved: true} = follow} <-
           ActivityPub.accept(
             :follow,
             follow,
             false
           ) do
      {:ok, activity, follow}
    else
      {:follow, _} ->
        Logger.debug(
          "Tried to handle an Accept activity but it's not containing a Follow activity"
        )

        nil

      {:same_actor, false} ->
        {:error, "Actor who accepted the follow wasn't the target. Quite odd."}

      {:ok, %Follower{approved: true} = _follow} ->
        {:error, "Follow already accepted"}
    end
  end

  @doc """
  Handle incoming `Reject` activities wrapping a `Follow` activity
  """
  def do_handle_incoming_reject_following(follow_object, %Actor{} = actor) do
    with {:follow, {:ok, %Follower{target_actor: followed} = follow}} <-
           {:follow, get_follow(follow_object)},
         {:same_actor, true} <- {:same_actor, actor.id == followed.id},
         {:ok, activity, _} <-
           ActivityPub.reject(:follow, follow) do
      {:ok, activity, follow}
    else
      {:follow, _err} ->
        Logger.debug(
          "Tried to handle a Reject activity but it's not containing a Follow activity"
        )

        nil

      {:same_actor, false} ->
        {:error, "Actor who rejected the follow wasn't the target. Quite odd."}

      {:ok, %Follower{approved: true} = _follow} ->
        {:error, "Follow already accepted"}
    end
  end

  # Handle incoming `Accept` activities wrapping a `Join` activity on an event
  defp do_handle_incoming_accept_join(join_object, %Actor{} = actor_accepting) do
    case get_participant(join_object) do
      {:ok, participant} ->
        do_handle_incoming_accept_join_event(participant, actor_accepting)

      {:error, _err} ->
        case get_member(join_object) do
          {:ok, member} ->
            do_handle_incoming_accept_join_group(member, actor_accepting)

          {:error, _err} ->
            nil
        end
    end
  end

  defp do_handle_incoming_accept_join_event(%Participant{role: :participant}, _actor) do
    Logger.debug(
      "Tried to handle an Accept activity on a Join activity with an event object but the participant is already validated"
    )

    nil
  end

  defp do_handle_incoming_accept_join_event(
         %Participant{role: role, event: event} = participant,
         %Actor{} = actor_accepting
       )
       when role in [:not_approved, :rejected] do
    # TODO: The actor that accepts the Join activity may be another one than the event organizer?
    # Or maybe for groups it's the group that sends the Accept activity
    with {:same_actor, true} <- {:same_actor, actor_accepting.id == event.organizer_actor_id},
         {:ok, %Activity{} = activity, %Participant{role: :participant} = participant} <-
           ActivityPub.accept(
             :join,
             participant,
             false
           ),
         :ok <-
           Participation.send_emails_to_local_user(participant) do
      {:ok, activity, participant}
    else
      {:same_actor, false} ->
        {:error, "Actor who accepted the join wasn't the event organizer. Quite odd."}

      {:ok, %Participant{role: :participant} = _follow} ->
        {:error, "Participant"}
    end
  end

  defp do_handle_incoming_accept_join_group(%Member{role: :member}, _actor) do
    Logger.debug(
      "Tried to handle an Accept activity on a Join activity with a group object but the member is already validated"
    )

    nil
  end

  defp do_handle_incoming_accept_join_group(
         %Member{role: role, parent: _group} = member,
         %Actor{} = _actor_accepting
       )
       when role in [:not_approved, :rejected, :invited] do
    # TODO: The actor that accepts the Join activity may be another one than the event organizer?
    # Or maybe for groups it's the group that sends the Accept activity
    with {:ok, %Activity{} = activity, %Member{role: :member} = member} <-
           ActivityPub.accept(
             :invite,
             member,
             false
           ) do
      {:ok, activity, member}
    end
  end

  # Handle incoming `Reject` activities wrapping a `Join` activity on an event
  defp do_handle_incoming_reject_join(join_object, %Actor{} = actor_accepting) do
    with {:join_event, {:ok, %Participant{event: event, role: role} = participant}}
         when role != :rejected <-
           {:join_event, get_participant(join_object)},
         # TODO: The actor that accepts the Join activity may be another one than the event organizer?
         # Or maybe for groups it's the group that sends the Accept activity
         {:same_actor, true} <- {:same_actor, actor_accepting.id == event.organizer_actor_id},
         {:ok, activity, participant} <-
           ActivityPub.reject(:join, participant, false),
         :ok <- Participation.send_emails_to_local_user(participant) do
      {:ok, activity, participant}
    else
      {:join_event, {:ok, %Participant{role: :rejected}}} ->
        Logger.warn(
          "Tried to handle a Reject activity on a Join activity with an event object but the participant is already rejected"
        )

        nil

      {:join_event, _err} ->
        Logger.debug(
          "Tried to handle a Reject activity but it's not containing a Join activity on an event"
        )

        nil

      {:same_actor, false} ->
        {:error, "Actor who rejected the join wasn't the event organizer. Quite odd."}

      {:ok, %Participant{role: :participant} = _follow} ->
        {:error, "Participant"}
    end
  end

  # If the object has been announced by a group, let's use one of our members to fetch it
  @spec fetch_object_optionnally_authenticated(String.t(), Actor.t() | any()) ::
          {:ok, struct()} | {:error, any()}
  defp fetch_object_optionnally_authenticated(url, %Actor{type: :Group, id: group_id}) do
    case Actors.get_single_group_member_actor(group_id) do
      %Actor{} = actor ->
        ActivityPub.fetch_object_from_url(url, on_behalf_of: actor, force: true)

      _err ->
        fetch_object_optionnally_authenticated(url, nil)
    end
  end

  defp fetch_object_optionnally_authenticated(url, _),
    do: ActivityPub.fetch_object_from_url(url, force: true)

  defp eventually_create_share(object, entity, actor_id) do
    with object_id <- object |> Utils.get_url(),
         %Actor{id: object_owner_actor_id} <- Ownable.actor(entity) do
      {:ok, %Mobilizon.Share{} = _share} =
        Mobilizon.Share.create(object_id, actor_id, object_owner_actor_id)
    end

    :ok
  end

  @spec is_data_for_comment_or_discussion?(map()) :: boolean()
  defp is_data_for_comment_or_discussion?(object_data) do
    (not Map.has_key?(object_data, :title) or
       is_nil(object_data.title) or object_data.title == "") and
      is_nil(object_data.discussion_id)
  end

  # Comments and conversations have different attributes for actors and groups
  defp transform_object_data_for_discussion(object_data) do
    # Basic comment
    if is_data_for_comment_or_discussion?(object_data) do
      object_data
    else
      # Conversation
      object_data
      |> Map.put(:creator_id, object_data.actor_id)
      |> Map.put(:actor_id, object_data.attributed_to_id)
    end
  end
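
  # Illustrative example (field names taken from the converter output used
  # above): converted data with no title and no discussion_id is treated as a
  # plain comment; otherwise it belongs to a group discussion and the
  # actor/creator attributes are swapped accordingly.
  #
  #     is_data_for_comment_or_discussion?(%{title: nil, discussion_id: nil, actor_id: 1})
  #     #=> true
  #
  #     is_data_for_comment_or_discussion?(%{title: "Weekly meeting", discussion_id: nil,
  #       actor_id: 1, attributed_to_id: 2})
  #     #=> false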

  defp get_follow(follow_object) do
    with follow_object_id when not is_nil(follow_object_id) <- Utils.get_url(follow_object),
         {:not_found, %Follower{} = follow} <-
           {:not_found, Actors.get_follower_by_url(follow_object_id)} do
      {:ok, follow}
    else
      {:not_found, _err} ->
        {:error, "Follow URL not found"}

      _ ->
        {:error, "ActivityPub ID not found in Accept Follow object"}
    end
  end

  defp get_participant(join_object) do
    with join_object_id when not is_nil(join_object_id) <- Utils.get_url(join_object),
         {:not_found, %Participant{} = participant} <-
           {:not_found, Events.get_participant_by_url(join_object_id)} do
      {:ok, participant}
    else
      {:not_found, _err} ->
        {:error, "Participant URL not found"}

      _ ->
        {:error, "ActivityPub ID not found in Accept Join object"}
    end
  end

  @spec get_member(String.t() | map()) :: {:ok, Member.t()} | {:error, String.t()}
  defp get_member(member_object) do
    with member_object_id when not is_nil(member_object_id) <- Utils.get_url(member_object),
         %Member{} = member <-
           Actors.get_member_by_url(member_object_id) do
      {:ok, member}
    else
      {:error, :member_not_found} ->
        {:error, "Member URL not found"}

      _ ->
        {:error, "ActivityPub ID not found in Accept Join object"}
    end
  end

  def prepare_outgoing(%{"type" => _type} = data) do
    data =
      data
      |> Map.merge(Utils.make_json_ld_header())

    {:ok, data}
  end
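
  # Illustrative usage (a sketch; assumes `Utils.make_json_ld_header/0` returns
  # the JSON-LD "@context" entries): outgoing data is simply the internal
  # ActivityStreams map with the JSON-LD header merged in.
  #
  #     {:ok, data} = prepare_outgoing(%{"type" => "Create", "object" => %{"type" => "Note"}})
  #     # data now also contains the JSON-LD header keys returned by
  #     # Utils.make_json_ld_header/0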

  @spec fetch_obj_helper(map() | String.t()) :: Event.t() | Comment.t() | Actor.t() | any()
  def fetch_obj_helper(object) do
    Logger.debug("fetch_obj_helper")
    Logger.debug("Fetching object #{inspect(object)}")

    case object |> Utils.get_url() |> ActivityPub.fetch_object_from_url() do
      {:ok, object} ->
        {:ok, object}

      err ->
        Logger.warn("Error while fetching #{inspect(object)}")
        {:error, err}
    end
  end

  def fetch_obj_helper_as_activity_streams(object) do
    Logger.debug("fetch_obj_helper_as_activity_streams")

    with {:ok, object} <- fetch_obj_helper(object) do
      {:ok, Convertible.model_to_as(object)}
    end
  end
end