# Portions of this file are derived from Pleroma:
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social>
# SPDX-License-Identifier: AGPL-3.0-only
# Upstream: https://git.pleroma.social/pleroma/pleroma/blob/develop/lib/pleroma/web/activity_pub/activity_pub.ex

defmodule Mobilizon.Service.ActivityPub do
  @moduledoc """
  The ActivityPub context.
  """

  import Mobilizon.Service.ActivityPub.{Utils, Visibility}

  alias Mobilizon.{Actors, Config, Events}
  alias Mobilizon.Actors.{Actor, Follower}
  alias Mobilizon.Events.{Comment, Event, Participant}
  alias Mobilizon.Service.ActivityPub.{Activity, Converter, Convertible, Relay, Transmogrifier}
  alias Mobilizon.Service.{Federator, WebFinger}
  alias Mobilizon.Service.HTTPSignatures.Signature

  require Logger
@doc """
|
|
|
|
Get recipients for an activity or object
|
|
|
|
"""
|
|
|
|
@spec get_recipients(map()) :: list()
|
2018-05-17 11:32:23 +02:00
|
|
|
def get_recipients(data) do
|
|
|
|
(data["to"] || []) ++ (data["cc"] || [])
|
|
|
|
end
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
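
  # Illustrative example (not part of the upstream module; addresses are hypothetical):
  #
  #     get_recipients(%{
  #       "to" => ["https://www.w3.org/ns/activitystreams#Public"],
  #       "cc" => ["https://mobilizon.test/@alice/followers"]
  #     })
  #     #=> ["https://www.w3.org/ns/activitystreams#Public",
  #     #    "https://mobilizon.test/@alice/followers"]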
@doc """
|
|
|
|
Wraps an object into an activity
|
|
|
|
"""
|
2019-09-04 18:24:31 +02:00
|
|
|
@spec create_activity(map(), boolean()) :: {:ok, %Activity{}}
|
2019-09-03 08:38:04 +02:00
|
|
|
def create_activity(map, local \\ true) when is_map(map) do
|
|
|
|
with map <- lazy_put_activity_defaults(map) do
|
2019-09-04 18:24:31 +02:00
|
|
|
{:ok,
|
|
|
|
%Activity{
|
|
|
|
data: map,
|
|
|
|
local: local,
|
|
|
|
actor: map["actor"],
|
|
|
|
recipients: get_recipients(map)
|
|
|
|
}}
|
2018-05-17 11:32:23 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
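
  # Sketch of how an activity gets wrapped (hypothetical data, not from the codebase).
  # `lazy_put_activity_defaults/1` may fill in missing fields such as "id" or
  # "published" before the struct is built.
  #
  #     {:ok, %Activity{local: true, actor: "https://mobilizon.test/@alice"}} =
  #       create_activity(%{
  #         "type" => "Create",
  #         "actor" => "https://mobilizon.test/@alice",
  #         "to" => ["https://www.w3.org/ns/activitystreams#Public"],
  #         "object" => %{"type" => "Event", "name" => "Picnic"}
  #       })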
@doc """
|
|
|
|
Fetch an object from an URL, from our local database of events and comments, then eventually remote
|
|
|
|
"""
|
2019-02-21 18:11:49 +01:00
|
|
|
# TODO: Make database calls parallel
|
2018-11-12 18:17:53 +01:00
|
|
|
@spec fetch_object_from_url(String.t()) :: {:ok, %Event{}} | {:ok, %Comment{}} | {:error, any()}
|
2018-08-24 11:34:00 +02:00
|
|
|
def fetch_object_from_url(url) do
|
2018-12-14 17:41:55 +01:00
|
|
|
Logger.info("Fetching object from url #{url}")
|
|
|
|
|
2019-07-30 10:35:29 +02:00
|
|
|
with {:not_http, true} <- {:not_http, String.starts_with?(url, "http")},
|
|
|
|
{:existing_event, nil} <- {:existing_event, Events.get_event_by_url(url)},
|
|
|
|
{:existing_comment, nil} <- {:existing_comment, Events.get_comment_from_url(url)},
|
|
|
|
{:existing_actor, {:error, :actor_not_found}} <-
|
|
|
|
{:existing_actor, Actors.get_actor_by_url(url)},
|
2018-08-24 11:34:00 +02:00
|
|
|
{:ok, %{body: body, status_code: code}} when code in 200..299 <-
|
|
|
|
HTTPoison.get(
|
|
|
|
url,
|
|
|
|
[Accept: "application/activity+json"],
|
|
|
|
follow_redirect: true,
|
|
|
|
timeout: 10_000,
|
|
|
|
recv_timeout: 20_000
|
|
|
|
),
|
|
|
|
{:ok, data} <- Jason.decode(body),
|
|
|
|
params <- %{
|
|
|
|
"type" => "Create",
|
|
|
|
"to" => data["to"],
|
|
|
|
"cc" => data["cc"],
|
|
|
|
"actor" => data["attributedTo"],
|
|
|
|
"object" => data
|
|
|
|
},
|
2019-07-30 10:35:29 +02:00
|
|
|
{:ok, _activity, %{url: object_url} = _object} <- Transmogrifier.handle_incoming(params) do
|
2018-08-24 11:34:00 +02:00
|
|
|
case data["type"] do
|
|
|
|
"Event" ->
|
2019-09-16 02:07:44 +02:00
|
|
|
{:ok, Events.get_public_event_by_url_with_preload!(object_url)}
|
2018-08-24 11:34:00 +02:00
|
|
|
|
|
|
|
"Note" ->
|
2019-09-16 02:07:44 +02:00
|
|
|
{:ok, Events.get_comment_from_url_with_preload!(object_url)}
|
2018-12-14 17:41:55 +01:00
|
|
|
|
2019-02-21 18:11:49 +01:00
|
|
|
"Actor" ->
|
2019-07-30 10:35:29 +02:00
|
|
|
{:ok, Actors.get_actor_by_url!(object_url, true)}
|
2019-02-21 18:11:49 +01:00
|
|
|
|
2018-12-14 17:41:55 +01:00
|
|
|
other ->
|
|
|
|
{:error, other}
|
2018-08-24 11:34:00 +02:00
|
|
|
end
|
|
|
|
else
|
2019-07-30 10:35:29 +02:00
|
|
|
{:existing_event, %Event{url: event_url}} ->
|
2019-09-16 02:07:44 +02:00
|
|
|
{:ok, Events.get_public_event_by_url_with_preload!(event_url)}
|
2019-07-30 10:35:29 +02:00
|
|
|
|
|
|
|
{:existing_comment, %Comment{url: comment_url}} ->
|
2019-09-16 02:07:44 +02:00
|
|
|
{:ok, Events.get_comment_from_url_with_preload!(comment_url)}
|
2019-07-30 10:35:29 +02:00
|
|
|
|
2019-07-30 16:40:59 +02:00
|
|
|
{:existing_actor, {:ok, %Actor{url: actor_url}}} ->
|
2019-07-30 10:35:29 +02:00
|
|
|
{:ok, Actors.get_actor_by_url!(actor_url, true)}
|
|
|
|
|
|
|
|
e ->
|
2019-07-30 16:40:59 +02:00
|
|
|
require Logger
|
|
|
|
Logger.error(inspect(e))
|
2019-07-30 10:35:29 +02:00
|
|
|
{:error, e}
|
2018-08-24 11:34:00 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-09-09 00:52:49 +02:00
|
|
|
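
  # Usage sketch (hypothetical URL, not from the codebase): local lookups are tried
  # before any HTTP request is made. Despite the spec, an `%Actor{}` can also come
  # back for actor URLs.
  #
  #     case fetch_object_from_url("https://remote.tld/events/some-uuid") do
  #       {:ok, %Event{} = event} -> event
  #       {:ok, %Comment{} = comment} -> comment
  #       {:ok, other} -> other
  #       {:error, err} -> err
  #     end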
@doc """
|
|
|
|
Getting an actor from url, eventually creating it
|
|
|
|
"""
|
|
|
|
@spec get_or_fetch_by_url(String.t(), boolean) :: {:ok, Actor.t()} | {:error, String.t()}
|
|
|
|
def get_or_fetch_by_url(url, preload \\ false) do
|
|
|
|
case Actors.get_actor_by_url(url, preload) do
|
|
|
|
{:ok, %Actor{} = actor} ->
|
|
|
|
{:ok, actor}
|
|
|
|
|
|
|
|
_ ->
|
|
|
|
case make_actor_from_url(url, preload) do
|
|
|
|
{:ok, %Actor{} = actor} ->
|
|
|
|
{:ok, actor}
|
|
|
|
|
|
|
|
_ ->
|
|
|
|
Logger.warn("Could not fetch by AP id")
|
|
|
|
|
|
|
|
{:error, "Could not fetch by AP id"}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
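
  # Usage sketch (hypothetical URL): resolve an actor, hitting the network only when
  # it is not already in the local database.
  #
  #     {:ok, %Actor{preferred_username: _username}} =
  #       get_or_fetch_by_url("https://remote.tld/@bob")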
@doc """
|
|
|
|
Create an activity of type "Create"
|
|
|
|
"""
|
2018-11-12 09:05:31 +01:00
|
|
|
def create(%{to: to, actor: actor, object: object} = params) do
|
|
|
|
Logger.debug("creating an activity")
|
2018-12-14 17:41:55 +01:00
|
|
|
Logger.debug(inspect(params))
|
|
|
|
Logger.debug(inspect(object))
|
2018-05-17 11:32:23 +02:00
|
|
|
additional = params[:additional] || %{}
|
|
|
|
# only accept false as false value
|
|
|
|
local = !(params[:local] == false)
|
|
|
|
published = params[:published]
|
|
|
|
|
|
|
|
with create_data <-
|
|
|
|
make_create_data(
|
2018-11-12 09:05:31 +01:00
|
|
|
%{to: to, actor: actor, published: published, object: object},
|
2018-05-17 11:32:23 +02:00
|
|
|
additional
|
|
|
|
),
|
2019-09-03 08:38:04 +02:00
|
|
|
{:ok, activity} <- create_activity(create_data, local),
|
|
|
|
{:ok, object} <- insert_full_object(create_data),
|
2018-05-17 11:32:23 +02:00
|
|
|
:ok <- maybe_federate(activity) do
|
2018-07-27 10:45:35 +02:00
|
|
|
# {:ok, actor} <- Actors.increase_event_count(actor) do
|
2019-07-30 10:35:29 +02:00
|
|
|
{:ok, activity, object}
|
2018-05-18 09:56:21 +02:00
|
|
|
else
|
|
|
|
err ->
|
2019-07-30 16:40:59 +02:00
|
|
|
Logger.error("Something went wrong while creating an activity")
|
|
|
|
Logger.debug(inspect(err))
|
2018-12-14 17:41:55 +01:00
|
|
|
err
|
2018-05-17 11:32:23 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-08-14 17:45:11 +02:00
|
|
|
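
  # Sketch of the expected params shape (names and values are hypothetical).
  # `:additional`, `:local` and `:published` are optional; the exact shape of
  # `:actor` and `:object` is whatever `make_create_data/2` expects.
  #
  #     create(%{
  #       to: ["https://www.w3.org/ns/activitystreams#Public"],
  #       actor: organizer_actor,
  #       object: event_as_data,
  #       local: true
  #     })
  #     #=> {:ok, %Activity{}, object}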

  @doc """
  Wrap an object (e.g. a follow or join request) into an `Accept` activity and federate it.
  """
  def accept(%{to: to, actor: actor, object: object} = params, activity_wrapper_id \\ nil) do
    # Only an explicit `false` is treated as false
    local = !(params[:local] == false)

    with data <- %{
           "to" => to,
           "type" => "Accept",
           "actor" => actor,
           "object" => object,
           "id" => activity_wrapper_id || get_url(object) <> "/activity"
         },
         {:ok, activity} <- create_activity(data, local),
         {:ok, object} <- insert_full_object(data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    end
  end

  @doc """
  Wrap an object into a `Reject` activity and federate it.
  """
  def reject(%{to: to, actor: actor, object: object} = params, activity_wrapper_id \\ nil) do
    # Only an explicit `false` is treated as false
    local = !(params[:local] == false)

    with data <- %{
           "to" => to,
           "type" => "Reject",
           "actor" => actor,
           "object" => object,
           "id" => activity_wrapper_id || get_url(object) <> "/activity"
         },
         {:ok, activity} <- create_activity(data, local),
         {:ok, object} <- insert_full_object(data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    end
  end

  @doc """
  Create an activity of type `Update` for an already-existing object.
  """
  def update(%{to: to, cc: cc, actor: actor, object: object} = params) do
    # Only an explicit `false` is treated as false
    local = !(params[:local] == false)

    with data <- %{
           "to" => to,
           "cc" => cc,
           "id" => object["url"],
           "type" => "Update",
           "actor" => actor,
           "object" => object
         },
         {:ok, activity} <- create_activity(data, local),
         {:ok, object} <- update_object(object["id"], data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    end
  end

  # TODO: This is weird, maybe we shouldn't check here if we can make the activity.
  # def like(
  #       %Actor{url: url} = actor,
  #       object,
  #       activity_id \\ nil,
  #       local \\ true
  #     ) do
  #   with nil <- get_existing_like(url, object),
  #        like_data <- make_like_data(user, object, activity_id),
  #        {:ok, activity} <- create_activity(like_data, local),
  #        {:ok, object} <- insert_full_object(data),
  #        {:ok, object} <- add_like_to_object(activity, object),
  #        :ok <- maybe_federate(activity) do
  #     {:ok, activity, object}
  #   else
  #     %Activity{} = activity -> {:ok, activity, object}
  #     error -> {:error, error}
  #   end
  # end

  # def unlike(
  #       %User{} = actor,
  #       %Object{} = object,
  #       activity_id \\ nil,
  #       local \\ true
  #     ) do
  #   with %Activity{} = like_activity <- get_existing_like(actor.ap_id, object),
  #        unlike_data <- make_unlike_data(actor, like_activity, activity_id),
  #        {:ok, unlike_activity} <- create_activity(unlike_data, local),
  #        {:ok, _object} <- insert_full_object(data),
  #        {:ok, _activity} <- Repo.delete(like_activity),
  #        {:ok, object} <- remove_like_from_object(like_activity, object),
  #        :ok <- maybe_federate(unlike_activity) do
  #     {:ok, unlike_activity, like_activity, object}
  #   else
  #     _e -> {:ok, object}
  #   end
  # end

  @doc """
  Announce (share) a public object.
  """
  def announce(
        %Actor{} = actor,
        object,
        activity_id \\ nil,
        local \\ true,
        public \\ true
      ) do
    with true <- is_public?(object),
         announce_data <- make_announce_data(actor, object, activity_id, public),
         {:ok, activity} <- create_activity(announce_data, local),
         {:ok, object} <- insert_full_object(announce_data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    else
      error ->
        {:error, error}
    end
  end

  @doc """
  Undo a previous announcement.
  """
  def unannounce(
        %Actor{} = actor,
        object,
        activity_id \\ nil,
        cancelled_activity_id \\ nil,
        local \\ true
      ) do
    with announce_activity <- make_announce_data(actor, object, cancelled_activity_id),
         unannounce_data <- make_unannounce_data(actor, announce_activity, activity_id),
         {:ok, unannounce_activity} <- create_activity(unannounce_data, local),
         {:ok, object} <- insert_full_object(unannounce_data),
         :ok <- maybe_federate(unannounce_activity) do
      {:ok, unannounce_activity, object}
    else
      _e -> {:ok, object}
    end
  end
@doc """
|
|
|
|
Make an actor follow another
|
|
|
|
"""
|
2018-12-14 17:41:55 +01:00
|
|
|
def follow(%Actor{} = follower, %Actor{} = followed, activity_id \\ nil, local \\ true) do
|
2019-07-30 16:40:59 +02:00
|
|
|
with {:ok, %Follower{url: follow_url}} <-
|
2019-09-09 00:52:49 +02:00
|
|
|
Actors.follow(followed, follower, activity_id, false),
|
2019-02-22 18:07:20 +01:00
|
|
|
activity_follow_id <-
|
2019-07-30 16:40:59 +02:00
|
|
|
activity_id || follow_url,
|
2018-12-14 17:41:55 +01:00
|
|
|
data <- make_follow_data(followed, follower, activity_follow_id),
|
2019-09-03 08:38:04 +02:00
|
|
|
{:ok, activity} <- create_activity(data, local),
|
|
|
|
{:ok, object} <- insert_full_object(data),
|
2018-05-17 11:32:23 +02:00
|
|
|
:ok <- maybe_federate(activity) do
|
2019-07-30 10:35:29 +02:00
|
|
|
{:ok, activity, object}
|
2018-11-12 09:05:31 +01:00
|
|
|
else
|
2019-07-30 16:40:59 +02:00
|
|
|
{:error, err, msg} when err in [:already_following, :suspended] ->
|
|
|
|
{:error, msg}
|
2018-05-17 11:32:23 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
@doc """
|
|
|
|
Make an actor unfollow another
|
|
|
|
"""
|
2018-12-14 17:41:55 +01:00
|
|
|
@spec unfollow(Actor.t(), Actor.t(), String.t(), boolean()) :: {:ok, map()} | any()
|
2019-07-30 16:40:59 +02:00
|
|
|
def unfollow(%Actor{} = follower, %Actor{} = followed, activity_id \\ nil, local \\ true) do
|
2019-09-09 00:52:49 +02:00
|
|
|
with {:ok, %Follower{id: follow_id}} <- Actors.unfollow(followed, follower),
|
2018-12-14 17:41:55 +01:00
|
|
|
# We recreate the follow activity
|
2019-07-30 16:40:59 +02:00
|
|
|
data <-
|
|
|
|
make_follow_data(
|
|
|
|
followed,
|
|
|
|
follower,
|
|
|
|
"#{MobilizonWeb.Endpoint.url()}/follow/#{follow_id}/activity"
|
|
|
|
),
|
2019-09-03 08:38:04 +02:00
|
|
|
{:ok, follow_activity} <- create_activity(data, local),
|
|
|
|
{:ok, _object} <- insert_full_object(data),
|
2019-07-30 16:40:59 +02:00
|
|
|
activity_unfollow_id <-
|
|
|
|
activity_id || "#{MobilizonWeb.Endpoint.url()}/unfollow/#{follow_id}/activity",
|
|
|
|
unfollow_data <-
|
|
|
|
make_unfollow_data(follower, followed, follow_activity, activity_unfollow_id),
|
2019-09-03 08:38:04 +02:00
|
|
|
{:ok, activity} <- create_activity(unfollow_data, local),
|
|
|
|
{:ok, object} <- insert_full_object(unfollow_data),
|
2018-12-14 17:41:55 +01:00
|
|
|
:ok <- maybe_federate(activity) do
|
2019-07-30 10:35:29 +02:00
|
|
|
{:ok, activity, object}
|
2018-12-14 17:41:55 +01:00
|
|
|
else
|
|
|
|
err ->
|
2019-07-30 16:40:59 +02:00
|
|
|
Logger.debug("Error while unfollowing an actor #{inspect(err)}")
|
2018-12-14 17:41:55 +01:00
|
|
|
err
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-08-24 11:34:00 +02:00
|
|
|

  @doc """
  Delete an event, a comment or an actor, and federate the `Delete` activity.
  """
  def delete(object, local \\ true)

  def delete(%Event{url: url, organizer_actor: actor} = event, local) do
    data = %{
      "type" => "Delete",
      "actor" => actor.url,
      "object" => url,
      "to" => [actor.url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"],
      "id" => url <> "/delete"
    }

    with {:ok, _} <- Events.delete_event(event),
         {:ok, activity} <- create_activity(data, local),
         :ok <- maybe_federate(activity) do
      {:ok, activity, event}
    end
  end

  def delete(%Comment{url: url, actor: actor} = comment, local) do
    data = %{
      "type" => "Delete",
      "actor" => actor.url,
      "object" => url,
      "id" => url <> "/delete",
      "to" => [actor.url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"]
    }

    with {:ok, _} <- Events.delete_comment(comment),
         {:ok, activity} <- create_activity(data, local),
         {:ok, object} <- insert_full_object(data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    end
  end

  def delete(%Actor{url: url} = actor, local) do
    data = %{
      "type" => "Delete",
      "actor" => url,
      "object" => url,
      "id" => url <> "/delete",
      "to" => [url <> "/followers", "https://www.w3.org/ns/activitystreams#Public"]
    }

    with {:ok, _} <- Actors.delete_actor(actor),
         {:ok, activity} <- create_activity(data, local),
         {:ok, object} <- insert_full_object(data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    end
  end

  @doc """
  Build and federate a report (`Flag`) activity.
  """
  def flag(params) do
    # Only an explicit `false` is treated as false
    local = !(params[:local] == false)
    forward = !(params[:forward] == false)

    additional = params[:additional] || %{}

    additional =
      if forward do
        Map.merge(additional, %{"to" => [], "cc" => [params.reported_actor_url]})
      else
        Map.merge(additional, %{"to" => [], "cc" => []})
      end

    with flag_data <- make_flag_data(params, additional),
         {:ok, activity} <- create_activity(flag_data, local),
         {:ok, object} <- insert_full_object(flag_data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, object}
    end
  end

  @doc """
  Make an actor join an event. Joining a group is not implemented yet.
  """
  def join(object, actor, local \\ true)

  def join(%Event{options: options} = event, %Actor{} = actor, local) do
    # TODO: Refactor me for federation
    with maximum_attendee_capacity <-
           Map.get(options, :maximum_attendee_capacity, 2_000_000) || false,
         {:maximum_attendee_capacity, true} <-
           {:maximum_attendee_capacity,
            !maximum_attendee_capacity ||
              Mobilizon.Events.count_participant_participants(event.id) <
                maximum_attendee_capacity},
         role <- Mobilizon.Events.get_default_participant_role(event),
         {:ok, %Participant{} = participant} <-
           Mobilizon.Events.create_participant(%{
             role: role,
             event_id: event.id,
             actor_id: actor.id
           }),
         join_data <- Convertible.model_to_as(participant),
         join_data <- Map.put(join_data, "to", [event.organizer_actor.url]),
         join_data <- Map.put(join_data, "cc", []),
         {:ok, activity} <- create_activity(join_data, local),
         {:ok, _object} <- insert_full_object(join_data),
         :ok <- maybe_federate(activity) do
      if role === :participant do
        accept(
          %{to: [actor.url], actor: event.organizer_actor.url, object: join_data["id"]},
          "#{MobilizonWeb.Endpoint.url()}/accept/join/#{participant.id}"
        )
      end

      {:ok, activity, participant}
    end
  end
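
  # Usage sketch (hypothetical event and actor structs): when the event's default
  # participant role is `:participant`, the join is auto-accepted above.
  #
  #     {:ok, %Activity{}, %Participant{role: _role}} = join(event, actor)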

  # TODO: Implement me
  def join(%Actor{type: :Group} = _group, %Actor{} = _actor, _local) do
    :error
  end

  @doc """
  Make an actor leave an event.
  """
  def leave(object, actor, local \\ true)

  # TODO: If we want to use this for exclusion we need to have an extra field
  # for the actor that excluded the participant
  def leave(
        %Event{id: event_id, url: event_url} = event,
        %Actor{id: actor_id, url: actor_url} = _actor,
        local
      ) do
    with {:only_organizer, false} <-
           {:only_organizer, Participant.is_not_only_organizer(event_id, actor_id)},
         {:ok, %Participant{} = participant} <-
           Mobilizon.Events.get_participant(event_id, actor_id),
         {:ok, %Participant{} = participant} <-
           Events.delete_participant(participant),
         leave_data <- %{
           "type" => "Leave",
           # If it's an exclusion it should be something else
           "actor" => actor_url,
           "object" => event_url,
           "to" => [event.organizer_actor.url],
           "cc" => []
         },
         {:ok, activity} <- create_activity(leave_data, local),
         {:ok, _object} <- insert_full_object(leave_data),
         :ok <- maybe_federate(activity) do
      {:ok, activity, participant}
    end
  end
@doc """
|
|
|
|
Create an actor locally by it's URL (AP ID)
|
|
|
|
"""
|
|
|
|
@spec make_actor_from_url(String.t(), boolean()) :: {:ok, %Actor{}} | {:error, any()}
|
2018-11-12 09:05:31 +01:00
|
|
|
def make_actor_from_url(url, preload \\ false) do
|
2019-07-23 18:06:22 +02:00
|
|
|
case fetch_and_prepare_actor_from_url(url) do
|
|
|
|
{:ok, data} ->
|
2019-09-09 00:52:49 +02:00
|
|
|
Actors.upsert_actor(data, preload)
|
2019-07-23 18:06:22 +02:00
|
|
|
|
2018-11-08 16:11:23 +01:00
|
|
|
# Request returned 410
|
|
|
|
{:error, :actor_deleted} ->
|
2019-07-30 16:40:59 +02:00
|
|
|
Logger.info("Actor was deleted")
|
2018-11-08 16:11:23 +01:00
|
|
|
{:error, :actor_deleted}
|
|
|
|
|
2018-05-17 11:32:23 +02:00
|
|
|
e ->
|
2019-07-30 16:40:59 +02:00
|
|
|
Logger.warn("Failed to make actor from url")
|
2018-05-17 11:32:23 +02:00
|
|
|
{:error, e}
|
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
@doc """
|
2019-02-22 16:11:57 +01:00
|
|
|
Find an actor in our local database or call WebFinger to find what's its AP ID is and then fetch it
|
2018-11-12 18:17:53 +01:00
|
|
|
"""
|
2018-11-23 15:03:53 +01:00
|
|
|
@spec find_or_make_actor_from_nickname(String.t(), atom() | nil) :: tuple()
|
|
|
|
def find_or_make_actor_from_nickname(nickname, type \\ nil) do
|
2019-07-23 18:06:22 +02:00
|
|
|
case Actors.get_actor_by_name(nickname, type) do
|
|
|
|
%Actor{} = actor ->
|
|
|
|
{:ok, actor}
|
|
|
|
|
|
|
|
nil ->
|
|
|
|
make_actor_from_nickname(nickname)
|
2018-05-30 14:27:21 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2018-11-23 15:03:53 +01:00
|
|
|
@spec find_or_make_person_from_nickname(String.t()) :: tuple()
|
|
|
|
def find_or_make_person_from_nickname(nick), do: find_or_make_actor_from_nickname(nick, :Person)
|
|
|
|
|
|
|
|
@spec find_or_make_group_from_nickname(String.t()) :: tuple()
|
|
|
|
def find_or_make_group_from_nickname(nick), do: find_or_make_actor_from_nickname(nick, :Group)
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
@doc """
|
2019-02-22 16:11:57 +01:00
|
|
|
Create an actor inside our database from username, using WebFinger to find out it's AP ID and then fetch it
|
2018-11-12 18:17:53 +01:00
|
|
|
"""
|
|
|
|
@spec make_actor_from_nickname(String.t()) :: {:ok, %Actor{}} | {:error, any()}
|
2018-05-18 09:56:21 +02:00
|
|
|
def make_actor_from_nickname(nickname) do
|
2019-07-23 18:06:22 +02:00
|
|
|
case WebFinger.finger(nickname) do
|
|
|
|
{:ok, %{"url" => url}} when not is_nil(url) ->
|
|
|
|
make_actor_from_url(url)
|
|
|
|
|
|
|
|
_e ->
|
|
|
|
{:error, "No ActivityPub URL found in WebFinger"}
|
2018-05-17 11:32:23 +02:00
|
|
|
end
|
|
|
|
end
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|
@doc """
|
|
|
|
Publish an activity to all appropriated audiences inboxes
|
|
|
|
"""
|
2018-05-17 11:32:23 +02:00
|
|
|
def publish(actor, activity) do
|
2018-05-18 09:56:21 +02:00
|
|
|
Logger.debug("Publishing an activity")
|
2019-07-30 16:40:59 +02:00
|
|
|
Logger.debug(inspect(activity))
|
|
|
|
|
|
|
|
public = is_public?(activity)
|
|
|
|
|
2019-09-21 23:59:07 +02:00
|
|
|
if public && !is_delete_activity?(activity) && Config.get([:instance, :allow_relay]) do
|
2019-07-30 16:40:59 +02:00
|
|
|
Logger.info(fn -> "Relaying #{activity.data["id"]} out" end)
|
2019-09-22 18:29:13 +02:00
|
|
|
|
|
|
|
Relay.publish(activity)
|
2019-07-30 16:40:59 +02:00
|
|
|
end
|
2018-07-27 10:45:35 +02:00
|
|
|
|
2018-05-18 09:56:21 +02:00
|
|
|
followers =
|
|
|
|
if actor.followers_url in activity.recipients do
|
2019-09-11 03:16:37 +02:00
|
|
|
Actors.list_external_followers_for_actor(actor)
|
2018-05-18 09:56:21 +02:00
|
|
|
else
|
|
|
|
[]
|
|
|
|
end
|
2018-05-17 11:32:23 +02:00
|
|
|
|
2018-05-18 09:56:21 +02:00
|
|
|
remote_inboxes =
|
2018-11-12 09:05:31 +01:00
|
|
|
(remote_actors(activity) ++ followers)
|
2018-05-18 09:56:21 +02:00
|
|
|
|> Enum.map(fn follower -> follower.shared_inbox_url end)
|
|
|
|
|> Enum.uniq()
|
2018-05-17 11:32:23 +02:00
|
|
|
|
|
|
|
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
|
|
|
|
json = Jason.encode!(data)
|
2019-01-03 14:59:59 +01:00
|
|
|
Logger.debug(fn -> "Remote inboxes are : #{inspect(remote_inboxes)}" end)
|
2018-05-17 11:32:23 +02:00
|
|
|
|
|
|
|
Enum.each(remote_inboxes, fn inbox ->
|
|
|
|
Federator.enqueue(:publish_single_ap, %{
|
|
|
|
inbox: inbox,
|
|
|
|
json: json,
|
|
|
|
actor: actor,
|
|
|
|
id: activity.data["id"]
|
|
|
|
})
|
|
|
|
end)
|
|
|
|
end
|
|
|
|
|
2019-09-09 09:31:08 +02:00
|
|
|

  defp is_delete_activity?(%Activity{data: %{"type" => "Delete"}}), do: true
  defp is_delete_activity?(_), do: false
@doc """
|
|
|
|
Publish an activity to a specific inbox
|
|
|
|
"""
|
2018-05-17 11:32:23 +02:00
|
|
|
def publish_one(%{inbox: inbox, json: json, actor: actor, id: id}) do
|
|
|
|
Logger.info("Federating #{id} to #{inbox}")
|
2019-08-14 17:45:11 +02:00
|
|
|
%URI{host: host, path: _path} = URI.parse(inbox)
|
2018-12-07 10:47:31 +01:00
|
|
|
|
2019-07-30 16:40:59 +02:00
|
|
|
digest = Signature.build_digest(json)
|
|
|
|
date = Signature.generate_date_header()
|
|
|
|
# request_target = Signature.generate_request_target("POST", path)
|
2018-05-17 11:32:23 +02:00
|
|
|
|
|
|
|
signature =
|
2019-07-30 16:40:59 +02:00
|
|
|
Signature.sign(actor, %{
|
2018-10-11 17:47:02 +02:00
|
|
|
host: host,
|
2018-12-07 10:47:31 +01:00
|
|
|
"content-length": byte_size(json),
|
2019-07-30 16:40:59 +02:00
|
|
|
# TODO : Look me up in depth why Pleroma handles this inside lib/mobilizon_web/http_signature.ex
|
|
|
|
# "(request-target)": request_target,
|
2018-12-07 10:47:31 +01:00
|
|
|
digest: digest,
|
|
|
|
date: date
|
2018-10-11 17:47:02 +02:00
|
|
|
})
|
2018-07-27 10:45:35 +02:00
|
|
|
|
2018-11-12 23:30:47 +01:00
|
|
|
HTTPoison.post(
|
|
|
|
inbox,
|
|
|
|
json,
|
2018-12-07 10:47:31 +01:00
|
|
|
[
|
|
|
|
{"Content-Type", "application/activity+json"},
|
|
|
|
{"signature", signature},
|
|
|
|
{"digest", digest},
|
|
|
|
{"date", date}
|
|
|
|
],
|
2018-11-12 23:30:47 +01:00
|
|
|
hackney: [pool: :default]
|
|
|
|
)
|
2018-05-17 11:32:23 +02:00
|
|
|
end
|
|
|
|
|
2019-02-22 16:11:57 +01:00
|
|
|

  # Fetch a remote actor's information through its AP ID
  @spec fetch_and_prepare_actor_from_url(String.t()) :: {:ok, struct()} | {:error, atom()} | any()
  defp fetch_and_prepare_actor_from_url(url) do
    Logger.debug("Fetching and preparing actor from url")
    Logger.debug(inspect(url))

    res =
      with %HTTPoison.Response{status_code: 200, body: body} <-
             HTTPoison.get!(url, [Accept: "application/activity+json"], follow_redirect: true),
           :ok <- Logger.debug("response okay, now decoding json"),
           {:ok, data} <- Jason.decode(body) do
        Logger.debug("Got activity+json response at actor's endpoint, now converting data")
        actor_data_from_actor_object(data)
      else
        # Actor is gone, probably deleted
        # (HTTPoison.get!/3 returns the bare response struct, not an {:ok, _} tuple)
        %HTTPoison.Response{status_code: 410} ->
          Logger.info("Response HTTP 410")
          {:error, :actor_deleted}

        e ->
          Logger.warn("Could not decode actor at fetch #{url}, #{inspect(e)}")
          {:error, e}
      end

    res
  end
@doc """
|
|
|
|
Creating proper actor data struct from AP data
|
2019-02-22 16:11:57 +01:00
|
|
|
|
|
|
|
|
|
|
|
Convert ActivityPub data to our internal format
|
2018-11-12 18:17:53 +01:00
|
|
|
"""
|
|
|
|
@spec actor_data_from_actor_object(map()) :: {:ok, map()}
|
|
|
|
def actor_data_from_actor_object(data) when is_map(data) do
|
2019-05-22 14:12:11 +02:00
|
|
|
avatar =
|
|
|
|
data["icon"]["url"] &&
|
|
|
|
%{
|
|
|
|
"name" => data["icon"]["name"] || "avatar",
|
|
|
|
"url" => data["icon"]["url"]
|
|
|
|
}
|
|
|
|
|
|
|
|
banner =
|
|
|
|
data["image"]["url"] &&
|
|
|
|
%{
|
|
|
|
"name" => data["image"]["name"] || "banner",
|
|
|
|
"url" => data["image"]["url"]
|
|
|
|
}
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
actor_data = %{
|
2018-05-17 11:32:23 +02:00
|
|
|
url: data["id"],
|
2019-05-22 14:12:11 +02:00
|
|
|
avatar: avatar,
|
|
|
|
banner: banner,
|
2018-11-07 18:25:45 +01:00
|
|
|
name: data["name"],
|
2018-05-18 09:56:21 +02:00
|
|
|
preferred_username: data["preferredUsername"],
|
|
|
|
summary: data["summary"],
|
2018-06-14 17:25:55 +02:00
|
|
|
keys: data["publicKey"]["publicKeyPem"],
|
2018-05-18 09:56:21 +02:00
|
|
|
inbox_url: data["inbox"],
|
|
|
|
outbox_url: data["outbox"],
|
|
|
|
following_url: data["following"],
|
|
|
|
followers_url: data["followers"],
|
2018-05-30 18:59:13 +02:00
|
|
|
shared_inbox_url: data["endpoints"]["sharedInbox"],
|
2018-05-18 09:56:21 +02:00
|
|
|
domain: URI.parse(data["id"]).host,
|
|
|
|
manually_approves_followers: data["manuallyApprovesFollowers"],
|
2018-07-27 10:45:35 +02:00
|
|
|
type: data["type"]
|
2018-05-17 11:32:23 +02:00
|
|
|
}
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
{:ok, actor_data}
|
2018-05-17 11:32:23 +02:00
|
|
|
end
|
|
|
|
|
2018-11-12 18:17:53 +01:00
|
|
|
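
  # Illustrative conversion (hypothetical AP actor document, abbreviated):
  #
  #     {:ok, %{url: "https://remote.tld/@bob", preferred_username: "bob", type: "Person"}} =
  #       actor_data_from_actor_object(%{
  #         "id" => "https://remote.tld/@bob",
  #         "type" => "Person",
  #         "preferredUsername" => "bob",
  #         "inbox" => "https://remote.tld/@bob/inbox",
  #         "publicKey" => %{"publicKeyPem" => "..."}
  #       })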
@doc """
|
|
|
|
Return all public activities (events & comments) for an actor
|
|
|
|
"""
|
2019-04-25 19:05:05 +02:00
|
|
|
@spec fetch_public_activities_for_actor(Actor.t(), integer(), integer()) :: map()
|
|
|
|
def fetch_public_activities_for_actor(%Actor{} = actor, page \\ 1, limit \\ 10) do
|
2019-09-16 02:07:44 +02:00
|
|
|
{:ok, events, total_events} = Events.list_public_events_for_actor(actor, page, limit)
|
|
|
|
{:ok, comments, total_comments} = Events.list_public_comments_for_actor(actor, page, limit)
|
2018-12-14 11:23:36 +01:00
|
|
|
|
2019-04-25 19:05:05 +02:00
|
|
|
event_activities = Enum.map(events, &event_to_activity/1)
|
2018-07-27 10:45:35 +02:00
|
|
|
|
2019-04-25 19:05:05 +02:00
|
|
|
comment_activities = Enum.map(comments, &comment_to_activity/1)
|
2018-07-27 10:45:35 +02:00
|
|
|
|
2019-04-25 19:05:05 +02:00
|
|
|
activities = event_activities ++ comment_activities
|
2018-08-24 11:34:00 +02:00
|
|
|
|
2019-04-25 19:05:05 +02:00
|
|
|
%{elements: activities, total: total_events + total_comments}
|
2018-05-17 11:32:23 +02:00
|
|
|
end
|
|
|
|
|
2018-11-12 23:30:47 +01:00
|
|
|

  # Create an activity from an event
  @spec event_to_activity(%Event{}, boolean()) :: Activity.t()
  defp event_to_activity(%Event{} = event, local \\ true) do
    %Activity{
      recipients: ["https://www.w3.org/ns/activitystreams#Public"],
      actor: event.organizer_actor.url,
      data: Converter.Event.model_to_as(event),
      local: local
    }
  end

  # Create an activity from a comment
  @spec comment_to_activity(%Comment{}, boolean()) :: Activity.t()
  defp comment_to_activity(%Comment{} = comment, local \\ true) do
    %Activity{
      recipients: ["https://www.w3.org/ns/activitystreams#Public"],
      actor: comment.actor.url,
      data: Converter.Comment.model_to_as(comment),
      local: local
    }
  end

  # # Whether the Public audience is in the activity's audience
  # defp is_public?(activity) do
  #   "https://www.w3.org/ns/activitystreams#Public" in (activity.data["to"] ++
  #                                                        (activity.data["cc"] || []))
  # end
end