# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.User do
  use Ecto.Schema

  import Ecto.Changeset
  import Ecto.Query
  import Ecto, only: [assoc: 2]

  alias Ecto.Multi
  alias Pleroma.Activity
  alias Pleroma.Config
  alias Pleroma.Conversation.Participation
  alias Pleroma.Delivery
  alias Pleroma.Emoji
  alias Pleroma.FollowingRelationship
  alias Pleroma.Formatter
  alias Pleroma.HTML
  alias Pleroma.Keys
  alias Pleroma.MFA
  alias Pleroma.Notification
  alias Pleroma.Object
  alias Pleroma.Registration
  alias Pleroma.Repo
  alias Pleroma.RepoStreamer
  alias Pleroma.User
  alias Pleroma.UserRelationship
  alias Pleroma.Web
  alias Pleroma.Web.ActivityPub.ActivityPub
  alias Pleroma.Web.ActivityPub.Builder
  alias Pleroma.Web.ActivityPub.ObjectValidators.Types
  alias Pleroma.Web.ActivityPub.Pipeline
  alias Pleroma.Web.ActivityPub.Utils
  alias Pleroma.Web.CommonAPI
  alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils
  alias Pleroma.Web.OAuth
  alias Pleroma.Web.RelMe
  alias Pleroma.Workers.BackgroundWorker

  require Logger

  @type t :: %__MODULE__{}
  @type account_status :: :active | :deactivated | :password_reset_pending | :confirmation_pending
  @primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}

  # credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
  @email_regex ~r/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/

  @strict_local_nickname_regex ~r/^[a-zA-Z\d]+$/
  @extended_local_nickname_regex ~r/^[a-zA-Z\d_-]+$/

  # AP ID user relationships (blocks, mutes etc.)
  # Format: [rel_type: [outgoing_rel: :outgoing_rel_target, incoming_rel: :incoming_rel_source]]
  @user_relationships_config [
    block: [
      blocker_blocks: :blocked_users,
      blockee_blocks: :blocker_users
    ],
    mute: [
      muter_mutes: :muted_users,
      mutee_mutes: :muter_users
    ],
    reblog_mute: [
      reblog_muter_mutes: :reblog_muted_users,
      reblog_mutee_mutes: :reblog_muter_users
    ],
    notification_mute: [
      notification_muter_mutes: :notification_muted_users,
      notification_mutee_mutes: :notification_muter_users
    ],
    # Note: `inverse_subscription` relationship is inverse: subscriber acts as relationship target
    inverse_subscription: [
      subscribee_subscriptions: :subscriber_users,
      subscriber_subscriptions: :subscribee_users
    ]
  ]

  schema "users" do
    field(:bio, :string)
    field(:email, :string)
    field(:name, :string)
    field(:nickname, :string)
    field(:password_hash, :string)
    field(:password, :string, virtual: true)
    field(:password_confirmation, :string, virtual: true)
    field(:keys, :string)
    field(:public_key, :string)
    field(:ap_id, :string)
    field(:avatar, :map)
    field(:local, :boolean, default: true)
    field(:follower_address, :string)
    field(:following_address, :string)
    field(:search_rank, :float, virtual: true)
    field(:search_type, :integer, virtual: true)
    field(:tags, {:array, :string}, default: [])
    field(:last_refreshed_at, :naive_datetime_usec)
    field(:last_digest_emailed_at, :naive_datetime)
    field(:banner, :map, default: %{})
    field(:background, :map, default: %{})
    field(:note_count, :integer, default: 0)
    field(:follower_count, :integer, default: 0)
    field(:following_count, :integer, default: 0)
    field(:locked, :boolean, default: false)
    field(:confirmation_pending, :boolean, default: false)
    field(:password_reset_pending, :boolean, default: false)
    field(:confirmation_token, :string, default: nil)
    field(:default_scope, :string, default: "public")
    field(:domain_blocks, {:array, :string}, default: [])
    field(:deactivated, :boolean, default: false)
    field(:no_rich_text, :boolean, default: false)
    field(:ap_enabled, :boolean, default: false)
    field(:is_moderator, :boolean, default: false)
    field(:is_admin, :boolean, default: false)
    field(:show_role, :boolean, default: true)
    field(:settings, :map, default: nil)
    field(:uri, Types.Uri, default: nil)
    field(:hide_followers_count, :boolean, default: false)
    field(:hide_follows_count, :boolean, default: false)
    field(:hide_followers, :boolean, default: false)
    field(:hide_follows, :boolean, default: false)
    field(:hide_favorites, :boolean, default: true)
    field(:unread_conversation_count, :integer, default: 0)
    field(:pinned_activities, {:array, :string}, default: [])
    field(:email_notifications, :map, default: %{"digest" => false})
    field(:mascot, :map, default: nil)
    field(:emoji, :map, default: %{})
    field(:pleroma_settings_store, :map, default: %{})
    field(:fields, {:array, :map}, default: [])
    field(:raw_fields, {:array, :map}, default: [])
    field(:discoverable, :boolean, default: false)
    field(:invisible, :boolean, default: false)
    field(:allow_following_move, :boolean, default: true)
    field(:skip_thread_containment, :boolean, default: false)
    field(:actor_type, :string, default: "Person")
    field(:also_known_as, {:array, :string}, default: [])
    field(:inbox, :string)
    field(:shared_inbox, :string)

    embeds_one(
      :notification_settings,
      Pleroma.User.NotificationSetting,
      on_replace: :update
    )

    has_many(:notifications, Notification)
    has_many(:registrations, Registration)
    has_many(:deliveries, Delivery)

    has_many(:outgoing_relationships, UserRelationship, foreign_key: :source_id)
    has_many(:incoming_relationships, UserRelationship, foreign_key: :target_id)

    for {relationship_type,
         [
           {outgoing_relation, outgoing_relation_target},
           {incoming_relation, incoming_relation_source}
         ]} <- @user_relationships_config do
      # Definitions of `has_many` relations: :blocker_blocks, :muter_mutes, :reblog_muter_mutes,
      # :notification_muter_mutes, :subscribee_subscriptions
      has_many(outgoing_relation, UserRelationship,
        foreign_key: :source_id,
        where: [relationship_type: relationship_type]
      )

      # Definitions of `has_many` relations: :blockee_blocks, :mutee_mutes, :reblog_mutee_mutes,
      # :notification_mutee_mutes, :subscriber_subscriptions
      has_many(incoming_relation, UserRelationship,
        foreign_key: :target_id,
        where: [relationship_type: relationship_type]
      )

      # Definitions of `has_many` relations: :blocked_users, :muted_users, :reblog_muted_users,
      # :notification_muted_users, :subscriber_users
      has_many(outgoing_relation_target, through: [outgoing_relation, :target])

      # Definitions of `has_many` relations: :blocker_users, :muter_users, :reblog_muter_users,
      # :notification_muter_users, :subscribee_users
      has_many(incoming_relation_source, through: [incoming_relation, :source])
    end

    # `:blocks` is deprecated (replaced with `blocked_users` relation)
    field(:blocks, {:array, :string}, default: [])
    # `:mutes` is deprecated (replaced with `muted_users` relation)
    field(:mutes, {:array, :string}, default: [])
    # `:muted_reblogs` is deprecated (replaced with `reblog_muted_users` relation)
    field(:muted_reblogs, {:array, :string}, default: [])
    # `:muted_notifications` is deprecated (replaced with `notification_muted_users` relation)
    field(:muted_notifications, {:array, :string}, default: [])
    # `:subscribers` is deprecated (replaced with `subscriber_users` relation)
    field(:subscribers, {:array, :string}, default: [])

    embeds_one(
      :multi_factor_authentication_settings,
      MFA.Settings,
      on_replace: :delete
    )

    timestamps()
  end

  for {_relationship_type, [{_outgoing_relation, outgoing_relation_target}, _]} <-
        @user_relationships_config do
    # `def blocked_users_relation/2`, `def muted_users_relation/2`,
    # `def reblog_muted_users_relation/2`, `def notification_muted_users_relation/2`,
    # `def subscriber_users_relation/2`
    def unquote(:"#{outgoing_relation_target}_relation")(user, restrict_deactivated? \\ false) do
      target_users_query = assoc(user, unquote(outgoing_relation_target))

      if restrict_deactivated? do
        restrict_deactivated(target_users_query)
      else
        target_users_query
      end
    end

    # `def blocked_users/2`, `def muted_users/2`, `def reblog_muted_users/2`,
    # `def notification_muted_users/2`, `def subscriber_users/2`
    def unquote(outgoing_relation_target)(user, restrict_deactivated? \\ false) do
      __MODULE__
      |> apply(unquote(:"#{outgoing_relation_target}_relation"), [
        user,
        restrict_deactivated?
      ])
      |> Repo.all()
    end

    # `def blocked_users_ap_ids/2`, `def muted_users_ap_ids/2`, `def reblog_muted_users_ap_ids/2`,
    # `def notification_muted_users_ap_ids/2`, `def subscriber_users_ap_ids/2`
    def unquote(:"#{outgoing_relation_target}_ap_ids")(user, restrict_deactivated? \\ false) do
      __MODULE__
      |> apply(unquote(:"#{outgoing_relation_target}_relation"), [
        user,
        restrict_deactivated?
      ])
      |> select([u], u.ap_id)
      |> Repo.all()
    end
  end
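
  # Illustrative usage of the functions generated above (names come from
  # @user_relationships_config; `user` is assumed to be a loaded %User{}):
  #
  #     User.blocked_users(user)        #=> [%User{}, ...]
  #     User.blocked_users_ap_ids(user) #=> ["https://example.com/users/bob", ...]
  #     User.muted_users(user, true)    # pass true to exclude deactivated accounts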

  @doc """
  Dumps Flake Id to SQL-compatible format (16-byte UUID).
  E.g. "9pQtDGXuq4p3VlcJEm" -> <<0, 0, 1, 110, 179, 218, 42, 92, 213, 41, 44, 227, 95, 213, 0, 0>>
  """
  def binary_id(source_id) when is_binary(source_id) do
    with {:ok, dumped_id} <- FlakeId.Ecto.CompatType.dump(source_id) do
      dumped_id
    else
      _ -> source_id
    end
  end

  def binary_id(source_ids) when is_list(source_ids) do
    Enum.map(source_ids, &binary_id/1)
  end

  def binary_id(%User{} = user), do: binary_id(user.id)

  @doc "Returns the account's status"
  @spec account_status(User.t()) :: account_status()
  def account_status(%User{deactivated: true}), do: :deactivated
  def account_status(%User{password_reset_pending: true}), do: :password_reset_pending

  def account_status(%User{confirmation_pending: true}) do
    case Config.get([:instance, :account_activation_required]) do
      true -> :confirmation_pending
      _ -> :active
    end
  end

  def account_status(%User{}), do: :active
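
  # Illustrative behaviour (not part of the original module): clause order matters,
  # so deactivation wins over any other pending flag.
  #
  #     account_status(%User{deactivated: true, confirmation_pending: true}) #=> :deactivated
  #     account_status(%User{})                                              #=> :active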

  @spec visible_for?(User.t(), User.t() | nil) :: boolean()
  def visible_for?(user, for_user \\ nil)

  def visible_for?(%User{invisible: true}, _), do: false

  def visible_for?(%User{id: user_id}, %User{id: user_id}), do: true

  def visible_for?(%User{local: local} = user, nil) do
    cfg_key =
      if local,
        do: :local,
        else: :remote

    if Config.get([:restrict_unauthenticated, :profiles, cfg_key]),
      do: false,
      else: account_status(user) == :active
  end

  def visible_for?(%User{} = user, for_user) do
    account_status(user) == :active || superuser?(for_user)
  end

  def visible_for?(_, _), do: false
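
  # Illustrative usage (assumes `user` and `viewer` are %User{} structs):
  #
  #     visible_for?(user)          # anonymous viewer; honours :restrict_unauthenticated
  #     visible_for?(user, viewer)  # true if the account is active or the viewer is a superuser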

  @spec superuser?(User.t()) :: boolean()
  def superuser?(%User{local: true, is_admin: true}), do: true
  def superuser?(%User{local: true, is_moderator: true}), do: true
  def superuser?(_), do: false

  @spec invisible?(User.t()) :: boolean()
  def invisible?(%User{invisible: true}), do: true
  def invisible?(_), do: false

  def avatar_url(user, options \\ []) do
    case user.avatar do
      %{"url" => [%{"href" => href} | _]} ->
        href

      _ ->
        unless options[:no_default] do
          Config.get([:assets, :default_user_avatar], "#{Web.base_url()}/images/avi.png")
        end
    end
  end

  def banner_url(user, options \\ []) do
    case user.banner do
      %{"url" => [%{"href" => href} | _]} -> href
      _ -> !options[:no_default] && "#{Web.base_url()}/images/banner.png"
    end
  end
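
  # Illustrative example of the map shape these helpers expect (an ActivityPub
  # Image-style attachment; the URL is made up):
  #
  #     avatar_url(%User{avatar: %{"url" => [%{"href" => "https://example.com/avi.png"}]}})
  #     #=> "https://example.com/avi.png"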

  # Should probably be renamed or removed
  def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}"

  def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
  def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"

  @spec ap_following(User.t()) :: String.t()
  def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
  def ap_following(%User{} = user), do: "#{ap_id(user)}/following"
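
  # Illustrative values (assuming Web.base_url() returns "https://example.com"):
  #
  #     ap_id(%User{nickname: "lain"})        #=> "https://example.com/users/lain"
  #     ap_followers(%User{nickname: "lain"}) #=> "https://example.com/users/lain/followers"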

  @spec restrict_deactivated(Ecto.Query.t()) :: Ecto.Query.t()
  def restrict_deactivated(query) do
    from(u in query, where: u.deactivated != ^true)
  end

  defdelegate following_count(user), to: FollowingRelationship

  defp truncate_fields_param(params) do
    if Map.has_key?(params, :fields) do
      Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1))
    else
      params
    end
  end

  defp truncate_if_exists(params, key, max_length) do
    if Map.has_key?(params, key) and is_binary(params[key]) do
      {value, _chopped} = String.split_at(params[key], max_length)
      Map.put(params, key, value)
    else
      params
    end
  end

  defp fix_follower_address(%{follower_address: _, following_address: _} = params), do: params

  defp fix_follower_address(%{nickname: nickname} = params),
    do: Map.put(params, :follower_address, ap_followers(%User{nickname: nickname}))

  defp fix_follower_address(params), do: params

  def remote_user_changeset(struct \\ %User{local: false}, params) do
    bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
    name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)

    name =
      case params[:name] do
        name when is_binary(name) and byte_size(name) > 0 -> name
        _ -> params[:nickname]
      end

    params =
      params
      |> Map.put(:name, name)
      |> Map.put_new(:last_refreshed_at, NaiveDateTime.utc_now())
      |> truncate_if_exists(:name, name_limit)
      |> truncate_if_exists(:bio, bio_limit)
      |> truncate_fields_param()
      |> fix_follower_address()

    struct
    |> cast(
      params,
      [
        :bio,
        :name,
        :emoji,
        :ap_id,
        :inbox,
        :shared_inbox,
        :nickname,
        :public_key,
        :avatar,
        :ap_enabled,
        :banner,
        :locked,
        :last_refreshed_at,
        :uri,
        :follower_address,
        :following_address,
        :hide_followers,
        :hide_follows,
        :hide_followers_count,
        :hide_follows_count,
        :follower_count,
        :fields,
        :following_count,
        :discoverable,
        :invisible,
        :actor_type,
        :also_known_as
      ]
    )
    |> validate_required([:name, :ap_id])
    |> unique_constraint(:nickname)
    |> validate_format(:nickname, @email_regex)
    |> validate_length(:bio, max: bio_limit)
    |> validate_length(:name, max: name_limit)
    |> validate_fields(true)
  end

  def update_changeset(struct, params \\ %{}) do
    bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
    name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)

    struct
    |> cast(
      params,
      [
        :bio,
        :name,
        :emoji,
        :avatar,
        :public_key,
        :inbox,
        :shared_inbox,
        :locked,
        :no_rich_text,
        :default_scope,
        :banner,
        :hide_follows,
        :hide_followers,
        :hide_followers_count,
        :hide_follows_count,
        :hide_favorites,
        :allow_following_move,
        :background,
        :show_role,
        :skip_thread_containment,
        :fields,
        :raw_fields,
        :pleroma_settings_store,
        :discoverable,
        :actor_type,
        :also_known_as
      ]
    )
    |> unique_constraint(:nickname)
    |> validate_format(:nickname, local_nickname_regex())
    |> validate_length(:bio, max: bio_limit)
    |> validate_length(:name, min: 1, max: name_limit)
    |> put_fields()
    |> put_emoji()
    |> put_change_if_present(:bio, &{:ok, parse_bio(&1, struct)})
    |> put_change_if_present(:avatar, &put_upload(&1, :avatar))
    |> put_change_if_present(:banner, &put_upload(&1, :banner))
    |> put_change_if_present(:background, &put_upload(&1, :background))
    |> put_change_if_present(
      :pleroma_settings_store,
      &{:ok, Map.merge(struct.pleroma_settings_store, &1)}
    )
    |> validate_fields(false)
  end
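
  # Illustrative profile update (hypothetical params; `update_and_set_cache/2`
  # defined below wraps this pattern):
  #
  #     user
  #     |> User.update_changeset(%{name: "Lain", hide_follows: true})
  #     |> User.update_and_set_cache()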

  defp put_fields(changeset) do
    if raw_fields = get_change(changeset, :raw_fields) do
      raw_fields =
        raw_fields
        |> Enum.filter(fn %{"name" => n} -> n != "" end)

      fields =
        raw_fields
        |> Enum.map(fn f -> Map.update!(f, "value", &parse_fields(&1)) end)

      changeset
      |> put_change(:raw_fields, raw_fields)
      |> put_change(:fields, fields)
    else
      changeset
    end
  end

  defp parse_fields(value) do
    value
    |> Formatter.linkify(mentions_format: :full)
    |> elem(0)
  end

  defp put_emoji(changeset) do
    bio = get_change(changeset, :bio)
    name = get_change(changeset, :name)

    if bio || name do
      emoji = Map.merge(Emoji.Formatter.get_emoji_map(bio), Emoji.Formatter.get_emoji_map(name))
      put_change(changeset, :emoji, emoji)
    else
      changeset
    end
  end

  defp put_change_if_present(changeset, map_field, value_function) do
    if value = get_change(changeset, map_field) do
      with {:ok, new_value} <- value_function.(value) do
        put_change(changeset, map_field, new_value)
      else
        _ -> changeset
      end
    else
      changeset
    end
  end

  defp put_upload(value, type) do
    with %Plug.Upload{} <- value,
         {:ok, object} <- ActivityPub.upload(value, type: type) do
      {:ok, object.data}
    end
  end

  def update_as_admin_changeset(struct, params) do
    struct
    |> update_changeset(params)
    |> cast(params, [:email])
    |> delete_change(:also_known_as)
    |> unique_constraint(:email)
    |> validate_format(:email, @email_regex)
    |> validate_inclusion(:actor_type, ["Person", "Service"])
  end

  @spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
  def update_as_admin(user, params) do
    params = Map.put(params, "password_confirmation", params["password"])
    changeset = update_as_admin_changeset(user, params)

    if params["password"] do
      reset_password(user, changeset, params)
    else
      User.update_and_set_cache(changeset)
    end
  end

  def password_update_changeset(struct, params) do
    struct
    |> cast(params, [:password, :password_confirmation])
    |> validate_required([:password, :password_confirmation])
    |> validate_confirmation(:password)
    |> put_password_hash()
    |> put_change(:password_reset_pending, false)
  end

  @spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
  def reset_password(%User{} = user, params) do
    reset_password(user, user, params)
  end

  def reset_password(%User{id: user_id} = user, struct, params) do
    multi =
      Multi.new()
      |> Multi.update(:user, password_update_changeset(struct, params))
      |> Multi.delete_all(:tokens, OAuth.Token.Query.get_by_user(user_id))
      |> Multi.delete_all(:auth, OAuth.Authorization.delete_by_user_query(user))

    case Repo.transaction(multi) do
      {:ok, %{user: user} = _} -> set_cache(user)
      {:error, _, changeset, _} -> {:error, changeset}
    end
  end

  def update_password_reset_pending(user, value) do
    user
    |> change()
    |> put_change(:password_reset_pending, value)
    |> update_and_set_cache()
  end

  def force_password_reset_async(user) do
    BackgroundWorker.enqueue("force_password_reset", %{"user_id" => user.id})
  end

  @spec force_password_reset(User.t()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
  def force_password_reset(user), do: update_password_reset_pending(user, true)
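
  # Illustrative flow (values are placeholders): a forced reset flags the account,
  # and a later `reset_password/2` call clears the flag and revokes OAuth tokens.
  #
  #     {:ok, user} = User.force_password_reset(user)
  #     {:ok, user} = User.reset_password(user, %{password: "new", password_confirmation: "new"})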

  def register_changeset(struct, params \\ %{}, opts \\ []) do
    bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
    name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)

    need_confirmation? =
      if is_nil(opts[:need_confirmation]) do
        Pleroma.Config.get([:instance, :account_activation_required])
      else
        opts[:need_confirmation]
      end

    struct
    |> confirmation_changeset(need_confirmation: need_confirmation?)
    |> cast(params, [:bio, :email, :name, :nickname, :password, :password_confirmation, :emoji])
    |> validate_required([:name, :nickname, :password, :password_confirmation])
    |> validate_confirmation(:password)
    |> unique_constraint(:email)
    |> unique_constraint(:nickname)
    |> validate_exclusion(:nickname, Pleroma.Config.get([User, :restricted_nicknames]))
    |> validate_format(:nickname, local_nickname_regex())
    |> validate_format(:email, @email_regex)
    |> validate_length(:bio, max: bio_limit)
    |> validate_length(:name, min: 1, max: name_limit)
    |> maybe_validate_required_email(opts[:external])
    |> put_password_hash
    |> put_ap_id()
    |> unique_constraint(:ap_id)
    |> put_following_and_follower_address()
  end
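
  # Illustrative registration flow (hypothetical account data); `register/1`
  # below inserts the changeset and runs the post-registration actions:
  #
  #     %User{}
  #     |> User.register_changeset(%{
  #       nickname: "lain",
  #       email: "lain@example.com",
  #       name: "Lain",
  #       password: "secret",
  #       password_confirmation: "secret"
  #     })
  #     |> User.register()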

  def maybe_validate_required_email(changeset, true), do: changeset

  def maybe_validate_required_email(changeset, _) do
    if Pleroma.Config.get([:instance, :account_activation_required]) do
      validate_required(changeset, [:email])
    else
      changeset
    end
  end

  defp put_ap_id(changeset) do
    ap_id = ap_id(%User{nickname: get_field(changeset, :nickname)})
    put_change(changeset, :ap_id, ap_id)
  end

  defp put_following_and_follower_address(changeset) do
    followers = ap_followers(%User{nickname: get_field(changeset, :nickname)})

    changeset
    |> put_change(:follower_address, followers)
  end

  defp autofollow_users(user) do
    candidates = Pleroma.Config.get([:instance, :autofollowed_nicknames])

    autofollowed_users =
      User.Query.build(%{nickname: candidates, local: true, deactivated: false})
      |> Repo.all()

    follow_all(user, autofollowed_users)
  end

  @doc "Inserts provided changeset, performs post-registration actions (confirmation email sending etc.)"
  def register(%Ecto.Changeset{} = changeset) do
    with {:ok, user} <- Repo.insert(changeset) do
      post_register_action(user)
    end
  end

  def post_register_action(%User{} = user) do
    with {:ok, user} <- autofollow_users(user),
         {:ok, user} <- set_cache(user),
         {:ok, _} <- User.WelcomeMessage.post_welcome_message_to_user(user),
         {:ok, _} <- try_send_confirmation_email(user) do
      {:ok, user}
    end
  end

  def try_send_confirmation_email(%User{} = user) do
    if user.confirmation_pending &&
         Pleroma.Config.get([:instance, :account_activation_required]) do
      user
      |> Pleroma.Emails.UserEmail.account_confirmation_email()
      |> Pleroma.Emails.Mailer.deliver_async()

      {:ok, :enqueued}
    else
      {:ok, :noop}
    end
  end

  def try_send_confirmation_email(users) do
    Enum.each(users, &try_send_confirmation_email/1)
  end

  def needs_update?(%User{local: true}), do: false

  def needs_update?(%User{local: false, last_refreshed_at: nil}), do: true

  def needs_update?(%User{local: false} = user) do
    NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86_400
  end

  def needs_update?(_), do: true

  @spec maybe_direct_follow(User.t(), User.t()) :: {:ok, User.t()} | {:error, String.t()}

  # "Locked" (self-locked) users demand explicit authorization of follow requests
  def maybe_direct_follow(%User{} = follower, %User{local: true, locked: true} = followed) do
    follow(follower, followed, :follow_pending)
  end

  def maybe_direct_follow(%User{} = follower, %User{local: true} = followed) do
    follow(follower, followed)
  end

  def maybe_direct_follow(%User{} = follower, %User{} = followed) do
    if not ap_enabled?(followed) do
      follow(follower, followed)
    else
      {:ok, follower}
    end
  end

  @doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
  @spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
  def follow_all(follower, followeds) do
    followeds
    |> Enum.reject(fn followed -> blocks?(follower, followed) || blocks?(followed, follower) end)
    |> Enum.each(&follow(follower, &1, :follow_accept))

    set_cache(follower)
  end

  defdelegate following(user), to: FollowingRelationship

  def follow(%User{} = follower, %User{} = followed, state \\ :follow_accept) do
    deny_follow_blocked = Pleroma.Config.get([:user, :deny_follow_blocked])

    cond do
      followed.deactivated ->
        {:error, "Could not follow user: #{followed.nickname} is deactivated."}

      deny_follow_blocked and blocks?(followed, follower) ->
        {:error, "Could not follow user: #{followed.nickname} blocked you."}

      true ->
        FollowingRelationship.follow(follower, followed, state)

        {:ok, _} = update_follower_count(followed)

        follower
        |> update_following_count()
        |> set_cache()
    end
  end
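
  # Illustrative usage (both users are assumed to exist already):
  #
  #     {:ok, follower} = User.follow(follower, followed)                  # accepted follow
  #     {:ok, follower} = User.follow(follower, followed, :follow_pending) # pending (locked target)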

  def unfollow(%User{ap_id: ap_id}, %User{ap_id: ap_id}) do
    {:error, "Not subscribed!"}
  end

  @spec unfollow(User.t(), User.t()) :: {:ok, User.t(), Activity.t()} | {:error, String.t()}
  def unfollow(%User{} = follower, %User{} = followed) do
    case do_unfollow(follower, followed) do
      {:ok, follower, followed} ->
        {:ok, follower, Utils.fetch_latest_follow(follower, followed)}

      error ->
        error
    end
  end

  @spec do_unfollow(User.t(), User.t()) :: {:ok, User.t(), User.t()} | {:error, String.t()}
  defp do_unfollow(%User{} = follower, %User{} = followed) do
    case get_follow_state(follower, followed) do
      state when state in [:follow_pending, :follow_accept] ->
        FollowingRelationship.unfollow(follower, followed)
        {:ok, followed} = update_follower_count(followed)

        {:ok, follower} =
          follower
          |> update_following_count()
          |> set_cache()

        {:ok, follower, followed}

      nil ->
        {:error, "Not subscribed!"}
    end
  end

  defdelegate following?(follower, followed), to: FollowingRelationship

  @doc "Returns the follow state as a Pleroma.FollowingRelationship.State value"
  def get_follow_state(%User{} = follower, %User{} = following) do
    following_relationship = FollowingRelationship.get(follower, following)
    get_follow_state(follower, following, following_relationship)
  end

  def get_follow_state(
        %User{} = follower,
        %User{} = following,
        following_relationship
      ) do
    case {following_relationship, following.local} do
      {nil, false} ->
        case Utils.fetch_latest_follow(follower, following) do
          %Activity{data: %{"state" => state}} when state in ["pending", "accept"] ->
            FollowingRelationship.state_to_enum(state)

          _ ->
            nil
        end

      {%{state: state}, _} ->
        state

      {nil, _} ->
        nil
    end
  end

  def locked?(%User{} = user) do
    user.locked || false
  end

  def get_by_id(id) do
    Repo.get_by(User, id: id)
  end

  def get_by_ap_id(ap_id) do
    Repo.get_by(User, ap_id: ap_id)
  end

  def get_all_by_ap_id(ap_ids) do
    from(u in __MODULE__,
      where: u.ap_id in ^ap_ids
    )
    |> Repo.all()
  end

  def get_all_by_ids(ids) do
    from(u in __MODULE__, where: u.id in ^ids)
    |> Repo.all()
  end

  # This is mostly an SPC migration fix. This guesses the user nickname by taking the last part
  # of the ap_id and the domain and tries to get that user
  def get_by_guessed_nickname(ap_id) do
    domain = URI.parse(ap_id).host
    name = List.last(String.split(ap_id, "/"))
    nickname = "#{name}@#{domain}"

    get_cached_by_nickname(nickname)
  end

  def set_cache({:ok, user}), do: set_cache(user)
  def set_cache({:error, err}), do: {:error, err}

  def set_cache(%User{} = user) do
    Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
    Cachex.put(:user_cache, "nickname:#{user.nickname}", user)
    Cachex.put(:user_cache, "friends_ap_ids:#{user.nickname}", get_user_friends_ap_ids(user))
    {:ok, user}
  end

  def update_and_set_cache(struct, params) do
    struct
    |> update_changeset(params)
    |> update_and_set_cache()
  end

  def update_and_set_cache(changeset) do
    with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
      set_cache(user)
    end
  end

  def get_user_friends_ap_ids(user) do
    from(u in User.get_friends_query(user), select: u.ap_id)
    |> Repo.all()
  end

  @spec get_cached_user_friends_ap_ids(User.t()) :: [String.t()]
  def get_cached_user_friends_ap_ids(user) do
    Cachex.fetch!(:user_cache, "friends_ap_ids:#{user.ap_id}", fn _ ->
      get_user_friends_ap_ids(user)
    end)
  end

  def invalidate_cache(user) do
    Cachex.del(:user_cache, "ap_id:#{user.ap_id}")
    Cachex.del(:user_cache, "nickname:#{user.nickname}")
    Cachex.del(:user_cache, "friends_ap_ids:#{user.ap_id}")
  end

  @spec get_cached_by_ap_id(String.t()) :: User.t() | nil
  def get_cached_by_ap_id(ap_id) do
    key = "ap_id:#{ap_id}"

    with {:ok, nil} <- Cachex.get(:user_cache, key),
         user when not is_nil(user) <- get_by_ap_id(ap_id),
         {:ok, true} <- Cachex.put(:user_cache, key, user) do
      user
    else
      {:ok, user} -> user
      nil -> nil
    end
  end

  def get_cached_by_id(id) do
    key = "id:#{id}"

    ap_id =
      Cachex.fetch!(:user_cache, key, fn _ ->
        user = get_by_id(id)

        if user do
          Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
          {:commit, user.ap_id}
        else
          {:ignore, ""}
        end
      end)

    get_cached_by_ap_id(ap_id)
  end

  def get_cached_by_nickname(nickname) do
    key = "nickname:#{nickname}"

    Cachex.fetch!(:user_cache, key, fn ->
      case get_or_fetch_by_nickname(nickname) do
        {:ok, user} -> {:commit, user}
        {:error, _error} -> {:ignore, nil}
      end
    end)
  end
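
  # Illustrative lookups (the nickname and AP ID are made up); cached variants fall
  # back to the database, and `get_cached_by_nickname/1` may fetch remote users:
  #
  #     User.get_cached_by_nickname("lain")
  #     User.get_cached_by_ap_id("https://example.com/users/lain")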

  def get_cached_by_nickname_or_id(nickname_or_id, opts \\ []) do
    restrict_to_local = Pleroma.Config.get([:instance, :limit_to_local_content])

    cond do
      is_integer(nickname_or_id) or FlakeId.flake_id?(nickname_or_id) ->
        get_cached_by_id(nickname_or_id) || get_cached_by_nickname(nickname_or_id)

      restrict_to_local == false or not String.contains?(nickname_or_id, "@") ->
        get_cached_by_nickname(nickname_or_id)

      restrict_to_local == :unauthenticated and match?(%User{}, opts[:for]) ->
        get_cached_by_nickname(nickname_or_id)

      true ->
        nil
    end
  end

  @spec get_by_nickname(String.t()) :: User.t() | nil
  def get_by_nickname(nickname) do
    Repo.get_by(User, nickname: nickname) ||
      if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()})i, nickname) do
        Repo.get_by(User, nickname: local_nickname(nickname))
      end
  end

  def get_by_email(email), do: Repo.get_by(User, email: email)

  def get_by_nickname_or_email(nickname_or_email) do
    get_by_nickname(nickname_or_email) || get_by_email(nickname_or_email)
  end

  def fetch_by_nickname(nickname), do: ActivityPub.make_user_from_nickname(nickname)

  def get_or_fetch_by_nickname(nickname) do
    with %User{} = user <- get_by_nickname(nickname) do
      {:ok, user}
    else
      _e ->
        with [_nick, _domain] <- String.split(nickname, "@"),
             {:ok, user} <- fetch_by_nickname(nickname) do
          {:ok, user}
        else
          _e -> {:error, "not found " <> nickname}
        end
    end
  end

  @spec get_followers_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
  def get_followers_query(%User{} = user, nil) do
    User.Query.build(%{followers: user, deactivated: false})
  end

  def get_followers_query(user, page) do
    user
    |> get_followers_query(nil)
    |> User.Query.paginate(page, 20)
  end

  @spec get_followers_query(User.t()) :: Ecto.Query.t()
  def get_followers_query(user), do: get_followers_query(user, nil)

  @spec get_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
  def get_followers(user, page \\ nil) do
    user
    |> get_followers_query(page)
    |> Repo.all()
  end

  @spec get_external_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
  def get_external_followers(user, page \\ nil) do
    user
    |> get_followers_query(page)
    |> User.Query.build(%{external: true})
    |> Repo.all()
  end

  def get_followers_ids(user, page \\ nil) do
    user
    |> get_followers_query(page)
    |> select([u], u.id)
    |> Repo.all()
  end

  @spec get_friends_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
  def get_friends_query(%User{} = user, nil) do
    User.Query.build(%{friends: user, deactivated: false})
  end

  def get_friends_query(user, page) do
    user
    |> get_friends_query(nil)
    |> User.Query.paginate(page, 20)
  end

  @spec get_friends_query(User.t()) :: Ecto.Query.t()
  def get_friends_query(user), do: get_friends_query(user, nil)

  def get_friends(user, page \\ nil) do
    user
    |> get_friends_query(page)
    |> Repo.all()
  end

  def get_friends_ap_ids(user) do
    user
    |> get_friends_query(nil)
    |> select([u], u.ap_id)
    |> Repo.all()
  end

  def get_friends_ids(user, page \\ nil) do
    user
    |> get_friends_query(page)
    |> select([u], u.id)
    |> Repo.all()
  end

  defdelegate get_follow_requests(user), to: FollowingRelationship

  def increase_note_count(%User{} = user) do
    User
    |> where(id: ^user.id)
    |> update([u], inc: [note_count: 1])
    |> select([u], u)
    |> Repo.update_all([])
    |> case do
      {1, [user]} -> set_cache(user)
      _ -> {:error, user}
    end
  end

  def decrease_note_count(%User{} = user) do
    User
    |> where(id: ^user.id)
    |> update([u],
      set: [
        note_count: fragment("greatest(0, note_count - 1)")
      ]
    )
    |> select([u], u)
    |> Repo.update_all([])
    |> case do
      {1, [user]} -> set_cache(user)
      _ -> {:error, user}
    end
  end

  def update_note_count(%User{} = user, note_count \\ nil) do
    note_count =
      note_count ||
        from(
          a in Object,
          where: fragment("?->>'actor' = ? and ?->>'type' = 'Note'", a.data, ^user.ap_id, a.data),
          select: count(a.id)
        )
        |> Repo.one()

    user
    |> cast(%{note_count: note_count}, [:note_count])
    |> update_and_set_cache()
  end

  @spec maybe_fetch_follow_information(User.t()) :: User.t()
  def maybe_fetch_follow_information(user) do
    with {:ok, user} <- fetch_follow_information(user) do
      user
    else
      e ->
        Logger.error("Follower/Following counter update for #{user.ap_id} failed.\n#{inspect(e)}")

        user
    end
  end

  def fetch_follow_information(user) do
    with {:ok, info} <- ActivityPub.fetch_follow_information_for_user(user) do
      user
      |> follow_information_changeset(info)
      |> update_and_set_cache()
    end
  end

  defp follow_information_changeset(user, params) do
    user
    |> cast(params, [
      :hide_followers,
      :hide_follows,
      :follower_count,
      :following_count,
      :hide_followers_count,
      :hide_follows_count
    ])
  end

  def update_follower_count(%User{} = user) do
    if user.local or !Pleroma.Config.get([:instance, :external_user_synchronization]) do
      follower_count_query =
        User.Query.build(%{followers: user, deactivated: false})
        |> select([u], %{count: count(u.id)})

      User
      |> where(id: ^user.id)
      |> join(:inner, [u], s in subquery(follower_count_query))
      |> update([u, s],
        set: [follower_count: s.count]
      )
      |> select([u], u)
      |> Repo.update_all([])
      |> case do
        {1, [user]} -> set_cache(user)
        _ -> {:error, user}
      end
    else
      {:ok, maybe_fetch_follow_information(user)}
    end
  end

  @spec update_following_count(User.t()) :: User.t()
  def update_following_count(%User{local: false} = user) do
    if Pleroma.Config.get([:instance, :external_user_synchronization]) do
      maybe_fetch_follow_information(user)
    else
      user
    end
  end

  def update_following_count(%User{local: true} = user) do
    following_count = FollowingRelationship.following_count(user)

    user
    |> follow_information_changeset(%{following_count: following_count})
    |> Repo.update!()
  end

  def set_unread_conversation_count(%User{local: true} = user) do
    unread_query = Participation.unread_conversation_count_for_user(user)

    User
    |> join(:inner, [u], p in subquery(unread_query))
    |> update([u, p],
      set: [unread_conversation_count: p.count]
    )
    |> where([u], u.id == ^user.id)
    |> select([u], u)
    |> Repo.update_all([])
    |> case do
      {1, [user]} -> set_cache(user)
      _ -> {:error, user}
    end
  end

  def set_unread_conversation_count(user), do: {:ok, user}

  def increment_unread_conversation_count(conversation, %User{local: true} = user) do
    unread_query =
      Participation.unread_conversation_count_for_user(user)
      |> where([p], p.conversation_id == ^conversation.id)

    User
    |> join(:inner, [u], p in subquery(unread_query))
    |> update([u, p],
      inc: [unread_conversation_count: 1]
    )
    |> where([u], u.id == ^user.id)
    |> where([u, p], p.count == 0)
    |> select([u], u)
    |> Repo.update_all([])
    |> case do
      {1, [user]} -> set_cache(user)
      _ -> {:error, user}
    end
  end

  def increment_unread_conversation_count(_, user), do: {:ok, user}

  @spec get_users_from_set([String.t()], keyword()) :: [User.t()]
  def get_users_from_set(ap_ids, opts \\ []) do
    local_only = Keyword.get(opts, :local_only, true)
    criteria = %{ap_id: ap_ids, deactivated: false}
    criteria = if local_only, do: Map.put(criteria, :local, true), else: criteria

    User.Query.build(criteria)
    |> Repo.all()
  end

  @spec get_recipients_from_activity(Activity.t()) :: [User.t()]
  def get_recipients_from_activity(%Activity{recipients: to, actor: actor}) do
    to = [actor | to]

    query = User.Query.build(%{recipients_from_activity: to, local: true, deactivated: false})

    query
    |> Repo.all()
  end

  @spec mute(User.t(), User.t(), boolean()) ::
          {:ok, list(UserRelationship.t())} | {:error, String.t()}
  def mute(%User{} = muter, %User{} = mutee, notifications? \\ true) do
    add_to_mutes(muter, mutee, notifications?)
  end

  def unmute(%User{} = muter, %User{} = mutee) do
    remove_from_mutes(muter, mutee)
  end
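
  # A minimal usage sketch (the two users here are hypothetical); `mute/3` also mutes
  # notifications by default, which is why both checks below come back true:
  #
  #     {:ok, _relationships} = User.mute(muter, mutee)
  #     User.mutes?(muter, mutee)               #=> true
  #     User.muted_notifications?(muter, mutee) #=> true
  #     {:ok, _} = User.unmute(muter, mutee)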

  def subscribe(%User{} = subscriber, %User{} = target) do
    deny_follow_blocked = Pleroma.Config.get([:user, :deny_follow_blocked])

    if blocks?(target, subscriber) and deny_follow_blocked do
      {:error, "Could not subscribe: #{target.nickname} is blocking you"}
    else
      # Note: the relationship is inverse: subscriber acts as relationship target
      UserRelationship.create_inverse_subscription(target, subscriber)
    end
  end

  def subscribe(%User{} = subscriber, %{ap_id: ap_id}) do
    with %User{} = subscribee <- get_cached_by_ap_id(ap_id) do
      subscribe(subscriber, subscribee)
    end
  end

  def unsubscribe(%User{} = unsubscriber, %User{} = target) do
    # Note: the relationship is inverse: subscriber acts as relationship target
    UserRelationship.delete_inverse_subscription(target, unsubscriber)
  end

  def unsubscribe(%User{} = unsubscriber, %{ap_id: ap_id}) do
    with %User{} = user <- get_cached_by_ap_id(ap_id) do
      unsubscribe(unsubscriber, user)
    end
  end
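
  # Usage sketch (users are hypothetical). The subscription is stored inverted, with the
  # subscribed-to user as the relationship source and the subscriber as its target, so
  # lookups such as `subscribed_to?/2` below also query it in that inverted direction:
  #
  #     {:ok, _} = User.subscribe(subscriber, target)
  #     User.subscribed_to?(subscriber, target) #=> true
  #     {:ok, _} = User.unsubscribe(subscriber, target)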

  def block(%User{} = blocker, %User{} = blocked) do
    # sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213)
    blocker =
      if following?(blocker, blocked) do
        {:ok, blocker, _} = unfollow(blocker, blocked)
        blocker
      else
        blocker
      end

    # clear any requested follows as well
    blocked =
      case CommonAPI.reject_follow_request(blocked, blocker) do
        {:ok, %User{} = updated_blocked} -> updated_blocked
        nil -> blocked
      end

    unsubscribe(blocked, blocker)

    if following?(blocked, blocker), do: unfollow(blocked, blocker)

    {:ok, blocker} = update_follower_count(blocker)
    {:ok, blocker, _} = Participation.mark_all_as_read(blocker, blocked)
    add_to_block(blocker, blocked)
  end

  # helper to handle the block given only an actor's AP id
  def block(%User{} = blocker, %{ap_id: ap_id}) do
    block(blocker, get_cached_by_ap_id(ap_id))
  end

  def unblock(%User{} = blocker, %User{} = blocked) do
    remove_from_block(blocker, blocked)
  end

  # helper to handle the unblock given only an actor's AP id
  def unblock(%User{} = blocker, %{ap_id: ap_id}) do
    unblock(blocker, get_cached_by_ap_id(ap_id))
  end
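
  # Rough flow of a block as implemented above (users are hypothetical): follows in both
  # directions are severed, pending follow requests are rejected, subscriptions are
  # dropped, and only then is the block relationship itself recorded.
  #
  #     {:ok, _} = User.block(blocker, blocked)
  #     User.blocks?(blocker, blocked) #=> true
  #     {:ok, _} = User.unblock(blocker, blocked)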

  def mutes?(nil, _), do: false
  def mutes?(%User{} = user, %User{} = target), do: mutes_user?(user, target)

  def mutes_user?(%User{} = user, %User{} = target) do
    UserRelationship.mute_exists?(user, target)
  end

  @spec muted_notifications?(User.t() | nil, User.t() | map()) :: boolean()
  def muted_notifications?(nil, _), do: false

  def muted_notifications?(%User{} = user, %User{} = target),
    do: UserRelationship.notification_mute_exists?(user, target)

  def blocks?(nil, _), do: false

  def blocks?(%User{} = user, %User{} = target) do
    blocks_user?(user, target) ||
      (blocks_domain?(user, target) and not User.following?(user, target))
  end

  def blocks_user?(%User{} = user, %User{} = target) do
    UserRelationship.block_exists?(user, target)
  end

  def blocks_user?(_, _), do: false

  def blocks_domain?(%User{} = user, %User{} = target) do
    domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.domain_blocks)
    %{host: host} = URI.parse(target.ap_id)
    Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, host)
  end

  def blocks_domain?(_, _), do: false
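
  # Sketch of domain-block matching (values are hypothetical): the block list is turned
  # into subdomain-aware regexes, so an entry for "example.org" is intended to match
  # hosts such as "social.example.org" as well.
  #
  #     blocker = %User{blocker | domain_blocks: ["example.org"]}
  #     User.blocks_domain?(blocker, %User{ap_id: "https://social.example.org/users/bob"})
  #     #=> true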

  def subscribed_to?(%User{} = user, %User{} = target) do
    # Note: the relationship is inverse: subscriber acts as relationship target
    UserRelationship.inverse_subscription_exists?(target, user)
  end

  def subscribed_to?(%User{} = user, %{ap_id: ap_id}) do
    with %User{} = target <- get_cached_by_ap_id(ap_id) do
      subscribed_to?(user, target)
    end
  end

  @doc """
  Returns map of outgoing (blocked, muted etc.) relationships' user AP IDs by relation type.
  E.g. `outgoing_relationships_ap_ids(user, [:block])` -> `%{block: ["https://some.site/users/userapid"]}`
  """
  @spec outgoing_relationships_ap_ids(User.t(), list(atom())) :: %{atom() => list(String.t())}
  def outgoing_relationships_ap_ids(_user, []), do: %{}

  def outgoing_relationships_ap_ids(nil, _relationship_types), do: %{}

  def outgoing_relationships_ap_ids(%User{} = user, relationship_types)
      when is_list(relationship_types) do
    db_result =
      user
      |> assoc(:outgoing_relationships)
      |> join(:inner, [user_rel], u in assoc(user_rel, :target))
      |> where([user_rel, u], user_rel.relationship_type in ^relationship_types)
      |> select([user_rel, u], [user_rel.relationship_type, fragment("array_agg(?)", u.ap_id)])
      |> group_by([user_rel, u], user_rel.relationship_type)
      |> Repo.all()
      |> Enum.into(%{}, fn [k, v] -> {k, v} end)

    Enum.into(
      relationship_types,
      %{},
      fn rel_type -> {rel_type, db_result[rel_type] || []} end
    )
  end
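
  # Result shape sketch (AP IDs are hypothetical): every requested type is always present
  # as a key, with an empty list when no relationships of that type exist.
  #
  #     outgoing_relationships_ap_ids(user, [:block, :mute])
  #     #=> %{block: ["https://some.site/users/userapid"], mute: []}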

  def incoming_relationships_ungrouped_ap_ids(user, relationship_types, ap_ids \\ nil)

  def incoming_relationships_ungrouped_ap_ids(_user, [], _ap_ids), do: []

  def incoming_relationships_ungrouped_ap_ids(nil, _relationship_types, _ap_ids), do: []

  def incoming_relationships_ungrouped_ap_ids(%User{} = user, relationship_types, ap_ids)
      when is_list(relationship_types) do
    user
    |> assoc(:incoming_relationships)
    |> join(:inner, [user_rel], u in assoc(user_rel, :source))
    |> where([user_rel, u], user_rel.relationship_type in ^relationship_types)
    |> maybe_filter_on_ap_id(ap_ids)
    |> select([user_rel, u], u.ap_id)
    |> distinct(true)
    |> Repo.all()
  end

  defp maybe_filter_on_ap_id(query, ap_ids) when is_list(ap_ids) do
    where(query, [user_rel, u], u.ap_id in ^ap_ids)
  end

  defp maybe_filter_on_ap_id(query, _ap_ids), do: query

  def deactivate_async(user, status \\ true) do
    BackgroundWorker.enqueue("deactivate_user", %{"user_id" => user.id, "status" => status})
  end

  def deactivate(user, status \\ true)

  def deactivate(users, status) when is_list(users) do
    Repo.transaction(fn ->
      for user <- users, do: deactivate(user, status)
    end)
  end

  def deactivate(%User{} = user, status) do
    with {:ok, user} <- set_activation_status(user, status) do
      user
      |> get_followers()
      |> Enum.filter(& &1.local)
      |> Enum.each(&set_cache(update_following_count(&1)))

      # Only update local user counts; remote counts will be updated during the next pull.
      user
      |> get_friends()
      |> Enum.filter(& &1.local)
      |> Enum.each(&do_unfollow(user, &1))

      {:ok, user}
    end
  end
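
  # Note on the two entry points: `deactivate_async/2` only enqueues a "deactivate_user"
  # background job (handled by `perform(:deactivate_async, ...)` below), whereas
  # `deactivate/2` changes the activation status synchronously and refreshes the
  # counters of the affected local users.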

  def update_notification_settings(%User{} = user, settings) do
    user
    |> cast(%{notification_settings: settings}, [])
    |> cast_embed(:notification_settings)
    |> validate_required([:notification_settings])
    |> update_and_set_cache()
  end

  def delete(users) when is_list(users) do
    for user <- users, do: delete(user)
  end

  def delete(%User{} = user) do
    BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
  end

  defp delete_and_invalidate_cache(%User{} = user) do
    invalidate_cache(user)
    Repo.delete(user)
  end

  defp delete_or_deactivate(%User{local: false} = user), do: delete_and_invalidate_cache(user)

  defp delete_or_deactivate(%User{local: true} = user) do
    status = account_status(user)

    if status == :confirmation_pending do
      delete_and_invalidate_cache(user)
    else
      user
      |> change(%{deactivated: true, email: nil})
      |> update_and_set_cache()
    end
  end

  def perform(:force_password_reset, user), do: force_password_reset(user)

  @spec perform(atom(), User.t()) :: {:ok, User.t()}
  def perform(:delete, %User{} = user) do
    # Remove all relationships
    user
    |> get_followers()
    |> Enum.each(fn follower ->
      ActivityPub.unfollow(follower, user)
      unfollow(follower, user)
    end)

    user
    |> get_friends()
    |> Enum.each(fn followed ->
      ActivityPub.unfollow(user, followed)
      unfollow(user, followed)
    end)

    delete_user_activities(user)
    delete_notifications_from_user_activities(user)

    delete_outgoing_pending_follow_requests(user)

    delete_or_deactivate(user)
  end

  def perform(:deactivate_async, user, status), do: deactivate(user, status)

  @spec perform(atom(), User.t(), list()) :: list() | {:error, any()}
  def perform(:blocks_import, %User{} = blocker, blocked_identifiers)
      when is_list(blocked_identifiers) do
    Enum.map(
      blocked_identifiers,
      fn blocked_identifier ->
        with {:ok, %User{} = blocked} <- get_or_fetch(blocked_identifier),
             {:ok, _user_block} <- block(blocker, blocked),
             {:ok, _} <- ActivityPub.block(blocker, blocked) do
          blocked
        else
          err ->
            Logger.debug("blocks_import failed for #{blocked_identifier} with: #{inspect(err)}")
            err
        end
      end
    )
  end

  def perform(:follow_import, %User{} = follower, followed_identifiers)
      when is_list(followed_identifiers) do
    Enum.map(
      followed_identifiers,
      fn followed_identifier ->
        with {:ok, %User{} = followed} <- get_or_fetch(followed_identifier),
             {:ok, follower} <- maybe_direct_follow(follower, followed),
             {:ok, _} <- ActivityPub.follow(follower, followed) do
          followed
        else
          err ->
            Logger.debug("follow_import failed for #{followed_identifier} with: #{inspect(err)}")
            err
        end
      end
    )
  end

  @spec external_users_query() :: Ecto.Query.t()
  def external_users_query do
    User.Query.build(%{
      external: true,
      active: true,
      order_by: :id
    })
  end

  @spec external_users(keyword()) :: [User.t()]
  def external_users(opts \\ []) do
    query =
      external_users_query()
      |> select([u], struct(u, [:id, :ap_id]))

    query =
      if opts[:max_id],
        do: where(query, [u], u.id > ^opts[:max_id]),
        else: query

    query =
      if opts[:limit],
        do: limit(query, ^opts[:limit]),
        else: query

    Repo.all(query)
  end
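
  # Keyset-pagination sketch (sizes and ids are hypothetical): `:max_id` is a lower
  # bound, so callers can walk all external users by feeding the last seen id back in.
  #
  #     batch = User.external_users(limit: 50)
  #     _next = User.external_users(limit: 50, max_id: List.last(batch).id)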

  def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
    BackgroundWorker.enqueue("blocks_import", %{
      "blocker_id" => blocker.id,
      "blocked_identifiers" => blocked_identifiers
    })
  end

  def follow_import(%User{} = follower, followed_identifiers)
      when is_list(followed_identifiers) do
    BackgroundWorker.enqueue("follow_import", %{
      "follower_id" => follower.id,
      "followed_identifiers" => followed_identifiers
    })
  end

  def delete_notifications_from_user_activities(%User{ap_id: ap_id}) do
    Notification
    |> join(:inner, [n], activity in assoc(n, :activity))
    |> where([n, a], fragment("? = ?", a.actor, ^ap_id))
    |> Repo.delete_all()
  end

  def delete_user_activities(%User{ap_id: ap_id} = user) do
    ap_id
    |> Activity.Queries.by_actor()
    |> RepoStreamer.chunk_stream(50)
    |> Stream.each(fn activities ->
      Enum.each(activities, fn activity -> delete_activity(activity, user) end)
    end)
    |> Stream.run()
  end

  defp delete_activity(%{data: %{"type" => "Create", "object" => object}} = activity, user) do
    with {_, %Object{}} <- {:find_object, Object.get_by_ap_id(object)},
         {:ok, delete_data, _} <- Builder.delete(user, object) do
      Pipeline.common_pipeline(delete_data, local: user.local)
    else
      {:find_object, nil} ->
        # We have the create activity, but not the object, it was probably pruned.
        # Insert a tombstone and try again.
        with {:ok, tombstone_data, _} <- Builder.tombstone(user.ap_id, object),
             {:ok, _tombstone} <- Object.create(tombstone_data) do
          delete_activity(activity, user)
        end

      e ->
        Logger.error("Could not delete #{object} created by #{activity.data["ap_id"]}")
        Logger.error("Error: #{inspect(e)}")
    end
  end

  defp delete_activity(%{data: %{"type" => type}} = activity, user)
       when type in ["Like", "Announce"] do
    {:ok, undo, _} = Builder.undo(user, activity)
    Pipeline.common_pipeline(undo, local: user.local)
  end

  defp delete_activity(_activity, _user), do: "Doing nothing"

  defp delete_outgoing_pending_follow_requests(user) do
    user
    |> FollowingRelationship.outgoing_pending_follow_requests_query()
    |> Repo.delete_all()
  end

  def html_filter_policy(%User{no_rich_text: true}) do
    Pleroma.HTML.Scrubber.TwitterText
  end

  def html_filter_policy(_), do: Pleroma.Config.get([:markup, :scrub_policy])

  def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)

  def get_or_fetch_by_ap_id(ap_id) do
    cached_user = get_cached_by_ap_id(ap_id)

    maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)

    case {cached_user, maybe_fetched_user} do
      {_, {:ok, %User{} = user}} ->
        {:ok, user}

      {%User{} = user, _} ->
        {:ok, user}

      _ ->
        {:error, :not_found}
    end
  end
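
  # Precedence sketch for the case above: a fresh successful fetch wins, the cached user
  # is returned when no refetch happened (or the refetch failed), and {:error, :not_found}
  # is returned only when neither source produced a user.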

  @doc """
  Creates an internal service actor by URI if missing.
  Optionally takes nickname for addressing.
  """
  @spec get_or_create_service_actor_by_ap_id(String.t(), String.t()) :: User.t() | nil
  def get_or_create_service_actor_by_ap_id(uri, nickname) do
    {_, user} =
      case get_cached_by_ap_id(uri) do
        nil ->
          with {:error, %{errors: errors}} <- create_service_actor(uri, nickname) do
            Logger.error("Cannot create service actor: #{uri}/.\n#{inspect(errors)}")
            {:error, nil}
          end

        %User{invisible: false} = user ->
          set_invisible(user)

        user ->
          {:ok, user}
      end

    user
  end

  @spec set_invisible(User.t()) :: {:ok, User.t()}
  defp set_invisible(user) do
    user
    |> change(%{invisible: true})
    |> update_and_set_cache()
  end

  @spec create_service_actor(String.t(), String.t()) ::
          {:ok, User.t()} | {:error, Ecto.Changeset.t()}
  defp create_service_actor(uri, nickname) do
    %User{
      invisible: true,
      local: true,
      ap_id: uri,
      nickname: nickname,
      follower_address: uri <> "/followers"
    }
    |> change
    |> unique_constraint(:nickname)
    |> Repo.insert()
    |> set_cache()
  end

  def public_key(%{public_key: public_key_pem}) when is_binary(public_key_pem) do
    key =
      public_key_pem
      |> :public_key.pem_decode()
      |> hd()
      |> :public_key.pem_entry_decode()

    {:ok, key}
  end

  def public_key(_), do: {:error, "key not found"}

  def get_public_key_for_ap_id(ap_id) do
    with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
         {:ok, public_key} <- public_key(user) do
      {:ok, public_key}
    else
      _ -> :error
    end
  end
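
  # Minimal sketch (the AP id is hypothetical): fetches the remote actor if it is not
  # cached yet and decodes the PEM stored in its `public_key` field.
  #
  #     {:ok, _key} = User.get_public_key_for_ap_id("https://remote.example/users/alice")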

  def ap_enabled?(%User{local: true}), do: true
  def ap_enabled?(%User{ap_enabled: ap_enabled}), do: ap_enabled
  def ap_enabled?(_), do: false

  @doc "Gets or fetches a user by URI or nickname."
  @spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
  def get_or_fetch("http" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
  def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname)

  # wait a period of time and return newest version of the User structs
  # this is because we have synchronous follow APIs and need to simulate them
  # with an async handshake
  def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do
    with %User{} = a <- get_cached_by_id(a.id),
         %User{} = b <- get_cached_by_id(b.id) do
      {:ok, a, b}
    else
      nil -> :error
    end
  end

  def wait_and_refresh(timeout, %User{} = a, %User{} = b) do
    with :ok <- :timer.sleep(timeout),
         %User{} = a <- get_cached_by_id(a.id),
         %User{} = b <- get_cached_by_id(b.id) do
      {:ok, a, b}
    else
      nil -> :error
    end
  end

  def parse_bio(bio) when is_binary(bio) and bio != "" do
    bio
    |> CommonUtils.format_input("text/plain", mentions_format: :full)
    |> elem(0)
  end

  def parse_bio(_), do: ""

  def parse_bio(bio, user) when is_binary(bio) and bio != "" do
    # TODO: get profile URLs other than user.ap_id
    profile_urls = [user.ap_id]

    bio
    |> CommonUtils.format_input("text/plain",
      mentions_format: :full,
      rel: &RelMe.maybe_put_rel_me(&1, profile_urls)
    )
    |> elem(0)
  end

  def parse_bio(_, _), do: ""

  def tag(user_identifiers, tags) when is_list(user_identifiers) do
    Repo.transaction(fn ->
      for user_identifier <- user_identifiers, do: tag(user_identifier, tags)
    end)
  end

  def tag(nickname, tags) when is_binary(nickname),
    do: tag(get_by_nickname(nickname), tags)

  def tag(%User{} = user, tags),
    do: update_tags(user, Enum.uniq((user.tags || []) ++ normalize_tags(tags)))

  def untag(user_identifiers, tags) when is_list(user_identifiers) do
    Repo.transaction(fn ->
      for user_identifier <- user_identifiers, do: untag(user_identifier, tags)
    end)
  end

  def untag(nickname, tags) when is_binary(nickname),
    do: untag(get_by_nickname(nickname), tags)

  def untag(%User{} = user, tags),
    do: update_tags(user, (user.tags || []) -- normalize_tags(tags))
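
  # Usage sketch (nickname and tag are hypothetical, and the user is assumed to start
  # with no tags); tags are normalized to lowercase and deduplicated:
  #
  #     %User{tags: ["mrf_tag:media-force-nsfw"]} = User.tag("lain", ["MRF_TAG:Media-Force-NSFW"])
  #     %User{tags: []} = User.untag("lain", ["mrf_tag:media-force-nsfw"])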

  defp update_tags(%User{} = user, new_tags) do
    {:ok, updated_user} =
      user
      |> change(%{tags: new_tags})
      |> update_and_set_cache()

    updated_user
  end

  defp normalize_tags(tags) do
    [tags]
    |> List.flatten()
    |> Enum.map(&String.downcase/1)
  end

  defp local_nickname_regex do
    if Pleroma.Config.get([:instance, :extended_nickname_format]) do
      @extended_local_nickname_regex
    else
      @strict_local_nickname_regex
    end
  end

  def local_nickname(nickname_or_mention) do
    nickname_or_mention
    |> full_nickname()
    |> String.split("@")
    |> hd()
  end

  def full_nickname(nickname_or_mention),
    do: String.trim_leading(nickname_or_mention, "@")
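
  # Hypothetical examples:
  #
  #     User.full_nickname("@lain@example.org")  #=> "lain@example.org"
  #     User.local_nickname("@lain@example.org") #=> "lain"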

  def error_user(ap_id) do
    %User{
      name: ap_id,
      ap_id: ap_id,
      nickname: "erroruser@example.com",
      inserted_at: NaiveDateTime.utc_now()
    }
  end

  @spec all_superusers() :: [User.t()]
  def all_superusers do
    User.Query.build(%{super_users: true, local: true, deactivated: false})
    |> Repo.all()
  end

  def muting_reblogs?(%User{} = user, %User{} = target) do
    UserRelationship.reblog_mute_exists?(user, target)
  end

  def showing_reblogs?(%User{} = user, %User{} = target) do
    not muting_reblogs?(user, target)
  end

  @doc """
  Returns a query for users with no activity during the given interval of days.

  Inactive users are those who, within the last `inactivity_threshold` days, have neither
  read a notification nor authored any activity (i.e. been an activity's actor).
  Deactivated users will not appear in this list.

  ## Examples

      iex> Pleroma.User.list_inactive_users_query()
      %Ecto.Query{}
  """
  @spec list_inactive_users_query(integer()) :: Ecto.Query.t()
  def list_inactive_users_query(inactivity_threshold \\ 7) do
    negative_inactivity_threshold = -inactivity_threshold
    now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
    # Subqueries are not supported in `where` clauses, join gets too complicated.
    has_read_notifications =
      from(n in Pleroma.Notification,
        where: n.seen == true,
        group_by: n.id,
        having: max(n.updated_at) > datetime_add(^now, ^negative_inactivity_threshold, "day"),
        select: n.user_id
      )
      |> Pleroma.Repo.all()

    from(u in Pleroma.User,
      left_join: a in Pleroma.Activity,
      on: u.ap_id == a.actor,
      where: not is_nil(u.nickname),
      where: u.deactivated != ^true,
      where: u.id not in ^has_read_notifications,
      group_by: u.id,
      having:
        max(a.inserted_at) < datetime_add(^now, ^negative_inactivity_threshold, "day") or
          is_nil(max(a.inserted_at))
    )
  end

  @doc """
  Enable or disable email notifications for user

  ## Examples

      iex> Pleroma.User.switch_email_notifications(%Pleroma.User{email_notifications: %{"digest" => false}}, "digest", true)
      %Pleroma.User{email_notifications: %{"digest" => true}}

      iex> Pleroma.User.switch_email_notifications(%Pleroma.User{email_notifications: %{"digest" => true}}, "digest", false)
      %Pleroma.User{email_notifications: %{"digest" => false}}
  """
  @spec switch_email_notifications(t(), String.t(), boolean()) ::
          {:ok, t()} | {:error, Ecto.Changeset.t()}
  def switch_email_notifications(user, type, status) do
    User.update_email_notifications(user, %{type => status})
  end

  @doc """
  Set `last_digest_emailed_at` value for the user to current time
  """
  @spec touch_last_digest_emailed_at(t()) :: t()
  def touch_last_digest_emailed_at(user) do
    now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)

    {:ok, updated_user} =
      user
      |> change(%{last_digest_emailed_at: now})
      |> update_and_set_cache()

    updated_user
  end

  @spec toggle_confirmation(User.t()) :: {:ok, User.t()} | {:error, Changeset.t()}
  def toggle_confirmation(%User{} = user) do
    user
    |> confirmation_changeset(need_confirmation: !user.confirmation_pending)
    |> update_and_set_cache()
  end

  @spec toggle_confirmation([User.t()]) :: [{:ok, User.t()} | {:error, Changeset.t()}]
  def toggle_confirmation(users) do
    Enum.map(users, &toggle_confirmation/1)
  end

  def get_mascot(%{mascot: %{} = mascot}) when not is_nil(mascot) do
    mascot
  end

  def get_mascot(%{mascot: mascot}) when is_nil(mascot) do
    # use instance-default
    config = Pleroma.Config.get([:assets, :mascots])
    default_mascot = Pleroma.Config.get([:assets, :default_mascot])
    mascot = Keyword.get(config, default_mascot)

    %{
      "id" => "default-mascot",
      "url" => mascot[:url],
      "preview_url" => mascot[:url],
      "pleroma" => %{
        "mime_type" => mascot[:mime_type]
      }
    }
  end

  def ensure_keys_present(%{keys: keys} = user) when not is_nil(keys), do: {:ok, user}

  def ensure_keys_present(%User{} = user) do
    with {:ok, pem} <- Keys.generate_rsa_pem() do
      user
      |> cast(%{keys: pem}, [:keys])
      |> validate_required([:keys])
      |> update_and_set_cache()
    end
  end

  def get_ap_ids_by_nicknames(nicknames) do
    from(u in User,
      where: u.nickname in ^nicknames,
      select: u.ap_id
    )
    |> Repo.all()
  end

  defdelegate search(query, opts \\ []), to: User.Search

  defp put_password_hash(
         %Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
       ) do
    change(changeset, password_hash: Pbkdf2.hash_pwd_salt(password))
  end

  defp put_password_hash(changeset), do: changeset

  def is_internal_user?(%User{nickname: nil}), do: true
  def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
  def is_internal_user?(_), do: false

  # A hack because user delete activities have a fake id for whatever reason
  # TODO: Get rid of this
  def get_delivered_users_by_object_id("pleroma:fake_object_id"), do: []

  def get_delivered_users_by_object_id(object_id) do
    from(u in User,
      inner_join: delivery in assoc(u, :deliveries),
      where: delivery.object_id == ^object_id
    )
    |> Repo.all()
  end

  def change_email(user, email) do
    user
    |> cast(%{email: email}, [:email])
    |> validate_required([:email])
    |> unique_constraint(:email)
    |> validate_format(:email, @email_regex)
    |> update_and_set_cache()
  end

  # Internal function; public one is `deactivate/2`
  defp set_activation_status(user, deactivated) do
    user
    |> cast(%{deactivated: deactivated}, [:deactivated])
    |> update_and_set_cache()
  end

  def update_banner(user, banner) do
    user
    |> cast(%{banner: banner}, [:banner])
    |> update_and_set_cache()
  end

  def update_background(user, background) do
    user
    |> cast(%{background: background}, [:background])
    |> update_and_set_cache()
  end

  def roles(%{is_moderator: is_moderator, is_admin: is_admin}) do
    %{
      admin: is_admin,
      moderator: is_moderator
    }
  end

  def validate_fields(changeset, remote? \\ false) do
    limit_name = if remote?, do: :max_remote_account_fields, else: :max_account_fields
    limit = Pleroma.Config.get([:instance, limit_name], 0)

    changeset
    |> validate_length(:fields, max: limit)
    |> validate_change(:fields, fn :fields, fields ->
      if Enum.all?(fields, &valid_field?/1) do
        []
      else
        [fields: "invalid"]
      end
    end)
  end
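
  # The limits above come from the instance config; a hypothetical configuration sketch
  # (the key names are the ones read here, the values are made up):
  #
  #     config :pleroma, :instance,
  #       max_account_fields: 10,
  #       max_remote_account_fields: 20,
  #       account_field_name_length: 255,
  #       account_field_value_length: 255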

  defp valid_field?(%{"name" => name, "value" => value}) do
    name_limit = Pleroma.Config.get([:instance, :account_field_name_length], 255)
    value_limit = Pleroma.Config.get([:instance, :account_field_value_length], 255)

    is_binary(name) && is_binary(value) && String.length(name) <= name_limit &&
      String.length(value) <= value_limit
  end

  defp valid_field?(_), do: false

  defp truncate_field(%{"name" => name, "value" => value}) do
    {name, _chopped} =
      String.split_at(name, Pleroma.Config.get([:instance, :account_field_name_length], 255))

    {value, _chopped} =
      String.split_at(value, Pleroma.Config.get([:instance, :account_field_value_length], 255))

    %{"name" => name, "value" => value}
  end

  def admin_api_update(user, params) do
    user
    |> cast(params, [
      :is_moderator,
      :is_admin,
      :show_role
    ])
    |> update_and_set_cache()
  end

  @doc "Signs user out of all applications"
  def global_sign_out(user) do
    OAuth.Authorization.delete_user_authorizations(user)
    OAuth.Token.delete_user_tokens(user)
  end

  def mascot_update(user, url) do
    user
    |> cast(%{mascot: url}, [:mascot])
    |> validate_required([:mascot])
    |> update_and_set_cache()
  end

  def mastodon_settings_update(user, settings) do
    user
    |> cast(%{settings: settings}, [:settings])
    |> validate_required([:settings])
    |> update_and_set_cache()
  end

  @spec confirmation_changeset(User.t(), keyword()) :: Changeset.t()
  def confirmation_changeset(user, need_confirmation: need_confirmation?) do
    params =
      if need_confirmation? do
        %{
          confirmation_pending: true,
          confirmation_token: :crypto.strong_rand_bytes(32) |> Base.url_encode64()
        }
      else
        %{
          confirmation_pending: false,
          confirmation_token: nil
        }
      end

    cast(user, params, [:confirmation_pending, :confirmation_token])
  end

  def add_pinnned_activity(user, %Pleroma.Activity{id: id}) do
    if id not in user.pinned_activities do
      max_pinned_statuses = Pleroma.Config.get([:instance, :max_pinned_statuses], 0)
      params = %{pinned_activities: user.pinned_activities ++ [id]}

      user
      |> cast(params, [:pinned_activities])
      |> validate_length(:pinned_activities,
        max: max_pinned_statuses,
        message: "You have already pinned the maximum number of statuses"
      )
    else
      change(user)
    end
    |> update_and_set_cache()
  end

  def remove_pinnned_activity(user, %Pleroma.Activity{id: id}) do
    params = %{pinned_activities: List.delete(user.pinned_activities, id)}

    user
    |> cast(params, [:pinned_activities])
    |> update_and_set_cache()
  end

  def update_email_notifications(user, settings) do
    email_notifications =
      user.email_notifications
      |> Map.merge(settings)
      |> Map.take(["digest"])

    params = %{email_notifications: email_notifications}
    fields = [:email_notifications]

    user
    |> cast(params, fields)
    |> validate_required(fields)
    |> update_and_set_cache()
  end

  defp set_domain_blocks(user, domain_blocks) do
    params = %{domain_blocks: domain_blocks}

    user
    |> cast(params, [:domain_blocks])
    |> validate_required([:domain_blocks])
    |> update_and_set_cache()
  end

  def block_domain(user, domain_blocked) do
    set_domain_blocks(user, Enum.uniq([domain_blocked | user.domain_blocks]))
  end

  def unblock_domain(user, domain_blocked) do
    set_domain_blocks(user, List.delete(user.domain_blocks, domain_blocked))
  end

  @spec add_to_block(User.t(), User.t()) ::
          {:ok, UserRelationship.t()} | {:error, Ecto.Changeset.t()}
  defp add_to_block(%User{} = user, %User{} = blocked) do
    UserRelationship.create_block(user, blocked)
  end

  @spec remove_from_block(User.t(), User.t()) ::
          {:ok, UserRelationship.t()} | {:ok, nil} | {:error, Ecto.Changeset.t()}
  defp remove_from_block(%User{} = user, %User{} = blocked) do
    UserRelationship.delete_block(user, blocked)
  end

  defp add_to_mutes(%User{} = user, %User{} = muted_user, notifications?) do
    with {:ok, user_mute} <- UserRelationship.create_mute(user, muted_user),
         {:ok, user_notification_mute} <-
           (notifications? && UserRelationship.create_notification_mute(user, muted_user)) ||
             {:ok, nil} do
      {:ok, Enum.filter([user_mute, user_notification_mute], & &1)}
    end
  end

  defp remove_from_mutes(user, %User{} = muted_user) do
    with {:ok, user_mute} <- UserRelationship.delete_mute(user, muted_user),
         {:ok, user_notification_mute} <-
           UserRelationship.delete_notification_mute(user, muted_user) do
      {:ok, [user_mute, user_notification_mute]}
    end
  end

  def set_invisible(user, invisible) do
    params = %{invisible: invisible}

    user
    |> cast(params, [:invisible])
    |> validate_required([:invisible])
    |> update_and_set_cache()
  end

  def sanitize_html(%User{} = user) do
    sanitize_html(user, nil)
  end

  # User data that mastodon isn't filtering (treated as plaintext):
  # - field name
  # - display name
  def sanitize_html(%User{} = user, filter) do
    fields =
      Enum.map(user.fields, fn %{"name" => name, "value" => value} ->
        %{
          "name" => name,
          "value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly)
        }
      end)

    user
    |> Map.put(:bio, HTML.filter_tags(user.bio, filter))
    |> Map.put(:fields, fields)
  end
end