Merge branch 'develop' into issue/733

commit d75bc728e7

.gitignore (vendored) | 1 +
@@ -38,6 +38,7 @@ erl_crash.dump
 
 # Prevent committing docs files
 /priv/static/doc/*
+docs/generated_config.md
 
 # Code test coverage
 /cover
@@ -7,6 +7,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 ### Security
 - OStatus: eliminate the possibility of a protocol downgrade attack.
 - OStatus: prevent following locked accounts, bypassing the approval process.
+- Mastodon API: respect post privacy in `/api/v1/statuses/:id/{favourited,reblogged}_by`
 
 ### Removed
 - **Breaking:** GNU Social API with Qvitter extensions support
@@ -20,14 +21,17 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - **Breaking:** `/api/pleroma/notifications/read` is moved to `/api/v1/pleroma/notifications/read` and now supports `max_id` and responds with Mastodon API entities.
 - Configuration: OpenGraph and TwitterCard providers enabled by default
 - Configuration: Filter.AnonymizeFilename added ability to retain file extension with custom text
-- Mastodon API: `pleroma.thread_muted` key in the Status entity
+- Configuration: added `config/description.exs`, from which `docs/config.md` is generated
 - Federation: Return 403 errors when trying to request pages from a user's follower/following collections if they have `hide_followers`/`hide_follows` set
 - NodeInfo: Return `skipThreadContainment` in `metadata` for the `skip_thread_containment` option
 - NodeInfo: Return `mailerEnabled` in `metadata`
 - Mastodon API: Unsubscribe followers when they unfollow a user
+- Mastodon API: `pleroma.thread_muted` key in the Status entity
 - AdminAPI: Add "godmode" while fetching user statuses (i.e. admin can see private statuses)
 - Improve digest email template
 – Pagination: (optional) return `total` alongside with `items` when paginating
+- Replaced [pleroma_job_queue](https://git.pleroma.social/pleroma/pleroma_job_queue) and `Pleroma.Web.Federator.RetryQueue` with [Oban](https://github.com/sorentwo/oban) (see [`docs/config.md`](docs/config.md) on migrating customized worker / retry settings)
+- Introduced [quantum](https://github.com/quantum-elixir/quantum-core) job scheduler
 
 ### Fixed
 - Following from Osada
@@ -105,6 +109,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - ActivityPub: Optional signing of ActivityPub object fetches.
 - Admin API: Endpoint for fetching latest user's statuses
 - Pleroma API: Add `/api/v1/pleroma/accounts/confirmation_resend?email=<email>` for resending account confirmation.
+- Pleroma API: Email change endpoint.
 - Relays: Added a task to list relay subscriptions.
 - Mix Tasks: `mix pleroma.database fix_likes_collections`
 - Federation: Remove `likes` from objects.
@@ -51,6 +51,24 @@
   telemetry_event: [Pleroma.Repo.Instrumenter],
   migration_lock: nil
 
+scheduled_jobs =
+  with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
+       true <- digest_config[:active] do
+    [{digest_config[:schedule], {Pleroma.Daemons.DigestEmailDaemon, :perform, []}}]
+  else
+    _ -> []
+  end
+
+scheduled_jobs =
+  scheduled_jobs ++
+    [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
+
+config :pleroma, Pleroma.Scheduler,
+  global: true,
+  overlap: true,
+  timezone: :utc,
+  jobs: scheduled_jobs
+
 config :pleroma, Pleroma.Captcha,
   enabled: false,
   seconds_valid: 60,
@@ -451,21 +469,26 @@
     "web"
   ]
 
-config :pleroma, Pleroma.Web.Federator.RetryQueue,
-  enabled: false,
-  max_jobs: 20,
-  initial_timeout: 30,
-  max_retries: 5
+config :pleroma, Oban,
+  repo: Pleroma.Repo,
+  verbose: false,
+  prune: {:maxlen, 1500},
+  queues: [
+    activity_expiration: 10,
+    federator_incoming: 50,
+    federator_outgoing: 50,
+    web_push: 50,
+    mailer: 10,
+    transmogrifier: 20,
+    scheduled_activities: 10,
+    background: 5
+  ]
 
-config :pleroma_job_queue, :queues,
-  activity_expiration: 10,
-  federator_incoming: 50,
-  federator_outgoing: 50,
-  web_push: 50,
-  mailer: 10,
-  transmogrifier: 20,
-  scheduled_activities: 10,
-  background: 5
+config :pleroma, :workers,
+  retries: [
+    federator_incoming: 5,
+    federator_outgoing: 5
+  ]
 
 config :pleroma, :fetch_initial_posts,
   enabled: false,
config/description.exs (new file, 2891 lines)
File diff suppressed because it is too large
@@ -61,7 +61,11 @@
 
 config :web_push_encryption, :http_client, Pleroma.Web.WebPushHttpClientMock
 
-config :pleroma_job_queue, disabled: true
+config :pleroma, Oban,
+  queues: false,
+  prune: :disabled
+
+config :pleroma, Pleroma.Scheduler, jobs: []
 
 config :pleroma, Pleroma.ScheduledActivity,
   daily_user_limit: 2,
@@ -60,9 +60,13 @@ Authentication is required and the user must be an admin.
 
 - Method: `POST`
 - Params:
-  - `nickname`
-  - `email`
-  - `password`
+  `users`: [
+    {
+      `nickname`,
+      `email`,
+      `password`
+    }
+  ]
 - Response: User’s nickname
 
 ## `/api/pleroma/admin/users/follow`
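For illustration, a request against the new bulk format could look roughly like the following. The endpoint path and the admin authentication method are not visible in this hunk and are assumptions; only the shape of the `users` array comes from the docs above:

```
curl -X POST "https://pleroma.example.org/api/pleroma/admin/users" \
  -H "Authorization: Bearer ADMIN_ACCESS_TOKEN" \
  -H "Content-Type: application/json" \
  -d '{"users": [{"nickname": "lain", "email": "lain@example.org", "password": "classified"}]}'
```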
@@ -252,7 +252,7 @@ See [Admin-API](Admin-API.md)
 * Params:
     * `email`: email of that needs to be verified
 * Authentication: not required
 * Response: 204 No Content
 
 ## `/api/v1/pleroma/mascot`
 ### Gets user mascot image
@@ -321,11 +321,21 @@ See [Admin-API](Admin-API.md)
 }
 ```
 
+## `/api/pleroma/change_email`
+### Change account email
+* Method `POST`
+* Authentication: required
+* Params:
+    * `password`: user's password
+    * `email`: new email
+* Response: JSON. Returns `{"status": "success"}` if the change was successful, `{"error": "[error message]"}` otherwise
+* Note: Currently, Mastodon has no API for changing email. If they add it in future it might be incompatible with Pleroma.
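A minimal sketch of calling the new endpoint with the documented params; host and access token are placeholders:

```
curl -X POST "https://pleroma.example.org/api/pleroma/change_email" \
  -H "Authorization: Bearer YOUR_ACCESS_TOKEN" \
  -d "password=current_password" \
  -d "email=new-address@example.org"
# => {"status": "success"} on success, {"error": "..."} otherwise
```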
 
 # Pleroma Conversations
 
 Pleroma Conversations have the same general structure that Mastodon Conversations have. The behavior differs in the following ways when using these endpoints:
 
 1. Pleroma Conversations never add or remove recipients, unless explicitly changed by the user.
 2. Pleroma Conversations statuses can be requested by Conversation id.
 3. Pleroma Conversations can be replied to.
 
@@ -400,35 +400,71 @@ You can then do
 curl "http://localhost:4000/api/pleroma/admin/invite_token?admin_token=somerandomtoken"
 ```
 
-## :pleroma_job_queue
+## Oban
 
-[Pleroma Job Queue](https://git.pleroma.social/pleroma/pleroma_job_queue) configuration: a list of queues with maximum concurrent jobs.
+[Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.
+
+Configuration options described in [Oban readme](https://github.com/sorentwo/oban#usage):
+* `repo` - app's Ecto repo (`Pleroma.Repo`)
+* `verbose` - logs verbosity
+* `prune` - non-retryable jobs [pruning settings](https://github.com/sorentwo/oban#pruning) (`:disabled` / `{:maxlen, value}` / `{:maxage, value}`)
+* `queues` - job queues (see below)
 
 Pleroma has the following queues:
 
+* `activity_expiration` - Activity expiration
 * `federator_outgoing` - Outgoing federation
 * `federator_incoming` - Incoming federation
-* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleroma-emails-mailer)
+* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleromaemailsmailer)
 * `transmogrifier` - Transmogrifier
 * `web_push` - Web push notifications
-* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivities`](#pleromascheduledactivity)
+* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivity`](#pleromascheduledactivity)
 
 Example:
 
 ```elixir
-config :pleroma_job_queue, :queues,
-  federator_incoming: 50,
-  federator_outgoing: 50
+config :pleroma, Oban,
+  repo: Pleroma.Repo,
+  verbose: false,
+  prune: {:maxlen, 1500},
+  queues: [
+    federator_incoming: 50,
+    federator_outgoing: 50
+  ]
 ```
 
-This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the `max_jobs` set to `50`.
+This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the number of max concurrent jobs set to `50`.
 
-## Pleroma.Web.Federator.RetryQueue
+### Migrating `pleroma_job_queue` settings
 
-* `enabled`: If set to `true`, failed federation jobs will be retried
-* `max_jobs`: The maximum amount of parallel federation jobs running at the same time.
-* `initial_timeout`: The initial timeout in seconds
-* `max_retries`: The maximum number of times a federation job is retried
+`config :pleroma_job_queue, :queues` is replaced by `config :pleroma, Oban, :queues` and uses the same format (keys are queues' names, values are max concurrent jobs numbers).
+
+### Note on running with PostgreSQL in silent mode
+
+If you are running PostgreSQL in [`silent_mode`](https://postgresqlco.nf/en/doc/param/silent_mode?version=9.1), it's advised to set [`log_destination`](https://postgresqlco.nf/en/doc/param/log_destination?version=9.1) to `syslog`,
+otherwise `postmaster.log` file may grow because of "you don't own a lock of type ShareLock" warnings (see https://github.com/sorentwo/oban/issues/52).
+
+## :workers
+
+Includes custom worker options not interpretable directly by `Oban`.
+
+* `retries` — keyword lists where keys are `Oban` queues (see above) and values are numbers of max attempts for failed jobs.
+
+Example:
+
+```elixir
+config :pleroma, :workers,
+  retries: [
+    federator_incoming: 5,
+    federator_outgoing: 5
+  ]
+```
+
+### Migrating `Pleroma.Web.Federator.RetryQueue` settings
+
+* `max_retries` is replaced with `config :pleroma, :workers, retries: [federator_outgoing: 5]`
+* `enabled: false` corresponds to `config :pleroma, :workers, retries: [federator_outgoing: 1]`
+* deprecated options: `max_jobs`, `initial_timeout`
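Putting the two migration notes above together, a customized `*.secret.exs` override could be rewritten roughly as follows; the concrete numbers are placeholders taken from the examples in this section, not recommendations:

```elixir
# Before: pleroma_job_queue queues plus RetryQueue retry settings
config :pleroma_job_queue, :queues,
  federator_incoming: 50,
  federator_outgoing: 50

config :pleroma, Pleroma.Web.Federator.RetryQueue,
  enabled: true,
  max_retries: 5

# After: Oban queues plus :workers retries
config :pleroma, Oban,
  queues: [
    federator_incoming: 50,
    federator_outgoing: 50
  ]

config :pleroma, :workers,
  retries: [
    federator_incoming: 5,
    federator_outgoing: 5
  ]
```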
 
 ## Pleroma.Web.Metadata
 * `providers`: a list of metadata providers to enable. Providers available:
@@ -489,6 +525,24 @@ config :auto_linker,
   ]
 ```
 
+## Pleroma.Scheduler
+
+Configuration for [Quantum](https://github.com/quantum-elixir/quantum-core) jobs scheduler.
+
+See [Quantum readme](https://github.com/quantum-elixir/quantum-core#usage) for the list of supported options.
+
+Example:
+
+```elixir
+config :pleroma, Pleroma.Scheduler,
+  global: true,
+  overlap: true,
+  timezone: :utc,
+  jobs: [{"0 */6 * * * *", {Pleroma.Web.Websub, :refresh_subscriptions, []}}]
+```
+
+The above example defines a single job which invokes `Pleroma.Web.Websub.refresh_subscriptions()` every 6 hours ("0 */6 * * * *", [crontab format](https://en.wikipedia.org/wiki/Cron)).
+
 ## Pleroma.ScheduledActivity
 
 * `daily_user_limit`: the number of scheduled activities a user is allowed to create in a single day (Default: `25`)
lib/mix/tasks/pleroma/docs.ex (new file, 42 lines)
@@ -0,0 +1,42 @@
+defmodule Mix.Tasks.Pleroma.Docs do
+  use Mix.Task
+  import Mix.Pleroma
+
+  @shortdoc "Generates docs from descriptions.exs"
+  @moduledoc """
+  Generates docs from `descriptions.exs`.
+
+  Supports two formats: `markdown` and `json`.
+
+  ## Generate Markdown docs
+
+  `mix pleroma.docs`
+
+  ## Generate JSON docs
+
+  `mix pleroma.docs json`
+  """
+
+  def run(["json"]) do
+    do_run(Pleroma.Docs.JSON)
+  end
+
+  def run(_) do
+    do_run(Pleroma.Docs.Markdown)
+  end
+
+  defp do_run(implementation) do
+    start_pleroma()
+
+    with {descriptions, _paths} <- Mix.Config.eval!("config/description.exs"),
+         {:ok, file_path} <-
+           Pleroma.Docs.Generator.process(
+             implementation,
+             descriptions[:pleroma][:config_description]
+           ) do
+      type = if implementation == Pleroma.Docs.Markdown, do: "Markdown", else: "JSON"
+
+      Mix.shell().info([:green, "#{type} docs successfully generated to #{file_path}."])
+    end
+  end
+end
@@ -6,6 +6,7 @@ defmodule Pleroma.Activity do
   use Ecto.Schema
 
   alias Pleroma.Activity
+  alias Pleroma.Activity.Queries
   alias Pleroma.ActivityExpiration
   alias Pleroma.Bookmark
   alias Pleroma.Notification
@@ -65,8 +66,8 @@ defmodule Pleroma.Activity do
     timestamps()
   end
 
-  def with_joined_object(query) do
-    join(query, :inner, [activity], o in Object,
+  def with_joined_object(query, join_type \\ :inner) do
+    join(query, join_type, [activity], o in Object,
       on:
         fragment(
           "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
@@ -78,10 +79,10 @@ def with_joined_object(query) do
     )
   end
 
-  def with_preloaded_object(query) do
+  def with_preloaded_object(query, join_type \\ :inner) do
     query
     |> has_named_binding?(:object)
-    |> if(do: query, else: with_joined_object(query))
+    |> if(do: query, else: with_joined_object(query, join_type))
     |> preload([activity, object: object], object: object)
   end
 
@@ -107,12 +108,9 @@ def with_set_thread_muted_field(query, %User{} = user) do
   def with_set_thread_muted_field(query, _), do: query
 
   def get_by_ap_id(ap_id) do
-    Repo.one(
-      from(
-        activity in Activity,
-        where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id))
-      )
-    )
+    ap_id
+    |> Queries.by_ap_id()
+    |> Repo.one()
   end
 
   def get_bookmark(%Activity{} = activity, %User{} = user) do
@@ -133,21 +131,10 @@ def change(struct, params \\ %{}) do
   end
 
   def get_by_ap_id_with_object(ap_id) do
-    Repo.one(
-      from(
-        activity in Activity,
-        where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id)),
-        left_join: o in Object,
-        on:
-          fragment(
-            "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-            o.data,
-            activity.data,
-            activity.data
-          ),
-        preload: [object: o]
-      )
-    )
+    ap_id
+    |> Queries.by_ap_id()
+    |> with_preloaded_object(:left)
+    |> Repo.one()
   end
 
   def get_by_id(id) do
@@ -158,18 +145,9 @@ def get_by_id(id) do
   end
 
   def get_by_id_with_object(id) do
-    from(activity in Activity,
-      where: activity.id == ^id,
-      inner_join: o in Object,
-      on:
-        fragment(
-          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-          o.data,
-          activity.data,
-          activity.data
-        ),
-      preload: [object: o]
-    )
+    Activity
+    |> where(id: ^id)
+    |> with_preloaded_object()
     |> Repo.one()
   end
 
@@ -180,51 +158,21 @@ def all_by_ids_with_object(ids) do
     |> Repo.all()
   end
 
-  def by_object_ap_id(ap_id) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          activity.data,
-          activity.data,
-          ^to_string(ap_id)
-        )
-    )
-  end
-
-  def create_by_object_ap_id(ap_ids) when is_list(ap_ids) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
-          activity.data,
-          activity.data,
-          ^ap_ids
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data)
-    )
-  end
-
-  def create_by_object_ap_id(ap_id) when is_binary(ap_id) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          activity.data,
-          activity.data,
-          ^to_string(ap_id)
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data)
-    )
-  end
-
-  def create_by_object_ap_id(_), do: nil
+  @doc """
+  Accepts `ap_id` or list of `ap_id`.
+  Returns a query.
+  """
+  @spec create_by_object_ap_id(String.t() | [String.t()]) :: Ecto.Queryable.t()
+  def create_by_object_ap_id(ap_id) do
+    ap_id
+    |> Queries.by_object_id()
+    |> Queries.by_type("Create")
+  end
 
   def get_all_create_by_object_ap_id(ap_id) do
-    Repo.all(create_by_object_ap_id(ap_id))
+    ap_id
+    |> create_by_object_ap_id()
+    |> Repo.all()
   end
 
   def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do
@@ -235,54 +183,17 @@ def get_create_by_object_ap_id(ap_id) when is_binary(ap_id) do
 
   def get_create_by_object_ap_id(_), do: nil
 
-  def create_by_object_ap_id_with_object(ap_ids) when is_list(ap_ids) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
-          activity.data,
-          activity.data,
-          ^ap_ids
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data),
-      inner_join: o in Object,
-      on:
-        fragment(
-          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-          o.data,
-          activity.data,
-          activity.data
-        ),
-      preload: [object: o]
-    )
-  end
-
-  def create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
-    from(
-      activity in Activity,
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          activity.data,
-          activity.data,
-          ^to_string(ap_id)
-        ),
-      where: fragment("(?)->>'type' = 'Create'", activity.data),
-      inner_join: o in Object,
-      on:
-        fragment(
-          "(?->>'id') = COALESCE(?->'object'->>'id', ?->>'object')",
-          o.data,
-          activity.data,
-          activity.data
-        ),
-      preload: [object: o]
-    )
-  end
-
-  def create_by_object_ap_id_with_object(_), do: nil
+  @doc """
+  Accepts `ap_id` or list of `ap_id`.
+  Returns a query.
+  """
+  @spec create_by_object_ap_id_with_object(String.t() | [String.t()]) :: Ecto.Queryable.t()
+  def create_by_object_ap_id_with_object(ap_id) do
+    ap_id
+    |> create_by_object_ap_id()
+    |> with_preloaded_object()
+  end
 
   def get_create_by_object_ap_id_with_object(ap_id) when is_binary(ap_id) do
     ap_id
     |> create_by_object_ap_id_with_object()
@@ -306,7 +217,8 @@ def normalize(ap_id) when is_binary(ap_id), do: get_by_ap_id_with_object(ap_id)
   def normalize(_), do: nil
 
   def delete_by_ap_id(id) when is_binary(id) do
-    by_object_ap_id(id)
+    id
+    |> Queries.by_object_id()
     |> select([u], u)
     |> Repo.delete_all()
     |> elem(1)
@@ -350,31 +262,10 @@ def all_by_actor_and_id(actor, status_ids) do
   end
 
   def follow_requests_for_actor(%Pleroma.User{ap_id: ap_id}) do
-    from(
-      a in Activity,
-      where:
-        fragment(
-          "? ->> 'type' = 'Follow'",
-          a.data
-        ),
-      where:
-        fragment(
-          "? ->> 'state' = 'pending'",
-          a.data
-        ),
-      where:
-        fragment(
-          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-          a.data,
-          a.data,
-          ^ap_id
-        )
-    )
-  end
-
-  @spec query_by_actor(actor()) :: Ecto.Query.t()
-  def query_by_actor(actor) do
-    from(a in Activity, where: a.actor == ^actor)
+    ap_id
+    |> Queries.by_object_id()
+    |> Queries.by_type("Follow")
+    |> where([a], fragment("? ->> 'state' = 'pending'", a.data))
   end
 
   def restrict_deactivated_users(query) do
@@ -13,6 +13,14 @@ defmodule Pleroma.Activity.Queries do
 
   alias Pleroma.Activity
 
+  @spec by_ap_id(query, String.t()) :: query
+  def by_ap_id(query \\ Activity, ap_id) do
+    from(
+      activity in query,
+      where: fragment("(?)->>'id' = ?", activity.data, ^to_string(ap_id))
+    )
+  end
+
   @spec by_actor(query, String.t()) :: query
   def by_actor(query \\ Activity, actor) do
     from(
@@ -21,8 +29,23 @@ def by_actor(query \\ Activity, actor) do
     )
   end
 
-  @spec by_object_id(query, String.t()) :: query
-  def by_object_id(query \\ Activity, object_id) do
+  @spec by_object_id(query, String.t() | [String.t()]) :: query
+  def by_object_id(query \\ Activity, object_id)
+
+  def by_object_id(query, object_ids) when is_list(object_ids) do
+    from(
+      activity in query,
+      where:
+        fragment(
+          "coalesce((?)->'object'->>'id', (?)->>'object') = ANY(?)",
+          activity.data,
+          activity.data,
+          ^object_ids
+        )
+    )
+  end
+
+  def by_object_id(query, object_id) when is_binary(object_id) do
     from(activity in query,
       where:
         fragment(
@@ -41,9 +64,4 @@ def by_type(query \\ Activity, activity_type) do
       where: fragment("(?)->>'type' = ?", activity.data, ^activity_type)
     )
   end
-
-  @spec limit(query, pos_integer()) :: query
-  def limit(query \\ Activity, limit) do
-    from(activity in query, limit: ^limit)
-  end
 end
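The helpers above are composable Ecto queries; a sketch of how the rest of this diff chains them, with the object AP id as a placeholder:

```elixir
alias Pleroma.Activity.Queries
alias Pleroma.Repo

# All "Create" activities referencing a given object AP id (or a list of ids).
"https://example.org/objects/1"
|> Queries.by_object_id()
|> Queries.by_type("Create")
|> Repo.all()
```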
@@ -31,18 +31,19 @@ def start(_type, _args) do
     children =
       [
         Pleroma.Repo,
+        Pleroma.Scheduler,
         Pleroma.Config.TransferTask,
         Pleroma.Emoji,
         Pleroma.Captcha,
         Pleroma.FlakeId,
-        Pleroma.ScheduledActivityWorker,
-        Pleroma.ActivityExpirationWorker
+        Pleroma.Daemons.ScheduledActivityDaemon,
+        Pleroma.Daemons.ActivityExpirationDaemon
       ] ++
         cachex_children() ++
         hackney_pool_children() ++
         [
-          Pleroma.Web.Federator.RetryQueue,
           Pleroma.Stats,
+          {Oban, Pleroma.Config.get(Oban)},
           %{
             id: :web_push_init,
             start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
@@ -70,9 +71,7 @@ def start(_type, _args) do
     # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
     # for other strategies and supported options
     opts = [strategy: :one_for_one, name: Pleroma.Supervisor]
-    result = Supervisor.start_link(children, opts)
-    :ok = after_supervisor_start()
-    result
+    Supervisor.start_link(children, opts)
   end
 
   defp setup_instrumenters do
@@ -164,17 +163,4 @@ defp hackney_pool_children do
       :hackney_pool.child_spec(pool, options)
     end
   end
-
-  defp after_supervisor_start do
-    with digest_config <- Application.get_env(:pleroma, :email_notifications)[:digest],
-         true <- digest_config[:active] do
-      PleromaJobQueue.schedule(
-        digest_config[:schedule],
-        :digest_emails,
-        Pleroma.DigestEmailWorker
-      )
-    end
-
-    :ok
-  end
 end
@@ -2,13 +2,14 @@
 # Copyright © 2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
 
-defmodule Pleroma.ActivityExpirationWorker do
+defmodule Pleroma.Daemons.ActivityExpirationDaemon do
   alias Pleroma.Activity
   alias Pleroma.ActivityExpiration
   alias Pleroma.Config
   alias Pleroma.Repo
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI
 
   require Logger
   use GenServer
   import Ecto.Query
@@ -49,7 +50,10 @@ def perform(:execute, expiration_id) do
   def handle_info(:perform, state) do
     ActivityExpiration.due_expirations(@schedule_interval)
     |> Enum.each(fn expiration ->
-      PleromaJobQueue.enqueue(:activity_expiration, __MODULE__, [:execute, expiration.id])
+      Pleroma.Workers.ActivityExpirationWorker.enqueue(
+        "activity_expiration",
+        %{"activity_expiration_id" => expiration.id}
+      )
     end)
 
     schedule_next()
@@ -2,10 +2,11 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
 
-defmodule Pleroma.DigestEmailWorker do
-  import Ecto.Query
+defmodule Pleroma.Daemons.DigestEmailDaemon do
+  alias Pleroma.Repo
+  alias Pleroma.Workers.DigestEmailsWorker
 
-  @queue_name :digest_emails
+  import Ecto.Query
 
   def perform do
     config = Pleroma.Config.get([:email_notifications, :digest])
@@ -20,8 +21,10 @@ def perform do
       where: u.last_digest_emailed_at < datetime_add(^now, ^negative_interval, "day"),
       select: u
     )
-    |> Pleroma.Repo.all()
-    |> Enum.each(&PleromaJobQueue.enqueue(@queue_name, __MODULE__, [&1]))
+    |> Repo.all()
+    |> Enum.each(fn user ->
+      DigestEmailsWorker.enqueue("digest_email", %{"user_id" => user.id})
+    end)
   end
 
   @doc """
@@ -2,7 +2,7 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only
 
-defmodule Pleroma.ScheduledActivityWorker do
+defmodule Pleroma.Daemons.ScheduledActivityDaemon do
   @moduledoc """
   Sends scheduled activities to the job queue.
   """
@@ -11,6 +11,7 @@ defmodule Pleroma.ScheduledActivityWorker do
   alias Pleroma.ScheduledActivity
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI
 
   use GenServer
   require Logger
 
@@ -45,7 +46,10 @@ def perform(:execute, scheduled_activity_id) do
   def handle_info(:perform, state) do
     ScheduledActivity.due_activities(@schedule_interval)
     |> Enum.each(fn scheduled_activity ->
-      PleromaJobQueue.enqueue(:scheduled_activities, __MODULE__, [:execute, scheduled_activity.id])
+      Pleroma.Workers.ScheduledActivityWorker.enqueue(
+        "execute",
+        %{"activity_id" => scheduled_activity.id}
+      )
     end)
 
     schedule_next()
lib/pleroma/docs/generator.ex (new file, 73 lines)
@@ -0,0 +1,73 @@
+defmodule Pleroma.Docs.Generator do
+  @callback process(keyword()) :: {:ok, String.t()}
+
+  @spec process(module(), keyword()) :: {:ok, String.t()}
+  def process(implementation, descriptions) do
+    implementation.process(descriptions)
+  end
+
+  @spec uploaders_list() :: [module()]
+  def uploaders_list do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Uploaders"]) and
+        List.last(name_as_list) != "Uploader"
+    end)
+  end
+
+  @spec filters_list() :: [module()]
+  def filters_list do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Upload", "Filter"])
+    end)
+  end
+
+  @spec mrf_list() :: [module()]
+  def mrf_list do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Web", "ActivityPub", "MRF"]) and
+        length(name_as_list) > 4
+    end)
+  end
+
+  @spec richmedia_parsers() :: [module()]
+  def richmedia_parsers do
+    {:ok, modules} = :application.get_key(:pleroma, :modules)
+
+    Enum.filter(modules, fn module ->
+      name_as_list = Module.split(module)
+
+      List.starts_with?(name_as_list, ["Pleroma", "Web", "RichMedia", "Parsers"]) and
+        length(name_as_list) == 5
+    end)
+  end
+end
+
+defimpl Jason.Encoder, for: Tuple do
+  def encode(tuple, opts) do
+    Jason.Encode.list(Tuple.to_list(tuple), opts)
+  end
+end
+
+defimpl Jason.Encoder, for: [Regex, Function] do
+  def encode(term, opts) do
+    Jason.Encode.string(inspect(term), opts)
+  end
+end
+
+defimpl String.Chars, for: Regex do
+  def to_string(term) do
+    inspect(term)
+  end
+end
lib/pleroma/docs/json.ex (new file, 20 lines)
@@ -0,0 +1,20 @@
+defmodule Pleroma.Docs.JSON do
+  @behaviour Pleroma.Docs.Generator
+
+  @spec process(keyword()) :: {:ok, String.t()}
+  def process(descriptions) do
+    config_path = "docs/generate_config.json"
+
+    with {:ok, file} <- File.open(config_path, [:write]),
+         json <- generate_json(descriptions),
+         :ok <- IO.write(file, json),
+         :ok <- File.close(file) do
+      {:ok, config_path}
+    end
+  end
+
+  @spec generate_json([keyword()]) :: String.t()
+  def generate_json(descriptions) do
+    Jason.encode!(descriptions)
+  end
+end
lib/pleroma/docs/markdown.ex (new file, 78 lines)
@@ -0,0 +1,78 @@
+defmodule Pleroma.Docs.Markdown do
+  @behaviour Pleroma.Docs.Generator
+
+  @spec process(keyword()) :: {:ok, String.t()}
+  def process(descriptions) do
+    config_path = "docs/generated_config.md"
+    {:ok, file} = File.open(config_path, [:utf8, :write])
+    IO.write(file, "# Generated configuration\n")
+    IO.write(file, "Date of generation: #{Date.utc_today()}\n\n")
+
+    IO.write(
+      file,
+      "This file describe the configuration, it is recommended to edit the relevant `*.secret.exs` file instead of the others founds in the ``config`` directory.\n\n" <>
+        "If you run Pleroma with ``MIX_ENV=prod`` the file is ``prod.secret.exs``, otherwise it is ``dev.secret.exs``.\n\n"
+    )
+
+    for group <- descriptions do
+      if is_nil(group[:key]) do
+        IO.write(file, "## #{inspect(group[:group])}\n")
+      else
+        IO.write(file, "## #{inspect(group[:key])}\n")
+      end
+
+      IO.write(file, "#{group[:description]}\n")
+
+      for child <- group[:children] do
+        print_child_header(file, child)
+
+        print_suggestions(file, child[:suggestions])
+
+        if child[:children] do
+          for subchild <- child[:children] do
+            print_child_header(file, subchild)
+
+            print_suggestions(file, subchild[:suggestions])
+          end
+        end
+      end
+
+      IO.write(file, "\n")
+    end
+
+    :ok = File.close(file)
+    {:ok, config_path}
+  end
+
+  defp print_suggestion(file, suggestion) when is_list(suggestion) do
+    IO.write(file, "  `#{inspect(suggestion)}`\n")
+  end
+
+  defp print_suggestion(file, suggestion) when is_function(suggestion) do
+    IO.write(file, "  `#{inspect(suggestion.())}`\n")
+  end
+
+  defp print_suggestion(file, suggestion, as_list \\ false) do
+    list_mark = if as_list, do: "- ", else: ""
+    IO.write(file, "  #{list_mark}`#{inspect(suggestion)}`\n")
+  end
+
+  defp print_suggestions(_file, nil), do: nil
+
+  defp print_suggestions(file, suggestions) do
+    IO.write(file, "Suggestions:\n")
+
+    if length(suggestions) > 1 do
+      for suggestion <- suggestions do
+        print_suggestion(file, suggestion, true)
+      end
+    else
+      print_suggestion(file, List.first(suggestions))
+    end
+  end
+
+  defp print_child_header(file, child) do
+    IO.write(file, "- `#{inspect(child[:key])}` -`#{inspect(child[:type])}` \n")
+    IO.write(file, "#{child[:description]} \n")
+  end
+end
@@ -9,6 +9,7 @@ defmodule Pleroma.Emails.Mailer do
   The module contains functions to delivery email using Swoosh.Mailer.
   """
 
+  alias Pleroma.Workers.MailerWorker
   alias Swoosh.DeliveryError
 
   @otp_app :pleroma
@@ -19,7 +20,12 @@ def enabled?, do: Pleroma.Config.get([__MODULE__, :enabled])
 
   @doc "add email to queue"
   def deliver_async(email, config \\ []) do
-    PleromaJobQueue.enqueue(:mailer, __MODULE__, [:deliver_async, email, config])
+    encoded_email =
+      email
+      |> :erlang.term_to_binary()
+      |> Base.encode64()
+
+    MailerWorker.enqueue("email", %{"encoded_email" => encoded_email, "config" => config})
   end
 
   @doc "callback to perform send email from queue"
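The email term is serialized before enqueueing because Oban job arguments have to survive a JSON round trip; a minimal sketch of that encoding and of the inverse step the worker presumably performs (the worker module itself is not part of this diff):

```elixir
# Encode an arbitrary term (a stand-in map here instead of the real email struct)
# into a JSON-safe string.
encoded =
  %{subject: "hello"}
  |> :erlang.term_to_binary()
  |> Base.encode64()

# Restore it on the consuming side.
decoded =
  encoded
  |> Base.decode64!()
  |> :erlang.binary_to_term()
```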
@@ -90,7 +90,7 @@ def set_reachable(_), do: {:error, nil}
   def set_unreachable(url_or_host, unreachable_since \\ nil)
 
   def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host) do
-    unreachable_since = unreachable_since || DateTime.utc_now()
+    unreachable_since = parse_datetime(unreachable_since) || NaiveDateTime.utc_now()
     host = host(url_or_host)
     existing_record = Repo.get_by(Instance, %{host: host})
 
@@ -114,4 +114,10 @@ def set_unreachable(url_or_host, unreachable_since) when is_binary(url_or_host)
   end
 
   def set_unreachable(_, _), do: {:error, nil}
+
+  defp parse_datetime(datetime) when is_binary(datetime) do
+    NaiveDateTime.from_iso8601(datetime)
+  end
+
+  defp parse_datetime(datetime), do: datetime
 end
lib/pleroma/scheduler.ex (new file, 7 lines)
@@ -0,0 +1,7 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Scheduler do
+  use Quantum.Scheduler, otp_app: :pleroma
+end
@@ -27,6 +27,7 @@ defmodule Pleroma.User do
   alias Pleroma.Web.OStatus
   alias Pleroma.Web.RelMe
   alias Pleroma.Web.Websub
+  alias Pleroma.Workers.BackgroundWorker
 
   require Logger
 
@@ -174,11 +175,25 @@ def following_count(%User{} = user) do
     |> Repo.aggregate(:count, :id)
   end
 
+  defp truncate_if_exists(params, key, max_length) do
+    if Map.has_key?(params, key) and is_binary(params[key]) do
+      {value, _chopped} = String.split_at(params[key], max_length)
+      Map.put(params, key, value)
+    else
+      params
+    end
+  end
+
   def remote_user_creation(params) do
     bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
     name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)
 
-    params = Map.put(params, :info, params[:info] || %{})
+    params =
+      params
+      |> Map.put(:info, params[:info] || %{})
+      |> truncate_if_exists(:name, name_limit)
+      |> truncate_if_exists(:bio, bio_limit)
 
     info_cng = User.Info.remote_user_creation(%User.Info{}, params[:info])
 
     changes =
@@ -633,8 +648,9 @@ def get_or_fetch_by_nickname(nickname) do
   end
 
   @doc "Fetch some posts when the user has just been federated with"
-  def fetch_initial_posts(user),
-    do: PleromaJobQueue.enqueue(:background, __MODULE__, [:fetch_initial_posts, user])
+  def fetch_initial_posts(user) do
+    BackgroundWorker.enqueue("fetch_initial_posts", %{"user_id" => user.id})
+  end
 
   @spec get_followers_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
   def get_followers_query(%User{} = user, nil) do
@@ -1064,7 +1080,7 @@ def unblock_domain(user, domain) do
   end
 
   def deactivate_async(user, status \\ true) do
-    PleromaJobQueue.enqueue(:background, __MODULE__, [:deactivate_async, user, status])
+    BackgroundWorker.enqueue("deactivate_user", %{"user_id" => user.id, "status" => status})
   end
 
   def deactivate(%User{} = user, status \\ true) do
@@ -1092,9 +1108,9 @@ def update_notification_settings(%User{} = user, settings \\ %{}) do
     |> update_and_set_cache()
   end
 
-  @spec delete(User.t()) :: :ok
-  def delete(%User{} = user),
-    do: PleromaJobQueue.enqueue(:background, __MODULE__, [:delete, user])
+  def delete(%User{} = user) do
+    BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
+  end
 
   @spec perform(atom(), User.t()) :: {:ok, User.t()}
   def perform(:delete, %User{} = user) do
@@ -1201,25 +1217,24 @@ def external_users(opts \\ []) do
     Repo.all(query)
   end
 
-  def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers),
-    do:
-      PleromaJobQueue.enqueue(:background, __MODULE__, [
-        :blocks_import,
-        blocker,
-        blocked_identifiers
-      ])
+  def blocks_import(%User{} = blocker, blocked_identifiers) when is_list(blocked_identifiers) do
+    BackgroundWorker.enqueue("blocks_import", %{
+      "blocker_id" => blocker.id,
+      "blocked_identifiers" => blocked_identifiers
+    })
+  end
 
-  def follow_import(%User{} = follower, followed_identifiers) when is_list(followed_identifiers),
-    do:
-      PleromaJobQueue.enqueue(:background, __MODULE__, [
-        :follow_import,
-        follower,
-        followed_identifiers
-      ])
+  def follow_import(%User{} = follower, followed_identifiers)
+      when is_list(followed_identifiers) do
+    BackgroundWorker.enqueue("follow_import", %{
+      "follower_id" => follower.id,
+      "followed_identifiers" => followed_identifiers
+    })
+  end
 
   def delete_user_activities(%User{ap_id: ap_id} = user) do
     ap_id
-    |> Activity.query_by_actor()
+    |> Activity.Queries.by_actor()
     |> RepoStreamer.chunk_stream(50)
     |> Stream.each(fn activities ->
       Enum.each(activities, &delete_activity(&1))
@@ -1624,4 +1639,13 @@ defp put_password_hash(changeset), do: changeset
   def is_internal_user?(%User{nickname: nil}), do: true
   def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
   def is_internal_user?(_), do: false
+
+  def change_email(user, email) do
+    user
+    |> cast(%{email: email}, [:email])
+    |> validate_required([:email])
+    |> unique_constraint(:email)
+    |> validate_format(:email, @email_regex)
+    |> update_and_set_cache()
+  end
 end
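A sketch of driving the new `change_email/2` pipeline from IEx; it assumes `update_and_set_cache/1` returns the usual `{:ok, user} | {:error, changeset}` tuple and that the standard user lookup helper is available, neither of which is shown in this diff:

```elixir
alias Pleroma.User

with %User{} = user <- User.get_cached_by_nickname("lain"),
     {:ok, %User{} = updated} <- User.change_email(user, "new-address@example.org") do
  updated.email
end
```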
@@ -242,6 +242,13 @@ def set_keys(info, keys) do
   end
 
   def remote_user_creation(info, params) do
+    params =
+      if Map.has_key?(params, :fields) do
+        Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1))
+      else
+        params
+      end
+
     info
     |> cast(params, [
       :ap_enabled,
@@ -326,6 +333,16 @@ defp valid_field?(%{"name" => name, "value" => value}) do
 
   defp valid_field?(_), do: false
 
+  defp truncate_field(%{"name" => name, "value" => value}) do
+    {name, _chopped} =
+      String.split_at(name, Pleroma.Config.get([:instance, :account_field_name_length], 255))
+
+    {value, _chopped} =
+      String.split_at(value, Pleroma.Config.get([:instance, :account_field_value_length], 255))
+
+    %{"name" => name, "value" => value}
+  end
+
   @spec confirmation_changeset(Info.t(), keyword()) :: Changeset.t()
   def confirmation_changeset(info, opts) do
     need_confirmation? = Keyword.get(opts, :need_confirmation)
@@ -17,6 +17,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
   alias Pleroma.Web.ActivityPub.MRF
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.WebFinger
+  alias Pleroma.Workers.BackgroundWorker
 
   import Ecto.Query
   import Pleroma.Web.ActivityPub.Utils
@@ -145,7 +146,7 @@ def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when
       activity
     end
 
-    PleromaJobQueue.enqueue(:background, Pleroma.Web.RichMedia.Helpers, [:fetch, activity])
+    BackgroundWorker.enqueue("fetch_data_for_activity", %{"activity_id" => activity.id})
 
     Notification.create_notifications(activity)
 
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy do
 
   alias Pleroma.HTTP
   alias Pleroma.Web.MediaProxy
+  alias Pleroma.Workers.BackgroundWorker
 
   require Logger
 
@@ -30,7 +31,7 @@ def perform(:preload, %{"object" => %{"attachment" => attachments}} = _message)
     url
     |> Enum.each(fn
       %{"href" => href} ->
-        PleromaJobQueue.enqueue(:background, __MODULE__, [:prefetch, href])
+        BackgroundWorker.enqueue("media_proxy_prefetch", %{"url" => href})
 
       x ->
         Logger.debug("Unhandled attachment URL object #{inspect(x)}")
@@ -46,7 +47,7 @@ def filter(
         %{"type" => "Create", "object" => %{"attachment" => attachments} = _object} = message
       )
       when is_list(attachments) and length(attachments) > 0 do
-    PleromaJobQueue.enqueue(:background, __MODULE__, [:preload, message])
+    BackgroundWorker.enqueue("media_proxy_preload", %{"message" => message})
 
     {:ok, message}
   end
@@ -84,6 +84,15 @@ def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = pa
     end
   end

+  def publish_one(%{actor_id: actor_id} = params) do
+    actor = User.get_cached_by_id(actor_id)
+
+    params
+    |> Map.delete(:actor_id)
+    |> Map.put(:actor, actor)
+    |> publish_one()
+  end
+
   defp should_federate?(inbox, public) do
     if public do
       true
@@ -159,7 +168,8 @@ def determine_inbox(
   Publishes an activity with BCC to all relevant peers.
   """

-  def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bcc != [] do
+  def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
+      when is_list(bcc) and bcc != [] do
     public = is_public?(activity)
     {:ok, data} = Transmogrifier.prepare_outgoing(activity.data)

@@ -186,7 +196,7 @@ def publish(actor, %{data: %{"bcc" => bcc}} = activity) when is_list(bcc) and bc
       Pleroma.Web.Federator.Publisher.enqueue_one(__MODULE__, %{
         inbox: inbox,
         json: json,
-        actor: actor,
+        actor_id: actor.id,
         id: activity.data["id"],
         unreachable_since: unreachable_since
       })
@@ -221,7 +231,7 @@ def publish(%User{} = actor, %Activity{} = activity) do
       %{
         inbox: inbox,
         json: json,
-        actor: actor,
+        actor_id: actor.id,
         id: activity.data["id"],
         unreachable_since: unreachable_since
       }
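Oban persists job arguments as JSON, so a %User{} struct cannot ride along in the job; only its id does, and the new publish_one/1 clause above rehydrates the struct just before delivery. For illustration, roughly what one delivery's arguments end up looking like in the job table (values are made up, and the "op"-key layout assumes the WorkerHelper-generated enqueue used elsewhere in this commit):

    %{
      "op" => "publish_one",
      "module" => "Elixir.Pleroma.Web.ActivityPub.Publisher",
      "params" => %{
        "inbox" => "https://remote.example/inbox",
        "json" => "{\"type\":\"Create\"}",
        "actor_id" => "9gBQVbGGi7RZbkkGqs",
        "id" => "https://local.example/activities/1",
        "unreachable_since" => nil
      }
    }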
@@ -15,6 +15,7 @@ defmodule Pleroma.Web.ActivityPub.Transmogrifier do
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.ActivityPub.Visibility
   alias Pleroma.Web.Federator
+  alias Pleroma.Workers.TransmogrifierWorker

   import Ecto.Query

@@ -185,12 +186,12 @@ def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
         |> Map.put("context", replied_object.data["context"] || object["conversation"])
       else
         e ->
-          Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+          Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
           object
       end

     e ->
-      Logger.error("Couldn't fetch \"#{inspect(in_reply_to_id)}\", error: #{inspect(e)}")
+      Logger.error("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
       object
   end
 else
@@ -1051,7 +1052,7 @@ def upgrade_user_from_ap_id(ap_id) do
          already_ap <- User.ap_enabled?(user),
          {:ok, user} <- upgrade_user(user, data) do
       if not already_ap do
-        PleromaJobQueue.enqueue(:transmogrifier, __MODULE__, [:user_upgrade, user])
+        TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
       end

       {:ok, user}
@@ -85,15 +85,13 @@ defp extract_list(lst) when is_list(lst), do: lst
   defp extract_list(_), do: []

   def maybe_splice_recipient(ap_id, params) do
-    need_splice =
+    need_splice? =
       !recipient_in_collection(ap_id, params["to"]) &&
         !recipient_in_collection(ap_id, params["cc"])

-    cc_list = extract_list(params["cc"])
-
-    if need_splice do
-      params
-      |> Map.put("cc", [ap_id | cc_list])
+    if need_splice? do
+      cc_list = extract_list(params["cc"])
+      Map.put(params, "cc", [ap_id | cc_list])
     else
       params
     end
@@ -139,7 +137,7 @@ def get_notified_from_object(%{"type" => type} = object) when type in @supported
       "object" => object
     }

-    Notification.get_notified_from_activity(%Activity{data: fake_create_activity}, false)
+    get_notified_from_object(fake_create_activity)
   end

   def get_notified_from_object(object) do
@@ -169,14 +167,7 @@ def create_context(context) do
   @spec maybe_federate(any()) :: :ok
   def maybe_federate(%Activity{local: true} = activity) do
     if Pleroma.Config.get!([:instance, :federating]) do
-      priority =
-        case activity.data["type"] do
-          "Delete" -> 10
-          "Create" -> 1
-          _ -> 5
-        end
-
-      Pleroma.Web.Federator.publish(activity, priority)
+      Pleroma.Web.Federator.publish(activity)
     end

     :ok
@@ -188,9 +179,9 @@ def maybe_federate(_), do: :ok
   Adds an id and a published data if they aren't there,
   also adds it to an included object
   """
-  def lazy_put_activity_defaults(map, fake \\ false) do
+  def lazy_put_activity_defaults(map, fake? \\ false) do
     map =
-      unless fake do
+      if not fake? do
         %{data: %{"id" => context}, id: context_id} = create_context(map["context"])

         map
@@ -207,7 +198,7 @@ def lazy_put_activity_defaults(map, fake \\ false) do
       end

     if is_map(map["object"]) do
-      object = lazy_put_object_defaults(map["object"], map, fake)
+      object = lazy_put_object_defaults(map["object"], map, fake?)
       %{map | "object" => object}
     else
       map
@@ -217,9 +208,9 @@ def lazy_put_activity_defaults(map, fake \\ false) do
   @doc """
   Adds an id and published date if they aren't there.
   """
-  def lazy_put_object_defaults(map, activity \\ %{}, fake)
+  def lazy_put_object_defaults(map, activity \\ %{}, fake?)

-  def lazy_put_object_defaults(map, activity, true = _fake) do
+  def lazy_put_object_defaults(map, activity, true = _fake?) do
     map
     |> Map.put_new_lazy("published", &make_date/0)
     |> Map.put_new("id", "pleroma:fake_object_id")
@@ -228,7 +219,7 @@ def lazy_put_object_defaults(map, activity, true = _fake) do
     |> Map.put_new("context_id", activity["context_id"])
   end

-  def lazy_put_object_defaults(map, activity, _fake) do
+  def lazy_put_object_defaults(map, activity, _fake?) do
     map
     |> Map.put_new_lazy("id", &generate_object_id/0)
     |> Map.put_new_lazy("published", &make_date/0)
@@ -242,9 +233,7 @@ def lazy_put_object_defaults(map, activity, _fake) do
   def insert_full_object(%{"object" => %{"type" => type} = object_data} = map)
       when is_map(object_data) and type in @supported_object_types do
     with {:ok, object} <- Object.create(object_data) do
-      map =
-        map
-        |> Map.put("object", object.data["id"])
+      map = Map.put(map, "object", object.data["id"])

       {:ok, map, object}
     end
@@ -263,7 +252,7 @@ def get_existing_like(actor, %{data: %{"id" => id}}) do
     |> Activity.Queries.by_actor()
     |> Activity.Queries.by_object_id(id)
     |> Activity.Queries.by_type("Like")
-    |> Activity.Queries.limit(1)
+    |> limit(1)
     |> Repo.one()
   end

@@ -380,12 +369,11 @@ def update_follow_state(
         %Activity{data: %{"actor" => actor, "object" => object}} = activity,
         state
       ) do
-    with new_data <-
-           activity.data
-           |> Map.put("state", state),
-         changeset <- Changeset.change(activity, data: new_data),
-         {:ok, activity} <- Repo.update(changeset),
-         _ <- User.set_follow_state_cache(actor, object, state) do
+    new_data = Map.put(activity.data, "state", state)
+    changeset = Changeset.change(activity, data: new_data)
+
+    with {:ok, activity} <- Repo.update(changeset) do
+      User.set_follow_state_cache(actor, object, state)
       {:ok, activity}
     end
   end
@@ -410,28 +398,14 @@ def make_follow_data(
   end

   def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
-    query =
-      from(
-        activity in Activity,
-        where:
-          fragment(
-            "? ->> 'type' = 'Follow'",
-            activity.data
-          ),
-        where: activity.actor == ^follower_id,
-        # this is to use the index
-        where:
-          fragment(
-            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-            activity.data,
-            activity.data,
-            ^followed_id
-          ),
-        order_by: [fragment("? desc nulls last", activity.id)],
-        limit: 1
-      )
-
-    Repo.one(query)
+    "Follow"
+    |> Activity.Queries.by_type()
+    |> where(actor: ^follower_id)
+    # this is to use the index
+    |> Activity.Queries.by_object_id(followed_id)
+    |> order_by([activity], fragment("? desc nulls last", activity.id))
+    |> limit(1)
+    |> Repo.one()
   end

   #### Announce-related helpers
@@ -439,23 +413,13 @@ def fetch_latest_follow(%User{ap_id: follower_id}, %User{ap_id: followed_id}) do
   @doc """
   Retruns an existing announce activity if the notice has already been announced
   """
-  def get_existing_announce(actor, %{data: %{"id" => id}}) do
-    query =
-      from(
-        activity in Activity,
-        where: activity.actor == ^actor,
-        # this is to use the index
-        where:
-          fragment(
-            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-            activity.data,
-            activity.data,
-            ^id
-          ),
-        where: fragment("(?)->>'type' = 'Announce'", activity.data)
-      )
-
-    Repo.one(query)
+  def get_existing_announce(actor, %{data: %{"id" => ap_id}}) do
+    "Announce"
+    |> Activity.Queries.by_type()
+    |> where(actor: ^actor)
+    # this is to use the index
+    |> Activity.Queries.by_object_id(ap_id)
+    |> Repo.one()
   end

   @doc """
@@ -538,11 +502,13 @@ def add_announce_to_object(
         object
       ) do
     announcements =
-      if is_list(object.data["announcements"]), do: object.data["announcements"], else: []
-
-    with announcements <- [actor | announcements] |> Enum.uniq() do
-      update_element_in_object("announcement", announcements, object)
-    end
+      if is_list(object.data["announcements"]) do
+        Enum.uniq([actor | object.data["announcements"]])
+      else
+        [actor]
+      end
+
+    update_element_in_object("announcement", announcements, object)
   end

   def add_announce_to_object(_, object), do: {:ok, object}
@@ -570,28 +536,14 @@ def make_unfollow_data(follower, followed, follow_activity, activity_id) do

   #### Block-related helpers
   def fetch_latest_block(%User{ap_id: blocker_id}, %User{ap_id: blocked_id}) do
-    query =
-      from(
-        activity in Activity,
-        where:
-          fragment(
-            "? ->> 'type' = 'Block'",
-            activity.data
-          ),
-        where: activity.actor == ^blocker_id,
-        # this is to use the index
-        where:
-          fragment(
-            "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
-            activity.data,
-            activity.data,
-            ^blocked_id
-          ),
-        order_by: [fragment("? desc nulls last", activity.id)],
-        limit: 1
-      )
-
-    Repo.one(query)
+    "Block"
+    |> Activity.Queries.by_type()
+    |> where(actor: ^blocker_id)
+    # this is to use the index
+    |> Activity.Queries.by_object_id(blocked_id)
+    |> order_by([activity], fragment("? desc nulls last", activity.id))
+    |> limit(1)
+    |> Repo.one()
   end

   def make_block_data(blocker, blocked, activity_id) do
@@ -695,11 +647,11 @@ def fetch_ordered_collection(from, pages_left, acc \\ []) do
   #### Report-related helpers

   def update_report_state(%Activity{} = activity, state) when state in @supported_report_states do
-    with new_data <- Map.put(activity.data, "state", state),
-         changeset <- Changeset.change(activity, data: new_data),
-         {:ok, activity} <- Repo.update(changeset) do
-      {:ok, activity}
-    end
+    new_data = Map.put(activity.data, "state", state)
+
+    activity
+    |> Changeset.change(data: new_data)
+    |> Repo.update()
   end

   def update_report_state(_, _), do: {:error, "Unsupported state"}
@@ -766,21 +718,13 @@ defp get_updated_targets(
   end

   def get_existing_votes(actor, %{data: %{"id" => id}}) do
-    query =
-      from(
-        [activity, object: object] in Activity.with_preloaded_object(Activity),
-        where: fragment("(?)->>'type' = 'Create'", activity.data),
-        where: fragment("(?)->>'actor' = ?", activity.data, ^actor),
-        where:
-          fragment(
-            "(?)->>'inReplyTo' = ?",
-            object.data,
-            ^to_string(id)
-          ),
-        where: fragment("(?)->>'type' = 'Answer'", object.data)
-      )
-
-    Repo.all(query)
+    actor
+    |> Activity.Queries.by_actor()
+    |> Activity.Queries.by_type("Create")
+    |> Activity.with_preloaded_object()
+    |> where([a, object: o], fragment("(?)->>'inReplyTo' = ?", o.data, ^to_string(id)))
+    |> where([a, object: o], fragment("(?)->>'type' = 'Answer'", o.data))
+    |> Repo.all()
   end

   defp maybe_put(map, _key, nil), do: map
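The hand-written from/fragment queries above are replaced by composable helpers from Activity.Queries. The "this is to use the index" comments survive because the helper keeps the same coalesce shape the JSONB index is built over. A sketch of what Activity.Queries.by_object_id/2 presumably expands to, based only on the fragment it replaces here:

    # Assumed shape of the helper; the coalesce form is what matches the
    # index on activities.data, hence the "use the index" note above.
    def by_object_id(query, object_id) do
      where(
        query,
        [a],
        fragment(
          "coalesce((?)->'object'->>'id', (?)->>'object') = ?",
          a.data,
          a.data,
          ^object_id
        )
      )
    end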
@@ -90,6 +90,8 @@ defp do_convert(entity) when is_list(entity) do
     for v <- entity, into: [], do: do_convert(v)
   end

+  defp do_convert(%Regex{} = entity), do: inspect(entity)
+
   defp do_convert(entity) when is_map(entity) do
     for {k, v} <- entity, into: %{}, do: {do_convert(k), do_convert(v)}
   end
@@ -122,7 +124,7 @@ def transform(entity) when is_binary(entity) or is_map(entity) or is_list(entity

   def transform(entity), do: :erlang.term_to_binary(entity)

-  defp do_transform(%Regex{} = entity) when is_map(entity), do: entity
+  defp do_transform(%Regex{} = entity), do: entity

   defp do_transform(%{"tuple" => [":dispatch", [entity]]}) do
     {dispatch_settings, []} = do_eval(entity)
@@ -154,8 +156,15 @@ defp do_transform(entity) when is_binary(entity) do
   defp do_transform(entity), do: entity

   defp do_transform_string("~r/" <> pattern) do
-    pattern = String.trim_trailing(pattern, "/")
-    ~r/#{pattern}/
+    modificator = String.split(pattern, "/") |> List.last()
+    pattern = String.trim_trailing(pattern, "/" <> modificator)
+
+    case modificator do
+      "" -> ~r/#{pattern}/
+      "i" -> ~r/#{pattern}/i
+      "u" -> ~r/#{pattern}/u
+      "s" -> ~r/#{pattern}/s
+    end
   end

   defp do_transform_string(":" <> atom), do: String.to_atom(atom)
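With these two changes a regex and its flags (only the empty, i, u and s modifiers are handled) survive a round trip through the stored config. A short illustration of the string forms involved:

    # Serializing: inspect/1 keeps the sigil and its flag.
    inspect(~r/cofe/i)
    #=> "~r/cofe/i"

    # Reading it back: do_transform_string/1 splits off the trailing "i",
    # trims it from the pattern, and rebuilds an equivalent regex, so
    # Regex.match?(~r/cofe/i, "COFE") still holds after the round trip.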
@@ -34,79 +34,38 @@ defp param_to_integer(val, default) when is_binary(val) do

   defp param_to_integer(_, default), do: default

-  def add_link_headers(
-        conn,
-        method,
-        activities,
-        param \\ nil,
-        params \\ %{},
-        func3 \\ nil,
-        func4 \\ nil
-      ) do
-    params =
-      conn.params
-      |> Map.drop(["since_id", "max_id", "min_id"])
-      |> Map.merge(params)
-
-    last = List.last(activities)
-
-    func3 = func3 || (&mastodon_api_url/3)
-    func4 = func4 || (&mastodon_api_url/4)
-
-    if last do
-      max_id = last.id
-
-      limit =
-        params
-        |> Map.get("limit", "20")
-        |> String.to_integer()
-
-      min_id =
-        if length(activities) <= limit do
-          activities
-          |> List.first()
-          |> Map.get(:id)
-        else
-          activities
-          |> Enum.at(limit * -1)
-          |> Map.get(:id)
-        end
-
-      {next_url, prev_url} =
-        if param do
-          {
-            func4.(
-              Pleroma.Web.Endpoint,
-              method,
-              param,
-              Map.merge(params, %{max_id: max_id})
-            ),
-            func4.(
-              Pleroma.Web.Endpoint,
-              method,
-              param,
-              Map.merge(params, %{min_id: min_id})
-            )
-          }
-        else
-          {
-            func3.(
-              Pleroma.Web.Endpoint,
-              method,
-              Map.merge(params, %{max_id: max_id})
-            ),
-            func3.(
-              Pleroma.Web.Endpoint,
-              method,
-              Map.merge(params, %{min_id: min_id})
-            )
-          }
-        end
-
-      conn
-      |> put_resp_header("link", "<#{next_url}>; rel=\"next\", <#{prev_url}>; rel=\"prev\"")
-    else
-      conn
+  def add_link_headers(conn, activities, extra_params \\ %{}) do
+    case List.last(activities) do
+      %{id: max_id} ->
+        params =
+          conn.params
+          |> Map.drop(Map.keys(conn.path_params))
+          |> Map.drop(["since_id", "max_id", "min_id"])
+          |> Map.merge(extra_params)
+
+        limit =
+          params
+          |> Map.get("limit", "20")
+          |> String.to_integer()
+
+        min_id =
+          if length(activities) <= limit do
+            activities
+            |> List.first()
+            |> Map.get(:id)
+          else
+            activities
+            |> Enum.at(limit * -1)
+            |> Map.get(:id)
+          end
+
+        next_url = current_url(conn, Map.merge(params, %{max_id: max_id}))
+        prev_url = current_url(conn, Map.merge(params, %{min_id: min_id}))
+
+        put_resp_header(conn, "link", "<#{next_url}>; rel=\"next\", <#{prev_url}>; rel=\"prev\"")
+
+      _ ->
+        conn
     end
   end
 end
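Instead of threading a route name, an extra path param, and two url-helper functions through every caller, the helper now rebuilds both page URLs from current_url/2 after dropping the path params and the pagination params, so a single 2/3-arity works for every paginated endpoint. What the resulting response header looks like for a home timeline page (host and ids are illustrative; the value is a single header, wrapped here for readability):

    link: <https://pleroma.example/api/v1/timelines/home?limit=20&max_id=9gBQVbGGi7RZbkkGqs>; rel="next",
          <https://pleroma.example/api/v1/timelines/home?limit=20&min_id=9gCQWcHHj8SacllHrt>; rel="prev"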
@@ -10,16 +10,17 @@ defmodule Pleroma.Web.Federator do
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.Federator.Publisher
-  alias Pleroma.Web.Federator.RetryQueue
   alias Pleroma.Web.OStatus
   alias Pleroma.Web.Websub
+  alias Pleroma.Workers.PublisherWorker
+  alias Pleroma.Workers.ReceiverWorker
+  alias Pleroma.Workers.SubscriberWorker

   require Logger

   def init do
-    # 1 minute
-    Process.sleep(1000 * 60)
-    refresh_subscriptions()
+    # To do: consider removing this call in favor of scheduled execution (`quantum`-based)
+    refresh_subscriptions(schedule_in: 60)
   end

   @doc "Addresses [memory leaks on recursive replies fetching](https://git.pleroma.social/pleroma/pleroma/issues/161)"
@@ -37,50 +38,38 @@ def allowed_incoming_reply_depth?(depth) do
   # Client API

   def incoming_doc(doc) do
-    PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_doc, doc])
+    ReceiverWorker.enqueue("incoming_doc", %{"body" => doc})
   end

   def incoming_ap_doc(params) do
-    PleromaJobQueue.enqueue(:federator_incoming, __MODULE__, [:incoming_ap_doc, params])
+    ReceiverWorker.enqueue("incoming_ap_doc", %{"params" => params})
   end

-  def publish(activity, priority \\ 1) do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish, activity], priority)
+  def publish(%{id: "pleroma:fakeid"} = activity) do
+    perform(:publish, activity)
+  end
+
+  def publish(activity) do
+    PublisherWorker.enqueue("publish", %{"activity_id" => activity.id})
   end

   def verify_websub(websub) do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:verify_websub, websub])
+    SubscriberWorker.enqueue("verify_websub", %{"websub_id" => websub.id})
   end

-  def request_subscription(sub) do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:request_subscription, sub])
+  def request_subscription(websub) do
+    SubscriberWorker.enqueue("request_subscription", %{"websub_id" => websub.id})
   end

-  def refresh_subscriptions do
-    PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:refresh_subscriptions])
+  def refresh_subscriptions(worker_args \\ []) do
+    SubscriberWorker.enqueue("refresh_subscriptions", %{}, worker_args ++ [max_attempts: 1])
   end

   # Job Worker Callbacks

-  def perform(:refresh_subscriptions) do
-    Logger.debug("Federator running refresh subscriptions")
-    Websub.refresh_subscriptions()
-
-    spawn(fn ->
-      # 6 hours
-      Process.sleep(1000 * 60 * 60 * 6)
-      refresh_subscriptions()
-    end)
-  end
-
-  def perform(:request_subscription, websub) do
-    Logger.debug("Refreshing #{websub.topic}")
-
-    with {:ok, websub} <- Websub.request_subscription(websub) do
-      Logger.debug("Successfully refreshed #{websub.topic}")
-    else
-      _e -> Logger.debug("Couldn't refresh #{websub.topic}")
-    end
+  @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
+  def perform(:publish_one, module, params) do
+    apply(module, :publish_one, [params])
   end

   def perform(:publish, activity) do
@@ -92,14 +81,6 @@ def perform(:publish, activity) do
     end
   end

-  def perform(:verify_websub, websub) do
-    Logger.debug(fn ->
-      "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
-    end)
-
-    Websub.verify(websub)
-  end
-
   def perform(:incoming_doc, doc) do
     Logger.info("Got document, trying to parse")
     OStatus.handle_incoming(doc)
@@ -130,22 +111,27 @@ def perform(:incoming_ap_doc, params) do
     end
   end

-  def perform(
-        :publish_single_websub,
-        %{xml: _xml, topic: _topic, callback: _callback, secret: _secret} = params
-      ) do
-    case Websub.publish_one(params) do
-      {:ok, _} ->
-        :ok
-
-      {:error, _} ->
-        RetryQueue.enqueue(params, Websub)
+  def perform(:request_subscription, websub) do
+    Logger.debug("Refreshing #{websub.topic}")
+
+    with {:ok, websub} <- Websub.request_subscription(websub) do
+      Logger.debug("Successfully refreshed #{websub.topic}")
+    else
+      _e -> Logger.debug("Couldn't refresh #{websub.topic}")
     end
   end

-  def perform(type, _) do
-    Logger.debug(fn -> "Unknown task: #{type}" end)
-    {:error, "Don't know what to do with this"}
+  def perform(:verify_websub, websub) do
+    Logger.debug(fn ->
+      "Running WebSub verification for #{websub.id} (#{websub.topic}, #{websub.callback})"
+    end)
+
+    Websub.verify(websub)
+  end
+
+  def perform(:refresh_subscriptions) do
+    Logger.debug("Federator running refresh subscriptions")
+    Websub.refresh_subscriptions()
   end

   def ap_enabled_actor(id) do
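refresh_subscriptions/1 now accepts worker_args that are passed on to the job: in init/0 above that is schedule_in: 60, which (as an Oban job option) delays the run by 60 seconds and replaces the old Process.sleep, while max_attempts: 1 keeps the periodic refresh from being retried on failure. A rough sketch of what the enqueue presumably builds, assuming Pleroma.Workers.WorkerHelper does `use Oban.Worker` and forwards the keyword list straight into the job options before inserting it through the repo:

    # Sketch only; WorkerHelper itself is not part of this excerpt.
    %{"op" => "refresh_subscriptions"}
    |> SubscriberWorker.new(schedule_in: 60, max_attempts: 1)
    |> Pleroma.Repo.insert()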
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.Federator.Publisher do
   alias Pleroma.Activity
   alias Pleroma.Config
   alias Pleroma.User
-  alias Pleroma.Web.Federator.RetryQueue
+  alias Pleroma.Workers.PublisherWorker

   require Logger

@@ -30,23 +30,11 @@ defmodule Pleroma.Web.Federator.Publisher do
   Enqueue publishing a single activity.
   """
   @spec enqueue_one(module(), Map.t()) :: :ok
-  def enqueue_one(module, %{} = params),
-    do: PleromaJobQueue.enqueue(:federator_outgoing, __MODULE__, [:publish_one, module, params])
-
-  @spec perform(atom(), module(), any()) :: {:ok, any()} | {:error, any()}
-  def perform(:publish_one, module, params) do
-    case apply(module, :publish_one, [params]) do
-      {:ok, _} ->
-        :ok
-
-      {:error, _e} ->
-        RetryQueue.enqueue(params, module)
-    end
-  end
-
-  def perform(type, _, _) do
-    Logger.debug("Unknown task: #{type}")
-    {:error, "Don't know what to do with this"}
+  def enqueue_one(module, %{} = params) do
+    PublisherWorker.enqueue(
+      "publish_one",
+      %{"module" => to_string(module), "params" => params}
+    )
   end

   @doc """
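enqueue_one/2 no longer hands a module atom to PleromaJobQueue; the transport module goes into the job as a plain string so the arguments stay JSON-serializable, and PublisherWorker (added below in this commit) turns it back into a module before calling its publish_one/1. The string/atom round trip, for illustration:

    iex> to_string(Pleroma.Web.ActivityPub.Publisher)
    "Elixir.Pleroma.Web.ActivityPub.Publisher"
    iex> String.to_atom("Elixir.Pleroma.Web.ActivityPub.Publisher")
    Pleroma.Web.ActivityPub.Publisher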
@@ -1,239 +0,0 @@ defmodule Pleroma.Web.Federator.RetryQueue do
(entire module deleted: the ETS-backed GenServer retry queue for failed federation deliveries, including its start_link/init plumbing, enqueue/3, delivery stats, cubic growth_function backoff, and :retry_timer_run job monitoring, is removed; delivery retries are handled by the Oban-based workers introduced in this commit)
@@ -6,7 +6,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIController do
   use Pleroma.Web, :controller

   import Pleroma.Web.ControllerHelper,
-    only: [json_response: 3, add_link_headers: 5, add_link_headers: 4, add_link_headers: 3]
+    only: [json_response: 3, add_link_headers: 2, add_link_headers: 3]

   alias Ecto.Changeset
   alias Pleroma.Activity
@@ -365,7 +365,7 @@ def home_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()

     conn
-    |> add_link_headers(:home_timeline, activities)
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -384,7 +384,7 @@ def public_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()

     conn
-    |> add_link_headers(:public_timeline, activities, false, %{"local" => local_only})
+    |> add_link_headers(activities, %{"local" => local_only})
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -398,7 +398,7 @@ def user_statuses(%{assigns: %{user: reading_user}} = conn, params) do
     activities = ActivityPub.fetch_user_activities(user, reading_user, params)

     conn
-    |> add_link_headers(:user_statuses, activities, params["id"])
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{
       activities: activities,
@@ -422,7 +422,7 @@ def dm_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Pagination.fetch_paginated(params)

     conn
-    |> add_link_headers(:dm_timeline, activities)
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -537,7 +537,7 @@ def poll_vote(%{assigns: %{user: user}} = conn, %{"id" => id, "choices" => choic
   def scheduled_statuses(%{assigns: %{user: user}} = conn, params) do
     with scheduled_activities <- MastodonAPI.get_scheduled_activities(user, params) do
       conn
-      |> add_link_headers(:scheduled_statuses, scheduled_activities)
+      |> add_link_headers(scheduled_activities)
       |> put_view(ScheduledActivityView)
       |> render("index.json", %{scheduled_activities: scheduled_activities})
     end
@@ -720,7 +720,7 @@ def notifications(%{assigns: %{user: user}} = conn, params) do
     notifications = MastodonAPI.get_notifications(user, params)

     conn
-    |> add_link_headers(:notifications, notifications)
+    |> add_link_headers(notifications)
     |> put_view(NotificationView)
     |> render("index.json", %{notifications: notifications, for: user})
   end
@@ -842,6 +842,7 @@ def get_mascot(%{assigns: %{user: user}} = conn, _params) do

   def favourited_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     with %Activity{} = activity <- Activity.get_by_id_with_object(id),
+         {:visible, true} <- {:visible, Visibility.visible_for_user?(activity, user)},
          %Object{data: %{"likes" => likes}} <- Object.normalize(activity) do
       q = from(u in User, where: u.ap_id in ^likes)

@@ -853,12 +854,14 @@ def favourited_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
       |> put_view(AccountView)
       |> render("accounts.json", %{for: user, users: users, as: :user})
     else
+      {:visible, false} -> {:error, :not_found}
       _ -> json(conn, [])
     end
   end

   def reblogged_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
     with %Activity{} = activity <- Activity.get_by_id_with_object(id),
+         {:visible, true} <- {:visible, Visibility.visible_for_user?(activity, user)},
          %Object{data: %{"announcements" => announces}} <- Object.normalize(activity) do
       q = from(u in User, where: u.ap_id in ^announces)

@@ -870,6 +873,7 @@ def reblogged_by(%{assigns: %{user: user}} = conn, %{"id" => id}) do
       |> put_view(AccountView)
       |> render("accounts.json", %{for: user, users: users, as: :user})
     else
+      {:visible, false} -> {:error, :not_found}
       _ -> json(conn, [])
     end
   end
@@ -908,7 +912,7 @@ def hashtag_timeline(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()

     conn
-    |> add_link_headers(:hashtag_timeline, activities, params["tag"], %{"local" => local_only})
+    |> add_link_headers(activities, %{"local" => local_only})
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -924,7 +928,7 @@ def followers(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params) do
     end

     conn
-    |> add_link_headers(:followers, followers, user)
+    |> add_link_headers(followers)
     |> put_view(AccountView)
     |> render("accounts.json", %{for: for_user, users: followers, as: :user})
   end
@@ -941,7 +945,7 @@ def following(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params) do
     end

     conn
-    |> add_link_headers(:following, followers, user)
+    |> add_link_headers(followers)
     |> put_view(AccountView)
     |> render("accounts.json", %{for: for_user, users: followers, as: :user})
   end
@@ -1166,7 +1170,7 @@ def favourites(%{assigns: %{user: user}} = conn, params) do
       |> Enum.reverse()

     conn
-    |> add_link_headers(:favourites, activities)
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -1193,7 +1197,7 @@ def user_favourites(%{assigns: %{user: for_user}} = conn, %{"id" => id} = params
       |> Enum.reverse()

     conn
-    |> add_link_headers(:favourites, activities)
+    |> add_link_headers(activities)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: for_user, as: :activity})
   else
@@ -1214,7 +1218,7 @@ def bookmarks(%{assigns: %{user: user}} = conn, params) do
       |> Enum.map(fn b -> Map.put(b.activity, :bookmark, Map.delete(b, :activity)) end)

     conn
-    |> add_link_headers(:bookmarks, bookmarks)
+    |> add_link_headers(bookmarks)
     |> put_view(StatusView)
     |> render("index.json", %{activities: activities, for: user, as: :activity})
   end
@@ -1654,7 +1658,7 @@ def conversations(%{assigns: %{user: user}} = conn, params) do
     end)

     conn
-    |> add_link_headers(:conversations, participations)
+    |> add_link_headers(participations)
     |> json(conversations)
   end

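The `{:visible, ...}` tagged tuple lets the `with` distinguish "the post exists but the caller may not see it" from every other failure: a false visibility check falls through to the new `{:visible, false} -> {:error, :not_found}` clause instead of leaking who favourited or reblogged a restricted post. A minimal sketch of the same idiom in isolation (fetch_post/1 and visible?/2 are hypothetical helpers used only for illustration):

    with {:ok, post} <- fetch_post(id),
         {:visible, true} <- {:visible, visible?(post, viewer)} do
      {:ok, post}
    else
      # Only the visibility failure maps to :not_found; everything else passes through.
      {:visible, false} -> {:error, :not_found}
      error -> error
    end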
@@ -17,6 +17,7 @@ defmodule Pleroma.Web.OAuth.Token.CleanWorker do
     )

   alias Pleroma.Web.OAuth.Token
+  alias Pleroma.Workers.BackgroundWorker

   def start_link(_), do: GenServer.start_link(__MODULE__, %{})

@@ -27,9 +28,11 @@ def init(_) do

   @doc false
   def handle_info(:perform, state) do
-    Token.delete_expired_tokens()
+    BackgroundWorker.enqueue("clean_expired_tokens", %{})

     Process.send_after(self(), :perform, @interval)
     {:noreply, state}
   end
+
+  def perform(:clean), do: Token.delete_expired_tokens()
 end
|
|||||||
defmodule Pleroma.Web.PleromaAPI.PleromaAPIController do
|
defmodule Pleroma.Web.PleromaAPI.PleromaAPIController do
|
||||||
use Pleroma.Web, :controller
|
use Pleroma.Web, :controller
|
||||||
|
|
||||||
import Pleroma.Web.ControllerHelper, only: [add_link_headers: 7]
|
import Pleroma.Web.ControllerHelper, only: [add_link_headers: 2]
|
||||||
|
|
||||||
alias Pleroma.Conversation.Participation
|
alias Pleroma.Conversation.Participation
|
||||||
alias Pleroma.Notification
|
alias Pleroma.Notification
|
||||||
@ -27,31 +27,22 @@ def conversation_statuses(
|
|||||||
%{assigns: %{user: user}} = conn,
|
%{assigns: %{user: user}} = conn,
|
||||||
%{"id" => participation_id} = params
|
%{"id" => participation_id} = params
|
||||||
) do
|
) do
|
||||||
params =
|
participation = Participation.get(participation_id, preload: [:conversation])
|
||||||
params
|
|
||||||
|> Map.put("blocking_user", user)
|
|
||||||
|> Map.put("muting_user", user)
|
|
||||||
|> Map.put("user", user)
|
|
||||||
|
|
||||||
participation =
|
|
||||||
participation_id
|
|
||||||
|> Participation.get(preload: [:conversation])
|
|
||||||
|
|
||||||
if user.id == participation.user_id do
|
if user.id == participation.user_id do
|
||||||
|
params =
|
||||||
|
params
|
||||||
|
|> Map.put("blocking_user", user)
|
||||||
|
|> Map.put("muting_user", user)
|
||||||
|
|> Map.put("user", user)
|
||||||
|
|
||||||
activities =
|
activities =
|
||||||
participation.conversation.ap_id
|
participation.conversation.ap_id
|
||||||
|> ActivityPub.fetch_activities_for_context(params)
|
|> ActivityPub.fetch_activities_for_context(params)
|
||||||
|> Enum.reverse()
|
|> Enum.reverse()
|
||||||
|
|
||||||
conn
|
conn
|
||||||
|> add_link_headers(
|
|> add_link_headers(activities)
|
||||||
:conversation_statuses,
|
|
||||||
activities,
|
|
||||||
participation_id,
|
|
||||||
params,
|
|
||||||
nil,
|
|
||||||
&pleroma_api_url/4
|
|
||||||
)
|
|
||||||
|> put_view(StatusView)
|
|> put_view(StatusView)
|
||||||
|> render("index.json", %{activities: activities, for: user, as: :activity})
|
|> render("index.json", %{activities: activities, for: user, as: :activity})
|
||||||
end
|
end
|
||||||
|
@@ -3,7 +3,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.Push do
-  alias Pleroma.Web.Push.Impl
+  alias Pleroma.Workers.WebPusherWorker

   require Logger

@@ -31,6 +31,7 @@ def enabled do
     end
   end

-  def send(notification),
-    do: PleromaJobQueue.enqueue(:web_push, Impl, [notification])
+  def send(notification) do
+    WebPusherWorker.enqueue("web_push", %{"notification_id" => notification.id})
+  end
 end
@@ -224,6 +224,7 @@ defmodule Pleroma.Web.Router do
     scope [] do
       pipe_through(:oauth_write)

+      post("/change_email", UtilController, :change_email)
       post("/change_password", UtilController, :change_password)
       post("/delete_account", UtilController, :delete_account)
       put("/notification_settings", UtilController, :update_notificaton_settings)
@@ -170,6 +170,15 @@ def publish_one(%{recipient: url, feed: feed} = params) when is_binary(url) do
     end
   end

+  def publish_one(%{recipient_id: recipient_id} = params) do
+    recipient = User.get_cached_by_id(recipient_id)
+
+    params
+    |> Map.delete(:recipient_id)
+    |> Map.put(:recipient, recipient)
+    |> publish_one()
+  end
+
   def publish_one(_), do: :noop

   @supported_activities [
@@ -218,7 +227,7 @@ def publish(%{info: %{keys: keys}} = user, %{data: %{"type" => type}} = activity
       Logger.debug(fn -> "Sending Salmon to #{remote_user.ap_id}" end)

       Publisher.enqueue_one(__MODULE__, %{
-        recipient: remote_user,
+        recipient_id: remote_user.id,
         feed: feed,
         unreachable_since: reachable_urls_metadata[remote_user.info.salmon]
       })
@@ -265,12 +265,7 @@ def follow_import(%{assigns: %{user: follower}} = conn, %{"list" => list}) do
              String.split(line, ",") |> List.first()
            end)
            |> List.delete("Account address") do
-      PleromaJobQueue.enqueue(:background, User, [
-        :follow_import,
-        follower,
-        followed_identifiers
-      ])
-
+      User.follow_import(follower, followed_identifiers)
       json(conn, "job started")
     end
   end
@@ -281,12 +276,7 @@ def blocks_import(conn, %{"list" => %Plug.Upload{} = listfile}) do

   def blocks_import(%{assigns: %{user: blocker}} = conn, %{"list" => list}) do
     with blocked_identifiers <- String.split(list) do
-      PleromaJobQueue.enqueue(:background, User, [
-        :blocks_import,
-        blocker,
-        blocked_identifiers
-      ])
-
+      User.blocks_import(blocker, blocked_identifiers)
       json(conn, "job started")
     end
   end
@@ -314,6 +304,25 @@ def change_password(%{assigns: %{user: user}} = conn, params) do
     end
   end

+  def change_email(%{assigns: %{user: user}} = conn, params) do
+    case CommonAPI.Utils.confirm_current_password(user, params["password"]) do
+      {:ok, user} ->
+        with {:ok, _user} <- User.change_email(user, params["email"]) do
+          json(conn, %{status: "success"})
+        else
+          {:error, changeset} ->
+            {_, {error, _}} = Enum.at(changeset.errors, 0)
+            json(conn, %{error: "Email #{error}."})
+
+          _ ->
+            json(conn, %{error: "Unable to change email."})
+        end
+
+      {:error, msg} ->
+        json(conn, %{error: msg})
+    end
+  end
+
   def delete_account(%{assigns: %{user: user}} = conn, params) do
     case CommonAPI.Utils.confirm_current_password(user, params["password"]) do
       {:ok, user} ->
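Together with the new route above, this exposes email changes over the API; the handler re-checks the current password before calling User.change_email/2. A hedged request/response sketch, assuming the scope carrying this route is mounted under /api/pleroma like the other UtilController endpoints:

    # Request (form or JSON body):
    #   POST /api/pleroma/change_email
    #   password=hunter2&email=new@example.com
    #
    # Responses produced by the clauses above (error wordings depend on the
    # changeset validation and confirm_current_password/2, so they are assumed):
    #   %{"status" => "success"}
    #   %{"error" => "Email has invalid format."}
    #   %{"error" => "Invalid password."}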
lib/pleroma/workers/activity_expiration_worker.ex (new file, 18 lines)
@@ -0,0 +1,18 @@
+# Pleroma: A lightweight social networking server
+# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
+# SPDX-License-Identifier: AGPL-3.0-only
+
+defmodule Pleroma.Workers.ActivityExpirationWorker do
+  use Pleroma.Workers.WorkerHelper, queue: "activity_expiration"
+
+  @impl Oban.Worker
+  def perform(
+        %{
+          "op" => "activity_expiration",
+          "activity_expiration_id" => activity_expiration_id
+        },
+        _job
+      ) do
+    Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, activity_expiration_id)
+  end
+end
69 lib/pleroma/workers/background_worker.ex Normal file
@ -0,0 +1,69 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.BackgroundWorker do
  alias Pleroma.Activity
  alias Pleroma.User
  alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy
  alias Pleroma.Web.OAuth.Token.CleanWorker

  use Pleroma.Workers.WorkerHelper, queue: "background"

  @impl Oban.Worker
  def perform(%{"op" => "fetch_initial_posts", "user_id" => user_id}, _job) do
    user = User.get_cached_by_id(user_id)
    User.perform(:fetch_initial_posts, user)
  end

  def perform(%{"op" => "deactivate_user", "user_id" => user_id, "status" => status}, _job) do
    user = User.get_cached_by_id(user_id)
    User.perform(:deactivate_async, user, status)
  end

  def perform(%{"op" => "delete_user", "user_id" => user_id}, _job) do
    user = User.get_cached_by_id(user_id)
    User.perform(:delete, user)
  end

  def perform(
        %{
          "op" => "blocks_import",
          "blocker_id" => blocker_id,
          "blocked_identifiers" => blocked_identifiers
        },
        _job
      ) do
    blocker = User.get_cached_by_id(blocker_id)
    User.perform(:blocks_import, blocker, blocked_identifiers)
  end

  def perform(
        %{
          "op" => "follow_import",
          "follower_id" => follower_id,
          "followed_identifiers" => followed_identifiers
        },
        _job
      ) do
    follower = User.get_cached_by_id(follower_id)
    User.perform(:follow_import, follower, followed_identifiers)
  end

  def perform(%{"op" => "clean_expired_tokens"}, _job) do
    CleanWorker.perform(:clean)
  end

  def perform(%{"op" => "media_proxy_preload", "message" => message}, _job) do
    MediaProxyWarmingPolicy.perform(:preload, message)
  end

  def perform(%{"op" => "media_proxy_prefetch", "url" => url}, _job) do
    MediaProxyWarmingPolicy.perform(:prefetch, url)
  end

  def perform(%{"op" => "fetch_data_for_activity", "activity_id" => activity_id}, _job) do
    activity = Activity.get_by_id(activity_id)
    Pleroma.Web.RichMedia.Helpers.perform(:fetch, activity)
  end
end
16 lib/pleroma/workers/digest_emails_worker.ex Normal file
@ -0,0 +1,16 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.DigestEmailsWorker do
  alias Pleroma.User

  use Pleroma.Workers.WorkerHelper, queue: "digest_emails"

  @impl Oban.Worker
  def perform(%{"op" => "digest_email", "user_id" => user_id}, _job) do
    user_id
    |> User.get_cached_by_id()
    |> Pleroma.Daemons.DigestEmailDaemon.perform()
  end
end
15 lib/pleroma/workers/mailer_worker.ex Normal file
@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.MailerWorker do
  use Pleroma.Workers.WorkerHelper, queue: "mailer"

  @impl Oban.Worker
  def perform(%{"op" => "email", "encoded_email" => encoded_email, "config" => config}, _job) do
    encoded_email
    |> Base.decode64!()
    |> :erlang.binary_to_term()
    |> Pleroma.Emails.Mailer.deliver(config)
  end
end
25 lib/pleroma/workers/publisher_worker.ex Normal file
@ -0,0 +1,25 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.PublisherWorker do
  alias Pleroma.Activity
  alias Pleroma.Web.Federator

  use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"

  def backoff(attempt) when is_integer(attempt) do
    Pleroma.Workers.WorkerHelper.sidekiq_backoff(attempt, 5)
  end

  @impl Oban.Worker
  def perform(%{"op" => "publish", "activity_id" => activity_id}, _job) do
    activity = Activity.get_by_id(activity_id)
    Federator.perform(:publish, activity)
  end

  def perform(%{"op" => "publish_one", "module" => module_name, "params" => params}, _job) do
    params = Map.new(params, fn {k, v} -> {String.to_atom(k), v} end)
    Federator.perform(:publish_one, String.to_atom(module_name), params)
  end
end
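The `backoff/1` override above delegates to `sidekiq_backoff/3`, which is added in `lib/pleroma/workers/worker_helper.ex` further down in this diff. As a rough, illustrative calculation (not part of the change itself), with the `pow: 5` passed here a third delivery attempt would be retried after roughly:

    # Illustrative only; :rand.uniform/1 adds jitter, so the exact delay varies per run.
    attempt = 3
    base_backoff = 15
    trunc(:math.pow(attempt, 5) + base_backoff + :rand.uniform(2 * base_backoff) * attempt)
    # 3^5 = 243, plus 15, plus a jitter of (1..30) * 3 => roughly 261..348 seconds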
18 lib/pleroma/workers/receiver_worker.ex Normal file
@ -0,0 +1,18 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.ReceiverWorker do
  alias Pleroma.Web.Federator

  use Pleroma.Workers.WorkerHelper, queue: "federator_incoming"

  @impl Oban.Worker
  def perform(%{"op" => "incoming_doc", "body" => doc}, _job) do
    Federator.perform(:incoming_doc, doc)
  end

  def perform(%{"op" => "incoming_ap_doc", "params" => params}, _job) do
    Federator.perform(:incoming_ap_doc, params)
  end
end
12 lib/pleroma/workers/scheduled_activity_worker.ex Normal file
@ -0,0 +1,12 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.ScheduledActivityWorker do
  use Pleroma.Workers.WorkerHelper, queue: "scheduled_activities"

  @impl Oban.Worker
  def perform(%{"op" => "execute", "activity_id" => activity_id}, _job) do
    Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, activity_id)
  end
end
26 lib/pleroma/workers/subscriber_worker.ex Normal file
@ -0,0 +1,26 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.SubscriberWorker do
  alias Pleroma.Repo
  alias Pleroma.Web.Federator
  alias Pleroma.Web.Websub

  use Pleroma.Workers.WorkerHelper, queue: "federator_outgoing"

  @impl Oban.Worker
  def perform(%{"op" => "refresh_subscriptions"}, _job) do
    Federator.perform(:refresh_subscriptions)
  end

  def perform(%{"op" => "request_subscription", "websub_id" => websub_id}, _job) do
    websub = Repo.get(Websub.WebsubClientSubscription, websub_id)
    Federator.perform(:request_subscription, websub)
  end

  def perform(%{"op" => "verify_websub", "websub_id" => websub_id}, _job) do
    websub = Repo.get(Websub.WebsubServerSubscription, websub_id)
    Federator.perform(:verify_websub, websub)
  end
end
15 lib/pleroma/workers/transmogrifier_worker.ex Normal file
@ -0,0 +1,15 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.TransmogrifierWorker do
  alias Pleroma.User

  use Pleroma.Workers.WorkerHelper, queue: "transmogrifier"

  @impl Oban.Worker
  def perform(%{"op" => "user_upgrade", "user_id" => user_id}, _job) do
    user = User.get_cached_by_id(user_id)
    Pleroma.Web.ActivityPub.Transmogrifier.perform(:user_upgrade, user)
  end
end
16 lib/pleroma/workers/web_pusher_worker.ex Normal file
@ -0,0 +1,16 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.WebPusherWorker do
  alias Pleroma.Notification
  alias Pleroma.Repo

  use Pleroma.Workers.WorkerHelper, queue: "web_push"

  @impl Oban.Worker
  def perform(%{"op" => "web_push", "notification_id" => notification_id}, _job) do
    notification = Repo.get(Notification, notification_id)
    Pleroma.Web.Push.Impl.perform(notification)
  end
end
46 lib/pleroma/workers/worker_helper.ex Normal file
@ -0,0 +1,46 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Workers.WorkerHelper do
  alias Pleroma.Config
  alias Pleroma.Workers.WorkerHelper

  def worker_args(queue) do
    case Config.get([:workers, :retries, queue]) do
      nil -> []
      max_attempts -> [max_attempts: max_attempts]
    end
  end

  def sidekiq_backoff(attempt, pow \\ 4, base_backoff \\ 15) do
    backoff =
      :math.pow(attempt, pow) +
        base_backoff +
        :rand.uniform(2 * base_backoff) * attempt

    trunc(backoff)
  end

  defmacro __using__(opts) do
    caller_module = __CALLER__.module
    queue = Keyword.fetch!(opts, :queue)

    quote do
      # Note: `max_attempts` is intended to be overridden in `new/2` call
      use Oban.Worker,
        queue: unquote(queue),
        max_attempts: 1

      def enqueue(op, params, worker_args \\ []) do
        params = Map.merge(%{"op" => op}, params)
        queue_atom = String.to_atom(unquote(queue))
        worker_args = worker_args ++ WorkerHelper.worker_args(queue_atom)

        unquote(caller_module)
        |> apply(:new, [params, worker_args])
        |> Pleroma.Repo.insert()
      end
    end
  end
end
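A minimal usage sketch for the helper above; the module name and job arguments below are made up for illustration and are not part of this diff. `use Pleroma.Workers.WorkerHelper, queue: ...` pulls in `Oban.Worker` and generates an `enqueue/3` that merges the `"op"` into the job arguments and inserts an `Oban.Job`, while `worker_args/1` lets a `[:workers, :retries, <queue>]` config entry raise `max_attempts` per queue (the exact config shape is an assumption based on that lookup).

    # Hypothetical worker, for illustration only:
    defmodule MyApp.Workers.ExampleWorker do
      use Pleroma.Workers.WorkerHelper, queue: "background"

      @impl Oban.Worker
      def perform(%{"op" => "ping", "user_id" => user_id}, _job) do
        IO.puts("pinging user #{user_id}")
      end
    end

    # Callers enqueue through the generated helper instead of building Oban changesets by hand:
    MyApp.Workers.ExampleWorker.enqueue("ping", %{"user_id" => 1})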
5 mix.exs
@ -101,6 +101,8 @@ defp deps do
       {:phoenix_ecto, "~> 4.0"},
       {:ecto_sql, "~> 3.1"},
       {:postgrex, ">= 0.13.5"},
+      {:oban, "~> 0.7"},
+      {:quantum, "~> 2.3"},
       {:gettext, "~> 0.15"},
       {:comeonin, "~> 4.1.1"},
       {:pbkdf2_elixir, "~> 0.12.3"},
@ -125,7 +127,7 @@ defp deps do
       {:crypt,
        git: "https://github.com/msantos/crypt", ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"},
       {:cors_plug, "~> 1.5"},
-      {:ex_doc, "~> 0.20.2", only: :dev, runtime: false},
+      {:ex_doc, "~> 0.21", only: :dev, runtime: false},
       {:web_push_encryption, "~> 0.2.1"},
       {:swoosh, "~> 0.23.2"},
       {:phoenix_swoosh, "~> 0.2"},
@ -141,7 +143,6 @@ defp deps do
       {:http_signatures,
        git: "https://git.pleroma.social/pleroma/http_signatures.git",
        ref: "293d77bb6f4a67ac8bde1428735c3b42f22cbb30"},
-      {:pleroma_job_queue, "~> 0.3"},
       {:telemetry, "~> 0.3"},
       {:prometheus_ex, "~> 3.0"},
       {:prometheus_plugs, "~> 1.1"},
19 mix.lock
@ -17,10 +17,10
   "credo": {:hex, :credo, "0.9.3", "76fa3e9e497ab282e0cf64b98a624aa11da702854c52c82db1bf24e54ab7c97a", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:poison, ">= 0.0.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
   "crontab": {:hex, :crontab, "1.1.7", "b9219f0bdc8678b94143655a8f229716c5810c0636a4489f98c0956137e53985", [:mix], [{:ecto, "~> 1.0 or ~> 2.0 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm"},
   "crypt": {:git, "https://github.com/msantos/crypt", "1f2b58927ab57e72910191a7ebaeff984382a1d3", [ref: "1f2b58927ab57e72910191a7ebaeff984382a1d3"]},
-  "db_connection": {:hex, :db_connection, "2.0.6", "bde2f85d047969c5b5800cb8f4b3ed6316c8cb11487afedac4aa5f93fd39abfa", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
+  "db_connection": {:hex, :db_connection, "2.1.1", "a51e8a2ee54ef2ae6ec41a668c85787ed40cb8944928c191280fe34c15b76ae5", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}], "hexpm"},
   "decimal": {:hex, :decimal, "1.8.0", "ca462e0d885f09a1c5a342dbd7c1dcf27ea63548c65a65e67334f4b61803822e", [:mix], [], "hexpm"},
   "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm"},
-  "earmark": {:hex, :earmark, "1.3.2", "b840562ea3d67795ffbb5bd88940b1bed0ed9fa32834915125ea7d02e35888a5", [:mix], [], "hexpm"},
+  "earmark": {:hex, :earmark, "1.3.6", "ce1d0675e10a5bb46b007549362bd3f5f08908843957687d8484fe7f37466b19", [:mix], [], "hexpm"},
   "ecto": {:hex, :ecto, "3.1.4", "69d852da7a9f04ede725855a35ede48d158ca11a404fe94f8b2fb3b2162cd3c9", [:mix], [{:decimal, "~> 1.6", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm"},
   "ecto_sql": {:hex, :ecto_sql, "3.1.3", "2c536139190492d9de33c5fefac7323c5eaaa82e1b9bf93482a14649042f7cd9", [:mix], [{:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.1.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.9.1", [hex: :mariaex, repo: "hexpm", optional: true]}, {:myxql, "~> 0.2.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.14.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
   "esshd": {:hex, :esshd, "0.1.0", "6f93a2062adb43637edad0ea7357db2702a4b80dd9683482fe00f5134e97f4c1", [:mix], [], "hexpm"},
@ -29,13 +29,15
   "ex_aws": {:hex, :ex_aws, "2.1.0", "b92651527d6c09c479f9013caa9c7331f19cba38a650590d82ebf2c6c16a1d8a", [:mix], [{:configparser_ex, "~> 2.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "1.6.3 or 1.6.5 or 1.7.1 or 1.8.6 or ~> 1.9", [hex: :hackney, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8", [hex: :jsx, repo: "hexpm", optional: true]}, {:poison, ">= 1.2.0", [hex: :poison, repo: "hexpm", optional: true]}, {:sweet_xml, "~> 0.6", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:xml_builder, "~> 0.1.0", [hex: :xml_builder, repo: "hexpm", optional: true]}], "hexpm"},
   "ex_aws_s3": {:hex, :ex_aws_s3, "2.0.1", "9e09366e77f25d3d88c5393824e613344631be8db0d1839faca49686e99b6704", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm"},
   "ex_const": {:hex, :ex_const, "0.2.4", "d06e540c9d834865b012a17407761455efa71d0ce91e5831e86881b9c9d82448", [:mix], [], "hexpm"},
-  "ex_doc": {:hex, :ex_doc, "0.20.2", "1bd0dfb0304bade58beb77f20f21ee3558cc3c753743ae0ddbb0fd7ba2912331", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.10", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
+  "ex_doc": {:hex, :ex_doc, "0.21.2", "caca5bc28ed7b3bdc0b662f8afe2bee1eedb5c3cf7b322feeeb7c6ebbde089d6", [:mix], [{:earmark, "~> 1.3.3 or ~> 1.4", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"},
   "ex_machina": {:hex, :ex_machina, "2.3.0", "92a5ad0a8b10ea6314b876a99c8c9e3f25f4dde71a2a835845b136b9adaf199a", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm"},
   "ex_rated": {:hex, :ex_rated, "1.3.3", "30ecbdabe91f7eaa9d37fa4e81c85ba420f371babeb9d1910adbcd79ec798d27", [:mix], [{:ex2ms, "~> 1.5", [hex: :ex2ms, repo: "hexpm", optional: false]}], "hexpm"},
   "ex_syslogger": {:git, "https://github.com/slashmili/ex_syslogger.git", "f3963399047af17e038897c69e20d552e6899e1d", [tag: "1.4.0"]},
   "excoveralls": {:hex, :excoveralls, "0.11.1", "dd677fbdd49114fdbdbf445540ec735808250d56b011077798316505064edb2c", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm"},
   "floki": {:hex, :floki, "0.20.4", "be42ac911fece24b4c72f3b5846774b6e61b83fe685c2fc9d62093277fb3bc86", [:mix], [{:html_entities, "~> 0.4.0", [hex: :html_entities, repo: "hexpm", optional: false]}, {:mochiweb, "~> 2.15", [hex: :mochiweb, repo: "hexpm", optional: false]}], "hexpm"},
   "gen_smtp": {:hex, :gen_smtp, "0.14.0", "39846a03522456077c6429b4badfd1d55e5e7d0fdfb65e935b7c5e38549d9202", [:rebar3], [], "hexpm"},
+  "gen_stage": {:hex, :gen_stage, "0.14.2", "6a2a578a510c5bfca8a45e6b27552f613b41cf584b58210f017088d3d17d0b14", [:mix], [], "hexpm"},
+  "gen_state_machine": {:hex, :gen_state_machine, "2.0.5", "9ac15ec6e66acac994cc442dcc2c6f9796cf380ec4b08267223014be1c728a95", [:mix], [], "hexpm"},
   "gettext": {:hex, :gettext, "0.17.0", "abe21542c831887a2b16f4c94556db9c421ab301aee417b7c4fbde7fbdbe01ec", [:mix], [], "hexpm"},
   "hackney": {:hex, :hackney, "1.15.1", "9f8f471c844b8ce395f7b6d8398139e26ddca9ebc171a8b91342ee15a19963f4", [:rebar3], [{:certifi, "2.5.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "6.0.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.4", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"},
   "html_entities": {:hex, :html_entities, "0.4.0", "f2fee876858cf6aaa9db608820a3209e45a087c5177332799592142b50e89a6b", [:mix], [], "hexpm"},
@ -46,8 +48,9
   "jason": {:hex, :jason, "1.1.2", "b03dedea67a99223a2eaf9f1264ce37154564de899fd3d8b9a21b1a6fd64afe7", [:mix], [{:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"},
   "joken": {:hex, :joken, "2.0.1", "ec9ab31bf660f343380da033b3316855197c8d4c6ef597fa3fcb451b326beb14", [:mix], [{:jose, "~> 1.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm"},
   "jose": {:hex, :jose, "1.9.0", "4167c5f6d06ffaebffd15cdb8da61a108445ef5e85ab8f5a7ad926fdf3ada154", [:mix, :rebar3], [{:base64url, "~> 0.0.1", [hex: :base64url, repo: "hexpm", optional: false]}], "hexpm"},
-  "makeup": {:hex, :makeup, "0.8.0", "9cf32aea71c7fe0a4b2e9246c2c4978f9070257e5c9ce6d4a28ec450a839b55f", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
-  "makeup_elixir": {:hex, :makeup_elixir, "0.13.0", "be7a477997dcac2e48a9d695ec730b2d22418292675c75aa2d34ba0909dcdeda", [:mix], [{:makeup, "~> 0.8", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"},
+  "libring": {:hex, :libring, "1.4.0", "41246ba2f3fbc76b3971f6bce83119dfec1eee17e977a48d8a9cfaaf58c2a8d6", [:mix], [], "hexpm"},
+  "makeup": {:hex, :makeup, "1.0.0", "671df94cf5a594b739ce03b0d0316aa64312cee2574b6a44becb83cd90fb05dc", [:mix], [{:nimble_parsec, "~> 0.5.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"},
+  "makeup_elixir": {:hex, :makeup_elixir, "0.14.0", "cf8b7c66ad1cff4c14679698d532f0b5d45a3968ffbcbfd590339cb57742f1ae", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm"},
   "meck": {:hex, :meck, "0.8.13", "ffedb39f99b0b99703b8601c6f17c7f76313ee12de6b646e671e3188401f7866", [:rebar3], [], "hexpm"},
   "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"},
   "mime": {:hex, :mime, "1.3.1", "30ce04ab3175b6ad0bdce0035cba77bba68b813d523d1aac73d9781b4d193cf8", [:mix], [], "hexpm"},
@ -56,7 +59,8
   "mock": {:hex, :mock, "0.3.3", "42a433794b1291a9cf1525c6d26b38e039e0d3a360732b5e467bfc77ef26c914", [:mix], [{:meck, "~> 0.8.13", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"},
   "mogrify": {:hex, :mogrify, "0.6.1", "de1b527514f2d95a7bbe9642eb556061afb337e220cf97adbf3a4e6438ed70af", [:mix], [], "hexpm"},
   "mox": {:hex, :mox, "0.5.1", "f86bb36026aac1e6f924a4b6d024b05e9adbed5c63e8daa069bd66fb3292165b", [:mix], [], "hexpm"},
-  "nimble_parsec": {:hex, :nimble_parsec, "0.5.0", "90e2eca3d0266e5c53f8fbe0079694740b9c91b6747f2b7e3c5d21966bba8300", [:mix], [], "hexpm"},
+  "nimble_parsec": {:hex, :nimble_parsec, "0.5.1", "c90796ecee0289dbb5ad16d3ad06f957b0cd1199769641c961cfe0b97db190e0", [:mix], [], "hexpm"},
+  "oban": {:hex, :oban, "0.7.1", "171bdd1b69c1a4a839f8c768f5e962fc22d1de1513d459fb6b8e0cbd34817a9a", [:mix], [{:ecto_sql, "~> 3.1", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.14", [hex: :postgrex, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
   "parse_trans": {:hex, :parse_trans, "3.3.0", "09765507a3c7590a784615cfd421d101aec25098d50b89d7aa1d66646bc571c1", [:rebar3], [], "hexpm"},
   "pbkdf2_elixir": {:hex, :pbkdf2_elixir, "0.12.3", "6706a148809a29c306062862c803406e88f048277f6e85b68faf73291e820b84", [:mix], [], "hexpm"},
   "phoenix": {:hex, :phoenix, "1.4.9", "746d098e10741c334d88143d3c94cab1756435f94387a63441792e66ec0ee974", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.8.1 or ~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm"},
@ -64,7 +68,6
   "phoenix_html": {:hex, :phoenix_html, "2.13.1", "fa8f034b5328e2dfa0e4131b5569379003f34bc1fafdaa84985b0b9d2f12e68b", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.1.2", "496c303bdf1b2e98a9d26e89af5bba3ab487ba3a3735f74bf1f4064d2a845a3e", [:mix], [], "hexpm"},
   "phoenix_swoosh": {:hex, :phoenix_swoosh, "0.2.0", "a7e0b32077cd6d2323ae15198839b05d9caddfa20663fd85787479e81f89520e", [:mix], [{:phoenix, "~> 1.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.2", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:swoosh, "~> 0.1", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm"},
-  "pleroma_job_queue": {:hex, :pleroma_job_queue, "0.3.0", "b84538d621f0c3d6fcc1cff9d5648d3faaf873b8b21b94e6503428a07a48ec47", [:mix], [{:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}], "hexpm"},
   "plug": {:hex, :plug, "1.8.2", "0bcce1daa420f189a6491f3940cc77ea7fb1919761175c9c3b59800d897440fc", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm"},
   "plug_cowboy": {:hex, :plug_cowboy, "2.1.0", "b75768153c3a8a9e8039d4b25bb9b14efbc58e9c4a6e6a270abff1cd30cbe320", [:mix], [{:cowboy, "~> 2.5", [hex: :cowboy, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
   "plug_crypto": {:hex, :plug_crypto, "1.0.0", "18e49317d3fa343f24620ed22795ec29d4a5e602d52d1513ccea0b07d8ea7d4d", [:mix], [], "hexpm"},
@ -77,9 +80,11
   "prometheus_phoenix": {:hex, :prometheus_phoenix, "1.3.0", "c4b527e0b3a9ef1af26bdcfbfad3998f37795b9185d475ca610fe4388fdd3bb5", [:mix], [{:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.3 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}], "hexpm"},
   "prometheus_plugs": {:hex, :prometheus_plugs, "1.1.5", "25933d48f8af3a5941dd7b621c889749894d8a1082a6ff7c67cc99dec26377c5", [:mix], [{:accept, "~> 0.1", [hex: :accept, repo: "hexpm", optional: false]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}, {:prometheus_ex, "~> 1.1 or ~> 2.0 or ~> 3.0", [hex: :prometheus_ex, repo: "hexpm", optional: false]}, {:prometheus_process_collector, "~> 1.1", [hex: :prometheus_process_collector, repo: "hexpm", optional: true]}], "hexpm"},
   "quack": {:hex, :quack, "0.1.1", "cca7b4da1a233757fdb44b3334fce80c94785b3ad5a602053b7a002b5a8967bf", [:mix], [{:poison, ">= 1.0.0", [hex: :poison, repo: "hexpm", optional: false]}, {:tesla, "~> 1.2.0", [hex: :tesla, repo: "hexpm", optional: false]}], "hexpm"},
+  "quantum": {:hex, :quantum, "2.3.4", "72a0e8855e2adc101459eac8454787cb74ab4169de6ca50f670e72142d4960e9", [:mix], [{:calendar, "~> 0.17", [hex: :calendar, repo: "hexpm", optional: true]}, {:crontab, "~> 1.1", [hex: :crontab, repo: "hexpm", optional: false]}, {:gen_stage, "~> 0.12", [hex: :gen_stage, repo: "hexpm", optional: false]}, {:swarm, "~> 3.3", [hex: :swarm, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: true]}], "hexpm"},
   "ranch": {:hex, :ranch, "1.7.1", "6b1fab51b49196860b733a49c07604465a47bdb78aa10c1c16a3d199f7f8c881", [:rebar3], [], "hexpm"},
   "recon": {:git, "https://github.com/ferd/recon.git", "75d70c7c08926d2f24f1ee6de14ee50fe8a52763", [tag: "2.4.0"]},
   "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.4", "f0eafff810d2041e93f915ef59899c923f4568f4585904d010387ed74988e77b", [:make, :mix, :rebar3], [], "hexpm"},
+  "swarm": {:hex, :swarm, "3.4.0", "64f8b30055d74640d2186c66354b33b999438692a91be275bb89cdc7e401f448", [:mix], [{:gen_state_machine, "~> 2.0", [hex: :gen_state_machine, repo: "hexpm", optional: false]}, {:libring, "~> 1.0", [hex: :libring, repo: "hexpm", optional: false]}], "hexpm"},
   "sweet_xml": {:hex, :sweet_xml, "0.6.6", "fc3e91ec5dd7c787b6195757fbcf0abc670cee1e4172687b45183032221b66b8", [:mix], [], "hexpm"},
   "swoosh": {:hex, :swoosh, "0.23.2", "7dda95ff0bf54a2298328d6899c74dae1223777b43563ccebebb4b5d2b61df38", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1 or ~> 2.4", [hex: :cowboy, repo: "hexpm", optional: true]}, {:gen_smtp, "~> 0.13", [hex: :gen_smtp, repo: "hexpm", optional: true]}, {:hackney, "~> 1.9", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mail, "~> 0.2", [hex: :mail, repo: "hexpm", optional: true]}, {:mime, "~> 1.1", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_cowboy, ">= 1.0.0", [hex: :plug_cowboy, repo: "hexpm", optional: true]}], "hexpm"},
   "syslog": {:git, "https://github.com/Vagabond/erlang-syslog.git", "4a6c6f2c996483e86c1320e9553f91d337bcb6aa", [tag: "1.0.5"]},
@ -0,0 +1,6 @@
defmodule Pleroma.Repo.Migrations.AddObanJobsTable do
  use Ecto.Migration

  defdelegate up, to: Oban.Migrations
  defdelegate down, to: Oban.Migrations
end
@ -7,6 +7,7 @@ defmodule Pleroma.ActivityTest do
   alias Pleroma.Activity
   alias Pleroma.Bookmark
   alias Pleroma.Object
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.ThreadMute
   import Pleroma.Factory

@ -125,7 +126,8 @@ test "when association is not loaded" do
       }

       {:ok, local_activity} = Pleroma.Web.CommonAPI.post(user, %{"status" => "find me!"})
-      {:ok, remote_activity} = Pleroma.Web.Federator.incoming_ap_doc(params)
+      {:ok, job} = Pleroma.Web.Federator.incoming_ap_doc(params)
+      {:ok, remote_activity} = ObanHelpers.perform(job)
       %{local_activity: local_activity, remote_activity: remote_activity, user: user}
     end

@ -185,4 +187,39 @@ test "all_by_ids_with_object/1" do

     assert [%{id: ^id1, object: %Object{}}, %{id: ^id2, object: %Object{}}] = activities
   end
+
+  test "get_by_id_with_object/1" do
+    %{id: id} = insert(:note_activity)
+
+    assert %Activity{id: ^id, object: %Object{}} = Activity.get_by_id_with_object(id)
+  end
+
+  test "get_by_ap_id_with_object/1" do
+    %{data: %{"id" => ap_id}} = insert(:note_activity)
+
+    assert %Activity{data: %{"id" => ^ap_id}, object: %Object{}} =
+             Activity.get_by_ap_id_with_object(ap_id)
+  end
+
+  test "get_by_id/1" do
+    %{id: id} = insert(:note_activity)
+
+    assert %Activity{id: ^id} = Activity.get_by_id(id)
+  end
+
+  test "all_by_actor_and_id/2" do
+    user = insert(:user)
+
+    {:ok, %{id: id1}} = Pleroma.Web.CommonAPI.post(user, %{"status" => "cofe"})
+    {:ok, %{id: id2}} = Pleroma.Web.CommonAPI.post(user, %{"status" => "cofefe"})
+
+    assert [] == Activity.all_by_actor_and_id(user, [])
+
+    activities =
+      user.ap_id
+      |> Activity.all_by_actor_and_id([id1, id2])
+      |> Enum.sort(&(&1.id < &2.id))
+
+    assert [%Activity{id: ^id1}, %Activity{id: ^id2}] = activities
+  end
 end
@ -22,6 +22,8 @@ test "it goes through old direct conversations" do
     {:ok, _activity} =
       CommonAPI.post(user, %{"visibility" => "direct", "status" => "hey @#{other_user.nickname}"})

+    Pleroma.Tests.ObanHelpers.perform_all()
+
     Repo.delete_all(Conversation)
     Repo.delete_all(Conversation.Participation)

@ -10,7 +10,7 @@ defmodule Pleroma.ActivityExpirationWorkerTest do
   test "deletes an activity" do
     activity = insert(:note_activity)
     expiration = insert(:expiration_in_the_past, %{activity_id: activity.id})
-    Pleroma.ActivityExpirationWorker.perform(:execute, expiration.id)
+    Pleroma.Daemons.ActivityExpirationDaemon.perform(:execute, expiration.id)

     refute Repo.get(Activity, activity.id)
   end
@ -2,11 +2,12 @@
 # Copyright © 2017-2019 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.DigestEmailWorkerTest do
+defmodule Pleroma.DigestEmailDaemonTest do
   use Pleroma.DataCase
   import Pleroma.Factory

-  alias Pleroma.DigestEmailWorker
+  alias Pleroma.Daemons.DigestEmailDaemon
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI

@ -22,7 +23,10 @@ test "it sends digest emails" do
     User.switch_email_notifications(user2, "digest", true)
     CommonAPI.post(user, %{"status" => "hey @#{user2.nickname}!"})

-    DigestEmailWorker.perform()
+    DigestEmailDaemon.perform()
+    ObanHelpers.perform_all()
+    # Performing job(s) enqueued at previous step
+    ObanHelpers.perform_all()

     assert_received {:email, email}
     assert email.to == [{user2.name, user2.email}]
@ -2,7 +2,7 @@
 # Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

-defmodule Pleroma.ScheduledActivityWorkerTest do
+defmodule Pleroma.ScheduledActivityDaemonTest do
   use Pleroma.DataCase
   alias Pleroma.ScheduledActivity
   import Pleroma.Factory
@ -10,7 +10,7 @@ defmodule Pleroma.ScheduledActivityWorkerTest do
   test "creates a status from the scheduled activity" do
     user = insert(:user)
     scheduled_activity = insert(:scheduled_activity, user: user, params: %{status: "hi"})
-    Pleroma.ScheduledActivityWorker.perform(:execute, scheduled_activity.id)
+    Pleroma.Daemons.ScheduledActivityDaemon.perform(:execute, scheduled_activity.id)

     refute Repo.get(ScheduledActivity, scheduled_activity.id)
     activity = Repo.all(Pleroma.Activity) |> Enum.find(&(&1.actor == user.ap_id))
@ -5,6 +5,7 @@
 defmodule Pleroma.Integration.MastodonWebsocketTest do
   use Pleroma.DataCase

+  import ExUnit.CaptureLog
   import Pleroma.Factory

   alias Pleroma.Integration.WebsocketClient
@ -39,13 +40,17 @@ def start_socket(qs \\ nil, headers \\ []) do
   end

   test "refuses invalid requests" do
-    assert {:error, {400, _}} = start_socket()
-    assert {:error, {404, _}} = start_socket("?stream=ncjdk")
+    capture_log(fn ->
+      assert {:error, {400, _}} = start_socket()
+      assert {:error, {404, _}} = start_socket("?stream=ncjdk")
+    end)
   end

   test "requires authentication and a valid token for protected streams" do
-    assert {:error, {403, _}} = start_socket("?stream=user&access_token=aaaaaaaaaaaa")
-    assert {:error, {403, _}} = start_socket("?stream=user")
+    capture_log(fn ->
+      assert {:error, {403, _}} = start_socket("?stream=user&access_token=aaaaaaaaaaaa")
+      assert {:error, {403, _}} = start_socket("?stream=user")
+    end)
   end

   test "allows public streams without authentication" do
@ -100,19 +105,27 @@ test "accepts valid tokens", state do

   test "accepts the 'user' stream", %{token: token} = _state do
     assert {:ok, _} = start_socket("?stream=user&access_token=#{token.token}")
-    assert {:error, {403, "Forbidden"}} = start_socket("?stream=user")
+
+    assert capture_log(fn ->
+             assert {:error, {403, "Forbidden"}} = start_socket("?stream=user")
+           end) =~ ":badarg"
   end

   test "accepts the 'user:notification' stream", %{token: token} = _state do
     assert {:ok, _} = start_socket("?stream=user:notification&access_token=#{token.token}")
-    assert {:error, {403, "Forbidden"}} = start_socket("?stream=user:notification")
+
+    assert capture_log(fn ->
+             assert {:error, {403, "Forbidden"}} = start_socket("?stream=user:notification")
+           end) =~ ":badarg"
   end

   test "accepts valid token on Sec-WebSocket-Protocol header", %{token: token} do
     assert {:ok, _} = start_socket("?stream=user", [{"Sec-WebSocket-Protocol", token.token}])

-    assert {:error, {403, "Forbidden"}} =
-             start_socket("?stream=user", [{"Sec-WebSocket-Protocol", "I am a friend"}])
+    assert capture_log(fn ->
+             assert {:error, {403, "Forbidden"}} =
+                      start_socket("?stream=user", [{"Sec-WebSocket-Protocol", "I am a friend"}])
+           end) =~ ":badarg"
     end
   end
 end
@ -8,6 +8,7 @@ defmodule Pleroma.NotificationTest do
   import Pleroma.Factory

   alias Pleroma.Notification
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.Transmogrifier
   alias Pleroma.Web.CommonAPI
@ -588,7 +589,8 @@ test "notifications are deleted if a local user is deleted" do

     refute Enum.empty?(Notification.for_user(other_user))

-    User.delete(user)
+    {:ok, job} = User.delete(user)
+    ObanHelpers.perform(job)

     assert Enum.empty?(Notification.for_user(other_user))
   end
@ -633,6 +635,7 @@ test "notifications are deleted if a remote user is deleted" do
     }

     {:ok, _delete_activity} = Transmogrifier.handle_incoming(delete_user_message)
+    ObanHelpers.perform_all()

     assert Enum.empty?(Notification.for_user(local_user))
   end
42 test/support/oban_helpers.ex Normal file
@ -0,0 +1,42 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only

defmodule Pleroma.Tests.ObanHelpers do
  @moduledoc """
  Oban test helpers.
  """

  alias Pleroma.Repo

  def perform_all do
    Oban.Job
    |> Repo.all()
    |> perform()
  end

  def perform(%Oban.Job{} = job) do
    res = apply(String.to_existing_atom("Elixir." <> job.worker), :perform, [job.args, job])
    Repo.delete(job)
    res
  end

  def perform(jobs) when is_list(jobs) do
    for job <- jobs, do: perform(job)
  end

  def member?(%{} = job_args, jobs) when is_list(jobs) do
    Enum.any?(jobs, fn job ->
      member?(job_args, job.args)
    end)
  end

  def member?(%{} = test_attrs, %{} = attrs) do
    Enum.all?(
      test_attrs,
      fn {k, _v} -> member?(test_attrs[k], attrs[k]) end
    )
  end

  def member?(x, y), do: x == y
end
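`member?/2` above performs a recursive subset match on job arguments, so a test only needs to spell out the keys it cares about. An illustrative call, combining it with `all_enqueued/1` from `Oban.Testing` the way the UserTest change further down in this diff does:

    ObanHelpers.member?(
      %{"op" => "publish_one", "params" => %{"inbox" => "http://mastodon.example.org/inbox"}},
      all_enqueued(worker: Pleroma.Workers.PublisherWorker)
    )
    # => true if at least one enqueued PublisherWorker job contains these keys/values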
@ -4,6 +4,7 @@ defmodule Mix.Tasks.Pleroma.DigestTest do
   import Pleroma.Factory
   import Swoosh.TestAssertions

+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.CommonAPI

   setup_all do
@ -39,6 +40,8 @@ test "Sends digest to the given user" do

     :ok = Mix.Tasks.Pleroma.Digest.run(["test", user2.nickname, yesterday_date])

+    ObanHelpers.perform_all()
+
     assert_receive {:mix_shell, :info, [message]}
     assert message =~ "Digest email have been sent"

@ -7,14 +7,16 @@ defmodule Pleroma.UserTest do
   alias Pleroma.Builders.UserBuilder
   alias Pleroma.Object
   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.CommonAPI

   use Pleroma.DataCase
+  use Oban.Testing, repo: Pleroma.Repo

-  import Pleroma.Factory
   import Mock
+  import Pleroma.Factory

   setup_all do
     Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
@ -570,22 +572,6 @@ test "it has required fields" do
       refute cs.valid?
     end)
   end

-  test "it restricts some sizes" do
-    bio_limit = Pleroma.Config.get([:instance, :user_bio_length], 5000)
-    name_limit = Pleroma.Config.get([:instance, :user_name_length], 100)
-
-    [bio: bio_limit, name: name_limit]
-    |> Enum.each(fn {field, size} ->
-      string = String.pad_leading(".", size)
-      cs = User.remote_user_creation(Map.put(@valid_remote, field, string))
-      assert cs.valid?
-
-      string = String.pad_leading(".", size + 1)
-      cs = User.remote_user_creation(Map.put(@valid_remote, field, string))
-      refute cs.valid?
-    end)
-  end

   describe "followers and friends" do
@ -725,7 +711,9 @@ test "it imports user followings from list" do
       user3.nickname
     ]

-    result = User.follow_import(user1, identifiers)
+    {:ok, job} = User.follow_import(user1, identifiers)
+    result = ObanHelpers.perform(job)
+
     assert is_list(result)
     assert result == [user2, user3]
   end
@ -936,7 +924,9 @@ test "it imports user blocks from list" do
       user3.nickname
     ]

-    result = User.blocks_import(user1, identifiers)
+    {:ok, job} = User.blocks_import(user1, identifiers)
+    result = ObanHelpers.perform(job)
+
     assert is_list(result)
     assert result == [user2, user3]
   end
@ -1053,7 +1043,9 @@ test ".delete_user_activities deletes all create activities", %{user: user} do
   test "it deletes deactivated user" do
     {:ok, user} = insert(:user, info: %{deactivated: true}) |> User.set_cache()

-    assert {:ok, _} = User.delete(user)
+    {:ok, job} = User.delete(user)
+    {:ok, _user} = ObanHelpers.perform(job)
+
     refute User.get_by_id(user.id)
   end

@ -1071,7 +1063,8 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
     {:ok, like_two, _} = CommonAPI.favorite(activity.id, follower)
     {:ok, repeat, _} = CommonAPI.repeat(activity_two.id, user)

-    {:ok, _} = User.delete(user)
+    {:ok, job} = User.delete(user)
+    {:ok, _user} = ObanHelpers.perform(job)

     follower = User.get_cached_by_id(follower.id)

@ -1081,7 +1074,7 @@ test "it deletes a user, all follow relationships and all activities", %{user: u

     user_activities =
       user.ap_id
-      |> Activity.query_by_actor()
+      |> Activity.Queries.by_actor()
       |> Repo.all()
       |> Enum.map(fn act -> act.data["type"] end)

@ -1103,12 +1096,18 @@ test "it deletes a user, all follow relationships and all activities", %{user: u
     {:ok, follower} = User.get_or_fetch_by_ap_id("http://mastodon.example.org/users/admin")
     {:ok, _} = User.follow(follower, user)

-    {:ok, _user} = User.delete(user)
+    {:ok, job} = User.delete(user)
+    {:ok, _user} = ObanHelpers.perform(job)

-    assert called(
-             Pleroma.Web.ActivityPub.Publisher.publish_one(%{
-               inbox: "http://mastodon.example.org/inbox"
-             })
+    assert ObanHelpers.member?(
+             %{
+               "op" => "publish_one",
+               "params" => %{
+                 "inbox" => "http://mastodon.example.org/inbox",
+                 "id" => "pleroma:fakeid"
+               }
+             },
+             all_enqueued(worker: Pleroma.Workers.PublisherWorker)
           )
   end
 end
@@ -1117,11 +1116,60 @@ test "get_public_key_for_ap_id fetches a user that's not in the db" do
     assert {:ok, _key} = User.get_public_key_for_ap_id("http://mastodon.example.org/users/admin")
   end

-  test "insert or update a user from given data" do
-    user = insert(:user, %{nickname: "nick@name.de"})
-    data = %{ap_id: user.ap_id <> "xxx", name: user.name, nickname: user.nickname}
+  describe "insert or update a user from given data" do
+    test "with normal data" do
+      user = insert(:user, %{nickname: "nick@name.de"})
+      data = %{ap_id: user.ap_id <> "xxx", name: user.name, nickname: user.nickname}

       assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
+
+    test "with overly long fields" do
+      current_max_length = Pleroma.Config.get([:instance, :account_field_value_length], 255)
+      user = insert(:user, nickname: "nickname@supergood.domain")
+
+      data = %{
+        ap_id: user.ap_id,
+        name: user.name,
+        nickname: user.nickname,
+        info: %{
+          fields: [
+            %{"name" => "myfield", "value" => String.duplicate("h", current_max_length + 1)}
+          ]
+        }
+      }
+
+      assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
+
+    test "with an overly long bio" do
+      current_max_length = Pleroma.Config.get([:instance, :user_bio_length], 5000)
+      user = insert(:user, nickname: "nickname@supergood.domain")
+
+      data = %{
+        ap_id: user.ap_id,
+        name: user.name,
+        nickname: user.nickname,
+        bio: String.duplicate("h", current_max_length + 1),
+        info: %{}
+      }
+
+      assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
+
+    test "with an overly long display name" do
+      current_max_length = Pleroma.Config.get([:instance, :user_name_length], 100)
+      user = insert(:user, nickname: "nickname@supergood.domain")
+
+      data = %{
+        ap_id: user.ap_id,
+        name: String.duplicate("h", current_max_length + 1),
+        nickname: user.nickname,
+        info: %{}
+      }
+
+      assert {:ok, %User{}} = User.insert_or_update_user(data)
+    end
   end

   describe "per-user rich-text filtering" do
@@ -1153,7 +1201,8 @@ test "invalidate_cache works" do
   test "User.delete() plugs any possible zombie objects" do
     user = insert(:user)

-    {:ok, _} = User.delete(user)
+    {:ok, job} = User.delete(user)
+    {:ok, _} = ObanHelpers.perform(job)

     {:ok, cached_user} = Cachex.get(:user_cache, "ap_id:#{user.ap_id}")

@@ -1614,4 +1663,31 @@ test "syncronizes the counters with the remote instance for the follower when en
       assert User.user_info(other_user).following_count == 152
     end
   end
+
+  describe "change_email/2" do
+    setup do
+      [user: insert(:user)]
+    end
+
+    test "blank email returns error", %{user: user} do
+      assert {:error, %{errors: [email: {"can't be blank", _}]}} = User.change_email(user, "")
+      assert {:error, %{errors: [email: {"can't be blank", _}]}} = User.change_email(user, nil)
+    end
+
+    test "non unique email returns error", %{user: user} do
+      %{email: email} = insert(:user)
+
+      assert {:error, %{errors: [email: {"has already been taken", _}]}} =
+               User.change_email(user, email)
+    end
+
+    test "invalid email returns error", %{user: user} do
+      assert {:error, %{errors: [email: {"has invalid format", _}]}} =
+               User.change_email(user, "cofe")
+    end
+
+    test "changes email", %{user: user} do
+      assert {:ok, %User{email: "cofe@cofe.party"}} = User.change_email(user, "cofe@cofe.party")
+    end
+  end
 end
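The new `change_email/2` tests above pin down blank, malformed, and already-taken addresses. A hypothetical changeset-based sketch of such a function inside the `User` module (an illustration under assumed field names, not the code in this commit) could look like:

```elixir
# Hypothetical sketch: the error messages in the tests match Ecto's defaults,
# so a plain changeset pipeline would satisfy them.
def change_email(%User{} = user, email) do
  user
  |> Ecto.Changeset.cast(%{email: email}, [:email])
  |> Ecto.Changeset.validate_required([:email])
  |> Ecto.Changeset.validate_format(:email, ~r/@/)
  |> Ecto.Changeset.unique_constraint(:email)
  # Returns {:ok, %User{}} or {:error, changeset} as the tests expect.
  |> Repo.update()
end
```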
@@ -4,16 +4,20 @@

 defmodule Pleroma.Web.ActivityPub.ActivityPubControllerTest do
   use Pleroma.Web.ConnCase
+  use Oban.Testing, repo: Pleroma.Repo

   import Pleroma.Factory
   alias Pleroma.Activity
   alias Pleroma.Instances
   alias Pleroma.Object
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ObjectView
   alias Pleroma.Web.ActivityPub.Relay
   alias Pleroma.Web.ActivityPub.UserView
   alias Pleroma.Web.ActivityPub.Utils
   alias Pleroma.Web.CommonAPI
+  alias Pleroma.Workers.ReceiverWorker

   setup_all do
     Tesla.Mock.mock_global(fn env -> apply(HttpRequestMock, :request, [env]) end)
@@ -365,7 +369,8 @@ test "it inserts an incoming activity into the database", %{conn: conn} do
         |> post("/inbox", data)

       assert "ok" == json_response(conn, 200)
-      :timer.sleep(500)
+
+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
       assert Activity.get_by_ap_id(data["id"])
     end

@@ -407,7 +412,7 @@ test "it inserts an incoming activity into the database", %{conn: conn, data: da
         |> post("/users/#{user.nickname}/inbox", data)

       assert "ok" == json_response(conn, 200)
-      :timer.sleep(500)
+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
       assert Activity.get_by_ap_id(data["id"])
     end

@@ -436,7 +441,7 @@ test "it accepts messages from actors that are followed by the user", %{
         |> post("/users/#{recipient.nickname}/inbox", data)

       assert "ok" == json_response(conn, 200)
-      :timer.sleep(500)
+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
       assert Activity.get_by_ap_id(data["id"])
     end

@@ -526,6 +531,8 @@ test "it removes all follower collections but actor's", %{conn: conn} do
       |> post("/users/#{recipient.nickname}/inbox", data)
       |> json_response(200)

+      ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))
+
       activity = Activity.get_by_ap_id(data["id"])

       assert activity.id
@@ -601,6 +608,7 @@ test "it inserts an incoming create activity into the database", %{conn: conn} d
         |> post("/users/#{user.nickname}/outbox", data)

       result = json_response(conn, 201)

       assert Activity.get_by_ap_id(result["id"])
     end

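An aside on the hunks above: the old `:timer.sleep(500)` calls were only waiting for the previous async federator to catch up; with Oban the tests drain the queue deterministically instead. A short sketch of the idiom, with names taken from this diff and the helper's internals assumed:

```elixir
# Inside a test module that declares `use Oban.Testing, repo: Pleroma.Repo`:
# all_enqueued/1 (from Oban.Testing) returns the enqueued %Oban.Job{} structs,
# and ObanHelpers.perform/1 (project test helper) executes them inline.
ObanHelpers.perform(all_enqueued(worker: ReceiverWorker))

# The incoming activity is now guaranteed to have been processed.
assert Activity.get_by_ap_id(data["id"])
```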
@@ -686,7 +686,7 @@ test "returns reblogs for users for whom reblogs have not been muted" do
       user = insert(:user)

       {:ok, like_activity, _object} = ActivityPub.like(user, object_activity)
-      assert called(Pleroma.Web.Federator.publish(like_activity, 5))
+      assert called(Pleroma.Web.Federator.publish(like_activity))
     end

     test "returns exist activity if object already liked" do
@@ -747,7 +747,7 @@ test "adds a like activity to the db" do
       {:ok, unlike_activity, _, object} = ActivityPub.unlike(user, object)
       assert object.data["like_count"] == 0

-      assert called(Pleroma.Web.Federator.publish(unlike_activity, 5))
+      assert called(Pleroma.Web.Federator.publish(unlike_activity))
     end

     test "unliking a previously liked object" do
@@ -6,6 +6,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
   use Pleroma.DataCase

   alias Pleroma.HTTP
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy

   import Mock
@@ -24,6 +25,11 @@ defmodule Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicyTest do
   test "it prefetches media proxy URIs" do
     with_mock HTTP, get: fn _, _, _ -> {:ok, []} end do
       MediaProxyWarmingPolicy.filter(@message)

+      ObanHelpers.perform_all()
+      # Performing jobs which has been just enqueued
+      ObanHelpers.perform_all()
+
       assert called(HTTP.get(:_, :_, :_))
     end
   end
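Several hunks in this diff assert on enqueued job arguments with `ObanHelpers.member?/2`, whose implementation is not shown here. A minimal hypothetical version (not the project's actual helper) that does a loose, recursive subset match of the expected args against each job's `args` map might look like:

```elixir
# Hypothetical sketch of a member?/2-style assertion helper: returns true if
# any enqueued job carries at least the expected key/value pairs, matching
# nested maps as subsets rather than requiring exact equality.
defmodule ObanMemberSketch do
  def member?(expected, jobs) when is_list(jobs) do
    Enum.any?(jobs, fn job -> subset?(expected, job.args) end)
  end

  defp subset?(expected, actual) when is_map(expected) and is_map(actual) do
    Enum.all?(expected, fn {key, value} -> subset?(value, Map.get(actual, key)) end)
  end

  defp subset?(expected, actual), do: expected == actual
end
```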
@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.ActivityPub.PublisherTest do
   use Pleroma.DataCase

+  import ExUnit.CaptureLog
   import Pleroma.Factory
   import Tesla.Mock
   import Mock
@@ -188,7 +189,10 @@ test "it returns inbox for messages involving single recipients in total" do
       actor = insert(:user)
       inbox = "http://connrefused.site/users/nick1/inbox"

-      assert {:error, _} = Publisher.publish_one(%{inbox: inbox, json: "{}", actor: actor, id: 1})
+      assert capture_log(fn ->
+               assert {:error, _} =
+                        Publisher.publish_one(%{inbox: inbox, json: "{}", actor: actor, id: 1})
+             end) =~ "connrefused"

       assert called(Instances.set_unreachable(inbox))
     end
@@ -212,14 +216,16 @@ test "it returns inbox for messages involving single recipients in total" do
       actor = insert(:user)
       inbox = "http://connrefused.site/users/nick1/inbox"

-      assert {:error, _} =
-               Publisher.publish_one(%{
-                 inbox: inbox,
-                 json: "{}",
-                 actor: actor,
-                 id: 1,
-                 unreachable_since: NaiveDateTime.utc_now()
-               })
+      assert capture_log(fn ->
+               assert {:error, _} =
+                        Publisher.publish_one(%{
+                          inbox: inbox,
+                          json: "{}",
+                          actor: actor,
+                          id: 1,
+                          unreachable_since: NaiveDateTime.utc_now()
+                        })
+             end) =~ "connrefused"

       refute called(Instances.set_unreachable(inbox))
     end
@@ -257,7 +263,7 @@ test "it returns inbox for messages involving single recipients in total" do
       assert called(
               Pleroma.Web.Federator.Publisher.enqueue_one(Publisher, %{
                 inbox: "https://domain.com/users/nick1/inbox",
-                 actor: actor,
+                 actor_id: actor.id,
                 id: note_activity.data["id"]
               })
             )
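The pattern introduced above wraps a call that is expected to fail in `ExUnit.CaptureLog.capture_log/1`, so the error output is both silenced and asserted on. A self-contained example of the same idiom (the logged message is a placeholder):

```elixir
defmodule CaptureLogExampleTest do
  use ExUnit.Case
  import ExUnit.CaptureLog
  require Logger

  test "asserts on the logged error instead of letting it print" do
    # capture_log/1 returns everything logged inside the function,
    # so the test can match on the message text.
    assert capture_log(fn ->
             Logger.error("connrefused: could not reach inbox")
           end) =~ "connrefused"
  end
end
```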
@@ -10,6 +10,7 @@ defmodule Pleroma.Web.ActivityPub.RelayTest do
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Relay

+  import ExUnit.CaptureLog
   import Pleroma.Factory
   import Mock

@@ -20,7 +21,9 @@ test "gets an actor for the relay" do

   describe "follow/1" do
     test "returns errors when user not found" do
-      assert Relay.follow("test-ap-id") == {:error, "Could not fetch by AP id"}
+      assert capture_log(fn ->
+               assert Relay.follow("test-ap-id") == {:error, "Could not fetch by AP id"}
+             end) =~ "Could not fetch by AP id"
     end

     test "returns activity" do
@@ -37,7 +40,9 @@ test "returns activity" do

   describe "unfollow/1" do
     test "returns errors when user not found" do
-      assert Relay.unfollow("test-ap-id") == {:error, "Could not fetch by AP id"}
+      assert capture_log(fn ->
+               assert Relay.unfollow("test-ap-id") == {:error, "Could not fetch by AP id"}
+             end) =~ "Could not fetch by AP id"
     end

     test "returns activity" do
@@ -78,7 +83,9 @@ test "returns error when object is unknown" do
         }
       )

-      assert Relay.publish(activity) == {:error, nil}
+      assert capture_log(fn ->
+               assert Relay.publish(activity) == {:error, nil}
+             end) =~ "[error] error: nil"
     end

     test_with_mock "returns announce activity and publish to federate",
@@ -92,7 +99,7 @@ test "returns error when object is unknown" do
       assert activity.data["type"] == "Announce"
       assert activity.data["actor"] == service_actor.ap_id
       assert activity.data["object"] == obj.data["id"]
-      assert called(Pleroma.Web.Federator.publish(activity, 5))
+      assert called(Pleroma.Web.Federator.publish(activity))
     end

     test_with_mock "returns announce activity and not publish to federate",
@@ -106,7 +113,7 @@ test "returns error when object is unknown" do
       assert activity.data["type"] == "Announce"
       assert activity.data["actor"] == service_actor.ap_id
       assert activity.data["object"] == obj.data["id"]
-      refute called(Pleroma.Web.Federator.publish(activity, 5))
+      refute called(Pleroma.Web.Federator.publish(activity))
     end
   end
 end
@@ -8,6 +8,7 @@ defmodule Pleroma.Web.ActivityPub.TransmogrifierTest do
   alias Pleroma.Object
   alias Pleroma.Object.Fetcher
   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.ActivityPub.Transmogrifier
@@ -102,7 +103,7 @@ test "it does not crash if the object in inReplyTo can't be fetched" do

       assert capture_log(fn ->
                {:ok, _returned_activity} = Transmogrifier.handle_incoming(data)
-             end) =~ "[error] Couldn't fetch \"\"https://404.site/whatever\"\", error: nil"
+             end) =~ "[error] Couldn't fetch \"https://404.site/whatever\", error: nil"
     end

     test "it works for incoming notices" do
@@ -648,6 +649,7 @@ test "it works for incoming user deletes" do
         |> Poison.decode!()

       {:ok, _} = Transmogrifier.handle_incoming(data)
+      ObanHelpers.perform_all()

       refute User.get_cached_by_ap_id(ap_id)
     end
@@ -1210,6 +1212,8 @@ test "it upgrades a user to activitypub" do
       assert user.info.note_count == 1

       {:ok, user} = Transmogrifier.upgrade_user_from_ap_id("https://niu.moe/users/rye")
+      ObanHelpers.perform_all()
+
       assert user.info.ap_enabled
       assert user.info.note_count == 1
       assert user.follower_address == "https://niu.moe/users/rye/followers"
@@ -1779,7 +1779,11 @@ test "common config example", %{conn: conn} do
                 %{"tuple" => [":seconds_valid", 60]},
                 %{"tuple" => [":path", ""]},
                 %{"tuple" => [":key1", nil]},
-                 %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}
+                 %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]},
+                 %{"tuple" => [":regex1", "~r/https:\/\/example.com/"]},
+                 %{"tuple" => [":regex2", "~r/https:\/\/example.com/u"]},
+                 %{"tuple" => [":regex3", "~r/https:\/\/example.com/i"]},
+                 %{"tuple" => [":regex4", "~r/https:\/\/example.com/s"]}
               ]
             }
           ]
@@ -1796,7 +1800,11 @@ test "common config example", %{conn: conn} do
                 %{"tuple" => [":seconds_valid", 60]},
                 %{"tuple" => [":path", ""]},
                 %{"tuple" => [":key1", nil]},
-                 %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]}
+                 %{"tuple" => [":partial_chain", "&:hackney_connect.partial_chain/1"]},
+                 %{"tuple" => [":regex1", "~r/https:\\/\\/example.com/"]},
+                 %{"tuple" => [":regex2", "~r/https:\\/\\/example.com/u"]},
+                 %{"tuple" => [":regex3", "~r/https:\\/\\/example.com/i"]},
+                 %{"tuple" => [":regex4", "~r/https:\\/\\/example.com/s"]}
               ]
             }
           ]
@@ -2088,7 +2096,7 @@ test "queues key as atom", %{conn: conn} do
       post(conn, "/api/pleroma/admin/config", %{
         configs: [
           %{
-            "group" => "pleroma_job_queue",
+            "group" => "oban",
             "key" => ":queues",
             "value" => [
               %{"tuple" => [":federator_incoming", 50]},
@@ -2106,7 +2114,7 @@ test "queues key as atom", %{conn: conn} do
       assert json_response(conn, 200) == %{
               "configs" => [
                 %{
-                   "group" => "pleroma_job_queue",
+                   "group" => "oban",
                   "key" => ":queues",
                   "value" => [
                     %{"tuple" => [":federator_incoming", 50]},
@@ -103,6 +103,30 @@ test "sigil" do
     assert Config.from_binary(binary) == ~r/comp[lL][aA][iI][nN]er/
   end

+  test "link sigil" do
+    binary = Config.transform("~r/https:\/\/example.com/")
+    assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/)
+    assert Config.from_binary(binary) == ~r/https:\/\/example.com/
+  end
+
+  test "link sigil with u modifier" do
+    binary = Config.transform("~r/https:\/\/example.com/u")
+    assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/u)
+    assert Config.from_binary(binary) == ~r/https:\/\/example.com/u
+  end
+
+  test "link sigil with i modifier" do
+    binary = Config.transform("~r/https:\/\/example.com/i")
+    assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/i)
+    assert Config.from_binary(binary) == ~r/https:\/\/example.com/i
+  end
+
+  test "link sigil with s modifier" do
+    binary = Config.transform("~r/https:\/\/example.com/s")
+    assert binary == :erlang.term_to_binary(~r/https:\/\/example.com/s)
+    assert Config.from_binary(binary) == ~r/https:\/\/example.com/s
+  end
+
   test "2 child tuple" do
     binary = Config.transform(%{"tuple" => ["v1", ":v2"]})
     assert binary == :erlang.term_to_binary({"v1", :v2})
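The new sigil tests above rely on a compiled regex being an ordinary Erlang term, so it survives `:erlang.term_to_binary/1` round-trips regardless of modifiers. A standalone illustration:

```elixir
# A compiled ~r// regex is a %Regex{} struct, so serializing it and reading
# it back preserves both the source pattern and its flags.
regex = ~r/https:\/\/example.com/u

binary = :erlang.term_to_binary(regex)
restored = :erlang.binary_to_term(binary)

true = restored == regex
true = Regex.match?(restored, "https://example.com")
```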
@@ -4,9 +4,14 @@

 defmodule Pleroma.Web.FederatorTest do
   alias Pleroma.Instances
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Web.Federator
+  alias Pleroma.Workers.PublisherWorker

   use Pleroma.DataCase
+  use Oban.Testing, repo: Pleroma.Repo

   import Pleroma.Factory
   import Mock

@@ -24,15 +29,6 @@ defmodule Pleroma.Web.FederatorTest do
   clear_config([:instance, :rewrite_policy])
   clear_config([:mrf_keyword])

-  describe "Publisher.perform" do
-    test "call `perform` with unknown task" do
-      assert {
-               :error,
-               "Don't know what to do with this"
-             } = Pleroma.Web.Federator.Publisher.perform("test", :ok, :ok)
-    end
-  end
-
   describe "Publish an activity" do
     setup do
       user = insert(:user)
@@ -53,6 +49,7 @@ test "with relays active, it publishes to the relay", %{
     } do
       with_mocks([relay_mock]) do
         Federator.publish(activity)
+        ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
       end

       assert_received :relay_publish
@@ -66,6 +63,7 @@ test "with relays deactivated, it does not publish to the relay", %{

       with_mocks([relay_mock]) do
         Federator.publish(activity)
+        ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
       end

       refute_received :relay_publish
@@ -73,10 +71,7 @@ test "with relays deactivated, it does not publish to the relay", %{
     end

describe "Targets reachability filtering in `publish`" do
|
describe "Targets reachability filtering in `publish`" do
|
||||||
test_with_mock "it federates only to reachable instances via AP",
|
test "it federates only to reachable instances via AP" do
|
||||||
Pleroma.Web.ActivityPub.Publisher,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
|
|
||||||
{inbox1, inbox2} =
|
{inbox1, inbox2} =
|
||||||
@ -104,20 +99,20 @@ test "with relays deactivated, it does not publish to the relay", %{
|
|||||||
{:ok, _activity} =
|
{:ok, _activity} =
|
||||||
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
||||||
|
|
||||||
assert called(
|
expected_dt = NaiveDateTime.to_iso8601(dt)
|
||||||
Pleroma.Web.ActivityPub.Publisher.publish_one(%{
|
|
||||||
inbox: inbox1,
|
|
||||||
unreachable_since: dt
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
refute called(Pleroma.Web.ActivityPub.Publisher.publish_one(%{inbox: inbox2}))
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
|
|
||||||
|
assert ObanHelpers.member?(
|
||||||
|
%{
|
||||||
|
"op" => "publish_one",
|
||||||
|
"params" => %{"inbox" => inbox1, "unreachable_since" => expected_dt}
|
||||||
|
},
|
||||||
|
all_enqueued(worker: PublisherWorker)
|
||||||
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
test_with_mock "it federates only to reachable instances via Websub",
|
test "it federates only to reachable instances via Websub" do
|
||||||
Pleroma.Web.Websub,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
websub_topic = Pleroma.Web.OStatus.feed_path(user)
|
websub_topic = Pleroma.Web.OStatus.feed_path(user)
|
||||||
|
|
||||||
@ -142,23 +137,27 @@ test "with relays deactivated, it does not publish to the relay", %{
|
|||||||
|
|
||||||
{:ok, _activity} = CommonAPI.post(user, %{"status" => "HI"})
|
{:ok, _activity} = CommonAPI.post(user, %{"status" => "HI"})
|
||||||
|
|
||||||
assert called(
|
expected_callback = sub2.callback
|
||||||
Pleroma.Web.Websub.publish_one(%{
|
expected_dt = NaiveDateTime.to_iso8601(dt)
|
||||||
callback: sub2.callback,
|
|
||||||
unreachable_since: dt
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
refute called(Pleroma.Web.Websub.publish_one(%{callback: sub1.callback}))
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
|
|
||||||
|
assert ObanHelpers.member?(
|
||||||
|
%{
|
||||||
|
"op" => "publish_one",
|
||||||
|
"params" => %{
|
||||||
|
"callback" => expected_callback,
|
||||||
|
"unreachable_since" => expected_dt
|
||||||
|
}
|
||||||
|
},
|
||||||
|
all_enqueued(worker: PublisherWorker)
|
||||||
|
)
|
||||||
end
|
end
|
||||||
|
|
||||||
test_with_mock "it federates only to reachable instances via Salmon",
|
test "it federates only to reachable instances via Salmon" do
|
||||||
Pleroma.Web.Salmon,
|
|
||||||
[:passthrough],
|
|
||||||
[] do
|
|
||||||
user = insert(:user)
|
user = insert(:user)
|
||||||
|
|
||||||
remote_user1 =
|
_remote_user1 =
|
||||||
insert(:user, %{
|
insert(:user, %{
|
||||||
local: false,
|
local: false,
|
||||||
nickname: "nick1@domain.com",
|
nickname: "nick1@domain.com",
|
||||||
@ -174,6 +173,8 @@ test "with relays deactivated, it does not publish to the relay", %{
|
|||||||
info: %{salmon: "https://domain2.com/salmon"}
|
info: %{salmon: "https://domain2.com/salmon"}
|
||||||
})
|
})
|
||||||
|
|
||||||
|
remote_user2_id = remote_user2.id
|
||||||
|
|
||||||
dt = NaiveDateTime.utc_now()
|
dt = NaiveDateTime.utc_now()
|
||||||
Instances.set_unreachable(remote_user2.ap_id, dt)
|
Instances.set_unreachable(remote_user2.ap_id, dt)
|
||||||
|
|
||||||
@ -182,14 +183,20 @@ test "with relays deactivated, it does not publish to the relay", %{
|
|||||||
{:ok, _activity} =
|
{:ok, _activity} =
|
||||||
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
CommonAPI.post(user, %{"status" => "HI @nick1@domain.com, @nick2@domain2.com!"})
|
||||||
|
|
||||||
assert called(
|
expected_dt = NaiveDateTime.to_iso8601(dt)
|
||||||
Pleroma.Web.Salmon.publish_one(%{
|
|
||||||
recipient: remote_user2,
|
|
||||||
unreachable_since: dt
|
|
||||||
})
|
|
||||||
)
|
|
||||||
|
|
||||||
refute called(Pleroma.Web.Salmon.publish_one(%{recipient: remote_user1}))
|
ObanHelpers.perform(all_enqueued(worker: PublisherWorker))
|
||||||
|
|
||||||
|
assert ObanHelpers.member?(
|
||||||
|
%{
|
||||||
|
"op" => "publish_one",
|
||||||
|
"params" => %{
|
||||||
|
"recipient_id" => remote_user2_id,
|
||||||
|
"unreachable_since" => expected_dt
|
||||||
|
}
|
||||||
|
},
|
||||||
|
all_enqueued(worker: PublisherWorker)
|
||||||
|
)
|
||||||
end
|
end
|
||||||
end
|
end
|
||||||
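A note on the reachability tests above: they no longer mock the publisher modules; they inspect the queue through `all_enqueued/1`, which comes from `Oban.Testing` (pulled in with `use Oban.Testing, repo: Pleroma.Repo`). A generic, hedged sketch of asserting on an enqueued job's arguments with only that helper, without the project-specific `member?/2` wrapper (the worker module and argument names follow the hunk above):

```elixir
# Oban stores job args as a JSON map with string keys, so the assertion
# matches on strings; `inbox1` is the variable from the surrounding test.
jobs = all_enqueued(worker: Pleroma.Workers.PublisherWorker)

assert Enum.any?(jobs, fn %Oban.Job{args: args} ->
         args["op"] == "publish_one" and args["params"]["inbox"] == inbox1
       end)
```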

@@ -209,7 +216,8 @@ test "successfully processes incoming AP docs with correct origin" do
         "to" => ["https://www.w3.org/ns/activitystreams#Public"]
       }

-      {:ok, _activity} = Federator.incoming_ap_doc(params)
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert {:ok, _activity} = ObanHelpers.perform(job)
     end

     test "rejects incoming AP docs with incorrect origin" do
@@ -227,7 +235,8 @@ test "rejects incoming AP docs with incorrect origin" do
         "to" => ["https://www.w3.org/ns/activitystreams#Public"]
       }

-      :error = Federator.incoming_ap_doc(params)
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert :error = ObanHelpers.perform(job)
     end

     test "it does not crash if MRF rejects the post" do
@@ -242,7 +251,8 @@ test "it does not crash if MRF rejects the post" do
         File.read!("test/fixtures/mastodon-post-activity.json")
         |> Poison.decode!()

-      assert Federator.incoming_ap_doc(params) == :error
+      assert {:ok, job} = Federator.incoming_ap_doc(params)
+      assert :error = ObanHelpers.perform(job)
     end
   end
 end
@@ -16,7 +16,8 @@ defmodule Pleroma.Instances.InstanceTest do

   describe "set_reachable/1" do
     test "clears `unreachable_since` of existing matching Instance record having non-nil `unreachable_since`" do
-      instance = insert(:instance, unreachable_since: NaiveDateTime.utc_now())
+      unreachable_since = NaiveDateTime.to_iso8601(NaiveDateTime.utc_now())
+      instance = insert(:instance, unreachable_since: unreachable_since)

       assert {:ok, instance} = Instance.set_reachable(instance.host)
       refute instance.unreachable_since
@@ -13,6 +13,7 @@ defmodule Pleroma.Web.MastodonAPI.MastodonAPIControllerTest do
   alias Pleroma.Object
   alias Pleroma.Repo
   alias Pleroma.ScheduledActivity
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.ActivityPub.ActivityPub
   alias Pleroma.Web.CommonAPI
@@ -3698,7 +3699,7 @@ test "returns 404 when poll is private and not available for user", %{conn: conn
         build_conn()
         |> assign(:user, user)

-      [conn: conn, activity: activity]
+      [conn: conn, activity: activity, user: user]
     end

     test "returns users who have favorited the status", %{conn: conn, activity: activity} do
@@ -3758,6 +3759,32 @@ test "does not fail on an unauthenticated request", %{conn: conn, activity: acti
       [%{"id" => id}] = response
       assert id == other_user.id
     end
+
+    test "requires authentification for private posts", %{conn: conn, user: user} do
+      other_user = insert(:user)
+
+      {:ok, activity} =
+        CommonAPI.post(user, %{
+          "status" => "@#{other_user.nickname} wanna get some #cofe together?",
+          "visibility" => "direct"
+        })
+
+      {:ok, _, _} = CommonAPI.favorite(activity.id, other_user)
+
+      conn
+      |> assign(:user, nil)
+      |> get("/api/v1/statuses/#{activity.id}/favourited_by")
+      |> json_response(404)
+
+      response =
+        build_conn()
+        |> assign(:user, other_user)
+        |> get("/api/v1/statuses/#{activity.id}/favourited_by")
+        |> json_response(200)
+
+      [%{"id" => id}] = response
+      assert id == other_user.id
+    end
   end

describe "GET /api/v1/statuses/:id/reblogged_by" do
|
describe "GET /api/v1/statuses/:id/reblogged_by" do
|
||||||
@ -3769,7 +3796,7 @@ test "does not fail on an unauthenticated request", %{conn: conn, activity: acti
|
|||||||
build_conn()
|
build_conn()
|
||||||
|> assign(:user, user)
|
|> assign(:user, user)
|
||||||
|
|
||||||
[conn: conn, activity: activity]
|
[conn: conn, activity: activity, user: user]
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns users who have reblogged the status", %{conn: conn, activity: activity} do
|
test "returns users who have reblogged the status", %{conn: conn, activity: activity} do
|
||||||
@ -3829,6 +3856,29 @@ test "does not fail on an unauthenticated request", %{conn: conn, activity: acti
|
|||||||
[%{"id" => id}] = response
|
[%{"id" => id}] = response
|
||||||
assert id == other_user.id
|
assert id == other_user.id
|
||||||
end
|
end
|
||||||
|
|
||||||
|
test "requires authentification for private posts", %{conn: conn, user: user} do
|
||||||
|
other_user = insert(:user)
|
||||||
|
|
||||||
|
{:ok, activity} =
|
||||||
|
CommonAPI.post(user, %{
|
||||||
|
"status" => "@#{other_user.nickname} wanna get some #cofe together?",
|
||||||
|
"visibility" => "direct"
|
||||||
|
})
|
||||||
|
|
||||||
|
conn
|
||||||
|
|> assign(:user, nil)
|
||||||
|
|> get("/api/v1/statuses/#{activity.id}/reblogged_by")
|
||||||
|
|> json_response(404)
|
||||||
|
|
||||||
|
response =
|
||||||
|
build_conn()
|
||||||
|
|> assign(:user, other_user)
|
||||||
|
|> get("/api/v1/statuses/#{activity.id}/reblogged_by")
|
||||||
|
|> json_response(200)
|
||||||
|
|
||||||
|
assert [] == response
|
||||||
|
end
|
||||||
end
|
end
|
||||||
|
|
||||||
describe "POST /auth/password, with valid parameters" do
|
describe "POST /auth/password, with valid parameters" do
|
||||||
@ -3848,6 +3898,7 @@ test "it creates a PasswordResetToken record for user", %{user: user} do
|
|||||||
end
|
end
|
||||||
|
|
||||||
test "it sends an email to user", %{user: user} do
|
test "it sends an email to user", %{user: user} do
|
||||||
|
ObanHelpers.perform_all()
|
||||||
token_record = Repo.get_by(Pleroma.PasswordResetToken, user_id: user.id)
|
token_record = Repo.get_by(Pleroma.PasswordResetToken, user_id: user.id)
|
||||||
|
|
||||||
email = Pleroma.Emails.UserEmail.password_reset_email(user, token_record.token)
|
email = Pleroma.Emails.UserEmail.password_reset_email(user, token_record.token)
|
||||||
@ -3908,6 +3959,8 @@ test "resend account confirmation email", %{conn: conn, user: user} do
|
|||||||
|> post("/api/v1/pleroma/accounts/confirmation_resend?email=#{user.email}")
|
|> post("/api/v1/pleroma/accounts/confirmation_resend?email=#{user.email}")
|
||||||
|> json_response(:no_content)
|
|> json_response(:no_content)
|
||||||
|
|
||||||
|
ObanHelpers.perform_all()
|
||||||
|
|
||||||
email = Pleroma.Emails.UserEmail.account_confirmation_email(user)
|
email = Pleroma.Emails.UserEmail.account_confirmation_email(user)
|
||||||
notify_email = Config.get([:instance, :notify_email])
|
notify_email = Config.get([:instance, :notify_email])
|
||||||
instance_name = Config.get([:instance, :name])
|
instance_name = Config.get([:instance, :name])
|
||||||
@ -3963,13 +4016,15 @@ test "returns error", %{conn: conn, user: user} do
|
|||||||
Config.put([:suggestions, :enabled], true)
|
Config.put([:suggestions, :enabled], true)
|
||||||
Config.put([:suggestions, :third_party_engine], "http://test500?{{host}}&{{user}}")
|
Config.put([:suggestions, :third_party_engine], "http://test500?{{host}}&{{user}}")
|
||||||
|
|
||||||
res =
|
assert capture_log(fn ->
|
||||||
conn
|
res =
|
||||||
|> assign(:user, user)
|
conn
|
||||||
|> get("/api/v1/suggestions")
|
|> assign(:user, user)
|
||||||
|> json_response(500)
|
|> get("/api/v1/suggestions")
|
||||||
|
|> json_response(500)
|
||||||
|
|
||||||
assert res == "Something went wrong"
|
assert res == "Something went wrong"
|
||||||
|
end) =~ "Could not retrieve suggestions"
|
||||||
end
|
end
|
||||||
|
|
||||||
test "returns suggestions", %{conn: conn, user: user, other_user: other_user} do
|
test "returns suggestions", %{conn: conn, user: user, other_user: other_user} do
|
||||||
@@ -1,48 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2018 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule MockActivityPub do
-  def publish_one({ret, waiter}) do
-    send(waiter, :complete)
-    {ret, "success"}
-  end
-end
-
-defmodule Pleroma.Web.Federator.RetryQueueTest do
-  use Pleroma.DataCase
-  alias Pleroma.Web.Federator.RetryQueue
-
-  @small_retry_count 0
-  @hopeless_retry_count 10
-
-  setup do
-    RetryQueue.reset_stats()
-  end
-
-  test "RetryQueue responds to stats request" do
-    assert %{delivered: 0, dropped: 0} == RetryQueue.get_stats()
-  end
-
-  test "failed posts are retried" do
-    {:retry, _timeout} = RetryQueue.get_retry_params(@small_retry_count)
-
-    wait_task =
-      Task.async(fn ->
-        receive do
-          :complete -> :ok
-        end
-      end)
-
-    RetryQueue.enqueue({:ok, wait_task.pid}, MockActivityPub, @small_retry_count)
-    Task.await(wait_task)
-    assert %{delivered: 1, dropped: 0} == RetryQueue.get_stats()
-  end
-
-  test "posts that have been tried too many times are dropped" do
-    {:drop, _timeout} = RetryQueue.get_retry_params(@hopeless_retry_count)
-
-    RetryQueue.enqueue({:ok, nil}, MockActivityPub, @hopeless_retry_count)
-    assert %{delivered: 0, dropped: 1} == RetryQueue.get_stats()
-  end
-end
@@ -96,6 +96,6 @@ test "it gets a magic key" do

     Salmon.publish(user, activity)

-    assert called(Publisher.enqueue_one(Salmon, %{recipient: mentioned_user}))
+    assert called(Publisher.enqueue_one(Salmon, %{recipient_id: mentioned_user.id}))
   end
 end
@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.TwitterAPI.TwitterAPITest do
   use Pleroma.DataCase
   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.UserInviteToken
   alias Pleroma.Web.MastodonAPI.AccountView
@@ -68,6 +69,7 @@ test "it sends confirmation email if :account_activation_required is specified i
     }

     {:ok, user} = TwitterAPI.register_user(data)
+    ObanHelpers.perform_all()

     assert user.info.confirmation_pending

@@ -4,10 +4,13 @@

 defmodule Pleroma.Web.TwitterAPI.UtilControllerTest do
   use Pleroma.Web.ConnCase
+  use Oban.Testing, repo: Pleroma.Repo

   alias Pleroma.Repo
+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI
+  import ExUnit.CaptureLog
   import Pleroma.Factory
   import Mock

@@ -42,8 +45,7 @@ test "it imports follow lists from file", %{conn: conn} do
         {File, [],
          read!: fn "follow_list.txt" ->
            "Account address,Show boosts\n#{user2.ap_id},true"
-         end},
-        {PleromaJobQueue, [:passthrough], []}
+         end}
       ]) do
         response =
           conn
@@ -51,15 +53,16 @@ test "it imports follow lists from file", %{conn: conn} do
           |> post("/api/pleroma/follow_import", %{"list" => %Plug.Upload{path: "follow_list.txt"}})
           |> json_response(:ok)

-        assert called(
-                 PleromaJobQueue.enqueue(
-                   :background,
-                   User,
-                   [:follow_import, user1, [user2.ap_id]]
-                 )
-               )
-
         assert response == "job started"
+
+        assert ObanHelpers.member?(
+                 %{
+                   "op" => "follow_import",
+                   "follower_id" => user1.id,
+                   "followed_identifiers" => [user2.ap_id]
+                 },
+                 all_enqueued(worker: Pleroma.Workers.BackgroundWorker)
+               )
       end
     end

@@ -118,8 +121,7 @@ test "it imports blocks users from file", %{conn: conn} do
       user3 = insert(:user)

       with_mocks([
-        {File, [], read!: fn "blocks_list.txt" -> "#{user2.ap_id} #{user3.ap_id}" end},
-        {PleromaJobQueue, [:passthrough], []}
+        {File, [], read!: fn "blocks_list.txt" -> "#{user2.ap_id} #{user3.ap_id}" end}
       ]) do
         response =
           conn
@@ -127,15 +129,16 @@ test "it imports blocks users from file", %{conn: conn} do
           |> post("/api/pleroma/blocks_import", %{"list" => %Plug.Upload{path: "blocks_list.txt"}})
           |> json_response(:ok)

-        assert called(
-                 PleromaJobQueue.enqueue(
-                   :background,
-                   User,
-                   [:blocks_import, user1, [user2.ap_id, user3.ap_id]]
-                 )
-               )
-
         assert response == "job started"
+
+        assert ObanHelpers.member?(
+                 %{
+                   "op" => "blocks_import",
+                   "blocker_id" => user1.id,
+                   "blocked_identifiers" => [user2.ap_id, user3.ap_id]
+                 },
+                 all_enqueued(worker: Pleroma.Workers.BackgroundWorker)
+               )
       end
     end
   end
@@ -338,12 +341,14 @@ test "show follow page if the `acct` is a account link", %{conn: conn} do
     test "show follow page with error when user cannot fecth by `acct` link", %{conn: conn} do
       user = insert(:user)

-      response =
-        conn
-        |> assign(:user, user)
-        |> get("/ostatus_subscribe?acct=https://mastodon.social/users/not_found")
+      assert capture_log(fn ->
+               response =
+                 conn
+                 |> assign(:user, user)
+                 |> get("/ostatus_subscribe?acct=https://mastodon.social/users/not_found")

-      assert html_response(response, 200) =~ "Error fetching user"
+               assert html_response(response, 200) =~ "Error fetching user"
+             end) =~ "Object has been deleted"
     end
   end

@@ -557,6 +562,7 @@ test "it returns HTTP 200", %{conn: conn} do
       |> json_response(:ok)

     assert response == %{"status" => "success"}
+    ObanHelpers.perform_all()

     user = User.get_cached_by_id(user.id)

@@ -662,4 +668,111 @@ test "it returns new captcha", %{conn: conn} do
       assert called(Pleroma.Captcha.new())
     end
   end
+
+  defp with_credentials(conn, username, password) do
+    header_content = "Basic " <> Base.encode64("#{username}:#{password}")
+    put_req_header(conn, "authorization", header_content)
+  end
+
+  defp valid_user(_context) do
+    user = insert(:user)
+    [user: user]
+  end
+
+  describe "POST /api/pleroma/change_email" do
+    setup [:valid_user]
+
+    test "without credentials", %{conn: conn} do
+      conn = post(conn, "/api/pleroma/change_email")
+      assert json_response(conn, 403) == %{"error" => "Invalid credentials."}
+    end
+
+    test "with credentials and invalid password", %{conn: conn, user: current_user} do
+      conn =
+        conn
+        |> with_credentials(current_user.nickname, "test")
+        |> post("/api/pleroma/change_email", %{
+          "password" => "hi",
+          "email" => "test@test.com"
+        })
+
+      assert json_response(conn, 200) == %{"error" => "Invalid password."}
+    end
+
+    test "with credentials, valid password and invalid email", %{
+      conn: conn,
+      user: current_user
+    } do
+      conn =
+        conn
+        |> with_credentials(current_user.nickname, "test")
+        |> post("/api/pleroma/change_email", %{
+          "password" => "test",
+          "email" => "foobar"
+        })
+
+      assert json_response(conn, 200) == %{"error" => "Email has invalid format."}
+    end
+
+    test "with credentials, valid password and no email", %{
+      conn: conn,
+      user: current_user
+    } do
+      conn =
+        conn
+        |> with_credentials(current_user.nickname, "test")
+        |> post("/api/pleroma/change_email", %{
+          "password" => "test"
+        })
+
+      assert json_response(conn, 200) == %{"error" => "Email can't be blank."}
+    end
+
+    test "with credentials, valid password and blank email", %{
+      conn: conn,
+      user: current_user
+    } do
+      conn =
+        conn
+        |> with_credentials(current_user.nickname, "test")
+        |> post("/api/pleroma/change_email", %{
+          "password" => "test",
+          "email" => ""
+        })
+
+      assert json_response(conn, 200) == %{"error" => "Email can't be blank."}
+    end
+
+    test "with credentials, valid password and non unique email", %{
+      conn: conn,
+      user: current_user
+    } do
+      user = insert(:user)
+
+      conn =
+        conn
+        |> with_credentials(current_user.nickname, "test")
+        |> post("/api/pleroma/change_email", %{
+          "password" => "test",
+          "email" => user.email
+        })
+
+      assert json_response(conn, 200) == %{"error" => "Email has already been taken."}
+    end
+
+    test "with credentials, valid password and valid email", %{
+      conn: conn,
+      user: current_user
+    } do
+      conn =
+        conn
+        |> with_credentials(current_user.nickname, "test")
+        |> post("/api/pleroma/change_email", %{
+          "password" => "test",
+          "email" => "cofe@foobar.com"
+        })
+
+      assert json_response(conn, 200) == %{"status" => "success"}
+    end
+  end
 end
@@ -5,6 +5,7 @@
 defmodule Pleroma.Web.WebFinger.WebFingerControllerTest do
   use Pleroma.Web.ConnCase

+  import ExUnit.CaptureLog
   import Pleroma.Factory
   import Tesla.Mock

@@ -75,11 +76,13 @@ test "it returns 404 when user isn't found (XML)" do
   test "Sends a 404 when invalid format" do
     user = insert(:user)

-    assert_raise Phoenix.NotAcceptableError, fn ->
-      build_conn()
-      |> put_req_header("accept", "text/html")
-      |> get("/.well-known/webfinger?resource=acct:#{user.nickname}@localhost")
-    end
+    assert capture_log(fn ->
+             assert_raise Phoenix.NotAcceptableError, fn ->
+               build_conn()
+               |> put_req_header("accept", "text/html")
+               |> get("/.well-known/webfinger?resource=acct:#{user.nickname}@localhost")
+             end
+           end) =~ "no supported media type in accept header"
   end

   test "Sends a 400 when resource param is missing" do
@@ -4,11 +4,14 @@

 defmodule Pleroma.Web.WebsubTest do
   use Pleroma.DataCase
+  use Oban.Testing, repo: Pleroma.Repo

+  alias Pleroma.Tests.ObanHelpers
   alias Pleroma.Web.Router.Helpers
   alias Pleroma.Web.Websub
   alias Pleroma.Web.Websub.WebsubClientSubscription
   alias Pleroma.Web.Websub.WebsubServerSubscription
+  alias Pleroma.Workers.SubscriberWorker

   import Pleroma.Factory
   import Tesla.Mock
@@ -224,6 +227,7 @@ test "it renews subscriptions that have less than a day of time left" do
       })

     _refresh = Websub.refresh_subscriptions()
+    ObanHelpers.perform(all_enqueued(worker: SubscriberWorker))

     assert still_good == Repo.get(WebsubClientSubscription, still_good.id)
     refute needs_refresh == Repo.get(WebsubClientSubscription, needs_refresh.id)