diff --git a/config/config.exs b/config/config.exs
index 0b8a75aad..1ac140ed0 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -1,847 +1,846 @@
# .i;;;;i.
# iYcviii;vXY:
# .YXi .i1c.
# .YC. . in7.
# .vc. ...... ;1c.
# i7, .. .;1;
# i7, .. ... .Y1i
# ,7v .6MMM@; .YX,
# .7;. ..IMMMMMM1 :t7.
# .;Y. ;$MMMMMM9. :tc.
# vY. .. .nMMM@MMU. ;1v.
# i7i ... .#MM@M@C. .....:71i
# it: .... $MMM@9;.,i;;;i,;tti
# :t7. ..... 0MMMWv.,iii:::,,;St.
# .nC. ..... IMMMQ..,::::::,.,czX.
# .ct: ....... .ZMMMI..,:::::::,,:76Y.
# c2: ......,i..Y$M@t..:::::::,,..inZY
# vov ......:ii..c$MBc..,,,,,,,,,,..iI9i
# i9Y ......iii:..7@MA,..,,,,,,,,,....;AA:
# iIS. ......:ii::..;@MI....,............;Ez.
# .I9. ......:i::::...8M1..................C0z.
# .z9; ......:i::::,.. .i:...................zWX.
# vbv ......,i::::,,. ................. :AQY
# c6Y. .,...,::::,,..:t0@@QY. ................ :8bi
# :6S. ..,,...,:::,,,..EMMMMMMI. ............... .;bZ,
# :6o, .,,,,..:::,,,..i#MMMMMM#v................. YW2.
# .n8i ..,,,,,,,::,,,,.. tMMMMM@C:.................. .1Wn
# 7Uc. .:::,,,,,::,,,,.. i1t;,..................... .UEi
# 7C...::::::::::::,,,,.. .................... vSi.
# ;1;...,,::::::,......... .................. Yz:
# v97,......... .voC.
# izAotX7777777777777777777777777777777777777777Y7n92:
# .;CoIIIIIUAA666666699999ZZZZZZZZZZZZZZZZZZZZ6ov.
#
# !!! ATTENTION !!!
# DO NOT EDIT THIS FILE! THIS FILE CONTAINS THE DEFAULT VALUES FOR THE
# CONFIGURATION! EDIT YOUR SECRET FILE (either prod.secret.exs or dev.secret.exs).
#
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
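#
# A minimal sketch of the override mechanism described above (example values,
# not defaults): a secret file only re-declares the keys you want to change,
# and it is merged over these defaults by the environment config imported at
# the bottom of this file.
#
#     # config/prod.secret.exs
#     use Mix.Config
#
#     config :pleroma, :instance,
#       name: "My Instance",
#       registrations_open: false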
use Mix.Config
# General application configuration
config :pleroma, ecto_repos: [Pleroma.Repo]
config :pleroma, Pleroma.Repo,
types: Pleroma.PostgresTypes,
telemetry_event: [Pleroma.Repo.Instrumenter],
migration_lock: nil
config :pleroma, Pleroma.Captcha,
enabled: true,
seconds_valid: 300,
method: Pleroma.Captcha.Native
config :pleroma, Pleroma.Captcha.Kocaptcha, endpoint: "https://captcha.kotobank.ch"
# Upload configuration
config :pleroma, Pleroma.Upload,
uploader: Pleroma.Uploaders.Local,
filters: [Pleroma.Upload.Filter.Dedupe],
link_name: false,
proxy_remote: false,
proxy_opts: [
redirect_on_failure: false,
max_body_length: 25 * 1_048_576,
http: [
follow_redirect: true,
pool: :upload
]
],
filename_display_max_length: 30,
default_description: nil
config :pleroma, Pleroma.Uploaders.Local, uploads: "uploads"
config :pleroma, Pleroma.Uploaders.S3,
bucket: nil,
streaming_enabled: true,
public_endpoint: "https://s3.amazonaws.com"
config :pleroma, :emoji,
shortcode_globs: ["/emoji/custom/**/*.png"],
pack_extensions: [".png", ".gif"],
groups: [
Custom: ["/emoji/*.png", "/emoji/**/*.png"]
],
default_manifest: "https://git.pleroma.social/pleroma/emoji-index/raw/master/index.json",
shared_pack_cache_seconds_per_file: 60
config :pleroma, :uri_schemes,
valid_schemes: [
"https",
"http",
"dat",
"dweb",
"gopher",
"hyper",
"ipfs",
"ipns",
"irc",
"ircs",
"magnet",
"mailto",
"mumble",
"ssb",
"xmpp"
]
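# Shared websocket transport options; referenced by the /websocket route in the
# Pleroma.Web.Endpoint dispatch configuration below.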
websocket_config = [
path: "/websocket",
serializer: [
{Phoenix.Socket.V1.JSONSerializer, "~> 1.0.0"},
{Phoenix.Socket.V2.JSONSerializer, "~> 2.0.0"}
],
timeout: 60_000,
transport_log: false,
compress: false
]
# Configures the endpoint
config :pleroma, Pleroma.Web.Endpoint,
url: [host: "localhost"],
http: [
ip: {127, 0, 0, 1},
dispatch: [
{:_,
[
- {"/api/fedsocket/v1", Pleroma.Web.FedSockets.IncomingHandler, []},
{"/api/v1/streaming", Pleroma.Web.MastodonAPI.WebsocketHandler, []},
{"/websocket", Phoenix.Endpoint.CowboyWebSocket,
{Phoenix.Transports.WebSocket,
{Pleroma.Web.Endpoint, Pleroma.Web.UserSocket, websocket_config}}},
{:_, Phoenix.Endpoint.Cowboy2Handler, {Pleroma.Web.Endpoint, []}}
]}
]
],
protocol: "https",
secret_key_base: "aK4Abxf29xU9TTDKre9coZPUgevcVCFQJe/5xP/7Lt4BEif6idBIbjupVbOrbKxl",
signing_salt: "CqaoopA2",
render_errors: [view: Pleroma.Web.ErrorView, accepts: ~w(json)],
pubsub_server: Pleroma.PubSub,
secure_cookie_flag: true,
extra_cookie_attrs: [
"SameSite=Lax"
]
config :pleroma, :fed_sockets,
enabled: false,
connection_duration: :timer.hours(8),
rejection_duration: :timer.minutes(15),
fed_socket_fetches: [
default: 12_000,
interval: 3_000,
lazy: false
]
# Configures Elixir's Logger
config :logger, :console,
level: :debug,
format: "\n$time $metadata[$level] $message\n",
metadata: [:request_id]
config :logger, :ex_syslogger,
level: :debug,
ident: "pleroma",
format: "$metadata[$level] $message",
metadata: [:request_id]
config :quack,
level: :warn,
meta: [:all],
webhook_url: "https://hooks.slack.com/services/YOUR-KEY-HERE"
config :mime, :types, %{
"application/xml" => ["xml"],
"application/xrd+xml" => ["xrd+xml"],
"application/jrd+json" => ["jrd+json"],
"application/activity+json" => ["activity+json"],
"application/ld+json" => ["activity+json"]
}
config :tesla, adapter: Tesla.Adapter.Hackney
# Configures http settings, upstream proxy etc.
config :pleroma, :http,
proxy_url: nil,
send_user_agent: true,
user_agent: :default,
adapter: []
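# A hedged example (values taken from the proxy_url suggestions in
# config/description.exs): to route outgoing requests through an upstream
# proxy, set proxy_url in your secret config instead of editing this file.
#
#     config :pleroma, :http,
#       proxy_url: "127.0.0.1:8123"
#       # or: proxy_url: {:socks5, :localhost, 9050}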
config :pleroma, :instance,
name: "Pleroma",
email: "example@example.com",
notify_email: "noreply@example.com",
description: "Pleroma: An efficient and flexible fediverse server",
background_image: "/images/city.jpg",
instance_thumbnail: "/instance/thumbnail.jpeg",
limit: 5_000,
description_limit: 5_000,
chat_limit: 5_000,
remote_limit: 100_000,
upload_limit: 16_000_000,
avatar_upload_limit: 2_000_000,
background_upload_limit: 4_000_000,
banner_upload_limit: 4_000_000,
poll_limits: %{
max_options: 20,
max_option_chars: 200,
min_expiration: 0,
max_expiration: 365 * 24 * 60 * 60
},
registrations_open: true,
invites_enabled: false,
account_activation_required: false,
account_approval_required: false,
federating: true,
federation_incoming_replies_max_depth: 100,
federation_reachability_timeout_days: 7,
federation_publisher_modules: [
Pleroma.Web.ActivityPub.Publisher
],
allow_relay: true,
public: true,
quarantined_instances: [],
static_dir: "instance/static/",
allowed_post_formats: [
"text/plain",
"text/html",
"text/markdown",
"text/bbcode"
],
autofollowed_nicknames: [],
autofollowing_nicknames: [],
max_pinned_statuses: 1,
attachment_links: false,
max_report_comment_size: 1000,
safe_dm_mentions: false,
healthcheck: false,
remote_post_retention_days: 90,
skip_thread_containment: true,
limit_to_local_content: :unauthenticated,
user_bio_length: 5000,
user_name_length: 100,
max_account_fields: 10,
max_remote_account_fields: 20,
account_field_name_length: 512,
account_field_value_length: 2048,
registration_reason_length: 500,
external_user_synchronization: true,
extended_nickname_format: true,
cleanup_attachments: false,
multi_factor_authentication: [
totp: [
# digits 6 or 8
digits: 6,
period: 30
],
backup_codes: [
number: 5,
length: 16
]
],
show_reactions: true
config :pleroma, :welcome,
direct_message: [
enabled: false,
sender_nickname: nil,
message: nil
],
chat_message: [
enabled: false,
sender_nickname: nil,
message: nil
],
email: [
enabled: false,
sender: nil,
subject: "Welcome to <%= instance_name %>",
html: "Welcome to <%= instance_name %>",
text: "Welcome to <%= instance_name %>"
]
config :pleroma, :feed,
post_title: %{
max_length: 100,
omission: "..."
}
config :pleroma, :markup,
# XXX - unfortunately, inline images must be enabled by default right now, because
# of custom emoji. Issue #275 discusses defanging that somehow.
allow_inline_images: true,
allow_headings: false,
allow_tables: false,
allow_fonts: false,
scrub_policy: [
Pleroma.HTML.Scrubber.Default,
Pleroma.HTML.Transform.MediaProxy
]
config :pleroma, :frontend_configurations,
pleroma_fe: %{
alwaysShowSubjectInput: true,
background: "/images/city.jpg",
collapseMessageWithSubject: false,
disableChat: false,
greentext: false,
hideFilteredStatuses: false,
hideMutedPosts: false,
hidePostStats: false,
hideSitename: false,
hideUserStats: false,
loginMethod: "password",
logo: "/static/logo.png",
logoMargin: ".1em",
logoMask: true,
minimalScopesMode: false,
noAttachmentLinks: false,
nsfwCensorImage: "",
postContentType: "text/plain",
redirectRootLogin: "/main/friends",
redirectRootNoLogin: "/main/all",
scopeCopy: true,
sidebarRight: false,
showFeaturesPanel: true,
showInstanceSpecificPanel: false,
subjectLineBehavior: "email",
theme: "pleroma-dark",
webPushNotifications: false
},
masto_fe: %{
showInstanceSpecificPanel: true
}
config :pleroma, :assets,
mascots: [
pleroma_fox_tan: %{
url: "/images/pleroma-fox-tan-smol.png",
mime_type: "image/png"
},
pleroma_fox_tan_shy: %{
url: "/images/pleroma-fox-tan-shy.png",
mime_type: "image/png"
}
],
default_mascot: :pleroma_fox_tan
config :pleroma, :manifest,
icons: [
%{
src: "/static/logo.png",
type: "image/png"
}
],
theme_color: "#282c37",
background_color: "#191b22"
config :pleroma, :activitypub,
unfollow_blocked: true,
outgoing_blocks: true,
follow_handshake_timeout: 500,
note_replies_output_limit: 5,
sign_object_fetches: true,
authorized_fetch_mode: false
config :pleroma, :streamer,
workers: 3,
overflow_workers: 2
config :pleroma, :user, deny_follow_blocked: true
config :pleroma, :mrf_normalize_markup, scrub_policy: Pleroma.HTML.Scrubber.Default
config :pleroma, :mrf_rejectnonpublic,
allow_followersonly: false,
allow_direct: false
config :pleroma, :mrf_hellthread,
delist_threshold: 10,
reject_threshold: 20
config :pleroma, :mrf_simple,
media_removal: [],
media_nsfw: [],
federated_timeline_removal: [],
report_removal: [],
reject: [],
followers_only: [],
accept: [],
avatar_removal: [],
banner_removal: [],
reject_deletes: []
config :pleroma, :mrf_keyword,
reject: [],
federated_timeline_removal: [],
replace: []
config :pleroma, :mrf_subchain, match_actor: %{}
config :pleroma, :mrf_activity_expiration, days: 365
config :pleroma, :mrf_vocabulary,
accept: [],
reject: []
# threshold of 7 days (604_800 seconds = 7 * 24 * 60 * 60)
config :pleroma, :mrf_object_age,
threshold: 604_800,
actions: [:delist, :strip_followers]
config :pleroma, :rich_media,
enabled: true,
ignore_hosts: [],
ignore_tld: ["local", "localdomain", "lan"],
parsers: [
Pleroma.Web.RichMedia.Parsers.TwitterCard,
Pleroma.Web.RichMedia.Parsers.OEmbed
],
failure_backoff: 60_000,
ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
config :pleroma, :media_proxy,
enabled: false,
invalidation: [
enabled: false,
provider: Pleroma.Web.MediaProxy.Invalidation.Script
],
proxy_opts: [
redirect_on_failure: false,
max_body_length: 25 * 1_048_576,
# Note: max_read_duration defaults to Pleroma.ReverseProxy.max_read_duration_default/1
max_read_duration: 30_000,
http: [
follow_redirect: true,
pool: :media
]
],
whitelist: []
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
method: :purge,
headers: [],
options: []
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script, script_path: nil
# Note: media preview proxy depends on media proxy to be enabled
config :pleroma, :media_preview_proxy,
enabled: false,
thumbnail_max_width: 600,
thumbnail_max_height: 600,
image_quality: 85,
min_content_length: 100 * 1024
config :pleroma, :chat, enabled: true
config :phoenix, :format_encoders, json: Jason
config :phoenix, :json_library, Jason
config :phoenix, :filter_parameters, ["password", "confirm"]
config :pleroma, :gopher,
enabled: false,
ip: {0, 0, 0, 0},
port: 9999
config :pleroma, Pleroma.Web.Metadata,
providers: [
Pleroma.Web.Metadata.Providers.OpenGraph,
Pleroma.Web.Metadata.Providers.TwitterCard
],
unfurl_nsfw: false
config :pleroma, Pleroma.Web.Preload,
providers: [
Pleroma.Web.Preload.Providers.Instance
]
config :pleroma, :http_security,
enabled: true,
sts: false,
sts_max_age: 31_536_000,
ct_max_age: 2_592_000,
referrer_policy: "same-origin"
config :cors_plug,
max_age: 86_400,
methods: ["POST", "PUT", "DELETE", "GET", "PATCH", "OPTIONS"],
expose: [
"Link",
"X-RateLimit-Reset",
"X-RateLimit-Limit",
"X-RateLimit-Remaining",
"X-Request-Id",
"Idempotency-Key"
],
credentials: true,
headers: ["Authorization", "Content-Type", "Idempotency-Key"]
config :pleroma, Pleroma.User,
restricted_nicknames: [
".well-known",
"~",
"about",
"activities",
"api",
"auth",
"check_password",
"dev",
"friend-requests",
"inbox",
"internal",
"main",
"media",
"nodeinfo",
"notice",
"oauth",
"objects",
"ostatus_subscribe",
"pleroma",
"proxy",
"push",
"registration",
"relay",
"settings",
"status",
"tag",
"user-search",
"user_exists",
"users",
"web",
"verify_credentials",
"update_credentials",
"relationships",
"search",
"confirmation_resend",
"mfa"
],
email_blacklist: []
config :pleroma, Oban,
repo: Pleroma.Repo,
log: false,
queues: [
activity_expiration: 10,
token_expiration: 5,
backup: 1,
federator_incoming: 50,
federator_outgoing: 50,
ingestion_queue: 50,
web_push: 50,
mailer: 10,
transmogrifier: 20,
scheduled_activities: 10,
background: 5,
remote_fetcher: 2,
attachments_cleanup: 5,
new_users_digest: 1,
mute_expire: 5
],
plugins: [Oban.Plugins.Pruner],
crontab: [
{"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
{"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker}
]
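# The crontab entries above use standard five-field cron syntax:
# "0 0 * * 0" runs the digest emails worker weekly (Sunday at midnight) and
# "0 0 * * *" runs the new users digest worker daily at midnight.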
config :pleroma, :workers,
retries: [
federator_incoming: 5,
federator_outgoing: 5
]
config :pleroma, Pleroma.Formatter,
class: false,
rel: "ugc",
new_window: false,
truncate: false,
strip_prefix: false,
extra: true,
validate_tld: :no_scheme
config :pleroma, :ldap,
enabled: System.get_env("LDAP_ENABLED") == "true",
host: System.get_env("LDAP_HOST") || "localhost",
port: String.to_integer(System.get_env("LDAP_PORT") || "389"),
ssl: System.get_env("LDAP_SSL") == "true",
sslopts: [],
tls: System.get_env("LDAP_TLS") == "true",
tlsopts: [],
base: System.get_env("LDAP_BASE") || "dc=example,dc=com",
uid: System.get_env("LDAP_UID") || "cn"
config :esshd,
enabled: false
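# OAUTH_CONSUMER_STRATEGIES is parsed as a space-separated list of entries of
# the form "strategy" or "strategy:dependency"; only the strategy name before
# the ":" is kept below. Illustrative value (assumption, not a default):
#   OAUTH_CONSUMER_STRATEGIES="twitter keycloak:ueberauth_keycloak_strategy"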
oauth_consumer_strategies =
System.get_env("OAUTH_CONSUMER_STRATEGIES")
|> to_string()
|> String.split()
|> Enum.map(&hd(String.split(&1, ":")))
ueberauth_providers =
for strategy <- oauth_consumer_strategies do
strategy_module_name = "Elixir.Ueberauth.Strategy.#{String.capitalize(strategy)}"
strategy_module = String.to_atom(strategy_module_name)
{String.to_atom(strategy), {strategy_module, [callback_params: ["state"]]}}
end
config :ueberauth,
Ueberauth,
base_path: "/oauth",
providers: ueberauth_providers
config :pleroma,
:auth,
enforce_oauth_admin_scope_usage: true,
oauth_consumer_strategies: oauth_consumer_strategies
config :pleroma, Pleroma.Emails.Mailer, adapter: Swoosh.Adapters.Sendmail, enabled: false
config :pleroma, Pleroma.Emails.UserEmail,
logo: nil,
styling: %{
link_color: "#d8a070",
background_color: "#2C3645",
content_background_color: "#1B2635",
header_color: "#d8a070",
text_color: "#b9b9ba",
text_muted_color: "#b9b9ba"
}
config :pleroma, Pleroma.Emails.NewUsersDigestEmail, enabled: false
config :prometheus, Pleroma.Web.Endpoint.MetricsExporter,
enabled: false,
auth: false,
ip_whitelist: [],
path: "/api/pleroma/app_metrics",
format: :text
config :pleroma, Pleroma.ScheduledActivity,
daily_user_limit: 25,
total_user_limit: 300,
enabled: true
config :pleroma, :email_notifications,
digest: %{
active: false,
interval: 7,
inactivity_threshold: 7
}
config :pleroma, :oauth2,
token_expires_in: 600,
issue_new_refresh_token: true,
clean_expired_tokens: false
config :pleroma, :database, rum_enabled: false
config :pleroma, :env, Mix.env()
config :http_signatures,
adapter: Pleroma.Signature
config :pleroma, :rate_limit,
authentication: {60_000, 15},
timeline: {500, 3},
search: [{1000, 10}, {1000, 30}],
app_account_creation: {1_800_000, 25},
relations_actions: {10_000, 10},
relation_id_action: {60_000, 2},
statuses_actions: {10_000, 15},
status_id_action: {60_000, 3},
password_reset: {1_800_000, 5},
account_confirmation_resend: {8_640_000, 5},
ap_routes: {60_000, 15}
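# A hedged note on the rate limiter format (based on how these values are used
# elsewhere in Pleroma): each entry is {interval_in_ms, max_requests}, e.g.
# authentication allows 15 requests per 60_000 ms; an entry given as a list of
# two tuples (such as :search) applies the first limit to unauthenticated and
# the second to authenticated users.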
config :pleroma, Pleroma.Workers.PurgeExpiredActivity, enabled: true, min_lifetime: 600
config :pleroma, Pleroma.Web.Plugs.RemoteIp,
enabled: true,
headers: ["x-forwarded-for"],
proxies: [],
reserved: [
"127.0.0.0/8",
"::1/128",
"fc00::/7",
"10.0.0.0/8",
"172.16.0.0/12",
"192.168.0.0/16"
]
config :pleroma, :static_fe, enabled: false
# Example of frontend configuration
# This example will make us serve the primary frontend from the
# frontends directory within your `:pleroma, :instance, static_dir`.
# e.g., instance/static/frontends/pleroma/develop/
#
# With no frontend configuration, the bundled files from the `static` directory will
# be used.
#
# config :pleroma, :frontends,
# primary: %{"name" => "pleroma-fe", "ref" => "develop"},
# admin: %{"name" => "admin-fe", "ref" => "stable"},
# available: %{...}
config :pleroma, :frontends,
available: %{
"kenoma" => %{
"name" => "kenoma",
"git" => "https://git.pleroma.social/lambadalambda/kenoma",
"build_url" =>
"https://git.pleroma.social/lambadalambda/kenoma/-/jobs/artifacts/${ref}/download?job=build",
"ref" => "master"
},
"pleroma-fe" => %{
"name" => "pleroma-fe",
"git" => "https://git.pleroma.social/pleroma/pleroma-fe",
"build_url" =>
"https://git.pleroma.social/pleroma/pleroma-fe/-/jobs/artifacts/${ref}/download?job=build",
"ref" => "develop"
},
"fedi-fe" => %{
"name" => "fedi-fe",
"git" => "https://git.pleroma.social/pleroma/fedi-fe",
"build_url" =>
"https://git.pleroma.social/pleroma/fedi-fe/-/jobs/artifacts/${ref}/download?job=build",
"ref" => "master"
},
"admin-fe" => %{
"name" => "admin-fe",
"git" => "https://git.pleroma.social/pleroma/admin-fe",
"build_url" =>
"https://git.pleroma.social/pleroma/admin-fe/-/jobs/artifacts/${ref}/download?job=build",
"ref" => "develop"
},
"soapbox-fe" => %{
"name" => "soapbox-fe",
"git" => "https://gitlab.com/soapbox-pub/soapbox-fe",
"build_url" =>
"https://gitlab.com/soapbox-pub/soapbox-fe/-/jobs/artifacts/${ref}/download?job=build-production",
"ref" => "v1.0.0",
"build_dir" => "static"
}
}
config :pleroma, :web_cache_ttl,
activity_pub: nil,
activity_pub_question: 30_000
config :pleroma, :modules, runtime_dir: "instance/modules"
config :pleroma, configurable_from_database: false
config :pleroma, Pleroma.Repo,
parameters: [gin_fuzzy_search_limit: "500"],
prepare: :unnamed
config :pleroma, :connections_pool,
reclaim_multiplier: 0.1,
connection_acquisition_wait: 250,
connection_acquisition_retries: 5,
max_connections: 250,
max_idle_time: 30_000,
retry: 0,
connect_timeout: 5_000
config :pleroma, :pools,
federation: [
size: 50,
max_waiting: 10,
recv_timeout: 10_000
],
media: [
size: 50,
max_waiting: 20,
recv_timeout: 15_000
],
upload: [
size: 25,
max_waiting: 5,
recv_timeout: 15_000
],
default: [
size: 10,
max_waiting: 2,
recv_timeout: 5_000
]
config :pleroma, :hackney_pools,
federation: [
max_connections: 50,
timeout: 150_000
],
media: [
max_connections: 50,
timeout: 150_000
],
upload: [
max_connections: 25,
timeout: 300_000
]
config :pleroma, :majic_pool, size: 2
private_instance? = :if_instance_is_private
config :pleroma, :restrict_unauthenticated,
timelines: %{local: private_instance?, federated: private_instance?},
profiles: %{local: private_instance?, remote: private_instance?},
activities: %{local: private_instance?, remote: private_instance?}
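# Note (assumption based on the placeholder above): :if_instance_is_private is
# resolved at runtime, so these restrictions effectively apply only when the
# instance is not public (see the :public key under :instance).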
config :pleroma, Pleroma.Web.ApiSpec.CastAndValidate, strict: false
config :pleroma, :mrf,
policies: [Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy, Pleroma.Web.ActivityPub.MRF.TagPolicy],
transparency: true,
transparency_exclusions: []
config :tzdata, :http_client, Pleroma.HTTP.Tzdata
config :ex_aws, http_client: Pleroma.HTTP.ExAws
config :web_push_encryption, http_client: Pleroma.HTTP.WebPush
config :pleroma, :instances_favicons, enabled: false
config :floki, :html_parser, Floki.HTMLParser.FastHtml
config :pleroma, Pleroma.Web.Auth.Authenticator, Pleroma.Web.Auth.PleromaAuthenticator
config :pleroma, Pleroma.User.Backup,
purge_after_days: 30,
limit_days: 7,
dir: nil
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
diff --git a/config/description.exs b/config/description.exs
index 0552b37e0..a663d8127 100644
--- a/config/description.exs
+++ b/config/description.exs
@@ -1,3462 +1,3449 @@
use Mix.Config
websocket_config = [
path: "/websocket",
serializer: [
{Phoenix.Socket.V1.JSONSerializer, "~> 1.0.0"},
{Phoenix.Socket.V2.JSONSerializer, "~> 2.0.0"}
],
timeout: 60_000,
transport_log: false,
compress: false
]
installed_frontend_options = [
%{
key: "name",
label: "Name",
type: :string,
description:
"Name of the installed frontend. Valid config must include both `Name` and `Reference` values."
},
%{
key: "ref",
label: "Reference",
type: :string,
description:
"Reference of the installed frontend to be used. Valid config must include both `Name` and `Reference` values."
}
]
frontend_options = [
%{
key: "name",
label: "Name",
type: :string,
description: "Name of the frontend."
},
%{
key: "ref",
label: "Reference",
type: :string,
description: "Reference of the frontend to be used."
},
%{
key: "git",
label: "Git Repository URL",
type: :string,
description: "URL of the git repository of the frontend"
},
%{
key: "build_url",
label: "Build URL",
type: :string,
description:
"Either an url to a zip file containing the frontend or a template to build it by inserting the `ref`. The string `${ref}` will be replaced by the configured `ref`.",
example: "https://some.url/builds/${ref}.zip"
},
%{
key: "build_dir",
label: "Build directory",
type: :string,
description: "The directory inside the zip file "
}
]
config :pleroma, :config_description, [
%{
group: :pleroma,
key: Pleroma.Upload,
type: :group,
description: "Upload general settings",
children: [
%{
key: :uploader,
type: :module,
description: "Module which will be used for uploads",
suggestions: {:list_behaviour_implementations, Pleroma.Uploaders.Uploader}
},
%{
key: :filters,
type: {:list, :module},
description:
"List of filter modules for uploads. Module names are shortened (removed leading `Pleroma.Upload.Filter.` part), but on adding custom module you need to use full name.",
suggestions: {:list_behaviour_implementations, Pleroma.Upload.Filter}
},
%{
key: :link_name,
type: :boolean,
description:
"If enabled, a name parameter will be added to the URL of the upload. For example `https://instance.tld/media/imagehash.png?name=realname.png`."
},
%{
key: :base_url,
label: "Base URL",
type: :string,
description: "Base URL for the uploads, needed if you use CDN",
suggestions: [
"https://cdn-host.com"
]
},
%{
key: :proxy_remote,
type: :boolean,
description:
"If enabled, requests to media stored using a remote uploader will be proxied instead of being redirected"
},
%{
key: :proxy_opts,
label: "Proxy Options",
type: :keyword,
description: "Options for Pleroma.ReverseProxy",
suggestions: [
redirect_on_failure: false,
max_body_length: 25 * 1_048_576,
http: [
follow_redirect: true,
pool: :media
]
],
children: [
%{
key: :redirect_on_failure,
type: :boolean,
description:
"Redirects the client to the real remote URL if there's any HTTP errors. " <>
"Any error during body processing will not be redirected as the response is chunked."
},
%{
key: :max_body_length,
type: :integer,
description:
"Limits the content length to be approximately the " <>
"specified length. It is validated with the `content-length` header and also verified when proxying."
},
%{
key: :http,
label: "HTTP",
type: :keyword,
description: "HTTP options",
children: [
%{
key: :adapter,
type: :keyword,
description: "Adapter specific options",
children: [
%{
key: :ssl_options,
type: :keyword,
label: "SSL Options",
description: "SSL options for HTTP adapter",
children: [
%{
key: :versions,
type: {:list, :atom},
description: "List of TLS versions to use",
suggestions: [:tlsv1, ":tlsv1.1", ":tlsv1.2"]
}
]
}
]
},
%{
key: :proxy_url,
label: "Proxy URL",
type: [:string, :tuple],
description: "Proxy URL",
suggestions: ["127.0.0.1:8123", {:socks5, :localhost, 9050}]
}
]
}
]
},
%{
key: :filename_display_max_length,
type: :integer,
description: "Set max length of a filename to display. 0 = no limit. Default: 30"
}
]
},
%{
group: :pleroma,
key: Pleroma.Uploaders.Local,
type: :group,
description: "Local uploader-related settings",
children: [
%{
key: :uploads,
type: :string,
description: "Path where user's uploads will be saved",
suggestions: [
"uploads"
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Uploaders.S3,
type: :group,
description: "S3 uploader-related settings",
children: [
%{
key: :bucket,
type: :string,
description: "S3 bucket",
suggestions: [
"bucket"
]
},
%{
key: :bucket_namespace,
type: :string,
description: "S3 bucket namespace",
suggestions: ["pleroma"]
},
%{
key: :public_endpoint,
type: :string,
description: "S3 endpoint",
suggestions: ["https://s3.amazonaws.com"]
},
%{
key: :truncated_namespace,
type: :string,
description:
"If you use S3 compatible service such as Digital Ocean Spaces or CDN, set folder name or \"\" etc." <>
" For example, when using CDN to S3 virtual host format, set \"\". At this time, write CNAME to CDN in public_endpoint."
},
%{
key: :streaming_enabled,
type: :boolean,
description:
"Enable streaming uploads, when enabled the file will be sent to the server in chunks as it's being read. This may be unsupported by some providers, try disabling this if you have upload problems."
}
]
},
%{
group: :pleroma,
key: Pleroma.Upload.Filter.Mogrify,
type: :group,
description: "Uploads mogrify filter settings",
children: [
%{
key: :args,
type: [:string, {:list, :string}, {:list, :tuple}],
description:
"List of actions for the mogrify command. It's possible to add self-written settings as string. " <>
"For example `auto-orient, strip, {\"resize\", \"3840x1080>\"}` value will be parsed into valid list of the settings.",
suggestions: [
"strip",
"auto-orient",
{"implode", "1"}
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Upload.Filter.AnonymizeFilename,
type: :group,
description: "Filter replaces the filename of the upload",
children: [
%{
key: :text,
type: :string,
description:
"Text to replace filenames in links. If no setting, {random}.extension will be used. You can get the original" <>
" filename extension by using {extension}, for example custom-file-name.{extension}.",
suggestions: [
"custom-file-name.{extension}"
]
}
]
},
- %{
- group: :pleroma,
- key: :fed_sockets,
- type: :group,
- description: "Websocket based federation",
- children: [
- %{
- key: :enabled,
- type: :boolean,
- description: "Enable FedSockets"
- }
- ]
- },
%{
group: :pleroma,
key: Pleroma.Emails.Mailer,
type: :group,
description: "Mailer-related settings",
children: [
%{
key: :adapter,
type: :module,
description:
"One of the mail adapters listed in [Swoosh readme](https://github.com/swoosh/swoosh#adapters)," <>
" or Swoosh.Adapters.Local for in-memory mailbox",
suggestions: [
Swoosh.Adapters.SMTP,
Swoosh.Adapters.Sendgrid,
Swoosh.Adapters.Sendmail,
Swoosh.Adapters.Mandrill,
Swoosh.Adapters.Mailgun,
Swoosh.Adapters.Mailjet,
Swoosh.Adapters.Postmark,
Swoosh.Adapters.SparkPost,
Swoosh.Adapters.AmazonSES,
Swoosh.Adapters.Dyn,
Swoosh.Adapters.SocketLabs,
Swoosh.Adapters.Gmail,
Swoosh.Adapters.Local
]
},
%{
key: :enabled,
type: :boolean,
description: "Allow/disallow send emails"
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :relay,
type: :string,
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
suggestions: ["smtp.gmail.com"]
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :username,
type: :string,
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
suggestions: ["pleroma"]
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :password,
type: :string,
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
suggestions: ["password"]
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :ssl,
label: "SSL",
type: :boolean,
description: "`Swoosh.Adapters.SMTP` adapter specific setting"
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :tls,
label: "TLS",
type: :atom,
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
suggestions: [:always, :never, :if_available]
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :auth,
type: :atom,
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
suggestions: [:always, :never, :if_available]
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :port,
type: :integer,
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
suggestions: [1025]
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :retries,
type: :integer,
description: "`Swoosh.Adapters.SMTP` adapter specific setting",
suggestions: [5]
},
%{
group: {:subgroup, Swoosh.Adapters.SMTP},
key: :no_mx_lookups,
label: "No MX lookups",
type: :boolean,
description: "`Swoosh.Adapters.SMTP` adapter specific setting"
},
%{
group: {:subgroup, Swoosh.Adapters.Sendgrid},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.Sendgrid` adapter specific setting",
suggestions: ["my-api-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.Sendmail},
key: :cmd_path,
type: :string,
description: "`Swoosh.Adapters.Sendmail` adapter specific setting",
suggestions: ["/usr/bin/sendmail"]
},
%{
group: {:subgroup, Swoosh.Adapters.Sendmail},
key: :cmd_args,
type: :string,
description: "`Swoosh.Adapters.Sendmail` adapter specific setting",
suggestions: ["-N delay,failure,success"]
},
%{
group: {:subgroup, Swoosh.Adapters.Sendmail},
key: :qmail,
type: :boolean,
description: "`Swoosh.Adapters.Sendmail` adapter specific setting"
},
%{
group: {:subgroup, Swoosh.Adapters.Mandrill},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.Mandrill` adapter specific setting",
suggestions: ["my-api-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.Mailgun},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.Mailgun` adapter specific setting",
suggestions: ["my-api-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.Mailgun},
key: :domain,
type: :string,
description: "`Swoosh.Adapters.Mailgun` adapter specific setting",
suggestions: ["pleroma.com"]
},
%{
group: {:subgroup, Swoosh.Adapters.Mailjet},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.Mailjet` adapter specific setting",
suggestions: ["my-api-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.Mailjet},
key: :secret,
type: :string,
description: "`Swoosh.Adapters.Mailjet` adapter specific setting",
suggestions: ["my-secret-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.Postmark},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.Postmark` adapter specific setting",
suggestions: ["my-api-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.SparkPost},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.SparkPost` adapter specific setting",
suggestions: ["my-api-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.SparkPost},
key: :endpoint,
type: :string,
description: "`Swoosh.Adapters.SparkPost` adapter specific setting",
suggestions: ["https://api.sparkpost.com/api/v1"]
},
%{
group: {:subgroup, Swoosh.Adapters.AmazonSES},
key: :region,
type: :string,
description: "`Swoosh.Adapters.AmazonSES` adapter specific setting",
suggestions: ["us-east-1", "us-east-2"]
},
%{
group: {:subgroup, Swoosh.Adapters.AmazonSES},
key: :access_key,
type: :string,
description: "`Swoosh.Adapters.AmazonSES` adapter specific setting",
suggestions: ["aws-access-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.AmazonSES},
key: :secret,
type: :string,
description: "`Swoosh.Adapters.AmazonSES` adapter specific setting",
suggestions: ["aws-secret-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.Dyn},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.Dyn` adapter specific setting",
suggestions: ["my-api-key"]
},
%{
group: {:subgroup, Swoosh.Adapters.SocketLabs},
key: :server_id,
type: :string,
description: "`Swoosh.Adapters.SocketLabs` adapter specific setting"
},
%{
group: {:subgroup, Swoosh.Adapters.SocketLabs},
key: :api_key,
label: "API key",
type: :string,
description: "`Swoosh.Adapters.SocketLabs` adapter specific setting"
},
%{
group: {:subgroup, Swoosh.Adapters.Gmail},
key: :access_token,
type: :string,
description: "`Swoosh.Adapters.Gmail` adapter specific setting"
}
]
},
%{
group: :swoosh,
type: :group,
description: "`Swoosh.Adapters.Local` adapter specific settings",
children: [
%{
group: {:subgroup, Swoosh.Adapters.Local},
key: :serve_mailbox,
type: :boolean,
description: "Run the preview server together as part of your app"
},
%{
group: {:subgroup, Swoosh.Adapters.Local},
key: :preview_port,
type: :integer,
description: "The preview server port",
suggestions: [4001]
}
]
},
%{
group: :pleroma,
key: :uri_schemes,
label: "URI Schemes",
type: :group,
description: "URI schemes related settings",
children: [
%{
key: :valid_schemes,
type: {:list, :string},
description: "List of the scheme part that is considered valid to be an URL",
suggestions: [
"https",
"http",
"dat",
"dweb",
"gopher",
"hyper",
"ipfs",
"ipns",
"irc",
"ircs",
"magnet",
"mailto",
"mumble",
"ssb",
"xmpp"
]
}
]
},
%{
group: :pleroma,
key: :instance,
type: :group,
description: "Instance-related settings",
children: [
%{
key: :name,
type: :string,
description: "Name of the instance",
suggestions: [
"Pleroma"
]
},
%{
key: :email,
label: "Admin Email Address",
type: :string,
description: "Email used to reach an Administrator/Moderator of the instance",
suggestions: [
"email@example.com"
]
},
%{
key: :notify_email,
label: "Sender Email Address",
type: :string,
description: "Envelope FROM address for mail sent via Pleroma",
suggestions: [
"notify@example.com"
]
},
%{
key: :description,
type: :string,
description:
"The instance's description. It can be seen in nodeinfo and `/api/v1/instance`",
suggestions: [
"Very cool instance"
]
},
%{
key: :limit,
type: :integer,
description: "Posts character limit (CW/Subject included in the counter)",
suggestions: [
5_000
]
},
%{
key: :chat_limit,
type: :integer,
description: "Character limit of the instance chat messages",
suggestions: [
5_000
]
},
%{
key: :remote_limit,
type: :integer,
description: "Hard character limit beyond which remote posts will be dropped",
suggestions: [
100_000
]
},
%{
key: :upload_limit,
type: :integer,
description: "File size limit of uploads (except for avatar, background, banner)",
suggestions: [
16_000_000
]
},
%{
key: :avatar_upload_limit,
type: :integer,
description: "File size limit of user's profile avatars",
suggestions: [
2_000_000
]
},
%{
key: :background_upload_limit,
type: :integer,
description: "File size limit of user's profile backgrounds",
suggestions: [
4_000_000
]
},
%{
key: :banner_upload_limit,
type: :integer,
description: "File size limit of user's profile banners",
suggestions: [
4_000_000
]
},
%{
key: :poll_limits,
type: :map,
description: "A map with poll limits for local polls",
suggestions: [
%{
max_options: 20,
max_option_chars: 200,
min_expiration: 0,
max_expiration: 31_536_000
}
],
children: [
%{
key: :max_options,
type: :integer,
description: "Maximum number of options",
suggestions: [20]
},
%{
key: :max_option_chars,
type: :integer,
description: "Maximum number of characters per option",
suggestions: [200]
},
%{
key: :min_expiration,
type: :integer,
description: "Minimum expiration time (in seconds)",
suggestions: [0]
},
%{
key: :max_expiration,
type: :integer,
description: "Maximum expiration time (in seconds)",
suggestions: [3600]
}
]
},
%{
key: :registrations_open,
type: :boolean,
description:
"Enable registrations for anyone. Invitations require this setting to be disabled."
},
%{
key: :invites_enabled,
type: :boolean,
description:
"Enable user invitations for admins (depends on `registrations_open` being disabled)"
},
%{
key: :account_activation_required,
type: :boolean,
description: "Require users to confirm their emails before signing in"
},
%{
key: :account_approval_required,
type: :boolean,
description: "Require users to be manually approved by an admin before signing in"
},
%{
key: :federating,
type: :boolean,
description: "Enable federation with other instances"
},
%{
key: :federation_incoming_replies_max_depth,
label: "Fed. incoming replies max depth",
type: :integer,
description:
"Max. depth of reply-to and reply activities fetching on incoming federation, to prevent out-of-memory situations while" <>
" fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.",
suggestions: [
100
]
},
%{
key: :federation_reachability_timeout_days,
label: "Fed. reachability timeout days",
type: :integer,
description:
"Timeout (in days) of each external federation target being unreachable prior to pausing federating to it",
suggestions: [
7
]
},
%{
key: :allow_relay,
type: :boolean,
description: "Enable Pleroma's Relay, which makes it possible to follow a whole instance"
},
%{
key: :public,
type: :boolean,
description:
"Makes the client API in authenticated mode-only except for user-profiles." <>
" Useful for disabling the Local Timeline and The Whole Known Network. " <>
" Note: when setting to `false`, please also check `:restrict_unauthenticated` setting."
},
%{
key: :quarantined_instances,
type: {:list, :string},
description:
"List of ActivityPub instances where private (DMs, followers-only) activities will not be sent",
suggestions: [
"quarantined.com",
"*.quarantined.com"
]
},
%{
key: :static_dir,
type: :string,
description: "Instance static directory",
suggestions: [
"instance/static/"
]
},
%{
key: :allowed_post_formats,
type: {:list, :string},
description: "MIME-type list of formats allowed to be posted (transformed into HTML)",
suggestions: [
"text/plain",
"text/html",
"text/markdown",
"text/bbcode"
]
},
%{
key: :extended_nickname_format,
type: :boolean,
description:
"Enable to use extended local nicknames format (allows underscores/dashes)." <>
" This will break federation with older software for theses nicknames."
},
%{
key: :cleanup_attachments,
type: :boolean,
description: """
Enable to remove associated attachments when status is removed.
This will not affect duplicates and attachments without status.
Enabling this will increase load to database when deleting statuses on larger instances.
"""
},
%{
key: :max_pinned_statuses,
type: :integer,
description: "The maximum number of pinned statuses. 0 will disable the feature.",
suggestions: [
0,
1,
3
]
},
%{
key: :autofollowed_nicknames,
type: {:list, :string},
description:
"Set to nicknames of (local) users that every new user should automatically follow"
},
%{
key: :autofollowing_nicknames,
type: {:list, :string},
description:
"Set to nicknames of (local) users that automatically follows every newly registered user"
},
%{
key: :attachment_links,
type: :boolean,
description: "Enable to automatically add attachment link text to statuses"
},
%{
key: :max_report_comment_size,
type: :integer,
description: "The maximum size of the report comment. Default: 1000.",
suggestions: [
1_000
]
},
%{
key: :safe_dm_mentions,
label: "Safe DM mentions",
type: :boolean,
description:
"If enabled, only mentions at the beginning of a post will be used to address people in direct messages." <>
" This is to prevent accidental mentioning of people when talking about them (e.g. \"@admin please keep an eye on @bad_actor\")." <>
" Default: disabled"
},
%{
key: :healthcheck,
type: :boolean,
description: "If enabled, system data will be shown on `/api/pleroma/healthcheck`"
},
%{
key: :remote_post_retention_days,
type: :integer,
description:
"The default amount of days to retain remote posts when pruning the database",
suggestions: [
90
]
},
%{
key: :user_bio_length,
type: :integer,
description: "A user bio maximum length. Default: 5000.",
suggestions: [
5_000
]
},
%{
key: :user_name_length,
type: :integer,
description: "A user name maximum length. Default: 100.",
suggestions: [
100
]
},
%{
key: :skip_thread_containment,
type: :boolean,
description: "Skip filtering out broken threads. Default: enabled."
},
%{
key: :limit_to_local_content,
type: {:dropdown, :atom},
description:
"Limit unauthenticated users to search for local statutes and users only. Default: `:unauthenticated`.",
suggestions: [
:unauthenticated,
:all,
false
]
},
%{
key: :max_account_fields,
type: :integer,
description: "The maximum number of custom fields in the user profile. Default: 10.",
suggestions: [
10
]
},
%{
key: :max_remote_account_fields,
type: :integer,
description:
"The maximum number of custom fields in the remote user profile. Default: 20.",
suggestions: [
20
]
},
%{
key: :account_field_name_length,
type: :integer,
description: "An account field name maximum length. Default: 512.",
suggestions: [
512
]
},
%{
key: :account_field_value_length,
type: :integer,
description: "An account field value maximum length. Default: 2048.",
suggestions: [
2048
]
},
%{
key: :registration_reason_length,
type: :integer,
description: "Maximum registration reason length. Default: 500.",
suggestions: [
500
]
},
%{
key: :external_user_synchronization,
type: :boolean,
description: "Enabling following/followers counters synchronization for external users"
},
%{
key: :multi_factor_authentication,
type: :keyword,
description: "Multi-factor authentication settings",
suggestions: [
[
totp: [digits: 6, period: 30],
backup_codes: [number: 5, length: 16]
]
],
children: [
%{
key: :totp,
label: "TOTP settings",
type: :keyword,
description: "TOTP settings",
suggestions: [digits: 6, period: 30],
children: [
%{
key: :digits,
type: :integer,
suggestions: [6],
description:
"Determines the length of a one-time pass-code, in characters. Defaults to 6 characters."
},
%{
key: :period,
type: :integer,
suggestions: [30],
description:
"A period for which the TOTP code will be valid, in seconds. Defaults to 30 seconds."
}
]
},
%{
key: :backup_codes,
type: :keyword,
description: "MFA backup codes settings",
suggestions: [number: 5, length: 16],
children: [
%{
key: :number,
type: :integer,
suggestions: [5],
description: "Number of backup codes to generate."
},
%{
key: :length,
type: :integer,
suggestions: [16],
description:
"Determines the length of backup one-time pass-codes, in characters. Defaults to 16 characters."
}
]
}
]
},
%{
key: :instance_thumbnail,
type: {:string, :image},
description:
"The instance thumbnail can be any image that represents your instance and is used by some apps or services when they display information about your instance.",
suggestions: ["/instance/thumbnail.jpeg"]
},
%{
key: :show_reactions,
type: :boolean,
description: "Let favourites and emoji reactions be viewed through the API."
}
]
},
%{
group: :pleroma,
key: :welcome,
type: :group,
description: "Welcome messages settings",
children: [
%{
key: :direct_message,
type: :keyword,
descpiption: "Direct message settings",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables sending a direct message to newly registered users"
},
%{
key: :message,
type: :string,
description: "A message that will be sent to newly registered users",
suggestions: [
"Hi, @username! Welcome on board!"
]
},
%{
key: :sender_nickname,
type: :string,
description: "The nickname of the local user that sends a welcome message",
suggestions: [
"lain"
]
}
]
},
%{
key: :chat_message,
type: :keyword,
descpiption: "Chat message settings",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables sending a chat message to newly registered users"
},
%{
key: :message,
type: :string,
description:
"A message that will be sent to newly registered users as a chat message",
suggestions: [
"Hello, welcome on board!"
]
},
%{
key: :sender_nickname,
type: :string,
description: "The nickname of the local user that sends a welcome chat message",
suggestions: [
"lain"
]
}
]
},
%{
key: :email,
type: :keyword,
descpiption: "Email message settings",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables sending an email to newly registered users"
},
%{
key: :sender,
type: [:string, :tuple],
description:
"Email address and/or nickname that will be used to send the welcome email.",
suggestions: [
{"Pleroma App", "welcome@pleroma.app"}
]
},
%{
key: :subject,
type: :string,
description:
"Subject of the welcome email. EEX template with user and instance_name variables can be used.",
suggestions: ["Welcome to <%= instance_name%>"]
},
%{
key: :html,
type: :string,
description:
"HTML content of the welcome email. EEX template with user and instance_name variables can be used.",
suggestions: ["<h1>Hello <%= user.name%>. Welcome to <%= instance_name%></h1>"]
},
%{
key: :text,
type: :string,
description:
"Text content of the welcome email. EEX template with user and instance_name variables can be used.",
suggestions: ["Hello <%= user.name%>. \n Welcome to <%= instance_name%>\n"]
}
]
}
]
},
%{
group: :logger,
type: :group,
description: "Logger-related settings",
children: [
%{
key: :backends,
type: [:atom, :tuple, :module],
description:
"Where logs will be sent, :console - send logs to stdout, { ExSyslogger, :ex_syslogger } - to syslog, Quack.Logger - to Slack.",
suggestions: [:console, {ExSyslogger, :ex_syslogger}, Quack.Logger]
}
]
},
%{
group: :logger,
type: :group,
key: :ex_syslogger,
label: "ExSyslogger",
description: "ExSyslogger-related settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warn, :error]
},
%{
key: :ident,
type: :string,
description:
"A string that's prepended to every message, and is typically set to the app name",
suggestions: ["pleroma"]
},
%{
key: :format,
type: :string,
description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
suggestions: ["$metadata[$level] $message"]
},
%{
key: :metadata,
type: {:list, :atom},
suggestions: [:request_id]
}
]
},
%{
group: :logger,
type: :group,
key: :console,
label: "Console Logger",
description: "Console logger settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warn, :error]
},
%{
key: :format,
type: :string,
description: "Default: \"$date $time [$level] $levelpad$node $metadata $message\"",
suggestions: ["$metadata[$level] $message"]
},
%{
key: :metadata,
type: {:list, :atom},
suggestions: [:request_id]
}
]
},
%{
group: :quack,
type: :group,
label: "Quack Logger",
description: "Quack-related settings",
children: [
%{
key: :level,
type: {:dropdown, :atom},
description: "Log level",
suggestions: [:debug, :info, :warn, :error]
},
%{
key: :meta,
type: {:list, :atom},
description: "Configure which metadata you want to report on",
suggestions: [
:application,
:module,
:file,
:function,
:line,
:pid,
:crash_reason,
:initial_call,
:registered_name,
:all,
:none
]
},
%{
key: :webhook_url,
label: "Webhook URL",
type: :string,
description: "Configure the Slack incoming webhook",
suggestions: ["https://hooks.slack.com/services/YOUR-KEY-HERE"]
}
]
},
%{
group: :pleroma,
key: :frontend_configurations,
type: :group,
description:
"This form can be used to configure a keyword list that keeps the configuration data for any " <>
"kind of frontend. By default, settings for pleroma_fe and masto_fe are configured. If you want to " <>
"add your own configuration your settings all fields must be complete.",
children: [
%{
key: :pleroma_fe,
label: "Pleroma FE",
type: :map,
description: "Settings for Pleroma FE",
suggestions: [
%{
alwaysShowSubjectInput: true,
background: "/static/aurora_borealis.jpg",
collapseMessageWithSubject: false,
disableChat: false,
greentext: false,
hideFilteredStatuses: false,
hideMutedPosts: false,
hidePostStats: false,
hideSitename: false,
hideUserStats: false,
loginMethod: "password",
logo: "/static/logo.png",
logoMargin: ".1em",
logoMask: true,
minimalScopesMode: false,
noAttachmentLinks: false,
nsfwCensorImage: "/static/img/nsfw.74818f9.png",
postContentType: "text/plain",
redirectRootLogin: "/main/friends",
redirectRootNoLogin: "/main/all",
scopeCopy: true,
sidebarRight: false,
showFeaturesPanel: true,
showInstanceSpecificPanel: false,
subjectLineBehavior: "email",
theme: "pleroma-dark",
webPushNotifications: false
}
],
children: [
%{
key: :alwaysShowSubjectInput,
label: "Always show subject input",
type: :boolean,
description: "When disabled, auto-hide the subject field if it's empty"
},
%{
key: :background,
type: {:string, :image},
description:
"URL of the background, unless viewing a user profile with a background that is set",
suggestions: ["/images/city.jpg"]
},
%{
key: :collapseMessageWithSubject,
label: "Collapse message with subject",
type: :boolean,
description:
"When a message has a subject (aka Content Warning), collapse it by default"
},
%{
key: :disableChat,
label: "PleromaFE Chat",
type: :boolean,
description: "Disables PleromaFE Chat component"
},
%{
key: :greentext,
label: "Greentext",
type: :boolean,
description: "Enables green text on lines prefixed with the > character"
},
%{
key: :hideFilteredStatuses,
label: "Hide Filtered Statuses",
type: :boolean,
description: "Hides filtered statuses from timelines"
},
%{
key: :hideMutedPosts,
label: "Hide Muted Posts",
type: :boolean,
description: "Hides muted statuses from timelines"
},
%{
key: :hidePostStats,
label: "Hide post stats",
type: :boolean,
description: "Hide notices statistics (repeats, favorites, ...)"
},
%{
key: :hideSitename,
label: "Hide Sitename",
type: :boolean,
description: "Hides instance name from PleromaFE banner"
},
%{
key: :hideUserStats,
label: "Hide user stats",
type: :boolean,
description:
"Hide profile statistics (posts, posts per day, followers, followings, ...)"
},
%{
key: :logo,
type: {:string, :image},
description: "URL of the logo, defaults to Pleroma's logo",
suggestions: ["/static/logo.png"]
},
%{
key: :logoMargin,
label: "Logo margin",
type: :string,
description:
"Allows you to adjust vertical margins between logo boundary and navbar borders. " <>
"The idea is that to have logo's image without any extra margins and instead adjust them to your need in layout.",
suggestions: [".1em"]
},
%{
key: :logoMask,
label: "Logo mask",
type: :boolean,
description:
"By default it assumes logo used will be monochrome with alpha channel to be compatible with both light and dark themes. " <>
"If you want a colorful logo you must disable logoMask."
},
%{
key: :minimalScopesMode,
label: "Minimal scopes mode",
type: :boolean,
description:
"Limit scope selection to Direct, User default, and Scope of post replying to. " <>
"Also prevents replying to a DM with a public post from PleromaFE."
},
%{
key: :nsfwCensorImage,
label: "NSFW Censor Image",
type: {:string, :image},
description:
"URL of the image to use for hiding NSFW media attachments in the timeline",
suggestions: ["/static/img/nsfw.74818f9.png"]
},
%{
key: :postContentType,
label: "Post Content Type",
type: {:dropdown, :atom},
description: "Default post formatting option",
suggestions: ["text/plain", "text/html", "text/markdown", "text/bbcode"]
},
%{
key: :redirectRootNoLogin,
label: "Redirect root no login",
type: :string,
description:
"Relative URL which indicates where to redirect when a user isn't logged in",
suggestions: ["/main/all"]
},
%{
key: :redirectRootLogin,
label: "Redirect root login",
type: :string,
description:
"Relative URL which indicates where to redirect when a user is logged in",
suggestions: ["/main/friends"]
},
%{
key: :scopeCopy,
label: "Scope copy",
type: :boolean,
description: "Copy the scope (private/unlisted/public) in replies to posts by default"
},
%{
key: :sidebarRight,
label: "Sidebar on Right",
type: :boolean,
description: "Change alignment of sidebar and panels to the right"
},
%{
key: :showFeaturesPanel,
label: "Show instance features panel",
type: :boolean,
description:
"Enables panel displaying functionality of the instance on the About page"
},
%{
key: :showInstanceSpecificPanel,
label: "Show instance specific panel",
type: :boolean,
description: "Whether to show the instance's custom panel"
},
%{
key: :subjectLineBehavior,
label: "Subject line behavior",
type: :string,
description: "Allows changing the default behaviour of subject lines in replies.
`email`: copy and prepend re:, as in email,
`masto`: copy verbatim, as in Mastodon,
`noop`: don't copy the subject.",
suggestions: ["email", "masto", "noop"]
},
%{
key: :theme,
type: :string,
description: "Which theme to use. Available themes are defined in styles.json",
suggestions: ["pleroma-dark"]
}
]
},
%{
key: :masto_fe,
label: "Masto FE",
type: :map,
description: "Settings for Masto FE",
suggestions: [
%{
showInstanceSpecificPanel: true
}
],
children: [
%{
key: :showInstanceSpecificPanel,
label: "Show instance specific panel",
type: :boolean,
description: "Whenether to show the instance's specific panel"
}
]
}
]
},
%{
group: :pleroma,
key: :assets,
type: :group,
description:
"This section configures assets to be used with various frontends. Currently the only option relates to mascots on the mastodon frontend",
children: [
%{
key: :mascots,
type: {:keyword, :map},
description:
"Keyword of mascots, each element must contain both an URL and a mime_type key",
suggestions: [
pleroma_fox_tan: %{
url: "/images/pleroma-fox-tan-smol.png",
mime_type: "image/png"
},
pleroma_fox_tan_shy: %{
url: "/images/pleroma-fox-tan-shy.png",
mime_type: "image/png"
}
]
},
%{
key: :default_mascot,
type: :atom,
description:
"This will be used as the default mascot on MastoFE. Default: `:pleroma_fox_tan`",
suggestions: [
:pleroma_fox_tan
]
},
%{
key: :default_user_avatar,
type: {:string, :image},
description: "URL of the default user avatar",
suggestions: ["/images/avi.png"]
}
]
},
%{
group: :pleroma,
key: :manifest,
type: :group,
description:
"This section describe PWA manifest instance-specific values. Currently this option relate only for MastoFE.",
children: [
%{
key: :icons,
type: {:list, :map},
description: "Describe the icons of the app",
suggestions: [
%{
src: "/static/logo.png"
},
%{
src: "/static/icon.png",
type: "image/png"
},
%{
src: "/static/icon.ico",
sizes: "72x72 96x96 128x128 256x256"
}
]
},
%{
key: :theme_color,
type: :string,
description: "Describe the theme color of the app",
suggestions: ["#282c37", "mediumpurple"]
},
%{
key: :background_color,
type: :string,
description: "Describe the background color of the app",
suggestions: ["#191b22", "aliceblue"]
}
]
},
%{
group: :pleroma,
key: :media_proxy,
type: :group,
description: "Media proxy",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables proxying of remote media to the instance's proxy"
},
%{
key: :base_url,
label: "Base URL",
type: :string,
description:
"The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.",
suggestions: ["https://example.com"]
},
%{
key: :invalidation,
type: :keyword,
descpiption: "",
suggestions: [
enabled: true,
provider: Pleroma.Web.MediaProxy.Invalidation.Script
],
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables media cache object invalidation."
},
%{
key: :provider,
type: :module,
description: "Module which will be used to purge objects from the cache.",
suggestions: [
Pleroma.Web.MediaProxy.Invalidation.Script,
Pleroma.Web.MediaProxy.Invalidation.Http
]
}
]
},
%{
key: :proxy_opts,
label: "Proxy Options",
type: :keyword,
description: "Options for Pleroma.ReverseProxy",
suggestions: [
redirect_on_failure: false,
max_body_length: 25 * 1_048_576,
max_read_duration: 30_000,
http: [
follow_redirect: true,
pool: :media
]
],
children: [
%{
key: :redirect_on_failure,
type: :boolean,
description:
"Redirects the client to the real remote URL if there's any HTTP errors. " <>
"Any error during body processing will not be redirected as the response is chunked."
},
%{
key: :max_body_length,
type: :integer,
description:
"Limits the content length to be approximately the " <>
"specified length. It is validated with the `content-length` header and also verified when proxying."
},
%{
key: :max_read_duration,
type: :integer,
description: "Timeout (in milliseconds) of GET request to remote URI."
},
%{
key: :http,
label: "HTTP",
type: :keyword,
description: "HTTP options",
children: [
%{
key: :adapter,
type: :keyword,
description: "Adapter specific options",
children: [
%{
key: :ssl_options,
type: :keyword,
label: "SSL Options",
description: "SSL options for HTTP adapter",
children: [
%{
key: :versions,
type: {:list, :atom},
description: "List of TLS version to use",
suggestions: [:tlsv1, ":tlsv1.1", ":tlsv1.2"]
}
]
}
]
},
%{
key: :proxy_url,
label: "Proxy URL",
type: [:string, :tuple],
description: "Proxy URL",
suggestions: ["127.0.0.1:8123", {:socks5, :localhost, 9050}]
}
]
}
]
},
%{
key: :whitelist,
type: {:list, :string},
description: "List of hosts with scheme to bypass the mediaproxy",
suggestions: ["http://example.com"]
}
]
},
%{
group: :pleroma,
key: :media_preview_proxy,
type: :group,
description: "Media preview proxy",
children: [
%{
key: :enabled,
type: :boolean,
description:
"Enables proxying of remote media preview to the instance's proxy. Requires enabled media proxy."
},
%{
key: :thumbnail_max_width,
type: :integer,
description:
"Max width of preview thumbnail for images (video preview always has original dimensions)."
},
%{
key: :thumbnail_max_height,
type: :integer,
description:
"Max height of preview thumbnail for images (video preview always has original dimensions)."
},
%{
key: :image_quality,
type: :integer,
description: "Quality of the output. Ranges from 0 (min quality) to 100 (max quality)."
},
%{
key: :min_content_length,
type: :integer,
description:
"Min content length to perform preview, in bytes. If greater than 0, media smaller in size will be served as is, without thumbnailing."
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.MediaProxy.Invalidation.Http,
type: :group,
description: "HTTP invalidate settings",
children: [
%{
key: :method,
type: :atom,
description: "HTTP method of request. Default: :purge"
},
%{
key: :headers,
type: {:keyword, :string},
description: "HTTP headers of request",
suggestions: [{"x-refresh", 1}]
},
%{
key: :options,
type: :keyword,
description: "Request options",
children: [
%{
key: :params,
type: {:map, :string}
}
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.MediaProxy.Invalidation.Script,
type: :group,
description: "Script invalidate settings",
children: [
%{
key: :script_path,
type: :string,
description: "Path to shell script. Which will run purge cache.",
suggestions: ["./installation/nginx-cache-purge.sh.example"]
}
]
},
%{
group: :pleroma,
key: :gopher,
type: :group,
description: "Gopher settings",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables the gopher interface"
},
%{
key: :ip,
label: "IP",
type: :tuple,
description: "IP address to bind to",
suggestions: [{0, 0, 0, 0}]
},
%{
key: :port,
type: :integer,
description: "Port to bind to",
suggestions: [9999]
},
%{
key: :dstport,
type: :integer,
description: "Port advertised in URLs (optional, defaults to port)",
suggestions: [9999]
}
]
},
%{
group: :pleroma,
key: :activitypub,
label: "ActivityPub",
type: :group,
description: "ActivityPub-related settings",
children: [
%{
key: :unfollow_blocked,
type: :boolean,
description: "Whether blocks result in people getting unfollowed"
},
%{
key: :outgoing_blocks,
type: :boolean,
description: "Whether to federate blocks to other instances"
},
%{
key: :sign_object_fetches,
type: :boolean,
description: "Sign object fetches with HTTP signatures"
},
%{
key: :note_replies_output_limit,
type: :integer,
description:
"The number of Note replies' URIs to be included with outgoing federation (`5` to match Mastodon hardcoded value, `0` to disable the output)"
},
%{
key: :follow_handshake_timeout,
type: :integer,
description: "Following handshake timeout",
suggestions: [500]
}
]
},
%{
group: :pleroma,
key: :http_security,
label: "HTTP security",
type: :group,
description: "HTTP security settings",
children: [
%{
key: :enabled,
type: :boolean,
description: "Whether the managed content security policy is enabled"
},
%{
key: :sts,
label: "STS",
type: :boolean,
description: "Whether to additionally send a Strict-Transport-Security header"
},
%{
key: :sts_max_age,
label: "STS max age",
type: :integer,
description: "The maximum age for the Strict-Transport-Security header if sent",
suggestions: [31_536_000]
},
%{
key: :ct_max_age,
label: "CT max age",
type: :integer,
description: "The maximum age for the Expect-CT header if sent",
suggestions: [2_592_000]
},
%{
key: :referrer_policy,
type: :string,
description: "The referrer policy to use, either \"same-origin\" or \"no-referrer\"",
suggestions: ["same-origin", "no-referrer"]
},
%{
key: :report_uri,
label: "Report URI",
type: :string,
description: "Adds the specified URL to report-uri and report-to group in CSP header",
suggestions: ["https://example.com/report-uri"]
}
]
},
%{
group: :web_push_encryption,
key: :vapid_details,
label: "Vapid Details",
type: :group,
description:
"Web Push Notifications configuration. You can use the mix task mix web_push.gen.keypair to generate it.",
children: [
%{
key: :subject,
type: :string,
description:
"A mailto link for the administrative contact." <>
" It's best if this email is not a personal email address, but rather a group email to the instance moderation team.",
suggestions: ["mailto:moderators@pleroma.com"]
},
%{
key: :public_key,
type: :string,
description: "VAPID public key",
suggestions: ["Public key"]
},
%{
key: :private_key,
type: :string,
description: "VAPID private key",
suggestions: ["Private key"]
}
]
},
%{
group: :pleroma,
key: Pleroma.Captcha,
type: :group,
description: "Captcha-related settings",
children: [
%{
key: :enabled,
type: :boolean,
description: "Whether the captcha should be shown on registration"
},
%{
key: :method,
type: :module,
description: "The method/service to use for captcha",
suggestions: [Pleroma.Captcha.Kocaptcha, Pleroma.Captcha.Native]
},
%{
key: :seconds_valid,
type: :integer,
description: "The time in seconds for which the captcha is valid",
suggestions: [60]
}
]
},
%{
group: :pleroma,
key: Pleroma.Captcha.Kocaptcha,
type: :group,
description:
"Kocaptcha is a very simple captcha service with a single API endpoint, the source code is" <>
" here: https://github.com/koto-bank/kocaptcha. The default endpoint (https://captcha.kotobank.ch) is hosted by the developer.",
children: [
%{
key: :endpoint,
type: :string,
description: "The kocaptcha endpoint to use",
suggestions: ["https://captcha.kotobank.ch"]
}
]
},
%{
group: :pleroma,
label: "Pleroma Admin Token",
type: :group,
description:
"Allows setting a token that can be used to authenticate requests with admin privileges without a normal user account token. Append the `admin_token` parameter to requests to utilize it. (Please reconsider using HTTP Basic Auth or OAuth-based authentication if possible)",
children: [
%{
key: :admin_token,
type: :string,
description: "Admin token",
suggestions: [
"Please use a high entropy string or UUID"
]
}
]
},
%{
group: :pleroma,
key: Oban,
type: :group,
description: """
[Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.
Note: if you are running PostgreSQL in [`silent_mode`](https://postgresqlco.nf/en/doc/param/silent_mode?version=9.1),
it's advised to set [`log_destination`](https://postgresqlco.nf/en/doc/param/log_destination?version=9.1) to `syslog`,
otherwise `postmaster.log` file may grow because of "you don't own a lock of type ShareLock" warnings
(see https://github.com/sorentwo/oban/issues/52).
""",
children: [
%{
key: :log,
type: {:dropdown, :atom},
description: "Logs verbose mode",
suggestions: [false, :error, :warn, :info, :debug]
},
%{
key: :queues,
type: {:keyword, :integer},
description:
"Background jobs queues (keys: queues, values: max numbers of concurrent jobs)",
suggestions: [
activity_expiration: 10,
attachments_cleanup: 5,
background: 5,
federator_incoming: 50,
federator_outgoing: 50,
mailer: 10,
scheduled_activities: 10,
transmogrifier: 20,
web_push: 50
],
children: [
%{
key: :activity_expiration,
type: :integer,
description: "Activity expiration queue",
suggestions: [10]
},
%{
key: :backup,
type: :integer,
description: "Backup queue",
suggestions: [1]
},
%{
key: :attachments_cleanup,
type: :integer,
description: "Attachment deletion queue",
suggestions: [5]
},
%{
key: :background,
type: :integer,
description: "Background queue",
suggestions: [5]
},
%{
key: :federator_incoming,
type: :integer,
description: "Incoming federation queue",
suggestions: [50]
},
%{
key: :federator_outgoing,
type: :integer,
description: "Outgoing federation queue",
suggestions: [50]
},
%{
key: :mailer,
type: :integer,
description: "Email sender queue, see Pleroma.Emails.Mailer",
suggestions: [10]
},
%{
key: :scheduled_activities,
type: :integer,
description: "Scheduled activities queue, see Pleroma.ScheduledActivities",
suggestions: [10]
},
%{
key: :transmogrifier,
type: :integer,
description: "Transmogrifier queue",
suggestions: [20]
},
%{
key: :web_push,
type: :integer,
description: "Web push notifications queue",
suggestions: [50]
}
]
},
%{
key: :crontab,
type: {:list, :tuple},
description: "Settings for cron background jobs",
suggestions: [
{"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
{"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker}
]
}
]
},
%{
group: :pleroma,
key: :workers,
type: :group,
description: "Includes custom worker options not interpretable directly by `Oban`",
children: [
%{
key: :retries,
type: {:keyword, :integer},
description: "Max retry attempts for failed jobs, per `Oban` queue",
suggestions: [
federator_incoming: 5,
federator_outgoing: 5
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.Metadata,
type: :group,
description: "Metadata-related settings",
children: [
%{
key: :providers,
type: {:list, :module},
description: "List of metadata providers to enable",
suggestions: [
Pleroma.Web.Metadata.Providers.OpenGraph,
Pleroma.Web.Metadata.Providers.TwitterCard,
Pleroma.Web.Metadata.Providers.RelMe,
Pleroma.Web.Metadata.Providers.Feed
]
},
%{
key: :unfurl_nsfw,
label: "Unfurl NSFW",
type: :boolean,
description: "When enabled NSFW attachments will be shown in previews"
}
]
},
%{
group: :pleroma,
key: :rich_media,
type: :group,
description:
"If enabled the instance will parse metadata from attached links to generate link previews",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables RichMedia parsing of URLs"
},
%{
key: :ignore_hosts,
type: {:list, :string},
description: "List of hosts which will be ignored by the metadata parser",
suggestions: ["accounts.google.com", "xss.website"]
},
%{
key: :ignore_tld,
label: "Ignore TLD",
type: {:list, :string},
description: "List TLDs (top-level domains) which will ignore for parse metadata",
suggestions: ["local", "localdomain", "lan"]
},
%{
key: :parsers,
type: {:list, :module},
description:
"List of Rich Media parsers. Module names are shortened (removed leading `Pleroma.Web.RichMedia.Parsers.` part), but on adding custom module you need to use full name.",
suggestions: [
Pleroma.Web.RichMedia.Parsers.OEmbed,
Pleroma.Web.RichMedia.Parsers.TwitterCard
]
},
%{
key: :ttl_setters,
label: "TTL setters",
type: {:list, :module},
description:
"List of rich media TTL setters. Module names are shortened (removed leading `Pleroma.Web.RichMedia.Parser.` part), but on adding custom module you need to use full name.",
suggestions: [
Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl
]
},
%{
key: :failure_backoff,
type: :integer,
description:
"Amount of milliseconds after request failure, during which the request will not be retried.",
suggestions: [60_000]
}
]
},
%{
group: :pleroma,
key: Pleroma.Formatter,
label: "Linkify",
type: :group,
description:
"Configuration for Pleroma's link formatter which parses mentions, hashtags, and URLs.",
children: [
%{
key: :class,
type: [:string, :boolean],
description: "Specify the class to be added to the generated link. Disable to clear.",
suggestions: ["auto-linker", false]
},
%{
key: :rel,
type: [:string, :boolean],
description: "Override the rel attribute. Disable to clear.",
suggestions: ["ugc", "noopener noreferrer", false]
},
%{
key: :new_window,
type: :boolean,
description: "Link URLs will open in a new window/tab."
},
%{
key: :truncate,
type: [:integer, :boolean],
description:
"Set to a number to truncate URLs longer than the number. Truncated URLs will end in `...`",
suggestions: [15, false]
},
%{
key: :strip_prefix,
type: :boolean,
description: "Strip the scheme prefix."
},
%{
key: :extra,
type: :boolean,
description: "Link URLs with rarely used schemes (magnet, ipfs, irc, etc.)"
},
%{
key: :validate_tld,
type: [:atom, :boolean],
description:
"Set to false to disable TLD validation for URLs/emails. Can be set to :no_scheme to validate TLDs only for URLs without a scheme (e.g `example.com` will be validated, but `http://example.loki` won't)",
suggestions: [:no_scheme, true]
}
]
},
%{
group: :pleroma,
key: Pleroma.ScheduledActivity,
type: :group,
description: "Scheduled activities settings",
children: [
%{
key: :daily_user_limit,
type: :integer,
description:
"The number of scheduled activities a user is allowed to create in a single day. Default: 25.",
suggestions: [25]
},
%{
key: :total_user_limit,
type: :integer,
description:
"The number of scheduled activities a user is allowed to create in total. Default: 300.",
suggestions: [300]
},
%{
key: :enabled,
type: :boolean,
description: "Whether scheduled activities are sent to the job queue to be executed"
}
]
},
%{
group: :pleroma,
key: Pleroma.Workers.PurgeExpiredActivity,
type: :group,
description: "Expired activities settings",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables expired activities addition & deletion"
},
%{
key: :min_lifetime,
type: :integer,
description: "Minimum lifetime for ephemeral activity (in seconds)",
suggestions: [600]
}
]
},
%{
group: :pleroma,
label: "Pleroma Authenticator",
type: :group,
description: "Authenticator",
children: [
%{
key: Pleroma.Web.Auth.Authenticator,
type: :module,
suggestions: [Pleroma.Web.Auth.PleromaAuthenticator, Pleroma.Web.Auth.LDAPAuthenticator]
}
]
},
%{
group: :pleroma,
key: :ldap,
label: "LDAP",
type: :group,
description:
"Use LDAP for user authentication. When a user logs in to the Pleroma instance, the name and password" <>
" will be verified by trying to authenticate (bind) to a LDAP server." <>
" If a user exists in the LDAP directory but there is no account with the same name yet on the" <>
" Pleroma instance then a new Pleroma account will be created with the same name as the LDAP user name.",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables LDAP authentication"
},
%{
key: :host,
type: :string,
description: "LDAP server hostname",
suggestions: ["localhosts"]
},
%{
key: :port,
type: :integer,
description: "LDAP port, e.g. 389 or 636",
suggestions: [389, 636]
},
%{
key: :ssl,
label: "SSL",
type: :boolean,
description: "Enable to use SSL, usually implies the port 636"
},
%{
key: :sslopts,
label: "SSL options",
type: :keyword,
description: "Additional SSL options",
suggestions: [cacertfile: "path/to/file/with/PEM/cacerts", verify: :verify_peer],
children: [
%{
key: :cacertfile,
type: :string,
description: "Path to file with PEM encoded cacerts",
suggestions: ["path/to/file/with/PEM/cacerts"]
},
%{
key: :verify,
type: :atom,
description: "Type of cert verification",
suggestions: [:verify_peer]
}
]
},
%{
key: :tls,
label: "TLS",
type: :boolean,
description: "Enable to use STARTTLS, usually implies the port 389"
},
%{
key: :tlsopts,
label: "TLS options",
type: :keyword,
description: "Additional TLS options",
suggestions: [cacertfile: "path/to/file/with/PEM/cacerts", verify: :verify_peer],
children: [
%{
key: :cacertfile,
type: :string,
description: "Path to file with PEM encoded cacerts",
suggestions: ["path/to/file/with/PEM/cacerts"]
},
%{
key: :verify,
type: :atom,
description: "Type of cert verification",
suggestions: [:verify_peer]
}
]
},
%{
key: :base,
type: :string,
description: "LDAP base, e.g. \"dc=example,dc=com\"",
suggestions: ["dc=example,dc=com"]
},
%{
key: :uid,
label: "UID",
type: :string,
description:
"LDAP attribute name to authenticate the user, e.g. when \"cn\", the filter will be \"cn=username,base\"",
suggestions: ["cn"]
}
]
},
%{
group: :pleroma,
key: :auth,
type: :group,
description: "Authentication / authorization settings",
children: [
%{
key: :enforce_oauth_admin_scope_usage,
label: "Enforce OAuth admin scope usage",
type: :boolean,
description:
"OAuth admin scope requirement toggle. " <>
"If enabled, admin actions explicitly demand admin OAuth scope(s) presence in OAuth token " <>
"(client app must support admin scopes). If disabled and token doesn't have admin scope(s), " <>
"`is_admin` user flag grants access to admin-specific actions."
},
%{
key: :auth_template,
type: :string,
description:
"Authentication form template. By default it's `show.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.ee`.",
suggestions: ["show.html"]
},
%{
key: :oauth_consumer_template,
label: "OAuth consumer template",
type: :string,
description:
"OAuth consumer mode authentication form template. By default it's `consumer.html` which corresponds to" <>
" `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`.",
suggestions: ["consumer.html"]
},
%{
key: :oauth_consumer_strategies,
label: "OAuth consumer strategies",
type: {:list, :string},
description:
"The list of enabled OAuth consumer strategies. By default it's set by OAUTH_CONSUMER_STRATEGIES environment variable." <>
" Each entry in this space-delimited string should be of format \"strategy\" or \"strategy:dependency\"" <>
" (e.g. twitter or keycloak:ueberauth_keycloak_strategy in case dependency is named differently than ueberauth_<strategy>).",
suggestions: ["twitter", "keycloak:ueberauth_keycloak_strategy"]
}
]
},
%{
group: :pleroma,
key: :email_notifications,
type: :group,
description: "Email notifications settings",
children: [
%{
key: :digest,
type: :map,
description:
"emails of \"what you've missed\" for users who have been inactive for a while",
suggestions: [
%{
active: false,
schedule: "0 0 * * 0",
interval: 7,
inactivity_threshold: 7
}
],
children: [
%{
key: :active,
label: "Enabled",
type: :boolean,
description: "Globally enable or disable digest emails"
},
%{
key: :schedule,
type: :string,
description:
"When to send digest email, in crontab format. \"0 0 0\" is the default, meaning \"once a week at midnight on Sunday morning\".",
suggestions: ["0 0 * * 0"]
},
%{
key: :interval,
type: :integer,
description: "Minimum interval between digest emails to one user",
suggestions: [7]
},
%{
key: :inactivity_threshold,
type: :integer,
description: "Minimum user inactivity threshold",
suggestions: [7]
}
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Emails.UserEmail,
type: :group,
description: "Email template settings",
children: [
%{
key: :logo,
type: {:string, :image},
description: "A path to a custom logo. Set it to `nil` to use the default Pleroma logo.",
suggestions: ["some/path/logo.png"]
},
%{
key: :styling,
type: :map,
description: "A map with color settings for email templates.",
suggestions: [
%{
link_color: "#d8a070",
background_color: "#2C3645",
content_background_color: "#1B2635",
header_color: "#d8a070",
text_color: "#b9b9ba",
text_muted_color: "#b9b9ba"
}
],
children: [
%{
key: :link_color,
type: :string,
suggestions: ["#d8a070"]
},
%{
key: :background_color,
type: :string,
suggestions: ["#2C3645"]
},
%{
key: :content_background_color,
type: :string,
suggestions: ["#1B2635"]
},
%{
key: :header_color,
type: :string,
suggestions: ["#d8a070"]
},
%{
key: :text_color,
type: :string,
suggestions: ["#b9b9ba"]
},
%{
key: :text_muted_color,
type: :string,
suggestions: ["#b9b9ba"]
}
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Emails.NewUsersDigestEmail,
type: :group,
description: "New users admin email digest",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables new users admin digest email when `true`"
}
]
},
%{
group: :pleroma,
key: :oauth2,
label: "OAuth2",
type: :group,
description: "Configure OAuth 2 provider capabilities",
children: [
%{
key: :token_expires_in,
type: :integer,
description: "The lifetime in seconds of the access token",
suggestions: [600]
},
%{
key: :issue_new_refresh_token,
type: :boolean,
description:
"Keeps old refresh token or generate new refresh token when to obtain an access token"
},
%{
key: :clean_expired_tokens,
type: :boolean,
description: "Enable a background job to clean expired OAuth tokens. Default: disabled."
}
]
},
%{
group: :pleroma,
key: :emoji,
type: :group,
children: [
%{
key: :shortcode_globs,
type: {:list, :string},
description: "Location of custom emoji files. * can be used as a wildcard.",
suggestions: ["/emoji/custom/**/*.png"]
},
%{
key: :pack_extensions,
type: {:list, :string},
description:
"A list of file extensions for emojis, when no emoji.txt for a pack is present",
suggestions: [".png", ".gif"]
},
%{
key: :groups,
type: {:keyword, {:list, :string}},
description:
"Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the group name" <>
" and the value is the location or array of locations. * can be used as a wildcard.",
suggestions: [
Custom: ["/emoji/*.png", "/emoji/**/*.png"]
]
},
%{
key: :default_manifest,
type: :string,
description:
"Location of the JSON-manifest. This manifest contains information about the emoji-packs you can download." <>
" Currently only one manifest can be added (no arrays).",
suggestions: ["https://git.pleroma.social/pleroma/emoji-index/raw/master/index.json"]
},
%{
key: :shared_pack_cache_seconds_per_file,
label: "Shared pack cache s/file",
type: :integer,
description:
"When an emoji pack is shared, the archive is created and cached in memory" <>
" for this amount of seconds multiplied by the number of files.",
suggestions: [60]
}
]
},
%{
group: :pleroma,
key: :rate_limit,
type: :group,
description:
"Rate limit settings. This is an advanced feature enabled only for :authentication by default.",
children: [
%{
key: :search,
type: [:tuple, {:list, :tuple}],
description: "For the search requests (account & status search etc.)",
suggestions: [{1000, 10}, [{10_000, 10}, {10_000, 50}]]
},
%{
key: :timeline,
type: [:tuple, {:list, :tuple}],
description: "For requests to timelines (each timeline has it's own limiter)",
suggestions: [{1000, 10}, [{10_000, 10}, {10_000, 50}]]
},
%{
key: :app_account_creation,
type: [:tuple, {:list, :tuple}],
description: "For registering user accounts from the same IP address",
suggestions: [{1000, 10}, [{10_000, 10}, {10_000, 50}]]
},
%{
key: :relations_actions,
type: [:tuple, {:list, :tuple}],
description: "For actions on relationships with all users (follow, unfollow)",
suggestions: [{1000, 10}, [{10_000, 10}, {10_000, 50}]]
},
%{
key: :relation_id_action,
label: "Relation ID action",
type: [:tuple, {:list, :tuple}],
description: "For actions on relation with a specific user (follow, unfollow)",
suggestions: [{1000, 10}, [{10_000, 10}, {10_000, 50}]]
},
%{
key: :statuses_actions,
type: [:tuple, {:list, :tuple}],
description:
"For create / delete / fav / unfav / reblog / unreblog actions on any statuses",
suggestions: [{1000, 10}, [{10_000, 10}, {10_000, 50}]]
},
%{
key: :status_id_action,
label: "Status ID action",
type: [:tuple, {:list, :tuple}],
description:
"For fav / unfav or reblog / unreblog actions on the same status by the same user",
suggestions: [{1000, 10}, [{10_000, 10}, {10_000, 50}]]
},
%{
key: :authentication,
type: [:tuple, {:list, :tuple}],
description: "For authentication create / password check / user existence check requests",
suggestions: [{60_000, 15}]
}
]
},
%{
group: :esshd,
label: "ESSHD",
type: :group,
description:
"Before enabling this you must add :esshd to mix.exs as one of the extra_applications " <>
"and generate host keys in your priv dir with ssh-keygen -m PEM -N \"\" -b 2048 -t rsa -f ssh_host_rsa_key",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables SSH"
},
%{
key: :priv_dir,
type: :string,
description: "Dir with SSH keys",
suggestions: ["/some/path/ssh_keys"]
},
%{
key: :handler,
type: :string,
description: "Handler module",
suggestions: ["Pleroma.BBS.Handler"]
},
%{
key: :port,
type: :integer,
description: "Port to connect",
suggestions: [10_022]
},
%{
key: :password_authenticator,
type: :string,
description: "Authenticator module",
suggestions: ["Pleroma.BBS.Authenticator"]
}
]
},
%{
group: :mime,
label: "Mime Types",
type: :group,
description: "Mime Types settings",
children: [
%{
key: :types,
type: :map,
suggestions: [
%{
"application/xml" => ["xml"],
"application/xrd+xml" => ["xrd+xml"],
"application/jrd+json" => ["jrd+json"],
"application/activity+json" => ["activity+json"],
"application/ld+json" => ["activity+json"]
}
],
children: [
%{
key: "application/xml",
type: {:list, :string},
suggestions: ["xml"]
},
%{
key: "application/xrd+xml",
type: {:list, :string},
suggestions: ["xrd+xml"]
},
%{
key: "application/jrd+json",
type: {:list, :string},
suggestions: ["jrd+json"]
},
%{
key: "application/activity+json",
type: {:list, :string},
suggestions: ["activity+json"]
},
%{
key: "application/ld+json",
type: {:list, :string},
suggestions: ["activity+json"]
}
]
}
]
},
%{
group: :pleroma,
key: :chat,
type: :group,
description: "Pleroma chat settings",
children: [
%{
key: :enabled,
type: :boolean
}
]
},
%{
group: :pleroma,
key: :http,
label: "HTTP",
type: :group,
description: "HTTP settings",
children: [
%{
key: :proxy_url,
label: "Proxy URL",
type: [:string, :tuple],
description: "Proxy URL",
suggestions: ["localhost:9020", {:socks5, :localhost, 3090}]
},
%{
key: :send_user_agent,
type: :boolean
},
%{
key: :user_agent,
type: [:string, :atom],
description:
"What user agent to use. Must be a string or an atom `:default`. Default value is `:default`.",
suggestions: ["Pleroma", :default]
},
%{
key: :adapter,
type: :keyword,
description: "Adapter specific options",
suggestions: [],
children: [
%{
key: :ssl_options,
type: :keyword,
label: "SSL Options",
description: "SSL options for HTTP adapter",
children: [
%{
key: :versions,
type: {:list, :atom},
description: "List of TLS version to use",
suggestions: [:tlsv1, ":tlsv1.1", ":tlsv1.2"]
}
]
}
]
}
]
},
%{
group: :pleroma,
key: :markup,
label: "Markup Settings",
type: :group,
children: [
%{
key: :allow_inline_images,
type: :boolean
},
%{
key: :allow_headings,
type: :boolean
},
%{
key: :allow_tables,
type: :boolean
},
%{
key: :allow_fonts,
type: :boolean
},
%{
key: :scrub_policy,
type: {:list, :module},
description:
"Module names are shortened (removed leading `Pleroma.HTML.` part), but on adding custom module you need to use full name.",
suggestions: [Pleroma.HTML.Transform.MediaProxy, Pleroma.HTML.Scrubber.Default]
}
]
},
%{
group: :pleroma,
key: :user,
type: :group,
children: [
%{
key: :deny_follow_blocked,
type: :boolean
}
]
},
%{
group: :pleroma,
key: Pleroma.User,
type: :group,
children: [
%{
key: :restricted_nicknames,
type: {:list, :string},
description: "List of nicknames users may not register with.",
suggestions: [
".well-known",
"~",
"about",
"activities",
"api",
"auth",
"check_password",
"dev",
"friend-requests",
"inbox",
"internal",
"main",
"media",
"nodeinfo",
"notice",
"oauth",
"objects",
"ostatus_subscribe",
"pleroma",
"proxy",
"push",
"registration",
"relay",
"settings",
"status",
"tag",
"user-search",
"user_exists",
"users",
"web"
]
},
%{
key: :email_blacklist,
type: {:list, :string},
description: "List of email domains users may not register with.",
suggestions: ["mailinator.com", "maildrop.cc"]
}
]
},
%{
group: :cors_plug,
label: "CORS plug config",
type: :group,
children: [
%{
key: :max_age,
type: :integer,
suggestions: [86_400]
},
%{
key: :methods,
type: {:list, :string},
suggestions: ["POST", "PUT", "DELETE", "GET", "PATCH", "OPTIONS"]
},
%{
key: :expose,
type: {:list, :string},
suggestions: [
"Link",
"X-RateLimit-Reset",
"X-RateLimit-Limit",
"X-RateLimit-Remaining",
"X-Request-Id",
"Idempotency-Key"
]
},
%{
key: :credentials,
type: :boolean
},
%{
key: :headers,
type: {:list, :string},
suggestions: ["Authorization", "Content-Type", "Idempotency-Key"]
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.Plugs.RemoteIp,
type: :group,
description: """
`Pleroma.Web.Plugs.RemoteIp` is a shim to call [`RemoteIp`](https://git.pleroma.social/pleroma/remote_ip) but with runtime configuration.
**If your instance is not behind at least one reverse proxy, you should not enable this plug.**
""",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enable/disable the plug. Default: disabled."
},
%{
key: :headers,
type: {:list, :string},
description: """
A list of strings naming the HTTP headers to use when deriving the true client IP. Default: `["x-forwarded-for"]`.
"""
},
%{
key: :proxies,
type: {:list, :string},
description:
"A list of upstream proxy IP subnets in CIDR notation from which we will parse the content of `headers`. Defaults to `[]`. IPv4 entries without a bitmask will be assumed to be /32 and IPv6 /128."
},
%{
key: :reserved,
type: {:list, :string},
description: """
A list of reserved IP subnets in CIDR notation which should be ignored if found in `headers`. Defaults to `["127.0.0.0/8", "::1/128", "fc00::/7", "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"]`
"""
}
]
},
%{
group: :pleroma,
key: :web_cache_ttl,
label: "Web cache TTL",
type: :group,
description:
"The expiration time for the web responses cache. Values should be in milliseconds or `nil` to disable expiration.",
children: [
%{
key: :activity_pub,
type: :integer,
description:
"Activity pub routes (except question activities). Default: `nil` (no expiration).",
suggestions: [30_000, nil]
},
%{
key: :activity_pub_question,
type: :integer,
description: "Activity pub routes (question activities). Default: `30_000` (30 seconds).",
suggestions: [30_000]
}
]
},
%{
group: :pleroma,
key: :static_fe,
label: "Static FE",
type: :group,
description:
"Render profiles and posts using server-generated HTML that is viewable without using JavaScript",
children: [
%{
key: :enabled,
type: :boolean,
description: "Enables the rendering of static HTML. Default: disabled."
}
]
},
%{
group: :pleroma,
key: :feed,
type: :group,
description: "Configure feed rendering",
children: [
%{
key: :post_title,
type: :map,
description: "Configure title rendering",
children: [
%{
key: :max_length,
type: :integer,
description: "Maximum number of characters before truncating title",
suggestions: [100]
},
%{
key: :omission,
type: :string,
description: "Replacement which will be used after truncating string",
suggestions: ["..."]
}
]
}
]
},
%{
group: :pleroma,
key: :modules,
type: :group,
description: "Custom Runtime Modules",
children: [
%{
key: :runtime_dir,
type: :string,
description: "A path to custom Elixir modules (such as MRF policies)."
}
]
},
%{
group: :pleroma,
key: :streamer,
type: :group,
description: "Settings for notifications streamer",
children: [
%{
key: :workers,
type: :integer,
description: "Number of workers to send notifications",
suggestions: [3]
},
%{
key: :overflow_workers,
type: :integer,
description: "Maximum number of workers created if pool is empty",
suggestions: [2]
}
]
},
%{
group: :pleroma,
key: :connections_pool,
type: :group,
description: "Advanced settings for `Gun` connections pool",
children: [
%{
key: :connection_acquisition_wait,
type: :integer,
description:
"Timeout to acquire a connection from pool. The total max time is this value multiplied by the number of retries. Default: 250ms.",
suggestions: [250]
},
%{
key: :connection_acquisition_retries,
type: :integer,
description:
"Number of attempts to acquire the connection from the pool if it is overloaded. Default: 5",
suggestions: [5]
},
%{
key: :max_connections,
type: :integer,
description: "Maximum number of connections in the pool. Default: 250 connections.",
suggestions: [250]
},
%{
key: :connect_timeout,
type: :integer,
description: "Timeout while `gun` will wait until connection is up. Default: 5000ms.",
suggestions: [5000]
},
%{
key: :reclaim_multiplier,
type: :integer,
description:
"Multiplier for the number of idle connection to be reclaimed if the pool is full. For example if the pool maxes out at 250 connections and this setting is set to 0.3, the pool will reclaim at most 75 idle connections if it's overloaded. Default: 0.1",
suggestions: [0.1]
}
]
},
%{
group: :pleroma,
key: :pools,
type: :group,
description: "Advanced settings for `Gun` workers pools",
children:
Enum.map([:federation, :media, :upload, :default], fn pool_name ->
%{
key: pool_name,
type: :keyword,
description: "Settings for #{pool_name} pool.",
children: [
%{
key: :size,
type: :integer,
description: "Maximum number of concurrent requests in the pool.",
suggestions: [50]
},
%{
key: :max_waiting,
type: :integer,
description:
"Maximum number of requests waiting for other requests to finish. After this number is reached, the pool will start returning errrors when a new request is made",
suggestions: [10]
},
%{
key: :recv_timeout,
type: :integer,
description: "Timeout for the pool while gun will wait for response",
suggestions: [10_000]
}
]
}
end)
},
%{
group: :pleroma,
key: :hackney_pools,
type: :group,
description: "Advanced settings for `Hackney` connections pools",
children: [
%{
key: :federation,
type: :keyword,
description: "Settings for federation pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :media,
type: :keyword,
description: "Settings for media pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [50]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [150_000]
}
]
},
%{
key: :upload,
type: :keyword,
description: "Settings for upload pool.",
children: [
%{
key: :max_connections,
type: :integer,
description: "Number workers in the pool.",
suggestions: [25]
},
%{
key: :timeout,
type: :integer,
description: "Timeout while `hackney` will wait for response.",
suggestions: [300_000]
}
]
}
]
},
%{
group: :pleroma,
key: :restrict_unauthenticated,
label: "Restrict Unauthenticated",
type: :group,
description:
"Disallow viewing timelines, user profiles and statuses for unauthenticated users.",
children: [
%{
key: :timelines,
type: :map,
description: "Settings for public and federated timelines.",
children: [
%{
key: :local,
type: :boolean,
description: "Disallow view public timeline."
},
%{
key: :federated,
type: :boolean,
description: "Disallow view federated timeline."
}
]
},
%{
key: :profiles,
type: :map,
description: "Settings for user profiles.",
children: [
%{
key: :local,
type: :boolean,
description: "Disallow view local user profiles."
},
%{
key: :remote,
type: :boolean,
description: "Disallow view remote user profiles."
}
]
},
%{
key: :activities,
type: :map,
description: "Settings for statuses.",
children: [
%{
key: :local,
type: :boolean,
description: "Disallow view local statuses."
},
%{
key: :remote,
type: :boolean,
description: "Disallow view remote statuses."
}
]
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.ApiSpec.CastAndValidate,
type: :group,
children: [
%{
key: :strict,
type: :boolean,
description:
"Enables strict input validation (useful in development, not recommended in production)"
}
]
},
%{
group: :pleroma,
key: :instances_favicons,
type: :group,
description: "Control favicons for instances",
children: [
%{
key: :enabled,
type: :boolean,
description: "Allow/disallow displaying and getting instances favicons"
}
]
},
%{
group: :ex_aws,
key: :s3,
type: :group,
descriptions: "S3 service related settings",
children: [
%{
key: :access_key_id,
type: :string,
description: "S3 access key ID",
suggestions: ["AKIAQ8UKHTGIYN7DMWWJ"]
},
%{
key: :secret_access_key,
type: :string,
description: "Secret access key",
suggestions: ["JFGt+fgH1UQ7vLUQjpW+WvjTdV/UNzVxcwn7DkaeFKtBS5LvoXvIiME4NQBsT6ZZ"]
},
%{
key: :host,
type: :string,
description: "S3 host",
suggestions: ["s3.eu-central-1.amazonaws.com"]
}
]
},
%{
group: :pleroma,
key: :frontends,
type: :group,
description: "Installed frontends management",
children: [
%{
key: :primary,
type: :map,
description: "Primary frontend, the one that is served for all pages by default",
children: installed_frontend_options
},
%{
key: :admin,
type: :map,
description: "Admin frontend",
children: installed_frontend_options
},
%{
key: :available,
type: :map,
description:
"A map containing available frontends and parameters for their installation.",
children: frontend_options
}
]
},
%{
group: :pleroma,
key: Pleroma.Web.Preload,
type: :group,
description: "Preload-related settings",
children: [
%{
key: :providers,
type: {:list, :module},
description: "List of preload providers to enable",
suggestions: [
Pleroma.Web.Preload.Providers.Instance,
Pleroma.Web.Preload.Providers.User,
Pleroma.Web.Preload.Providers.Timelines,
Pleroma.Web.Preload.Providers.StatusNet
]
}
]
},
%{
group: :pleroma,
key: :majic_pool,
type: :group,
description: "Majic/libmagic configuration",
children: [
%{
key: :size,
type: :integer,
description: "Number of majic workers to start.",
suggestions: [2]
}
]
},
%{
group: :pleroma,
key: Pleroma.User.Backup,
type: :group,
description: "Account Backup",
children: [
%{
key: :purge_after_days,
type: :integer,
description: "Remove backup achives after N days",
suggestions: [30]
},
%{
key: :limit_days,
type: :integer,
description: "Limit user to export not more often than once per N days",
suggestions: [7]
}
]
},
%{
group: :prometheus,
key: Pleroma.Web.Endpoint.MetricsExporter,
type: :group,
description: "Prometheus app metrics endpoint configuration",
children: [
%{
key: :enabled,
type: :boolean,
description: "[Pleroma extension] Enables app metrics endpoint."
},
%{
key: :ip_whitelist,
type: [{:list, :string}, {:list, :charlist}, {:list, :tuple}],
description:
"[Pleroma extension] If non-empty, restricts access to app metrics endpoint to specified IP addresses."
},
%{
key: :auth,
type: [:boolean, :tuple],
description: "Enables HTTP Basic Auth for app metrics endpoint.",
suggestions: [false, {:basic, "myusername", "mypassword"}]
},
%{
key: :path,
type: :string,
description: "App metrics endpoint URI path.",
suggestions: ["/api/pleroma/app_metrics"]
},
%{
key: :format,
type: :atom,
description: "App metrics endpoint output format.",
suggestions: [:text, :protobuf]
}
]
}
]
diff --git a/docs/configuration/cheatsheet.md b/docs/configuration/cheatsheet.md
index ebf95ebc9..4d18ac30a 100644
--- a/docs/configuration/cheatsheet.md
+++ b/docs/configuration/cheatsheet.md
@@ -1,1126 +1,1114 @@
# Configuration Cheat Sheet
This is a cheat sheet for the Pleroma configuration file; any setting that can be configured should be listed here.
For OTP installations the configuration is typically stored in `/etc/pleroma/config.exs`.
For from-source installations, Pleroma configuration works by first importing the base config `config/config.exs`, then overriding it with the environment config `config/$MIX_ENV.exs`, and then overriding it with the user config `config/$MIX_ENV.secret.exs`. In from-source installations you should always make changes to the user config and NEVER to the base config, to avoid breakages and merge conflicts. So for production you change/add configuration in `config/prod.secret.exs`.
To add configuration to your config file, you can copy it from the base config. The latest version of it can be viewed [here](https://git.pleroma.social/pleroma/pleroma/blob/develop/config/config.exs). You can also use this file if you don't know how an option is supposed to be formatted.
## :chat
* `enabled` - Enables the backend chat. Defaults to `true`.
## :instance
* `name`: The instance’s name.
* `email`: Email used to reach an Administrator/Moderator of the instance.
* `notify_email`: Email used for notifications.
* `description`: The instance’s description, can be seen in nodeinfo and ``/api/v1/instance``.
* `limit`: Posts character limit (CW/Subject included in the counter).
* `description_limit`: The character limit for image descriptions.
* `chat_limit`: Character limit of the instance chat messages.
* `remote_limit`: Hard character limit beyond which remote posts will be dropped.
* `upload_limit`: File size limit of uploads (except for avatar, background, banner).
* `avatar_upload_limit`: File size limit of user’s profile avatars.
* `background_upload_limit`: File size limit of user’s profile backgrounds.
* `banner_upload_limit`: File size limit of user’s profile banners.
* `poll_limits`: A map with poll limits for **local** polls.
* `max_options`: Maximum number of options.
* `max_option_chars`: Maximum number of characters per option.
* `min_expiration`: Minimum expiration time (in seconds).
* `max_expiration`: Maximum expiration time (in seconds).
* `registrations_open`: Enable registrations for anyone, invitations can be enabled when false.
* `invites_enabled`: Enable user invitations for admins (depends on `registrations_open: false`).
* `account_activation_required`: Require users to confirm their emails before signing in.
* `account_approval_required`: Require users to be manually approved by an admin before signing in.
* `federating`: Enable federation with other instances.
* `federation_incoming_replies_max_depth`: Max. depth of reply-to activities fetching on incoming federation, to prevent out-of-memory situations while fetching very long threads. If set to `nil`, threads of any depth will be fetched. Lower this value if you experience out-of-memory crashes.
* `federation_reachability_timeout_days`: Timeout (in days) of each external federation target being unreachable prior to pausing federating to it.
* `allow_relay`: Enable Pleroma’s Relay, which makes it possible to follow a whole instance.
* `public`: Makes the client API available in authenticated mode only, except for user profiles. Useful for disabling the Local Timeline and The Whole Known Network. Note that there is a dependent setting restricting or allowing unauthenticated access to specific resources, see `restrict_unauthenticated` for more details.
* `quarantined_instances`: List of ActivityPub instances where private (DMs, followers-only) activities will not be sent.
* `allowed_post_formats`: MIME-type list of formats allowed to be posted (transformed into HTML).
* `extended_nickname_format`: Set to `true` to use extended local nicknames format (allows underscores/dashes). This will break federation with
older software for these nicknames.
* `max_pinned_statuses`: The maximum number of pinned statuses. `0` will disable the feature.
* `autofollowed_nicknames`: Set to nicknames of (local) users that every new user should automatically follow.
* `autofollowing_nicknames`: Set to nicknames of (local) users that automatically follow every newly registered user.
* `attachment_links`: Set to true to enable automatically adding attachment link text to statuses.
* `max_report_comment_size`: The maximum size of the report comment (Default: `1000`).
* `safe_dm_mentions`: If set to true, only mentions at the beginning of a post will be used to address people in direct messages. This is to prevent accidental mentioning of people when talking about them (e.g. "@friend hey i really don't like @enemy"). Default: `false`.
* `healthcheck`: If set to true, system data will be shown on ``/api/pleroma/healthcheck``.
* `remote_post_retention_days`: The default amount of days to retain remote posts when pruning the database.
* `user_bio_length`: A user bio maximum length (default: `5000`).
* `user_name_length`: A user name maximum length (default: `100`).
* `skip_thread_containment`: Skip filtering out broken threads. The default is `false`.
* `limit_to_local_content`: Limit unauthenticated users to searching for local statuses and users only. Possible values: `:unauthenticated`, `:all` and `false`. The default is `:unauthenticated`.
* `max_account_fields`: The maximum number of custom fields in the user profile (default: `10`).
* `max_remote_account_fields`: The maximum number of custom fields in the remote user profile (default: `20`).
* `account_field_name_length`: An account field name maximum length (default: `512`).
* `account_field_value_length`: An account field value maximum length (default: `2048`).
* `registration_reason_length`: Maximum registration reason length (default: `500`).
* `external_user_synchronization`: Enables following/followers counters synchronization for external users.
* `cleanup_attachments`: Remove attachments along with statuses. Does not affect duplicate files and attachments without status. Enabling this will increase load to database when deleting statuses on larger instances.
* `show_reactions`: Let favourites and emoji reactions be viewed through the API (default: `true`).
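Example of overriding a few of these options in your user config (values are illustrative):
```elixir
config :pleroma, :instance,
  # illustrative values, adjust for your instance
  name: "My Pleroma Instance",
  email: "admin@example.com",
  notify_email: "noreply@example.com",
  description: "A small instance for friends",
  limit: 5_000,
  registrations_open: false,
  invites_enabled: true
```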
## Welcome
* `direct_message`: welcome message sent as a direct message.
* `enabled`: Enables sending a direct message to a newly registered user. Defaults to `false`.
* `sender_nickname`: The nickname of the local user that sends the welcome message.
* `message`: A message that will be sent to newly registered users as a direct message.
* `chat_message`: welcome message sent as a chat message.
* `enabled`: Enables sending a chat message to a newly registered user. Defaults to `false`.
* `sender_nickname`: The nickname of the local user that sends the welcome message.
* `message`: A message that will be sent to newly registered users as a chat message.
* `email`: welcome message sent as an email.
* `enabled`: Enables sending a welcome email to a newly registered user. Defaults to `false`.
* `sender`: The email address or a `{nickname, email}` tuple that will be used as the sender of the welcome email.
* `subject`: The subject of the welcome email.
* `html`: The HTML content that will be sent to newly registered users in the welcome email.
* `text`: The text content that will be sent to newly registered users in the welcome email.
Example:
```elixir
config :pleroma, :welcome,
direct_message: [
enabled: true,
sender_nickname: "lain",
message: "Hi! Welcome on board!"
],
email: [
enabled: true,
sender: {"Pleroma App", "welcome@pleroma.app"},
subject: "Welcome to <%= instance_name %>",
html: "Welcome to <%= instance_name %>",
text: "Welcome to <%= instance_name %>"
]
```
## Message rewrite facility
### :mrf
* `policies`: Message Rewrite Policy, either one or a list. Here are the ones available by default:
* `Pleroma.Web.ActivityPub.MRF.NoOpPolicy`: Doesn’t modify activities (default).
* `Pleroma.Web.ActivityPub.MRF.DropPolicy`: Drops all activities. It generally doesn’t make sense to use it in production.
* `Pleroma.Web.ActivityPub.MRF.SimplePolicy`: Restrict the visibility of activities from certain instances (See [`:mrf_simple`](#mrf_simple)).
* `Pleroma.Web.ActivityPub.MRF.TagPolicy`: Applies policies to individual users based on tags, which can be set using pleroma-fe/admin-fe/any other app that supports Pleroma Admin API. For example it allows marking posts from individual users nsfw (sensitive).
* `Pleroma.Web.ActivityPub.MRF.SubchainPolicy`: Selectively runs other MRF policies when messages match (See [`:mrf_subchain`](#mrf_subchain)).
* `Pleroma.Web.ActivityPub.MRF.RejectNonPublic`: Drops posts with non-public visibility settings (See [`:mrf_rejectnonpublic`](#mrf_rejectnonpublic)).
* `Pleroma.Web.ActivityPub.MRF.EnsureRePrepended`: Rewrites posts to ensure that replies to posts with subjects do not have an identical subject and instead begin with re:.
* `Pleroma.Web.ActivityPub.MRF.AntiLinkSpamPolicy`: Rejects posts from likely spambots by rejecting posts from new users that contain links.
* `Pleroma.Web.ActivityPub.MRF.MediaProxyWarmingPolicy`: Crawls attachments using their MediaProxy URLs so that the MediaProxy cache is primed.
* `Pleroma.Web.ActivityPub.MRF.MentionPolicy`: Drops posts mentioning configurable users. (See [`:mrf_mention`](#mrf_mention)).
* `Pleroma.Web.ActivityPub.MRF.VocabularyPolicy`: Restricts activities to a configured set of vocabulary. (See [`:mrf_vocabulary`](#mrf_vocabulary)).
* `Pleroma.Web.ActivityPub.MRF.ObjectAgePolicy`: Rejects or delists posts based on their age when received. (See [`:mrf_object_age`](#mrf_object_age)).
* `Pleroma.Web.ActivityPub.MRF.ActivityExpirationPolicy`: Sets a default expiration on all posts made by users of the local instance. Requires `Pleroma.Workers.PurgeExpiredActivity` to be enabled for processing the scheduled deletions.
* `Pleroma.Web.ActivityPub.MRF.ForceBotUnlistedPolicy`: Makes all bot posts disappear from public timelines.
* `transparency`: Make the content of your Message Rewrite Facility settings public (via nodeinfo).
* `transparency_exclusions`: Exclude specific instance names from MRF transparency. The use of the exclusions feature will be disclosed in nodeinfo as a boolean value.
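For example, a setup enabling two of the bundled policies with transparency might look like this (the chosen policies and the excluded instance name are illustrative):
```elixir
config :pleroma, :mrf,
  # illustrative policy selection
  policies: [
    Pleroma.Web.ActivityPub.MRF.SimplePolicy,
    Pleroma.Web.ActivityPub.MRF.TagPolicy
  ],
  transparency: true,
  transparency_exclusions: ["quarantined.example"]
```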
## Federation
### MRF policies
!!! note
Configuring MRF policies is not enough for them to take effect. You have to enable them by specifying their module in `policies` under [:mrf](#mrf) section.
#### :mrf_simple
* `media_removal`: List of instances to remove media from.
* `media_nsfw`: List of instances whose media will be marked as NSFW (sensitive).
* `federated_timeline_removal`: List of instances to remove from Federated (aka The Whole Known Network) Timeline.
* `reject`: List of instances to reject any activities from.
* `accept`: List of instances to accept any activities from.
* `followers_only`: List of instances to decrease post visibility to only the followers, including for DM mentions.
* `report_removal`: List of instances to reject reports from.
* `avatar_removal`: List of instances to strip avatars from.
* `banner_removal`: List of instances to strip banners from.
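Example (instance names are illustrative):
```elixir
config :pleroma, :mrf_simple,
  # illustrative instance lists
  media_removal: ["media.example"],
  media_nsfw: ["nsfw.example"],
  federated_timeline_removal: ["spam.example"],
  reject: ["blocked.example"]
```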
#### :mrf_subchain
This policy processes messages through an alternate pipeline when a given message matches certain criteria.
All criteria are configured as a map of regular expressions to lists of policy modules.
* `match_actor`: Matches a series of regular expressions against the actor field.
Example:
```elixir
config :pleroma, :mrf_subchain,
match_actor: %{
~r/https:\/\/example.com/s => [Pleroma.Web.ActivityPub.MRF.DropPolicy]
}
```
#### :mrf_rejectnonpublic
* `allow_followersonly`: whether to allow followers-only posts.
* `allow_direct`: whether to allow direct messages.
#### :mrf_hellthread
* `delist_threshold`: Number of mentioned users after which the message gets delisted (the message can still be seen, but it will not show up in public timelines and mentioned users won't get notifications about it). Set to 0 to disable.
* `reject_threshold`: Number of mentioned users after which the message gets rejected. Set to 0 to disable.
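Example (thresholds are illustrative):
```elixir
config :pleroma, :mrf_hellthread,
  # illustrative thresholds
  delist_threshold: 10,
  reject_threshold: 20
```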
#### :mrf_keyword
* `reject`: A list of patterns which result in message being rejected, each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
* `federated_timeline_removal`: A list of patterns which result in message being removed from federated timelines (a.k.a unlisted), each pattern can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
* `replace`: A list of tuples containing `{pattern, replacement}`, `pattern` can be a string or a [regular expression](https://hexdocs.pm/elixir/Regex.html).
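Example (patterns and replacements are illustrative):
```elixir
config :pleroma, :mrf_keyword,
  # illustrative patterns
  reject: [~r/casino/i],
  federated_timeline_removal: ["spoiler"],
  replace: [{"badword", "***"}]
```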
#### :mrf_mention
* `actors`: A list of actors, for which to drop any posts mentioning.
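Example (the actor URL is illustrative):
```elixir
config :pleroma, :mrf_mention,
  # illustrative actor
  actors: ["https://example.com/users/spambot"]
```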
#### :mrf_vocabulary
* `accept`: A list of ActivityStreams terms to accept. If empty, all supported messages are accepted.
* `reject`: A list of ActivityStreams terms to reject. If empty, no messages are rejected.
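Example (the accepted terms are illustrative):
```elixir
config :pleroma, :mrf_vocabulary,
  # illustrative vocabulary selection
  accept: ["Create", "Follow", "Like", "Announce", "Undo", "Delete"],
  reject: []
```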
#### :mrf_user_allowlist
The keys in this section are the domain names that the policy should apply to.
Each key should be assigned a list of users that should be allowed through by
their ActivityPub ID.
An example:
```elixir
config :pleroma, :mrf_user_allowlist, %{
"example.org" => ["https://example.org/users/admin"]
}
```
#### :mrf_object_age
* `threshold`: Required time offset (in seconds) compared to your server clock of an incoming post before actions are taken.
e.g., a value of 900 means that any post with a timestamp older than 15 minutes will be acted upon.
* `actions`: A list of actions to apply to the post:
* `:delist` removes the post from public timelines
* `:strip_followers` removes followers from the ActivityPub recipient list, ensuring they won't be delivered to home timelines
* `:reject` rejects the message entirely
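Example (a one-week threshold; values are illustrative):
```elixir
config :pleroma, :mrf_object_age,
  # 604_800 seconds = 7 days
  threshold: 604_800,
  actions: [:delist, :strip_followers]
```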
#### :mrf_steal_emoji
* `hosts`: List of hosts to steal emojis from
* `rejected_shortcodes`: Regex-list of shortcodes to reject
* `size_limit`: File size limit (in bytes), checked before an emoji is saved to the disk
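Example (host, pattern and size limit are illustrative):
```elixir
config :pleroma, :mrf_steal_emoji,
  # illustrative values
  hosts: ["friendly.example"],
  rejected_shortcodes: [~r/nsfw/],
  size_limit: 200_000
```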
#### :mrf_activity_expiration
* `days`: Default global expiration time for all local Create activities (in days)
### :activitypub
* `unfollow_blocked`: Whether blocks result in people getting unfollowed
* `outgoing_blocks`: Whether to federate blocks to other instances
* `deny_follow_blocked`: Whether to disallow following an account that has blocked the user in question
* `sign_object_fetches`: Sign object fetches with HTTP signatures
* `authorized_fetch_mode`: Require HTTP signatures for AP fetches
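Example (values are illustrative):
```elixir
config :pleroma, :activitypub,
  # illustrative values
  unfollow_blocked: true,
  outgoing_blocks: true,
  sign_object_fetches: true,
  authorized_fetch_mode: false
```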
## Pleroma.User
* `restricted_nicknames`: List of nicknames users may not register with.
* `email_blacklist`: List of email domains users may not register with.
## Pleroma.ScheduledActivity
* `daily_user_limit`: the number of scheduled activities a user is allowed to create in a single day (Default: `25`)
* `total_user_limit`: the number of scheduled activities a user is allowed to create in total (Default: `300`)
* `enabled`: whether scheduled activities are sent to the job queue to be executed
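Example (the limits mirror the defaults listed above; `enabled: true` is illustrative):
```elixir
config :pleroma, Pleroma.ScheduledActivity,
  daily_user_limit: 25,
  total_user_limit: 300,
  enabled: true
```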
-## FedSockets
-FedSockets is an experimental feature allowing for Pleroma backends to federate using a persistant websocket connection as opposed to making each federation a seperate http connection. This feature is currently off by default. It is configurable throught he following options.
-
-### :fedsockets
-* `enabled`: Enables FedSockets for this instance. `false` by default.
-* `connection_duration`: Time an idle websocket is kept open.
-* `rejection_duration`: Failures to connect via FedSockets will not be retried for this period of time.
-* `fed_socket_fetches` and `fed_socket_rejections`: Settings passed to `cachex` for the fetch registry, and rejection stacks. See `Pleroma.Web.FedSockets` for more details.
-
-
-## Frontends
-
### :frontend_configurations
This can be used to configure a keyword list that keeps the configuration data for any kind of frontend. By default, settings for `pleroma_fe` and `masto_fe` are configured. You can find the documentation for `pleroma_fe` configuration in [Pleroma-FE configuration and customization for instance administrators](/frontend/CONFIGURATION/#options).
Frontends can access these settings at `/api/pleroma/frontend_configurations`
To add your own configuration for PleromaFE, use it like this:
```elixir
config :pleroma, :frontend_configurations,
pleroma_fe: %{
theme: "pleroma-dark",
# ... see /priv/static/static/config.json for the available keys.
},
masto_fe: %{
showInstanceSpecificPanel: true
}
```
These settings **need to be complete**, they will override the defaults.
### :static_fe
Render profiles and posts using server-generated HTML that is viewable without using JavaScript.
Available options:
* `enabled` - Enables the rendering of static HTML. Defaults to `false`.
### :assets
This section configures assets to be used with various frontends. Currently the only option
relates to mascots on the mastodon frontend
* `mascots`: KeywordList of mascots, each element __MUST__ contain both a `url` and a
`mime_type` key.
* `default_mascot`: An element from `mascots` - This will be used as the default mascot
on MastoFE (default: `:pleroma_fox_tan`).
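Example (the mascot name and file paths are illustrative):
```elixir
config :pleroma, :assets,
  # illustrative mascot definition
  mascots: [
    example_mascot: %{
      url: "/images/example-mascot.png",
      mime_type: "image/png"
    }
  ],
  default_mascot: :example_mascot
```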
### :manifest
This section describes PWA manifest instance-specific values. Currently this option relates only to MastoFE.
* `icons`: Describes the icons of the app. This is a list of maps describing icons in the same way as the
[spec](https://www.w3.org/TR/appmanifest/#imageresource-and-its-members) describes it.
Example:
```elixir
config :pleroma, :manifest,
icons: [
%{
src: "/static/logo.png"
},
%{
src: "/static/icon.png",
type: "image/png"
},
%{
src: "/static/icon.ico",
sizes: "72x72 96x96 128x128 256x256"
}
]
```
* `theme_color`: Describes the theme color of the app. (Example: `"#282c37"`, `"rebeccapurple"`).
* `background_color`: Describes the background color of the app. (Example: `"#191b22"`, `"aliceblue"`).
## :emoji
* `shortcode_globs`: Location of custom emoji files. `*` can be used as a wildcard. Example `["/emoji/custom/**/*.png"]`
* `pack_extensions`: A list of file extensions for emojis, when no emoji.txt for a pack is present. Example `[".png", ".gif"]`
* `groups`: Emojis are ordered in groups (tags). This is an array of key-value pairs where the key is the group name and the value is the location or array of locations. `*` can be used as a wildcard. Example `[Custom: ["/emoji/*.png", "/emoji/custom/*.png"]]`
* `default_manifest`: Location of the JSON-manifest. This manifest contains information about the emoji-packs you can download. Currently only one manifest can be added (no arrays).
* `shared_pack_cache_seconds_per_file`: When an emoji pack is shared, the archive is created and cached in
memory for this amount of seconds multiplied by the number of files.
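Example (values are illustrative and follow the formats shown above):
```elixir
config :pleroma, :emoji,
  shortcode_globs: ["/emoji/custom/**/*.png"],
  pack_extensions: [".png", ".gif"],
  groups: [Custom: ["/emoji/*.png", "/emoji/custom/*.png"]],
  shared_pack_cache_seconds_per_file: 60
```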
## :media_proxy
* `enabled`: Enables proxying of remote media to the instance’s proxy
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host/CDN fronts.
* `proxy_opts`: All options defined in `Pleroma.ReverseProxy` documentation, defaults to `[max_body_length: (25*1_048_576)]`.
* `whitelist`: List of hosts with scheme to bypass the mediaproxy (e.g. `https://example.com`)
* `invalidation`: options for removing media from the cache after the related object is deleted:
* `enabled`: Enables cache purging
* `provider`: Which one of the [purge cache strategy](#purge-cache-strategy) to use.
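A sketch combining these options (hosts and provider are illustrative, and the nested `invalidation` keyword list is assumed to follow the keys above):
```elixir
config :pleroma, :media_proxy,
  enabled: true,
  base_url: "https://cache.example.com",
  whitelist: ["https://example.com"],
  invalidation: [
    enabled: true,
    provider: Pleroma.Web.MediaProxy.Invalidation.Script
  ]
```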
## :media_preview_proxy
* `enabled`: Enables proxying of remote media preview to the instance’s proxy. Requires enabled media proxy (`media_proxy/enabled`).
* `thumbnail_max_width`: Max width of preview thumbnail for images (video preview always has original dimensions).
* `thumbnail_max_height`: Max height of preview thumbnail for images (video preview always has original dimensions).
* `image_quality`: Quality of the output. Ranges from 0 (min quality) to 100 (max quality).
* `min_content_length`: Min content length to perform preview, in bytes. If greater than 0, media smaller in size will be served as is, without thumbnailing.
### Purge cache strategy
#### Pleroma.Web.MediaProxy.Invalidation.Script
This strategy runs an external shell script to purge the cache.
The URLs of the attachments are passed to the script as arguments.
* `script_path`: path to the external script.
Example:
```elixir
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Script,
script_path: "./installation/nginx-cache-purge.example"
```
#### Pleroma.Web.MediaProxy.Invalidation.Http
This strategy sends a custom HTTP request to purge the cache.
* `method`: HTTP method. Defaults to `:purge`.
* `headers`: HTTP headers.
* `options`: request options.
Example:
```elixir
config :pleroma, Pleroma.Web.MediaProxy.Invalidation.Http,
method: :purge,
headers: [],
options: []
```
## Link previews
### Pleroma.Web.Metadata (provider)
* `providers`: a list of metadata providers to enable. Providers available:
* `Pleroma.Web.Metadata.Providers.OpenGraph`
* `Pleroma.Web.Metadata.Providers.TwitterCard`
* `unfurl_nsfw`: If set to `true` nsfw attachments will be shown in previews.
### :rich_media (consumer)
* `enabled`: if enabled the instance will parse metadata from attached links to generate link previews.
* `ignore_hosts`: list of hosts which will be ignored by the metadata parser. For example `["accounts.google.com", "xss.website"]`, defaults to `[]`.
* `ignore_tld`: list of TLDs (top-level domains) that will be ignored when parsing metadata. Defaults to `["local", "localdomain", "lan"]`.
* `parsers`: list of Rich Media parsers.
* `failure_backoff`: Amount of milliseconds after request failure, during which the request will not be retried.
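Example (values are illustrative):
```elixir
config :pleroma, :rich_media,
  enabled: true,
  ignore_hosts: ["accounts.google.com", "xss.website"],
  ignore_tld: ["local", "localdomain", "lan"],
  failure_backoff: 60_000
```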
## HTTP server
### Pleroma.Web.Endpoint
!!! note
`Phoenix` endpoint configuration; all configuration options can be viewed [here](https://hexdocs.pm/phoenix/Phoenix.Endpoint.html#module-dynamic-configuration), only common options are listed here.
* `http` - a list containing HTTP protocol configuration; all configuration options can be viewed [here](https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html#module-options), only common options are listed here. For deployment using Docker, you need to set this to `[ip: {0,0,0,0}, port: 4000]` to make Pleroma accessible from other containers (such as your nginx server).
- `ip` - a tuple consisting of 4 integers
- `port`
* `url` - a list containing the configuration for generating urls, accepts
- `host` - the host without the scheme and the port (e.g. `example.com`, not `https://example.com:2020`)
- `scheme` - e.g `http`, `https`
- `port`
- `path`
* `extra_cookie_attrs` - a list of `Key=Value` strings to be added as non-standard cookie attributes. Defaults to `["SameSite=Lax"]`. See the [SameSite article](https://www.owasp.org/index.php/SameSite) on OWASP for more info.
Example:
```elixir
config :pleroma, Pleroma.Web.Endpoint,
url: [host: "example.com", port: 2020, scheme: "https"],
http: [
port: 8080,
ip: {127, 0, 0, 1}
]
```
This will make Pleroma listen on `127.0.0.1` port `8080` and generate URLs starting with `https://example.com:2020`.
### :http_security
* ``enabled``: Whether the managed content security policy is enabled.
* ``sts``: Whether to additionally send a `Strict-Transport-Security` header.
* ``sts_max_age``: The maximum age for the `Strict-Transport-Security` header if sent.
* ``ct_max_age``: The maximum age for the `Expect-CT` header if sent.
* ``referrer_policy``: The referrer policy to use, either `"same-origin"` or `"no-referrer"`.
* ``report_uri``: Adds the specified url to `report-uri` and `report-to` group in CSP header.
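Example (values are illustrative):
```elixir
config :pleroma, :http_security,
  enabled: true,
  sts: true,
  sts_max_age: 31_536_000,
  referrer_policy: "same-origin",
  report_uri: "https://endpoint.example.com/report"
```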
### Pleroma.Web.Plugs.RemoteIp
!!! warning
If your instance is not behind at least one reverse proxy, you should not enable this plug.
`Pleroma.Web.Plugs.RemoteIp` is a shim to call [`RemoteIp`](https://git.pleroma.social/pleroma/remote_ip) but with runtime configuration.
Available options:
* `enabled` - Enable/disable the plug. Defaults to `false`.
* `headers` - A list of strings naming the HTTP headers to use when deriving the true client IP address. Defaults to `["x-forwarded-for"]`.
* `proxies` - A list of upstream proxy IP subnets in CIDR notation from which we will parse the content of `headers`. Defaults to `[]`. IPv4 entries without a bitmask will be assumed to be /32 and IPv6 /128.
* `reserved` - A list of reserved IP subnets in CIDR notation which should be ignored if found in `headers`. Defaults to `["127.0.0.0/8", "::1/128", "fc00::/7", "10.0.0.0/8", "172.16.0.0/12", "192.168.0.0/16"]`.
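Example (the proxy subnet is illustrative):
```elixir
config :pleroma, Pleroma.Web.Plugs.RemoteIp,
  enabled: true,
  headers: ["x-forwarded-for"],
  proxies: ["192.168.0.0/16"]
```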
### :rate_limit
!!! note
If your instance is behind a reverse proxy ensure [`Pleroma.Web.Plugs.RemoteIp`](#pleroma-plugs-remoteip) is enabled (it is enabled by default).
A keyword list of rate limiters where a key is a limiter name and value is the limiter configuration. The basic configuration is a tuple where:
* The first element: `scale` (Integer). The time scale in milliseconds.
* The second element: `limit` (Integer). How many requests to limit in the time scale provided.
It is also possible to have different limits for unauthenticated and authenticated users: the keyword value must be a list of two tuples where the first one is a config for unauthenticated users and the second one is for authenticated.
For example:
```elixir
config :pleroma, :rate_limit,
authentication: {60_000, 15},
search: [{1000, 10}, {1000, 30}]
```
Means that:
1. In 60 seconds, 15 authentication attempts can be performed from the same IP address.
2. In 1 second, 10 search requests can be performed from the same IP address by unauthenticated users, while authenticated users can perform 30 search requests per second.
Supported rate limiters:
* `:search` - Account/Status search.
* `:timeline` - Timeline requests (each timeline has its own limiter).
* `:app_account_creation` - Account registration from the API.
* `:relations_actions` - Following/Unfollowing in general.
* `:relation_id_action` - Following/Unfollowing for a specific user.
* `:statuses_actions` - Status actions such as: (un)repeating, (un)favouriting, creating, deleting.
* `:status_id_action` - (un)Repeating/(un)Favouriting a particular status.
* `:authentication` - Authentication actions, i.e. getting an OAuth token.
* `:password_reset` - Requesting password reset emails.
* `:account_confirmation_resend` - Requesting resending account confirmation emails.
* `:ap_routes` - Requesting statuses via ActivityPub.
### :web_cache_ttl
The expiration time for the web responses cache. Values should be in milliseconds or `nil` to disable expiration.
Available caches:
* `:activity_pub` - activity pub routes (except question activities). Defaults to `nil` (no expiration).
* `:activity_pub_question` - activity pub routes (question activities). Defaults to `30_000` (30 seconds).
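Example (matching the defaults described above):
```elixir
config :pleroma, :web_cache_ttl,
  activity_pub: nil,
  activity_pub_question: 30_000
```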
## HTTP client
### :http
* `proxy_url`: an upstream proxy to fetch posts and/or media with (default: `nil`)
* `send_user_agent`: should we include a user agent with HTTP requests? (default: `true`)
* `user_agent`: what user agent should we use? (default: `:default`), must be a string or `:default`
* `adapter`: array of adapter options
### :hackney_pools
Advanced. Tweaks Hackney (HTTP client) connection pools.
There are three pools used:
* `:federation` for the federation jobs.
You may want this pool's `max_connections` to be at least equal to the number of federator jobs + retry queue jobs.
* `:media` for rich media, media proxy
* `:upload` for uploaded media (if using a remote uploader and `proxy_remote: true`)
For each pool, the options are:
* `max_connections` - how many connections a pool can hold
* `timeout` - retention duration for connections
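Example (connection counts and timeouts are illustrative):
```elixir
config :pleroma, :hackney_pools,
  federation: [
    max_connections: 50,
    timeout: 150_000
  ],
  media: [
    max_connections: 50,
    timeout: 150_000
  ],
  upload: [
    max_connections: 25,
    timeout: 300_000
  ]
```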
### :connections_pool
*For `gun` adapter*
Settings for HTTP connection pool.
* `:connection_acquisition_wait` - Timeout to acquire a connection from the pool. The total max time is this value multiplied by the number of retries.
* `:connection_acquisition_retries` - Number of attempts to acquire the connection from the pool if it is overloaded. Each attempt is timed `:connection_acquisition_wait` apart.
* `:max_connections` - Maximum number of connections in the pool.
* `:connect_timeout` - Timeout to connect to the host.
* `:reclaim_multiplier` - Multiplied by `:max_connections` this will be the maximum number of idle connections that will be reclaimed in case the pool is overloaded.
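Example (values are illustrative):
```elixir
config :pleroma, :connections_pool,
  connection_acquisition_wait: 250,
  connection_acquisition_retries: 5,
  max_connections: 250,
  connect_timeout: 5_000,
  reclaim_multiplier: 0.1
```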
### :pools
*For `gun` adapter*
Settings for request pools. These pools are limited on top of `:connections_pool`.
There are four pools used:
* `:federation` for the federation jobs. You may want this pool's max_connections to be at least equal to the number of federator jobs + retry queue jobs.
* `:media` - for rich media, media proxy.
* `:upload` - for proxying media when a remote uploader is used and `proxy_remote: true`.
* `:default` - for other requests.
For each pool, the options are:
* `:size` - limit on how many requests can be executed concurrently.
* `:recv_timeout` - timeout while `gun` waits for a response.
* `:max_waiting` - limit on how many requests can wait for others to finish; after this limit is reached, subsequent requests are dropped.
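Example (sizes and timeouts are illustrative):
```elixir
config :pleroma, :pools,
  federation: [
    size: 50,
    max_waiting: 10,
    recv_timeout: 10_000
  ],
  media: [
    size: 50,
    max_waiting: 10,
    recv_timeout: 10_000
  ],
  default: [
    size: 10,
    max_waiting: 2,
    recv_timeout: 10_000
  ]
```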
## Captcha
### Pleroma.Captcha
* `enabled`: Whether the captcha should be shown on registration.
* `method`: The method/service to use for captcha.
* `seconds_valid`: The time in seconds for which the captcha is valid.
### Captcha providers
#### Pleroma.Captcha.Native
A built-in captcha provider. Enabled by default.
#### Pleroma.Captcha.Kocaptcha
Kocaptcha is a very simple captcha service with a single API endpoint;
the source code is here: [kocaptcha](https://github.com/koto-bank/kocaptcha). The default endpoint
`https://captcha.kotobank.ch` is hosted by the developer.
* `endpoint`: the Kocaptcha endpoint to use.
## Uploads
### Pleroma.Upload
* `uploader`: Which one of the [uploaders](#uploaders) to use.
* `filters`: List of [upload filters](#upload-filters) to use.
* `link_name`: When enabled Pleroma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers when using filters like `Pleroma.Upload.Filter.Dedupe`
* `base_url`: The base URL to access a user-uploaded file. Useful when you want to proxy the media files via another host.
* `proxy_remote`: If you're using a remote uploader, Pleroma will proxy media requests instead of redirecting to it.
* `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
* `filename_display_max_length`: Set max length of a filename to display. 0 = no limit. Default: 30.
* `default_description`: Sets which default description an image has if none is set explicitly. Options: `nil` (default) - don't set a default; `:filename` - use the filename of the file; a string (e.g. `"attachment"`) - use this string
!!! warning
`strip_exif` has been replaced by `Pleroma.Upload.Filter.Mogrify`.
### Uploaders
#### Pleroma.Uploaders.Local
* `uploads`: Which directory to store the user-uploads in, relative to pleroma’s working directory.
#### Pleroma.Uploaders.S3
Don't forget to configure [Ex AWS S3](#ex-aws-s3-settings)
* `bucket`: S3 bucket name.
* `bucket_namespace`: S3 bucket namespace.
* `public_endpoint`: S3 endpoint that the user finally accesses (e.g. "https://s3.dualstack.ap-northeast-1.amazonaws.com")
* `truncated_namespace`: If you use an S3-compatible service such as Digital Ocean Spaces or a CDN, set the folder name, `""`, etc.
For example, when using a CDN with the S3 virtual-host format, set `""`.
In that case, put the CDN's CNAME in `public_endpoint`.
* `streaming_enabled`: Enable streaming uploads, when enabled the file will be sent to the server in chunks as it's being read. This may be unsupported by some providers, try disabling this if you have upload problems.
#### Ex AWS S3 settings
* `access_key_id`: Access key ID
* `secret_access_key`: Secret access key
* `host`: S3 host
Example:
```elixir
config :ex_aws, :s3,
access_key_id: "xxxxxxxxxx",
secret_access_key: "yyyyyyyyyy",
host: "s3.eu-central-1.amazonaws.com"
```
### Upload filters
#### Pleroma.Upload.Filter.AnonymizeFilename
This filter replaces the filename (not the path) of an upload. For complete obfuscation, add
`Pleroma.Upload.Filter.Dedupe` before AnonymizeFilename.
* `text`: Text to replace filenames in links. If empty, `{random}.extension` will be used. You can get the original filename extension by using `{extension}`, for example `custom-file-name.{extension}`.
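Example (reusing the pattern described above):
```elixir
config :pleroma, Pleroma.Upload.Filter.AnonymizeFilename,
  text: "custom-file-name.{extension}"
```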
#### Pleroma.Upload.Filter.Dedupe
No specific configuration.
#### Pleroma.Upload.Filter.Exiftool
This filter only strips the GPS and location metadata with Exiftool, leaving color profiles and attributes intact.
No specific configuration.
#### Pleroma.Upload.Filter.Mogrify
* `args`: List of actions for the `mogrify` command like `"strip"` or `["strip", "auto-orient", {"implode", "1"}]`.
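Example (the action list is illustrative):
```elixir
config :pleroma, Pleroma.Upload.Filter.Mogrify,
  args: ["strip", "auto-orient"]
```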
## Email
### Pleroma.Emails.Mailer
* `adapter`: one of the mail adapters listed in [Swoosh readme](https://github.com/swoosh/swoosh#adapters), or `Swoosh.Adapters.Local` for in-memory mailbox.
* `api_key` / `password` and / or other adapter-specific settings, per the above documentation.
* `enabled`: Enables or disables sending emails. Default: `false`.
An example for Sendgrid adapter:
```elixir
config :pleroma, Pleroma.Emails.Mailer,
enabled: true,
adapter: Swoosh.Adapters.Sendgrid,
api_key: "YOUR_API_KEY"
```
An example for SMTP adapter:
```elixir
config :pleroma, Pleroma.Emails.Mailer,
enabled: true,
adapter: Swoosh.Adapters.SMTP,
relay: "smtp.gmail.com",
username: "YOUR_USERNAME@gmail.com",
password: "YOUR_SMTP_PASSWORD",
port: 465,
ssl: true,
auth: :always
```
### :email_notifications
Email notifications settings.
- digest - emails of "what you've missed" for users who have been
inactive for a while.
- active: globally enable or disable digest emails
- schedule: When to send digest email, in [crontab format](https://en.wikipedia.org/wiki/Cron).
"0 0 * * 0" is the default, meaning "once a week at midnight on Sunday morning"
- interval: Minimum interval between digest emails to one user
- inactivity_threshold: Minimum user inactivity threshold
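A sketch of these settings, assuming the digest options are given as a map (values are illustrative):
```elixir
config :pleroma, :email_notifications,
  digest: %{
    active: true,
    schedule: "0 0 * * 0",
    interval: 7,
    inactivity_threshold: 7
  }
```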
### Pleroma.Emails.UserEmail
- `:logo` - a path to a custom logo. Set it to `nil` to use the default Pleroma logo.
- `:styling` - a map with color settings for email templates.
### Pleroma.Emails.NewUsersDigestEmail
- `:enabled` - a boolean, enables new users admin digest email when `true`. Defaults to `false`.
## Background jobs
### Oban
[Oban](https://github.com/sorentwo/oban) asynchronous job processor configuration.
Configuration options described in [Oban readme](https://github.com/sorentwo/oban#usage):
* `repo` - app's Ecto repo (`Pleroma.Repo`)
* `log` - logs verbosity
* `queues` - job queues (see below)
* `crontab` - periodic jobs, see [`Oban.Cron`](#obancron)
Pleroma has the following queues:
* `activity_expiration` - Activity expiration
* `federator_outgoing` - Outgoing federation
* `federator_incoming` - Incoming federation
* `mailer` - Email sender, see [`Pleroma.Emails.Mailer`](#pleromaemailsmailer)
* `transmogrifier` - Transmogrifier
* `web_push` - Web push notifications
* `scheduled_activities` - Scheduled activities, see [`Pleroma.ScheduledActivity`](#pleromascheduledactivity)
#### Oban.Cron
Pleroma has these periodic job workers:
* `Pleroma.Workers.Cron.DigestEmailsWorker` - digest emails for users with new mentions and follows
* `Pleroma.Workers.Cron.NewUsersDigestWorker` - digest emails for admins with new registrations
```elixir
config :pleroma, Oban,
repo: Pleroma.Repo,
verbose: false,
prune: {:maxlen, 1500},
queues: [
federator_incoming: 50,
federator_outgoing: 50
],
crontab: [
{"0 0 * * 0", Pleroma.Workers.Cron.DigestEmailsWorker},
{"0 0 * * *", Pleroma.Workers.Cron.NewUsersDigestWorker}
]
```
This config contains two queues: `federator_incoming` and `federator_outgoing`. Both have the number of max concurrent jobs set to `50`.
#### Migrating `pleroma_job_queue` settings
`config :pleroma_job_queue, :queues` is replaced by `config :pleroma, Oban, :queues` and uses the same format (keys are queues' names, values are max concurrent jobs numbers).
### :workers
Includes custom worker options not interpretable directly by `Oban`.
* `retries` — a keyword list where keys are `Oban` queues (see above) and values are the maximum number of attempts for failed jobs.
Example:
```elixir
config :pleroma, :workers,
retries: [
federator_incoming: 5,
federator_outgoing: 5
]
```
#### Migrating `Pleroma.Web.Federator.RetryQueue` settings
* `max_retries` is replaced with `config :pleroma, :workers, retries: [federator_outgoing: 5]`
* `enabled: false` corresponds to `config :pleroma, :workers, retries: [federator_outgoing: 1]`
* deprecated options: `max_jobs`, `initial_timeout`
## :web_push_encryption, :vapid_details
Web Push Notifications configuration. You can use the mix task `mix web_push.gen.keypair` to generate it.
* ``subject``: a mailto link for the administrative contact. It’s best if this email is not a personal email address, but rather a group email so that if a person leaves an organization, is unavailable for an extended period, or otherwise can’t respond, someone else on the list can.
* ``public_key``: VAPID public key
* ``private_key``: VAPID private key
## :logger
* `backends`: `:console` is used to send logs to stdout, `{ExSyslogger, :ex_syslogger}` to log to syslog, and `Quack.Logger` to log to Slack
An example to enable ONLY ExSyslogger (f/ex in ``prod.secret.exs``) with info and debug suppressed:
```elixir
config :logger,
backends: [{ExSyslogger, :ex_syslogger}]
config :logger, :ex_syslogger,
level: :warn
```
Another example, keeping console output and adding the pid to syslog output:
```elixir
config :logger,
backends: [:console, {ExSyslogger, :ex_syslogger}]
config :logger, :ex_syslogger,
level: :warn,
option: [:pid, :ndelay]
```
See: [logger’s documentation](https://hexdocs.pm/logger/Logger.html) and [ex_syslogger’s documentation](https://hexdocs.pm/ex_syslogger/)
An example of logging info to local syslog, but warn to a Slack channel:
```elixir
config :logger,
backends: [ {ExSyslogger, :ex_syslogger}, Quack.Logger ],
level: :info
config :logger, :ex_syslogger,
level: :info,
ident: "pleroma",
format: "$metadata[$level] $message"
config :quack,
level: :warn,
meta: [:all],
webhook_url: "https://hooks.slack.com/services/YOUR-API-KEY-HERE"
```
See the [Quack Github](https://github.com/azohra/quack) for more details
## Database options
### RUM indexing for full text search
!!! warning
It is recommended to use PostgreSQL v11 or newer. We have seen some minor issues with lower PostgreSQL versions.
* `rum_enabled`: If RUM indexes should be used. Defaults to `false`.
RUM indexes are an alternative indexing scheme that is not included in PostgreSQL by default. While they may eventually be mainlined, for now they have to be installed as a PostgreSQL extension from https://github.com/postgrespro/rum.
Their advantage over the standard GIN indexes is that they allow efficient ordering of search results by timestamp, which makes search queries a lot faster on larger servers, by one or two orders of magnitude. They take up around 3 times as much space as GIN indexes.
To enable them, both the `rum_enabled` flag has to be set and the following special migration has to be run:
`mix ecto.migrate --migrations-path priv/repo/optional_migrations/rum_indexing/`
This will probably take a long time.
## Alternative client protocols
### BBS / SSH access
To enable a simple command line interface accessible over SSH, add a setting like this to your configuration file:
```exs
app_dir = File.cwd!
priv_dir = Path.join([app_dir, "priv/ssh_keys"])
config :esshd,
enabled: true,
priv_dir: priv_dir,
handler: "Pleroma.BBS.Handler",
port: 10_022,
password_authenticator: "Pleroma.BBS.Authenticator"
```
Feel free to adjust the priv_dir and port number. Then you will have to create the directory for the keys (in the example `priv/ssh_keys`) and create the host keys with `ssh-keygen -m PEM -N "" -b 2048 -t rsa -f ssh_host_rsa_key`. After restarting, you should be able to connect to your Pleroma instance with `ssh username@server -p $PORT`.
### :gopher
* `enabled`: Enables the gopher interface
* `ip`: IP address to bind to
* `port`: Port to bind to
* `dstport`: Port advertised in urls (optional, defaults to `port`)
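Example (IP and port are illustrative):
```elixir
config :pleroma, :gopher,
  enabled: true,
  ip: {0, 0, 0, 0},
  port: 9999
```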
## Authentication
### :admin_token
Allows setting a token that can be used to authenticate with the admin API without using an actual user, by giving it as the `admin_token` parameter or the `x-admin-token` HTTP header. Example:
```elixir
config :pleroma, :admin_token, "somerandomtoken"
```
You can then do
```shell
curl "http://localhost:4000/api/pleroma/admin/users/invites?admin_token=somerandomtoken"
```
or
```shell
curl -H "X-Admin-Token: somerandomtoken" "http://localhost:4000/api/pleroma/admin/users/invites"
```
Warning: using this feature is discouraged because of the associated security risk: a static, rarely changed instance-wide token is much weaker than the email-password pair of a real admin user; consider using HTTP Basic Auth or OAuth-based authentication instead.
### :auth
Authentication / authorization settings.
* `auth_template`: authentication form template. By default it's `show.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/show.html.eex`.
* `oauth_consumer_template`: OAuth consumer mode authentication form template. By default it's `consumer.html` which corresponds to `lib/pleroma/web/templates/o_auth/o_auth/consumer.html.eex`.
* `oauth_consumer_strategies`: the list of enabled OAuth consumer strategies; by default it's set by `OAUTH_CONSUMER_STRATEGIES` environment variable. Each entry in this space-delimited string should be of format `<strategy>` or `<strategy>:<dependency>` (e.g. `twitter` or `keycloak:ueberauth_keycloak_strategy` in case dependency is named differently than `ueberauth_<strategy>`).
### Pleroma.Web.Auth.Authenticator
* `Pleroma.Web.Auth.PleromaAuthenticator`: default database authenticator.
* `Pleroma.Web.Auth.LDAPAuthenticator`: LDAP authentication.
### :ldap
Use LDAP for user authentication. When a user logs in to the Pleroma
instance, the name and password will be verified by trying to authenticate
(bind) to an LDAP server. If a user exists in the LDAP directory but there
is no account with the same name yet on the Pleroma instance then a new
Pleroma account will be created with the same name as the LDAP user name.
* `enabled`: enables LDAP authentication
* `host`: LDAP server hostname
* `port`: LDAP port, e.g. 389 or 636
* `ssl`: true to use SSL, usually implies the port 636
* `sslopts`: additional SSL options
* `tls`: true to start TLS, usually implies the port 389
* `tlsopts`: additional TLS options
* `base`: LDAP base, e.g. "dc=example,dc=com"
* `uid`: LDAP attribute name to authenticate the user, e.g. when "cn", the filter will be "cn=username,base"
Note, if your LDAP server is an Active Directory server the correct value is commonly `uid: "cn"`, but if you use an
OpenLDAP server the value may be `uid: "uid"`.
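Example (host, base, and attribute are illustrative):
```elixir
config :pleroma, :ldap,
  enabled: true,
  host: "ldap.example.com",
  port: 636,
  ssl: true,
  base: "dc=example,dc=com",
  uid: "cn"
```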
### OAuth consumer mode
OAuth consumer mode allows sign in / sign up via external OAuth providers (e.g. Twitter, Facebook, Google, Microsoft, etc.).
Implementation is based on Ueberauth; see the list of [available strategies](https://github.com/ueberauth/ueberauth/wiki/List-of-Strategies).
!!! note
Each strategy is shipped as a separate dependency; in order to get the strategies, run `OAUTH_CONSUMER_STRATEGIES="..." mix deps.get`, e.g. `OAUTH_CONSUMER_STRATEGIES="twitter facebook google microsoft" mix deps.get`. The server should also be started with `OAUTH_CONSUMER_STRATEGIES="..." mix phx.server` in case you enable any strategies.
!!! note
Each strategy requires separate setup (on external provider side and Pleroma side). Below are the guidelines on setting up most popular strategies.
!!! note
Make sure that `"SameSite=Lax"` is set in `extra_cookie_attrs` when you have this feature enabled. OAuth consumer mode will not work with `"SameSite=Strict"`
* For Twitter, [register an app](https://developer.twitter.com/en/apps), configure callback URL to https://<your_host>/oauth/twitter/callback
* For Facebook, [register an app](https://developers.facebook.com/apps), configure callback URL to https://<your_host>/oauth/facebook/callback, enable Facebook Login service at https://developers.facebook.com/apps/<app_id>/fb-login/settings/
* For Google, [register an app](https://console.developers.google.com), configure callback URL to https://<your_host>/oauth/google/callback
* For Microsoft, [register an app](https://portal.azure.com), configure callback URL to https://<your_host>/oauth/microsoft/callback
Once the app is configured on external OAuth provider side, add app's credentials and strategy-specific settings (if any — e.g. see Microsoft below) to `config/prod.secret.exs`,
per the strategy's documentation (e.g. [ueberauth_twitter](https://github.com/ueberauth/ueberauth_twitter)). Example config based on environment variables:
```elixir
# Twitter
config :ueberauth, Ueberauth.Strategy.Twitter.OAuth,
consumer_key: System.get_env("TWITTER_CONSUMER_KEY"),
consumer_secret: System.get_env("TWITTER_CONSUMER_SECRET")
# Facebook
config :ueberauth, Ueberauth.Strategy.Facebook.OAuth,
client_id: System.get_env("FACEBOOK_APP_ID"),
client_secret: System.get_env("FACEBOOK_APP_SECRET"),
redirect_uri: System.get_env("FACEBOOK_REDIRECT_URI")
# Google
config :ueberauth, Ueberauth.Strategy.Google.OAuth,
client_id: System.get_env("GOOGLE_CLIENT_ID"),
client_secret: System.get_env("GOOGLE_CLIENT_SECRET"),
redirect_uri: System.get_env("GOOGLE_REDIRECT_URI")
# Microsoft
config :ueberauth, Ueberauth.Strategy.Microsoft.OAuth,
client_id: System.get_env("MICROSOFT_CLIENT_ID"),
client_secret: System.get_env("MICROSOFT_CLIENT_SECRET")
config :ueberauth, Ueberauth,
providers: [
microsoft: {Ueberauth.Strategy.Microsoft, [callback_params: []]}
]
# Keycloak
# Note: make sure to add `keycloak:ueberauth_keycloak_strategy` entry to `OAUTH_CONSUMER_STRATEGIES` environment variable
keycloak_url = "https://publicly-reachable-keycloak-instance.org:8080"
config :ueberauth, Ueberauth.Strategy.Keycloak.OAuth,
client_id: System.get_env("KEYCLOAK_CLIENT_ID"),
client_secret: System.get_env("KEYCLOAK_CLIENT_SECRET"),
site: keycloak_url,
authorize_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/auth",
token_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/token",
userinfo_url: "#{keycloak_url}/auth/realms/master/protocol/openid-connect/userinfo",
token_method: :post
config :ueberauth, Ueberauth,
providers: [
keycloak: {Ueberauth.Strategy.Keycloak, [uid_field: :email]}
]
```
### OAuth 2.0 provider - :oauth2
Configure OAuth 2 provider capabilities:
* `token_expires_in` - The lifetime in seconds of the access token.
* `issue_new_refresh_token` - Whether to keep the old refresh token or generate a new one when obtaining an access token.
* `clean_expired_tokens` - Enable a background job to clean expired oauth tokens. Defaults to `false`.
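Example (values are illustrative):
```elixir
config :pleroma, :oauth2,
  token_expires_in: 600,
  issue_new_refresh_token: true,
  clean_expired_tokens: false
```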
## Link parsing
### :uri_schemes
* `valid_schemes`: List of URI schemes that are considered valid for a URL.
### Pleroma.Formatter
Configuration for Pleroma's link formatter which parses mentions, hashtags, and URLs.
* `class` - specify the class to be added to the generated link (default: `false`)
* `rel` - specify the rel attribute (default: `ugc`)
* `new_window` - adds `target="_blank"` attribute (default: `false`)
* `truncate` - Set to a number to truncate URLs longer than the number. Truncated URLs will end in `...` (default: `false`)
* `strip_prefix` - Strip the scheme prefix (default: `false`)
* `extra` - link URLs with rarely used schemes (magnet, ipfs, irc, etc.) (default: `true`)
* `validate_tld` - Set to `false` to disable TLD validation for URLs/emails. Can be set to `:no_scheme` to validate TLDs only for URLs without a scheme (e.g. `example.com` will be validated, but `http://example.loki` won't) (default: `:no_scheme`)
Example:
```elixir
config :pleroma, Pleroma.Formatter,
class: false,
rel: "ugc",
new_window: false,
truncate: false,
strip_prefix: false,
extra: true,
validate_tld: :no_scheme
```
## Custom Runtime Modules (`:modules`)
* `runtime_dir`: A path to custom Elixir modules (such as MRF policies).
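Example (the path is illustrative):
```elixir
config :pleroma, :modules,
  runtime_dir: "instance/modules"
```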
## :configurable_from_database
Boolean, enables/disables in-database configuration. Read [Transfering the config to/from the database](../administration/CLI_tasks/config.md) for more information.
## :database_config_whitelist
List of valid configuration sections which are allowed to be configured from the
database. Settings stored in the database before the whitelist is configured are
still applied, so it is suggested to only use the whitelist on instances that
have not migrated the config to the database.
Example:
```elixir
config :pleroma, :database_config_whitelist, [
{:pleroma, :instance},
{:pleroma, Pleroma.Web.Metadata},
{:auto_linker}
]
```
### Multi-factor authentication - :two_factor_authentication
* `totp` - a list containing TOTP configuration
- `digits` - Determines the length of a one-time pass-code in characters. Defaults to 6 characters.
- `period` - a period for which the TOTP code will be valid in seconds. Defaults to 30 seconds.
* `backup_codes` - a list containing backup codes configuration
- `number` - number of backup codes to generate.
- `length` - backup code length. Defaults to 16 characters.
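Example (values are illustrative):
```elixir
config :pleroma, :two_factor_authentication,
  totp: [digits: 6, period: 30],
  backup_codes: [number: 5, length: 16]
```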
## Restrict entities access for unauthenticated users
### :restrict_unauthenticated
Restrict access for unauthenticated users to timelines (public and federated), user profiles and statuses.
* `timelines`: public and federated timelines
* `local`: public timeline
* `federated`: federated timeline (includes public timeline)
* `profiles`: user profiles
* `local`
* `remote`
* `activities`: statuses
* `local`
* `remote`
Note: when `:instance, :public` is set to `false`, all `:restrict_unauthenticated` items are effectively set to `true` by default. If you'd like to allow unauthenticated access to specific API endpoints on a private instance, please explicitly set `:restrict_unauthenticated` to a non-default value in `config/prod.secret.exs`.
Note: setting `restrict_unauthenticated/timelines/local` to `true` makes no practical sense if `restrict_unauthenticated/timelines/federated` is set to `false` (since local public activities will still be delivered to unauthenticated users as part of the federated timeline).
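A sketch of these settings, assuming each group is a map of boolean flags (values are illustrative):
```elixir
config :pleroma, :restrict_unauthenticated,
  timelines: %{local: true, federated: true},
  profiles: %{local: false, remote: false},
  activities: %{local: false, remote: false}
```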
## Pleroma.Web.ApiSpec.CastAndValidate
* `:strict` a boolean, enables strict input validation (useful in development, not recommended in production). Defaults to `false`.
## :instances_favicons
Control favicons for instances.
* `enabled`: Allow/disallow displaying and getting instances favicons
## Pleroma.User.Backup
!!! note
Requires enabled email
* `:purge_after_days` an integer, remove backup archives after N days.
* `:limit_days` an integer, limit users to exporting no more often than once per N days.
* `:dir` a string with a path to backup temporary directory or `nil` to let Pleroma choose temporary directory in the following order:
1. the directory named by the TMPDIR environment variable
2. the directory named by the TEMP environment variable
3. the directory named by the TMP environment variable
4. C:\TMP on Windows or /tmp on Unix-like operating systems
5. as a last resort, the current working directory
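Example (values are illustrative):
```elixir
config :pleroma, Pleroma.User.Backup,
  purge_after_days: 30,
  limit_days: 7,
  dir: nil
```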
## Frontend management
Frontends in Pleroma are swappable - you can specify which one to use here.
You can set frontends for the keys `primary` and `admin` with the options `name` and `ref`. This will then make Pleroma serve the frontend from a folder constructed by concatenating the instance static path, `frontends`, and the name and ref.
The key `primary` refers to the frontend that will be served by default for general requests. The key `admin` refers to the frontend that will be served at the `/pleroma/admin` path.
If you don't set anything here, the bundled frontends will be used.
Example:
```elixir
config :pleroma, :frontends,
primary: %{
"name" => "pleroma",
"ref" => "stable"
},
admin: %{
"name" => "admin",
"ref" => "develop"
}
```
This would serve the frontend from the folder at `$instance_static/frontends/pleroma/stable`. You have to copy the frontend into this folder yourself. You can choose the name and ref any way you like, but they will be used by mix tasks to automate installation in the future, with the name referring to the project and the ref referring to a commit.
## Ephemeral activities (Pleroma.Workers.PurgeExpiredActivity)
Settings to enable and configure expiration for ephemeral activities
* `:enabled` - enables ephemeral activities creation
* `:min_lifetime` - minimum lifetime for ephemeral activities (in seconds). Default: 10 minutes.
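Example (the lifetime is illustrative, in seconds):
```elixir
config :pleroma, Pleroma.Workers.PurgeExpiredActivity,
  enabled: true,
  min_lifetime: 600
```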
diff --git a/installation/pleroma.nginx b/installation/pleroma.nginx
index d613befd2..9890cb2b1 100644
--- a/installation/pleroma.nginx
+++ b/installation/pleroma.nginx
@@ -1,101 +1,96 @@
# default nginx site config for Pleroma
#
# Simple installation instructions:
# 1. Install your TLS certificate, possibly using Let's Encrypt.
# 2. Replace 'example.tld' with your instance's domain wherever it appears.
# 3. Copy this file to /etc/nginx/sites-available/ and then add a symlink to it
# in /etc/nginx/sites-enabled/ and run 'nginx -s reload' or restart nginx.
proxy_cache_path /tmp/pleroma-media-cache levels=1:2 keys_zone=pleroma_media_cache:10m max_size=10g
inactive=720m use_temp_path=off;
# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
# and `localhost.` resolves to [::0] on some systems: see issue #930
upstream phoenix {
server 127.0.0.1:4000 max_fails=5 fail_timeout=60s;
}
server {
server_name example.tld;
listen 80;
listen [::]:80;
# Uncomment this if you need to use the 'webroot' method with certbot. Make sure
# that the directory exists and that it is accessible by the webserver. If you followed
# the guide, you already ran 'mkdir -p /var/lib/letsencrypt' to create the folder.
# You may need to load this file with the ssl server block commented out, run certbot
# to get the certificate, and then uncomment it.
#
# location ~ /\.well-known/acme-challenge {
# root /var/lib/letsencrypt/;
# }
location / {
return 301 https://$server_name$request_uri;
}
}
# Enable SSL session caching for improved performance
ssl_session_cache shared:ssl_session_cache:10m;
server {
server_name example.tld;
listen 443 ssl http2;
listen [::]:443 ssl http2;
ssl_session_timeout 1d;
ssl_session_cache shared:MozSSL:10m; # about 40000 sessions
ssl_session_tickets off;
ssl_trusted_certificate /etc/letsencrypt/live/example.tld/chain.pem;
ssl_certificate /etc/letsencrypt/live/example.tld/fullchain.pem;
ssl_certificate_key /etc/letsencrypt/live/example.tld/privkey.pem;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_ciphers "ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-CHACHA20-POLY1305:ECDHE-RSA-CHACHA20-POLY1305:ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES256-SHA384:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";
ssl_prefer_server_ciphers off;
# In case of an old server with an OpenSSL version of 1.0.2 or below,
# leave only prime256v1 or comment out the following line.
ssl_ecdh_curve X25519:prime256v1:secp384r1:secp521r1;
ssl_stapling on;
ssl_stapling_verify on;
gzip_vary on;
gzip_proxied any;
gzip_comp_level 6;
gzip_buffers 16 8k;
gzip_http_version 1.1;
gzip_types text/plain text/css application/json application/javascript text/xml application/xml application/xml+rss text/javascript application/activity+json application/atom+xml;
# the nginx default is 1m, not enough for large media uploads
client_max_body_size 16m;
ignore_invalid_headers off;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $http_host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
location / {
proxy_pass http://phoenix;
}
location ~ ^/(media|proxy) {
proxy_cache pleroma_media_cache;
slice 1m;
proxy_cache_key $host$uri$is_args$args$slice_range;
proxy_set_header Range $slice_range;
proxy_cache_valid 200 206 301 304 1h;
proxy_cache_lock on;
proxy_ignore_client_abort on;
proxy_buffering on;
chunked_transfer_encoding on;
proxy_pass http://phoenix;
}
-
- location /api/fedsocket/v1 {
- proxy_request_buffering off;
- proxy_pass http://phoenix/api/fedsocket/v1;
- }
}
diff --git a/lib/pleroma/application.ex b/lib/pleroma/application.ex
index 7c4cd9626..8f08a6222 100644
--- a/lib/pleroma/application.ex
+++ b/lib/pleroma/application.ex
@@ -1,276 +1,275 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Application do
use Application
import Cachex.Spec
alias Pleroma.Config
require Logger
@name Mix.Project.config()[:name]
@version Mix.Project.config()[:version]
@repository Mix.Project.config()[:source_url]
@env Mix.env()
def name, do: @name
def version, do: @version
def named_version, do: @name <> " " <> @version
def repository, do: @repository
def user_agent do
if Process.whereis(Pleroma.Web.Endpoint) do
case Config.get([:http, :user_agent], :default) do
:default ->
info = "#{Pleroma.Web.base_url()} <#{Config.get([:instance, :email], "")}>"
named_version() <> "; " <> info
custom ->
custom
end
else
# fallback, if endpoint is not started yet
"Pleroma Data Loader"
end
end
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
# Scrubbers are compiled at runtime and therefore will cause a conflict
# every time the application is restarted, so we disable module
# conflicts at runtime
Code.compiler_options(ignore_module_conflict: true)
# Disable warnings_as_errors at runtime, it breaks Phoenix live reload
# due to protocol consolidation warnings
Code.compiler_options(warnings_as_errors: false)
Pleroma.Telemetry.Logger.attach()
Config.Holder.save_default()
Pleroma.HTML.compile_scrubbers()
Pleroma.Config.Oban.warn()
Config.DeprecationWarnings.warn()
Pleroma.Web.Plugs.HTTPSecurityPlug.warn_if_disabled()
Pleroma.ApplicationRequirements.verify!()
setup_instrumenters()
load_custom_modules()
Pleroma.Docs.JSON.compile()
adapter = Application.get_env(:tesla, :adapter)
if adapter == Tesla.Adapter.Gun do
if version = Pleroma.OTPVersion.version() do
[major, minor] =
version
|> String.split(".")
|> Enum.map(&String.to_integer/1)
|> Enum.take(2)
if (major == 22 and minor < 2) or major < 22 do
raise "
!!!OTP VERSION WARNING!!!
You are using gun adapter with OTP version #{version}, which doesn't support correct handling of unordered certificates chains. Please update your Erlang/OTP to at least 22.2.
"
end
else
raise "
!!!OTP VERSION WARNING!!!
To support correct handling of unordered certificates chains - OTP version must be > 22.2.
"
end
end
# Define workers and child supervisors to be supervised
children =
[
Pleroma.Repo,
Config.TransferTask,
Pleroma.Emoji,
Pleroma.Web.Plugs.RateLimiter.Supervisor
] ++
cachex_children() ++
http_children(adapter, @env) ++
[
Pleroma.Stats,
Pleroma.JobQueueMonitor,
{Majic.Pool, [name: Pleroma.MajicPool, pool_size: Config.get([:majic_pool, :size], 2)]},
{Oban, Config.get(Oban)}
] ++
task_children(@env) ++
dont_run_in_test(@env) ++
chat_child(chat_enabled?()) ++
[
Pleroma.Web.Endpoint,
Pleroma.Gopher.Server
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Pleroma.Supervisor]
Supervisor.start_link(children, opts)
end
def load_custom_modules do
dir = Config.get([:modules, :runtime_dir])
if dir && File.exists?(dir) do
dir
|> Pleroma.Utils.compile_dir()
|> case do
{:error, _errors, _warnings} ->
raise "Invalid custom modules"
{:ok, modules, _warnings} ->
if @env != :test do
Enum.each(modules, fn mod ->
Logger.info("Custom module loaded: #{inspect(mod)}")
end)
end
:ok
end
end
end
defp setup_instrumenters do
require Prometheus.Registry
if Application.get_env(:prometheus, Pleroma.Repo.Instrumenter) do
:ok =
:telemetry.attach(
"prometheus-ecto",
[:pleroma, :repo, :query],
&Pleroma.Repo.Instrumenter.handle_event/4,
%{}
)
Pleroma.Repo.Instrumenter.setup()
end
Pleroma.Web.Endpoint.MetricsExporter.setup()
Pleroma.Web.Endpoint.PipelineInstrumenter.setup()
# Note: disabled until prometheus-phx is integrated into prometheus-phoenix:
# Pleroma.Web.Endpoint.Instrumenter.setup()
PrometheusPhx.setup()
end
defp cachex_children do
[
build_cachex("used_captcha", ttl_interval: seconds_valid_interval()),
build_cachex("user", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
build_cachex("object", default_ttl: 25_000, ttl_interval: 1000, limit: 2500),
build_cachex("rich_media", default_ttl: :timer.minutes(120), limit: 5000),
build_cachex("scrubber", limit: 2500),
build_cachex("idempotency", expiration: idempotency_expiration(), limit: 2500),
build_cachex("web_resp", limit: 2500),
build_cachex("emoji_packs", expiration: emoji_packs_expiration(), limit: 10),
build_cachex("failed_proxy_url", limit: 2500),
build_cachex("banned_urls", default_ttl: :timer.hours(24 * 30), limit: 5_000),
build_cachex("chat_message_id_idempotency_key",
expiration: chat_message_id_idempotency_key_expiration(),
limit: 500_000
)
]
end
defp emoji_packs_expiration,
do: expiration(default: :timer.seconds(5 * 60), interval: :timer.seconds(60))
defp idempotency_expiration,
do: expiration(default: :timer.seconds(6 * 60 * 60), interval: :timer.seconds(60))
defp chat_message_id_idempotency_key_expiration,
do: expiration(default: :timer.minutes(2), interval: :timer.seconds(60))
defp seconds_valid_interval,
do: :timer.seconds(Config.get!([Pleroma.Captcha, :seconds_valid]))
@spec build_cachex(String.t(), keyword()) :: map()
def build_cachex(type, opts),
do: %{
id: String.to_atom("cachex_" <> type),
start: {Cachex, :start_link, [String.to_atom(type <> "_cache"), opts]},
type: :worker
}
defp chat_enabled?, do: Config.get([:chat, :enabled])
defp dont_run_in_test(env) when env in [:test, :benchmark], do: []
defp dont_run_in_test(_) do
[
{Registry,
[
name: Pleroma.Web.Streamer.registry(),
keys: :duplicate,
partitions: System.schedulers_online()
- ]},
- Pleroma.Web.FedSockets.Supervisor
+ ]}
]
end
defp chat_child(true) do
[
Pleroma.Web.ChatChannel.ChatChannelState,
{Phoenix.PubSub, [name: Pleroma.PubSub, adapter: Phoenix.PubSub.PG2]}
]
end
defp chat_child(_), do: []
defp task_children(:test) do
[
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
}
]
end
defp task_children(_) do
[
%{
id: :web_push_init,
start: {Task, :start_link, [&Pleroma.Web.Push.init/0]},
restart: :temporary
},
%{
id: :internal_fetch_init,
start: {Task, :start_link, [&Pleroma.Web.ActivityPub.InternalFetchActor.init/0]},
restart: :temporary
}
]
end
# start hackney and gun pools in tests
defp http_children(_, :test) do
http_children(Tesla.Adapter.Hackney, nil) ++ http_children(Tesla.Adapter.Gun, nil)
end
defp http_children(Tesla.Adapter.Hackney, _) do
pools = [:federation, :media]
pools =
if Config.get([Pleroma.Upload, :proxy_remote]) do
[:upload | pools]
else
pools
end
for pool <- pools do
options = Config.get([:hackney_pools, pool])
:hackney_pool.child_spec(pool, options)
end
end
defp http_children(Tesla.Adapter.Gun, _) do
Pleroma.Gun.ConnectionPool.children() ++
[{Task, &Pleroma.HTTP.AdapterHelper.Gun.limiter_setup/0}]
end
defp http_children(_, _), do: []
end
diff --git a/lib/pleroma/object/fetcher.ex b/lib/pleroma/object/fetcher.ex
index ae4301738..20d8f687d 100644
--- a/lib/pleroma/object/fetcher.ex
+++ b/lib/pleroma/object/fetcher.ex
@@ -1,267 +1,254 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Object.Fetcher do
alias Pleroma.HTTP
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Repo
alias Pleroma.Signature
alias Pleroma.Web.ActivityPub.InternalFetchActor
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Federator
- alias Pleroma.Web.FedSockets
require Logger
require Pleroma.Constants
defp touch_changeset(changeset) do
updated_at =
NaiveDateTime.utc_now()
|> NaiveDateTime.truncate(:second)
Ecto.Changeset.put_change(changeset, :updated_at, updated_at)
end
defp maybe_reinject_internal_fields(%{data: %{} = old_data}, new_data) do
internal_fields = Map.take(old_data, Pleroma.Constants.object_internal_fields())
Map.merge(new_data, internal_fields)
end
defp maybe_reinject_internal_fields(_, new_data), do: new_data
@spec reinject_object(struct(), map()) :: {:ok, Object.t()} | {:error, any()}
defp reinject_object(%Object{data: %{"type" => "Question"}} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with data <- maybe_reinject_internal_fields(object, new_data),
{:ok, data, _} <- ObjectValidator.validate(data, %{}),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
{:ok, object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
{:error, e}
end
end
defp reinject_object(%Object{} = object, new_data) do
Logger.debug("Reinjecting object #{new_data["id"]}")
with new_data <- Transmogrifier.fix_object(new_data),
data <- maybe_reinject_internal_fields(object, new_data),
changeset <- Object.change(object, %{data: data}),
changeset <- touch_changeset(changeset),
{:ok, object} <- Repo.insert_or_update(changeset),
{:ok, object} <- Object.set_cache(object) do
{:ok, object}
else
e ->
Logger.error("Error while processing object: #{inspect(e)}")
{:error, e}
end
end
def refetch_object(%Object{data: %{"id" => id}} = object) do
with {:local, false} <- {:local, Object.local?(object)},
{:ok, new_data} <- fetch_and_contain_remote_object_from_id(id),
{:ok, object} <- reinject_object(object, new_data) do
{:ok, object}
else
{:local, true} -> {:ok, object}
e -> {:error, e}
end
end
# Note: will create a Create activity, which we need internally at the moment.
def fetch_object_from_id(id, options \\ []) do
with {_, nil} <- {:fetch_object, Object.get_cached_by_ap_id(id)},
{_, true} <- {:allowed_depth, Federator.allowed_thread_distance?(options[:depth])},
{_, {:ok, data}} <- {:fetch, fetch_and_contain_remote_object_from_id(id)},
{_, nil} <- {:normalize, Object.normalize(data, false)},
params <- prepare_activity_params(data),
{_, :ok} <- {:containment, Containment.contain_origin(id, params)},
{_, {:ok, activity}} <-
{:transmogrifier, Transmogrifier.handle_incoming(params, options)},
{_, _data, %Object{} = object} <-
{:object, data, Object.normalize(activity, false)} do
{:ok, object}
else
{:allowed_depth, false} ->
{:error, "Max thread distance exceeded."}
{:containment, _} ->
{:error, "Object containment failed."}
{:transmogrifier, {:error, {:reject, e}}} ->
{:reject, e}
{:transmogrifier, _} = e ->
{:error, e}
{:object, data, nil} ->
reinject_object(%Object{}, data)
{:normalize, object = %Object{}} ->
{:ok, object}
{:fetch_object, %Object{} = object} ->
{:ok, object}
{:fetch, {:error, error}} ->
{:error, error}
e ->
e
end
end
defp prepare_activity_params(data) do
%{
"type" => "Create",
"to" => data["to"] || [],
"cc" => data["cc"] || [],
# Should we seriously keep this attributedTo thing?
"actor" => data["actor"] || data["attributedTo"],
"object" => data
}
end
def fetch_object_from_id!(id, options \\ []) do
with {:ok, object} <- fetch_object_from_id(id, options) do
object
else
{:error, %Tesla.Mock.Error{}} ->
nil
{:error, "Object has been deleted"} ->
nil
{:reject, reason} ->
Logger.info("Rejected #{id} while fetching: #{inspect(reason)}")
nil
e ->
Logger.error("Error while fetching #{id}: #{inspect(e)}")
nil
end
end
defp make_signature(id, date) do
uri = URI.parse(id)
signature =
InternalFetchActor.get_actor()
|> Signature.sign(%{
"(request-target)": "get #{uri.path}",
host: uri.host,
date: date
})
{"signature", signature}
end
defp sign_fetch(headers, id, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
[make_signature(id, date) | headers]
else
headers
end
end
defp maybe_date_fetch(headers, date) do
if Pleroma.Config.get([:activitypub, :sign_object_fetches]) do
[{"date", date} | headers]
else
headers
end
end
- def fetch_and_contain_remote_object_from_id(prm, opts \\ [])
+ def fetch_and_contain_remote_object_from_id(id)
- def fetch_and_contain_remote_object_from_id(%{"id" => id}, opts),
- do: fetch_and_contain_remote_object_from_id(id, opts)
+ def fetch_and_contain_remote_object_from_id(%{"id" => id}),
+ do: fetch_and_contain_remote_object_from_id(id)
- def fetch_and_contain_remote_object_from_id(id, opts) when is_binary(id) do
+ def fetch_and_contain_remote_object_from_id(id) when is_binary(id) do
Logger.debug("Fetching object #{id} via AP")
with {:scheme, true} <- {:scheme, String.starts_with?(id, "http")},
- {:ok, body} <- get_object(id, opts),
+ {:ok, body} <- get_object(id),
{:ok, data} <- safe_json_decode(body),
:ok <- Containment.contain_origin_from_id(id, data) do
{:ok, data}
else
{:scheme, _} ->
{:error, "Unsupported URI scheme"}
{:error, e} ->
{:error, e}
e ->
{:error, e}
end
end
- def fetch_and_contain_remote_object_from_id(_id, _opts),
+ def fetch_and_contain_remote_object_from_id(_id),
do: {:error, "id must be a string"}
- defp get_object(id, opts) do
- with false <- Keyword.get(opts, :force_http, false),
- {:ok, fedsocket} <- FedSockets.get_or_create_fed_socket(id) do
- Logger.debug("fetching via fedsocket - #{inspect(id)}")
- FedSockets.fetch(fedsocket, id)
- else
- _other ->
- Logger.debug("fetching via http - #{inspect(id)}")
- get_object_http(id)
- end
- end
-
- defp get_object_http(id) do
+ defp get_object(id) do
date = Pleroma.Signature.signed_date()
headers =
[{"accept", "application/activity+json"}]
|> maybe_date_fetch(date)
|> sign_fetch(id, date)
case HTTP.get(id, headers) do
{:ok, %{body: body, status: code, headers: headers}} when code in 200..299 ->
case List.keyfind(headers, "content-type", 0) do
{_, content_type} ->
case Plug.Conn.Utils.media_type(content_type) do
{:ok, "application", "activity+json", _} ->
{:ok, body}
{:ok, "application", "ld+json",
%{"profile" => "https://www.w3.org/ns/activitystreams"}} ->
{:ok, body}
_ ->
{:error, {:content_type, content_type}}
end
_ ->
{:error, {:content_type, nil}}
end
{:ok, %{status: code}} when code in [404, 410] ->
{:error, "Object has been deleted"}
{:error, e} ->
{:error, e}
e ->
{:error, e}
end
end
defp safe_json_decode(nil), do: {:ok, nil}
defp safe_json_decode(json), do: Jason.decode(json)
end
diff --git a/lib/pleroma/signature.ex b/lib/pleroma/signature.ex
index e388993b7..3aa6909d2 100644
--- a/lib/pleroma/signature.ex
+++ b/lib/pleroma/signature.ex
@@ -1,74 +1,74 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Signature do
@behaviour HTTPSignatures.Adapter
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.Keys
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
def key_id_to_actor_id(key_id) do
uri =
URI.parse(key_id)
|> Map.put(:fragment, nil)
uri =
if not is_nil(uri.path) and String.ends_with?(uri.path, "/publickey") do
Map.put(uri, :path, String.replace(uri.path, "/publickey", ""))
else
uri
end
maybe_ap_id = URI.to_string(uri)
case ObjectValidators.ObjectID.cast(maybe_ap_id) do
{:ok, ap_id} ->
{:ok, ap_id}
_ ->
case Pleroma.Web.WebFinger.finger(maybe_ap_id) do
%{"ap_id" => ap_id} -> {:ok, ap_id}
_ -> {:error, maybe_ap_id}
end
end
end
def fetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
- {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id, force_http: true) do
+ {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
{:ok, public_key}
else
e ->
{:error, e}
end
end
def refetch_public_key(conn) do
with %{"keyId" => kid} <- HTTPSignatures.signature_for_conn(conn),
{:ok, actor_id} <- key_id_to_actor_id(kid),
- {:ok, _user} <- ActivityPub.make_user_from_ap_id(actor_id, force_http: true),
- {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id, force_http: true) do
+ {:ok, _user} <- ActivityPub.make_user_from_ap_id(actor_id),
+ {:ok, public_key} <- User.get_public_key_for_ap_id(actor_id) do
{:ok, public_key}
else
e ->
{:error, e}
end
end
def sign(%User{} = user, headers) do
with {:ok, %{keys: keys}} <- User.ensure_keys_present(user),
{:ok, private_key, _} <- Keys.keys_from_pem(keys) do
HTTPSignatures.sign(private_key, user.ap_id <> "#main-key", headers)
end
end
def signed_date, do: signed_date(NaiveDateTime.utc_now())
def signed_date(%NaiveDateTime{} = date) do
Timex.format!(date, "{WDshort}, {0D} {Mshort} {YYYY} {h24}:{m}:{s} GMT")
end
end
diff --git a/lib/pleroma/user.ex b/lib/pleroma/user.ex
index 8e4ec8064..a240579f3 100644
--- a/lib/pleroma/user.ex
+++ b/lib/pleroma/user.ex
@@ -1,2386 +1,2386 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.User do
use Ecto.Schema
import Ecto.Changeset
import Ecto.Query
import Ecto, only: [assoc: 2]
alias Ecto.Multi
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Conversation.Participation
alias Pleroma.Delivery
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.Emoji
alias Pleroma.FollowingRelationship
alias Pleroma.Formatter
alias Pleroma.HTML
alias Pleroma.Keys
alias Pleroma.MFA
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Registration
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.UserRelationship
alias Pleroma.Web
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.CommonAPI
alias Pleroma.Web.CommonAPI.Utils, as: CommonUtils
alias Pleroma.Web.OAuth
alias Pleroma.Web.RelMe
alias Pleroma.Workers.BackgroundWorker
require Logger
@type t :: %__MODULE__{}
@type account_status ::
:active
| :deactivated
| :password_reset_pending
| :confirmation_pending
| :approval_pending
@primary_key {:id, FlakeId.Ecto.CompatType, autogenerate: true}
# credo:disable-for-next-line Credo.Check.Readability.MaxLineLength
@email_regex ~r/^[a-zA-Z0-9.!#$%&'*+\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$/
@strict_local_nickname_regex ~r/^[a-zA-Z\d]+$/
@extended_local_nickname_regex ~r/^[a-zA-Z\d_-]+$/
# AP ID user relationships (blocks, mutes etc.)
# Format: [rel_type: [outgoing_rel: :outgoing_rel_target, incoming_rel: :incoming_rel_source]]
@user_relationships_config [
block: [
blocker_blocks: :blocked_users,
blockee_blocks: :blocker_users
],
mute: [
muter_mutes: :muted_users,
mutee_mutes: :muter_users
],
reblog_mute: [
reblog_muter_mutes: :reblog_muted_users,
reblog_mutee_mutes: :reblog_muter_users
],
notification_mute: [
notification_muter_mutes: :notification_muted_users,
notification_mutee_mutes: :notification_muter_users
],
# Note: `inverse_subscription` relationship is inverse: subscriber acts as relationship target
inverse_subscription: [
subscribee_subscriptions: :subscriber_users,
subscriber_subscriptions: :subscribee_users
]
]
schema "users" do
field(:bio, :string, default: "")
field(:raw_bio, :string)
field(:email, :string)
field(:name, :string)
field(:nickname, :string)
field(:password_hash, :string)
field(:password, :string, virtual: true)
field(:password_confirmation, :string, virtual: true)
field(:keys, :string)
field(:public_key, :string)
field(:ap_id, :string)
field(:avatar, :map, default: %{})
field(:local, :boolean, default: true)
field(:follower_address, :string)
field(:following_address, :string)
field(:search_rank, :float, virtual: true)
field(:search_type, :integer, virtual: true)
field(:tags, {:array, :string}, default: [])
field(:last_refreshed_at, :naive_datetime_usec)
field(:last_digest_emailed_at, :naive_datetime)
field(:banner, :map, default: %{})
field(:background, :map, default: %{})
field(:note_count, :integer, default: 0)
field(:follower_count, :integer, default: 0)
field(:following_count, :integer, default: 0)
field(:is_locked, :boolean, default: false)
field(:confirmation_pending, :boolean, default: false)
field(:password_reset_pending, :boolean, default: false)
field(:approval_pending, :boolean, default: false)
field(:registration_reason, :string, default: nil)
field(:confirmation_token, :string, default: nil)
field(:default_scope, :string, default: "public")
field(:domain_blocks, {:array, :string}, default: [])
field(:deactivated, :boolean, default: false)
field(:no_rich_text, :boolean, default: false)
field(:ap_enabled, :boolean, default: false)
field(:is_moderator, :boolean, default: false)
field(:is_admin, :boolean, default: false)
field(:show_role, :boolean, default: true)
field(:mastofe_settings, :map, default: nil)
field(:uri, ObjectValidators.Uri, default: nil)
field(:hide_followers_count, :boolean, default: false)
field(:hide_follows_count, :boolean, default: false)
field(:hide_followers, :boolean, default: false)
field(:hide_follows, :boolean, default: false)
field(:hide_favorites, :boolean, default: true)
field(:pinned_activities, {:array, :string}, default: [])
field(:email_notifications, :map, default: %{"digest" => false})
field(:mascot, :map, default: nil)
field(:emoji, :map, default: %{})
field(:pleroma_settings_store, :map, default: %{})
field(:fields, {:array, :map}, default: [])
field(:raw_fields, {:array, :map}, default: [])
field(:is_discoverable, :boolean, default: false)
field(:invisible, :boolean, default: false)
field(:allow_following_move, :boolean, default: true)
field(:skip_thread_containment, :boolean, default: false)
field(:actor_type, :string, default: "Person")
field(:also_known_as, {:array, :string}, default: [])
field(:inbox, :string)
field(:shared_inbox, :string)
field(:accepts_chat_messages, :boolean, default: nil)
embeds_one(
:notification_settings,
Pleroma.User.NotificationSetting,
on_replace: :update
)
has_many(:notifications, Notification)
has_many(:registrations, Registration)
has_many(:deliveries, Delivery)
has_many(:outgoing_relationships, UserRelationship, foreign_key: :source_id)
has_many(:incoming_relationships, UserRelationship, foreign_key: :target_id)
for {relationship_type,
[
{outgoing_relation, outgoing_relation_target},
{incoming_relation, incoming_relation_source}
]} <- @user_relationships_config do
# Definitions of `has_many` relations: :blocker_blocks, :muter_mutes, :reblog_muter_mutes,
# :notification_muter_mutes, :subscribee_subscriptions
has_many(outgoing_relation, UserRelationship,
foreign_key: :source_id,
where: [relationship_type: relationship_type]
)
# Definitions of `has_many` relations: :blockee_blocks, :mutee_mutes, :reblog_mutee_mutes,
# :notification_mutee_mutes, :subscriber_subscriptions
has_many(incoming_relation, UserRelationship,
foreign_key: :target_id,
where: [relationship_type: relationship_type]
)
# Definitions of `has_many` relations: :blocked_users, :muted_users, :reblog_muted_users,
# :notification_muted_users, :subscriber_users
has_many(outgoing_relation_target, through: [outgoing_relation, :target])
# Definitions of `has_many` relations: :blocker_users, :muter_users, :reblog_muter_users,
# :notification_muter_users, :subscribee_users
has_many(incoming_relation_source, through: [incoming_relation, :source])
end
# `:blocks` is deprecated (replaced with `blocked_users` relation)
field(:blocks, {:array, :string}, default: [])
# `:mutes` is deprecated (replaced with `muted_users` relation)
field(:mutes, {:array, :string}, default: [])
# `:muted_reblogs` is deprecated (replaced with `reblog_muted_users` relation)
field(:muted_reblogs, {:array, :string}, default: [])
# `:muted_notifications` is deprecated (replaced with `notification_muted_users` relation)
field(:muted_notifications, {:array, :string}, default: [])
# `:subscribers` is deprecated (replaced with `subscriber_users` relation)
field(:subscribers, {:array, :string}, default: [])
embeds_one(
:multi_factor_authentication_settings,
MFA.Settings,
on_replace: :delete
)
timestamps()
end
for {_relationship_type, [{_outgoing_relation, outgoing_relation_target}, _]} <-
@user_relationships_config do
# `def blocked_users_relation/2`, `def muted_users_relation/2`,
# `def reblog_muted_users_relation/2`, `def notification_muted_users_relation/2`,
# `def subscriber_users_relation/2`
def unquote(:"#{outgoing_relation_target}_relation")(user, restrict_deactivated? \\ false) do
target_users_query = assoc(user, unquote(outgoing_relation_target))
if restrict_deactivated? do
restrict_deactivated(target_users_query)
else
target_users_query
end
end
# `def blocked_users/2`, `def muted_users/2`, `def reblog_muted_users/2`,
# `def notification_muted_users/2`, `def subscriber_users/2`
def unquote(outgoing_relation_target)(user, restrict_deactivated? \\ false) do
__MODULE__
|> apply(unquote(:"#{outgoing_relation_target}_relation"), [
user,
restrict_deactivated?
])
|> Repo.all()
end
# `def blocked_users_ap_ids/2`, `def muted_users_ap_ids/2`, `def reblog_muted_users_ap_ids/2`,
# `def notification_muted_users_ap_ids/2`, `def subscriber_users_ap_ids/2`
def unquote(:"#{outgoing_relation_target}_ap_ids")(user, restrict_deactivated? \\ false) do
__MODULE__
|> apply(unquote(:"#{outgoing_relation_target}_relation"), [
user,
restrict_deactivated?
])
|> select([u], u.ap_id)
|> Repo.all()
end
end
defdelegate following_count(user), to: FollowingRelationship
defdelegate following(user), to: FollowingRelationship
defdelegate following?(follower, followed), to: FollowingRelationship
defdelegate following_ap_ids(user), to: FollowingRelationship
defdelegate get_follow_requests(user), to: FollowingRelationship
defdelegate search(query, opts \\ []), to: User.Search
@doc """
Dumps Flake Id to SQL-compatible format (16-byte UUID).
E.g. "9pQtDGXuq4p3VlcJEm" -> <<0, 0, 1, 110, 179, 218, 42, 92, 213, 41, 44, 227, 95, 213, 0, 0>>
"""
def binary_id(source_id) when is_binary(source_id) do
with {:ok, dumped_id} <- FlakeId.Ecto.CompatType.dump(source_id) do
dumped_id
else
_ -> source_id
end
end
def binary_id(source_ids) when is_list(source_ids) do
Enum.map(source_ids, &binary_id/1)
end
def binary_id(%User{} = user), do: binary_id(user.id)
@doc "Returns status account"
@spec account_status(User.t()) :: account_status()
def account_status(%User{deactivated: true}), do: :deactivated
def account_status(%User{password_reset_pending: true}), do: :password_reset_pending
def account_status(%User{local: true, approval_pending: true}), do: :approval_pending
def account_status(%User{local: true, confirmation_pending: true}) do
if Config.get([:instance, :account_activation_required]) do
:confirmation_pending
else
:active
end
end
def account_status(%User{}), do: :active
@spec visible_for(User.t(), User.t() | nil) ::
:visible
| :invisible
| :restrict_unauthenticated
| :deactivated
| :confirmation_pending
def visible_for(user, for_user \\ nil)
def visible_for(%User{invisible: true}, _), do: :invisible
def visible_for(%User{id: user_id}, %User{id: user_id}), do: :visible
def visible_for(%User{} = user, nil) do
if restrict_unauthenticated?(user) do
:restrict_unauthenticated
else
visible_account_status(user)
end
end
def visible_for(%User{} = user, for_user) do
if superuser?(for_user) do
:visible
else
visible_account_status(user)
end
end
def visible_for(_, _), do: :invisible
defp restrict_unauthenticated?(%User{local: true}) do
Config.restrict_unauthenticated_access?(:profiles, :local)
end
defp restrict_unauthenticated?(%User{local: _}) do
Config.restrict_unauthenticated_access?(:profiles, :remote)
end
defp visible_account_status(user) do
status = account_status(user)
if status in [:active, :password_reset_pending] do
:visible
else
status
end
end
@spec superuser?(User.t()) :: boolean()
def superuser?(%User{local: true, is_admin: true}), do: true
def superuser?(%User{local: true, is_moderator: true}), do: true
def superuser?(_), do: false
@spec invisible?(User.t()) :: boolean()
def invisible?(%User{invisible: true}), do: true
def invisible?(_), do: false
def avatar_url(user, options \\ []) do
case user.avatar do
%{"url" => [%{"href" => href} | _]} ->
href
_ ->
unless options[:no_default] do
Config.get([:assets, :default_user_avatar], "#{Web.base_url()}/images/avi.png")
end
end
end
def banner_url(user, options \\ []) do
case user.banner do
%{"url" => [%{"href" => href} | _]} -> href
_ -> !options[:no_default] && "#{Web.base_url()}/images/banner.png"
end
end
# Should probably be renamed or removed
def ap_id(%User{nickname: nickname}), do: "#{Web.base_url()}/users/#{nickname}"
def ap_followers(%User{follower_address: fa}) when is_binary(fa), do: fa
def ap_followers(%User{} = user), do: "#{ap_id(user)}/followers"
@spec ap_following(User.t()) :: String.t()
def ap_following(%User{following_address: fa}) when is_binary(fa), do: fa
def ap_following(%User{} = user), do: "#{ap_id(user)}/following"
@spec restrict_deactivated(Ecto.Query.t()) :: Ecto.Query.t()
def restrict_deactivated(query) do
from(u in query, where: u.deactivated != ^true)
end
defp truncate_fields_param(params) do
if Map.has_key?(params, :fields) do
Map.put(params, :fields, Enum.map(params[:fields], &truncate_field/1))
else
params
end
end
defp truncate_if_exists(params, key, max_length) do
if Map.has_key?(params, key) and is_binary(params[key]) do
{value, _chopped} = String.split_at(params[key], max_length)
Map.put(params, key, value)
else
params
end
end
defp fix_follower_address(%{follower_address: _, following_address: _} = params), do: params
defp fix_follower_address(%{nickname: nickname} = params),
do: Map.put(params, :follower_address, ap_followers(%User{nickname: nickname}))
defp fix_follower_address(params), do: params
def remote_user_changeset(struct \\ %User{local: false}, params) do
bio_limit = Config.get([:instance, :user_bio_length], 5000)
name_limit = Config.get([:instance, :user_name_length], 100)
name =
case params[:name] do
name when is_binary(name) and byte_size(name) > 0 -> name
_ -> params[:nickname]
end
params =
params
|> Map.put(:name, name)
|> Map.put_new(:last_refreshed_at, NaiveDateTime.utc_now())
|> truncate_if_exists(:name, name_limit)
|> truncate_if_exists(:bio, bio_limit)
|> truncate_fields_param()
|> fix_follower_address()
struct
|> cast(
params,
[
:bio,
:emoji,
:ap_id,
:inbox,
:shared_inbox,
:nickname,
:public_key,
:avatar,
:ap_enabled,
:banner,
:is_locked,
:last_refreshed_at,
:uri,
:follower_address,
:following_address,
:hide_followers,
:hide_follows,
:hide_followers_count,
:hide_follows_count,
:follower_count,
:fields,
:following_count,
:is_discoverable,
:invisible,
:actor_type,
:also_known_as,
:accepts_chat_messages
]
)
|> cast(params, [:name], empty_values: [])
|> validate_required([:ap_id])
|> validate_required([:name], trim: false)
|> unique_constraint(:nickname)
|> validate_format(:nickname, @email_regex)
|> validate_length(:bio, max: bio_limit)
|> validate_length(:name, max: name_limit)
|> validate_fields(true)
end
def update_changeset(struct, params \\ %{}) do
bio_limit = Config.get([:instance, :user_bio_length], 5000)
name_limit = Config.get([:instance, :user_name_length], 100)
struct
|> cast(
params,
[
:bio,
:raw_bio,
:name,
:emoji,
:avatar,
:public_key,
:inbox,
:shared_inbox,
:is_locked,
:no_rich_text,
:default_scope,
:banner,
:hide_follows,
:hide_followers,
:hide_followers_count,
:hide_follows_count,
:hide_favorites,
:allow_following_move,
:background,
:show_role,
:skip_thread_containment,
:fields,
:raw_fields,
:pleroma_settings_store,
:is_discoverable,
:actor_type,
:also_known_as,
:accepts_chat_messages
]
)
|> unique_constraint(:nickname)
|> validate_format(:nickname, local_nickname_regex())
|> validate_length(:bio, max: bio_limit)
|> validate_length(:name, min: 1, max: name_limit)
|> validate_inclusion(:actor_type, ["Person", "Service"])
|> put_fields()
|> put_emoji()
|> put_change_if_present(:bio, &{:ok, parse_bio(&1, struct)})
|> put_change_if_present(:avatar, &put_upload(&1, :avatar))
|> put_change_if_present(:banner, &put_upload(&1, :banner))
|> put_change_if_present(:background, &put_upload(&1, :background))
|> put_change_if_present(
:pleroma_settings_store,
&{:ok, Map.merge(struct.pleroma_settings_store, &1)}
)
|> validate_fields(false)
end
defp put_fields(changeset) do
if raw_fields = get_change(changeset, :raw_fields) do
raw_fields =
raw_fields
|> Enum.filter(fn %{"name" => n} -> n != "" end)
fields =
raw_fields
|> Enum.map(fn f -> Map.update!(f, "value", &parse_fields(&1)) end)
changeset
|> put_change(:raw_fields, raw_fields)
|> put_change(:fields, fields)
else
changeset
end
end
defp parse_fields(value) do
value
|> Formatter.linkify(mentions_format: :full)
|> elem(0)
end
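# Rebuilds the user's custom emoji map whenever the bio, display name or profile
# fields change, by scanning those values for emoji shortcodes.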
defp put_emoji(changeset) do
emojified_fields = [:bio, :name, :raw_fields]
if Enum.any?(changeset.changes, fn {k, _} -> k in emojified_fields end) do
bio = Emoji.Formatter.get_emoji_map(get_field(changeset, :bio))
name = Emoji.Formatter.get_emoji_map(get_field(changeset, :name))
emoji = Map.merge(bio, name)
emoji =
changeset
|> get_field(:raw_fields)
|> Enum.reduce(emoji, fn x, acc ->
Map.merge(acc, Emoji.Formatter.get_emoji_map(x["name"] <> x["value"]))
end)
put_change(changeset, :emoji, emoji)
else
changeset
end
end
defp put_change_if_present(changeset, map_field, value_function) do
with {:ok, value} <- fetch_change(changeset, map_field),
{:ok, new_value} <- value_function.(value) do
put_change(changeset, map_field, new_value)
else
_ -> changeset
end
end
defp put_upload(value, type) do
with %Plug.Upload{} <- value,
{:ok, object} <- ActivityPub.upload(value, type: type) do
{:ok, object.data}
end
end
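# Like update_changeset/2, but additionally allows changing the email and
# drops any change to :also_known_as.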
def update_as_admin_changeset(struct, params) do
struct
|> update_changeset(params)
|> cast(params, [:email])
|> delete_change(:also_known_as)
|> unique_constraint(:email)
|> validate_format(:email, @email_regex)
|> validate_inclusion(:actor_type, ["Person", "Service"])
end
@spec update_as_admin(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
def update_as_admin(user, params) do
params = Map.put(params, "password_confirmation", params["password"])
changeset = update_as_admin_changeset(user, params)
if params["password"] do
reset_password(user, changeset, params)
else
User.update_and_set_cache(changeset)
end
end
def password_update_changeset(struct, params) do
struct
|> cast(params, [:password, :password_confirmation])
|> validate_required([:password, :password_confirmation])
|> validate_confirmation(:password)
|> put_password_hash()
|> put_change(:password_reset_pending, false)
end
@spec reset_password(User.t(), map()) :: {:ok, User.t()} | {:error, Changeset.t()}
def reset_password(%User{} = user, params) do
reset_password(user, user, params)
end
def reset_password(%User{id: user_id} = user, struct, params) do
multi =
Multi.new()
|> Multi.update(:user, password_update_changeset(struct, params))
|> Multi.delete_all(:tokens, OAuth.Token.Query.get_by_user(user_id))
|> Multi.delete_all(:auth, OAuth.Authorization.delete_by_user_query(user))
case Repo.transaction(multi) do
{:ok, %{user: user} = _} -> set_cache(user)
{:error, _, changeset, _} -> {:error, changeset}
end
end
def update_password_reset_pending(user, value) do
user
|> change()
|> put_change(:password_reset_pending, value)
|> update_and_set_cache()
end
def force_password_reset_async(user) do
BackgroundWorker.enqueue("force_password_reset", %{"user_id" => user.id})
end
@spec force_password_reset(User.t()) :: {:ok, User.t()} | {:error, Ecto.Changeset.t()}
def force_password_reset(user), do: update_password_reset_pending(user, true)
# Used to auto-register LDAP accounts which won't have a password hash stored locally
def register_changeset_ldap(struct, params = %{password: password})
when is_nil(password) do
params = Map.put_new(params, :accepts_chat_messages, true)
params =
if Map.has_key?(params, :email) do
Map.put_new(params, :email, params[:email])
else
params
end
struct
|> cast(params, [
:name,
:nickname,
:email,
:accepts_chat_messages
])
|> validate_required([:name, :nickname])
|> unique_constraint(:nickname)
|> validate_exclusion(:nickname, Config.get([User, :restricted_nicknames]))
|> validate_format(:nickname, local_nickname_regex())
|> put_ap_id()
|> unique_constraint(:ap_id)
|> put_following_and_follower_address()
end
def register_changeset(struct, params \\ %{}, opts \\ []) do
bio_limit = Config.get([:instance, :user_bio_length], 5000)
name_limit = Config.get([:instance, :user_name_length], 100)
reason_limit = Config.get([:instance, :registration_reason_length], 500)
params = Map.put_new(params, :accepts_chat_messages, true)
need_confirmation? =
if is_nil(opts[:need_confirmation]) do
Config.get([:instance, :account_activation_required])
else
opts[:need_confirmation]
end
need_approval? =
if is_nil(opts[:need_approval]) do
Config.get([:instance, :account_approval_required])
else
opts[:need_approval]
end
struct
|> confirmation_changeset(need_confirmation: need_confirmation?)
|> approval_changeset(need_approval: need_approval?)
|> cast(params, [
:bio,
:raw_bio,
:email,
:name,
:nickname,
:password,
:password_confirmation,
:emoji,
:accepts_chat_messages,
:registration_reason
])
|> validate_required([:name, :nickname, :password, :password_confirmation])
|> validate_confirmation(:password)
|> unique_constraint(:email)
|> validate_format(:email, @email_regex)
|> validate_change(:email, fn :email, email ->
valid? =
Config.get([User, :email_blacklist])
|> Enum.all?(fn blacklisted_domain ->
!String.ends_with?(email, ["@" <> blacklisted_domain, "." <> blacklisted_domain])
end)
if valid?, do: [], else: [email: "Invalid email"]
end)
|> unique_constraint(:nickname)
|> validate_exclusion(:nickname, Config.get([User, :restricted_nicknames]))
|> validate_format(:nickname, local_nickname_regex())
|> validate_length(:bio, max: bio_limit)
|> validate_length(:name, min: 1, max: name_limit)
|> validate_length(:registration_reason, max: reason_limit)
|> maybe_validate_required_email(opts[:external])
|> put_password_hash
|> put_ap_id()
|> unique_constraint(:ap_id)
|> put_following_and_follower_address()
end
def maybe_validate_required_email(changeset, true), do: changeset
def maybe_validate_required_email(changeset, _) do
if Config.get([:instance, :account_activation_required]) do
validate_required(changeset, [:email])
else
changeset
end
end
defp put_ap_id(changeset) do
ap_id = ap_id(%User{nickname: get_field(changeset, :nickname)})
put_change(changeset, :ap_id, ap_id)
end
defp put_following_and_follower_address(changeset) do
followers = ap_followers(%User{nickname: get_field(changeset, :nickname)})
changeset
|> put_change(:follower_address, followers)
end
defp autofollow_users(user) do
candidates = Config.get([:instance, :autofollowed_nicknames])
autofollowed_users =
User.Query.build(%{nickname: candidates, local: true, deactivated: false})
|> Repo.all()
follow_all(user, autofollowed_users)
end
defp autofollowing_users(user) do
candidates = Config.get([:instance, :autofollowing_nicknames])
User.Query.build(%{nickname: candidates, local: true, deactivated: false})
|> Repo.all()
|> Enum.each(&follow(&1, user, :follow_accept))
{:ok, :success}
end
@doc "Inserts provided changeset, performs post-registration actions (confirmation email sending etc.)"
def register(%Ecto.Changeset{} = changeset) do
with {:ok, user} <- Repo.insert(changeset) do
post_register_action(user)
end
end
def post_register_action(%User{} = user) do
with {:ok, user} <- autofollow_users(user),
{:ok, _} <- autofollowing_users(user),
{:ok, user} <- set_cache(user),
{:ok, _} <- send_welcome_email(user),
{:ok, _} <- send_welcome_message(user),
{:ok, _} <- send_welcome_chat_message(user),
{:ok, _} <- try_send_confirmation_email(user) do
{:ok, user}
end
end
def send_welcome_message(user) do
if User.WelcomeMessage.enabled?() do
User.WelcomeMessage.post_message(user)
{:ok, :enqueued}
else
{:ok, :noop}
end
end
def send_welcome_chat_message(user) do
if User.WelcomeChatMessage.enabled?() do
User.WelcomeChatMessage.post_message(user)
{:ok, :enqueued}
else
{:ok, :noop}
end
end
def send_welcome_email(%User{email: email} = user) when is_binary(email) do
if User.WelcomeEmail.enabled?() do
User.WelcomeEmail.send_email(user)
{:ok, :enqueued}
else
{:ok, :noop}
end
end
def send_welcome_email(_), do: {:ok, :noop}
@spec try_send_confirmation_email(User.t()) :: {:ok, :enqueued | :noop}
def try_send_confirmation_email(%User{confirmation_pending: true, email: email} = user)
when is_binary(email) do
if Config.get([:instance, :account_activation_required]) do
send_confirmation_email(user)
{:ok, :enqueued}
else
{:ok, :noop}
end
end
def try_send_confirmation_email(_), do: {:ok, :noop}
@spec send_confirmation_email(User.t()) :: User.t()
def send_confirmation_email(%User{} = user) do
user
|> Pleroma.Emails.UserEmail.account_confirmation_email()
|> Pleroma.Emails.Mailer.deliver_async()
user
end
def needs_update?(%User{local: true}), do: false
def needs_update?(%User{local: false, last_refreshed_at: nil}), do: true
def needs_update?(%User{local: false} = user) do
NaiveDateTime.diff(NaiveDateTime.utc_now(), user.last_refreshed_at) >= 86_400
end
def needs_update?(_), do: true
@spec maybe_direct_follow(User.t(), User.t()) :: {:ok, User.t()} | {:error, String.t()}
# "Locked" (self-locked) users demand explicit authorization of follow requests
def maybe_direct_follow(%User{} = follower, %User{local: true, is_locked: true} = followed) do
follow(follower, followed, :follow_pending)
end
def maybe_direct_follow(%User{} = follower, %User{local: true} = followed) do
follow(follower, followed)
end
def maybe_direct_follow(%User{} = follower, %User{} = followed) do
if not ap_enabled?(followed) do
follow(follower, followed)
else
{:ok, follower}
end
end
@doc "A mass follow for local users. Respects blocks in both directions but does not create activities."
@spec follow_all(User.t(), list(User.t())) :: {atom(), User.t()}
def follow_all(follower, followeds) do
followeds
|> Enum.reject(fn followed -> blocks?(follower, followed) || blocks?(followed, follower) end)
|> Enum.each(&follow(follower, &1, :follow_accept))
set_cache(follower)
end
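# Creates a following relationship in the given state (:follow_accept or :follow_pending)
# and refreshes both users' follow counters. Refuses to follow deactivated users and,
# when :deny_follow_blocked is enabled, users who block the follower.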
def follow(%User{} = follower, %User{} = followed, state \\ :follow_accept) do
deny_follow_blocked = Config.get([:user, :deny_follow_blocked])
cond do
followed.deactivated ->
{:error, "Could not follow user: #{followed.nickname} is deactivated."}
deny_follow_blocked and blocks?(followed, follower) ->
{:error, "Could not follow user: #{followed.nickname} blocked you."}
true ->
FollowingRelationship.follow(follower, followed, state)
{:ok, _} = update_follower_count(followed)
follower
|> update_following_count()
end
end
def unfollow(%User{ap_id: ap_id}, %User{ap_id: ap_id}) do
{:error, "Not subscribed!"}
end
@spec unfollow(User.t(), User.t()) :: {:ok, User.t(), Activity.t()} | {:error, String.t()}
def unfollow(%User{} = follower, %User{} = followed) do
case do_unfollow(follower, followed) do
{:ok, follower, followed} ->
{:ok, follower, Utils.fetch_latest_follow(follower, followed)}
error ->
error
end
end
@spec do_unfollow(User.t(), User.t()) :: {:ok, User.t(), User.t()} | {:error, String.t()}
defp do_unfollow(%User{} = follower, %User{} = followed) do
case get_follow_state(follower, followed) do
state when state in [:follow_pending, :follow_accept] ->
FollowingRelationship.unfollow(follower, followed)
{:ok, followed} = update_follower_count(followed)
{:ok, follower} = update_following_count(follower)
{:ok, follower, followed}
nil ->
{:error, "Not subscribed!"}
end
end
@doc "Returns follow state as Pleroma.FollowingRelationship.State value"
def get_follow_state(%User{} = follower, %User{} = following) do
following_relationship = FollowingRelationship.get(follower, following)
get_follow_state(follower, following, following_relationship)
end
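# When no FollowingRelationship is stored and the target is remote, the follow state
# is derived from the latest Follow activity instead.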
def get_follow_state(
%User{} = follower,
%User{} = following,
following_relationship
) do
case {following_relationship, following.local} do
{nil, false} ->
case Utils.fetch_latest_follow(follower, following) do
%Activity{data: %{"state" => state}} when state in ["pending", "accept"] ->
FollowingRelationship.state_to_enum(state)
_ ->
nil
end
{%{state: state}, _} ->
state
{nil, _} ->
nil
end
end
def locked?(%User{} = user) do
user.is_locked || false
end
def get_by_id(id) do
Repo.get_by(User, id: id)
end
def get_by_ap_id(ap_id) do
Repo.get_by(User, ap_id: ap_id)
end
def get_all_by_ap_id(ap_ids) do
from(u in __MODULE__,
where: u.ap_id in ^ap_ids
)
|> Repo.all()
end
def get_all_by_ids(ids) do
from(u in __MODULE__, where: u.id in ^ids)
|> Repo.all()
end
# This is mostly an SPC migration fix. It guesses the user nickname by taking the last part
# of the ap_id and the domain, and tries to get that user
def get_by_guessed_nickname(ap_id) do
domain = URI.parse(ap_id).host
name = List.last(String.split(ap_id, "/"))
nickname = "#{name}@#{domain}"
get_cached_by_nickname(nickname)
end
def set_cache({:ok, user}), do: set_cache(user)
def set_cache({:error, err}), do: {:error, err}
def set_cache(%User{} = user) do
Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
Cachex.put(:user_cache, "nickname:#{user.nickname}", user)
Cachex.put(:user_cache, "friends_ap_ids:#{user.nickname}", get_user_friends_ap_ids(user))
{:ok, user}
end
def update_and_set_cache(struct, params) do
struct
|> update_changeset(params)
|> update_and_set_cache()
end
def update_and_set_cache(changeset) do
with {:ok, user} <- Repo.update(changeset, stale_error_field: :id) do
set_cache(user)
end
end
def get_user_friends_ap_ids(user) do
from(u in User.get_friends_query(user), select: u.ap_id)
|> Repo.all()
end
@spec get_cached_user_friends_ap_ids(User.t()) :: [String.t()]
def get_cached_user_friends_ap_ids(user) do
Cachex.fetch!(:user_cache, "friends_ap_ids:#{user.ap_id}", fn _ ->
get_user_friends_ap_ids(user)
end)
end
def invalidate_cache(user) do
Cachex.del(:user_cache, "ap_id:#{user.ap_id}")
Cachex.del(:user_cache, "nickname:#{user.nickname}")
Cachex.del(:user_cache, "friends_ap_ids:#{user.ap_id}")
end
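# Cache-aside lookup: on a cache miss the user is loaded from the database and
# stored back into :user_cache under the "ap_id:..." key.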
@spec get_cached_by_ap_id(String.t()) :: User.t() | nil
def get_cached_by_ap_id(ap_id) do
key = "ap_id:#{ap_id}"
with {:ok, nil} <- Cachex.get(:user_cache, key),
user when not is_nil(user) <- get_by_ap_id(ap_id),
{:ok, true} <- Cachex.put(:user_cache, key, user) do
user
else
{:ok, user} -> user
nil -> nil
end
end
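# The "id:..." cache entry only stores the user's ap_id; the full struct lives under
# the "ap_id:..." key, so this lookup always resolves through get_cached_by_ap_id/1.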
def get_cached_by_id(id) do
key = "id:#{id}"
ap_id =
Cachex.fetch!(:user_cache, key, fn _ ->
user = get_by_id(id)
if user do
Cachex.put(:user_cache, "ap_id:#{user.ap_id}", user)
{:commit, user.ap_id}
else
{:ignore, ""}
end
end)
get_cached_by_ap_id(ap_id)
end
def get_cached_by_nickname(nickname) do
key = "nickname:#{nickname}"
Cachex.fetch!(:user_cache, key, fn ->
case get_or_fetch_by_nickname(nickname) do
{:ok, user} -> {:commit, user}
{:error, _error} -> {:ignore, nil}
end
end)
end
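# Resolves either a flake id or a nickname. With :limit_to_local_content set to
# :unauthenticated, remote nicknames (containing "@") are only resolved when an
# authenticated user is passed via opts[:for]; with any other truthy setting they
# are not resolved at all.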
def get_cached_by_nickname_or_id(nickname_or_id, opts \\ []) do
restrict_to_local = Config.get([:instance, :limit_to_local_content])
cond do
is_integer(nickname_or_id) or FlakeId.flake_id?(nickname_or_id) ->
get_cached_by_id(nickname_or_id) || get_cached_by_nickname(nickname_or_id)
restrict_to_local == false or not String.contains?(nickname_or_id, "@") ->
get_cached_by_nickname(nickname_or_id)
restrict_to_local == :unauthenticated and match?(%User{}, opts[:for]) ->
get_cached_by_nickname(nickname_or_id)
true ->
nil
end
end
@spec get_by_nickname(String.t()) :: User.t() | nil
def get_by_nickname(nickname) do
Repo.get_by(User, nickname: nickname) ||
if Regex.match?(~r(@#{Pleroma.Web.Endpoint.host()})i, nickname) do
Repo.get_by(User, nickname: local_nickname(nickname))
end
end
def get_by_email(email), do: Repo.get_by(User, email: email)
def get_by_nickname_or_email(nickname_or_email) do
get_by_nickname(nickname_or_email) || get_by_email(nickname_or_email)
end
def fetch_by_nickname(nickname), do: ActivityPub.make_user_from_nickname(nickname)
def get_or_fetch_by_nickname(nickname) do
with %User{} = user <- get_by_nickname(nickname) do
{:ok, user}
else
_e ->
with [_nick, _domain] <- String.split(nickname, "@"),
{:ok, user} <- fetch_by_nickname(nickname) do
{:ok, user}
else
_e -> {:error, "not found " <> nickname}
end
end
end
@spec get_followers_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
def get_followers_query(%User{} = user, nil) do
User.Query.build(%{followers: user, deactivated: false})
end
def get_followers_query(%User{} = user, page) do
user
|> get_followers_query(nil)
|> User.Query.paginate(page, 20)
end
@spec get_followers_query(User.t()) :: Ecto.Query.t()
def get_followers_query(%User{} = user), do: get_followers_query(user, nil)
@spec get_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
def get_followers(%User{} = user, page \\ nil) do
user
|> get_followers_query(page)
|> Repo.all()
end
@spec get_external_followers(User.t(), pos_integer() | nil) :: {:ok, list(User.t())}
def get_external_followers(%User{} = user, page \\ nil) do
user
|> get_followers_query(page)
|> User.Query.build(%{external: true})
|> Repo.all()
end
def get_followers_ids(%User{} = user, page \\ nil) do
user
|> get_followers_query(page)
|> select([u], u.id)
|> Repo.all()
end
@spec get_friends_query(User.t(), pos_integer() | nil) :: Ecto.Query.t()
def get_friends_query(%User{} = user, nil) do
User.Query.build(%{friends: user, deactivated: false})
end
def get_friends_query(%User{} = user, page) do
user
|> get_friends_query(nil)
|> User.Query.paginate(page, 20)
end
@spec get_friends_query(User.t()) :: Ecto.Query.t()
def get_friends_query(%User{} = user), do: get_friends_query(user, nil)
def get_friends(%User{} = user, page \\ nil) do
user
|> get_friends_query(page)
|> Repo.all()
end
def get_friends_ap_ids(%User{} = user) do
user
|> get_friends_query(nil)
|> select([u], u.ap_id)
|> Repo.all()
end
def get_friends_ids(%User{} = user, page \\ nil) do
user
|> get_friends_query(page)
|> select([u], u.id)
|> Repo.all()
end
def increase_note_count(%User{} = user) do
User
|> where(id: ^user.id)
|> update([u], inc: [note_count: 1])
|> select([u], u)
|> Repo.update_all([])
|> case do
{1, [user]} -> set_cache(user)
_ -> {:error, user}
end
end
def decrease_note_count(%User{} = user) do
User
|> where(id: ^user.id)
|> update([u],
set: [
note_count: fragment("greatest(0, note_count - 1)")
]
)
|> select([u], u)
|> Repo.update_all([])
|> case do
{1, [user]} -> set_cache(user)
_ -> {:error, user}
end
end
def update_note_count(%User{} = user, note_count \\ nil) do
note_count =
note_count ||
from(
a in Object,
where: fragment("?->>'actor' = ? and ?->>'type' = 'Note'", a.data, ^user.ap_id, a.data),
select: count(a.id)
)
|> Repo.one()
user
|> cast(%{note_count: note_count}, [:note_count])
|> update_and_set_cache()
end
@spec maybe_fetch_follow_information(User.t()) :: User.t()
def maybe_fetch_follow_information(user) do
with {:ok, user} <- fetch_follow_information(user) do
user
else
e ->
Logger.error("Follower/Following counter update for #{user.ap_id} failed.\n#{inspect(e)}")
user
end
end
def fetch_follow_information(user) do
with {:ok, info} <- ActivityPub.fetch_follow_information_for_user(user) do
user
|> follow_information_changeset(info)
|> update_and_set_cache()
end
end
defp follow_information_changeset(user, params) do
user
|> cast(params, [
:hide_followers,
:hide_follows,
:follower_count,
:following_count,
:hide_followers_count,
:hide_follows_count
])
end
@spec update_follower_count(User.t()) :: {:ok, User.t()}
def update_follower_count(%User{} = user) do
if user.local or !Config.get([:instance, :external_user_synchronization]) do
follower_count = FollowingRelationship.follower_count(user)
user
|> follow_information_changeset(%{follower_count: follower_count})
|> update_and_set_cache
else
{:ok, maybe_fetch_follow_information(user)}
end
end
@spec update_following_count(User.t()) :: {:ok, User.t()}
def update_following_count(%User{local: false} = user) do
if Config.get([:instance, :external_user_synchronization]) do
{:ok, maybe_fetch_follow_information(user)}
else
{:ok, user}
end
end
def update_following_count(%User{local: true} = user) do
following_count = FollowingRelationship.following_count(user)
user
|> follow_information_changeset(%{following_count: following_count})
|> update_and_set_cache()
end
@spec get_users_from_set([String.t()], keyword()) :: [User.t()]
def get_users_from_set(ap_ids, opts \\ []) do
local_only = Keyword.get(opts, :local_only, true)
criteria = %{ap_id: ap_ids, deactivated: false}
criteria = if local_only, do: Map.put(criteria, :local, true), else: criteria
User.Query.build(criteria)
|> Repo.all()
end
@spec get_recipients_from_activity(Activity.t()) :: [User.t()]
def get_recipients_from_activity(%Activity{recipients: to, actor: actor}) do
to = [actor | to]
query = User.Query.build(%{recipients_from_activity: to, local: true, deactivated: false})
query
|> Repo.all()
end
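# Mutes `mutee` for `muter`. `params` may include :notifications (default true) to also
# mute notifications, and :expires_in (seconds) to schedule an automatic unmute via
# Pleroma.Workers.MuteExpireWorker.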
@spec mute(User.t(), User.t(), map()) ::
{:ok, list(UserRelationship.t())} | {:error, String.t()}
def mute(%User{} = muter, %User{} = mutee, params \\ %{}) do
notifications? = Map.get(params, :notifications, true)
expires_in = Map.get(params, :expires_in, 0)
with {:ok, user_mute} <- UserRelationship.create_mute(muter, mutee),
{:ok, user_notification_mute} <-
(notifications? && UserRelationship.create_notification_mute(muter, mutee)) ||
{:ok, nil} do
if expires_in > 0 do
Pleroma.Workers.MuteExpireWorker.enqueue(
"unmute_user",
%{"muter_id" => muter.id, "mutee_id" => mutee.id},
schedule_in: expires_in
)
end
{:ok, Enum.filter([user_mute, user_notification_mute], & &1)}
end
end
def unmute(%User{} = muter, %User{} = mutee) do
with {:ok, user_mute} <- UserRelationship.delete_mute(muter, mutee),
{:ok, user_notification_mute} <-
UserRelationship.delete_notification_mute(muter, mutee) do
{:ok, [user_mute, user_notification_mute]}
end
end
def unmute(muter_id, mutee_id) do
with {:muter, %User{} = muter} <- {:muter, User.get_by_id(muter_id)},
{:mutee, %User{} = mutee} <- {:mutee, User.get_by_id(mutee_id)} do
unmute(muter, mutee)
else
{who, result} = error ->
Logger.warn(
"User.unmute/2 failed. #{who}: #{result}, muter_id: #{muter_id}, mutee_id: #{mutee_id}"
)
{:error, error}
end
end
def subscribe(%User{} = subscriber, %User{} = target) do
deny_follow_blocked = Config.get([:user, :deny_follow_blocked])
if blocks?(target, subscriber) and deny_follow_blocked do
{:error, "Could not subscribe: #{target.nickname} is blocking you"}
else
# Note: the relationship is inverse: subscriber acts as relationship target
UserRelationship.create_inverse_subscription(target, subscriber)
end
end
def subscribe(%User{} = subscriber, %{ap_id: ap_id}) do
with %User{} = subscribee <- get_cached_by_ap_id(ap_id) do
subscribe(subscriber, subscribee)
end
end
def unsubscribe(%User{} = unsubscriber, %User{} = target) do
# Note: the relationship is inverse: subscriber acts as relationship target
UserRelationship.delete_inverse_subscription(target, unsubscriber)
end
def unsubscribe(%User{} = unsubscriber, %{ap_id: ap_id}) do
with %User{} = user <- get_cached_by_ap_id(ap_id) do
unsubscribe(unsubscriber, user)
end
end
def block(%User{} = blocker, %User{} = blocked) do
# sever any follow relationships to prevent leaks per activitypub (Pleroma issue #213)
blocker =
if following?(blocker, blocked) do
{:ok, blocker, _} = unfollow(blocker, blocked)
blocker
else
blocker
end
# clear any requested follows as well
blocked =
case CommonAPI.reject_follow_request(blocked, blocker) do
{:ok, %User{} = updated_blocked} -> updated_blocked
nil -> blocked
end
unsubscribe(blocked, blocker)
unfollowing_blocked = Config.get([:activitypub, :unfollow_blocked], true)
if unfollowing_blocked && following?(blocked, blocker), do: unfollow(blocked, blocker)
{:ok, blocker} = update_follower_count(blocker)
{:ok, blocker, _} = Participation.mark_all_as_read(blocker, blocked)
add_to_block(blocker, blocked)
end
# helper to handle the block given only an actor's AP id
def block(%User{} = blocker, %{ap_id: ap_id}) do
block(blocker, get_cached_by_ap_id(ap_id))
end
def unblock(%User{} = blocker, %User{} = blocked) do
remove_from_block(blocker, blocked)
end
# helper to handle the block given only an actor's AP id
def unblock(%User{} = blocker, %{ap_id: ap_id}) do
unblock(blocker, get_cached_by_ap_id(ap_id))
end
def mutes?(nil, _), do: false
def mutes?(%User{} = user, %User{} = target), do: mutes_user?(user, target)
def mutes_user?(%User{} = user, %User{} = target) do
UserRelationship.mute_exists?(user, target)
end
@spec muted_notifications?(User.t() | nil, User.t() | map()) :: boolean()
def muted_notifications?(nil, _), do: false
def muted_notifications?(%User{} = user, %User{} = target),
do: UserRelationship.notification_mute_exists?(user, target)
def blocks?(nil, _), do: false
def blocks?(%User{} = user, %User{} = target) do
blocks_user?(user, target) ||
(blocks_domain?(user, target) and not User.following?(user, target))
end
def blocks_user?(%User{} = user, %User{} = target) do
UserRelationship.block_exists?(user, target)
end
def blocks_user?(_, _), do: false
def blocks_domain?(%User{} = user, %User{} = target) do
domain_blocks = Pleroma.Web.ActivityPub.MRF.subdomains_regex(user.domain_blocks)
%{host: host} = URI.parse(target.ap_id)
Pleroma.Web.ActivityPub.MRF.subdomain_match?(domain_blocks, host)
end
def blocks_domain?(_, _), do: false
def subscribed_to?(%User{} = user, %User{} = target) do
# Note: the relationship is inverse: subscriber acts as relationship target
UserRelationship.inverse_subscription_exists?(target, user)
end
def subscribed_to?(%User{} = user, %{ap_id: ap_id}) do
with %User{} = target <- get_cached_by_ap_id(ap_id) do
subscribed_to?(user, target)
end
end
@doc """
Returns map of outgoing (blocked, muted etc.) relationships' user AP IDs by relation type.
E.g. `outgoing_relationships_ap_ids(user, [:block])` -> `%{block: ["https://some.site/users/userapid"]}`
"""
@spec outgoing_relationships_ap_ids(User.t(), list(atom())) :: %{atom() => list(String.t())}
def outgoing_relationships_ap_ids(_user, []), do: %{}
def outgoing_relationships_ap_ids(nil, _relationship_types), do: %{}
def outgoing_relationships_ap_ids(%User{} = user, relationship_types)
when is_list(relationship_types) do
db_result =
user
|> assoc(:outgoing_relationships)
|> join(:inner, [user_rel], u in assoc(user_rel, :target))
|> where([user_rel, u], user_rel.relationship_type in ^relationship_types)
|> select([user_rel, u], [user_rel.relationship_type, fragment("array_agg(?)", u.ap_id)])
|> group_by([user_rel, u], user_rel.relationship_type)
|> Repo.all()
|> Enum.into(%{}, fn [k, v] -> {k, v} end)
Enum.into(
relationship_types,
%{},
fn rel_type -> {rel_type, db_result[rel_type] || []} end
)
end
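# Returns a flat, deduplicated list of AP IDs of users that have the given incoming
# relationship types towards `user` (e.g. users who block `user`), optionally limited
# to the given `ap_ids`.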
def incoming_relationships_ungrouped_ap_ids(user, relationship_types, ap_ids \\ nil)
def incoming_relationships_ungrouped_ap_ids(_user, [], _ap_ids), do: []
def incoming_relationships_ungrouped_ap_ids(nil, _relationship_types, _ap_ids), do: []
def incoming_relationships_ungrouped_ap_ids(%User{} = user, relationship_types, ap_ids)
when is_list(relationship_types) do
user
|> assoc(:incoming_relationships)
|> join(:inner, [user_rel], u in assoc(user_rel, :source))
|> where([user_rel, u], user_rel.relationship_type in ^relationship_types)
|> maybe_filter_on_ap_id(ap_ids)
|> select([user_rel, u], u.ap_id)
|> distinct(true)
|> Repo.all()
end
defp maybe_filter_on_ap_id(query, ap_ids) when is_list(ap_ids) do
where(query, [user_rel, u], u.ap_id in ^ap_ids)
end
defp maybe_filter_on_ap_id(query, _ap_ids), do: query
def deactivate_async(user, status \\ true) do
BackgroundWorker.enqueue("deactivate_user", %{"user_id" => user.id, "status" => status})
end
def deactivate(user, status \\ true)
def deactivate(users, status) when is_list(users) do
Repo.transaction(fn ->
for user <- users, do: deactivate(user, status)
end)
end
def deactivate(%User{} = user, status) do
with {:ok, user} <- set_activation_status(user, status) do
user
|> get_followers()
|> Enum.filter(& &1.local)
|> Enum.each(&set_cache(update_following_count(&1)))
# Only update local user counts; remote counts will be updated during the next pull.
user
|> get_friends()
|> Enum.filter(& &1.local)
|> Enum.each(&do_unfollow(user, &1))
{:ok, user}
end
end
def approve(users) when is_list(users) do
Repo.transaction(fn ->
Enum.map(users, fn user ->
with {:ok, user} <- approve(user), do: user
end)
end)
end
def approve(%User{} = user) do
change(user, approval_pending: false)
|> update_and_set_cache()
end
def update_notification_settings(%User{} = user, settings) do
user
|> cast(%{notification_settings: settings}, [])
|> cast_embed(:notification_settings)
|> validate_required([:notification_settings])
|> update_and_set_cache()
end
@spec purge_user_changeset(User.t()) :: Changeset.t()
def purge_user_changeset(user) do
# "Right to be forgotten"
# https://gdpr.eu/right-to-be-forgotten/
change(user, %{
bio: "",
raw_bio: nil,
email: nil,
name: nil,
password_hash: nil,
keys: nil,
public_key: nil,
avatar: %{},
tags: [],
last_refreshed_at: nil,
last_digest_emailed_at: nil,
banner: %{},
background: %{},
note_count: 0,
follower_count: 0,
following_count: 0,
is_locked: false,
confirmation_pending: false,
password_reset_pending: false,
approval_pending: false,
registration_reason: nil,
confirmation_token: nil,
domain_blocks: [],
deactivated: true,
ap_enabled: false,
is_moderator: false,
is_admin: false,
mastofe_settings: nil,
mascot: nil,
emoji: %{},
pleroma_settings_store: %{},
fields: [],
raw_fields: [],
is_discoverable: false,
also_known_as: []
})
end
def delete(users) when is_list(users) do
for user <- users, do: delete(user)
end
def delete(%User{} = user) do
BackgroundWorker.enqueue("delete_user", %{"user_id" => user.id})
end
defp delete_and_invalidate_cache(%User{} = user) do
invalidate_cache(user)
Repo.delete(user)
end
defp delete_or_deactivate(%User{local: false} = user), do: delete_and_invalidate_cache(user)
defp delete_or_deactivate(%User{local: true} = user) do
status = account_status(user)
case status do
:confirmation_pending ->
delete_and_invalidate_cache(user)
:approval_pending ->
delete_and_invalidate_cache(user)
_ ->
user
|> purge_user_changeset()
|> update_and_set_cache()
end
end
def perform(:force_password_reset, user), do: force_password_reset(user)
@spec perform(atom(), User.t()) :: {:ok, User.t()}
def perform(:delete, %User{} = user) do
# Remove all relationships
user
|> get_followers()
|> Enum.each(fn follower ->
ActivityPub.unfollow(follower, user)
unfollow(follower, user)
end)
user
|> get_friends()
|> Enum.each(fn followed ->
ActivityPub.unfollow(user, followed)
unfollow(user, followed)
end)
delete_user_activities(user)
delete_notifications_from_user_activities(user)
delete_outgoing_pending_follow_requests(user)
delete_or_deactivate(user)
end
def perform(:deactivate_async, user, status), do: deactivate(user, status)
@spec external_users_query() :: Ecto.Query.t()
def external_users_query do
User.Query.build(%{
external: true,
active: true,
order_by: :id
})
end
@spec external_users(keyword()) :: [User.t()]
def external_users(opts \\ []) do
query =
external_users_query()
|> select([u], struct(u, [:id, :ap_id]))
query =
if opts[:max_id],
do: where(query, [u], u.id > ^opts[:max_id]),
else: query
query =
if opts[:limit],
do: limit(query, ^opts[:limit]),
else: query
Repo.all(query)
end
def delete_notifications_from_user_activities(%User{ap_id: ap_id}) do
Notification
|> join(:inner, [n], activity in assoc(n, :activity))
|> where([n, a], fragment("? = ?", a.actor, ^ap_id))
|> Repo.delete_all()
end
def delete_user_activities(%User{ap_id: ap_id} = user) do
ap_id
|> Activity.Queries.by_actor()
|> Repo.chunk_stream(50, :batches)
|> Stream.each(fn activities ->
Enum.each(activities, fn activity -> delete_activity(activity, user) end)
end)
|> Stream.run()
end
defp delete_activity(%{data: %{"type" => "Create", "object" => object}} = activity, user) do
with {_, %Object{}} <- {:find_object, Object.get_by_ap_id(object)},
{:ok, delete_data, _} <- Builder.delete(user, object) do
Pipeline.common_pipeline(delete_data, local: user.local)
else
{:find_object, nil} ->
# We have the create activity, but not the object, it was probably pruned.
# Insert a tombstone and try again
with {:ok, tombstone_data, _} <- Builder.tombstone(user.ap_id, object),
{:ok, _tombstone} <- Object.create(tombstone_data) do
delete_activity(activity, user)
end
e ->
Logger.error("Could not delete #{object} created by #{activity.data["ap_id"]}")
Logger.error("Error: #{inspect(e)}")
end
end
defp delete_activity(%{data: %{"type" => type}} = activity, user)
when type in ["Like", "Announce"] do
{:ok, undo, _} = Builder.undo(user, activity)
Pipeline.common_pipeline(undo, local: user.local)
end
defp delete_activity(_activity, _user), do: "Doing nothing"
defp delete_outgoing_pending_follow_requests(user) do
user
|> FollowingRelationship.outgoing_pending_follow_requests_query()
|> Repo.delete_all()
end
def html_filter_policy(%User{no_rich_text: true}) do
Pleroma.HTML.Scrubber.TwitterText
end
def html_filter_policy(_), do: Config.get([:markup, :scrub_policy])
- def fetch_by_ap_id(ap_id, opts \\ []), do: ActivityPub.make_user_from_ap_id(ap_id, opts)
+ def fetch_by_ap_id(ap_id), do: ActivityPub.make_user_from_ap_id(ap_id)
- def get_or_fetch_by_ap_id(ap_id, opts \\ []) do
+ def get_or_fetch_by_ap_id(ap_id) do
cached_user = get_cached_by_ap_id(ap_id)
- maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id, opts)
+ maybe_fetched_user = needs_update?(cached_user) && fetch_by_ap_id(ap_id)
case {cached_user, maybe_fetched_user} do
{_, {:ok, %User{} = user}} ->
{:ok, user}
{%User{} = user, _} ->
{:ok, user}
_ ->
{:error, :not_found}
end
end
@doc """
Creates an internal service actor by URI if missing.
Takes a nickname for addressing.
"""
@spec get_or_create_service_actor_by_ap_id(String.t(), String.t()) :: User.t() | nil
def get_or_create_service_actor_by_ap_id(uri, nickname) do
{_, user} =
case get_cached_by_ap_id(uri) do
nil ->
with {:error, %{errors: errors}} <- create_service_actor(uri, nickname) do
Logger.error("Cannot create service actor: #{uri}/.\n#{inspect(errors)}")
{:error, nil}
end
%User{invisible: false} = user ->
set_invisible(user)
user ->
{:ok, user}
end
user
end
@spec set_invisible(User.t()) :: {:ok, User.t()}
defp set_invisible(user) do
user
|> change(%{invisible: true})
|> update_and_set_cache()
end
@spec create_service_actor(String.t(), String.t()) ::
{:ok, User.t()} | {:error, Ecto.Changeset.t()}
defp create_service_actor(uri, nickname) do
%User{
invisible: true,
local: true,
ap_id: uri,
nickname: nickname,
follower_address: uri <> "/followers"
}
|> change
|> unique_constraint(:nickname)
|> Repo.insert()
|> set_cache()
end
def public_key(%{public_key: public_key_pem}) when is_binary(public_key_pem) do
key =
public_key_pem
|> :public_key.pem_decode()
|> hd()
|> :public_key.pem_entry_decode()
{:ok, key}
end
def public_key(_), do: {:error, "key not found"}
- def get_public_key_for_ap_id(ap_id, opts \\ []) do
- with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id, opts),
+ def get_public_key_for_ap_id(ap_id) do
+ with {:ok, %User{} = user} <- get_or_fetch_by_ap_id(ap_id),
{:ok, public_key} <- public_key(user) do
{:ok, public_key}
else
_ -> :error
end
end
def ap_enabled?(%User{local: true}), do: true
def ap_enabled?(%User{ap_enabled: ap_enabled}), do: ap_enabled
def ap_enabled?(_), do: false
@doc "Gets or fetch a user by uri or nickname."
@spec get_or_fetch(String.t()) :: {:ok, User.t()} | {:error, String.t()}
def get_or_fetch("http" <> _host = uri), do: get_or_fetch_by_ap_id(uri)
def get_or_fetch(nickname), do: get_or_fetch_by_nickname(nickname)
# wait a period of time and return newest version of the User structs
# this is because we have synchronous follow APIs and need to simulate them
# with an async handshake
def wait_and_refresh(_, %User{local: true} = a, %User{local: true} = b) do
with %User{} = a <- get_cached_by_id(a.id),
%User{} = b <- get_cached_by_id(b.id) do
{:ok, a, b}
else
nil -> :error
end
end
def wait_and_refresh(timeout, %User{} = a, %User{} = b) do
with :ok <- :timer.sleep(timeout),
%User{} = a <- get_cached_by_id(a.id),
%User{} = b <- get_cached_by_id(b.id) do
{:ok, a, b}
else
nil -> :error
end
end
def parse_bio(bio) when is_binary(bio) and bio != "" do
bio
|> CommonUtils.format_input("text/plain", mentions_format: :full)
|> elem(0)
end
def parse_bio(_), do: ""
def parse_bio(bio, user) when is_binary(bio) and bio != "" do
# TODO: get profile URLs other than user.ap_id
profile_urls = [user.ap_id]
bio
|> CommonUtils.format_input("text/plain",
mentions_format: :full,
rel: &RelMe.maybe_put_rel_me(&1, profile_urls)
)
|> elem(0)
end
def parse_bio(_, _), do: ""
def tag(user_identifiers, tags) when is_list(user_identifiers) do
Repo.transaction(fn ->
for user_identifier <- user_identifiers, do: tag(user_identifier, tags)
end)
end
def tag(nickname, tags) when is_binary(nickname),
do: tag(get_by_nickname(nickname), tags)
def tag(%User{} = user, tags),
do: update_tags(user, Enum.uniq((user.tags || []) ++ normalize_tags(tags)))
def untag(user_identifiers, tags) when is_list(user_identifiers) do
Repo.transaction(fn ->
for user_identifier <- user_identifiers, do: untag(user_identifier, tags)
end)
end
def untag(nickname, tags) when is_binary(nickname),
do: untag(get_by_nickname(nickname), tags)
def untag(%User{} = user, tags),
do: update_tags(user, (user.tags || []) -- normalize_tags(tags))
defp update_tags(%User{} = user, new_tags) do
{:ok, updated_user} =
user
|> change(%{tags: new_tags})
|> update_and_set_cache()
updated_user
end
defp normalize_tags(tags) do
[tags]
|> List.flatten()
|> Enum.map(&String.downcase/1)
end
defp local_nickname_regex do
if Config.get([:instance, :extended_nickname_format]) do
@extended_local_nickname_regex
else
@strict_local_nickname_regex
end
end
def local_nickname(nickname_or_mention) do
nickname_or_mention
|> full_nickname()
|> String.split("@")
|> hd()
end
def full_nickname(nickname_or_mention),
do: String.trim_leading(nickname_or_mention, "@")
def error_user(ap_id) do
%User{
name: ap_id,
ap_id: ap_id,
nickname: "erroruser@example.com",
inserted_at: NaiveDateTime.utc_now()
}
end
@spec all_superusers() :: [User.t()]
def all_superusers do
User.Query.build(%{super_users: true, local: true, deactivated: false})
|> Repo.all()
end
def muting_reblogs?(%User{} = user, %User{} = target) do
UserRelationship.reblog_mute_exists?(user, target)
end
def showing_reblogs?(%User{} = user, %User{} = target) do
not muting_reblogs?(user, target)
end
@doc """
Returns a query for users with no activity during the given interval of days.
Inactive users are those who have neither read a notification nor authored any
activity (as the activity's actor) during the last `inactivity_threshold` days.
Deactivated users will not appear in this list.
## Examples
iex> Pleroma.User.list_inactive_users_query()
%Ecto.Query{}
"""
@spec list_inactive_users_query(integer()) :: Ecto.Query.t()
def list_inactive_users_query(inactivity_threshold \\ 7) do
negative_inactivity_threshold = -inactivity_threshold
now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
# Subqueries are not supported in `where` clauses, and a join gets too complicated.
has_read_notifications =
from(n in Pleroma.Notification,
where: n.seen == true,
group_by: n.id,
having: max(n.updated_at) > datetime_add(^now, ^negative_inactivity_threshold, "day"),
select: n.user_id
)
|> Pleroma.Repo.all()
from(u in Pleroma.User,
left_join: a in Pleroma.Activity,
on: u.ap_id == a.actor,
where: not is_nil(u.nickname),
where: u.deactivated != ^true,
where: u.id not in ^has_read_notifications,
group_by: u.id,
having:
max(a.inserted_at) < datetime_add(^now, ^negative_inactivity_threshold, "day") or
is_nil(max(a.inserted_at))
)
end
@doc """
Enable or disable email notifications for user
## Examples
iex> Pleroma.User.switch_email_notifications(%Pleroma.User{email_notifications: %{"digest" => false}}, "digest", true)
{:ok, %Pleroma.User{email_notifications: %{"digest" => true}}}
iex> Pleroma.User.switch_email_notifications(%Pleroma.User{email_notifications: %{"digest" => true}}, "digest", false)
{:ok, %Pleroma.User{email_notifications: %{"digest" => false}}}
"""
@spec switch_email_notifications(t(), String.t(), boolean()) ::
{:ok, t()} | {:error, Ecto.Changeset.t()}
def switch_email_notifications(user, type, status) do
User.update_email_notifications(user, %{type => status})
end
@doc """
Set `last_digest_emailed_at` value for the user to current time
"""
@spec touch_last_digest_emailed_at(t()) :: t()
def touch_last_digest_emailed_at(user) do
now = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
{:ok, updated_user} =
user
|> change(%{last_digest_emailed_at: now})
|> update_and_set_cache()
updated_user
end
@spec toggle_confirmation(User.t()) :: {:ok, User.t()} | {:error, Changeset.t()}
def toggle_confirmation(%User{} = user) do
user
|> confirmation_changeset(need_confirmation: !user.confirmation_pending)
|> update_and_set_cache()
end
@spec toggle_confirmation([User.t()]) :: [{:ok, User.t()} | {:error, Changeset.t()}]
def toggle_confirmation(users) do
Enum.map(users, &toggle_confirmation/1)
end
@spec need_confirmation(User.t(), boolean()) :: {:ok, User.t()} | {:error, Changeset.t()}
def need_confirmation(%User{} = user, bool) do
user
|> confirmation_changeset(need_confirmation: bool)
|> update_and_set_cache()
end
def get_mascot(%{mascot: %{} = mascot}) when not is_nil(mascot) do
mascot
end
def get_mascot(%{mascot: mascot}) when is_nil(mascot) do
# use instance-default
config = Config.get([:assets, :mascots])
default_mascot = Config.get([:assets, :default_mascot])
mascot = Keyword.get(config, default_mascot)
%{
"id" => "default-mascot",
"url" => mascot[:url],
"preview_url" => mascot[:url],
"pleroma" => %{
"mime_type" => mascot[:mime_type]
}
}
end
def ensure_keys_present(%{keys: keys} = user) when not is_nil(keys), do: {:ok, user}
def ensure_keys_present(%User{} = user) do
with {:ok, pem} <- Keys.generate_rsa_pem() do
user
|> cast(%{keys: pem}, [:keys])
|> validate_required([:keys])
|> update_and_set_cache()
end
end
def get_ap_ids_by_nicknames(nicknames) do
from(u in User,
where: u.nickname in ^nicknames,
select: u.ap_id
)
|> Repo.all()
end
defp put_password_hash(
%Ecto.Changeset{valid?: true, changes: %{password: password}} = changeset
) do
change(changeset, password_hash: Pbkdf2.hash_pwd_salt(password))
end
defp put_password_hash(changeset), do: changeset
def is_internal_user?(%User{nickname: nil}), do: true
def is_internal_user?(%User{local: true, nickname: "internal." <> _}), do: true
def is_internal_user?(_), do: false
# A hack because user delete activities have a fake id for whatever reason
# TODO: Get rid of this
def get_delivered_users_by_object_id("pleroma:fake_object_id"), do: []
def get_delivered_users_by_object_id(object_id) do
from(u in User,
inner_join: delivery in assoc(u, :deliveries),
where: delivery.object_id == ^object_id
)
|> Repo.all()
end
def change_email(user, email) do
user
|> cast(%{email: email}, [:email])
|> validate_required([:email])
|> unique_constraint(:email)
|> validate_format(:email, @email_regex)
|> update_and_set_cache()
end
# Internal function; public one is `deactivate/2`
defp set_activation_status(user, deactivated) do
user
|> cast(%{deactivated: deactivated}, [:deactivated])
|> update_and_set_cache()
end
def update_banner(user, banner) do
user
|> cast(%{banner: banner}, [:banner])
|> update_and_set_cache()
end
def update_background(user, background) do
user
|> cast(%{background: background}, [:background])
|> update_and_set_cache()
end
def roles(%{is_moderator: is_moderator, is_admin: is_admin}) do
%{
admin: is_admin,
moderator: is_moderator
}
end
def validate_fields(changeset, remote? \\ false) do
limit_name = if remote?, do: :max_remote_account_fields, else: :max_account_fields
limit = Config.get([:instance, limit_name], 0)
changeset
|> validate_length(:fields, max: limit)
|> validate_change(:fields, fn :fields, fields ->
if Enum.all?(fields, &valid_field?/1) do
[]
else
[fields: "invalid"]
end
end)
end
defp valid_field?(%{"name" => name, "value" => value}) do
name_limit = Config.get([:instance, :account_field_name_length], 255)
value_limit = Config.get([:instance, :account_field_value_length], 255)
is_binary(name) && is_binary(value) && String.length(name) <= name_limit &&
String.length(value) <= value_limit
end
defp valid_field?(_), do: false
defp truncate_field(%{"name" => name, "value" => value}) do
{name, _chopped} =
String.split_at(name, Config.get([:instance, :account_field_name_length], 255))
{value, _chopped} =
String.split_at(value, Config.get([:instance, :account_field_value_length], 255))
%{"name" => name, "value" => value}
end
def admin_api_update(user, params) do
user
|> cast(params, [
:is_moderator,
:is_admin,
:show_role
])
|> update_and_set_cache()
end
@doc "Signs user out of all applications"
def global_sign_out(user) do
OAuth.Authorization.delete_user_authorizations(user)
OAuth.Token.delete_user_tokens(user)
end
def mascot_update(user, url) do
user
|> cast(%{mascot: url}, [:mascot])
|> validate_required([:mascot])
|> update_and_set_cache()
end
def mastodon_settings_update(user, settings) do
user
|> cast(%{mastofe_settings: settings}, [:mastofe_settings])
|> validate_required([:mastofe_settings])
|> update_and_set_cache()
end
@spec confirmation_changeset(User.t(), keyword()) :: Changeset.t()
def confirmation_changeset(user, need_confirmation: need_confirmation?) do
params =
if need_confirmation? do
%{
confirmation_pending: true,
confirmation_token: :crypto.strong_rand_bytes(32) |> Base.url_encode64()
}
else
%{
confirmation_pending: false,
confirmation_token: nil
}
end
cast(user, params, [:confirmation_pending, :confirmation_token])
end
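# Usage sketch: requiring confirmation sets the pending flag and a random
# token, while dropping the requirement clears both:
#
#   User.confirmation_changeset(user, need_confirmation: true)
#   #=> changeset with confirmation_pending: true and a random confirmation_token
#
#   User.confirmation_changeset(user, need_confirmation: false)
#   #=> changeset with confirmation_pending: false and confirmation_token: nil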
@spec approval_changeset(User.t(), keyword()) :: Changeset.t()
def approval_changeset(user, need_approval: need_approval?) do
params = if need_approval?, do: %{approval_pending: true}, else: %{approval_pending: false}
cast(user, params, [:approval_pending])
end
def add_pinnned_activity(user, %Pleroma.Activity{id: id}) do
if id not in user.pinned_activities do
max_pinned_statuses = Config.get([:instance, :max_pinned_statuses], 0)
params = %{pinned_activities: user.pinned_activities ++ [id]}
# if the pinned activity was scheduled for deletion, remove the expiration job
if expiration = Pleroma.Workers.PurgeExpiredActivity.get_expiration(id) do
Oban.cancel_job(expiration.id)
end
user
|> cast(params, [:pinned_activities])
|> validate_length(:pinned_activities,
max: max_pinned_statuses,
message: "You have already pinned the maximum number of statuses"
)
else
change(user)
end
|> update_and_set_cache()
end
def remove_pinnned_activity(user, %Pleroma.Activity{id: id, data: data}) do
params = %{pinned_activities: List.delete(user.pinned_activities, id)}
# if the pinned activity was scheduled for deletion, re-enqueue the expiration job on unpin
if data["expires_at"] do
# MRF.ActivityExpirationPolicy used UTC timestamps for expires_at in original implementation
{:ok, expires_at} =
data["expires_at"] |> Pleroma.EctoType.ActivityPub.ObjectValidators.DateTime.cast()
Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
activity_id: id,
expires_at: expires_at
})
end
user
|> cast(params, [:pinned_activities])
|> update_and_set_cache()
end
def update_email_notifications(user, settings) do
email_notifications =
user.email_notifications
|> Map.merge(settings)
|> Map.take(["digest"])
params = %{email_notifications: email_notifications}
fields = [:email_notifications]
user
|> cast(params, fields)
|> validate_required(fields)
|> update_and_set_cache()
end
defp set_domain_blocks(user, domain_blocks) do
params = %{domain_blocks: domain_blocks}
user
|> cast(params, [:domain_blocks])
|> validate_required([:domain_blocks])
|> update_and_set_cache()
end
def block_domain(user, domain_blocked) do
set_domain_blocks(user, Enum.uniq([domain_blocked | user.domain_blocks]))
end
def unblock_domain(user, domain_blocked) do
set_domain_blocks(user, List.delete(user.domain_blocks, domain_blocked))
end
@spec add_to_block(User.t(), User.t()) ::
{:ok, UserRelationship.t()} | {:error, Ecto.Changeset.t()}
defp add_to_block(%User{} = user, %User{} = blocked) do
UserRelationship.create_block(user, blocked)
end
@spec remove_from_block(User.t(), User.t()) ::
{:ok, UserRelationship.t()} | {:ok, nil} | {:error, Ecto.Changeset.t()}
defp remove_from_block(%User{} = user, %User{} = blocked) do
UserRelationship.delete_block(user, blocked)
end
def set_invisible(user, invisible) do
params = %{invisible: invisible}
user
|> cast(params, [:invisible])
|> validate_required([:invisible])
|> update_and_set_cache()
end
def sanitize_html(%User{} = user) do
sanitize_html(user, nil)
end
# User data that mastodon isn't filtering (treated as plaintext):
# - field name
# - display name
def sanitize_html(%User{} = user, filter) do
fields =
Enum.map(user.fields, fn %{"name" => name, "value" => value} ->
%{
"name" => name,
"value" => HTML.filter_tags(value, Pleroma.HTML.Scrubber.LinksOnly)
}
end)
user
|> Map.put(:bio, HTML.filter_tags(user.bio, filter))
|> Map.put(:fields, fields)
end
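# Illustrative sketch (made-up field data): field values always go through the
# links-only scrubber, no matter which filter is applied to the bio, e.g.
#
#   user
#   |> Map.put(:fields, [%{"name" => "site", "value" => "<b>bold</b> <a href=\"https://example.com\">x</a>"}])
#   |> User.sanitize_html()
#
# would keep the link but drop the <b> tag from the field value (the exact
# output depends on Pleroma.HTML.Scrubber.LinksOnly).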
end
diff --git a/lib/pleroma/web/activity_pub/activity_pub.ex b/lib/pleroma/web/activity_pub/activity_pub.ex
index d8f685d38..35f71b7ae 100644
--- a/lib/pleroma/web/activity_pub/activity_pub.ex
+++ b/lib/pleroma/web/activity_pub/activity_pub.ex
@@ -1,1462 +1,1460 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.ActivityPub do
alias Pleroma.Activity
alias Pleroma.Activity.Ir.Topics
alias Pleroma.Config
alias Pleroma.Constants
alias Pleroma.Conversation
alias Pleroma.Conversation.Participation
alias Pleroma.Filter
alias Pleroma.Maps
alias Pleroma.Notification
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Object.Fetcher
alias Pleroma.Pagination
alias Pleroma.Repo
alias Pleroma.Upload
alias Pleroma.User
alias Pleroma.Web.ActivityPub.MRF
alias Pleroma.Web.ActivityPub.Transmogrifier
alias Pleroma.Web.Streamer
alias Pleroma.Web.WebFinger
alias Pleroma.Workers.BackgroundWorker
import Ecto.Query
import Pleroma.Web.ActivityPub.Utils
import Pleroma.Web.ActivityPub.Visibility
require Logger
require Pleroma.Constants
defp get_recipients(%{"type" => "Create"} = data) do
to = Map.get(data, "to", [])
cc = Map.get(data, "cc", [])
bcc = Map.get(data, "bcc", [])
actor = Map.get(data, "actor", [])
recipients = [to, cc, bcc, [actor]] |> Enum.concat() |> Enum.uniq()
{recipients, to, cc}
end
defp get_recipients(data) do
to = Map.get(data, "to", [])
cc = Map.get(data, "cc", [])
bcc = Map.get(data, "bcc", [])
recipients = Enum.concat([to, cc, bcc])
{recipients, to, cc}
end
defp check_actor_is_active(nil), do: true
defp check_actor_is_active(actor) when is_binary(actor) do
case User.get_cached_by_ap_id(actor) do
%User{deactivated: deactivated} -> not deactivated
_ -> false
end
end
defp check_remote_limit(%{"object" => %{"content" => content}}) when not is_nil(content) do
limit = Config.get([:instance, :remote_limit])
String.length(content) <= limit
end
defp check_remote_limit(_), do: true
def increase_note_count_if_public(actor, object) do
if is_public?(object), do: User.increase_note_count(actor), else: {:ok, actor}
end
def decrease_note_count_if_public(actor, object) do
if is_public?(object), do: User.decrease_note_count(actor), else: {:ok, actor}
end
defp increase_replies_count_if_reply(%{
"object" => %{"inReplyTo" => reply_ap_id} = object,
"type" => "Create"
}) do
if is_public?(object) do
Object.increase_replies_count(reply_ap_id)
end
end
defp increase_replies_count_if_reply(_create_data), do: :noop
@object_types ~w[ChatMessage Question Answer Audio Video Event Article]
@spec persist(map(), keyword()) :: {:ok, Activity.t() | Object.t(), keyword()}
def persist(%{"type" => type} = object, meta) when type in @object_types do
with {:ok, object} <- Object.create(object) do
{:ok, object, meta}
end
end
def persist(object, meta) do
with local <- Keyword.fetch!(meta, :local),
{recipients, _, _} <- get_recipients(object),
{:ok, activity} <-
Repo.insert(%Activity{
data: object,
local: local,
recipients: recipients,
actor: object["actor"]
}),
# TODO: add tests for expired activities once the Note type is supported in the new pipeline
{:ok, _} <- maybe_create_activity_expiration(activity) do
{:ok, activity, meta}
end
end
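# Dispatch sketch: a standalone object whose type is in @object_types (e.g. a
# "Question") is stored via Object.create/1, while anything else (e.g. a
# "Create" wrapper) is inserted as an %Activity{} with its recipients and
# optional expiration:
#
#   persist(%{"type" => "Question", ...}, local: true)  # -> {:ok, %Object{}, meta}
#   persist(%{"type" => "Create", ...}, local: true)    # -> {:ok, %Activity{}, meta}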
@spec insert(map(), boolean(), boolean(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
def insert(map, local \\ true, fake \\ false, bypass_actor_check \\ false) when is_map(map) do
with nil <- Activity.normalize(map),
map <- lazy_put_activity_defaults(map, fake),
{_, true} <- {:actor_check, bypass_actor_check || check_actor_is_active(map["actor"])},
{_, true} <- {:remote_limit_pass, check_remote_limit(map)},
{:ok, map} <- MRF.filter(map),
{recipients, _, _} = get_recipients(map),
{:fake, false, map, recipients} <- {:fake, fake, map, recipients},
{:containment, :ok} <- {:containment, Containment.contain_child(map)},
{:ok, map, object} <- insert_full_object(map),
{:ok, activity} <- insert_activity_with_expiration(map, local, recipients) do
# Splice in the child object if we have one.
activity = Maps.put_if_present(activity, :object, object)
BackgroundWorker.enqueue("fetch_data_for_activity", %{"activity_id" => activity.id})
{:ok, activity}
else
%Activity{} = activity ->
{:ok, activity}
{:actor_check, _} ->
{:error, false}
{:containment, _} = error ->
error
{:error, _} = error ->
error
{:fake, true, map, recipients} ->
activity = %Activity{
data: map,
local: local,
actor: map["actor"],
recipients: recipients,
id: "pleroma:fakeid"
}
Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
{:ok, activity}
{:remote_limit_pass, _} ->
{:error, :remote_limit}
{:reject, _} = e ->
{:error, e}
end
end
defp insert_activity_with_expiration(data, local, recipients) do
struct = %Activity{
data: data,
local: local,
actor: data["actor"],
recipients: recipients
}
with {:ok, activity} <- Repo.insert(struct) do
maybe_create_activity_expiration(activity)
end
end
def notify_and_stream(activity) do
Notification.create_notifications(activity)
conversation = create_or_bump_conversation(activity, activity.actor)
participations = get_participations(conversation)
stream_out(activity)
stream_out_participations(participations)
end
defp maybe_create_activity_expiration(
%{data: %{"expires_at" => %DateTime{} = expires_at}} = activity
) do
with {:ok, _job} <-
Pleroma.Workers.PurgeExpiredActivity.enqueue(%{
activity_id: activity.id,
expires_at: expires_at
}) do
{:ok, activity}
end
end
defp maybe_create_activity_expiration(activity), do: {:ok, activity}
defp create_or_bump_conversation(activity, actor) do
with {:ok, conversation} <- Conversation.create_or_bump_for(activity),
%User{} = user <- User.get_cached_by_ap_id(actor) do
Participation.mark_as_read(user, conversation)
{:ok, conversation}
end
end
defp get_participations({:ok, conversation}) do
conversation
|> Repo.preload(:participations, force: true)
|> Map.get(:participations)
end
defp get_participations(_), do: []
def stream_out_participations(participations) do
participations =
participations
|> Repo.preload(:user)
Streamer.stream("participation", participations)
end
def stream_out_participations(%Object{data: %{"context" => context}}, user) do
with %Conversation{} = conversation <- Conversation.get_for_ap_id(context) do
conversation = Repo.preload(conversation, :participations)
last_activity_id =
fetch_latest_direct_activity_id_for_context(conversation.ap_id, %{
user: user,
blocking_user: user
})
if last_activity_id do
stream_out_participations(conversation.participations)
end
end
end
def stream_out_participations(_, _), do: :noop
def stream_out(%Activity{data: %{"type" => data_type}} = activity)
when data_type in ["Create", "Announce", "Delete"] do
activity
|> Topics.get_activity_topics()
|> Streamer.stream(activity)
end
def stream_out(_activity) do
:noop
end
@spec create(map(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
def create(params, fake \\ false) do
with {:ok, result} <- Repo.transaction(fn -> do_create(params, fake) end) do
result
end
end
defp do_create(%{to: to, actor: actor, context: context, object: object} = params, fake) do
additional = params[:additional] || %{}
# only an explicit `false` is treated as a false value; anything else means local
local = !(params[:local] == false)
published = params[:published]
quick_insert? = Config.get([:env]) == :benchmark
create_data =
make_create_data(
%{to: to, actor: actor, published: published, context: context, object: object},
additional
)
with {:ok, activity} <- insert(create_data, local, fake),
{:fake, false, activity} <- {:fake, fake, activity},
_ <- increase_replies_count_if_reply(create_data),
{:quick_insert, false, activity} <- {:quick_insert, quick_insert?, activity},
{:ok, _actor} <- increase_note_count_if_public(actor, activity),
_ <- notify_and_stream(activity),
:ok <- maybe_federate(activity) do
{:ok, activity}
else
{:quick_insert, true, activity} ->
{:ok, activity}
{:fake, true, activity} ->
{:ok, activity}
{:error, message} ->
Repo.rollback(message)
end
end
@spec listen(map()) :: {:ok, Activity.t()} | {:error, any()}
def listen(%{to: to, actor: actor, context: context, object: object} = params) do
additional = params[:additional] || %{}
# only an explicit `false` is treated as a false value; anything else means local
local = !(params[:local] == false)
published = params[:published]
listen_data =
make_listen_data(
%{to: to, actor: actor, published: published, context: context, object: object},
additional
)
with {:ok, activity} <- insert(listen_data, local),
_ <- notify_and_stream(activity),
:ok <- maybe_federate(activity) do
{:ok, activity}
end
end
@spec unfollow(User.t(), User.t(), String.t() | nil, boolean()) ::
{:ok, Activity.t()} | nil | {:error, any()}
def unfollow(follower, followed, activity_id \\ nil, local \\ true) do
with {:ok, result} <-
Repo.transaction(fn -> do_unfollow(follower, followed, activity_id, local) end) do
result
end
end
defp do_unfollow(follower, followed, activity_id, local) do
with %Activity{} = follow_activity <- fetch_latest_follow(follower, followed),
{:ok, follow_activity} <- update_follow_state(follow_activity, "cancelled"),
unfollow_data <- make_unfollow_data(follower, followed, follow_activity, activity_id),
{:ok, activity} <- insert(unfollow_data, local),
_ <- notify_and_stream(activity),
:ok <- maybe_federate(activity) do
{:ok, activity}
else
nil -> nil
{:error, error} -> Repo.rollback(error)
end
end
@spec flag(map()) :: {:ok, Activity.t()} | {:error, any()}
def flag(
%{
actor: actor,
context: _context,
account: account,
statuses: statuses,
content: content
} = params
) do
# only an explicit `false` is treated as a false value
local = !(params[:local] == false)
forward = !(params[:forward] == false)
additional = params[:additional] || %{}
additional =
if forward do
Map.merge(additional, %{"to" => [], "cc" => [account.ap_id]})
else
Map.merge(additional, %{"to" => [], "cc" => []})
end
with flag_data <- make_flag_data(params, additional),
{:ok, activity} <- insert(flag_data, local),
{:ok, stripped_activity} <- strip_report_status_data(activity),
_ <- notify_and_stream(activity),
:ok <- maybe_federate(stripped_activity) do
User.all_superusers()
|> Enum.filter(fn user -> not is_nil(user.email) end)
|> Enum.each(fn superuser ->
superuser
|> Pleroma.Emails.AdminEmail.report(actor, account, statuses, content)
|> Pleroma.Emails.Mailer.deliver_async()
end)
{:ok, activity}
end
end
@spec move(User.t(), User.t(), boolean()) :: {:ok, Activity.t()} | {:error, any()}
def move(%User{} = origin, %User{} = target, local \\ true) do
params = %{
"type" => "Move",
"actor" => origin.ap_id,
"object" => origin.ap_id,
"target" => target.ap_id
}
with true <- origin.ap_id in target.also_known_as,
{:ok, activity} <- insert(params, local),
_ <- notify_and_stream(activity) do
maybe_federate(activity)
BackgroundWorker.enqueue("move_following", %{
"origin_id" => origin.id,
"target_id" => target.id
})
{:ok, activity}
else
false -> {:error, "Target account must have the origin in `alsoKnownAs`"}
err -> err
end
end
def fetch_activities_for_context_query(context, opts) do
public = [Constants.as_public()]
recipients =
if opts[:user],
do: [opts[:user].ap_id | User.following(opts[:user])] ++ public,
else: public
from(activity in Activity)
|> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts)
|> maybe_set_thread_muted_field(opts)
|> restrict_blocked(opts)
|> restrict_recipients(recipients, opts[:user])
|> restrict_filtered(opts)
|> where(
[activity],
fragment(
"?->>'type' = ? and ?->>'context' = ?",
activity.data,
"Create",
activity.data,
^context
)
)
|> exclude_poll_votes(opts)
|> exclude_id(opts)
|> order_by([activity], desc: activity.id)
end
@spec fetch_activities_for_context(String.t(), keyword() | map()) :: [Activity.t()]
def fetch_activities_for_context(context, opts \\ %{}) do
context
|> fetch_activities_for_context_query(opts)
|> Repo.all()
end
@spec fetch_latest_direct_activity_id_for_context(String.t(), keyword() | map()) ::
FlakeId.Ecto.CompatType.t() | nil
def fetch_latest_direct_activity_id_for_context(context, opts \\ %{}) do
context
|> fetch_activities_for_context_query(Map.merge(%{skip_preload: true}, opts))
|> restrict_visibility(%{visibility: "direct"})
|> limit(1)
|> select([a], a.id)
|> Repo.one()
end
@spec fetch_public_or_unlisted_activities(map(), Pagination.type()) :: [Activity.t()]
def fetch_public_or_unlisted_activities(opts \\ %{}, pagination \\ :keyset) do
opts = Map.delete(opts, :user)
[Constants.as_public()]
|> fetch_activities_query(opts)
|> restrict_unlisted(opts)
|> Pagination.fetch_paginated(opts, pagination)
end
@spec fetch_public_activities(map(), Pagination.type()) :: [Activity.t()]
def fetch_public_activities(opts \\ %{}, pagination \\ :keyset) do
opts
|> Map.put(:restrict_unlisted, true)
|> fetch_public_or_unlisted_activities(pagination)
end
@valid_visibilities ~w[direct unlisted public private]
defp restrict_visibility(query, %{visibility: visibility})
when is_list(visibility) do
if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
from(
a in query,
where:
fragment(
"activity_visibility(?, ?, ?) = ANY (?)",
a.actor,
a.recipients,
a.data,
^visibility
)
)
else
Logger.error("Could not restrict visibility to #{visibility}")
end
end
defp restrict_visibility(query, %{visibility: visibility})
when visibility in @valid_visibilities do
from(
a in query,
where:
fragment("activity_visibility(?, ?, ?) = ?", a.actor, a.recipients, a.data, ^visibility)
)
end
defp restrict_visibility(_query, %{visibility: visibility})
when visibility not in @valid_visibilities do
Logger.error("Could not restrict visibility to #{visibility}")
end
defp restrict_visibility(query, _visibility), do: query
defp exclude_visibility(query, %{exclude_visibilities: visibility})
when is_list(visibility) do
if Enum.all?(visibility, &(&1 in @valid_visibilities)) do
from(
a in query,
where:
not fragment(
"activity_visibility(?, ?, ?) = ANY (?)",
a.actor,
a.recipients,
a.data,
^visibility
)
)
else
Logger.error("Could not exclude visibility to #{visibility}")
query
end
end
defp exclude_visibility(query, %{exclude_visibilities: visibility})
when visibility in @valid_visibilities do
from(
a in query,
where:
not fragment(
"activity_visibility(?, ?, ?) = ?",
a.actor,
a.recipients,
a.data,
^visibility
)
)
end
defp exclude_visibility(query, %{exclude_visibilities: visibility})
when visibility not in [nil | @valid_visibilities] do
Logger.error("Could not exclude visibility to #{visibility}")
query
end
defp exclude_visibility(query, _visibility), do: query
defp restrict_thread_visibility(query, _, %{skip_thread_containment: true} = _),
do: query
defp restrict_thread_visibility(query, %{user: %User{skip_thread_containment: true}}, _),
do: query
defp restrict_thread_visibility(query, %{user: %User{ap_id: ap_id}}, _) do
from(
a in query,
where: fragment("thread_visibility(?, (?)->>'id') = true", ^ap_id, a.data)
)
end
defp restrict_thread_visibility(query, _, _), do: query
def fetch_user_abstract_activities(user, reading_user, params \\ %{}) do
params =
params
|> Map.put(:user, reading_user)
|> Map.put(:actor_id, user.ap_id)
%{
godmode: params[:godmode],
reading_user: reading_user
}
|> user_activities_recipients()
|> fetch_activities(params)
|> Enum.reverse()
end
def fetch_user_activities(user, reading_user, params \\ %{}) do
params =
params
|> Map.put(:type, ["Create", "Announce"])
|> Map.put(:user, reading_user)
|> Map.put(:actor_id, user.ap_id)
|> Map.put(:pinned_activity_ids, user.pinned_activities)
params =
if User.blocks?(reading_user, user) do
params
else
params
|> Map.put(:blocking_user, reading_user)
|> Map.put(:muting_user, reading_user)
end
%{
godmode: params[:godmode],
reading_user: reading_user
}
|> user_activities_recipients()
|> fetch_activities(params)
|> Enum.reverse()
end
def fetch_statuses(reading_user, params) do
params = Map.put(params, :type, ["Create", "Announce"])
%{
godmode: params[:godmode],
reading_user: reading_user
}
|> user_activities_recipients()
|> fetch_activities(params, :offset)
|> Enum.reverse()
end
defp user_activities_recipients(%{godmode: true}), do: []
defp user_activities_recipients(%{reading_user: reading_user}) do
if reading_user do
[Constants.as_public(), reading_user.ap_id | User.following(reading_user)]
else
[Constants.as_public()]
end
end
defp restrict_announce_object_actor(_query, %{announce_filtering_user: _, skip_preload: true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_announce_object_actor(query, %{announce_filtering_user: %{ap_id: actor}}) do
from(
[activity, object] in query,
where:
fragment(
"?->>'type' != ? or ?->>'actor' != ?",
activity.data,
"Announce",
object.data,
^actor
)
)
end
defp restrict_announce_object_actor(query, _), do: query
defp restrict_since(query, %{since_id: ""}), do: query
defp restrict_since(query, %{since_id: since_id}) do
from(activity in query, where: activity.id > ^since_id)
end
defp restrict_since(query, _), do: query
defp restrict_tag_reject(_query, %{tag_reject: _tag_reject, skip_preload: true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_tag_reject(query, %{tag_reject: [_ | _] = tag_reject}) do
from(
[_activity, object] in query,
where: fragment("not (?)->'tag' \\?| (?)", object.data, ^tag_reject)
)
end
defp restrict_tag_reject(query, _), do: query
defp restrict_tag_all(_query, %{tag_all: _tag_all, skip_preload: true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_tag_all(query, %{tag_all: [_ | _] = tag_all}) do
from(
[_activity, object] in query,
where: fragment("(?)->'tag' \\?& (?)", object.data, ^tag_all)
)
end
defp restrict_tag_all(query, _), do: query
defp restrict_tag(_query, %{tag: _tag, skip_preload: true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_tag(query, %{tag: tag}) when is_list(tag) do
from(
[_activity, object] in query,
where: fragment("(?)->'tag' \\?| (?)", object.data, ^tag)
)
end
defp restrict_tag(query, %{tag: tag}) when is_binary(tag) do
from(
[_activity, object] in query,
where: fragment("(?)->'tag' \\? (?)", object.data, ^tag)
)
end
defp restrict_tag(query, _), do: query
defp restrict_recipients(query, [], _user), do: query
defp restrict_recipients(query, recipients, nil) do
from(activity in query, where: fragment("? && ?", ^recipients, activity.recipients))
end
defp restrict_recipients(query, recipients, user) do
from(
activity in query,
where: fragment("? && ?", ^recipients, activity.recipients),
or_where: activity.actor == ^user.ap_id
)
end
defp restrict_local(query, %{local_only: true}) do
from(activity in query, where: activity.local == true)
end
defp restrict_local(query, _), do: query
defp restrict_actor(query, %{actor_id: actor_id}) do
from(activity in query, where: activity.actor == ^actor_id)
end
defp restrict_actor(query, _), do: query
defp restrict_type(query, %{type: type}) when is_binary(type) do
from(activity in query, where: fragment("?->>'type' = ?", activity.data, ^type))
end
defp restrict_type(query, %{type: type}) do
from(activity in query, where: fragment("?->>'type' = ANY(?)", activity.data, ^type))
end
defp restrict_type(query, _), do: query
defp restrict_state(query, %{state: state}) do
from(activity in query, where: fragment("?->>'state' = ?", activity.data, ^state))
end
defp restrict_state(query, _), do: query
defp restrict_favorited_by(query, %{favorited_by: ap_id}) do
from(
[_activity, object] in query,
where: fragment("(?)->'likes' \\? (?)", object.data, ^ap_id)
)
end
defp restrict_favorited_by(query, _), do: query
defp restrict_media(_query, %{only_media: _val, skip_preload: true}) do
raise "Can't use the child object without preloading!"
end
defp restrict_media(query, %{only_media: true}) do
from(
[activity, object] in query,
where: fragment("(?)->>'type' = ?", activity.data, "Create"),
where: fragment("not (?)->'attachment' = (?)", object.data, ^[])
)
end
defp restrict_media(query, _), do: query
defp restrict_replies(query, %{exclude_replies: true}) do
from(
[_activity, object] in query,
where: fragment("?->>'inReplyTo' is null", object.data)
)
end
defp restrict_replies(query, %{
reply_filtering_user: %User{} = user,
reply_visibility: "self"
}) do
from(
[activity, object] in query,
where:
fragment(
"?->>'inReplyTo' is null OR ? = ANY(?)",
object.data,
^user.ap_id,
activity.recipients
)
)
end
defp restrict_replies(query, %{
reply_filtering_user: %User{} = user,
reply_visibility: "following"
}) do
from(
[activity, object] in query,
where:
fragment(
"""
?->>'type' != 'Create' -- This isn't a Create
OR ?->>'inReplyTo' is null -- this isn't a reply
OR ? && array_remove(?, ?) -- The recipient is us or one of our friends,
-- unless they are the author (because authors
-- are also part of the recipients). This leads
-- to a bug where self-replies by friends won't
-- show up.
OR ? = ? -- The actor is us
""",
activity.data,
object.data,
^[user.ap_id | User.get_cached_user_friends_ap_ids(user)],
activity.recipients,
activity.actor,
activity.actor,
^user.ap_id
)
)
end
defp restrict_replies(query, _), do: query
defp restrict_reblogs(query, %{exclude_reblogs: true}) do
from(activity in query, where: fragment("?->>'type' != 'Announce'", activity.data))
end
defp restrict_reblogs(query, _), do: query
defp restrict_muted(query, %{with_muted: true}), do: query
defp restrict_muted(query, %{muting_user: %User{} = user} = opts) do
mutes = opts[:muted_users_ap_ids] || User.muted_users_ap_ids(user)
query =
from([activity] in query,
where: fragment("not (? = ANY(?))", activity.actor, ^mutes),
where:
fragment(
"not (?->'to' \\?| ?) or ? = ?",
activity.data,
^mutes,
activity.actor,
^user.ap_id
)
)
unless opts[:skip_preload] do
from([thread_mute: tm] in query, where: is_nil(tm.user_id))
else
query
end
end
defp restrict_muted(query, _), do: query
defp restrict_blocked(query, %{blocking_user: %User{} = user} = opts) do
blocked_ap_ids = opts[:blocked_users_ap_ids] || User.blocked_users_ap_ids(user)
domain_blocks = user.domain_blocks || []
following_ap_ids = User.get_friends_ap_ids(user)
query =
if has_named_binding?(query, :object), do: query, else: Activity.with_joined_object(query)
from(
[activity, object: o] in query,
where: fragment("not (? = ANY(?))", activity.actor, ^blocked_ap_ids),
where:
fragment(
"((not (? && ?)) or ? = ?)",
activity.recipients,
^blocked_ap_ids,
activity.actor,
^user.ap_id
),
where:
fragment(
"recipients_contain_blocked_domains(?, ?) = false",
activity.recipients,
^domain_blocks
),
where:
fragment(
"not (?->>'type' = 'Announce' and ?->'to' \\?| ?)",
activity.data,
activity.data,
^blocked_ap_ids
),
where:
fragment(
"(not (split_part(?, '/', 3) = ANY(?))) or ? = ANY(?)",
activity.actor,
^domain_blocks,
activity.actor,
^following_ap_ids
),
where:
fragment(
"(not (split_part(?->>'actor', '/', 3) = ANY(?))) or (?->>'actor') = ANY(?)",
o.data,
^domain_blocks,
o.data,
^following_ap_ids
)
)
end
defp restrict_blocked(query, _), do: query
defp restrict_unlisted(query, %{restrict_unlisted: true}) do
from(
activity in query,
where:
fragment(
"not (coalesce(?->'cc', '{}'::jsonb) \\?| ?)",
activity.data,
^[Constants.as_public()]
)
)
end
defp restrict_unlisted(query, _), do: query
defp restrict_pinned(query, %{pinned: true, pinned_activity_ids: ids}) do
from(activity in query, where: activity.id in ^ids)
end
defp restrict_pinned(query, _), do: query
defp restrict_muted_reblogs(query, %{muting_user: %User{} = user} = opts) do
muted_reblogs = opts[:reblog_muted_users_ap_ids] || User.reblog_muted_users_ap_ids(user)
from(
activity in query,
where:
fragment(
"not ( ?->>'type' = 'Announce' and ? = ANY(?))",
activity.data,
activity.actor,
^muted_reblogs
)
)
end
defp restrict_muted_reblogs(query, _), do: query
defp restrict_instance(query, %{instance: instance}) when is_binary(instance) do
from(
activity in query,
where: fragment("split_part(actor::text, '/'::text, 3) = ?", ^instance)
)
end
defp restrict_instance(query, _), do: query
defp restrict_filtered(query, %{user: %User{} = user}) do
case Filter.compose_regex(user) do
nil ->
query
regex ->
from([activity, object] in query,
where:
fragment("not(?->>'content' ~* ?)", object.data, ^regex) or
activity.actor == ^user.ap_id
)
end
end
defp restrict_filtered(query, %{blocking_user: %User{} = user}) do
restrict_filtered(query, %{user: user})
end
defp restrict_filtered(query, _), do: query
defp exclude_poll_votes(query, %{include_poll_votes: true}), do: query
defp exclude_poll_votes(query, _) do
if has_named_binding?(query, :object) do
from([activity, object: o] in query,
where: fragment("not(?->>'type' = ?)", o.data, "Answer")
)
else
query
end
end
defp exclude_chat_messages(query, %{include_chat_messages: true}), do: query
defp exclude_chat_messages(query, _) do
if has_named_binding?(query, :object) do
from([activity, object: o] in query,
where: fragment("not(?->>'type' = ?)", o.data, "ChatMessage")
)
else
query
end
end
defp exclude_invisible_actors(query, %{invisible_actors: true}), do: query
defp exclude_invisible_actors(query, _opts) do
invisible_ap_ids =
User.Query.build(%{invisible: true, select: [:ap_id]})
|> Repo.all()
|> Enum.map(fn %{ap_id: ap_id} -> ap_id end)
from([activity] in query, where: activity.actor not in ^invisible_ap_ids)
end
defp exclude_id(query, %{exclude_id: id}) when is_binary(id) do
from(activity in query, where: activity.id != ^id)
end
defp exclude_id(query, _), do: query
defp maybe_preload_objects(query, %{skip_preload: true}), do: query
defp maybe_preload_objects(query, _) do
query
|> Activity.with_preloaded_object()
end
defp maybe_preload_bookmarks(query, %{skip_preload: true}), do: query
defp maybe_preload_bookmarks(query, opts) do
query
|> Activity.with_preloaded_bookmark(opts[:user])
end
defp maybe_preload_report_notes(query, %{preload_report_notes: true}) do
query
|> Activity.with_preloaded_report_notes()
end
defp maybe_preload_report_notes(query, _), do: query
defp maybe_set_thread_muted_field(query, %{skip_preload: true}), do: query
defp maybe_set_thread_muted_field(query, opts) do
query
|> Activity.with_set_thread_muted_field(opts[:muting_user] || opts[:user])
end
defp maybe_order(query, %{order: :desc}) do
query
|> order_by(desc: :id)
end
defp maybe_order(query, %{order: :asc}) do
query
|> order_by(asc: :id)
end
defp maybe_order(query, _), do: query
defp fetch_activities_query_ap_ids_ops(opts) do
source_user = opts[:muting_user]
ap_id_relationships = if source_user, do: [:mute, :reblog_mute], else: []
ap_id_relationships =
if opts[:blocking_user] && opts[:blocking_user] == source_user do
[:block | ap_id_relationships]
else
ap_id_relationships
end
preloaded_ap_ids = User.outgoing_relationships_ap_ids(source_user, ap_id_relationships)
restrict_blocked_opts = Map.merge(%{blocked_users_ap_ids: preloaded_ap_ids[:block]}, opts)
restrict_muted_opts = Map.merge(%{muted_users_ap_ids: preloaded_ap_ids[:mute]}, opts)
restrict_muted_reblogs_opts =
Map.merge(%{reblog_muted_users_ap_ids: preloaded_ap_ids[:reblog_mute]}, opts)
{restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts}
end
def fetch_activities_query(recipients, opts \\ %{}) do
{restrict_blocked_opts, restrict_muted_opts, restrict_muted_reblogs_opts} =
fetch_activities_query_ap_ids_ops(opts)
config = %{
skip_thread_containment: Config.get([:instance, :skip_thread_containment])
}
Activity
|> maybe_preload_objects(opts)
|> maybe_preload_bookmarks(opts)
|> maybe_preload_report_notes(opts)
|> maybe_set_thread_muted_field(opts)
|> maybe_order(opts)
|> restrict_recipients(recipients, opts[:user])
|> restrict_replies(opts)
|> restrict_tag(opts)
|> restrict_tag_reject(opts)
|> restrict_tag_all(opts)
|> restrict_since(opts)
|> restrict_local(opts)
|> restrict_actor(opts)
|> restrict_type(opts)
|> restrict_state(opts)
|> restrict_favorited_by(opts)
|> restrict_blocked(restrict_blocked_opts)
|> restrict_muted(restrict_muted_opts)
|> restrict_filtered(opts)
|> restrict_media(opts)
|> restrict_visibility(opts)
|> restrict_thread_visibility(opts, config)
|> restrict_reblogs(opts)
|> restrict_pinned(opts)
|> restrict_muted_reblogs(restrict_muted_reblogs_opts)
|> restrict_instance(opts)
|> restrict_announce_object_actor(opts)
|> restrict_filtered(opts)
|> Activity.restrict_deactivated_users()
|> exclude_poll_votes(opts)
|> exclude_chat_messages(opts)
|> exclude_invisible_actors(opts)
|> exclude_visibility(opts)
end
def fetch_activities(recipients, opts \\ %{}, pagination \\ :keyset) do
list_memberships = Pleroma.List.memberships(opts[:user])
fetch_activities_query(recipients ++ list_memberships, opts)
|> Pagination.fetch_paginated(opts, pagination)
|> Enum.reverse()
|> maybe_update_cc(list_memberships, opts[:user])
end
@doc """
Fetches the favorite activities of a user, ordered by the time they were added to favorites.
"""
@spec fetch_favourites(User.t(), map(), Pagination.type()) :: list(Activity.t())
def fetch_favourites(user, params \\ %{}, pagination \\ :keyset) do
user.ap_id
|> Activity.Queries.by_actor()
|> Activity.Queries.by_type("Like")
|> Activity.with_joined_object()
|> Object.with_joined_activity()
|> select([like, object, activity], %{activity | object: object, pagination_id: like.id})
|> order_by([like, _, _], desc_nulls_last: like.id)
|> Pagination.fetch_paginated(
Map.merge(params, %{skip_order: true}),
pagination
)
end
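# Usage sketch (illustrative parameters): fetching the latest favourites for a
# user with keyset pagination might look like
#
#   ActivityPub.fetch_favourites(user, %{"limit" => 20})
#
# The accepted pagination params are whatever Pagination.fetch_paginated/3
# understands; "limit" here is an assumption for illustration.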
defp maybe_update_cc(activities, [_ | _] = list_memberships, %User{ap_id: user_ap_id}) do
Enum.map(activities, fn
%{data: %{"bcc" => [_ | _] = bcc}} = activity ->
if Enum.any?(bcc, &(&1 in list_memberships)) do
update_in(activity.data["cc"], &[user_ap_id | &1])
else
activity
end
activity ->
activity
end)
end
defp maybe_update_cc(activities, _, _), do: activities
defp fetch_activities_bounded_query(query, recipients, recipients_with_public) do
from(activity in query,
where:
fragment("? && ?", activity.recipients, ^recipients) or
(fragment("? && ?", activity.recipients, ^recipients_with_public) and
^Constants.as_public() in activity.recipients)
)
end
def fetch_activities_bounded(
recipients,
recipients_with_public,
opts \\ %{},
pagination \\ :keyset
) do
fetch_activities_query([], opts)
|> fetch_activities_bounded_query(recipients, recipients_with_public)
|> Pagination.fetch_paginated(opts, pagination)
|> Enum.reverse()
end
@spec upload(Upload.source(), keyword()) :: {:ok, Object.t()} | {:error, any()}
def upload(file, opts \\ []) do
with {:ok, data} <- Upload.store(file, opts) do
obj_data = Maps.put_if_present(data, "actor", opts[:actor])
Repo.insert(%Object{data: obj_data})
end
end
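# Usage sketch: storing an upload and attributing it to an actor (the :actor
# option is read above via opts[:actor]):
#
#   {:ok, %Object{} = attachment} = ActivityPub.upload(file, actor: user.ap_id)
#
# where `file` is any Upload.source(), e.g. a %Plug.Upload{}.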
@spec get_actor_url(any()) :: binary() | nil
defp get_actor_url(url) when is_binary(url), do: url
defp get_actor_url(%{"href" => href}) when is_binary(href), do: href
defp get_actor_url(url) when is_list(url) do
url
|> List.first()
|> get_actor_url()
end
defp get_actor_url(_url), do: nil
defp object_to_user_data(data) do
avatar =
data["icon"]["url"] &&
%{
"type" => "Image",
"url" => [%{"href" => data["icon"]["url"]}]
}
banner =
data["image"]["url"] &&
%{
"type" => "Image",
"url" => [%{"href" => data["image"]["url"]}]
}
fields =
data
|> Map.get("attachment", [])
|> Enum.filter(fn %{"type" => t} -> t == "PropertyValue" end)
|> Enum.map(fn fields -> Map.take(fields, ["name", "value"]) end)
emojis =
data
|> Map.get("tag", [])
|> Enum.filter(fn
%{"type" => "Emoji"} -> true
_ -> false
end)
|> Map.new(fn %{"icon" => %{"url" => url}, "name" => name} ->
{String.trim(name, ":"), url}
end)
is_locked = data["manuallyApprovesFollowers"] || false
capabilities = data["capabilities"] || %{}
accepts_chat_messages = capabilities["acceptsChatMessages"]
data = Transmogrifier.maybe_fix_user_object(data)
is_discoverable = data["discoverable"] || false
invisible = data["invisible"] || false
actor_type = data["type"] || "Person"
public_key =
if is_map(data["publicKey"]) && is_binary(data["publicKey"]["publicKeyPem"]) do
data["publicKey"]["publicKeyPem"]
else
nil
end
shared_inbox =
if is_map(data["endpoints"]) && is_binary(data["endpoints"]["sharedInbox"]) do
data["endpoints"]["sharedInbox"]
else
nil
end
user_data = %{
ap_id: data["id"],
uri: get_actor_url(data["url"]),
ap_enabled: true,
banner: banner,
fields: fields,
emoji: emojis,
is_locked: is_locked,
is_discoverable: is_discoverable,
invisible: invisible,
avatar: avatar,
name: data["name"],
follower_address: data["followers"],
following_address: data["following"],
bio: data["summary"] || "",
actor_type: actor_type,
also_known_as: Map.get(data, "alsoKnownAs", []),
public_key: public_key,
inbox: data["inbox"],
shared_inbox: shared_inbox,
accepts_chat_messages: accepts_chat_messages
}
# nickname can be nil because of virtual actors
if data["preferredUsername"] do
Map.put(
user_data,
:nickname,
"#{data["preferredUsername"]}@#{URI.parse(data["id"]).host}"
)
else
Map.put(user_data, :nickname, nil)
end
end
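# Nickname sketch (illustrative values): for a remote actor document with
# "preferredUsername" => "alice" and "id" => "https://example.com/users/alice"
# the resulting :nickname is "alice@example.com"; virtual actors without a
# preferredUsername get a nil nickname.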
def fetch_follow_information_for_user(user) do
with {:ok, following_data} <-
- Fetcher.fetch_and_contain_remote_object_from_id(user.following_address,
- force_http: true
- ),
+ Fetcher.fetch_and_contain_remote_object_from_id(user.following_address),
{:ok, hide_follows} <- collection_private(following_data),
{:ok, followers_data} <-
- Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address, force_http: true),
+ Fetcher.fetch_and_contain_remote_object_from_id(user.follower_address),
{:ok, hide_followers} <- collection_private(followers_data) do
{:ok,
%{
hide_follows: hide_follows,
follower_count: normalize_counter(followers_data["totalItems"]),
following_count: normalize_counter(following_data["totalItems"]),
hide_followers: hide_followers
}}
else
{:error, _} = e -> e
e -> {:error, e}
end
end
defp normalize_counter(counter) when is_integer(counter), do: counter
defp normalize_counter(_), do: 0
def maybe_update_follow_information(user_data) do
with {:enabled, true} <- {:enabled, Config.get([:instance, :external_user_synchronization])},
{_, true} <- {:user_type_check, user_data[:type] in ["Person", "Service"]},
{_, true} <-
{:collections_available,
!!(user_data[:following_address] && user_data[:follower_address])},
{:ok, info} <-
fetch_follow_information_for_user(user_data) do
info = Map.merge(user_data[:info] || %{}, info)
user_data
|> Map.put(:info, info)
else
{:user_type_check, false} ->
user_data
{:collections_available, false} ->
user_data
{:enabled, false} ->
user_data
e ->
Logger.error(
"Follower/Following counter update for #{user_data.ap_id} failed.\n" <> inspect(e)
)
user_data
end
end
defp collection_private(%{"first" => %{"type" => type}})
when type in ["CollectionPage", "OrderedCollectionPage"],
do: {:ok, false}
defp collection_private(%{"first" => first}) do
with {:ok, %{"type" => type}} when type in ["CollectionPage", "OrderedCollectionPage"] <-
Fetcher.fetch_and_contain_remote_object_from_id(first) do
{:ok, false}
else
{:error, {:ok, %{status: code}}} when code in [401, 403] -> {:ok, true}
{:error, _} = e -> e
e -> {:error, e}
end
end
defp collection_private(_data), do: {:ok, true}
def user_data_from_user_object(data) do
with {:ok, data} <- MRF.filter(data) do
{:ok, object_to_user_data(data)}
else
e -> {:error, e}
end
end
- def fetch_and_prepare_user_from_ap_id(ap_id, opts \\ []) do
- with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id, opts),
+ def fetch_and_prepare_user_from_ap_id(ap_id) do
+ with {:ok, data} <- Fetcher.fetch_and_contain_remote_object_from_id(ap_id),
{:ok, data} <- user_data_from_user_object(data) do
{:ok, maybe_update_follow_information(data)}
else
# If this has been deleted, only log a debug and not an error
{:error, "Object has been deleted" = e} ->
Logger.debug("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
{:error, e}
{:error, {:reject, reason} = e} ->
Logger.info("Rejected user #{ap_id}: #{inspect(reason)}")
{:error, e}
{:error, e} ->
Logger.error("Could not decode user at fetch #{ap_id}, #{inspect(e)}")
{:error, e}
end
end
def maybe_handle_clashing_nickname(data) do
with nickname when is_binary(nickname) <- data[:nickname],
%User{} = old_user <- User.get_by_nickname(nickname),
{_, false} <- {:ap_id_comparison, data[:ap_id] == old_user.ap_id} do
Logger.info(
"Found an old user for #{nickname}, the old ap id is #{old_user.ap_id}, new one is #{
data[:ap_id]
}, renaming."
)
old_user
|> User.remote_user_changeset(%{nickname: "#{old_user.id}.#{old_user.nickname}"})
|> User.update_and_set_cache()
else
{:ap_id_comparison, true} ->
Logger.info(
"Found an old user for #{data[:nickname]}, but the ap id #{data[:ap_id]} is the same as the new user. Race condition? Not changing anything."
)
_ ->
nil
end
end
- def make_user_from_ap_id(ap_id, opts \\ []) do
+ def make_user_from_ap_id(ap_id) do
user = User.get_cached_by_ap_id(ap_id)
if user && !User.ap_enabled?(user) do
Transmogrifier.upgrade_user_from_ap_id(ap_id)
else
- with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id, opts) do
+ with {:ok, data} <- fetch_and_prepare_user_from_ap_id(ap_id) do
if user do
user
|> User.remote_user_changeset(data)
|> User.update_and_set_cache()
else
maybe_handle_clashing_nickname(data)
data
|> User.remote_user_changeset()
|> Repo.insert()
|> User.set_cache()
end
end
end
end
def make_user_from_nickname(nickname) do
with {:ok, %{"ap_id" => ap_id}} when not is_nil(ap_id) <- WebFinger.finger(nickname) do
make_user_from_ap_id(ap_id)
else
_e -> {:error, "No AP id in WebFinger"}
end
end
# filter out broken threads
defp contain_broken_threads(%Activity{} = activity, %User{} = user) do
entire_thread_visible_for_user?(activity, user)
end
# do post-processing on a specific activity
def contain_activity(%Activity{} = activity, %User{} = user) do
contain_broken_threads(activity, user)
end
def fetch_direct_messages_query do
Activity
|> restrict_type(%{type: "Create"})
|> restrict_visibility(%{visibility: "direct"})
|> order_by([activity], asc: activity.id)
end
end
diff --git a/lib/pleroma/web/activity_pub/publisher.ex b/lib/pleroma/web/activity_pub/publisher.ex
index a2930c1cd..5ab3562bf 100644
--- a/lib/pleroma/web/activity_pub/publisher.ex
+++ b/lib/pleroma/web/activity_pub/publisher.ex
@@ -1,295 +1,281 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.Publisher do
alias Pleroma.Activity
alias Pleroma.Config
alias Pleroma.Delivery
alias Pleroma.HTTP
alias Pleroma.Instances
alias Pleroma.Object
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.Relay
alias Pleroma.Web.ActivityPub.Transmogrifier
- alias Pleroma.Web.FedSockets
require Pleroma.Constants
import Pleroma.Web.ActivityPub.Visibility
@behaviour Pleroma.Web.Federator.Publisher
require Logger
@moduledoc """
ActivityPub outgoing federation module.
"""
@doc """
Determine if an activity can be represented by running it through Transmogrifier.
"""
def is_representable?(%Activity{} = activity) do
with {:ok, _data} <- Transmogrifier.prepare_outgoing(activity.data) do
true
else
_e ->
false
end
end
@doc """
Publish a single message to a peer. Takes a struct with the following
parameters set:
* `inbox`: the inbox to publish to
* `json`: the JSON message body representing the ActivityPub message
* `actor`: the actor which is signing the message
* `id`: the ActivityStreams URI of the message
"""
def publish_one(%{inbox: inbox, json: json, actor: %User{} = actor, id: id} = params) do
Logger.debug("Federating #{id} to #{inbox}")
-
- case FedSockets.get_or_create_fed_socket(inbox) do
- {:ok, fedsocket} ->
- Logger.debug("publishing via fedsockets - #{inspect(inbox)}")
- FedSockets.publish(fedsocket, json)
-
- _ ->
- Logger.debug("publishing via http - #{inspect(inbox)}")
- http_publish(inbox, actor, json, params)
- end
- end
-
- def publish_one(%{actor_id: actor_id} = params) do
- actor = User.get_cached_by_id(actor_id)
-
- params
- |> Map.delete(:actor_id)
- |> Map.put(:actor, actor)
- |> publish_one()
- end
-
- defp http_publish(inbox, actor, json, params) do
uri = %{path: path} = URI.parse(inbox)
digest = "SHA-256=" <> (:crypto.hash(:sha256, json) |> Base.encode64())
date = Pleroma.Signature.signed_date()
signature =
Pleroma.Signature.sign(actor, %{
"(request-target)": "post #{path}",
host: signature_host(uri),
"content-length": byte_size(json),
digest: digest,
date: date
})
with {:ok, %{status: code}} when code in 200..299 <-
result =
HTTP.post(
inbox,
json,
[
{"Content-Type", "application/activity+json"},
{"Date", date},
{"signature", signature},
{"digest", digest}
]
) do
if not Map.has_key?(params, :unreachable_since) || params[:unreachable_since] do
Instances.set_reachable(inbox)
end
result
else
{_post_result, response} ->
unless params[:unreachable_since], do: Instances.set_unreachable(inbox)
{:error, response}
end
end
+ def publish_one(%{actor_id: actor_id} = params) do
+ actor = User.get_cached_by_id(actor_id)
+
+ params
+ |> Map.delete(:actor_id)
+ |> Map.put(:actor, actor)
+ |> publish_one()
+ end
+
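# Rough usage sketch (illustrative values), matching the parameters documented
# above; in practice publish/2 below enqueues similar maps (with :actor_id
# instead of :actor) via Pleroma.Web.Federator.Publisher.enqueue_one/2:
#
#   publish_one(%{
#     inbox: "https://remote.example/inbox",
#     json: Jason.encode!(data),
#     actor: %User{} = actor,
#     id: data["id"]
#   })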
defp signature_host(%URI{port: port, scheme: scheme, host: host}) do
if port == URI.default_port(scheme) do
host
else
"#{host}:#{port}"
end
end
defp should_federate?(inbox, public) do
if public do
true
else
%{host: host} = URI.parse(inbox)
quarantined_instances =
Config.get([:instance, :quarantined_instances], [])
|> Pleroma.Web.ActivityPub.MRF.subdomains_regex()
!Pleroma.Web.ActivityPub.MRF.subdomain_match?(quarantined_instances, host)
end
end
@spec recipients(User.t(), Activity.t()) :: list(User.t()) | []
defp recipients(actor, activity) do
followers =
if actor.follower_address in activity.recipients do
User.get_external_followers(actor)
else
[]
end
fetchers =
with %Activity{data: %{"type" => "Delete"}} <- activity,
%Object{id: object_id} <- Object.normalize(activity),
fetchers <- User.get_delivered_users_by_object_id(object_id),
_ <- Delivery.delete_all_by_object_id(object_id) do
fetchers
else
_ ->
[]
end
Pleroma.Web.Federator.Publisher.remote_users(actor, activity) ++ followers ++ fetchers
end
defp get_cc_ap_ids(ap_id, recipients) do
host = Map.get(URI.parse(ap_id), :host)
recipients
|> Enum.filter(fn %User{ap_id: ap_id} -> Map.get(URI.parse(ap_id), :host) == host end)
|> Enum.map(& &1.ap_id)
end
defp maybe_use_sharedinbox(%User{shared_inbox: nil, inbox: inbox}), do: inbox
defp maybe_use_sharedinbox(%User{shared_inbox: shared_inbox}), do: shared_inbox
@doc """
Determine a user inbox to use based on heuristics. These heuristics
are based on an approximation of the ``sharedInbox`` rules in the
[ActivityPub specification][ap-sharedinbox].
Please do not edit this function (or its children) without reading
the spec; editing the code without that familiarity is likely to
introduce breakage.
[ap-sharedinbox]: https://www.w3.org/TR/activitypub/#shared-inbox-delivery
"""
def determine_inbox(
%Activity{data: activity_data},
%User{inbox: inbox} = user
) do
to = activity_data["to"] || []
cc = activity_data["cc"] || []
type = activity_data["type"]
cond do
type == "Delete" ->
maybe_use_sharedinbox(user)
Pleroma.Constants.as_public() in to || Pleroma.Constants.as_public() in cc ->
maybe_use_sharedinbox(user)
length(to) + length(cc) > 1 ->
maybe_use_sharedinbox(user)
true ->
inbox
end
end
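# Heuristic sketch: a "Delete", anything addressed to as:Public, or anything
# with more than one recipient prefers the user's sharedInbox (when set);
# everything else falls back to the personal inbox, e.g.
#
#   determine_inbox(%Activity{data: %{"type" => "Delete", "to" => [], "cc" => []}}, user)
#   #=> user.shared_inbox if set, otherwise user.inbox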
@doc """
Publishes an activity with BCC to all relevant peers.
"""
def publish(%User{} = actor, %{data: %{"bcc" => bcc}} = activity)
when is_list(bcc) and bcc != [] do
public = is_public?(activity)
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
recipients = recipients(actor, activity)
inboxes =
recipients
|> Enum.filter(&User.ap_enabled?/1)
|> Enum.map(fn actor -> actor.inbox end)
|> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
|> Instances.filter_reachable()
Repo.checkout(fn ->
Enum.each(inboxes, fn {inbox, unreachable_since} ->
%User{ap_id: ap_id} = Enum.find(recipients, fn actor -> actor.inbox == inbox end)
# Get all the recipients on the same host and add them to cc. Otherwise, a remote
# instance would only accept a first message for the first recipient and ignore the rest.
cc = get_cc_ap_ids(ap_id, recipients)
json =
data
|> Map.put("cc", cc)
|> Jason.encode!()
Pleroma.Web.Federator.Publisher.enqueue_one(__MODULE__, %{
inbox: inbox,
json: json,
actor_id: actor.id,
id: activity.data["id"],
unreachable_since: unreachable_since
})
end)
end)
end
# Publishes an activity to all relevant peers.
def publish(%User{} = actor, %Activity{} = activity) do
public = is_public?(activity)
if public && Config.get([:instance, :allow_relay]) do
Logger.debug(fn -> "Relaying #{activity.data["id"]} out" end)
Relay.publish(activity)
end
{:ok, data} = Transmogrifier.prepare_outgoing(activity.data)
json = Jason.encode!(data)
recipients(actor, activity)
|> Enum.filter(fn user -> User.ap_enabled?(user) end)
|> Enum.map(fn %User{} = user ->
determine_inbox(activity, user)
end)
|> Enum.uniq()
|> Enum.filter(fn inbox -> should_federate?(inbox, public) end)
|> Instances.filter_reachable()
|> Enum.each(fn {inbox, unreachable_since} ->
Pleroma.Web.Federator.Publisher.enqueue_one(
__MODULE__,
%{
inbox: inbox,
json: json,
actor_id: actor.id,
id: activity.data["id"],
unreachable_since: unreachable_since
}
)
end)
end
def gather_webfinger_links(%User{} = user) do
[
%{"rel" => "self", "type" => "application/activity+json", "href" => user.ap_id},
%{
"rel" => "self",
"type" => "application/ld+json; profile=\"https://www.w3.org/ns/activitystreams\"",
"href" => user.ap_id
},
%{
"rel" => "http://ostatus.org/schema/1.0/subscribe",
"template" => "#{Pleroma.Web.base_url()}/ostatus_subscribe?acct={uri}"
}
]
end
def gather_nodeinfo_protocol_names, do: ["activitypub"]
end
diff --git a/lib/pleroma/web/activity_pub/transmogrifier.ex b/lib/pleroma/web/activity_pub/transmogrifier.ex
index 0bcd1db22..565d32433 100644
--- a/lib/pleroma/web/activity_pub/transmogrifier.ex
+++ b/lib/pleroma/web/activity_pub/transmogrifier.ex
@@ -1,1034 +1,1034 @@
# Pleroma: A lightweight social networking server
# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
# SPDX-License-Identifier: AGPL-3.0-only
defmodule Pleroma.Web.ActivityPub.Transmogrifier do
@moduledoc """
A module that handles converting activities between the internal representation and the wire ActivityPub format, and back.
"""
alias Pleroma.Activity
alias Pleroma.EctoType.ActivityPub.ObjectValidators
alias Pleroma.Maps
alias Pleroma.Object
alias Pleroma.Object.Containment
alias Pleroma.Repo
alias Pleroma.User
alias Pleroma.Web.ActivityPub.ActivityPub
alias Pleroma.Web.ActivityPub.Builder
alias Pleroma.Web.ActivityPub.ObjectValidator
alias Pleroma.Web.ActivityPub.Pipeline
alias Pleroma.Web.ActivityPub.Utils
alias Pleroma.Web.ActivityPub.Visibility
alias Pleroma.Web.Federator
alias Pleroma.Workers.TransmogrifierWorker
import Ecto.Query
require Logger
require Pleroma.Constants
@doc """
Modifies an incoming AP object (mastodon format) to our internal format.
"""
def fix_object(object, options \\ []) do
object
|> strip_internal_fields
|> fix_actor
|> fix_url
|> fix_attachments
|> fix_context
|> fix_in_reply_to(options)
|> fix_emoji
|> fix_tag
|> set_sensitive
|> fix_content_map
|> fix_addressing
|> fix_summary
|> fix_type(options)
end
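# Usage sketch: incoming objects are normally passed through this pipeline from
# handle_incoming/2 with a reply depth, e.g.
#
#   fix_object(object, depth: 1)
#
# :depth is the only option consumed here (by fix_in_reply_to/2 and fix_type/2)
# and bounds how far reply threads are fetched.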
def fix_summary(%{"summary" => nil} = object) do
Map.put(object, "summary", "")
end
def fix_summary(%{"summary" => _} = object) do
# summary is present, nothing to do
object
end
def fix_summary(object), do: Map.put(object, "summary", "")
def fix_addressing_list(map, field) do
addrs = map[field]
cond do
is_list(addrs) ->
Map.put(map, field, Enum.filter(addrs, &is_binary/1))
is_binary(addrs) ->
Map.put(map, field, [addrs])
true ->
Map.put(map, field, [])
end
end
def fix_explicit_addressing(
%{"to" => to, "cc" => cc} = object,
explicit_mentions,
follower_collection
) do
explicit_to = Enum.filter(to, fn x -> x in explicit_mentions end)
explicit_cc = Enum.filter(to, fn x -> x not in explicit_mentions end)
final_cc =
(cc ++ explicit_cc)
|> Enum.reject(fn x -> String.ends_with?(x, "/followers") and x != follower_collection end)
|> Enum.uniq()
object
|> Map.put("to", explicit_to)
|> Map.put("cc", final_cc)
end
def fix_explicit_addressing(object, _explicit_mentions, _followers_collection), do: object
# if directMessage flag is set to true, leave the addressing alone
def fix_explicit_addressing(%{"directMessage" => true} = object), do: object
def fix_explicit_addressing(object) do
explicit_mentions = Utils.determine_explicit_mentions(object)
%User{follower_address: follower_collection} =
object
|> Containment.get_actor()
|> User.get_cached_by_ap_id()
explicit_mentions =
explicit_mentions ++
[
Pleroma.Constants.as_public(),
follower_collection
]
fix_explicit_addressing(object, explicit_mentions, follower_collection)
end
# if as:Public is addressed, then make sure the followers collection is also addressed
# so that the activities will be delivered to local users.
def fix_implicit_addressing(%{"to" => to, "cc" => cc} = object, followers_collection) do
recipients = to ++ cc
if followers_collection not in recipients do
cond do
Pleroma.Constants.as_public() in cc ->
to = to ++ [followers_collection]
Map.put(object, "to", to)
Pleroma.Constants.as_public() in to ->
cc = cc ++ [followers_collection]
Map.put(object, "cc", cc)
true ->
object
end
else
object
end
end
def fix_implicit_addressing(object, _), do: object
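# Addressing sketch (illustrative collection URL): with as:Public in "to", the
# followers collection is appended to "cc" (and vice versa when Public is only
# in "cc"), so locally-followed users still receive the activity:
#
#   fix_implicit_addressing(
#     %{"to" => ["https://www.w3.org/ns/activitystreams#Public"], "cc" => []},
#     "https://example.com/users/alice/followers"
#   )
#   #=> %{"to" => [...Public], "cc" => ["https://example.com/users/alice/followers"]}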
def fix_addressing(object) do
{:ok, %User{} = user} = User.get_or_fetch_by_ap_id(object["actor"])
followers_collection = User.ap_followers(user)
object
|> fix_addressing_list("to")
|> fix_addressing_list("cc")
|> fix_addressing_list("bto")
|> fix_addressing_list("bcc")
|> fix_explicit_addressing()
|> fix_implicit_addressing(followers_collection)
end
def fix_actor(%{"attributedTo" => actor} = object) do
actor = Containment.get_actor(%{"actor" => actor})
# TODO: Remove actor field for Objects
object
|> Map.put("actor", actor)
|> Map.put("attributedTo", actor)
end
def fix_in_reply_to(object, options \\ [])
def fix_in_reply_to(%{"inReplyTo" => in_reply_to} = object, options)
when not is_nil(in_reply_to) do
in_reply_to_id = prepare_in_reply_to(in_reply_to)
depth = (options[:depth] || 0) + 1
if Federator.allowed_thread_distance?(depth) do
with {:ok, replied_object} <- get_obj_helper(in_reply_to_id, options),
%Activity{} <- Activity.get_create_by_object_ap_id(replied_object.data["id"]) do
object
|> Map.put("inReplyTo", replied_object.data["id"])
|> Map.put("context", replied_object.data["context"] || object["conversation"])
|> Map.drop(["conversation", "inReplyToAtomUri"])
else
e ->
Logger.warn("Couldn't fetch #{inspect(in_reply_to_id)}, error: #{inspect(e)}")
object
end
else
object
end
end
def fix_in_reply_to(object, _options), do: object
defp prepare_in_reply_to(in_reply_to) do
cond do
is_bitstring(in_reply_to) ->
in_reply_to
is_map(in_reply_to) && is_bitstring(in_reply_to["id"]) ->
in_reply_to["id"]
is_list(in_reply_to) && is_bitstring(Enum.at(in_reply_to, 0)) ->
Enum.at(in_reply_to, 0)
true ->
""
end
end
def fix_context(object) do
context = object["context"] || object["conversation"] || Utils.generate_context_id()
object
|> Map.put("context", context)
|> Map.drop(["conversation"])
end
def fix_attachments(%{"attachment" => attachment} = object) when is_list(attachment) do
attachments =
Enum.map(attachment, fn data ->
url =
cond do
is_list(data["url"]) -> List.first(data["url"])
is_map(data["url"]) -> data["url"]
true -> nil
end
media_type =
cond do
is_map(url) && MIME.valid?(url["mediaType"]) -> url["mediaType"]
MIME.valid?(data["mediaType"]) -> data["mediaType"]
MIME.valid?(data["mimeType"]) -> data["mimeType"]
true -> nil
end
href =
cond do
is_map(url) && is_binary(url["href"]) -> url["href"]
is_binary(data["url"]) -> data["url"]
is_binary(data["href"]) -> data["href"]
true -> nil
end
if href do
attachment_url =
%{
"href" => href,
"type" => Map.get(url || %{}, "type", "Link")
}
|> Maps.put_if_present("mediaType", media_type)
%{
"url" => [attachment_url],
"type" => data["type"] || "Document"
}
|> Maps.put_if_present("mediaType", media_type)
|> Maps.put_if_present("name", data["name"])
|> Maps.put_if_present("blurhash", data["blurhash"])
else
nil
end
end)
|> Enum.filter(& &1)
Map.put(object, "attachment", attachments)
end
def fix_attachments(%{"attachment" => attachment} = object) when is_map(attachment) do
object
|> Map.put("attachment", [attachment])
|> fix_attachments()
end
def fix_attachments(object), do: object
def fix_url(%{"url" => url} = object) when is_map(url) do
Map.put(object, "url", url["href"])
end
def fix_url(%{"url" => url} = object) when is_list(url) do
first_element = Enum.at(url, 0)
url_string =
cond do
is_bitstring(first_element) -> first_element
is_map(first_element) -> first_element["href"] || ""
true -> ""
end
Map.put(object, "url", url_string)
end
def fix_url(object), do: object
def fix_emoji(%{"tag" => tags} = object) when is_list(tags) do
emoji =
tags
|> Enum.filter(fn data -> is_map(data) and data["type"] == "Emoji" and data["icon"] end)
|> Enum.reduce(%{}, fn data, mapping ->
name = String.trim(data["name"], ":")
Map.put(mapping, name, data["icon"]["url"])
end)
Map.put(object, "emoji", emoji)
end
def fix_emoji(%{"tag" => %{"type" => "Emoji"} = tag} = object) do
name = String.trim(tag["name"], ":")
emoji = %{name => tag["icon"]["url"]}
Map.put(object, "emoji", emoji)
end
def fix_emoji(object), do: object
def fix_tag(%{"tag" => tag} = object) when is_list(tag) do
tags =
tag
|> Enum.filter(fn data -> data["type"] == "Hashtag" and data["name"] end)
|> Enum.map(fn %{"name" => name} ->
name
|> String.slice(1..-1)
|> String.downcase()
end)
Map.put(object, "tag", tag ++ tags)
end
def fix_tag(%{"tag" => %{} = tag} = object) do
object
|> Map.put("tag", [tag])
|> fix_tag
end
def fix_tag(object), do: object
# content map usually only has one language so this will do for now.
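# Illustrative example (hypothetical value, not from the original source):
#   %{"contentMap" => %{"en" => "Hello fediverse"}}
# sets "content" to "Hello fediverse"; only the first entry of the map is used.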
def fix_content_map(%{"contentMap" => content_map} = object) do
content_groups = Map.to_list(content_map)
{_, content} = Enum.at(content_groups, 0)
Map.put(object, "content", content)
end
def fix_content_map(object), do: object
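# Note (added for clarity, not from the original source): fix_type/2 below rewrites an object
# that has a "name" and replies to a Question into type "Answer", i.e. it treats such objects
# as Mastodon-style poll votes rather than regular Notes.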
def fix_type(object, options \\ [])
def fix_type(%{"inReplyTo" => reply_id, "name" => _} = object, options)
when is_binary(reply_id) do
with true <- Federator.allowed_thread_distance?(options[:depth]),
{:ok, %{data: %{"type" => "Question"} = _} = _} <- get_obj_helper(reply_id, options) do
Map.put(object, "type", "Answer")
else
_ -> object
end
end
def fix_type(object, _), do: object
# Reduce the object list to find the reported user.
defp get_reported(objects) do
Enum.reduce_while(objects, nil, fn ap_id, _ ->
with %User{} = user <- User.get_cached_by_ap_id(ap_id) do
{:halt, user}
else
_ -> {:cont, nil}
end
end)
end
# Compatibility wrapper for Mastodon votes
defp handle_create(%{"object" => %{"type" => "Answer"}} = data, _user) do
handle_incoming(data)
end
defp handle_create(%{"object" => object} = data, user) do
%{
to: data["to"],
object: object,
actor: user,
context: object["context"],
local: false,
published: data["published"],
additional:
Map.take(data, [
"cc",
"directMessage",
"id"
])
}
|> ActivityPub.create()
end
def handle_incoming(data, options \\ [])
# Flag objects are placed ahead of the ID check because Mastodon 2.8 and earlier send them
# with nil ID.
def handle_incoming(%{"type" => "Flag", "object" => objects, "actor" => actor} = data, _options) do
with context <- data["context"] || Utils.generate_context_id(),
content <- data["content"] || "",
%User{} = actor <- User.get_cached_by_ap_id(actor),
# Reduce the object list to find the reported user.
%User{} = account <- get_reported(objects),
# Remove the reported user from the object list.
statuses <- Enum.filter(objects, fn ap_id -> ap_id != account.ap_id end) do
%{
actor: actor,
context: context,
account: account,
statuses: statuses,
content: content,
additional: %{"cc" => [account.ap_id]}
}
|> ActivityPub.flag()
end
end
# disallow objects with bogus IDs
def handle_incoming(%{"id" => nil}, _options), do: :error
def handle_incoming(%{"id" => ""}, _options), do: :error
# The length of "https://" is 8; this should be validated more thoroughly, but it is good enough for now.
def handle_incoming(%{"id" => id}, _options) when is_binary(id) and byte_size(id) < 8,
do: :error
# TODO: validate these with an Ecto schema
# - tags
# - emoji
def handle_incoming(
%{"type" => "Create", "object" => %{"type" => objtype} = object} = data,
options
)
when objtype in ~w{Note Page} do
actor = Containment.get_actor(data)
with nil <- Activity.get_create_by_object_ap_id(object["id"]),
{:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(actor) do
data =
data
|> Map.put("object", fix_object(object, options))
|> Map.put("actor", actor)
|> fix_addressing()
with {:ok, created_activity} <- handle_create(data, user) do
reply_depth = (options[:depth] || 0) + 1
if Federator.allowed_thread_distance?(reply_depth) do
for reply_id <- replies(object) do
Pleroma.Workers.RemoteFetcherWorker.enqueue("fetch_remote", %{
"id" => reply_id,
"depth" => reply_depth
})
end
end
{:ok, created_activity}
end
else
%Activity{} = activity -> {:ok, activity}
_e -> :error
end
end
def handle_incoming(
%{"type" => "Listen", "object" => %{"type" => "Audio"} = object} = data,
options
) do
actor = Containment.get_actor(data)
data =
Map.put(data, "actor", actor)
|> fix_addressing
with {:ok, %User{} = user} <- User.get_or_fetch_by_ap_id(data["actor"]) do
reply_depth = (options[:depth] || 0) + 1
options = Keyword.put(options, :depth, reply_depth)
object = fix_object(object, options)
params = %{
to: data["to"],
object: object,
actor: user,
context: nil,
local: false,
published: data["published"],
additional: Map.take(data, ["cc", "id"])
}
ActivityPub.listen(params)
else
_e -> :error
end
end
@misskey_reactions %{
"like" => "👍",
"love" => "❤️",
"laugh" => "😆",
"hmm" => "🤔",
"surprise" => "😮",
"congrats" => "🎉",
"angry" => "💢",
"confused" => "😥",
"rip" => "😇",
"pudding" => "🍮",
"star" => "⭐"
}
@doc "Rewrite misskey likes into EmojiReacts"
def handle_incoming(
%{
"type" => "Like",
"_misskey_reaction" => reaction
} = data,
options
) do
data
|> Map.put("type", "EmojiReact")
|> Map.put("content", @misskey_reactions[reaction] || reaction)
|> handle_incoming(options)
end
def handle_incoming(
%{"type" => "Create", "object" => %{"type" => objtype, "id" => obj_id}} = data,
_options
)
when objtype in ~w{Question Answer ChatMessage Audio Video Event Article} do
data = Map.put(data, "object", strip_internal_fields(data["object"]))
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
nil <- Activity.get_create_by_object_ap_id(obj_id),
{:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
else
%Activity{} = activity -> {:ok, activity}
e -> e
end
end
def handle_incoming(%{"type" => type} = data, _options)
when type in ~w{Like EmojiReact Announce} do
with :ok <- ObjectValidator.fetch_actor_and_object(data),
{:ok, activity, _meta} <-
Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
else
e -> {:error, e}
end
end
def handle_incoming(
%{"type" => type} = data,
_options
)
when type in ~w{Update Block Follow Accept Reject} do
with {:ok, %User{}} <- ObjectValidator.fetch_actor(data),
{:ok, activity, _} <-
Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
end
end
def handle_incoming(
%{"type" => "Delete"} = data,
_options
) do
with {:ok, activity, _} <-
Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
else
{:error, {:validate_object, _}} = e ->
# Check if we have a create activity for this
with {:ok, object_id} <- ObjectValidators.ObjectID.cast(data["object"]),
%Activity{data: %{"actor" => actor}} <-
Activity.create_by_object_ap_id(object_id) |> Repo.one(),
# We have one, insert a tombstone and retry
{:ok, tombstone_data, _} <- Builder.tombstone(actor, object_id),
{:ok, _tombstone} <- Object.create(tombstone_data) do
handle_incoming(data)
else
_ -> e
end
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => "Follow", "object" => followed},
"actor" => follower,
"id" => id
} = _data,
_options
) do
with %User{local: true} = followed <- User.get_cached_by_ap_id(followed),
{:ok, %User{} = follower} <- User.get_or_fetch_by_ap_id(follower),
{:ok, activity} <- ActivityPub.unfollow(follower, followed, id, false) do
User.unfollow(follower, followed)
{:ok, activity}
else
_e -> :error
end
end
def handle_incoming(
%{
"type" => "Undo",
"object" => %{"type" => type}
} = data,
_options
)
when type in ["Like", "EmojiReact", "Announce", "Block"] do
with {:ok, activity, _} <- Pipeline.common_pipeline(data, local: false) do
{:ok, activity}
end
end
# For Undos that don't have the complete object attached, try to find it in our database.
def handle_incoming(
%{
"type" => "Undo",
"object" => object
} = activity,
options
)
when is_binary(object) do
with %Activity{data: data} <- Activity.get_by_ap_id(object) do
activity
|> Map.put("object", data)
|> handle_incoming(options)
else
_e -> :error
end
end
def handle_incoming(
%{
"type" => "Move",
"actor" => origin_actor,
"object" => origin_actor,
"target" => target_actor
},
_options
) do
with %User{} = origin_user <- User.get_cached_by_ap_id(origin_actor),
{:ok, %User{} = target_user} <- User.get_or_fetch_by_ap_id(target_actor),
true <- origin_actor in target_user.also_known_as do
ActivityPub.move(origin_user, target_user, false)
else
_e -> :error
end
end
def handle_incoming(_, _), do: :error
@spec get_obj_helper(String.t(), Keyword.t()) :: {:ok, Object.t()} | nil
def get_obj_helper(id, options \\ []) do
case Object.normalize(id, true, options) do
%Object{} = object -> {:ok, object}
_ -> nil
end
end
@spec get_embedded_obj_helper(String.t() | Object.t(), User.t()) :: {:ok, Object.t()} | nil
def get_embedded_obj_helper(%{"attributedTo" => attributed_to, "id" => object_id} = data, %User{
ap_id: ap_id
})
when attributed_to == ap_id do
with {:ok, activity} <-
handle_incoming(%{
"type" => "Create",
"to" => data["to"],
"cc" => data["cc"],
"actor" => attributed_to,
"object" => data
}) do
{:ok, Object.normalize(activity)}
else
_ -> get_obj_helper(object_id)
end
end
def get_embedded_obj_helper(object_id, _) do
get_obj_helper(object_id)
end
def set_reply_to_uri(%{"inReplyTo" => in_reply_to} = object) when is_binary(in_reply_to) do
with false <- String.starts_with?(in_reply_to, "http"),
{:ok, %{data: replied_to_object}} <- get_obj_helper(in_reply_to) do
Map.put(object, "inReplyTo", replied_to_object["external_url"] || in_reply_to)
else
_e -> object
end
end
def set_reply_to_uri(obj), do: obj
@doc """
Serializes a Mastodon-compatible `replies` collection containing _self-replies_.
Based on Mastodon's ActivityPub::NoteSerializer#replies.
"""
def set_replies(obj_data) do
replies_uris =
with limit when limit > 0 <-
Pleroma.Config.get([:activitypub, :note_replies_output_limit], 0),
%Object{} = object <- Object.get_cached_by_ap_id(obj_data["id"]) do
object
|> Object.self_replies()
|> select([o], fragment("?->>'id'", o.data))
|> limit(^limit)
|> Repo.all()
else
_ -> []
end
set_replies(obj_data, replies_uris)
end
defp set_replies(obj, []) do
obj
end
defp set_replies(obj, replies_uris) do
replies_collection = %{
"type" => "Collection",
"items" => replies_uris
}
Map.merge(obj, %{"replies" => replies_collection})
end
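# Illustrative example (hypothetical URI, not from the original source): with one stored
# self-reply, set_replies/1 above merges a collection such as
#   %{"replies" => %{"type" => "Collection", "items" => ["https://local.example/objects/2"]}}
# into the object; with no self-replies (or a limit of 0) the object is returned unchanged.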
def replies(%{"replies" => %{"first" => %{"items" => items}}}) when not is_nil(items) do
items
end
def replies(%{"replies" => %{"items" => items}}) when not is_nil(items) do
items
end
def replies(_), do: []
# Prepares the object of an outgoing create activity.
def prepare_object(object) do
object
|> set_sensitive
|> add_hashtags
|> add_mention_tags
|> add_emoji_tags
|> add_attributed_to
|> prepare_attachments
|> set_conversation
|> set_reply_to_uri
|> set_replies
|> strip_internal_fields
|> strip_internal_tags
|> set_type
end
# @doc
# """
# internal -> Mastodon
# """
def prepare_outgoing(%{"type" => activity_type, "object" => object_id} = data)
when activity_type in ["Create", "Listen"] do
object =
object_id
|> Object.normalize()
|> Map.get(:data)
|> prepare_object
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
|> Map.delete("bcc")
{:ok, data}
end
def prepare_outgoing(%{"type" => "Announce", "actor" => ap_id, "object" => object_id} = data) do
object =
object_id
|> Object.normalize()
data =
if Visibility.is_private?(object) && object.data["actor"] == ap_id do
data |> Map.put("object", object |> Map.get(:data) |> prepare_object)
else
data |> maybe_fix_object_url
end
data =
data
|> strip_internal_fields
|> Map.merge(Utils.make_json_ld_header())
|> Map.delete("bcc")
{:ok, data}
end
# Mastodon Accept/Reject requires a non-normalized object containing the actor URIs,
# because of course it does.
def prepare_outgoing(%{"type" => "Accept"} = data) do
with follow_activity <- Activity.normalize(data["object"]) do
object = %{
"actor" => follow_activity.actor,
"object" => follow_activity.data["object"],
"id" => follow_activity.data["id"],
"type" => "Follow"
}
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
end
def prepare_outgoing(%{"type" => "Reject"} = data) do
with follow_activity <- Activity.normalize(data["object"]) do
object = %{
"actor" => follow_activity.actor,
"object" => follow_activity.data["object"],
"id" => follow_activity.data["id"],
"type" => "Follow"
}
data =
data
|> Map.put("object", object)
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
end
def prepare_outgoing(%{"type" => _type} = data) do
data =
data
|> strip_internal_fields
|> maybe_fix_object_url
|> Map.merge(Utils.make_json_ld_header())
{:ok, data}
end
def maybe_fix_object_url(%{"object" => object} = data) when is_binary(object) do
with false <- String.starts_with?(object, "http"),
{:fetch, {:ok, relative_object}} <- {:fetch, get_obj_helper(object)},
%{data: %{"external_url" => external_url}} when not is_nil(external_url) <-
relative_object do
Map.put(data, "object", external_url)
else
{:fetch, e} ->
Logger.error("Couldn't fetch #{object} #{inspect(e)}")
data
_ ->
data
end
end
def maybe_fix_object_url(data), do: data
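# Illustrative example (hypothetical tag, not from the original source): add_hashtags/1 below
# expands an internal string tag such as "cats" into an AS2 Hashtag object:
#   %{"href" => Pleroma.Web.Endpoint.url() <> "/tags/cats", "name" => "#cats", "type" => "Hashtag"}
# while tags that are already maps pass through untouched.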
def add_hashtags(object) do
tags =
(object["tag"] || [])
|> Enum.map(fn
# Expand internal representation tags into AS2 tags.
tag when is_binary(tag) ->
%{
"href" => Pleroma.Web.Endpoint.url() <> "/tags/#{tag}",
"name" => "##{tag}",
"type" => "Hashtag"
}
# Do not process tags which are already AS2 tag objects.
tag when is_map(tag) ->
tag
end)
Map.put(object, "tag", tags)
end
# TODO: These should be added on our side at insertion time; it doesn't make much
# sense to regenerate them every time.
def add_mention_tags(object) do
to = object["to"] || []
cc = object["cc"] || []
mentioned = User.get_users_from_set(to ++ cc, local_only: false)
mentions = Enum.map(mentioned, &build_mention_tag/1)
tags = object["tag"] || []
Map.put(object, "tag", tags ++ mentions)
end
defp build_mention_tag(%{ap_id: ap_id, nickname: nickname} = _) do
%{"type" => "Mention", "href" => ap_id, "name" => "@#{nickname}"}
end
def take_emoji_tags(%User{emoji: emoji}) do
emoji
|> Map.to_list()
|> Enum.map(&build_emoji_tag/1)
end
# TODO: we should probably send mtime instead of unix epoch time for updated
def add_emoji_tags(%{"emoji" => emoji} = object) do
tags = object["tag"] || []
out = Enum.map(emoji, &build_emoji_tag/1)
Map.put(object, "tag", tags ++ out)
end
def add_emoji_tags(object), do: object
defp build_emoji_tag({name, url}) do
%{
"icon" => %{"url" => url, "type" => "Image"},
"name" => ":" <> name <> ":",
"type" => "Emoji",
"updated" => "1970-01-01T00:00:00Z",
"id" => url
}
end
def set_conversation(object) do
Map.put(object, "conversation", object["context"])
end
def set_sensitive(%{"sensitive" => _} = object) do
object
end
def set_sensitive(object) do
tags = object["tag"] || []
Map.put(object, "sensitive", "nsfw" in tags)
end
def set_type(%{"type" => "Answer"} = object) do
Map.put(object, "type", "Note")
end
def set_type(object), do: object
def add_attributed_to(object) do
attributed_to = object["attributedTo"] || object["actor"]
Map.put(object, "attributedTo", attributed_to)
end
# TODO: Revisit this
def prepare_attachments(%{"type" => "ChatMessage"} = object), do: object
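# Illustrative example (hypothetical URL, not from the original source): the clause below flattens
# the internal attachment shape back into the Mastodon-compatible outgoing form, so
#   %{"url" => [%{"mediaType" => "image/png", "href" => "https://local.example/media/cat.png"}], "name" => "a cat"}
# becomes
#   %{"url" => "https://local.example/media/cat.png", "mediaType" => "image/png", "name" => "a cat", "type" => "Document"}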
def prepare_attachments(object) do
attachments =
object
|> Map.get("attachment", [])
|> Enum.map(fn data ->
[%{"mediaType" => media_type, "href" => href} | _] = data["url"]
%{
"url" => href,
"mediaType" => media_type,
"name" => data["name"],
"type" => "Document"
}
end)
Map.put(object, "attachment", attachments)
end
def strip_internal_fields(object) do
Map.drop(object, Pleroma.Constants.object_internal_fields())
end
defp strip_internal_tags(%{"tag" => tags} = object) do
tags = Enum.filter(tags, fn x -> is_map(x) end)
Map.put(object, "tag", tags)
end
defp strip_internal_tags(object), do: object
def perform(:user_upgrade, user) do
# we pass a fake user so that the followers collection is stripped away
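# Illustrative note (not from the original source): the query below relies on the Postgres
# function array_replace/3, i.e. array_replace(recipients, old_follower_address, follower_address),
# to swap the old follower-collection address for the user's current one in each
# activity's recipients list.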
old_follower_address = User.ap_followers(%User{nickname: user.nickname})
from(
a in Activity,
where: ^old_follower_address in a.recipients,
update: [
set: [
recipients:
fragment(
"array_replace(?,?,?)",
a.recipients,
^old_follower_address,
^user.follower_address
)
]
]
)
|> Repo.update_all([])
end
def upgrade_user_from_ap_id(ap_id) do
with %User{local: false} = user <- User.get_cached_by_ap_id(ap_id),
- {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id, force_http: true),
+ {:ok, data} <- ActivityPub.fetch_and_prepare_user_from_ap_id(ap_id),
{:ok, user} <- update_user(user, data) do
TransmogrifierWorker.enqueue("user_upgrade", %{"user_id" => user.id})
{:ok, user}
else
%User{} = user -> {:ok, user}
e -> e
end
end
defp update_user(user, data) do
user
|> User.remote_user_changeset(data)
|> User.update_and_set_cache()
end
def maybe_fix_user_url(%{"url" => url} = data) when is_map(url) do
Map.put(data, "url", url["href"])
end
def maybe_fix_user_url(data), do: data
def maybe_fix_user_object(data), do: maybe_fix_user_url(data)
end
diff --git a/lib/pleroma/web/fed_sockets.ex b/lib/pleroma/web/fed_sockets.ex
deleted file mode 100644
index 1fd5899c8..000000000
--- a/lib/pleroma/web/fed_sockets.ex
+++ /dev/null
@@ -1,185 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets do
- @moduledoc """
- This documents the FedSockets framework, a framework for federating
- ActivityPub objects between servers via persistent WebSocket connections.
-
- FedSockets allow servers to authenticate on first contact and maintain that
- connection, eliminating the need to authenticate every time data needs to be shared.
-
- ## Protocol
- FedSockets currently support 2 types of data transfer:
- * `publish` method which doesn't require a response
- * `fetch` method requires a response be sent
-
- ### Publish
- The publish operation sends a json encoded map of the shape:
- %{action: :publish, data: json}
- and accepts (but does not require) a reply of form:
- %{"action" => "publish_reply"}
-
- The outgoing params represent
- * data: ActivityPub object encoded into json
-
-
- ### Fetch
- The fetch operation sends a json encoded map of the shape:
- %{action: :fetch, data: id, uuid: fetch_uuid}
- and requires a reply of form:
- %{"action" => "fetch_reply", "uuid" => uuid, "data" => data}
-
- The outgoing params represent
- * id: an ActivityPub object URI
- * uuid: a unique uuid generated by the sender
-
- The reply params represent
- * data: an ActivityPub object encoded into json
- * uuid: the uuid sent along with the fetch request
-
- ## Examples
- Clients of FedSocket transfers shouldn't need to use any of the functions outside of this module.
-
- A typical publish operation can be performed through the following code, and a fetch operation in a similar manner.
-
- case FedSockets.get_or_create_fed_socket(inbox) do
- {:ok, fedsocket} ->
- FedSockets.publish(fedsocket, json)
-
- _ ->
- alternative_publish(inbox, actor, json, params)
- end
-
- ## Configuration
- FedSockets have the following config settings
-
- config :pleroma, :fed_sockets,
- enabled: true,
- ping_interval: :timer.seconds(15),
- connection_duration: :timer.hours(1),
- rejection_duration: :timer.hours(1),
- fed_socket_fetches: [
- default: 12_000,
- interval: 3_000,
- lazy: false
- ]
- * enabled - turn FedSockets on or off with this flag. Can be toggled at runtime.
- * connection_duration - How long a FedSocket can sit idle before it's culled.
- * rejection_duration - After failing to make a FedSocket connection a host will be excluded
- from further connections for this amount of time
- * fed_socket_fetches - Use these parameters to pass options to the Cachex queue backing the FetchRegistry
- * fed_socket_rejections - Use these parameters to pass options to the Cachex queue backing the FedRegistry
-
- Cachex options are
- * default: the minimum amount of time a fetch can wait before it times out.
- * interval: the interval between checks for timed out entries. This plus the default represent the maximum time allowed
- * lazy: leave at false for consistent and fast lookups, set to true for stricter timeout enforcement
-
- """
- require Logger
-
- alias Pleroma.Web.FedSockets.FedRegistry
- alias Pleroma.Web.FedSockets.FedSocket
- alias Pleroma.Web.FedSockets.SocketInfo
-
- @doc """
- returns a FedSocket for the given origin. Will reuse an existing one or create a new one.
-
- address is expected to be a fully formed URL such as:
- "http://www.example.com" or "http://www.example.com:8080"
-
- It can and usually does include additional path parameters,
- but these are ignored as the FedSockets are organized by host and port info alone.
- """
- def get_or_create_fed_socket(address) do
- with {:cache, {:error, :missing}} <- {:cache, get_fed_socket(address)},
- {:connect, {:ok, _pid}} <- {:connect, FedSocket.connect_to_host(address)},
- {:cache, {:ok, fed_socket}} <- {:cache, get_fed_socket(address)} do
- Logger.debug("fedsocket created for - #{inspect(address)}")
- {:ok, fed_socket}
- else
- {:cache, {:ok, socket}} ->
- Logger.debug("fedsocket found in cache - #{inspect(address)}")
- {:ok, socket}
-
- {:cache, {:error, :rejected} = e} ->
- e
-
- {:connect, {:error, _host}} ->
- Logger.debug("set host rejected for - #{inspect(address)}")
- FedRegistry.set_host_rejected(address)
- {:error, :rejected}
-
- {_, {:error, :disabled}} ->
- {:error, :disabled}
-
- {_, {:error, reason}} ->
- Logger.warn("get_or_create_fed_socket error - #{inspect(reason)}")
- {:error, reason}
- end
- end
-
- @doc """
- returns a FedSocket for the given origin. Will not create a new FedSocket if one does not exist.
-
- address is expected to be a fully formed URL such as:
- "http://www.example.com" or "http://www.example.com:8080"
- """
- def get_fed_socket(address) do
- origin = SocketInfo.origin(address)
-
- with {:config, true} <- {:config, Pleroma.Config.get([:fed_sockets, :enabled], false)},
- {:ok, socket} <- FedRegistry.get_fed_socket(origin) do
- {:ok, socket}
- else
- {:config, _} ->
- {:error, :disabled}
-
- {:error, :rejected} ->
- Logger.debug("FedSocket previously rejected - #{inspect(origin)}")
- {:error, :rejected}
-
- {:error, reason} ->
- {:error, reason}
- end
- end
-
- @doc """
- Sends the supplied data via the publish protocol.
- It will not block waiting for a reply.
- Returns :ok but this is not an indication of a successful transfer.
-
- the data is expected to be JSON encoded binary data.
- """
- def publish(%SocketInfo{} = fed_socket, json) do
- FedSocket.publish(fed_socket, json)
- end
-
- @doc """
- Sends the supplied data via the fetch protocol.
- It will block waiting for a reply or timeout.
-
- Returns {:ok, object} where object is the requested object (or nil)
- {:error, :timeout} in the event the message was not responded to
-
- the id is expected to be the URI of an ActivityPub object.
- """
- def fetch(%SocketInfo{} = fed_socket, id) do
- FedSocket.fetch(fed_socket, id)
- end
-
- @doc """
- Disconnect all and restart FedSockets.
- This is mainly used in development and testing but could be useful in production.
- """
- def reset do
- FedRegistry
- |> Process.whereis()
- |> Process.exit(:testing)
- end
-
- def uri_for_origin(origin),
- do: "ws://#{origin}/api/fedsocket/v1"
-end
diff --git a/lib/pleroma/web/fed_sockets/fed_registry.ex b/lib/pleroma/web/fed_sockets/fed_registry.ex
deleted file mode 100644
index e00ea69c0..000000000
--- a/lib/pleroma/web/fed_sockets/fed_registry.ex
+++ /dev/null
@@ -1,185 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.FedRegistry do
- @moduledoc """
- The FedRegistry stores the active FedSockets for quick retrieval.
-
- The storage and retrieval portion of the FedRegistry is done in process through
- elixir's `Registry` module for speed and its ability to monitor for terminated processes.
-
- Dropped connections will be caught by `Registry` and deleted. Since the next
- message will initiate a new connection there is no reason to try and reconnect at that point.
-
- Normally outside modules should have no need to call or use the FedRegistry themselves.
- """
-
- alias Pleroma.Web.FedSockets.FedSocket
- alias Pleroma.Web.FedSockets.SocketInfo
-
- require Logger
-
- @default_rejection_duration 15 * 60 * 1000
- @rejections :fed_socket_rejections
-
- @doc """
- Retrieves a FedSocket from the Registry given its origin.
-
- The origin is expected to be a string identifying the endpoint "example.com" or "example2.com:8080"
-
- Will return:
- * {:ok, fed_socket} for working FedSockets
- * {:error, :rejected} for origins that have been tried and refused within the rejection duration interval
- * {:error, some_reason} usually :missing for unknown origins
- """
- def get_fed_socket(origin) do
- case get_registry_data(origin) do
- {:error, reason} ->
- {:error, reason}
-
- {:ok, %{state: :connected} = socket_info} ->
- {:ok, socket_info}
- end
- end
-
- @doc """
- Adds a connected FedSocket to the Registry.
-
- Always returns {:ok, fed_socket}
- """
- def add_fed_socket(origin, pid \\ nil) do
- origin
- |> SocketInfo.build(pid)
- |> SocketInfo.connect()
- |> add_socket_info
- end
-
- defp add_socket_info(%{origin: origin, state: :connected} = socket_info) do
- case Registry.register(FedSockets.Registry, origin, socket_info) do
- {:ok, _owner} ->
- clear_prior_rejection(origin)
- Logger.debug("fedsocket added: #{inspect(origin)}")
-
- {:ok, socket_info}
-
- {:error, {:already_registered, _pid}} ->
- FedSocket.close(socket_info)
- existing_socket_info = Registry.lookup(FedSockets.Registry, origin)
-
- {:ok, existing_socket_info}
-
- _ ->
- {:error, :error_adding_socket}
- end
- end
-
- @doc """
- Mark this origin as having rejected a connection attempt.
- This will keep it from getting additional connection attempts
- for a period of time specified in the config.
-
- Always returns {:ok, new_reg_data}
- """
- def set_host_rejected(uri) do
- new_reg_data =
- uri
- |> SocketInfo.origin()
- |> get_or_create_registry_data()
- |> set_to_rejected()
- |> save_registry_data()
-
- {:ok, new_reg_data}
- end
-
- @doc """
- Retrieves the FedRegistryData from the Registry given its origin.
-
- The origin is expected to be a string identifying the endpoint "example.com" or "example2.com:8080"
-
- Will return:
- * {:ok, fed_registry_data} for known origins
- * {:error, :missing} for unknown origins
- * {:error, :cache_error} indicating some low level runtime issues
- """
- def get_registry_data(origin) do
- case Registry.lookup(FedSockets.Registry, origin) do
- [] ->
- if is_rejected?(origin) do
- Logger.debug("previously rejected fedsocket requested")
- {:error, :rejected}
- else
- {:error, :missing}
- end
-
- [{_pid, %{state: :connected} = socket_info}] ->
- {:ok, socket_info}
-
- _ ->
- {:error, :cache_error}
- end
- end
-
- @doc """
- Retrieves a map of all sockets from the Registry. The keys are the origins and the values are the corresponding SocketInfo
- """
- def list_all do
- (list_all_connected() ++ list_all_rejected())
- |> Enum.into(%{})
- end
-
- defp list_all_connected do
- FedSockets.Registry
- |> Registry.select([{{:"$1", :_, :"$3"}, [], [{{:"$1", :"$3"}}]}])
- end
-
- defp list_all_rejected do
- {:ok, keys} = Cachex.keys(@rejections)
-
- {:ok, registry_data} =
- Cachex.execute(@rejections, fn worker ->
- Enum.map(keys, fn k -> {k, Cachex.get!(worker, k)} end)
- end)
-
- registry_data
- end
-
- defp clear_prior_rejection(origin),
- do: Cachex.del(@rejections, origin)
-
- defp is_rejected?(origin) do
- case Cachex.get(@rejections, origin) do
- {:ok, nil} ->
- false
-
- {:ok, _} ->
- true
- end
- end
-
- defp get_or_create_registry_data(origin) do
- case get_registry_data(origin) do
- {:error, :missing} ->
- %SocketInfo{origin: origin}
-
- {:ok, socket_info} ->
- socket_info
- end
- end
-
- defp save_registry_data(%SocketInfo{origin: origin, state: :connected} = socket_info) do
- {:ok, true} = Registry.update_value(FedSockets.Registry, origin, fn _ -> socket_info end)
- socket_info
- end
-
- defp save_registry_data(%SocketInfo{origin: origin, state: :rejected} = socket_info) do
- rejection_expiration =
- Pleroma.Config.get([:fed_sockets, :rejection_duration], @default_rejection_duration)
-
- {:ok, true} = Cachex.put(@rejections, origin, socket_info, ttl: rejection_expiration)
- socket_info
- end
-
- defp set_to_rejected(%SocketInfo{} = socket_info),
- do: %SocketInfo{socket_info | state: :rejected}
-end
diff --git a/lib/pleroma/web/fed_sockets/fed_socket.ex b/lib/pleroma/web/fed_sockets/fed_socket.ex
deleted file mode 100644
index 98d64e65a..000000000
--- a/lib/pleroma/web/fed_sockets/fed_socket.ex
+++ /dev/null
@@ -1,137 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.FedSocket do
- @moduledoc """
- The FedSocket module abstracts the actions to be taken on connections regardless of
- whether the connection started as inbound or outbound.
-
-
- Normally outside modules will have no need to call the FedSocket module directly.
- """
-
- alias Pleroma.Object
- alias Pleroma.Object.Containment
- alias Pleroma.User
- alias Pleroma.Web.ActivityPub.ObjectView
- alias Pleroma.Web.ActivityPub.UserView
- alias Pleroma.Web.ActivityPub.Visibility
- alias Pleroma.Web.FedSockets.FetchRegistry
- alias Pleroma.Web.FedSockets.IngesterWorker
- alias Pleroma.Web.FedSockets.OutgoingHandler
- alias Pleroma.Web.FedSockets.SocketInfo
-
- require Logger
-
- @shake "61dd18f7-f1e6-49a4-939a-a749fcdc1103"
-
- def connect_to_host(uri) do
- case OutgoingHandler.start_link(uri) do
- {:ok, pid} ->
- {:ok, pid}
-
- error ->
- {:error, error}
- end
- end
-
- def close(%SocketInfo{pid: socket_pid}),
- do: Process.send(socket_pid, :close, [])
-
- def publish(%SocketInfo{pid: socket_pid}, json) do
- %{action: :publish, data: json}
- |> Jason.encode!()
- |> send_packet(socket_pid)
- end
-
- def fetch(%SocketInfo{pid: socket_pid}, id) do
- fetch_uuid = FetchRegistry.register_fetch(id)
-
- %{action: :fetch, data: id, uuid: fetch_uuid}
- |> Jason.encode!()
- |> send_packet(socket_pid)
-
- wait_for_fetch_to_return(fetch_uuid, 0)
- end
-
- def receive_package(%SocketInfo{} = fed_socket, json) do
- json
- |> Jason.decode!()
- |> process_package(fed_socket)
- end
-
- defp wait_for_fetch_to_return(uuid, cntr) do
- case FetchRegistry.check_fetch(uuid) do
- {:error, :waiting} ->
- Process.sleep(:math.pow(cntr, 3) |> Kernel.trunc())
- wait_for_fetch_to_return(uuid, cntr + 1)
-
- {:error, :missing} ->
- Logger.error("FedSocket fetch timed out - #{inspect(uuid)}")
- {:error, :timeout}
-
- {:ok, _fr} ->
- FetchRegistry.pop_fetch(uuid)
- end
- end
-
- defp process_package(%{"action" => "publish", "data" => data}, %{origin: origin} = _fed_socket) do
- if Containment.contain_origin(origin, data) do
- IngesterWorker.enqueue("ingest", %{"object" => data})
- end
-
- {:reply, %{"action" => "publish_reply", "status" => "processed"}}
- end
-
- defp process_package(%{"action" => "fetch_reply", "uuid" => uuid, "data" => data}, _fed_socket) do
- FetchRegistry.register_fetch_received(uuid, data)
- {:noreply, nil}
- end
-
- defp process_package(%{"action" => "fetch", "uuid" => uuid, "data" => ap_id}, _fed_socket) do
- {:ok, data} = render_fetched_data(ap_id, uuid)
- {:reply, data}
- end
-
- defp process_package(%{"action" => "publish_reply"}, _fed_socket) do
- {:noreply, nil}
- end
-
- defp process_package(other, _fed_socket) do
- Logger.warn("unknown json packages received #{inspect(other)}")
- {:noreply, nil}
- end
-
- defp render_fetched_data(ap_id, uuid) do
- {:ok,
- %{
- "action" => "fetch_reply",
- "status" => "processed",
- "uuid" => uuid,
- "data" => represent_item(ap_id)
- }}
- end
-
- defp represent_item(ap_id) do
- case User.get_by_ap_id(ap_id) do
- nil ->
- object = Object.get_cached_by_ap_id(ap_id)
-
- if Visibility.is_public?(object) do
- Phoenix.View.render_to_string(ObjectView, "object.json", object: object)
- else
- nil
- end
-
- user ->
- Phoenix.View.render_to_string(UserView, "user.json", user: user)
- end
- end
-
- defp send_packet(data, socket_pid) do
- Process.send(socket_pid, {:send, data}, [])
- end
-
- def shake, do: @shake
-end
diff --git a/lib/pleroma/web/fed_sockets/fetch_registry.ex b/lib/pleroma/web/fed_sockets/fetch_registry.ex
deleted file mode 100644
index 7897f0fc6..000000000
--- a/lib/pleroma/web/fed_sockets/fetch_registry.ex
+++ /dev/null
@@ -1,151 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.FetchRegistry do
- @moduledoc """
- The FetchRegistry acts as a broker for fetch requests and return values.
- This allows calling processes to block while waiting for a reply.
- It doesn't impose its own process, instead using `Cachex` to handle fetches in process, allowing
- multi threaded processes to avoid bottlenecking.
-
- Normally outside modules will have no need to call or use the FetchRegistry themselves.
-
- The `Cachex` parameters can be controlled from the config. Since exact timeout intervals
- aren't necessary the following settings are used by default:
-
- config :pleroma, :fed_sockets,
- fed_socket_fetches: [
- default: 12_000,
- interval: 3_000,
- lazy: false
- ]
-
- """
-
- defmodule FetchRegistryData do
- defstruct uuid: nil,
- sent_json: nil,
- received_json: nil,
- sent_at: nil,
- received_at: nil
- end
-
- alias Ecto.UUID
-
- require Logger
-
- @fetches :fed_socket_fetches
-
- @doc """
- Registers a json request with the FetchRegistry and returns the identifying UUID.
- """
- def register_fetch(json) do
- %FetchRegistryData{uuid: uuid} =
- json
- |> new_registry_data
- |> save_registry_data
-
- uuid
- end
-
- @doc """
- Reports on the status of a Fetch given the identifying UUID.
-
- Will return
- * {:ok, fetched_object} if a fetch has completed
- * {:error, :waiting} if a fetch is still pending
- * {:error, other_error} usually :missing to indicate a fetch that has timed out
- """
- def check_fetch(uuid) do
- case get_registry_data(uuid) do
- {:ok, %FetchRegistryData{received_at: nil}} ->
- {:error, :waiting}
-
- {:ok, %FetchRegistryData{} = reg_data} ->
- {:ok, reg_data}
-
- e ->
- e
- end
- end
-
- @doc """
- Retrieves the response to a fetch given the identifying UUID.
- The completed fetch will be deleted from the FetchRegistry
-
- Will return
- * {:ok, fetched_object} if a fetch has completed
- * {:error, :waiting} if a fetch is still pending
- * {:error, other_error} usually :missing to indicate a fetch that has timed out
- """
- def pop_fetch(uuid) do
- case check_fetch(uuid) do
- {:ok, %FetchRegistryData{received_json: received_json}} ->
- delete_registry_data(uuid)
- {:ok, received_json}
-
- e ->
- e
- end
- end
-
- @doc """
- This is called to register that a fetch has returned.
- It expects the result data along with the UUID that was sent in the request
-
- Will return the fetched object or :error
- """
- def register_fetch_received(uuid, data) do
- case get_registry_data(uuid) do
- {:ok, %FetchRegistryData{received_at: nil} = reg_data} ->
- reg_data
- |> set_fetch_received(data)
- |> save_registry_data()
-
- {:ok, %FetchRegistryData{} = reg_data} ->
- Logger.warn("tried to add fetched data twice - #{uuid}")
- reg_data
-
- {:error, _} ->
- Logger.warn("Error adding fetch to registry - #{uuid}")
- :error
- end
- end
-
- defp new_registry_data(json) do
- %FetchRegistryData{
- uuid: UUID.generate(),
- sent_json: json,
- sent_at: :erlang.monotonic_time(:millisecond)
- }
- end
-
- defp get_registry_data(origin) do
- case Cachex.get(@fetches, origin) do
- {:ok, nil} ->
- {:error, :missing}
-
- {:ok, reg_data} ->
- {:ok, reg_data}
-
- _ ->
- {:error, :cache_error}
- end
- end
-
- defp set_fetch_received(%FetchRegistryData{} = reg_data, data),
- do: %FetchRegistryData{
- reg_data
- | received_at: :erlang.monotonic_time(:millisecond),
- received_json: data
- }
-
- defp save_registry_data(%FetchRegistryData{uuid: uuid} = reg_data) do
- {:ok, true} = Cachex.put(@fetches, uuid, reg_data)
- reg_data
- end
-
- defp delete_registry_data(origin),
- do: {:ok, true} = Cachex.del(@fetches, origin)
-end
diff --git a/lib/pleroma/web/fed_sockets/incoming_handler.ex b/lib/pleroma/web/fed_sockets/incoming_handler.ex
deleted file mode 100644
index 49d0d9d84..000000000
--- a/lib/pleroma/web/fed_sockets/incoming_handler.ex
+++ /dev/null
@@ -1,88 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.IncomingHandler do
- require Logger
-
- alias Pleroma.Web.FedSockets.FedRegistry
- alias Pleroma.Web.FedSockets.FedSocket
- alias Pleroma.Web.FedSockets.SocketInfo
-
- import HTTPSignatures, only: [validate_conn: 1, split_signature: 1]
-
- @behaviour :cowboy_websocket
-
- def init(req, state) do
- shake = FedSocket.shake()
-
- with true <- Pleroma.Config.get([:fed_sockets, :enabled]),
- sec_protocol <- :cowboy_req.header("sec-websocket-protocol", req, nil),
- headers = %{"(request-target)" => ^shake} <- :cowboy_req.headers(req),
- true <- validate_conn(%{req_headers: headers}),
- %{"keyId" => origin} <- split_signature(headers["signature"]) do
- req =
- if is_nil(sec_protocol) do
- req
- else
- :cowboy_req.set_resp_header("sec-websocket-protocol", sec_protocol, req)
- end
-
- {:cowboy_websocket, req, %{origin: origin}, %{}}
- else
- _ ->
- {:ok, req, state}
- end
- end
-
- def websocket_init(%{origin: origin}) do
- case FedRegistry.add_fed_socket(origin) do
- {:ok, socket_info} ->
- {:ok, socket_info}
-
- e ->
- Logger.error("FedSocket websocket_init failed - #{inspect(e)}")
- {:error, inspect(e)}
- end
- end
-
- # Use the ping to check if the connection should be expired
- def websocket_handle(:ping, socket_info) do
- if SocketInfo.expired?(socket_info) do
- {:stop, socket_info}
- else
- {:ok, socket_info, :hibernate}
- end
- end
-
- def websocket_handle({:text, data}, socket_info) do
- socket_info = SocketInfo.touch(socket_info)
-
- case FedSocket.receive_package(socket_info, data) do
- {:noreply, _} ->
- {:ok, socket_info}
-
- {:reply, reply} ->
- {:reply, {:text, Jason.encode!(reply)}, socket_info}
-
- {:error, reason} ->
- Logger.error("incoming error - receive_package: #{inspect(reason)}")
- {:ok, socket_info}
- end
- end
-
- def websocket_info({:send, message}, socket_info) do
- socket_info = SocketInfo.touch(socket_info)
-
- {:reply, {:text, message}, socket_info}
- end
-
- def websocket_info(:close, state) do
- {:stop, state}
- end
-
- def websocket_info(message, state) do
- Logger.debug("#{__MODULE__} unknown message #{inspect(message)}")
- {:ok, state}
- end
-end
diff --git a/lib/pleroma/web/fed_sockets/ingester_worker.ex b/lib/pleroma/web/fed_sockets/ingester_worker.ex
deleted file mode 100644
index 325f2a4ab..000000000
--- a/lib/pleroma/web/fed_sockets/ingester_worker.ex
+++ /dev/null
@@ -1,33 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.IngesterWorker do
- use Pleroma.Workers.WorkerHelper, queue: "ingestion_queue"
- require Logger
-
- alias Pleroma.Web.Federator
-
- @impl Oban.Worker
- def perform(%Job{args: %{"op" => "ingest", "object" => ingestee}}) do
- try do
- ingestee
- |> Jason.decode!()
- |> do_ingestion()
- rescue
- e ->
- Logger.error("IngesterWorker error - #{inspect(e)}")
- e
- end
- end
-
- defp do_ingestion(params) do
- case Federator.incoming_ap_doc(params) do
- {:error, reason} ->
- {:error, reason}
-
- {:ok, object} ->
- {:ok, object}
- end
- end
-end
diff --git a/lib/pleroma/web/fed_sockets/outgoing_handler.ex b/lib/pleroma/web/fed_sockets/outgoing_handler.ex
deleted file mode 100644
index e235a7c43..000000000
--- a/lib/pleroma/web/fed_sockets/outgoing_handler.ex
+++ /dev/null
@@ -1,151 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.OutgoingHandler do
- use GenServer
-
- require Logger
-
- alias Pleroma.Application
- alias Pleroma.Web.ActivityPub.InternalFetchActor
- alias Pleroma.Web.FedSockets
- alias Pleroma.Web.FedSockets.FedRegistry
- alias Pleroma.Web.FedSockets.FedSocket
- alias Pleroma.Web.FedSockets.SocketInfo
-
- def start_link(uri) do
- GenServer.start_link(__MODULE__, %{uri: uri})
- end
-
- def init(%{uri: uri}) do
- case initiate_connection(uri) do
- {:ok, ws_origin, conn_pid} ->
- FedRegistry.add_fed_socket(ws_origin, conn_pid)
-
- {:error, reason} ->
- Logger.debug("Outgoing connection failed - #{inspect(reason)}")
- :ignore
- end
- end
-
- def handle_info({:gun_ws, conn_pid, _ref, {:text, data}}, socket_info) do
- socket_info = SocketInfo.touch(socket_info)
-
- case FedSocket.receive_package(socket_info, data) do
- {:noreply, _} ->
- {:noreply, socket_info}
-
- {:reply, reply} ->
- :gun.ws_send(conn_pid, {:text, Jason.encode!(reply)})
- {:noreply, socket_info}
-
- {:error, reason} ->
- Logger.error("incoming error - receive_package: #{inspect(reason)}")
- {:noreply, socket_info}
- end
- end
-
- def handle_info(:close, state) do
- Logger.debug("Sending close frame !!!!!!!")
- {:close, state}
- end
-
- def handle_info({:gun_down, _pid, _prot, :closed, _}, state) do
- {:stop, :normal, state}
- end
-
- def handle_info({:send, data}, %{conn_pid: conn_pid} = socket_info) do
- socket_info = SocketInfo.touch(socket_info)
- :gun.ws_send(conn_pid, {:text, data})
- {:noreply, socket_info}
- end
-
- def handle_info({:gun_ws, _, _, :pong}, state) do
- {:noreply, state, :hibernate}
- end
-
- def handle_info(msg, state) do
- Logger.debug("#{__MODULE__} unhandled event #{inspect(msg)}")
- {:noreply, state}
- end
-
- def terminate(reason, state) do
- Logger.debug(
- "#{__MODULE__} terminating outgoing connection for #{inspect(state)} for #{inspect(reason)}"
- )
-
- {:ok, state}
- end
-
- def initiate_connection(uri) do
- ws_uri =
- uri
- |> SocketInfo.origin()
- |> FedSockets.uri_for_origin()
-
- %{host: host, port: port, path: path} = URI.parse(ws_uri)
-
- with {:ok, conn_pid} <- :gun.open(to_charlist(host), port, %{protocols: [:http]}),
- {:ok, _} <- :gun.await_up(conn_pid),
- reference <-
- :gun.get(conn_pid, to_charlist(path), [
- {'user-agent', to_charlist(Application.user_agent())}
- ]),
- {:response, :fin, 204, _} <- :gun.await(conn_pid, reference),
- headers <- build_headers(uri),
- ref <- :gun.ws_upgrade(conn_pid, to_charlist(path), headers, %{silence_pings: false}) do
- receive do
- {:gun_upgrade, ^conn_pid, ^ref, [<<"websocket">>], _} ->
- {:ok, ws_uri, conn_pid}
- after
- 15_000 ->
- Logger.debug("Fedsocket timeout connecting to #{inspect(uri)}")
- {:error, :timeout}
- end
- else
- {:response, :nofin, 404, _} ->
- {:error, :fedsockets_not_supported}
-
- e ->
- Logger.debug("Fedsocket error connecting to #{inspect(uri)}")
- {:error, e}
- end
- end
-
- defp build_headers(uri) do
- host_for_sig = uri |> URI.parse() |> host_signature()
-
- shake = FedSocket.shake()
- digest = "SHA-256=" <> (:crypto.hash(:sha256, shake) |> Base.encode64())
- date = Pleroma.Signature.signed_date()
- shake_size = byte_size(shake)
-
- signature_opts = %{
- "(request-target)": shake,
- "content-length": to_charlist("#{shake_size}"),
- date: date,
- digest: digest,
- host: host_for_sig
- }
-
- signature = Pleroma.Signature.sign(InternalFetchActor.get_actor(), signature_opts)
-
- [
- {'signature', to_charlist(signature)},
- {'date', date},
- {'digest', to_charlist(digest)},
- {'content-length', to_charlist("#{shake_size}")},
- {to_charlist("(request-target)"), to_charlist(shake)},
- {'user-agent', to_charlist(Application.user_agent())}
- ]
- end
-
- defp host_signature(%{host: host, scheme: scheme, port: port}) do
- if port == URI.default_port(scheme) do
- host
- else
- "#{host}:#{port}"
- end
- end
-end
diff --git a/lib/pleroma/web/fed_sockets/socket_info.ex b/lib/pleroma/web/fed_sockets/socket_info.ex
deleted file mode 100644
index d6fdffe1a..000000000
--- a/lib/pleroma/web/fed_sockets/socket_info.ex
+++ /dev/null
@@ -1,52 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.SocketInfo do
- defstruct origin: nil,
- pid: nil,
- conn_pid: nil,
- state: :default,
- connected_until: nil
-
- alias Pleroma.Web.FedSockets.SocketInfo
- @default_connection_duration 15 * 60 * 1000
-
- def build(uri, conn_pid \\ nil) do
- uri
- |> build_origin()
- |> build_pids(conn_pid)
- |> touch()
- end
-
- def touch(%SocketInfo{} = socket_info),
- do: %{socket_info | connected_until: new_ttl()}
-
- def connect(%SocketInfo{} = socket_info),
- do: %{socket_info | state: :connected}
-
- def expired?(%{connected_until: connected_until}),
- do: connected_until < :erlang.monotonic_time(:millisecond)
-
- def origin(uri),
- do: build_origin(uri).origin
-
- defp build_pids(socket_info, conn_pid),
- do: struct(socket_info, pid: self(), conn_pid: conn_pid)
-
- defp build_origin(uri) when is_binary(uri),
- do: uri |> URI.parse() |> build_origin
-
- defp build_origin(%{host: host, port: nil, scheme: scheme}),
- do: build_origin(%{host: host, port: URI.default_port(scheme)})
-
- defp build_origin(%{host: host, port: port}),
- do: %SocketInfo{origin: "#{host}:#{port}"}
-
- defp new_ttl do
- connection_duration =
- Pleroma.Config.get([:fed_sockets, :connection_duration], @default_connection_duration)
-
- :erlang.monotonic_time(:millisecond) + connection_duration
- end
-end
diff --git a/lib/pleroma/web/fed_sockets/supervisor.ex b/lib/pleroma/web/fed_sockets/supervisor.ex
deleted file mode 100644
index a5f4bebfb..000000000
--- a/lib/pleroma/web/fed_sockets/supervisor.ex
+++ /dev/null
@@ -1,59 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.Supervisor do
- use Supervisor
- import Cachex.Spec
-
- def start_link(opts) do
- Supervisor.start_link(__MODULE__, opts, name: __MODULE__)
- end
-
- def init(args) do
- children = [
- build_cache(:fed_socket_fetches, args),
- build_cache(:fed_socket_rejections, args),
- {Registry, keys: :unique, name: FedSockets.Registry, meta: [rejected: %{}]}
- ]
-
- opts = [strategy: :one_for_all, name: Pleroma.Web.Streamer.Supervisor]
- Supervisor.init(children, opts)
- end
-
- defp build_cache(name, args) do
- opts = get_opts(name, args)
-
- %{
- id: String.to_atom("#{name}_cache"),
- start: {Cachex, :start_link, [name, opts]},
- type: :worker
- }
- end
-
- defp get_opts(cache_name, args)
- when cache_name in [:fed_socket_fetches, :fed_socket_rejections] do
- default = get_opts_or_config(args, cache_name, :default, 15_000)
- interval = get_opts_or_config(args, cache_name, :interval, 3_000)
- lazy = get_opts_or_config(args, cache_name, :lazy, false)
-
- [expiration: expiration(default: default, interval: interval, lazy: lazy)]
- end
-
- defp get_opts(name, args) do
- Keyword.get(args, name, [])
- end
-
- defp get_opts_or_config(args, name, key, default) do
- args
- |> Keyword.get(name, [])
- |> Keyword.get(key)
- |> case do
- nil ->
- Pleroma.Config.get([:fed_sockets, name, key], default)
-
- value ->
- value
- end
- end
-end
diff --git a/test/pleroma/web/fed_sockets/fed_registry_test.exs b/test/pleroma/web/fed_sockets/fed_registry_test.exs
deleted file mode 100644
index 73aaced46..000000000
--- a/test/pleroma/web/fed_sockets/fed_registry_test.exs
+++ /dev/null
@@ -1,124 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.FedRegistryTest do
- use ExUnit.Case
-
- alias Pleroma.Web.FedSockets
- alias Pleroma.Web.FedSockets.FedRegistry
- alias Pleroma.Web.FedSockets.SocketInfo
-
- @good_domain "http://good.domain"
- @good_domain_origin "good.domain:80"
-
- setup do
- start_supervised({Pleroma.Web.FedSockets.Supervisor, []})
- build_test_socket(@good_domain)
- Process.sleep(10)
-
- :ok
- end
-
- describe "add_fed_socket/1 without conflicting sockets" do
- test "can be added" do
- Process.sleep(10)
- assert {:ok, %SocketInfo{origin: origin}} = FedRegistry.get_fed_socket(@good_domain_origin)
- assert origin == "good.domain:80"
- end
-
- test "multiple origins can be added" do
- build_test_socket("http://anothergood.domain")
- Process.sleep(10)
-
- assert {:ok, %SocketInfo{origin: origin_1}} =
- FedRegistry.get_fed_socket(@good_domain_origin)
-
- assert {:ok, %SocketInfo{origin: origin_2}} =
- FedRegistry.get_fed_socket("anothergood.domain:80")
-
- assert origin_1 == "good.domain:80"
- assert origin_2 == "anothergood.domain:80"
- assert FedRegistry.list_all() |> Enum.count() == 2
- end
- end
-
- describe "add_fed_socket/1 when duplicate sockets conflict" do
- setup do
- build_test_socket(@good_domain)
- build_test_socket(@good_domain)
- Process.sleep(10)
- :ok
- end
-
- test "will be ignored" do
- assert {:ok, %SocketInfo{origin: origin, pid: _pid_one}} =
- FedRegistry.get_fed_socket(@good_domain_origin)
-
- assert origin == "good.domain:80"
-
- assert FedRegistry.list_all() |> Enum.count() == 1
- end
-
- test "the newer process will be closed" do
- pid_two = build_test_socket(@good_domain)
-
- assert {:ok, %SocketInfo{origin: origin, pid: _pid_one}} =
- FedRegistry.get_fed_socket(@good_domain_origin)
-
- assert origin == "good.domain:80"
- Process.sleep(10)
-
- refute Process.alive?(pid_two)
-
- assert FedRegistry.list_all() |> Enum.count() == 1
- end
- end
-
- describe "get_fed_socket/1" do
- test "returns missing for unknown hosts" do
- assert {:error, :missing} = FedRegistry.get_fed_socket("not_a_dmoain")
- end
-
- test "returns rejected for hosts previously rejected" do
- "rejected.domain:80"
- |> FedSockets.uri_for_origin()
- |> FedRegistry.set_host_rejected()
-
- assert {:error, :rejected} = FedRegistry.get_fed_socket("rejected.domain:80")
- end
-
- test "can retrieve a previously added SocketInfo" do
- build_test_socket(@good_domain)
- Process.sleep(10)
- assert {:ok, %SocketInfo{origin: origin}} = FedRegistry.get_fed_socket(@good_domain_origin)
- assert origin == "good.domain:80"
- end
-
- test "removes references to SocketInfos when the process crashes" do
- assert {:ok, %SocketInfo{origin: origin, pid: pid}} =
- FedRegistry.get_fed_socket(@good_domain_origin)
-
- assert origin == "good.domain:80"
-
- Process.exit(pid, :testing)
- Process.sleep(100)
- assert {:error, :missing} = FedRegistry.get_fed_socket(@good_domain_origin)
- end
- end
-
- def build_test_socket(uri) do
- Kernel.spawn(fn -> fed_socket_almost(uri) end)
- end
-
- def fed_socket_almost(origin) do
- FedRegistry.add_fed_socket(origin)
-
- receive do
- :close ->
- :ok
- after
- 5_000 -> :timeout
- end
- end
-end
diff --git a/test/pleroma/web/fed_sockets/fetch_registry_test.exs b/test/pleroma/web/fed_sockets/fetch_registry_test.exs
deleted file mode 100644
index 7bd2d995a..000000000
--- a/test/pleroma/web/fed_sockets/fetch_registry_test.exs
+++ /dev/null
@@ -1,67 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.FetchRegistryTest do
- use ExUnit.Case
-
- alias Pleroma.Web.FedSockets.FetchRegistry
- alias Pleroma.Web.FedSockets.FetchRegistry.FetchRegistryData
-
- @json_message "hello"
- @json_reply "hello back"
-
- setup do
- start_supervised(
- {Pleroma.Web.FedSockets.Supervisor,
- [
- ping_interval: 8,
- connection_duration: 15,
- rejection_duration: 5,
- fed_socket_fetches: [default: 10, interval: 10]
- ]}
- )
-
- :ok
- end
-
- test "fetches can be stored" do
- uuid = FetchRegistry.register_fetch(@json_message)
-
- assert {:error, :waiting} = FetchRegistry.check_fetch(uuid)
- end
-
- test "fetches can return" do
- uuid = FetchRegistry.register_fetch(@json_message)
- task = Task.async(fn -> FetchRegistry.register_fetch_received(uuid, @json_reply) end)
-
- assert {:error, :waiting} = FetchRegistry.check_fetch(uuid)
- Task.await(task)
-
- assert {:ok, %FetchRegistryData{received_json: received_json}} =
- FetchRegistry.check_fetch(uuid)
-
- assert received_json == @json_reply
- end
-
- test "fetches are deleted once popped from stack" do
- uuid = FetchRegistry.register_fetch(@json_message)
- task = Task.async(fn -> FetchRegistry.register_fetch_received(uuid, @json_reply) end)
- Task.await(task)
-
- assert {:ok, %FetchRegistryData{received_json: received_json}} =
- FetchRegistry.check_fetch(uuid)
-
- assert received_json == @json_reply
- assert {:ok, @json_reply} = FetchRegistry.pop_fetch(uuid)
-
- assert {:error, :missing} = FetchRegistry.check_fetch(uuid)
- end
-
- test "fetches can time out" do
- uuid = FetchRegistry.register_fetch(@json_message)
- assert {:error, :waiting} = FetchRegistry.check_fetch(uuid)
- Process.sleep(500)
- assert {:error, :missing} = FetchRegistry.check_fetch(uuid)
- end
-end
diff --git a/test/pleroma/web/fed_sockets/socket_info_test.exs b/test/pleroma/web/fed_sockets/socket_info_test.exs
deleted file mode 100644
index db3d6edcd..000000000
--- a/test/pleroma/web/fed_sockets/socket_info_test.exs
+++ /dev/null
@@ -1,118 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2020 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.FedSockets.SocketInfoTest do
- use ExUnit.Case
-
- alias Pleroma.Web.FedSockets
- alias Pleroma.Web.FedSockets.SocketInfo
-
- describe "uri_for_origin" do
- test "provides the fed_socket URL given the origin information" do
- endpoint = "example.com:4000"
- assert FedSockets.uri_for_origin(endpoint) =~ "ws://"
- assert FedSockets.uri_for_origin(endpoint) =~ endpoint
- end
- end
-
- describe "origin" do
- test "will provide the origin field given a url" do
- endpoint = "example.com:4000"
- assert SocketInfo.origin("ws://#{endpoint}") == endpoint
- assert SocketInfo.origin("http://#{endpoint}") == endpoint
- assert SocketInfo.origin("https://#{endpoint}") == endpoint
- end
-
- test "will proide the origin field given a uri" do
- endpoint = "example.com:4000"
- uri = URI.parse("http://#{endpoint}")
-
- assert SocketInfo.origin(uri) == endpoint
- end
- end
-
- describe "touch" do
- test "will update the TTL" do
- endpoint = "example.com:4000"
- socket = SocketInfo.build("ws://#{endpoint}")
- Process.sleep(2)
- touched_socket = SocketInfo.touch(socket)
-
- assert socket.connected_until < touched_socket.connected_until
- end
- end
-
- describe "expired?" do
- setup do
- start_supervised(
- {Pleroma.Web.FedSockets.Supervisor,
- [
- ping_interval: 8,
- connection_duration: 5,
- rejection_duration: 5,
- fed_socket_rejections: [lazy: true]
- ]}
- )
-
- :ok
- end
-
- test "tests if the TTL is exceeded" do
- endpoint = "example.com:4000"
- socket = SocketInfo.build("ws://#{endpoint}")
- refute SocketInfo.expired?(socket)
- Process.sleep(10)
-
- assert SocketInfo.expired?(socket)
- end
- end
-
- describe "creating outgoing connection records" do
- test "can be passed a string" do
- assert %{conn_pid: :pid, origin: _origin} = SocketInfo.build("example.com:4000", :pid)
- end
-
- test "can be passed a URI" do
- uri = URI.parse("http://example.com:4000")
- assert %{conn_pid: :pid, origin: origin} = SocketInfo.build(uri, :pid)
- assert origin =~ "example.com:4000"
- end
-
- test "will include the port number" do
- assert %{conn_pid: :pid, origin: origin} = SocketInfo.build("http://example.com:4000", :pid)
-
- assert origin =~ ":4000"
- end
-
- test "will provide the port if missing" do
- assert %{conn_pid: :pid, origin: "example.com:80"} =
- SocketInfo.build("http://example.com", :pid)
-
- assert %{conn_pid: :pid, origin: "example.com:443"} =
- SocketInfo.build("https://example.com", :pid)
- end
- end
-
- describe "creating incoming connection records" do
- test "can be passed a string" do
- assert %{pid: _, origin: _origin} = SocketInfo.build("example.com:4000")
- end
-
- test "can be passed a URI" do
- uri = URI.parse("example.com:4000")
- assert %{pid: _, origin: _origin} = SocketInfo.build(uri)
- end
-
- test "will include the port number" do
- assert %{pid: _, origin: origin} = SocketInfo.build("http://example.com:4000")
-
- assert origin =~ ":4000"
- end
-
- test "will provide the port if missing" do
- assert %{pid: _, origin: "example.com:80"} = SocketInfo.build("http://example.com")
- assert %{pid: _, origin: "example.com:443"} = SocketInfo.build("https://example.com")
- end
- end
-end
