# Compare commits: `develop...customizable-docker-db`

37 commits
| SHA1 |
| --- |
| `dbbfeeb748` |
| `569851f14c` |
| `6ab2e960d7` |
| `3e8388e2de` |
| `bb37aa3be5` |
| `6e06308354` |
| `44a5d0507b` |
| `e8b7ab5e1e` |
| `f5409783b6` |
| `616c9878a5` |
| `1c06f6de29` |
| `5e0c61fa8f` |
| `863630eb73` |
| `cb678e8659` |
| `4450f7463b` |
| `2f1569b931` |
| `06dbb96b28` |
| `7522735b2a` |
| `5e3c58ae79` |
| `212686ae9f` |
| `bf90bc0c17` |
| `c38555de5c` |
| `f2d59a2922` |
| `c777a97f60` |
| `0a7b074508` |
| `df40ab6831` |
| `f3c2aae62b` |
| `9e480ab73e` |
| `770913f9aa` |
| `8f8c7c76dc` |
| `995225b783` |
| `38d93a0f97` |
| `a2dfab971d` |
| `2b6c5c94f9` |
| `23c7271f05` |
| `5fc47d4b80` |
| `23da9903d5` |
132 changed files with 22172 additions and 33314 deletions
## `.woodpecker/build-docker.yml` (new file, 32 lines)

```yaml
labels:
  platform: linux/amd64

variables:
  - &on-release
    when:
      event:
        - push
        - tag
      branch:
        - develop
        - stable
  - &on-stable
    when:
      event:
        - push
        - tag
      branch:
        - stable

steps:
  build:
    image: woodpeckerci/plugin-docker-buildx:latest
    secrets: [docker_username, docker_password]
    settings:
      repo: akkoma/akkoma
      dockerfile: Dockerfile
      platforms: linux/amd64
      tag: next
    when:
      branch: customizable-docker-db
      event: push
```
## `CHANGELOG.md` (26 changed lines)

```diff
@@ -4,23 +4,6 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
-## UNRELEASED
-
-## BREAKING
-- Minimum PostgreSQL version is raised to 12
-
-## Added
-- Implement [FEP-67ff](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md) (federation documentation)
-- Meilisearch: it is now possible to use separate keys for search and admin actions
-- New standalone `prune_orphaned_activities` mix task with configurable batch limit
-- The `prune_objects` mix task now accepts a `--limit` parameter for initial object pruning
-
-## Fixed
-- Meilisearch: order of results returned from our REST API now actually matches how Meilisearch ranks results
-
-## Changed
-- Refactored Rich Media to cache the content in the database. Fetching operations that could block status rendering have been eliminated.
-
 ## 2024.04.1 (Security)
 
 ## Fixed
@@ -28,6 +11,15 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 - Issue allowing use of non-media objects as attachments and crashing timeline rendering
 - Issue allowing webfinger spoofing in certain situations
 
+## Added
+- Implement [FEP-67ff](https://codeberg.org/fediverse/fep/src/branch/main/fep/67ff/fep-67ff.md) (federation documentation)
+
+## Added
+- Meilisearch: it is now possible to use separate keys for search and admin actions
+
+## Fixed
+- Meilisearch: order of results returned from our REST API now actually matches how Meilisearch ranks results
+
 ## 2024.04
 
 ## Added
```
## `Dockerfile` (60 changed lines)

```diff
@@ -1,9 +1,37 @@
-FROM hexpm/elixir:1.15.4-erlang-26.0.2-alpine-3.18.2
+####################################
+# BUILD CONTAINER
+####################################
+
+FROM hexpm/elixir:1.16.3-erlang-26.2.5-alpine-3.19.1 AS BUILD
 
 ENV MIX_ENV=prod
 ENV ERL_EPMD_ADDRESS=127.0.0.1
 
-ARG HOME=/opt/akkoma
+RUN mkdir /src
+WORKDIR /src
+RUN apk add git gcc g++ musl-dev make cmake file-dev exiftool ffmpeg imagemagick libmagic ncurses postgresql-client
+RUN mix local.hex --force &&\
+    mix local.rebar --force
+
+ADD mix.exs /src/mix.exs
+ADD mix.lock /src/mix.lock
+ADD lib/ /src/lib/
+ADD priv/ /src/priv/
+ADD config/ /src/config/
+ADD rel/ /src/rel/
+ADD restarter/ /src/restarter/
+ADD docs/ /src/docs/
+ADD installation/ /src/installation/
+
+RUN mix deps.get --only=prod
+RUN mix release --path docker-release
+
+#################################
+# RUNTIME CONTAINER
+#################################
+
+FROM alpine:3.19.1
+
+RUN apk add file-dev exiftool ffmpeg imagemagick libmagic postgresql-client
+
 LABEL org.opencontainers.image.title="akkoma" \
     org.opencontainers.image.description="Akkoma for Docker" \
@@ -14,21 +42,23 @@ LABEL org.opencontainers.image.title="akkoma" \
     org.opencontainers.image.revision=$VCS_REF \
     org.opencontainers.image.created=$BUILD_DATE
 
-RUN apk add git gcc g++ musl-dev make cmake file-dev exiftool ffmpeg imagemagick libmagic ncurses postgresql-client
-
+ARG HOME=/opt/akkoma
 EXPOSE 4000
 
 ARG UID=1000
 ARG GID=1000
 ARG UNAME=akkoma
 
 RUN addgroup -g $GID $UNAME
 RUN adduser -u $UID -G $UNAME -D -h $HOME $UNAME
 
 WORKDIR /opt/akkoma
 
 USER $UNAME
-RUN mix local.hex --force &&\
-    mix local.rebar --force
+COPY --from=BUILD /src/docker-release/ $HOME
+RUN ln -s $HOME/bin/pleroma /bin/pleroma
+# it's nice you know
+RUN ln -s $HOME/bin/pleroma /bin/akkoma
+RUN ln -s $HOME/bin/pleroma_ctl /bin/pleroma_ctl
+RUN ln -s $HOME/bin/pleroma_ctl /bin/akkoma_ctl
 
 ADD docker-entrypoint.sh $HOME/docker-entrypoint.sh
 
+ENV AKKOMA_CONFIG_PATH=/opt/akkoma/config/prod.secret.exs
 VOLUME uploads /opt/akkoma/uploads
 VOLUME instance /opt/akkoma/instance
 VOLUME config /opt/akkoma/config
 
 CMD ["/opt/akkoma/docker-entrypoint.sh"]
```
## `config/config.exs`

```diff
@@ -63,6 +63,7 @@ config :pleroma, Pleroma.Upload,
   uploader: Pleroma.Uploaders.Local,
   filters: [],
   link_name: false,
+  proxy_remote: false,
   filename_display_max_length: 30,
   base_url: nil,
   allowed_mime_types: ["image", "audio", "video"]
@@ -188,10 +189,8 @@ config :pleroma, :http,
   receive_timeout: :timer.seconds(15),
   proxy_url: nil,
   user_agent: :default,
-  pool_size: 10,
-  adapter: [],
-  # see: https://hexdocs.pm/finch/Finch.html#start_link/1
-  pool_max_idle_time: :timer.seconds(30)
+  pool_size: 50,
+  adapter: []
 
 config :pleroma, :instance,
   name: "Akkoma",
@@ -438,12 +437,8 @@ config :pleroma, :rich_media,
     Pleroma.Web.RichMedia.Parsers.TwitterCard,
     Pleroma.Web.RichMedia.Parsers.OEmbed
   ],
-  failure_backoff: 60_000,
-  ttl_setters: [
-    Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl,
-    Pleroma.Web.RichMedia.Parser.TTL.Opengraph
-  ],
-  max_body: 5_000_000
+  failure_backoff: :timer.minutes(20),
+  ttl_setters: [Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl]
 
 config :pleroma, :media_proxy,
   enabled: false,
@@ -581,9 +576,7 @@ config :pleroma, Oban,
     mute_expire: 5,
     search_indexing: 10,
     nodeinfo_fetcher: 1,
-    database_prune: 1,
-    rich_media_backfill: 2,
-    rich_media_expiration: 2
+    database_prune: 1
   ],
   plugins: [
     Oban.Plugins.Pruner,
@@ -599,8 +592,7 @@ config :pleroma, :workers,
   retries: [
     federator_incoming: 5,
     federator_outgoing: 5,
-    search_indexing: 2,
-    rich_media_backfill: 3
+    search_indexing: 2
   ],
   timeout: [
     activity_expiration: :timer.seconds(5),
@@ -622,8 +614,7 @@ config :pleroma, :workers,
     mute_expire: :timer.seconds(5),
     search_indexing: :timer.seconds(5),
     nodeinfo_fetcher: :timer.seconds(10),
-    database_prune: :timer.minutes(10),
-    rich_media_backfill: :timer.seconds(30)
+    database_prune: :timer.minutes(10)
   ]
 
 config :pleroma, Pleroma.Formatter,
@@ -822,10 +813,8 @@ config :pleroma, :modules, runtime_dir: "instance/modules"
 config :pleroma, configurable_from_database: false
 
 config :pleroma, Pleroma.Repo,
-  parameters: [
-    gin_fuzzy_search_limit: "500",
-    plan_cache_mode: "force_custom_plan"
-  ]
+  parameters: [gin_fuzzy_search_limit: "500"],
+  prepare: :unnamed
 
 config :pleroma, :majic_pool, size: 2
```
## `config/description.exs`

```diff
@@ -118,6 +118,14 @@ config :pleroma, :config_description, [
         "font"
       ]
     },
+    %{
+      key: :proxy_remote,
+      type: :boolean,
+      description: """
+      Proxy requests to the remote uploader.\n
+      Useful if media upload endpoint is not internet accessible.
+      """
+    },
     %{
       key: :filename_display_max_length,
       type: :integer,
@@ -2709,8 +2717,8 @@ config :pleroma, :config_description, [
     %{
       key: :pool_size,
       type: :integer,
-      description: "Number of concurrent outbound HTTP requests to allow PER HOST. Default 10.",
-      suggestions: [10]
+      description: "Number of concurrent outbound HTTP requests to allow. Default 50.",
+      suggestions: [50]
     },
     %{
       key: :adapter,
@@ -2733,13 +2741,6 @@ config :pleroma, :config_description, [
           ]
         }
       ]
     },
-    %{
-      key: :pool_max_idle_time,
-      type: :integer,
-      description:
-        "Number of seconds to retain an HTTP pool; pool will remain if actively in use. Default 30 seconds (in ms).",
-      suggestions: [30_000]
-    }
   ]
 },
```
## `config/test.exs`

```diff
@@ -63,8 +63,7 @@ config :tesla, adapter: Tesla.Mock
 config :pleroma, :rich_media,
   enabled: false,
   ignore_hosts: [],
-  ignore_tld: ["local", "localdomain", "lan"],
-  max_body: 2_000_000
+  ignore_tld: ["local", "localdomain", "lan"]
 
 config :pleroma, :instance,
   multi_factor_authentication: [
@@ -142,8 +141,6 @@ config :phoenix, :plug_init_mode, :runtime
 config :pleroma, :instances_favicons, enabled: false
 config :pleroma, :instances_nodeinfo, enabled: false
 
-config :pleroma, Pleroma.Web.RichMedia.Backfill, provider: Pleroma.Web.RichMedia.Backfill
-
 if File.exists?("./config/test.secret.exs") do
   import_config "test.secret.exs"
 else
```
## `binary-leak-checker.sh` (deleted file, 10 lines)

```sh
if [ "$#" -ne 2 ]; then
	echo "Usage: binary-leak-checker.sh <nodename> <erlang cookie>"
	exit 1
fi

echo "The command you want to run is:
:recon.bin_leak(10)
"

iex --sname debug --remsh $1 --erl "-setcookie $2"
```
## `docker-compose.pgsql-tuning.yml` (new file, 39 lines)

```yaml
services:
  db:
    # If you use a config generator, use the value below for your
    # "ram" size
    shm_size: 4gb
    command:
      - "postgres"
      - "-c"
      - "max_connections=40"
      - "-c"
      - "shared_buffers=1GB"
      - "-c"
      - "effective_cache_size=3GB"
      - "-c"
      - "maintenance_work_mem=512MB"
      - "-c"
      - "checkpoint_completion_target=0.9"
      - "-c"
      - "wal_buffers=16MB"
      - "-c"
      - "default_statistics_target=500"
      - "-c"
      - "random_page_cost=1.1"
      - "-c"
      - "effective_io_concurrency=200"
      - "-c"
      - "work_mem=6553kB"
      - "-c"
      - "min_wal_size=4GB"
      - "-c"
      - "max_wal_size=16GB"
      - "-c"
      - "max_worker_processes=4"
      - "-c"
      - "max_parallel_workers_per_gather=2"
      - "-c"
      - "max_parallel_workers=4"
      - "-c"
      - "max_parallel_maintenance_workers=2"
```
## `docker-compose.yml`

```diff
@@ -1,12 +1,10 @@
-version: "3.7"
-
 services:
   db:
-    image: akkoma-db:latest
-    build: ./docker-resources/database
+    image: postgres:16-alpine
+    shm_size: 4gb
     restart: unless-stopped
+    user: ${DOCKER_USER}
+    env_file:
+      - .env
     environment: {
       # This might seem insecure but is usually not a problem.
       # You should leave this at the "akkoma" default.
@@ -18,19 +16,15 @@ services:
       POSTGRES_USER: akkoma,
       POSTGRES_PASSWORD: akkoma,
     }
-    env_file:
-      - .env
-    user: ${DOCKER_USER}
     volumes:
       - type: bind
         source: ./pgdata
         target: /var/lib/postgresql/data
 
   akkoma:
-    image: akkoma:latest
-    build: .
+    image: akkoma/akkoma:next
     restart: unless-stopped
     env_file:
       - .env
     user: ${DOCKER_USER}
     links:
       - db
     ports: [
@@ -44,7 +38,9 @@ services:
       "127.0.0.1:4000:4000",
     ]
     volumes:
-      - .:/opt/akkoma
+      - ./config:/opt/akkoma/config
+      - ./uploads:/opt/akkoma/uploads
+      - ./instance:/opt/akkoma/instance
 
 # Copy this into docker-compose.override.yml and uncomment there if you want to use a reverse proxy
 #proxy:
```
## `docker-entrypoint.sh`

```diff
@@ -8,7 +8,7 @@ while ! pg_isready -U ${DB_USER:-pleroma} -d postgres://${DB_HOST:-db}:5432/${DB
 done
 
 echo "-- Running migrations..."
-mix ecto.migrate
+/opt/akkoma/bin/pleroma_ctl migrate
 
 echo "-- Starting!"
-mix phx.server
+/opt/akkoma/bin/pleroma start
```
## `docker-resources/build.sh` (deleted file, 4 lines)

```sh
#!/bin/sh

docker compose build --build-arg UID=$(id -u) --build-arg GID=$(id -g) akkoma
docker compose build --build-arg UID=$(id -u) --build-arg GID=$(id -g) db
```
## `docker-resources/database/Dockerfile` (deleted file, 10 lines)

```dockerfile
FROM postgres:14-alpine

ARG UID=1000
ARG GID=1000
ARG UNAME=akkoma

RUN addgroup -g $GID $UNAME
RUN adduser -u $UID -G $UNAME -D -h $HOME $UNAME

USER akkoma
```
## `docker-resources/docker-compose.pgsql-tuning.yml` (new file, 39 lines)

```yaml
services:
  db:
    # If you use a config generator, use the value below for your
    # "ram" size
    shm_size: 4gb
    command:
      - "postgres"
      - "-c"
      - "max_connections=40"
      - "-c"
      - "shared_buffers=1GB"
      - "-c"
      - "effective_cache_size=3GB"
      - "-c"
      - "maintenance_work_mem=512MB"
      - "-c"
      - "checkpoint_completion_target=0.9"
      - "-c"
      - "wal_buffers=16MB"
      - "-c"
      - "default_statistics_target=500"
      - "-c"
      - "random_page_cost=1.1"
      - "-c"
      - "effective_io_concurrency=200"
      - "-c"
      - "work_mem=6553kB"
      - "-c"
      - "min_wal_size=4GB"
      - "-c"
      - "max_wal_size=16GB"
      - "-c"
      - "max_worker_processes=4"
      - "-c"
      - "max_parallel_workers_per_gather=2"
      - "-c"
      - "max_parallel_workers=4"
      - "-c"
      - "max_parallel_maintenance_workers=2"
```
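This overlay is meant to be layered on top of the base compose file. A minimal sketch of applying it, assuming you have no other local overrides yet (`docker compose` picks up `docker-compose.override.yml` automatically; the install docs further down describe merging instead of copying if you already have one):

```bash
# Copy the tuning overlay into the auto-loaded override file,
# then recreate the db container so the new postgres flags take effect.
cp docker-resources/docker-compose.pgsql-tuning.yml docker-compose.override.yml
docker compose up -d db
```

Remember to regenerate the values with PgTune for the `shm_size` you actually allocate.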
## `docker-resources/generate-instance.sh` (new executable file, 26 lines)

```bash
#!/bin/bash

set -euo pipefail

mkdir -p pgdata

# This is sorta special in that we need the generated_config.exs to make it onto the host
docker compose run \
    --rm \
    -e "PLEROMA_CTL_RPC_DISABLED=true" \
    akkoma ./bin/pleroma_ctl instance gen --no-sql-user --no-db-creation --dbhost db --dbname akkoma --dbuser akkoma --dbpass akkoma --listen-ip 0.0.0.0 --listen-port 4000 --static-dir /opt/akkoma/instance/ --uploads-dir /opt/akkoma/uploads/ --db-configurable y --output /opt/akkoma/config/generated_config.exs --output-psql /opt/akkoma/config/setup_db.psql

# setup database from generated config
# we run from the akkoma container to ensure we have the right environment! can't connect to a DB that doesn't exist yet...
docker compose run \
    --rm \
    -e "PLEROMA_CTL_RPC_DISABLED=true" \
    -e "PGPASSWORD=akkoma" \
    akkoma psql -h db -U akkoma -d akkoma -f /opt/akkoma/config/setup_db.psql

# stop tzdata trying to write to places it shouldn't
echo "config :tzdata, :data_dir, \"/var/tmp/elixir_tzdata_storage\"" >> /opt/akkoma/config/generated_config.exs

echo "Instance generated!"

echo "Make sure you check your config and copy it to config/prod.secret.exs before starting the instance!"
```
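As the installation docs further down describe, this script replaces the old multi-step `mix pleroma.instance gen` flow. A typical first run, taken from those docs:

```bash
# Generate the instance config and set up the database, then promote
# the generated config to the production secret config.
./docker-resources/generate-instance.sh
cp config/generated_config.exs config/prod.secret.exs
```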
## `docker-resources/manage.sh`

```diff
@@ -1,3 +1,4 @@
 #!/bin/sh
 
-docker compose run --rm akkoma $@
+# this should all be done without needing a running instance
+docker compose run --rm -e "PLEROMA_CTL_RPC_DISABLED=true" akkoma ./bin/pleroma_ctl $@
```
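The wrapper now passes everything through `pleroma_ctl`, so OTP-style commands translate one-to-one. For example, mirroring the example in the install docs below:

```bash
# OTP form on a bare-metal install:
./bin/pleroma_ctl user new myuser
# The same action through the docker wrapper:
./docker-resources/manage.sh user new myuser
```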
## `docker-resources/migrate-postgresql-version.sh` (new executable file, 80 lines)

```bash
#!/bin/bash

set -euo pipefail

# USAGE:
# migrate-postgresql-version.sh <data_directory> <old_version> <new_version>

if [ "$#" -ne 3 ]; then
	echo "USAGE: migrate-postgresql-version.sh <data_directory> <old_version> <new_version>"
	echo "Example: migrate-postgresql-version.sh pgdata 14 16"
	exit 1
fi

data_directory=$1
old_version=$2
new_version=$3
new_data_directory=$data_directory.new

# we'll need the credentials to create the new container
echo "Please provide the credentials for your database"
echo "If you set a different password for the old container, you'll need to provide it here! Check your config file if you're not sure"
echo ""

echo "Database user (default 'akkoma'):"
read DB_USER
echo "Database password (default: 'akkoma'):"
read DB_PASS
echo "Database name (default: 'akkoma'):"
read DB_NAME

echo ""
echo "Ok! Using user:$DB_USER to migrate db:$DB_NAME from version $old_version to $new_version"

trap "docker stop pg$old_version pg$new_version" INT TERM

# Start a PostgreSQL 14 container
docker run --rm -d --name pg$old_version \
	-v $(pwd)/$data_directory:/var/lib/postgresql/data \
	-e "POSTGRES_PASSWORD=$DB_PASS" \
	-e "POSTGRES_USER=$DB_USER" \
	-e "POSTGRES_DB=$DB_NAME" \
	postgres:$old_version-alpine

# wait a bit for the container to start
sleep 10

# Dump the db from the old container
echo "Dumping your old database..."

docker exec pg$old_version pg_dumpall -U $DB_USER > dump.sql

# Stop the old container
echo "Stopping the old database..."
docker stop pg$old_version

# Start a PostgreSQL 16 container
echo "Creating a new database with version $new_version..."
docker run --rm -d --name pg$new_version \
	-v $(pwd)/$new_data_directory:/var/lib/postgresql/data \
	-e "POSTGRES_PASSWORD=password" \
	-e "POSTGRES_USER=$DB_USER" \
	-e "POSTGRES_DB=$DB_NAME" \
	postgres:$new_version-alpine

# wait for it
sleep 10

# Load the db into the new container
docker exec -i pg$new_version psql -U $DB_USER < dump.sql

# Stop the new container
docker stop pg$new_version

# Remove the dump file
# rm dump.sql

echo "Migration complete! Your new database folder is $data_directory.new - you can now move your old data and replace it"

echo "mv $data_directory $data_directory.old"
echo "mv $new_data_directory $data_directory"
```
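Following the script's own usage text, migrating a `pgdata` directory from PostgreSQL 14 to 16 looks like this (the directory swap at the end is printed by the script rather than performed automatically):

```bash
./docker-resources/migrate-postgresql-version.sh pgdata 14 16
# After verifying the new cluster, swap the directories as the script suggests:
mv pgdata pgdata.old
mv pgdata.new pgdata
```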
## `docs/docs/administration/CLI_tasks/database.md`

````diff
@@ -50,39 +50,9 @@ This will prune remote posts older than 90 days (configurable with [`config :ple
 
 - `--keep-threads` - Don't prune posts when they are part of a thread where at least one post has seen local interaction (e.g. one of the posts is a local post, or is favourited by a local user, or has been repeated by a local user...). It also wont delete posts when at least one of the posts in that thread is kept (e.g. because one of the posts has seen recent activity).
 - `--keep-non-public` - Keep non-public posts like DM's and followers-only, even if they are remote.
-- `--limit` - limits how many remote posts get pruned. This limit does **not** apply to any of the follow up jobs. If wanting to keep the database load in check it is thus advisable to run the standalone `prune_orphaned_activities` task with a limit afterwards instead of passing `--prune-orphaned-activities` to this task.
 - `--prune-orphaned-activities` - Also prune orphaned activities afterwards. Activities are things like Like, Create, Announce, Flag (aka reports)... They can significantly help reduce the database size.
 - `--vacuum` - Run `VACUUM FULL` after the objects are pruned. This should not be used on a regular basis, but is useful if your instance has been running for a long time before pruning.
 
-## Prune orphaned activities from the database
-
-This will prune activities which are no longer referenced by anything.
-Such activities might be the result of running `prune_objects` without `--prune-orphaned-activities`.
-The same notes and warnings apply as for `prune_objects`.
-
-The task will print out how many rows were freed in total in its last
-line of output in the form `Deleted 345 rows`.
-When running the job in limited batches this can be used to determine
-when all orphaned activities have been deleted.
-
-=== "OTP"
-
-    ```sh
-    ./bin/pleroma_ctl database prune_orphaned_activities [option ...]
-    ```
-
-=== "From Source"
-
-    ```sh
-    mix pleroma.database prune_orphaned_activities [option ...]
-    ```
-
-### Options
-
-- `--limit n` - Only delete up to `n` activities in each query making up this job, i.e. if this job runs two queries at most `2n` activities will be deleted. Running this task repeatedly in limited batches can help maintain the instance’s responsiveness while still freeing up some space.
-- `--no-singles` - Do not delete activites referencing single objects
-- `--no-arrays` - Do not delete activites referencing an array of objects
-
 ## Create a conversation for all existing DMs
 
 Can be safely re-run
````
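The removed section documents develop's batch-limited pruning; since that task reports its work as `Deleted N rows`, a wrapper loop could drive it until nothing is left. A sketch only; the batch size is illustrative:

```bash
# Repeatedly prune orphaned activities in bounded batches until a run
# deletes nothing, keeping each pass (and the database load) small.
while true; do
  out=$(./bin/pleroma_ctl database prune_orphaned_activities --limit 50000)
  echo "$out"
  echo "$out" | grep -q 'Deleted 0 rows' && break
done
```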
## `docs/docs/administration/backup.md`

```diff
@@ -4,12 +4,12 @@
 
 1. Stop the Akkoma service.
 2. Go to the working directory of Akkoma (default is `/opt/akkoma`)
-3. Run `sudo -Hu postgres pg_dump -d akkoma --format=custom -f </path/to/backup_location/akkoma.pgdump>`[¹] (make sure the postgres user has write access to the destination file)
-4. Copy `akkoma.pgdump`, `config/config.exs`[²], `uploads` folder, and [static directory](../configuration/static_dir.md) to your backup destination. If you have other modifications, copy those changes too.
+3. Run[¹] `sudo -Hu postgres pg_dump -d akkoma --format=custom -f </path/to/backup_location/akkoma.pgdump>` (make sure the postgres user has write access to the destination file)
+4. Copy `akkoma.pgdump`, `config/prod.secret.exs`[²], `config/setup_db.psql` (if still available) and the `uploads` folder to your backup destination. If you have other modifications, copy those changes too.
 5. Restart the Akkoma service.
 
-[¹]: We assume the database name is "akkoma". If not, you can find the correct name in your configuration files.
-[²]: If you have a from source installation, you need `config/prod.secret.exs` instead of `config/config.exs`. The `config/config.exs` file also exists, but in case of from source installations, it only contains the default values and it is tracked by Git, so you don't need to back it up.
+[¹]: We assume the database name is "akkoma". If not, you can find the correct name in your config files.
+[²]: If you've installed using OTP, you need `config/config.exs` instead of `config/prod.secret.exs`.
 
 ## Restore/Move
 
@@ -17,16 +17,19 @@
 2. Stop the Akkoma service.
 3. Go to the working directory of Akkoma (default is `/opt/akkoma`)
 4. Copy the above mentioned files back to their original position.
-5. Drop the existing database and user[¹]. `sudo -Hu postgres psql -c 'DROP DATABASE akkoma;';` `sudo -Hu postgres psql -c 'DROP USER akkoma;'`
-6. Restore the database schema and akkoma role[¹] (replace the password with the one you find in the configuration file), `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-wich-you-can-find-in-your-configuration-file>';"` `sudo -Hu postgres psql -c "CREATE DATABASE akkoma OWNER akkoma;"`.
+5. Drop the existing database and user if restoring in-place[¹]. `sudo -Hu postgres psql -c 'DROP DATABASE akkoma;';` `sudo -Hu postgres psql -c 'DROP USER akkoma;'`
+6. Restore the database schema and akkoma role using either of the following options
+    * You can use the original `setup_db.psql` if you have it[²]: `sudo -Hu postgres psql -f config/setup_db.psql`.
+    * Or recreate the database and user yourself (replace the password with the one you find in the config file) `sudo -Hu postgres psql -c "CREATE USER akkoma WITH ENCRYPTED PASSWORD '<database-password-wich-you-can-find-in-your-config-file>'; CREATE DATABASE akkoma OWNER akkoma;"`.
 7. Now restore the Akkoma instance's data into the empty database schema[¹]: `sudo -Hu postgres pg_restore -d akkoma -v -1 </path/to/backup_location/akkoma.pgdump>`
-8. If you installed a newer Akkoma version, you should run the database migrations `./bin/pleroma_ctl migrate`[²].
+8. If you installed a newer Akkoma version, you should run `MIX_ENV=prod mix ecto.migrate`[³]. This task performs database migrations, if there were any.
 9. Restart the Akkoma service.
 10. Run `sudo -Hu postgres vacuumdb --all --analyze-in-stages`. This will quickly generate the statistics so that postgres can properly plan queries.
-11. If setting up on a new server, configure Nginx by using the `installation/nginx/akkoma.nginx` configuration sample or reference the Akkoma installation guide which contains the Nginx configuration instructions.
+11. If setting up on a new server configure Nginx by using the `installation/akkoma.nginx` config sample or reference the Akkoma installation guide for your OS which contains the Nginx configuration instructions.
 
-[¹]: We assume the database name and user are both "akkoma". If not, you can find the correct name in your configuration files.
-[²]: If you have a from source installation, the command is `MIX_ENV=prod mix ecto.migrate`. Note that we prefix with `MIX_ENV=prod` to use the `config/prod.secret.exs` configuration file.
+[¹]: We assume the database name and user are both "akkoma". If not, you can find the correct name in your config files.
+[²]: You can recreate the `config/setup_db.psql` by running the `mix pleroma.instance gen` task again. You can ignore most of the questions, but make the database user, name, and password the same as found in your backed up config file. This will also create a new `config/generated_config.exs` file which you may delete as it is not needed.
+[³]: Prefix with `MIX_ENV=prod` to run it using the production config file.
 
 ## Remove
```
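Steps 3 and 4 of the revised backup procedure condense to something like the following sketch (the service unit name and backup destination are illustrative, not mandated by the docs):

```bash
sudo systemctl stop akkoma          # step 1; unit name may differ on your system
cd /opt/akkoma                      # step 2: Akkoma working directory
sudo -Hu postgres pg_dump -d akkoma --format=custom -f /var/backups/akkoma.pgdump
cp config/prod.secret.exs /var/backups/
cp config/setup_db.psql /var/backups/ 2>/dev/null || true   # only if still available
cp -r uploads /var/backups/
sudo systemctl start akkoma         # step 5
```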
## `docs/docs/configuration/cheatsheet.md`

```diff
@@ -605,6 +605,7 @@ the source code is here: [kocaptcha](https://github.com/koto-bank/kocaptcha). Th
 * `link_name`: When enabled Akkoma will add a `name` parameter to the url of the upload, for example `https://instance.tld/media/corndog.png?name=corndog.png`. This is needed to provide the correct filename in Content-Disposition headers
 * `base_url`: The base URL to access a user-uploaded file; MUST be configured explicitly.
     Using a (sub)domain distinct from the instance endpoint is **strongly** recommended. A good value might be `https://media.myakkoma.instance/media/`.
+* `proxy_remote`: If you're using a remote uploader, Akkoma will proxy media requests instead of redirecting to it.
 * `proxy_opts`: Proxy options, see `Pleroma.ReverseProxy` documentation.
 * `filename_display_max_length`: Set max length of a filename to display. 0 = no limit. Default: 30.
```
## `docs/docs/configuration/postgresql.md`

````diff
@@ -4,10 +4,47 @@ Akkoma performance is largely dependent on performance of the underlying databas
 
 ## PGTune
 
-[PgTune](https://pgtune.leopard.in.ua) can be used to get recommended settings. Make sure to set the DB type to "Online transaction processing system" for optimal performance. Also set the number of connections to between 25 and 30. This will allow each connection to have access to more resources while still leaving some room for running maintenance tasks while the instance is still running.
-
-It is also recommended to not use "Network Storage" option.
+[PgTune](https://pgtune.leopard.in.ua) can be used to get recommended settings. Be sure to set "Number of Connections" to 20, otherwise it might produce settings hurtful to database performance. It is also recommended to not use "Network Storage" option.
 
-If your server runs other services, you may want to take that into account. E.g. if you have 4G ram, but 1G of it is already used for other services, it may be better to tell PGTune you only have 3G.
+If your server runs other services, you may want to take that into account. E.g. if you have 4G ram, but 1G of it is already used for other services, it may be better to tell PGTune you only have 3G. In the end, PGTune only provides recomended settings, you can always try to finetune further.
 
-In the end, PGTune only provides recomended settings, you can always try to finetune further.
+### Example configurations
+
+Here are some configuration suggestions for PostgreSQL 10+.
+
+#### 1GB RAM, 1 CPU
+```
+shared_buffers = 256MB
+effective_cache_size = 768MB
+maintenance_work_mem = 64MB
+work_mem = 13107kB
+```
+
+#### 2GB RAM, 2 CPU
+```
+shared_buffers = 512MB
+effective_cache_size = 1536MB
+maintenance_work_mem = 128MB
+work_mem = 26214kB
+max_worker_processes = 2
+max_parallel_workers_per_gather = 1
+max_parallel_workers = 2
+```
+
+## Disable generic query plans
+
+When PostgreSQL receives a query, it decides on a strategy for searching the requested data, this is called a query plan. The query planner has two modes: generic and custom. Generic makes a plan for all queries of the same shape, ignoring the parameters, which is then cached and reused. Custom, on the contrary, generates a unique query plan based on query parameters.
+
+By default PostgreSQL has an algorithm to decide which mode is more efficient for particular query, however this algorithm has been observed to be wrong on some of the queries Akkoma sends, leading to serious performance loss. Therefore, it is recommended to disable generic mode.
+
+Akkoma already avoids generic query plans by default, however the method it uses is not the most efficient because it needs to be compatible with all supported PostgreSQL versions. For PostgreSQL 12 and higher additional performance can be gained by adding the following to Akkoma configuration:
+```elixir
+config :pleroma, Pleroma.Repo,
+  prepare: :named,
+  parameters: [
+    plan_cache_mode: "force_custom_plan"
+  ]
+```
+
+A more detailed explaination of the issue can be found at <https://blog.soykaf.com/post/postgresql-elixir-troubles/>.
````
## `docs/docs/configuration/howto_mediaproxy.md`

```diff
@@ -6,7 +6,7 @@ as soon as the post is received by your instance.
 
 ## Nginx
 
-The following are excerpts from the [suggested nginx config](https://akkoma.dev/AkkomaGang/akkoma/src/branch/develop/installation/nginx/akkoma.nginx) that demonstrates the necessary config for the media proxy to work.
+The following are excerpts from the [suggested nginx config](../../../installation/nginx/akkoma.nginx) that demonstrates the necessary config for the media proxy to work.
 
 A `proxy_cache_path` must be defined, for example:
```
## `docs/docs/development/API/admin_api.md`

```diff
@@ -1033,6 +1033,7 @@ Most of the settings will be applied in `runtime`, this means that you don't nee
 - `:pools`
 - partially settings inside these keys:
     - `:seconds_valid` in `Pleroma.Captcha`
+    - `:proxy_remote` in `Pleroma.Upload`
     - `:upload_limit` in `:instance`
 
 - Params:
@@ -1093,6 +1094,7 @@ List of settings which support only full update by subkey:
 {"tuple": [":uploader", "Pleroma.Uploaders.Local"]},
 {"tuple": [":filters", ["Pleroma.Upload.Filter.Dedupe"]]},
 {"tuple": [":link_name", true]},
+{"tuple": [":proxy_remote", false]},
 {"tuple": [":proxy_opts", [
 {"tuple": [":redirect_on_failure", false]},
 {"tuple": [":max_body_length", 1048576]},
```
## `docs/docs/installation/docker_en.md`

````diff
@@ -28,31 +28,14 @@ echo "DOCKER_USER=$(id -u):$(id -g)" >> .env
 This probably won't need to be changed, it's only there to set basic environment
 variables for the docker compose file.
 
-### Building the container
-
-The container provided is a thin wrapper around akkoma's dependencies,
-it does not contain the code itself. This is to allow for easy updates
-and debugging if required.
-
-```bash
-./docker-resources/build.sh
-```
-
-This will generate a container called `akkoma` which we can use
-in our compose environment.
-
 ### Generating your instance
 
 ```bash
-mkdir pgdata
-./docker-resources/manage.sh mix deps.get
-./docker-resources/manage.sh mix compile
-./docker-resources/manage.sh mix pleroma.instance gen
+./docker-resources/generate-instance.sh
 ```
 
-This will ask you a few questions - the defaults are fine for most things,
-the database hostname is `db`, the database password is `akkoma`
-(not auto generated), and you will want to set the ip to `0.0.0.0`.
+This will ask you a few questions - the defaults are fine for most things!
 
 Now we'll want to copy over the config it just created
 
@@ -60,24 +43,6 @@ Now we'll want to copy over the config it just created
 cp config/generated_config.exs config/prod.secret.exs
 ```
 
-### Setting up the database
-
-We need to run a few commands on the database container, this isn't too bad
-
-```bash
-docker compose run --rm --user akkoma -d db
-# Note down the name it gives here, it will be something like akkoma_db_run
-docker compose run --rm akkoma psql -h db -U akkoma -f config/setup_db.psql
-docker stop akkoma_db_run # Replace with the name you noted down
-```
-
-Now we can actually run our migrations
-
-```bash
-./docker-resources/manage.sh mix ecto.migrate
-# this will recompile your files at the same time, since we changed the config
-```
-
 ### Start the server
 
 We're going to run it in the foreground on the first run, just to make sure
@@ -102,7 +67,7 @@ docker compose up -d
 If your instance is up and running, you can create your first user with administrative rights with the following task:
 
 ```shell
-./docker-resources/manage.sh mix pleroma.user new MY_USERNAME MY_EMAIL@SOMEWHERE --admin
+./docker-resources/manage.sh user new MY_USERNAME MY_EMAIL@SOMEWHERE --admin
 ```
 
 And follow the prompts
@@ -154,23 +119,43 @@ If you want, you can also run the reverse proxy on the host. This is a bit more
 Follow the guides for source install for your distribution of choice, or adapt
 as needed. Your standard setup can be found in the [Debian Guide](../debian_based_en/#nginx)
 
+### Applying Postgresql optimisations
+
+Your postgresql server will behave better if you tune its settings to your machine.
+
+There is a file at `docker-resources/docker-compose.pgsql-tuning.yml` which shows you how to apply settings, for example
+those generated by [PgTune](https://pgtune.leopard.in.ua/)
+
+You can merge this config into a `docker-compose.override.yml` file to apply them. Make sure that you generate your options
+based on the shm_size you allocate!
+
 ### You're done!
 
 All that's left is to set up your frontends.
 
-The standard from-source commands will apply to you, just make sure you
+The standard OTP commands will apply to you, just make sure you
 prefix them with `./docker-resources/manage.sh`!
 
+So, for example, if an OTP command would be
+
+```
+./bin/pleroma_ctl user new myuser
+```
+
+The equivalent docker command would be
+
+```
+./docker-resources/manage.sh user new myuser
+```
+
 {! installation/frontends.include !}
 
 ### Updating Docker Installs
 
 ```bash
 git pull
-./docker-resources/build.sh
-./docker-resources/manage.sh mix deps.get
-./docker-resources/manage.sh mix compile
-./docker-resources/manage.sh mix ecto.migrate
+docker compose pull
+./docker-resources/manage.sh migrate
+docker compose restart akkoma db
 ```
 
@@ -180,6 +165,20 @@ create a new file called `docker-compose.override.yml`. There you can add any
 overrides or additional services without worrying about git conflicts when a
 new release comes out.
 
+### Migrating from the old docker install system
+
+If you were running akkoma in docker before 2024.06, you will need to do a few little migration steps.
+
+First off, we need to migrate our postgres installation to a newer version!
+
+```bash
+./docker-resources/migrate-postgresql-version.sh pgdata 14 16
+```
+
+This should be nice and quick.
+
+After that you can just `docker compose pull` and all should be fine.
+
 #### Further reading
 
 {! installation/further_reading.include !}
````
## `docs/docs/installation/generic_dependencies.include`

```diff
@@ -1,6 +1,6 @@
 ## Required dependencies
 
-* PostgreSQL 12+
+* PostgreSQL 9.6+
 * Elixir 1.14+ (currently tested up to 1.16)
 * Erlang OTP 25+ (currently tested up to OTP26)
 * git
```
## `installation/caddy/Caddyfile`

```diff
@@ -12,22 +12,26 @@ example.tld {
 		output file /var/log/caddy/akkoma.log
 	}
 
 	encode gzip
 
 	# this is explicitly IPv4 since Pleroma.Web.Endpoint binds on IPv4 only
 	# and `localhost.` resolves to [::0] on some systems: see issue #930
 	reverse_proxy 127.0.0.1:4000
 
-	@mediaproxy path /media/* /proxy/*
-	handle @mediaproxy {
-		redir https://media.example.tld{uri} permanent
-	}
+	# Uncomment if using a separate media subdomain
+	#@mediaproxy path /media/* /proxy/*
+	#handle @mediaproxy {
+	#	redir https://media.example.tld{uri} permanent
+	#}
 }
 
-media.example.tld {
-	@mediaproxy path /media/* /proxy/*
-	reverse_proxy @mediaproxy 127.0.0.1:4000 {
-		transport http {
-			response_header_timeout 10s
-			read_timeout 15s
-		}
-	}
-}
+# Uncomment if using a separate media subdomain
+#media.example.tld {
+#	@mediaproxy path /media/* /proxy/*
+#	reverse_proxy @mediaproxy 127.0.0.1:4000 {
+#		transport http {
+#			response_header_timeout 10s
+#			read_timeout 15s
+#		}
+#	}
+#}
```
## `lib/mix/pleroma.ex`

```diff
@@ -112,26 +112,18 @@ defmodule Mix.Pleroma do
     end
   end
 
-  def shell_info(message) when is_binary(message) or is_list(message) do
+  def shell_info(message) do
     if mix_shell?(),
       do: Mix.shell().info(message),
       else: IO.puts(message)
   end
 
-  def shell_info(message) do
-    shell_info("#{inspect(message)}")
-  end
-
-  def shell_error(message) when is_binary(message) or is_list(message) do
+  def shell_error(message) do
     if mix_shell?(),
       do: Mix.shell().error(message),
       else: IO.puts(:stderr, message)
   end
 
-  def shell_error(message) do
-    shell_error("#{inspect(message)}")
-  end
-
   @doc "Performs a safe check whether `Mix.shell/0` is available (does not raise if Mix is not loaded)"
   def mix_shell?, do: :erlang.function_exported(Mix, :shell, 0)
```
## `lib/mix/tasks/pleroma/activity.ex`

```diff
@@ -8,6 +8,7 @@ defmodule Mix.Tasks.Pleroma.Activity do
   alias Pleroma.User
   alias Pleroma.Web.CommonAPI
   alias Pleroma.Pagination
+  require Logger
   import Mix.Pleroma
   import Ecto.Query
 
@@ -16,7 +17,7 @@ defmodule Mix.Tasks.Pleroma.Activity do
 
     id
     |> Activity.get_by_id()
-    |> shell_info()
+    |> IO.inspect()
   end
 
   def run(["delete_by_keyword", user, keyword | _rest]) do
@@ -34,7 +35,7 @@ defmodule Mix.Tasks.Pleroma.Activity do
     )
     |> Enum.map(fn x -> CommonAPI.delete(x.id, u) end)
     |> Enum.count()
-    |> shell_info()
+    |> IO.puts()
   end
 
   defp query_with(q, search_query) do
```
## `lib/mix/tasks/pleroma/database.ex`

```diff
@@ -20,102 +20,6 @@ defmodule Mix.Tasks.Pleroma.Database do
   @shortdoc "A collection of database related tasks"
   @moduledoc File.read!("docs/docs/administration/CLI_tasks/database.md")
 
-  defp maybe_limit(query, limit_cnt) do
-    if is_number(limit_cnt) and limit_cnt > 0 do
-      limit(query, [], ^limit_cnt)
-    else
-      query
-    end
-  end
-
-  defp limit_statement(limit) when is_number(limit) do
-    if limit > 0 do
-      "LIMIT #{limit}"
-    else
-      ""
-    end
-  end
-
-  defp prune_orphaned_activities_singles(limit) do
-    %{:num_rows => del_single} =
-      """
-      delete from public.activities
-      where id in (
-        select a.id from public.activities a
-        left join public.objects o on a.data ->> 'object' = o.data ->> 'id'
-        left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id'
-        left join public.users u on a.data ->> 'object' = u.ap_id
-        where not a.local
-        and jsonb_typeof(a."data" -> 'object') = 'string'
-        and o.id is null
-        and a2.id is null
-        and u.id is null
-        #{limit_statement(limit)}
-      )
-      """
-      |> Repo.query!([], timeout: :infinity)
-
-    Logger.info("Prune activity singles: deleted #{del_single} rows...")
-    del_single
-  end
-
-  defp prune_orphaned_activities_array(limit) do
-    %{:num_rows => del_array} =
-      """
-      delete from public.activities
-      where id in (
-        select a.id from public.activities a
-        join json_array_elements_text((a."data" -> 'object')::json) as j
-          on a.data->>'type' = 'Flag'
-        left join public.objects o on j.value = o.data ->> 'id'
-        left join public.activities a2 on j.value = a2.data ->> 'id'
-        left join public.users u on j.value = u.ap_id
-        group by a.id
-        having max(o.data ->> 'id') is null
-        and max(a2.data ->> 'id') is null
-        and max(u.ap_id) is null
-        #{limit_statement(limit)}
-      )
-      """
-      |> Repo.query!([], timeout: :infinity)
-
-    Logger.info("Prune activity arrays: deleted #{del_array} rows...")
-    del_array
-  end
-
-  def prune_orphaned_activities(limit \\ 0, opts \\ []) when is_number(limit) do
-    # Activities can either refer to a single object id, and array of object ids
-    # or contain an inlined object (at least after going through our normalisation)
-    #
-    # Flag is the only type we support with an array (and always has arrays).
-    # Update the only one with inlined objects.
-    #
-    # We already regularly purge old Delete, Undo, Update and Remove and if
-    # rejected Follow requests anyway; no need to explicitly deal with those here.
-    #
-    # Since there’s an index on types and there are typically only few Flag
-    # activites, it’s _much_ faster to utilise the index. To avoid accidentally
-    # deleting useful activities should more types be added, keep typeof for singles.
-
-    # Prune activities who link to an array of objects
-    del_array =
-      if Keyword.get(opts, :arrays, true) do
-        prune_orphaned_activities_array(limit)
-      else
-        0
-      end
-
-    # Prune activities who link to a single object
-    del_single =
-      if Keyword.get(opts, :singles, true) do
-        prune_orphaned_activities_singles(limit)
-      else
-        0
-      end
-
-    del_single + del_array
-  end
-
   def run(["remove_embedded_objects" | args]) do
     {options, [], []} =
       OptionParser.parse(
@@ -158,37 +62,6 @@ defmodule Mix.Tasks.Pleroma.Database do
       )
   end
 
-  def run(["prune_orphaned_activities" | args]) do
-    {options, [], []} =
-      OptionParser.parse(
-        args,
-        strict: [
-          limit: :integer,
-          singles: :boolean,
-          arrays: :boolean
-        ]
-      )
-
-    start_pleroma()
-
-    {limit, options} = Keyword.pop(options, :limit, 0)
-
-    log_message = "Pruning orphaned activities"
-
-    log_message =
-      if limit > 0 do
-        log_message <> ", limiting deletion to #{limit} rows"
-      else
-        log_message
-      end
-
-    Logger.info(log_message)
-
-    deleted = prune_orphaned_activities(limit, options)
-
-    Logger.info("Deleted #{deleted} rows")
-  end
-
   def run(["prune_objects" | args]) do
     {options, [], []} =
       OptionParser.parse(
@@ -197,8 +70,7 @@ defmodule Mix.Tasks.Pleroma.Database do
           vacuum: :boolean,
           keep_threads: :boolean,
           keep_non_public: :boolean,
-          prune_orphaned_activities: :boolean,
-          limit: :integer
+          prune_orphaned_activities: :boolean
         ]
       )
 
@@ -207,8 +79,6 @@ defmodule Mix.Tasks.Pleroma.Database do
     deadline = Pleroma.Config.get([:instance, :remote_post_retention_days])
     time_deadline = NaiveDateTime.utc_now() |> NaiveDateTime.add(-(deadline * 86_400))
 
-    limit_cnt = Keyword.get(options, :limit, 0)
-
     log_message = "Pruning objects older than #{deadline} days"
 
     log_message =
@@ -240,16 +110,8 @@ defmodule Mix.Tasks.Pleroma.Database do
         log_message
       end
 
-    log_message =
-      if limit_cnt > 0 do
-        log_message <> ", limiting to #{limit_cnt} rows"
-      else
-        log_message
-      end
-
     Logger.info(log_message)
 
     {del_obj, _} =
       if Keyword.get(options, :keep_threads) do
        # We want to delete objects from threads where
        # 1. the newest post is still old
@@ -281,13 +143,11 @@ defmodule Mix.Tasks.Pleroma.Database do
           |> having([a], max(a.updated_at) < ^time_deadline)
           |> having([a], not fragment("bool_or(?)", a.local))
           |> having([_, b], fragment("max(?::text) is null", b.id))
-          |> maybe_limit(limit_cnt)
           |> select([a], fragment("? ->> 'context'::text", a.data))
 
         Pleroma.Object
         |> where([o], fragment("? ->> 'context'::text", o.data) in subquery(deletable_context))
       else
-        deletable =
         if Keyword.get(options, :keep_non_public) do
           Pleroma.Object
           |> where(
@@ -308,20 +168,12 @@ defmodule Mix.Tasks.Pleroma.Database do
           [o],
          fragment("split_part(?->>'actor', '/', 3) != ?", o.data, ^Pleroma.Web.Endpoint.host())
         )
-        |> maybe_limit(limit_cnt)
-        |> select([o], o.id)
-
-        Pleroma.Object
-        |> where([o], o.id in subquery(deletable))
       end
       |> Repo.delete_all(timeout: :infinity)
 
-    Logger.info("Deleted #{del_obj} objects...")
-
     if !Keyword.get(options, :keep_threads) do
       # Without the --keep-threads option, it's possible that bookmarked
       # objects have been deleted. We remove the corresponding bookmarks.
-      %{:num_rows => del_bookmarks} =
       """
       delete from public.bookmarks
       where id in (
@@ -331,33 +183,56 @@ defmodule Mix.Tasks.Pleroma.Database do
        where o.id is null
       )
       """
-      |> Repo.query!([], timeout: :infinity)
-
-      Logger.info("Deleted #{del_bookmarks} orphaned bookmarks...")
+      |> Repo.query([], timeout: :infinity)
     end
 
     if Keyword.get(options, :prune_orphaned_activities) do
-      del_activities = prune_orphaned_activities()
-      Logger.info("Deleted #{del_activities} orphaned activities...")
+      # Prune activities who link to a single object
+      """
+      delete from public.activities
+      where id in (
+        select a.id from public.activities a
+        left join public.objects o on a.data ->> 'object' = o.data ->> 'id'
+        left join public.activities a2 on a.data ->> 'object' = a2.data ->> 'id'
+        left join public.users u on a.data ->> 'object' = u.ap_id
+        where not a.local
+        and jsonb_typeof(a."data" -> 'object') = 'string'
+        and o.id is null
+        and a2.id is null
+        and u.id is null
+      )
+      """
+      |> Repo.query([], timeout: :infinity)
+
+      # Prune activities who link to an array of objects
+      """
+      delete from public.activities
+      where id in (
+        select a.id from public.activities a
+        join json_array_elements_text((a."data" -> 'object')::json) as j on jsonb_typeof(a."data" -> 'object') = 'array'
+        left join public.objects o on j.value = o.data ->> 'id'
+        left join public.activities a2 on j.value = a2.data ->> 'id'
+        left join public.users u on j.value = u.ap_id
+        group by a.id
+        having max(o.data ->> 'id') is null
+        and max(a2.data ->> 'id') is null
+        and max(u.ap_id) is null
+      )
+      """
+      |> Repo.query([], timeout: :infinity)
     end
 
-    %{:num_rows => del_hashtags} =
     """
     DELETE FROM hashtags AS ht
     WHERE NOT EXISTS (
       SELECT 1 FROM hashtags_objects hto
       WHERE ht.id = hto.hashtag_id)
     """
-    |> Repo.query!()
-
-    Logger.info("Deleted #{del_hashtags} no longer used hashtags...")
+    |> Repo.query()
 
     if Keyword.get(options, :vacuum) do
       Logger.info("Starting vacuum...")
       Maintenance.vacuum("full")
     end
 
     Logger.info("All done!")
   end
 
   def run(["prune_task"]) do
```
## `lib/mix/tasks/pleroma/diagnostics.ex`

```diff
@@ -3,6 +3,7 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
   alias Pleroma.Repo
   alias Pleroma.User
 
+  require Logger
   require Pleroma.Constants
 
   import Mix.Pleroma
@@ -13,7 +14,7 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
     start_pleroma()
 
     Pleroma.HTTP.get(url)
-    |> shell_info()
+    |> IO.inspect()
   end
 
   def run(["fetch_object", url]) do
@@ -26,7 +27,7 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
   def run(["home_timeline", nickname]) do
     start_pleroma()
     user = Repo.get_by!(User, nickname: nickname)
-    shell_info("Home timeline query #{user.nickname}")
+    Logger.info("Home timeline query #{user.nickname}")
 
     followed_hashtags =
       user
@@ -55,14 +56,14 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
       |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> shell_info()
+    |> IO.puts()
   end
 
   def run(["user_timeline", nickname, reading_nickname]) do
     start_pleroma()
     user = Repo.get_by!(User, nickname: nickname)
     reading_user = Repo.get_by!(User, nickname: reading_nickname)
-    shell_info("User timeline query #{user.nickname}")
+    Logger.info("User timeline query #{user.nickname}")
 
     params =
       %{limit: 20}
@@ -86,7 +87,7 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
       |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> shell_info()
+    |> IO.puts()
   end
 
   def run(["notifications", nickname]) do
@@ -102,7 +103,7 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
      |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> shell_info()
+    |> IO.puts()
   end
 
   def run(["known_network", nickname]) do
@@ -128,6 +129,6 @@ defmodule Mix.Tasks.Pleroma.Diagnostics do
       |> limit(20)
 
     Ecto.Adapters.SQL.explain(Repo, :all, query, analyze: true, timeout: :infinity)
-    |> shell_info()
+    |> IO.puts()
   end
 end
```
@@ -27,11 +27,11 @@ defmodule Mix.Tasks.Pleroma.Emoji do
       ]

       for {param, value} <- to_print do
-        shell_info(IO.ANSI.format([:bright, param, :normal, ": ", value]))
+        IO.puts(IO.ANSI.format([:bright, param, :normal, ": ", value]))
       end

       # A newline
-      shell_info("")
+      IO.puts("")
     end)
   end
@@ -49,7 +49,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
     pack = manifest[pack_name]
     src = pack["src"]

-    shell_info(
+    IO.puts(
       IO.ANSI.format([
         "Downloading ",
         :bright,
@@ -67,9 +67,9 @@ defmodule Mix.Tasks.Pleroma.Emoji do
     sha_status_text = ["SHA256 of ", :bright, pack_name, :normal, " source file is ", :bright]

     if archive_sha == String.upcase(pack["src_sha256"]) do
-      shell_info(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
+      IO.puts(IO.ANSI.format(sha_status_text ++ [:green, "OK"]))
     else
-      shell_info(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))
+      IO.puts(IO.ANSI.format(sha_status_text ++ [:red, "BAD"]))

       raise "Bad SHA256 for #{pack_name}"
     end
@@ -80,7 +80,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
       |> Path.dirname()
       |> Path.join(pack["files"])

-    shell_info(
+    IO.puts(
       IO.ANSI.format([
         "Fetching the file list for ",
         :bright,
@@ -94,7 +94,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do

     files = fetch_and_decode!(files_loc)

-    shell_info(IO.ANSI.format(["Unpacking ", :bright, pack_name]))
+    IO.puts(IO.ANSI.format(["Unpacking ", :bright, pack_name]))

     pack_path =
       Path.join([
@@ -115,7 +115,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
         file_list: files_to_unzip
       )

-    shell_info(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))
+    IO.puts(IO.ANSI.format(["Writing pack.json for ", :bright, pack_name]))

     pack_json = %{
       pack: %{
@@ -132,7 +132,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
       File.write!(Path.join(pack_path, "pack.json"), Jason.encode!(pack_json, pretty: true))
       Pleroma.Emoji.reload()
     else
-      shell_info(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
+      IO.puts(IO.ANSI.format([:bright, :red, "No pack named \"#{pack_name}\" found"]))
     end
   end
 end
@@ -180,14 +180,14 @@ defmodule Mix.Tasks.Pleroma.Emoji do
         custom_exts
       end

-    shell_info("Using #{Enum.join(exts, " ")} extensions")
+    IO.puts("Using #{Enum.join(exts, " ")} extensions")

-    shell_info("Downloading the pack and generating SHA256")
+    IO.puts("Downloading the pack and generating SHA256")

     {:ok, %{body: binary_archive}} = Pleroma.HTTP.get(src)
     archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()

-    shell_info("SHA256 is #{archive_sha}")
+    IO.puts("SHA256 is #{archive_sha}")

     pack_json = %{
       name => %{
@@ -208,7 +208,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do

     File.write!(files_name, Jason.encode!(emoji_map, pretty: true))

-    shell_info("""
+    IO.puts("""

     #{files_name} has been created and contains the list of all found emojis in the pack.
     Please review the files in the pack and remove those not needed.
@@ -230,11 +230,11 @@ defmodule Mix.Tasks.Pleroma.Emoji do
         )
       )

-      shell_info("#{pack_file} has been updated with the #{name} pack")
+      IO.puts("#{pack_file} has been updated with the #{name} pack")
     else
       File.write!(pack_file, Jason.encode!(pack_json, pretty: true))

-      shell_info("#{pack_file} has been created with the #{name} pack")
+      IO.puts("#{pack_file} has been created with the #{name} pack")
     end

     Pleroma.Emoji.reload()
@@ -243,7 +243,7 @@ defmodule Mix.Tasks.Pleroma.Emoji do
   def run(["reload"]) do
     start_pleroma()
     Pleroma.Emoji.reload()
-    shell_info("Emoji packs have been reloaded.")
+    IO.puts("Emoji packs have been reloaded.")
   end

   defp fetch_and_decode!(from) do
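The get-packs path above gates every download on a SHA256 check: the fetched archive is hashed and compared, case-insensitively, against the manifest's src_sha256 field. A standalone sketch of that gate, with stand-in bytes in place of a real download:

    # Stand-ins: in the task, binary_archive comes from Pleroma.HTTP.get(src)
    # and the expected digest from the pack manifest's "src_sha256" field.
    binary_archive = <<1, 2, 3>>
    expected_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16(case: :lower)

    archive_sha = :crypto.hash(:sha256, binary_archive) |> Base.encode16()

    if archive_sha == String.upcase(expected_sha) do
      :ok
    else
      raise "Bad SHA256"
    end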
@@ -37,7 +37,9 @@ defmodule Mix.Tasks.Pleroma.Instance do
           listen_port: :string,
           strip_uploads_metadata: :string,
           read_uploads_description: :string,
-          anonymize_uploads: :string
+          anonymize_uploads: :string,
+          no_sql_user: :boolean,
+          no_db_creation: :boolean
         ],
         aliases: [
           o: :output,
@@ -260,7 +262,9 @@ defmodule Mix.Tasks.Pleroma.Instance do
           dbname: dbname,
           dbuser: dbuser,
           dbpass: dbpass,
-          rum_enabled: rum_enabled
+          rum_enabled: rum_enabled,
+          no_sql_user: Keyword.get(options, :no_sql_user, false),
+          no_db_creation: Keyword.get(options, :no_db_creation, false)
         )

       config_dir = Path.dirname(config_path)
@@ -11,6 +11,7 @@ defmodule Mix.Tasks.Pleroma.RefreshCounterCache do
   alias Pleroma.CounterCache
   alias Pleroma.Repo

+  require Logger
   import Ecto.Query

   def run([]) do
@@ -48,7 +48,7 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
       ]
     )

-    shell_info("Created indices. Starting to insert posts.")
+    IO.puts("Created indices. Starting to insert posts.")

     chunk_size = Pleroma.Config.get([Pleroma.Search.Meilisearch, :initial_indexing_chunk_size])

@@ -65,7 +65,7 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
       )

     count = query |> Pleroma.Repo.aggregate(:count, :data)
-    shell_info("Entries to index: #{count}")
+    IO.puts("Entries to index: #{count}")

     Pleroma.Repo.stream(
       query,
@@ -92,10 +92,10 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do

       with {:ok, res} <- result do
         if not Map.has_key?(res, "indexUid") do
-          shell_info("\nFailed to index: #{inspect(result)}")
+          IO.puts("\nFailed to index: #{inspect(result)}")
         end
       else
-        e -> shell_error("\nFailed to index due to network error: #{inspect(e)}")
+        e -> IO.puts("\nFailed to index due to network error: #{inspect(e)}")
       end
     end)
     |> Stream.run()
@@ -128,13 +128,13 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
     if decoded["results"] do
       Enum.each(decoded["results"], fn
         %{"name" => name, "key" => key} ->
-          shell_info("#{name}: #{key}")
+          IO.puts("#{name}: #{key}")

         %{"description" => desc, "key" => key} ->
-          shell_info("#{desc}: #{key}")
+          IO.puts("#{desc}: #{key}")
       end)
     else
-      shell_error("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
+      IO.puts("Error fetching the keys, check the master key is correct: #{inspect(decoded)}")
     end
   end

@@ -142,7 +142,7 @@ defmodule Mix.Tasks.Pleroma.Search.Meilisearch do
     start_pleroma()

     {:ok, result} = meili_get("/indexes/objects/stats")
-    shell_info("Number of entries: #{result["numberOfDocuments"]}")
-    shell_info("Indexing? #{result["isIndexing"]}")
+    IO.puts("Number of entries: #{result["numberOfDocuments"]}")
+    IO.puts("Indexing? #{result["isIndexing"]}")
   end
 end
@@ -38,7 +38,7 @@ defmodule Mix.Tasks.Pleroma.Security do
     Logger.put_process_level(self(), :notice)
     start_pleroma()

-    shell_info("""
+    IO.puts("""
    +------------------------+
    | SPOOF SEARCH UPLOADS |
    +------------------------+
@@ -55,7 +55,7 @@ defmodule Mix.Tasks.Pleroma.Security do
     Logger.put_process_level(self(), :notice)
     start_pleroma()

-    shell_info("""
+    IO.puts("""
    +----------------------+
    | SPOOF SEARCH NOTES |
    +----------------------+
@@ -77,7 +77,7 @@ defmodule Mix.Tasks.Pleroma.Security do
         uploads_search_spoofs_local_dir(Config.get!([Pleroma.Uploaders.Local, :uploads]))

       _ ->
-        shell_info("""
+        IO.puts("""
         NOTE:
         Not using local uploader; thus not affected by this exploit.
         It's impossible to check for files, but in case local uploader was used before
@@ -98,13 +98,13 @@ defmodule Mix.Tasks.Pleroma.Security do

     orphaned_attachs = upload_search_orphaned_attachments(not_orphaned_urls)

-    shell_info("\nSearch concluded; here are the results:")
+    IO.puts("\nSearch concluded; here are the results:")
     pretty_print_list_with_title(emoji, "Emoji")
     pretty_print_list_with_title(files, "Uploaded Files")
     pretty_print_list_with_title(post_attachs, "(Not Deleted) Post Attachments")
     pretty_print_list_with_title(orphaned_attachs, "Orphaned Uploads")

-    shell_info("""
+    IO.puts("""
     In total found
     #{length(emoji)} emoji
     #{length(files)} uploads
@@ -116,7 +116,7 @@ defmodule Mix.Tasks.Pleroma.Security do
   defp uploads_search_spoofs_local_dir(dir) do
     local_dir = String.replace_suffix(dir, "/", "")

-    shell_info("Searching for suspicious files in #{local_dir}...")
+    IO.puts("Searching for suspicious files in #{local_dir}...")

     glob_ext = "{" <> Enum.join(@activity_exts, ",") <> "}"

@@ -128,7 +128,7 @@ defmodule Mix.Tasks.Pleroma.Security do
   end

   defp uploads_search_spoofs_notes() do
-    shell_info("Now querying DB for posts with spoofing attachments. This might take a while...")
+    IO.puts("Now querying DB for posts with spoofing attachments. This might take a while...")

     patterns = [local_id_pattern() | activity_ext_url_patterns()]

@@ -153,7 +153,7 @@ defmodule Mix.Tasks.Pleroma.Security do
   end

   defp upload_search_orphaned_attachments(not_orphaned_urls) do
-    shell_info("""
+    IO.puts("""
     Now querying DB for orphaned spoofing attachment (i.e. their post was deleted,
     but if :cleanup_attachments was not enabled traces remain in the database)
     This might take a bit...
@@ -184,7 +184,7 @@ defmodule Mix.Tasks.Pleroma.Security do
   # | S P O O F - I N S E R T E D |
   # +-----------------------------+
   defp do_spoof_inserted() do
-    shell_info("""
+    IO.puts("""
     Searching for local posts whose Create activity has no ActivityPub id...
     This is a pretty good indicator, but only for spoofs of local actors
     and only if the spoofing happened after around late 2021.
@@ -194,9 +194,9 @@ defmodule Mix.Tasks.Pleroma.Security do
       search_local_notes_without_create_id()
       |> Enum.sort()

-    shell_info("Done.\n")
+    IO.puts("Done.\n")

-    shell_info("""
+    IO.puts("""
     Now trying to weed out other poorly hidden spoofs.
     This can't detect all and may have some false positives.
     """)
@@ -207,9 +207,9 @@ defmodule Mix.Tasks.Pleroma.Security do
       search_sus_notes_by_id_patterns()
       |> Enum.filter(fn r -> !(r in likely_spoofed_posts_set) end)

-    shell_info("Done.\n")
+    IO.puts("Done.\n")

-    shell_info("""
+    IO.puts("""
     Finally, searching for spoofed, local user accounts.
     (It's impossible to detect spoofed remote users)
     """)
@@ -220,7 +220,7 @@ defmodule Mix.Tasks.Pleroma.Security do
     pretty_print_list_with_title(idless_create, "Likely Spoofed Posts")
     pretty_print_list_with_title(spoofed_users, "Spoofed local user accounts")

-    shell_info("""
+    IO.puts("""
     In total found:
     #{length(spoofed_users)} bogus users
     #{length(idless_create)} likely spoofed posts
@@ -289,27 +289,27 @@ defmodule Mix.Tasks.Pleroma.Security do
   defp pretty_print_list_with_title(list, title) do
     title_len = String.length(title)
     title_underline = String.duplicate("=", title_len)
-    shell_info(title)
-    shell_info(title_underline)
+    IO.puts(title)
+    IO.puts(title_underline)
     pretty_print_list(list)
   end

-  defp pretty_print_list([]), do: shell_info("")
+  defp pretty_print_list([]), do: IO.puts("")

   defp pretty_print_list([{a, o} | rest])
        when (is_binary(a) or is_number(a)) and is_binary(o) do
-    shell_info(" {#{a}, #{o}}")
+    IO.puts(" {#{a}, #{o}}")
     pretty_print_list(rest)
   end

   defp pretty_print_list([{u, a, o} | rest])
        when is_binary(a) and is_binary(u) and is_binary(o) do
-    shell_info(" {#{u}, #{a}, #{o}}")
+    IO.puts(" {#{u}, #{a}, #{o}}")
     pretty_print_list(rest)
   end

   defp pretty_print_list([e | rest]) when is_binary(e) do
-    shell_info(" #{e}")
+    IO.puts(" #{e}")
     pretty_print_list(rest)
   end

@@ -114,7 +114,7 @@ defmodule Mix.Tasks.Pleroma.User do
          {:ok, token} <- Pleroma.PasswordResetToken.create_token(user) do
       shell_info("Generated password reset token for #{user.nickname}")

-      shell_info("URL: #{~p[/api/v1/pleroma/password_reset/#{token.token}]}")
+      IO.puts("URL: #{~p[/api/v1/pleroma/password_reset/#{token.token}]}")
     else
       _ ->
         shell_error("No local user #{nickname}")
@@ -301,7 +301,7 @@ defmodule Mix.Tasks.Pleroma.User do
       shell_info("Generated user invite token " <> String.replace(invite.invite_type, "_", " "))

       url = url(~p[/registration/#{invite.token}])
-      shell_info(url)
+      IO.puts(url)
     else
       error ->
         shell_error("Could not create invite token: #{inspect(error)}")
@@ -373,7 +373,7 @@ defmodule Mix.Tasks.Pleroma.User do
       nickname
       |> User.get_cached_by_nickname()

-    shell_info(user)
+    shell_info("#{inspect(user)}")
   end

   def run(["send_confirmation", nickname]) do
@@ -457,7 +457,7 @@ defmodule Mix.Tasks.Pleroma.User do

     with %User{local: true} = user <- User.get_cached_by_nickname(nickname) do
       blocks = User.following_ap_ids(user)
-      shell_info(blocks)
+      IO.puts("#{inspect(blocks)}")
     end
   end

@@ -516,12 +516,12 @@ defmodule Mix.Tasks.Pleroma.User do
          {:follow_data, Pleroma.Web.ActivityPub.Utils.fetch_latest_follow(local, remote)} do
       calculated_state = User.following?(local, remote)

-      shell_info(
+      IO.puts(
         "Request state is #{request_state}, vs calculated state of following=#{calculated_state}"
       )

       if calculated_state == false && request_state == "accept" do
-        shell_info("Discrepancy found, fixing")
+        IO.puts("Discrepancy found, fixing")
         Pleroma.Web.CommonAPI.reject_follow_request(local, remote)
         shell_info("Relationship fixed")
       else
@@ -551,14 +551,14 @@ defmodule Mix.Tasks.Pleroma.User do
     |> Stream.each(fn users ->
       users
       |> Enum.each(fn user ->
-        shell_info("Re-Resolving: #{user.ap_id}")
+        IO.puts("Re-Resolving: #{user.ap_id}")

         with {:ok, user} <- Pleroma.User.fetch_by_ap_id(user.ap_id),
              changeset <- Pleroma.User.update_changeset(user),
              {:ok, _user} <- Pleroma.User.update_and_set_cache(changeset) do
           :ok
         else
          error -> shell_info("Could not resolve: #{user.ap_id}, #{inspect(error)}")
+          error -> IO.puts("Could not resolve: #{user.ap_id}, #{inspect(error)}")
         end
       end)
     end)
@@ -28,7 +28,7 @@ defmodule Pleroma.Activity.HTML do
     end
   end

-  def add_cache_key_for(activity_id, additional_key) do
+  defp add_cache_key_for(activity_id, additional_key) do
     current = get_cache_keys_for(activity_id)

     unless additional_key in current do
@@ -95,17 +95,34 @@ defmodule Pleroma.Application do

     opts = [strategy: :one_for_one, name: Pleroma.Supervisor, max_restarts: max_restarts]

-    case Supervisor.start_link(children, opts) do
-      {:ok, data} ->
+    with {:ok, data} <- Supervisor.start_link(children, opts) do
+      set_postgres_server_version()
       {:ok, data}
-
+    else
       e ->
-        Logger.critical("Failed to start!")
-        Logger.critical("#{inspect(e)}")
+        Logger.error("Failed to start!")
+        Logger.error("#{inspect(e)}")
         e
     end
   end

+  defp set_postgres_server_version do
+    version =
+      with %{rows: [[version]]} <- Ecto.Adapters.SQL.query!(Pleroma.Repo, "show server_version"),
+           {num, _} <- Float.parse(version) do
+        num
+      else
+        e ->
+          Logger.warning(
+            "Could not get the postgres version: #{inspect(e)}.\nSetting the default value of 9.6"
+          )
+
+          9.6
+      end
+
+    :persistent_term.put({Pleroma.Repo, :postgres_version}, version)
+  end
+
   def load_custom_modules do
     dir = Config.get([:modules, :runtime_dir])

@@ -264,9 +281,7 @@ defmodule Pleroma.Application do
   defp http_children do
     proxy_url = Config.get([:http, :proxy_url])
     proxy = Pleroma.HTTP.AdapterHelper.format_proxy(proxy_url)
-    pool_size = Config.get([:http, :pool_size], 10)
-    pool_timeout = Config.get([:http, :pool_timeout], 60_000)
-    connection_timeout = Config.get([:http, :conn_max_idle_time], 10_000)
+    pool_size = Config.get([:http, :pool_size])

     :public_key.cacerts_load()

@@ -276,8 +291,6 @@ defmodule Pleroma.Application do
       |> Pleroma.HTTP.AdapterHelper.add_pool_size(pool_size)
       |> Pleroma.HTTP.AdapterHelper.maybe_add_proxy_pool(proxy)
       |> Pleroma.HTTP.AdapterHelper.ensure_ipv6()
-      |> Pleroma.HTTP.AdapterHelper.add_default_conn_max_idle_time(connection_timeout)
-      |> Pleroma.HTTP.AdapterHelper.add_default_pool_max_idle_time(pool_timeout)
       |> Keyword.put(:name, MyFinch)

     [{Finch, config}]
@@ -24,6 +24,7 @@ defmodule Pleroma.Config.TransferTask do
   defp reboot_time_subkeys,
     do: [
       {:pleroma, Pleroma.Captcha, [:seconds_valid]},
      {:pleroma, Pleroma.Upload, [:proxy_remote]},
      {:pleroma, :instance, [:upload_limit]},
+      {:pleroma, :http, [:pool_size]},
      {:pleroma, :http, [:proxy_url]}
@@ -6,6 +6,8 @@ defmodule Pleroma.HTML do
   # Scrubbers are compiled on boot so they can be configured in OTP releases
   # @on_load :compile_scrubbers

+  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
+
   def compile_scrubbers do
     dir = Path.join(:code.priv_dir(:pleroma), "scrubbers")

@@ -65,9 +67,22 @@ defmodule Pleroma.HTML do
     end
   end

-  @spec extract_first_external_url_from_object(Pleroma.Object.t()) :: String.t() | nil
-  def extract_first_external_url_from_object(%{data: %{"content" => content}})
+  def extract_first_external_url_from_object(%{data: %{"content" => content}} = object)
       when is_binary(content) do
+    unless object.data["fake"] do
+      key = "URL|#{object.id}"
+
+      @cachex.fetch!(:scrubber_cache, key, fn _key ->
+        {:commit, {:ok, extract_first_external_url(content)}}
+      end)
+    else
+      {:ok, extract_first_external_url(content)}
+    end
+  end
+
+  def extract_first_external_url_from_object(_), do: {:error, :no_content}
+
+  def extract_first_external_url(content) do
     content
     |> Floki.parse_fragment!()
     |> Floki.find("a:not(.mention,.hashtag,.attachment,[rel~=\"tag\"])")
@@ -75,6 +90,4 @@ defmodule Pleroma.HTML do
     |> Floki.attribute("href")
     |> Enum.at(0)
   end
-
-  def extract_first_external_url_from_object(_), do: nil
 end
@@ -74,12 +74,7 @@ defmodule Pleroma.HTTP do
     request = build_request(method, headers, options, url, body, params)
     client = Tesla.client([Tesla.Middleware.FollowRedirects, Tesla.Middleware.Telemetry])

-    Logger.debug("Outbound: #{method} #{url}")
     request(client, request)
-  rescue
-    e ->
-      Logger.error("Failed to fetch #{url}: #{inspect(e)}")
-      {:error, :fetch_error}
   end

   @spec request(Client.t(), keyword()) :: {:ok, Env.t()} | {:error, any()}
@@ -116,20 +116,6 @@ defmodule Pleroma.HTTP.AdapterHelper do
     put_in(opts, [:pools, :default, :conn_opts, :transport_opts, :inet6], true)
   end

-  def add_default_pool_max_idle_time(opts, pool_timeout) do
-    opts
-    |> maybe_add_pools()
-    |> maybe_add_default_pool()
-    |> put_in([:pools, :default, :pool_max_idle_time], pool_timeout)
-  end
-
-  def add_default_conn_max_idle_time(opts, connection_timeout) do
-    opts
-    |> maybe_add_pools()
-    |> maybe_add_default_pool()
-    |> put_in([:pools, :default, :conn_max_idle_time], connection_timeout)
-  end
-
   @doc """
   Merge default connection & adapter options with received ones.
   """
@@ -21,12 +21,19 @@ defmodule Pleroma.Search.DatabaseSearch do
     offset = Keyword.get(options, :offset, 0)
     author = Keyword.get(options, :author)

+    search_function =
+      if :persistent_term.get({Pleroma.Repo, :postgres_version}) >= 11 do
+        :websearch
+      else
+        :plain
+      end
+
     try do
       Activity
       |> Activity.with_preloaded_object()
       |> Activity.restrict_deactivated_users()
       |> restrict_public()
-      |> query_with(index_type, search_query)
+      |> query_with(index_type, search_query, search_function)
       |> maybe_restrict_local(user)
       |> maybe_restrict_author(author)
       |> maybe_restrict_blocked(user)
@@ -65,7 +72,25 @@ defmodule Pleroma.Search.DatabaseSearch do
     )
   end

-  defp query_with(q, :gin, search_query) do
+  defp query_with(q, :gin, search_query, :plain) do
     %{rows: [[tsc]]} =
       Ecto.Adapters.SQL.query!(
         Pleroma.Repo,
+        "select current_setting('default_text_search_config')::regconfig::oid;"
+      )
+
+    from([a, o] in q,
+      where:
+        fragment(
+          "to_tsvector(?::oid::regconfig, ?->>'content') @@ plainto_tsquery(?)",
+          ^tsc,
+          o.data,
+          ^search_query
+        )
+    )
+  end
+
+  defp query_with(q, :gin, search_query, :websearch) do
+    %{rows: [[tsc]]} =
+      Ecto.Adapters.SQL.query!(
+        Pleroma.Repo,
@@ -83,7 +108,19 @@ defmodule Pleroma.Search.DatabaseSearch do
     )
   end

-  defp query_with(q, :rum, search_query) do
+  defp query_with(q, :rum, search_query, :plain) do
     from([a, o] in q,
       where:
         fragment(
+          "? @@ plainto_tsquery(?)",
+          o.fts_content,
+          ^search_query
+        ),
+      order_by: [fragment("? <=> now()::date", o.inserted_at)]
+    )
+  end
+
+  defp query_with(q, :rum, search_query, :websearch) do
+    from([a, o] in q,
+      where:
+        fragment(
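Both sides of the query_with/4 clauses above pick a tsquery flavour from a search_function argument, derived from the Postgres version that set_postgres_server_version/0 stores in :persistent_term at boot (see the Pleroma.Application hunk earlier). A minimal sketch of that gate, runnable in isolation with a stubbed version:

    # Normally written at boot from "show server_version"; stubbed here.
    :persistent_term.put({Pleroma.Repo, :postgres_version}, 13.4)

    # websearch_to_tsquery (Postgres 11+) understands quoted phrases and
    # negation, while plainto_tsquery simply ANDs the words together.
    search_function =
      if :persistent_term.get({Pleroma.Repo, :postgres_version}) >= 11 do
        :websearch
      else
        :plain
      end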
@@ -1624,12 +1624,8 @@ defmodule Pleroma.User do

   def blocks_user?(_, _), do: false

-  def blocks_domain?(%User{} = user, %User{ap_id: ap_id}) do
-    blocks_domain?(user, ap_id)
-  end
-
-  def blocks_domain?(%User{} = user, url) when is_binary(url) do
-    %{host: host} = URI.parse(url)
+  def blocks_domain?(%User{} = user, %User{} = target) do
+    %{host: host} = URI.parse(target.ap_id)
     Enum.member?(user.domain_blocks, host)
     # TODO: functionality should probably be changed such that subdomains block as well,
     # but as it stands, this just hecks up the relationships endpoint
@@ -155,7 +155,9 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
       # Splice in the child object if we have one.
       activity = Maps.put_if_present(activity, :object, object)

-      Pleroma.Web.RichMedia.Card.get_by_activity(activity)
+      ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
+        Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
+      end)

       # Add local posts to search index
       if local, do: Pleroma.Search.add_to_index(activity)
@@ -183,7 +185,7 @@ defmodule Pleroma.Web.ActivityPub.ActivityPub do
           id: "pleroma:fakeid"
         }

-        Pleroma.Web.RichMedia.Card.get_by_activity(activity)
+        Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
         {:ok, activity}

       {:remote_limit_pass, _} ->
@@ -233,7 +233,7 @@ defmodule Pleroma.Web.ActivityPub.MRF do
     if function_exported?(policy, :config_description, 0) do
       description =
         @default_description
-        |> Map.merge(policy.config_description())
+        |> Map.merge(policy.config_description)
         |> Map.put(:group, :pleroma)
         |> Map.put(:tab, :mrf)
         |> Map.put(:type, :group)
@@ -101,19 +101,10 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
     end
   end

-  defp get_int_header(headers, header_name, default \\ nil) do
-    with rawval when rawval != :undefined <- :proplists.get_value(header_name, headers),
-         {int, ""} <- Integer.parse(rawval) do
-      int
-    else
-      _ -> default
-    end
-  end
-
   defp is_remote_size_within_limit?(url) do
     with {:ok, %{status: status, headers: headers} = _response} when status in 200..299 <-
            Pleroma.HTTP.request(:head, url, nil, [], []) do
-      content_length = get_int_header(headers, "content-length")
+      content_length = :proplists.get_value("content-length", headers, nil)
       size_limit = Config.get([:mrf_steal_emoji, :size_limit], @size_limit)

       accept_unknown =
@@ -181,7 +172,7 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
             description: <<_::272, _::_*256>>,
             key: :hosts | :rejected_shortcodes | :size_limit,
             suggestions: [any(), ...],
-            type: {:list, :string} | {:list, :string} | :integer | :boolean
+            type: {:list, :string} | {:list, :string} | :integer
           },
           ...
         ],
@@ -218,12 +209,6 @@ defmodule Pleroma.Web.ActivityPub.MRF.StealEmojiPolicy do
         type: :integer,
         description: "File size limit (in bytes), checked before an emoji is saved to the disk",
         suggestions: ["100000"]
-      },
-      %{
-        key: :download_unknown_size,
-        type: :boolean,
-        description: "Whether to download emoji if size can't be determined ahead of time",
-        suggestions: [false, true]
       }
     ]
   }
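The removed lines are develop's get_int_header/3 helper, which guards against a missing or malformed Content-Length; the branch side still reads the raw header value directly, letting a binary leak into the later numeric comparison. A self-contained copy of the helper, for illustration:

    defmodule HeaderSketch do
      # Mirrors get_int_header/3 from the hunk above.
      def get_int_header(headers, header_name, default \\ nil) do
        with rawval when rawval != :undefined <- :proplists.get_value(header_name, headers),
             {int, ""} <- Integer.parse(rawval) do
          int
        else
          _ -> default
        end
      end
    end

    HeaderSketch.get_int_header([{"content-length", "1024"}], "content-length")
    #=> 1024
    HeaderSketch.get_int_header([], "content-length", 0)
    #=> 0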
@@ -225,7 +225,9 @@ defmodule Pleroma.Web.ActivityPub.SideEffects do
       end
     end

-    Pleroma.Web.RichMedia.Card.get_by_activity(activity)
+    ConcurrentLimiter.limit(Pleroma.Web.RichMedia.Helpers, fn ->
+      Task.start(fn -> Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity) end)
+    end)

     Pleroma.Search.add_to_index(Map.put(activity, :object, object))

@@ -22,13 +22,17 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
   alias Pleroma.Web.MediaProxy
   alias Pleroma.Web.PleromaAPI.EmojiReactionController
   require Logger
-  alias Pleroma.Web.RichMedia.Card

   import Pleroma.Web.ActivityPub.Visibility, only: [get_visibility: 1, visible_for_user?: 2]

+  # This is a naive way to do this, just spawning a process per activity
+  # to fetch the preview. However it should be fine considering
+  # pagination is restricted to 40 activities at a time
   defp fetch_rich_media_for_activities(activities) do
     Enum.each(activities, fn activity ->
-      Card.get_by_activity(activity)
+      spawn(fn ->
+        Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity)
+      end)
     end)
   end

@@ -89,7 +93,9 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
     # To do: check AdminAPIControllerTest on the reasons behind nil activities in the list
     activities = Enum.filter(opts.activities, & &1)

-    # Start prefetching rich media before doing anything else
+    # Start fetching rich media before doing anything else, so that later calls to get the cards
+    # only block for timeout in the worst case, as opposed to
+    # length(activities_with_links) * timeout
     fetch_rich_media_for_activities(activities)
     replied_to_activities = get_replied_to_activities(activities)

@@ -303,12 +309,6 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
         "mastoapi:content:#{chrono_order}"
       )

-    card =
-      case Card.get_by_activity(activity) do
-        %Card{} = result -> render("card.json", result)
-        _ -> nil
-      end
-
     content_plaintext =
       content
       |> Activity.HTML.get_cached_stripped_html_for_activity(
@@ -318,6 +318,8 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do

     summary = object.data["summary"] || ""

+    card = render("card.json", Pleroma.Web.RichMedia.Helpers.fetch_data_for_activity(activity))
+
     url =
       if user.local do
         url(~p[/notice/#{activity}])
@@ -526,30 +528,37 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
     }
   end

-  def render("card.json", %Card{fields: rich_media}) do
-    page_url_data = URI.parse(rich_media["url"])
+  def render("card.json", %{rich_media: rich_media, page_url: page_url}) do
+    page_url_data = URI.parse(page_url)
+
+    page_url_data =
+      if is_binary(rich_media["url"]) do
+        URI.merge(page_url_data, URI.parse(rich_media["url"]))
+      else
+        page_url_data
+      end

     page_url = page_url_data |> to_string

-    image_url = proxied_url(rich_media["image"], page_url_data)
-    audio_url = proxied_url(rich_media["audio"], page_url_data)
-    video_url = proxied_url(rich_media["video"], page_url_data)
+    image_url_data =
+      if is_binary(rich_media["image"]) do
+        URI.parse(rich_media["image"])
+      else
+        nil
+      end
+
+    image_url = build_image_url(image_url_data, page_url_data)

     %{
       type: "link",
       provider_name: page_url_data.host,
       provider_url: page_url_data.scheme <> "://" <> page_url_data.host,
       url: page_url,
-      image: image_url,
-      image_description: rich_media["image:alt"] || "",
+      image: image_url |> MediaProxy.url(),
       title: rich_media["title"] || "",
       description: rich_media["description"] || "",
       pleroma: %{
-        opengraph:
-          rich_media
-          |> Maps.put_if_present("image", image_url)
-          |> Maps.put_if_present("audio", audio_url)
-          |> Maps.put_if_present("video", video_url)
+        opengraph: rich_media
       }
     }
   end
@@ -627,14 +636,6 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do
     }
   end

-  defp proxied_url(url, page_url_data) do
-    if is_binary(url) do
-      build_image_url(URI.parse(url), page_url_data) |> MediaProxy.url()
-    else
-      nil
-    end
-  end
-
   def get_reply_to(activity, %{replied_to_activities: replied_to_activities}) do
     object = Object.normalize(activity, fetch: false)

@@ -739,7 +740,19 @@ defmodule Pleroma.Web.MastodonAPI.StatusView do

   defp build_application(_), do: nil

+  # Workaround for Elixir issue #10771
+  # Avoid applying URI.merge unless necessary
+  # TODO: revert to always attempting URI.merge(image_url_data, page_url_data)
+  # when Elixir 1.12 is the minimum supported version
   @spec build_image_url(struct() | nil, struct()) :: String.t() | nil
+  defp build_image_url(
+         %URI{scheme: image_scheme, host: image_host} = image_url_data,
+         %URI{} = _page_url_data
+       )
+       when not is_nil(image_scheme) and not is_nil(image_host) do
+    image_url_data |> to_string
+  end
+
   defp build_image_url(%URI{} = image_url_data, %URI{} = page_url_data) do
     URI.merge(page_url_data, image_url_data) |> to_string
   end
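The build_image_url/2 clauses at the end of this hunk only fall through to URI.merge/2 when the image URL is not already absolute; the inline comment marks this as a workaround for Elixir issue #10771, to be reverted once Elixir 1.12 is the minimum supported version. The merge itself resolves a relative image path against the page URL:

    page = URI.parse("https://example.com/posts/1")
    URI.merge(page, URI.parse("/img/cover.png")) |> to_string()
    #=> "https://example.com/img/cover.png"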
@@ -18,8 +18,6 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
   @timeout :timer.seconds(60)
   # Hibernate every X messages
   @hibernate_every 100
-  # Tune garabge collect for long-lived websocket process
-  @fullsweep_after 20

   def init(%{qs: qs} = req, state) do
     with params <- Enum.into(:cow_qs.parse_qs(qs), %{}),
@@ -61,10 +59,6 @@ defmodule Pleroma.Web.MastodonAPI.WebsocketHandler do
       "#{__MODULE__} accepted websocket connection for user #{(state.user || %{id: "anonymous"}).id}, topic #{state.topic}"
     )

-    # process is long-lived and can sometimes accumulate stale data in such a way it's
-    # not freed by young garbage cycles, thus make full collection sweeps more frequent
-    :erlang.process_flag(:fullsweep_after, @fullsweep_after)
-
     Streamer.add_socket(state.topic, state.oauth_token)
     {:ok, %{state | timer: timer()}}
   end
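The removed lines had tuned garbage collection for these long-lived websocket processes: :fullsweep_after caps how many generational collections may run before a full sweep, trading a little CPU for promptly reclaiming stale data. The flag is per-process and is set from inside the process itself:

    pid =
      spawn(fn ->
        # The develop-side tuning removed above: full sweep every 20 minor GCs.
        :erlang.process_flag(:fullsweep_after, 20)

        receive do
          :stop -> :ok
        end
      end)

    send(pid, :stop)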
@@ -52,14 +52,6 @@ defmodule Pleroma.Web.PleromaAPI.EmojiReactionController do
     end)
   end

-  defp filter_allowed_users_by_domain(ap_ids, %User{} = for_user) do
-    Enum.reject(ap_ids, fn ap_id ->
-      User.blocks_domain?(for_user, ap_id)
-    end)
-  end
-
-  defp filter_allowed_users_by_domain(ap_ids, nil), do: ap_ids
-
   def filter_allowed_users(reactions, user, with_muted) do
     exclude_ap_ids =
       if is_nil(user) do
@@ -70,10 +62,7 @@ defmodule Pleroma.Web.PleromaAPI.EmojiReactionController do
       end

     filter_emoji = fn emoji, users, url ->
-      users
-      |> filter_allowed_user_by_ap_id(exclude_ap_ids)
-      |> filter_allowed_users_by_domain(user)
-      |> case do
+      case filter_allowed_user_by_ap_id(users, exclude_ap_ids) do
         [] -> nil
         users -> {emoji, users, url}
       end
@@ -44,26 +44,6 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do

   def route_aliases(_), do: []

-  def maybe_put_created_psudoheader(conn) do
-    case HTTPSignatures.signature_for_conn(conn) do
-      %{"created" => created} ->
-        put_req_header(conn, "(created)", created)
-
-      _ ->
-        conn
-    end
-  end
-
-  def maybe_put_expires_psudoheader(conn) do
-    case HTTPSignatures.signature_for_conn(conn) do
-      %{"expires" => expires} ->
-        put_req_header(conn, "(expires)", expires)
-
-      _ ->
-        conn
-    end
-  end
-
   defp assign_valid_signature_on_route_aliases(conn, []), do: conn

   defp assign_valid_signature_on_route_aliases(%{assigns: %{valid_signature: true}} = conn, _),
@@ -75,8 +55,6 @@ defmodule Pleroma.Web.Plugs.HTTPSignaturePlug do
     conn =
       conn
       |> put_req_header("(request-target)", request_target)
-      |> maybe_put_created_psudoheader()
-      |> maybe_put_expires_psudoheader()
       |> case do
        %{assigns: %{digest: digest}} = conn -> put_req_header(conn, "digest", digest)
        conn -> conn
@@ -1,98 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.Backfill do
-  use Pleroma.Workers.WorkerHelper,
-    queue: "rich_media_backfill",
-    unique: [period: 300, states: Oban.Job.states(), keys: [:op, :url_hash]]
-
-  alias Pleroma.Web.RichMedia.Card
-  alias Pleroma.Web.RichMedia.Parser
-  alias Pleroma.Web.RichMedia.Parser.TTL
-  alias Pleroma.Workers.RichMediaExpirationWorker
-
-  require Logger
-
-  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
-
-  def start(%{url: url} = args) when is_binary(url) do
-    url_hash = Card.url_to_hash(url)
-
-    args =
-      args
-      |> Map.put(:url_hash, url_hash)
-
-    __MODULE__.enqueue("rich_media_backfill", args)
-  end
-
-  def perform(%Oban.Job{args: %{"op" => "rich_media_backfill", "url" => url} = args})
-      when is_binary(url) do
-    run(args)
-  end
-
-  def run(%{"url" => url, "url_hash" => url_hash} = args) do
-    case Parser.parse(url) do
-      {:ok, fields} ->
-        {:ok, card} = Card.create(url, fields)
-
-        maybe_schedule_expiration(url, fields)
-
-        if Map.has_key?(args, "activity_id") do
-          stream_update(args)
-        end
-
-        warm_cache(url_hash, card)
-        :ok
-
-      {:error, {:invalid_metadata, fields}} ->
-        Logger.debug("Rich media incomplete or invalid metadata for #{url}: #{inspect(fields)}")
-        negative_cache(url_hash, :timer.minutes(30))
-
-      {:error, :body_too_large} ->
-        Logger.error("Rich media error for #{url}: :body_too_large")
-        negative_cache(url_hash, :timer.minutes(30))
-
-      {:error, {:content_type, type}} ->
-        Logger.debug("Rich media error for #{url}: :content_type is #{type}")
-        negative_cache(url_hash, :timer.minutes(30))
-
-      e ->
-        Logger.debug("Rich media error for #{url}: #{inspect(e)}")
-        {:error, e}
-    end
-  end
-
-  def run(e) do
-    Logger.error("Rich media failure - invalid args: #{inspect(e)}")
-    {:discard, :invalid}
-  end
-
-  defp maybe_schedule_expiration(url, fields) do
-    case TTL.process(fields, url) do
-      {:ok, ttl} when is_number(ttl) ->
-        timestamp = DateTime.from_unix!(ttl)
-
-        RichMediaExpirationWorker.new(%{"url" => url}, scheduled_at: timestamp)
-        |> Oban.insert()
-
-      _ ->
-        :ok
-    end
-  end
-
-  defp stream_update(%{"activity_id" => activity_id}) do
-    Logger.info("Rich media backfill: streaming update for activity #{activity_id}")
-
-    Pleroma.Activity.get_by_id(activity_id)
-    |> Pleroma.Activity.normalize()
-    |> Pleroma.Web.ActivityPub.ActivityPub.stream_out()
-  end
-
-  defp warm_cache(key, val), do: @cachex.put(:rich_media_cache, key, val)
-
-  def negative_cache(key, ttl \\ :timer.minutes(30)) do
-    @cachex.put(:rich_media_cache, key, nil, ttl: ttl)
-    {:discard, :error}
-  end
-end
@@ -1,149 +0,0 @@
-defmodule Pleroma.Web.RichMedia.Card do
-  use Ecto.Schema
-  import Ecto.Changeset
-  import Ecto.Query
-
-  alias Pleroma.Activity
-  alias Pleroma.HTML
-  alias Pleroma.Object
-  alias Pleroma.Repo
-  alias Pleroma.Web.RichMedia.Backfill
-  alias Pleroma.Web.RichMedia.Parser
-
-  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)
-  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
-
-  @type t :: %__MODULE__{}
-
-  schema "rich_media_card" do
-    field(:url_hash, :binary)
-    field(:fields, :map)
-
-    timestamps()
-  end
-
-  @doc false
-  def changeset(card, attrs) do
-    card
-    |> cast(attrs, [:url_hash, :fields])
-    |> validate_required([:url_hash, :fields])
-    |> unique_constraint(:url_hash)
-  end
-
-  @spec create(String.t(), map()) :: {:ok, t()}
-  def create(url, fields) do
-    url_hash = url_to_hash(url)
-
-    fields = Map.put_new(fields, "url", url)
-
-    %__MODULE__{}
-    |> changeset(%{url_hash: url_hash, fields: fields})
-    |> Repo.insert(on_conflict: {:replace, [:fields]}, conflict_target: :url_hash)
-  end
-
-  @spec delete(String.t()) :: {:ok, Ecto.Schema.t()} | {:error, Ecto.Changeset.t()} | :ok
-  def delete(url) do
-    url_hash = url_to_hash(url)
-    @cachex.del(:rich_media_cache, url_hash)
-
-    case get_by_url(url) do
-      %__MODULE__{} = card -> Repo.delete(card)
-      nil -> :ok
-    end
-  end
-
-  @spec get_by_url(String.t() | nil) :: t() | nil | :error
-  def get_by_url(url) when is_binary(url) do
-    if @config_impl.get([:rich_media, :enabled]) do
-      url_hash = url_to_hash(url)
-
-      @cachex.fetch!(:rich_media_cache, url_hash, fn _ ->
-        result =
-          __MODULE__
-          |> where(url_hash: ^url_hash)
-          |> Repo.one()
-
-        case result do
-          %__MODULE__{} = card -> {:commit, card}
-          _ -> {:ignore, nil}
-        end
-      end)
-    else
-      :error
-    end
-  end
-
-  def get_by_url(nil), do: nil
-
-  @spec get_or_backfill_by_url(String.t(), map()) :: t() | nil
-  def get_or_backfill_by_url(url, backfill_opts \\ %{}) do
-    case get_by_url(url) do
-      %__MODULE__{} = card ->
-        card
-
-      nil ->
-        backfill_opts = Map.put(backfill_opts, :url, url)
-
-        Backfill.start(backfill_opts)
-
-        nil
-
-      :error ->
-        nil
-    end
-  end
-
-  @spec get_by_activity(Activity.t()) :: t() | nil | :error
-  # Fake/Draft activity
-  def get_by_activity(%Activity{id: "pleroma:fakeid"} = activity) do
-    with %Object{} = object <- Object.normalize(activity, fetch: false),
-         url when not is_nil(url) <- HTML.extract_first_external_url_from_object(object) do
-      case get_by_url(url) do
-        # Cache hit
-        %__MODULE__{} = card ->
-          card
-
-        # Cache miss, but fetch for rendering the Draft
-        _ ->
-          with {:ok, fields} <- Parser.parse(url),
-               {:ok, card} <- create(url, fields) do
-            card
-          else
-            _ -> nil
-          end
-      end
-    else
-      _ ->
-        nil
-    end
-  end
-
-  def get_by_activity(activity) do
-    with %Object{} = object <- Object.normalize(activity, fetch: false),
-         {_, nil} <- {:cached, get_cached_url(object, activity.id)} do
-      nil
-    else
-      {:cached, url} ->
-        get_or_backfill_by_url(url, %{activity_id: activity.id})
-
-      _ ->
-        :error
-    end
-  end
-
-  @spec url_to_hash(String.t()) :: String.t()
-  def url_to_hash(url) do
-    :crypto.hash(:sha256, url) |> Base.encode16(case: :lower)
-  end
-
-  defp get_cached_url(object, activity_id) do
-    key = "URL|#{activity_id}"
-
-    @cachex.fetch!(:scrubber_cache, key, fn _ ->
-      url = HTML.extract_first_external_url_from_object(object)
-      Activity.HTML.add_cache_key_for(activity_id, key)
-
-      {:commit, url}
-    end)
-  end
-end
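The deleted Card schema keys both its database rows and the Cachex entries on a SHA-256 of the page URL; url_to_hash/1 above is self-contained and can be reproduced directly:

    url = "https://example.com/article"
    url_hash = :crypto.hash(:sha256, url) |> Base.encode16(case: :lower)
    # a 64-character lowercase hex digest, stable across nodes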
@@ -3,13 +3,85 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.RichMedia.Helpers do
+  alias Pleroma.Activity
   alias Pleroma.Config
+  alias Pleroma.HTML
+  alias Pleroma.Object
+  alias Pleroma.Web.RichMedia.Parser
+
+  @options [
+    max_body: 2_000_000,
+    receive_timeout: 2_000
+  ]
+
+  @spec validate_page_url(URI.t() | binary()) :: :ok | :error
+  defp validate_page_url(page_url) when is_binary(page_url) do
+    validate_tld = Config.get([Pleroma.Formatter, :validate_tld])
+
+    page_url
+    |> Linkify.Parser.url?(validate_tld: validate_tld)
+    |> parse_uri(page_url)
+  end
+
+  defp validate_page_url(%URI{host: host, scheme: "https", authority: authority})
+       when is_binary(authority) do
+    cond do
+      host in Config.get([:rich_media, :ignore_hosts], []) ->
+        :error
+
+      get_tld(host) in Config.get([:rich_media, :ignore_tld], []) ->
+        :error
+
+      true ->
+        :ok
+    end
+  end
+
+  defp validate_page_url(_), do: :error
+
+  defp parse_uri(true, url) do
+    url
+    |> URI.parse()
+    |> validate_page_url
+  end
+
+  defp parse_uri(_, _), do: :error
+
+  defp get_tld(host) do
+    host
+    |> String.split(".")
+    |> Enum.reverse()
+    |> hd
+  end
+
+  def fetch_data_for_object(object) do
+    with true <- Config.get([:rich_media, :enabled]),
+         {:ok, page_url} <-
+           HTML.extract_first_external_url_from_object(object),
+         :ok <- validate_page_url(page_url),
+         {:ok, rich_media} <- Parser.parse(page_url) do
+      %{page_url: page_url, rich_media: rich_media}
+    else
+      _ -> %{}
+    end
+  end
+
+  def fetch_data_for_activity(%Activity{data: %{"type" => "Create"}} = activity) do
+    with true <- Config.get([:rich_media, :enabled]),
+         %Object{} = object <- Object.normalize(activity, fetch: false) do
+      fetch_data_for_object(object)
+    else
+      _ -> %{}
+    end
+  end
+
+  def fetch_data_for_activity(_), do: %{}

   def rich_media_get(url) do
     headers = [{"user-agent", Pleroma.Application.user_agent() <> "; Bot"}]

     head_check =
-      case Pleroma.HTTP.head(url, headers, http_options()) do
+      case Pleroma.HTTP.head(url, headers, @options) do
         # If the HEAD request didn't reach the server for whatever reason,
         # we assume the GET that comes right after won't either
         {:error, _} = e ->
@@ -24,7 +96,7 @@ defmodule Pleroma.Web.RichMedia.Helpers do
          :ok
       end

-    with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, http_options())
+    with :ok <- head_check, do: Pleroma.HTTP.get(url, headers, @options)
   end

   defp check_content_type(headers) do
@@ -40,13 +112,12 @@ defmodule Pleroma.Web.RichMedia.Helpers do
     end
   end

+  @max_body @options[:max_body]
   defp check_content_length(headers) do
-    max_body = Keyword.get(http_options(), :max_body)
-
     case List.keyfind(headers, "content-length", 0) do
       {_, maybe_content_length} ->
         case Integer.parse(maybe_content_length) do
-          {content_length, ""} when content_length <= max_body -> :ok
+          {content_length, ""} when content_length <= @max_body -> :ok
           {_, ""} -> {:error, :body_too_large}
           _ -> :ok
         end
@@ -55,11 +126,4 @@ defmodule Pleroma.Web.RichMedia.Helpers do
       :ok
     end
   end
-
-  defp http_options do
-    [
-      pool: :media,
-      max_body: Config.get([:rich_media, :max_body], 5_000_000)
-    ]
-  end
 end
@@ -1,41 +1,161 @@
 # Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.RichMedia.Parser do
   require Logger

-  @config_impl Application.compile_env(:pleroma, [__MODULE__, :config_impl], Pleroma.Config)
+  @cachex Pleroma.Config.get([:cachex, :provider], Cachex)

   defp parsers do
     Pleroma.Config.get([:rich_media, :parsers])
   end

-  def parse(nil), do: nil
+  def parse(nil), do: {:error, "No URL provided"}

+  if Pleroma.Config.get(:env) == :test do
+    @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
+    def parse(url), do: parse_with_timeout(url)
+  else
     @spec parse(String.t()) :: {:ok, map()} | {:error, any()}
     def parse(url) do
-      with {_, true} <- {:config, @config_impl.get([:rich_media, :enabled])},
-           :ok <- validate_page_url(url),
-           {:ok, data} <- parse_url(url) do
-        data = Map.put(data, "url", url)
+      with {:ok, data} <- get_cached_or_parse(url),
+           {:ok, _} <- set_ttl_based_on_image(data, url) do
         {:ok, data}
-      else
-        {:config, _} -> {:error, :rich_media_disabled}
-        e -> e
       end
     end

-  defp parse_url(url) do
+    defp get_cached_or_parse(url) do
+      case @cachex.fetch(:rich_media_cache, url, fn ->
+             case parse_with_timeout(url) do
+               {:ok, _} = res ->
+                 {:commit, res}
+
+               {:error, reason} = e ->
+                 # Unfortunately we have to log errors here, instead of doing that
+                 # along with ttl setting at the bottom. Otherwise we can get log spam
+                 # if more than one process was waiting for the rich media card
+                 # while it was generated. Ideally we would set ttl here as well,
+                 # so we don't override it number_of_waiters_on_generation
+                 # times, but one, obviously, can't set ttl for not-yet-created entry
+                 # and Cachex doesn't support returning ttl from the fetch callback.
+                 log_error(url, reason)
+                 {:commit, e}
+             end
+           end) do
+        {action, res} when action in [:commit, :ok] ->
+          case res do
+            {:ok, _data} = res ->
+              res
+
+            {:error, reason} = e ->
+              if action == :commit, do: set_error_ttl(url, reason)
+              e
+          end
+
+        {:error, e} ->
+          {:error, {:cachex_error, e}}
+      end
+    end
+
+    defp set_error_ttl(_url, :body_too_large), do: :ok
+    defp set_error_ttl(_url, {:content_type, _}), do: :ok
+
+    # The TTL is not set for the errors above, since they are unlikely to change
+    # with time
+
+    defp set_error_ttl(url, _reason) do
+      ttl = Pleroma.Config.get([:rich_media, :failure_backoff], 60_000)
+      @cachex.expire(:rich_media_cache, url, ttl)
+      :ok
+    end
+
+    defp log_error(url, {:invalid_metadata, data}) do
+      Logger.debug(fn -> "Incomplete or invalid metadata for #{url}: #{inspect(data)}" end)
+    end
+
+    defp log_error(url, reason) do
+      Logger.warning(fn -> "Rich media error for #{url}: #{inspect(reason)}" end)
+    end
+  end
+
+  @doc """
+  Set the rich media cache based on the expiration time of image.
+
+  Adopt behaviour `Pleroma.Web.RichMedia.Parser.TTL`
+
+  ## Example
+
+      defmodule MyModule do
+        @behaviour Pleroma.Web.RichMedia.Parser.TTL
+        def ttl(data, url) do
+          image_url = Map.get(data, :image)
+          # do some parsing in the url and get the ttl of the image
+          # and return ttl is unix time
+          parse_ttl_from_url(image_url)
+        end
+      end
+
+  Define the module in the config
+
+      config :pleroma, :rich_media,
+        ttl_setters: [MyModule]
+  """
+  @spec set_ttl_based_on_image(map(), String.t()) ::
+          {:ok, Integer.t() | :noop} | {:error, :no_key}
+  def set_ttl_based_on_image(data, url) do
+    case get_ttl_from_image(data, url) do
+      {:ok, ttl} when is_number(ttl) ->
+        ttl = ttl * 1000
+
+        case @cachex.expire_at(:rich_media_cache, url, ttl) do
+          {:ok, true} -> {:ok, ttl}
+          {:ok, false} -> {:error, :no_key}
+        end
+
+      _ ->
+        {:ok, :noop}
+    end
+  end
+
+  defp get_ttl_from_image(data, url) do
+    [:rich_media, :ttl_setters]
+    |> Pleroma.Config.get()
+    |> Enum.reduce({:ok, nil}, fn
+      module, {:ok, _ttl} ->
+        module.ttl(data, url)
+
+      _, error ->
+        error
+    end)
+  end
+
+  def parse_url(url) do
     with {:ok, %Tesla.Env{body: html}} <- Pleroma.Web.RichMedia.Helpers.rich_media_get(url),
          {:ok, html} <- Floki.parse_document(html) do
       html
       |> maybe_parse()
+      |> Map.put("url", url)
       |> clean_parsed_data()
       |> check_parsed_data()
     end
   end

+  def parse_with_timeout(url) do
+    try do
+      task =
+        Task.Supervisor.async_nolink(Pleroma.TaskSupervisor, fn ->
+          parse_url(url)
+        end)
+
+      Task.await(task, 5000)
+    catch
+      :exit, {:timeout, _} ->
+        Logger.warning("Timeout while fetching rich media for #{url}")
+        {:error, :timeout}
+    end
+  end
+
   defp maybe_parse(html) do
     Enum.reduce_while(parsers(), %{}, fn parser, acc ->
       case parser.parse(html, acc) do
@@ -61,46 +181,4 @@ defmodule Pleroma.Web.RichMedia.Parser do
     end)
     |> Map.new()
   end
-
-  @spec validate_page_url(URI.t() | binary()) :: :ok | :error
-  defp validate_page_url(page_url) when is_binary(page_url) do
-    validate_tld = @config_impl.get([Pleroma.Formatter, :validate_tld])
-
-    page_url
-    |> Linkify.Parser.url?(validate_tld: validate_tld)
-    |> parse_uri(page_url)
-  end
-
-  defp validate_page_url(%URI{host: host, scheme: "https"}) do
-    cond do
-      Linkify.Parser.ip?(host) ->
-        :error
-
-      host in @config_impl.get([:rich_media, :ignore_hosts], []) ->
-        :error
-
-      get_tld(host) in @config_impl.get([:rich_media, :ignore_tld], []) ->
-        :error
-
-      true ->
-        :ok
-    end
-  end
-
-  defp validate_page_url(_), do: :error
-
-  defp parse_uri(true, url) do
-    url
-    |> URI.parse()
-    |> validate_page_url
-  end
-
-  defp parse_uri(_, _), do: :error
-
-  defp get_tld(host) do
-    host
-    |> String.split(".")
-    |> Enum.reverse()
-    |> hd
-  end
 end
@@ -3,18 +3,5 @@
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.RichMedia.Parser.TTL do
-  @callback ttl(map(), String.t()) :: integer() | nil
-
-  @spec process(map(), String.t()) :: {:ok, integer() | nil}
-  def process(data, url) do
-    [:rich_media, :ttl_setters]
-    |> Pleroma.Config.get()
-    |> Enum.reduce_while({:ok, nil}, fn
-      module, acc ->
-        case module.ttl(data, url) do
-          ttl when is_number(ttl) -> {:halt, {:ok, ttl}}
-          _ -> {:cont, acc}
-        end
-    end)
-  end
+  @callback ttl(Map.t(), String.t()) :: Integer.t() | nil
 end
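The removed process/2 tries each configured TTL setter in order and halts on the first one returning a number. The same first-match semantics, sketched with plain anonymous functions standing in for setter modules:

    setters = [fn _data, _url -> nil end, fn _data, _url -> 1_700_000_000 end]

    Enum.reduce_while(setters, {:ok, nil}, fn setter, acc ->
      case setter.(%{}, "https://example.com") do
        ttl when is_number(ttl) -> {:halt, {:ok, ttl}}
        _ -> {:cont, acc}
      end
    end)
    #=> {:ok, 1700000000}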
@@ -1,5 +1,5 @@
 # Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
@@ -7,26 +7,25 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do

   @impl true
   def ttl(data, _url) do
-    image = Map.get(data, "image")
+    image = Map.get(data, :image)

-    if aws_signed_url?(image) do
+    if is_aws_signed_url(image) do
       image
       |> parse_query_params()
       |> format_query_params()
       |> get_expiration_timestamp()
     else
-      nil
+      {:error, "Not aws signed url #{inspect(image)}"}
     end
   end

-  defp aws_signed_url?(image) when is_binary(image) and image != "" do
+  defp is_aws_signed_url(image) when is_binary(image) and image != "" do
     %URI{host: host, query: query} = URI.parse(image)

-    is_binary(host) and String.contains?(host, "amazonaws.com") and
-      is_binary(query) and String.contains?(query, "X-Amz-Expires")
+    String.contains?(host, "amazonaws.com") and String.contains?(query, "X-Amz-Expires")
   end

-  defp aws_signed_url?(_), do: nil
+  defp is_aws_signed_url(_), do: nil

   defp parse_query_params(image) do
     %URI{query: query} = URI.parse(image)
@@ -46,6 +45,6 @@ defmodule Pleroma.Web.RichMedia.Parser.TTL.AwsSignedUrl do
     |> Map.get("X-Amz-Date")
     |> Timex.parse("{ISO:Basic:Z}")

-    Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))
+    {:ok, Timex.to_unix(date) + String.to_integer(Map.get(params, "X-Amz-Expires"))}
   end
 end
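Both sides compute the expiry as the X-Amz-Date signing time plus X-Amz-Expires seconds; develop returns the bare integer while the branch wraps it in {:ok, _}. Worked through with Timex, which the project already depends on, using the same "{ISO:Basic:Z}" format string as above:

    {:ok, date} = Timex.parse("20240401T120000Z", "{ISO:Basic:Z}")
    Timex.to_unix(date) + String.to_integer("900")
    #=> 1711972800 + 900 = 1711973700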
@@ -1,20 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Web.RichMedia.Parser.TTL.Opengraph do
-  @behaviour Pleroma.Web.RichMedia.Parser.TTL
-
-  @impl true
-  def ttl(%{"ttl" => ttl_string}, _url) when is_binary(ttl_string) do
-    try do
-      ttl = String.to_integer(ttl_string)
-      now = DateTime.utc_now() |> DateTime.to_unix()
-      now + ttl
-    rescue
-      _ -> nil
-    end
-  end
-
-  def ttl(_, _), do: nil
-end
@@ -1,5 +1,5 @@
 # Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
+# Copyright © 2017-2021 Pleroma Authors <https://pleroma.social/>
 # SPDX-License-Identifier: AGPL-3.0-only

 defmodule Pleroma.Web.RichMedia.Parsers.OEmbed do
@@ -9,13 +9,13 @@
   xmlns:ostatus="http://ostatus.org/schema/1.0"
   xmlns:statusnet="http://status.net/schema/api/1/">

-  <id><%= "#{url(~p"/tags/#{@tag}")}.rss" %></id>
+  <id><%= '#{url(~p"/tags/#{@tag}")}.rss' %></id>
   <title>#<%= @tag %></title>

   <subtitle><%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %></subtitle>
   <logo><%= feed_logo() %></logo>
   <updated><%= most_recent_update(@activities) %></updated>
-  <link rel="self" href="<%= "#{url(~p"/tags/#{@tag}")}.atom" %>" type="application/atom+xml"/>
+  <link rel="self" href="<%= '#{url(~p"/tags/#{@tag}")}.atom' %>" type="application/atom+xml"/>
   <%= for activity <- @activities do %>
     <%= render @view_module, "_tag_activity.atom", Map.merge(assigns, prepare_activity(activity, actor: true)) %>
   <% end %>

@@ -5,7 +5,7 @@

   <title>#<%= @tag %></title>
   <description><%= Gettext.dpgettext("static_pages", "tag feed description", "These are public toots tagged with #%{tag}. You can interact with them if you have an account anywhere in the fediverse.", tag: @tag) %></description>
-  <link><%= "#{url(~p"/tags/#{@tag}")}.rss" %></link>
+  <link><%= '#{url(~p"/tags/#{@tag}")}.rss' %></link>
   <webfeeds:logo><%= feed_logo() %></webfeeds:logo>
   <webfeeds:accentColor>2b90d9</webfeeds:accentColor>
   <%= for activity <- @activities do %>

@@ -10,12 +10,12 @@
   <title><%= @user.nickname <> "'s timeline" %></title>
   <updated><%= most_recent_update(@activities, @user) %></updated>
   <logo><%= logo(@user) %></logo>
-  <link rel="self" href="<%= "#{url(~p"/users/#{@user.nickname}/feed")}.atom" %>" type="application/atom+xml"/>
+  <link rel="self" href="<%= '#{url(~p"/users/#{@user.nickname}/feed")}.atom' %>" type="application/atom+xml"/>

   <%= render @view_module, "_author.atom", assigns %>

   <%= if last_activity(@activities) do %>
-    <link rel="next" href="<%= "#{url(~p"/users/#{@user.nickname}/feed")}.atom?max_id=#{last_activity(@activities).id}" %>" type="application/atom+xml"/>
+    <link rel="next" href="<%= '#{url(~p"/users/#{@user.nickname}/feed")}.atom?max_id=#{last_activity(@activities).id}' %>" type="application/atom+xml"/>
   <% end %>

   <%= for activity <- @activities do %>

@@ -5,12 +5,12 @@
   <title><%= @user.nickname <> "'s timeline" %></title>
   <updated><%= most_recent_update(@activities, @user) %></updated>
   <image><%= logo(@user) %></image>
-  <link><%= "#{url(~p"/users/#{@user.nickname}/feed")}.rss" %></link>
+  <link><%= '#{url(~p"/users/#{@user.nickname}/feed")}.rss' %></link>

   <%= render @view_module, "_author.rss", assigns %>

   <%= if last_activity(@activities) do %>
-    <link rel="next"><%= "#{url(~p"/users/#{@user.nickname}/feed")}.rss?max_id=#{last_activity(@activities).id}" %></link>
+    <link rel="next"><%= '#{url(~p"/users/#{@user.nickname}/feed")}.rss?max_id=#{last_activity(@activities).id}' %></link>
   <% end %>

   <%= for activity <- @activities do %>
@@ -1,15 +0,0 @@
-# Pleroma: A lightweight social networking server
-# Copyright © 2017-2022 Pleroma Authors <https://pleroma.social/>
-# SPDX-License-Identifier: AGPL-3.0-only
-
-defmodule Pleroma.Workers.RichMediaExpirationWorker do
-  alias Pleroma.Web.RichMedia.Card
-
-  use Oban.Worker,
-    queue: :rich_media_expiration
-
-  @impl Oban.Worker
-  def perform(%Job{args: %{"url" => url} = _args}) do
-    Card.delete(url)
-  end
-end
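The file removed above is a plain Oban worker: it sits on the `:rich_media_expiration` queue and deletes a cached rich-media card when its job fires. As a sketch of how jobs for a worker like this are typically enqueued (the URL and the one-hour delay are assumed values, not code from this diff):

```elixir
# Hypothetical call site: schedule a cached card to expire in one hour.
%{"url" => "https://example.tld/some/article"}
|> Pleroma.Workers.RichMediaExpirationWorker.new(schedule_in: 3_600)
|> Oban.insert()
```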
9 mix.exs

@@ -78,8 +78,7 @@ defmodule Pleroma.Mixfile do
       :comeonin,
       :fast_sanitize,
       :os_mon,
-      :ssl,
-      :recon
+      :ssl
     ],
     included_applications: [:ex_syslogger]
   ]
@@ -137,7 +136,7 @@ defmodule Pleroma.Mixfile do
       {:tesla, "~> 1.7"},
       {:castore, "~> 1.0"},
       {:cowlib, "~> 2.12"},
-      {:finch, "~> 0.18.0"},
+      {:finch, "~> 0.16.0"},
       {:jason, "~> 1.4"},
       {:trailing_format_plug, "~> 0.0.7"},
       {:mogrify, "~> 0.9"},
@@ -158,10 +157,10 @@ defmodule Pleroma.Mixfile do
       {:floki, "~> 0.34"},
       {:timex, "~> 3.7"},
       {:ueberauth, "== 0.10.5"},
-      {:linkify, "~> 0.5.3"},
+      {:linkify, git: "https://akkoma.dev/AkkomaGang/linkify.git"},
       {:http_signatures,
        git: "https://akkoma.dev/AkkomaGang/http_signatures.git",
-       ref: "d44c43d66758c6a73eaa4da9cffdbee0c5da44ae"},
+       ref: "6640ce7d24c783ac2ef56e27d00d12e8dc85f396"},
       {:telemetry, "~> 1.2"},
       {:telemetry_poller, "~> 1.0"},
       {:telemetry_metrics, "~> 0.6"},
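The dependency hunks above trade Hex releases for git checkouts. A `~>` requirement resolves against hex.pm and accepts compatible updates, a `git:` entry without `ref:` follows the repository's default branch at fetch time, and adding `ref:` pins one exact commit, as the `http_signatures` entry does. A minimal sketch of the forms involved (names reused from the hunk purely as examples, not a suggested deps list):

```elixir
defp deps do
  [
    # Hex release: "~> 0.5.3" accepts any 0.5.x release at or above 0.5.3
    {:linkify, "~> 0.5.3"},
    # As a git dependency it would instead be written as one of:
    #   {:linkify, git: "https://akkoma.dev/AkkomaGang/linkify.git"}  # floats on the default branch
    #   {:linkify, git: "https://akkoma.dev/AkkomaGang/linkify.git", ref: "<commit sha>"}  # pinned
    # Git dependency pinned to an exact commit, fully reproducible:
    {:http_signatures,
     git: "https://akkoma.dev/AkkomaGang/http_signatures.git",
     ref: "6640ce7d24c783ac2ef56e27d00d12e8dc85f396"}
  ]
end
```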
50 mix.lock
@@ -3,12 +3,12 @@
   "base62": {:hex, :base62, "1.2.2", "85c6627eb609317b70f555294045895ffaaeb1758666ab9ef9ca38865b11e629", [:mix], [{:custom_base, "~> 0.2.1", [hex: :custom_base, repo: "hexpm", optional: false]}], "hexpm", "d41336bda8eaa5be197f1e4592400513ee60518e5b9f4dcf38f4b4dae6f377bb"},
   "bbcode_pleroma": {:hex, :bbcode_pleroma, "0.2.0", "d36f5bca6e2f62261c45be30fa9b92725c0655ad45c99025cb1c3e28e25803ef", [:mix], [{:nimble_parsec, "~> 0.5", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "19851074419a5fedb4ef49e1f01b30df504bb5dbb6d6adfc135238063bebd1c3"},
   "bcrypt_elixir": {:hex, :bcrypt_elixir, "3.0.1", "9be815469e6bfefec40fa74658ecbbe6897acfb57614df1416eeccd4903f602c", [:make, :mix], [{:comeonin, "~> 5.3", [hex: :comeonin, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "486bb95efb645d1efc6794c1ddd776a186a9a713abf06f45708a6ce324fb96cf"},
-  "benchee": {:hex, :benchee, "1.3.1", "c786e6a76321121a44229dde3988fc772bca73ea75170a73fd5f4ddf1af95ccf", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "76224c58ea1d0391c8309a8ecbfe27d71062878f59bd41a390266bf4ac1cc56d"},
+  "benchee": {:hex, :benchee, "1.3.0", "f64e3b64ad3563fa9838146ddefb2d2f94cf5b473bdfd63f5ca4d0657bf96694", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "34f4294068c11b2bd2ebf2c59aac9c7da26ffa0068afdf3419f1b176e16c5f81"},
   "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"},
   "cachex": {:hex, :cachex, "3.6.0", "14a1bfbeee060dd9bec25a5b6f4e4691e3670ebda28c8ba2884b12fe30b36bf8", [:mix], [{:eternal, "~> 1.2", [hex: :eternal, repo: "hexpm", optional: false]}, {:jumper, "~> 1.0", [hex: :jumper, repo: "hexpm", optional: false]}, {:sleeplocks, "~> 1.1", [hex: :sleeplocks, repo: "hexpm", optional: false]}, {:unsafe, "~> 1.0", [hex: :unsafe, repo: "hexpm", optional: false]}], "hexpm", "ebf24e373883bc8e0c8d894a63bbe102ae13d918f790121f5cfe6e485cc8e2e2"},
   "calendar": {:hex, :calendar, "1.0.0", "f52073a708528482ec33d0a171954ca610fe2bd28f1e871f247dc7f1565fa807", [:mix], [{:tzdata, "~> 0.1.201603 or ~> 0.5.20 or ~> 1.0", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm", "990e9581920c82912a5ee50e62ff5ef96da6b15949a2ee4734f935fdef0f0a6f"},
   "captcha": {:git, "https://git.pleroma.social/pleroma/elixir-libraries/elixir-captcha.git", "90f6ce7672f70f56708792a98d98bd05176c9176", [ref: "90f6ce7672f70f56708792a98d98bd05176c9176"]},
-  "castore": {:hex, :castore, "1.0.7", "b651241514e5f6956028147fe6637f7ac13802537e895a724f90bf3e36ddd1dd", [:mix], [], "hexpm", "da7785a4b0d2a021cd1292a60875a784b6caef71e76bf4917bdee1f390455cf5"},
+  "castore": {:hex, :castore, "1.0.6", "ffc42f110ebfdafab0ea159cd43d31365fa0af0ce4a02ecebf1707ae619ee727", [:mix], [], "hexpm", "374c6e7ca752296be3d6780a6d5b922854ffcc74123da90f2f328996b962d33a"},
   "certifi": {:hex, :certifi, "2.12.0", "2d1cca2ec95f59643862af91f001478c9863c2ac9cb6e2f89780bfd8de987329", [:rebar3], [], "hexpm", "ee68d85df22e554040cdb4be100f33873ac6051387baf6a8f6ce82272340ff1c"},
   "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm", "1b1dbc1790073076580d0d1d64e42eae2366583e7aecd455d1215b0d16f2451b"},
   "comeonin": {:hex, :comeonin, "5.4.0", "246a56ca3f41d404380fc6465650ddaa532c7f98be4bda1b4656b3a37cc13abe", [:mix], [], "hexpm", "796393a9e50d01999d56b7b8420ab0481a7538d0caf80919da493b4a6e51faf1"},
@@ -18,7 +18,7 @@
   "cowboy": {:hex, :cowboy, "2.12.0", "f276d521a1ff88b2b9b4c54d0e753da6c66dd7be6c9fca3d9418b561828a3731", [:make, :rebar3], [{:cowlib, "2.13.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "8a7abe6d183372ceb21caa2709bec928ab2b72e18a3911aa1771639bef82651e"},
   "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
   "cowlib": {:hex, :cowlib, "2.13.0", "db8f7505d8332d98ef50a3ef34b34c1afddec7506e4ee4dd4a3a266285d282ca", [:make, :rebar3], [], "hexpm", "e1e1284dc3fc030a64b1ad0d8382ae7e99da46c3246b815318a4b848873800a4"},
-  "credo": {:hex, :credo, "1.7.7", "771445037228f763f9b2afd612b6aa2fd8e28432a95dbbc60d8e03ce71ba4446", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8bc87496c9aaacdc3f90f01b7b0582467b69b4bd2441fe8aae3109d843cc2f2e"},
+  "credo": {:hex, :credo, "1.7.5", "643213503b1c766ec0496d828c90c424471ea54da77c8a168c725686377b9545", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "f799e9b5cd1891577d8c773d245668aa74a2fcd15eb277f51a0131690ebfb3fd"},
   "custom_base": {:hex, :custom_base, "0.2.1", "4a832a42ea0552299d81652aa0b1f775d462175293e99dfbe4d7dbaab785a706", [:mix], [], "hexpm", "8df019facc5ec9603e94f7270f1ac73ddf339f56ade76a721eaa57c1493ba463"},
   "db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"},
   "decimal": {:hex, :decimal, "2.1.1", "5611dca5d4b2c3dd497dec8f68751f1f1a54755e8ed2a966c2633cf885973ad6", [:mix], [], "hexpm", "53cfe5f497ed0e7771ae1a475575603d77425099ba5faef9394932b35020ffcc"},
@@ -29,18 +29,18 @@
   "eblurhash": {:hex, :eblurhash, "1.2.2", "7da4255aaea984b31bb71155f673257353b0e0554d0d30dcf859547e74602582", [:rebar3], [], "hexpm", "8c20ca00904de023a835a9dcb7b7762fed32264c85a80c3cafa85288e405044c"},
   "ecto": {:hex, :ecto, "3.10.3", "eb2ae2eecd210b4eb8bece1217b297ad4ff824b4384c0e3fdd28aaf96edd6135", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "44bec74e2364d491d70f7e42cd0d690922659d329f6465e89feb8a34e8cd3433"},
   "ecto_enum": {:hex, :ecto_enum, "1.4.0", "d14b00e04b974afc69c251632d1e49594d899067ee2b376277efd8233027aec8", [:mix], [{:ecto, ">= 3.0.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:ecto_sql, "> 3.0.0", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:mariaex, ">= 0.0.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:postgrex, ">= 0.0.0", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "8fb55c087181c2b15eee406519dc22578fa60dd82c088be376d0010172764ee4"},
-  "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.8.0", "440719cd74f09b3f01c84455707a2c3972b725c513808e68eb6c5b0ab82bf523", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 0.18.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "f1512812dc196bcb932a96c82e55f69b543dc125e9d39f5e3631a9c4ec65ef12"},
+  "ecto_psql_extras": {:hex, :ecto_psql_extras, "0.7.15", "0fc29dbae0e444a29bd6abeee4cf3c4c037e692a272478a234a1cc765077dbb1", [:mix], [{:ecto_sql, "~> 3.7", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0", [hex: :postgrex, repo: "hexpm", optional: false]}, {:table_rex, "~> 3.1.1 or ~> 4.0.0", [hex: :table_rex, repo: "hexpm", optional: false]}], "hexpm", "b6127f3a5c6fc3d84895e4768cc7c199f22b48b67d6c99b13fbf4a374e73f039"},
   "ecto_sql": {:hex, :ecto_sql, "3.10.2", "6b98b46534b5c2f8b8b5f03f126e75e2a73c64f3c071149d32987a5378b0fdbd", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.10.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.6.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.16.0 or ~> 0.17.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1 or ~> 2.2", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "68c018debca57cb9235e3889affdaec7a10616a4e3a80c99fa1d01fdafaa9007"},
   "elasticsearch": {:git, "https://akkoma.dev/AkkomaGang/elasticsearch-elixir.git", "6cd946f75f6ab9042521a009d1d32d29a90113ca", [ref: "main"]},
-  "elixir_make": {:hex, :elixir_make, "0.8.4", "4960a03ce79081dee8fe119d80ad372c4e7badb84c493cc75983f9d3bc8bde0f", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "6e7f1d619b5f61dfabd0a20aa268e575572b542ac31723293a4c1a567d5ef040"},
+  "elixir_make": {:hex, :elixir_make, "0.8.3", "d38d7ee1578d722d89b4d452a3e36bcfdc644c618f0d063b874661876e708683", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:certifi, "~> 2.0", [hex: :certifi, repo: "hexpm", optional: true]}], "hexpm", "5c99a18571a756d4af7a4d89ca75c28ac899e6103af6f223982f09ce44942cc9"},
   "elixir_xml_to_map": {:hex, :elixir_xml_to_map, "3.1.0", "4d6260486a8cce59e4bf3575fe2dd2a24766546ceeef9f93fcec6f7c62a2827a", [:mix], [{:erlsom, "~> 1.4", [hex: :erlsom, repo: "hexpm", optional: false]}], "hexpm", "8fe5f2e75f90bab07ee2161120c2dc038ebcae8135554f5582990f1c8c21f911"},
   "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
   "erlsom": {:hex, :erlsom, "1.5.1", "c8fe2babd33ff0846403f6522328b8ab676f896b793634cfe7ef181c05316c03", [:rebar3], [], "hexpm", "7965485494c5844dd127656ac40f141aadfa174839ec1be1074e7edf5b4239eb"},
   "eternal": {:hex, :eternal, "1.2.2", "d1641c86368de99375b98d183042dd6c2b234262b8d08dfd72b9eeaafc2a1abd", [:mix], [], "hexpm", "2c9fe32b9c3726703ba5e1d43a1d255a4f3f2d8f8f9bc19f094c7cb1a7a9e782"},
-  "ex_aws": {:hex, :ex_aws, "2.5.4", "86c5bb870a49e0ab6f5aa5dd58cf505f09d2624ebe17530db3c1b61c88a673af", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8 or ~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "e82bd0091bb9a5bb190139599f922ff3fc7aebcca4374d65c99c4e23aa6d1625"},
+  "ex_aws": {:hex, :ex_aws, "2.5.3", "9c2d05ba0c057395b12c7b5ca6267d14cdaec1d8e65bdf6481fe1fd245accfb4", [:mix], [{:configparser_ex, "~> 4.0", [hex: :configparser_ex, repo: "hexpm", optional: true]}, {:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:jsx, "~> 2.8 or ~> 3.0", [hex: :jsx, repo: "hexpm", optional: true]}, {:mime, "~> 1.2 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:sweet_xml, "~> 0.7", [hex: :sweet_xml, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "67115f1d399d7ec4d191812ee565c6106cb4b1bbf19a9d4db06f265fd87da97e"},
   "ex_aws_s3": {:hex, :ex_aws_s3, "2.5.3", "422468e5c3e1a4da5298e66c3468b465cfd354b842e512cb1f6fbbe4e2f5bdaf", [:mix], [{:ex_aws, "~> 2.0", [hex: :ex_aws, repo: "hexpm", optional: false]}, {:sweet_xml, ">= 0.0.0", [hex: :sweet_xml, repo: "hexpm", optional: true]}], "hexpm", "4f09dd372cc386550e484808c5ac5027766c8d0cd8271ccc578b82ee6ef4f3b8"},
-  "ex_const": {:hex, :ex_const, "0.3.0", "9d79516679991baf540ef445438eef1455ca91cf1a3c2680d8fb9e5bea2fe4de", [:mix], [], "hexpm", "76546322abb9e40ee4a2f454cf1c8a5b25c3672fa79bed1ea52c31e0d2428ca9"},
-  "ex_doc": {:hex, :ex_doc, "0.34.0", "ab95e0775db3df71d30cf8d78728dd9261c355c81382bcd4cefdc74610bef13e", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "60734fb4c1353f270c3286df4a0d51e65a2c1d9fba66af3940847cc65a8066d7"},
+  "ex_const": {:hex, :ex_const, "0.2.4", "d06e540c9d834865b012a17407761455efa71d0ce91e5831e86881b9c9d82448", [:mix], [], "hexpm", "96fd346610cc992b8f896ed26a98be82ac4efb065a0578f334a32d60a3ba9767"},
+  "ex_doc": {:hex, :ex_doc, "0.32.0", "896afb57b1e00030f6ec8b2e19d3ca99a197afb23858d49d94aea673dc222f12", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "ed2c3e42c558f49bda3ff37e05713432006e1719a6c4a3320c7e4735787374e7"},
   "ex_machina": {:hex, :ex_machina, "2.7.0", "b792cc3127fd0680fecdb6299235b4727a4944a09ff0fa904cc639272cd92dc7", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "419aa7a39bde11894c87a615c4ecaa52d8f107bbdd81d810465186f783245bf8"},
   "ex_syslogger": {:hex, :ex_syslogger, "2.0.0", "de6de5c5472a9c4fdafb28fa6610e381ae79ebc17da6490b81d785d68bd124c9", [:mix], [{:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: true]}, {:syslog, "~> 1.1.0", [hex: :syslog, repo: "hexpm", optional: false]}], "hexpm", "a52b2fe71764e9e6ecd149ab66635812f68e39279cbeee27c52c0e35e8b8019e"},
   "excoveralls": {:hex, :excoveralls, "0.16.1", "0bd42ed05c7d2f4d180331a20113ec537be509da31fed5c8f7047ce59ee5a7c5", [:mix], [{:hackney, "~> 1.16", [hex: :hackney, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "dae763468e2008cf7075a64cb1249c97cb4bc71e236c5c2b5e5cdf1cfa2bf138"},
@@ -49,55 +49,55 @@
   "fast_sanitize": {:hex, :fast_sanitize, "0.2.3", "67b93dfb34e302bef49fec3aaab74951e0f0602fd9fa99085987af05bd91c7a5", [:mix], [{:fast_html, "~> 2.0", [hex: :fast_html, repo: "hexpm", optional: false]}, {:plug, "~> 1.8", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "e8ad286d10d0386e15d67d0ee125245ebcfbc7d7290b08712ba9013c8c5e56e2"},
   "file_ex": {:git, "https://akkoma.dev/AkkomaGang/file_ex.git", "cc7067c7d446c2526e9ecf91d40896b088851569", [ref: "cc7067c7d446c2526e9ecf91d40896b088851569"]},
   "file_system": {:hex, :file_system, "1.0.0", "b689cc7dcee665f774de94b5a832e578bd7963c8e637ef940cd44327db7de2cd", [:mix], [], "hexpm", "6752092d66aec5a10e662aefeed8ddb9531d79db0bc145bb8c40325ca1d8536d"},
-  "finch": {:hex, :finch, "0.18.0", "944ac7d34d0bd2ac8998f79f7a811b21d87d911e77a786bc5810adb75632ada4", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "69f5045b042e531e53edc2574f15e25e735b522c37e2ddb766e15b979e03aa65"},
+  "finch": {:hex, :finch, "0.16.0", "40733f02c89f94a112518071c0a91fe86069560f5dbdb39f9150042f44dcfb1a", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: false]}, {:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2.6 or ~> 1.0", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "f660174c4d519e5fec629016054d60edd822cdfe2b7270836739ac2f97735ec5"},
   "flake_id": {:hex, :flake_id, "0.1.0", "7716b086d2e405d09b647121a166498a0d93d1a623bead243e1f74216079ccb3", [:mix], [{:base62, "~> 1.2", [hex: :base62, repo: "hexpm", optional: false]}, {:ecto, ">= 2.0.0", [hex: :ecto, repo: "hexpm", optional: true]}], "hexpm", "31fc8090fde1acd267c07c36ea7365b8604055f897d3a53dd967658c691bd827"},
-  "floki": {:hex, :floki, "0.36.2", "a7da0193538c93f937714a6704369711998a51a6164a222d710ebd54020aa7a3", [:mix], [], "hexpm", "a8766c0bc92f074e5cb36c4f9961982eda84c5d2b8e979ca67f5c268ec8ed580"},
+  "floki": {:hex, :floki, "0.36.1", "712b7f2ba19a4d5a47dfe3e74d81876c95bbcbee44fe551f0af3d2a388abb3da", [:mix], [], "hexpm", "21ba57abb8204bcc70c439b423fc0dd9f0286de67dc82773a14b0200ada0995f"},
   "gen_smtp": {:hex, :gen_smtp, "1.2.0", "9cfc75c72a8821588b9b9fe947ae5ab2aed95a052b81237e0928633a13276fd3", [:rebar3], [{:ranch, ">= 1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "5ee0375680bca8f20c4d85f58c2894441443a743355430ff33a783fe03296779"},
   "gettext": {:hex, :gettext, "0.22.3", "c8273e78db4a0bb6fba7e9f0fd881112f349a3117f7f7c598fa18c66c888e524", [:mix], [{:expo, "~> 0.4.0", [hex: :expo, repo: "hexpm", optional: false]}], "hexpm", "935f23447713954a6866f1bb28c3a878c4c011e802bcd68a726f5e558e4b64bd"},
   "hackney": {:hex, :hackney, "1.20.1", "8d97aec62ddddd757d128bfd1df6c5861093419f8f7a4223823537bad5d064e2", [:rebar3], [{:certifi, "~> 2.12.0", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~> 6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~> 1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~> 1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.4.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~> 1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "fe9094e5f1a2a2c0a7d10918fee36bfec0ec2a979994cff8cfe8058cd9af38e3"},
   "hpax": {:hex, :hpax, "0.1.2", "09a75600d9d8bbd064cdd741f21fc06fc1f4cf3d0fcc335e5aa19be1a7235c84", [:mix], [], "hexpm", "2c87843d5a23f5f16748ebe77969880e29809580efdaccd615cd3bed628a8c13"},
   "html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"},
-  "http_signatures": {:git, "https://akkoma.dev/AkkomaGang/http_signatures.git", "d44c43d66758c6a73eaa4da9cffdbee0c5da44ae", [ref: "d44c43d66758c6a73eaa4da9cffdbee0c5da44ae"]},
+  "http_signatures": {:git, "https://akkoma.dev/AkkomaGang/http_signatures.git", "6640ce7d24c783ac2ef56e27d00d12e8dc85f396", [ref: "6640ce7d24c783ac2ef56e27d00d12e8dc85f396"]},
   "httpoison": {:hex, :httpoison, "1.8.2", "9eb9c63ae289296a544842ef816a85d881d4a31f518a0fec089aaa744beae290", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "2bb350d26972e30c96e2ca74a1aaf8293d61d0742ff17f01e0279fef11599921"},
   "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~> 0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
   "inet_cidr": {:hex, :inet_cidr, "1.0.8", "d26bb7bdbdf21ae401ead2092bf2bb4bf57fe44a62f5eaa5025280720ace8a40", [:mix], [], "hexpm", "d5b26da66603bb56c933c65214c72152f0de9a6ea53618b56d63302a68f6a90e"},
   "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"},
   "joken": {:hex, :joken, "2.6.1", "2ca3d8d7f83bf7196296a3d9b2ecda421a404634bfc618159981a960020480a1", [:mix], [{:jose, "~> 1.11.9", [hex: :jose, repo: "hexpm", optional: false]}], "hexpm", "ab26122c400b3d254ce7d86ed066d6afad27e70416df947cdcb01e13a7382e68"},
-  "jose": {:hex, :jose, "1.11.10", "a903f5227417bd2a08c8a00a0cbcc458118be84480955e8d251297a425723f83", [:mix, :rebar3], [], "hexpm", "0d6cd36ff8ba174db29148fc112b5842186b68a90ce9fc2b3ec3afe76593e614"},
+  "jose": {:hex, :jose, "1.11.9", "c861eb99d9e9f62acd071dc5a49ffbeab9014e44490cd85ea3e49e3d36184777", [:mix, :rebar3], [], "hexpm", "b5ccc3749d2e1638c26bed806259df5bc9e438797fe60dc71e9fa0716133899b"},
   "jumper": {:hex, :jumper, "1.0.2", "68cdcd84472a00ac596b4e6459a41b3062d4427cbd4f1e8c8793c5b54f1406a7", [:mix], [], "hexpm", "9b7782409021e01ab3c08270e26f36eb62976a38c1aa64b2eaf6348422f165e1"},
-  "linkify": {:hex, :linkify, "0.5.3", "5f8143d8f61f5ff08d3aeeff47ef6509492b4948d8f08007fbf66e4d2246a7f2", [:mix], [], "hexpm", "3ef35a1377d47c25506e07c1c005ea9d38d700699d92ee92825f024434258177"},
+  "linkify": {:git, "https://akkoma.dev/AkkomaGang/linkify.git", "2567e2c1073fa371fd26fd66dfa5bc77b6919c16", []},
   "mail": {:hex, :mail, "0.3.1", "cb0a14e4ed8904e4e5a08214e686ccf6f9099346885db17d8c309381f865cc5c", [:mix], [], "hexpm", "1db701e89865c1d5fa296b2b57b1cd587587cca8d8a1a22892b35ef5a8e352a6"},
   "majic": {:git, "https://akkoma.dev/AkkomaGang/majic.git", "80540b36939ec83f48e76c61e5000e0fd67706f0", [ref: "80540b36939ec83f48e76c61e5000e0fd67706f0"]},
-  "makeup": {:hex, :makeup, "1.1.2", "9ba8837913bdf757787e71c1581c21f9d2455f4dd04cfca785c70bbfff1a76a3", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cce1566b81fbcbd21eca8ffe808f33b221f9eee2cbc7a1706fc3da9ff18e6cac"},
+  "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"},
   "makeup_elixir": {:hex, :makeup_elixir, "0.16.2", "627e84b8e8bf22e60a2579dad15067c755531fea049ae26ef1020cad58fe9578", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "41193978704763f6bbe6cc2758b84909e62984c7752b3784bd3c218bb341706b"},
-  "makeup_erlang": {:hex, :makeup_erlang, "1.0.0", "6f0eff9c9c489f26b69b61440bf1b238d95badae49adac77973cbacae87e3c2e", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "ea7a9307de9d1548d2a72d299058d1fd2339e3d398560a0e46c27dab4891e4d2"},
+  "makeup_erlang": {:hex, :makeup_erlang, "0.1.5", "e0ff5a7c708dda34311f7522a8758e23bfcd7d8d8068dc312b5eb41c6fd76eba", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "94d2e986428585a21516d7d7149781480013c56e30c6a233534bedf38867a59a"},
   "meck": {:hex, :meck, "0.9.2", "85ccbab053f1db86c7ca240e9fc718170ee5bda03810a6292b5306bf31bae5f5", [:rebar3], [], "hexpm", "81344f561357dc40a8344afa53767c32669153355b626ea9fcbc8da6b3045826"},
   "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
   "mfm_parser": {:git, "https://akkoma.dev/AkkomaGang/mfm-parser.git", "b21ab7754024af096f2d14247574f55f0063295b", [ref: "b21ab7754024af096f2d14247574f55f0063295b"]},
   "mime": {:hex, :mime, "2.0.5", "dc34c8efd439abe6ae0343edbb8556f4d63f178594894720607772a041b04b02", [:mix], [], "hexpm", "da0d64a365c45bc9935cc5c8a7fc5e49a0e0f9932a761c55d6c52b142780a05c"},
-  "mimerl": {:hex, :mimerl, "1.3.0", "d0cd9fc04b9061f82490f6581e0128379830e78535e017f7780f37fea7545726", [:rebar3], [], "hexpm", "a1e15a50d1887217de95f0b9b0793e32853f7c258a5cd227650889b38839fe9d"},
+  "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
   "mint": {:hex, :mint, "1.5.2", "4805e059f96028948870d23d7783613b7e6b0e2fb4e98d720383852a760067fd", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "d77d9e9ce4eb35941907f1d3df38d8f750c357865353e21d335bdcdf6d892a02"},
   "mock": {:hex, :mock, "0.3.8", "7046a306b71db2488ef54395eeb74df0a7f335a7caca4a3d3875d1fc81c884dd", [:mix], [{:meck, "~> 0.9.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm", "7fa82364c97617d79bb7d15571193fc0c4fe5afd0c932cef09426b3ee6fe2022"},
   "mogrify": {:hex, :mogrify, "0.9.3", "238c782f00271dace01369ad35ae2e9dd020feee3443b9299ea5ea6bed559841", [:mix], [], "hexpm", "0189b1e1de27455f2b9ae8cf88239cefd23d38de9276eb5add7159aea51731e6"},
   "mox": {:hex, :mox, "1.1.0", "0f5e399649ce9ab7602f72e718305c0f9cdc351190f72844599545e4996af73c", [:mix], [], "hexpm", "d44474c50be02d5b72131070281a5d3895c0e7a95c780e90bc0cfe712f633a13"},
-  "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
+  "nimble_options": {:hex, :nimble_options, "1.1.0", "3b31a57ede9cb1502071fade751ab0c7b8dbe75a9a4c2b5bbb0943a690b63172", [:mix], [], "hexpm", "8bbbb3941af3ca9acc7835f5655ea062111c9c27bcac53e004460dfd19008a99"},
   "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
   "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
-  "oban": {:hex, :oban, "2.17.10", "c3e5bd739b5c3fdc38eba1d43ab270a8c6ca4463bb779b7705c69400b0d87678", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4afd027b8e2bc3c399b54318b4f46ee8c40251fb55a285cb4e38b5363f0ee7c4"},
-  "open_api_spex": {:hex, :open_api_spex, "3.19.1", "65ccb5d06e3d664d1eec7c5ea2af2289bd2f37897094a74d7219fb03fc2b5994", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "392895827ce2984a3459c91a484e70708132d8c2c6c5363972b4b91d6bbac3dd"},
+  "oban": {:hex, :oban, "2.17.8", "7fd7c8e82c7819afc1b5b5ed8d6d92bf0ecdd7ba170328fb043301eb06d32521", [:mix], [{:ecto_sql, "~> 3.10", [hex: :ecto_sql, repo: "hexpm", optional: false]}, {:ecto_sqlite3, "~> 0.9", [hex: :ecto_sqlite3, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.16", [hex: :postgrex, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a2165bf93843b7bcb68182c82725ddd4cb43c0c3719f114e7aa3b6c99c4b6129"},
+  "open_api_spex": {:hex, :open_api_spex, "3.18.3", "fefb84fe323cacfc92afdd0ecb9e89bc0261ae00b7e3167ffc2028ce3944de42", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :poison, repo: "hexpm", optional: true]}, {:ymlr, "~> 2.0 or ~> 3.0 or ~> 4.0 or ~> 5.0", [hex: :ymlr, repo: "hexpm", optional: true]}], "hexpm", "c0cfc31570199ce7e7520b494a591027da609af45f6bf9adce51e2469b1609fb"},
   "parse_trans": {:hex, :parse_trans, "3.4.1", "6e6aa8167cb44cc8f39441d05193be6e6f4e7c2946cb2759f015f8c56b76e5ff", [:rebar3], [], "hexpm", "620a406ce75dada827b82e453c19cf06776be266f5a67cff34e1ef2cbb60e49a"},
   "phoenix": {:hex, :phoenix, "1.7.12", "1cc589e0eab99f593a8aa38ec45f15d25297dd6187ee801c8de8947090b5a9d3", [:mix], [{:castore, ">= 0.0.0", [hex: :castore, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.1", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.7", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:websock_adapter, "~> 0.5.3", [hex: :websock_adapter, repo: "hexpm", optional: false]}], "hexpm", "d646192fbade9f485b01bc9920c139bfdd19d0f8df3d73fd8eaf2dfbe0d2837c"},
-  "phoenix_ecto": {:hex, :phoenix_ecto, "4.6.1", "96798325fab2fed5a824ca204e877b81f9afd2e480f581e81f7b4b64a5a477f2", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.17", [hex: :postgrex, repo: "hexpm", optional: true]}], "hexpm", "0ae544ff99f3c482b0807c5cec2c8289e810ecacabc04959d82c3337f4703391"},
-  "phoenix_html": {:hex, :phoenix_html, "3.3.4", "42a09fc443bbc1da37e372a5c8e6755d046f22b9b11343bf885067357da21cb3", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "0249d3abec3714aff3415e7ee3d9786cb325be3151e6c4b3021502c585bf53fb"},
+  "phoenix_ecto": {:hex, :phoenix_ecto, "4.5.1", "6fdbc334ea53620e71655664df6f33f670747b3a7a6c4041cdda3e2c32df6257", [:mix], [{:ecto, "~> 3.5", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.1", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.9", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ebe43aa580db129e54408e719fb9659b7f9e0d52b965c5be26cdca416ecead28"},
+  "phoenix_html": {:hex, :phoenix_html, "3.3.3", "380b8fb45912b5638d2f1d925a3771b4516b9a78587249cabe394e0a5d579dc9", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "923ebe6fec6e2e3b3e569dfbdc6560de932cd54b000ada0208b5f45024bdd76c"},
   "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.7.2", "97cc4ff2dba1ebe504db72cb45098cb8e91f11160528b980bd282cc45c73b29c", [:mix], [{:ecto, "~> 3.6.2 or ~> 3.7", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_mysql_extras, "~> 0.5", [hex: :ecto_mysql_extras, repo: "hexpm", optional: true]}, {:ecto_psql_extras, "~> 0.7", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:mime, "~> 1.6 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.18.3", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6 or ~> 1.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "0e5fdf063c7a3b620c566a30fcf68b7ee02e5e46fe48ee46a6ec3ba382dc05b7"},
   "phoenix_live_view": {:hex, :phoenix_live_view, "0.18.18", "1f38fbd7c363723f19aad1a04b5490ff3a178e37daaf6999594d5f34796c47fc", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6.15 or ~> 1.7.0", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 3.3", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.2 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "a5810d0472f3189ede6d2a95bda7f31c6113156b91784a3426cb0ab6a6d85214"},
   "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.1.3", "3168d78ba41835aecad272d5e8cd51aa87a7ac9eb836eabc42f6e57538e3731d", [:mix], [], "hexpm", "bba06bc1dcfd8cb086759f0edc94a8ba2bc8896d5331a1e2c2902bf8e36ee502"},
   "phoenix_swoosh": {:hex, :phoenix_swoosh, "1.2.1", "b74ccaa8046fbc388a62134360ee7d9742d5a8ae74063f34eb050279de7a99e1", [:mix], [{:finch, "~> 0.8", [hex: :finch, repo: "hexpm", optional: true]}, {:hackney, "~> 1.10", [hex: :hackney, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.6", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_view, "~> 1.0 or ~> 2.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:swoosh, "~> 1.5", [hex: :swoosh, repo: "hexpm", optional: false]}], "hexpm", "4000eeba3f9d7d1a6bf56d2bd56733d5cadf41a7f0d8ffe5bb67e7d667e204a2"},
   "phoenix_template": {:hex, :phoenix_template, "1.0.4", "e2092c132f3b5e5b2d49c96695342eb36d0ed514c5b252a77048d5969330d639", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "2c0c81f0e5c6753faf5cca2f229c9709919aba34fab866d3bc05060c9c444206"},
-  "phoenix_view": {:hex, :phoenix_view, "2.0.4", "b45c9d9cf15b3a1af5fb555c674b525391b6a1fe975f040fb4d913397b31abf4", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "4e992022ce14f31fe57335db27a28154afcc94e9983266835bb3040243eb620b"},
-  "plug": {:hex, :plug, "1.16.0", "1d07d50cb9bb05097fdf187b31cf087c7297aafc3fed8299aac79c128a707e47", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbf53aa1f5c4d758a7559c0bd6d59e286c2be0c6a1fac8cc3eee2f638243b93e"},
+  "phoenix_view": {:hex, :phoenix_view, "2.0.3", "4d32c4817fce933693741deeb99ef1392619f942633dde834a5163124813aad3", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0 or ~> 4.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_template, "~> 1.0", [hex: :phoenix_template, repo: "hexpm", optional: false]}], "hexpm", "cd34049af41be2c627df99cd4eaa71fc52a328c0c3d8e7d4aa28f880c30e7f64"},
+  "plug": {:hex, :plug, "1.15.3", "712976f504418f6dff0a3e554c40d705a9bcf89a7ccef92fc6a5ef8f16a30a97", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2 or ~> 2.0", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cc4365a3c010a56af402e0809208873d113e9c38c401cabd88027ef4f5c01fd2"},
   "plug_cowboy": {:hex, :plug_cowboy, "2.7.1", "87677ffe3b765bc96a89be7960f81703223fe2e21efa42c125fcd0127dd9d6b2", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "02dbd5f9ab571b864ae39418db7811618506256f6d13b4a45037e5fe78dc5de3"},
-  "plug_crypto": {:hex, :plug_crypto, "2.1.0", "f44309c2b06d249c27c8d3f65cfe08158ade08418cf540fd4f72d4d6863abb7b", [:mix], [], "hexpm", "131216a4b030b8f8ce0f26038bc4421ae60e4bb95c5cf5395e1421437824c4fa"},
+  "plug_crypto": {:hex, :plug_crypto, "2.0.0", "77515cc10af06645abbfb5e6ad7a3e9714f805ae118fa1a70205f80d2d70fe73", [:mix], [], "hexpm", "53695bae57cc4e54566d993eb01074e4d894b65a3766f1c43e2c61a1b0f45ea9"},
   "plug_static_index_html": {:hex, :plug_static_index_html, "1.0.0", "840123d4d3975585133485ea86af73cb2600afd7f2a976f9f5fd8b3808e636a0", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "79fd4fcf34d110605c26560cbae8f23c603ec4158c08298bd4360fdea90bb5cf"},
   "poison": {:hex, :poison, "5.0.0", "d2b54589ab4157bbb82ec2050757779bfed724463a544b6e20d79855a9e43b24", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "11dc6117c501b80c62a7594f941d043982a1bd05a1184280c0d9166eb4d8d3fc"},
   "poolboy": {:hex, :poolboy, "1.5.2", "392b007a1693a64540cead79830443abf5762f5d30cf50bc95cb2c1aaafa006b", [:rebar3], [], "hexpm", "dad79704ce5440f3d5a3681c8590b9dc25d1a561e8f5a9c995281012860901e3"},
@@ -107,7 +107,7 @@
   "recon": {:hex, :recon, "2.5.5", "c108a4c406fa301a529151a3bb53158cadc4064ec0c5f99b03ddb8c0e4281bdf", [:mix, :rebar3], [], "hexpm", "632a6f447df7ccc1a4a10bdcfce71514412b16660fe59deca0fcf0aa3c054404"},
   "remote_ip": {:hex, :remote_ip, "1.1.0", "cb308841595d15df3f9073b7c39243a1dd6ca56e5020295cb012c76fbec50f2d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:plug, "~> 1.14", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "616ffdf66aaad6a72fc546dabf42eed87e2a99e97b09cbd92b10cc180d02ed74"},
   "search_parser": {:git, "https://github.com/FloatingGhost/pleroma-contrib-search-parser.git", "08971a81e68686f9ac465cfb6661d51c5e4e1e7f", [ref: "08971a81e68686f9ac465cfb6661d51c5e4e1e7f"]},
-  "sleeplocks": {:hex, :sleeplocks, "1.1.3", "96a86460cc33b435c7310dbd27ec82ca2c1f24ae38e34f8edde97f756503441a", [:rebar3], [], "hexpm", "d3b3958552e6eb16f463921e70ae7c767519ef8f5be46d7696cc1ed649421321"},
+  "sleeplocks": {:hex, :sleeplocks, "1.1.2", "d45aa1c5513da48c888715e3381211c859af34bee9b8290490e10c90bb6ff0ca", [:rebar3], [], "hexpm", "9fe5d048c5b781d6305c1a3a0f40bb3dfc06f49bf40571f3d2d0c57eaa7f59a5"},
   "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
   "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"},
   "sweet_xml": {:hex, :sweet_xml, "0.7.4", "a8b7e1ce7ecd775c7e8a65d501bc2cd933bff3a9c41ab763f5105688ef485d08", [:mix], [], "hexpm", "e7c4b0bdbf460c928234951def54fe87edf1a170f6896675443279e2dbeba167"},
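Each mix.lock entry records everything needed to reproduce one fetch, which is why every version bump above changes two checksums as well. An annotated sketch of the two tuple shapes used in these hunks (hashes shortened here for readability):

```elixir
%{
  # {:hex, package, version, inner checksum, build tools, deps, repo, outer checksum}
  "castore": {:hex, :castore, "1.0.6", "ffc4…", [:mix], [], "hexpm", "374c…"},
  # {:git, url, locked commit, fetch options}
  "linkify": {:git, "https://akkoma.dev/AkkomaGang/linkify.git",
              "2567e2c1073fa371fd26fd66dfa5bc77b6919c16", []}
}
```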
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2022-07-28 09:35+0000\n"
 "PO-Revision-Date: 2023-08-04 14:19+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Catalan <http://translate.akkoma.dev/projects/akkoma/akkoma-"
-"backend-config-descriptions/ca/>\n"
+"Language-Team: Catalan <http://translate.akkoma.dev/projects/akkoma/"
+"akkoma-backend-config-descriptions/ca/>\n"
 "Language: ca\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3296,6 +3296,18 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+msgstr ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5786,6 +5798,12 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy remote"
+msgstr "Proxy remote"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
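The catalog entries added in these .po hunks are generated from the config documentation (`lib/pleroma/docs/translator.ex`) and managed through Weblate: `msgctxt` disambiguates entries that would otherwise share a msgid, and `#, fuzzy` flags a machine-copied translation awaiting human review. A sketch of how such an entry would be looked up at runtime (the backend module and domain name are assumptions, not taken from this diff):

```elixir
# Hypothetical lookup of the label added above.
Gettext.dpgettext(
  Pleroma.Web.Gettext,                                        # assumed backend module
  "config_descriptions",                                      # assumed gettext domain
  "config label at :pleroma-Pleroma.Upload > :proxy_remote",  # msgctxt
  "Proxy remote"                                              # msgid, also the fallback
)
```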
@@ -2602,6 +2602,12 @@ msgctxt "config description at :pleroma-Pleroma.Upload > :link_name"
 msgid "If enabled, a name parameter will be added to the URL of the upload. For example `https://instance.tld/media/imagehash.png?name=realname.png`."
 msgstr ""
 
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy requests to the remote uploader.\n\nUseful if media upload endpoint is not internet accessible.\n"
+msgstr ""
+
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -4882,6 +4888,12 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr ""
 
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy remote"
+msgstr ""
+
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
@@ -2603,6 +2603,12 @@ msgctxt "config description at :pleroma-Pleroma.Upload > :link_name"
 msgid "If enabled, a name parameter will be added to the URL of the upload. For example `https://instance.tld/media/imagehash.png?name=realname.png`."
 msgstr ""
 
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy requests to the remote uploader.\n\nUseful if media upload endpoint is not internet accessible.\n"
+msgstr ""
+
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -4883,6 +4889,12 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr ""
 
+#: lib/pleroma/docs/translator.ex:5
+#, elixir-autogen, elixir-format
+msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy remote"
+msgstr ""
+
 #: lib/pleroma/docs/translator.ex:5
 #, elixir-autogen, elixir-format
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
(File diff suppressed because it is too large.)
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2022-08-06 22:23+0000\n"
 "PO-Revision-Date: 2023-08-04 14:19+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/akkoma-"
-"backend-config-descriptions/es/>\n"
+"Language-Team: Spanish <http://translate.akkoma.dev/projects/akkoma/"
+"akkoma-backend-config-descriptions/es/>\n"
 "Language: es\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3314,6 +3314,18 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+msgstr ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5804,6 +5816,12 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy remote"
+msgstr "Proxy remote"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
(Further file diffs suppressed because they are too large.)
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2022-08-06 21:54+0000\n"
 "PO-Revision-Date: 2023-08-04 14:26+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Dutch <http://translate.akkoma.dev/projects/akkoma/akkoma-"
-"backend-config-descriptions/nl/>\n"
+"Language-Team: Dutch <http://translate.akkoma.dev/projects/akkoma/"
+"akkoma-backend-config-descriptions/nl/>\n"
 "Language: nl\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3316,6 +3316,18 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+msgstr ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5806,6 +5818,12 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy remote"
+msgstr "Proxy remote"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
(Further file diffs suppressed because they are too large.)
@@ -5,8 +5,8 @@ msgstr ""
 "POT-Creation-Date: 2023-07-07 18:47+0000\n"
 "PO-Revision-Date: 2023-08-04 14:19+0000\n"
 "Last-Translator: Anonymous <noreply@weblate.org>\n"
-"Language-Team: Thai <http://translate.akkoma.dev/projects/akkoma/akkoma-"
-"backend-config-descriptions/th/>\n"
+"Language-Team: Thai <http://translate.akkoma.dev/projects/akkoma/"
+"akkoma-backend-config-descriptions/th/>\n"
 "Language: th\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=UTF-8\n"
@@ -3325,6 +3325,18 @@ msgstr ""
 "If enabled, a name parameter will be added to the URL of the upload. For "
 "example `https://instance.tld/media/imagehash.png?name=realname.png`."
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config description at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+msgstr ""
+"Proxy requests to the remote uploader.\n"
+"\n"
+"Useful if media upload endpoint is not internet accessible.\n"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config description at :pleroma-Pleroma.Upload > :uploader"
@@ -5815,6 +5827,12 @@ msgctxt "config label at :pleroma-Pleroma.Upload > :link_name"
 msgid "Link name"
 msgstr "Link name"
 
+#: lib/pleroma/docs/translator.ex:5
+#, fuzzy
+msgctxt "config label at :pleroma-Pleroma.Upload > :proxy_remote"
+msgid "Proxy remote"
+msgstr "Proxy remote"
+
 #: lib/pleroma/docs/translator.ex:5
 #, fuzzy
 msgctxt "config label at :pleroma-Pleroma.Upload > :uploader"
(Further file diffs suppressed because they are too large.)
@@ -1,10 +1,12 @@
 defmodule Pleroma.Repo.Migrations.AddContextIndex do
   use Ecto.Migration
+  @disable_ddl_transaction true
 
   def change do
     create(
       index(:activities, ["(data->>'type')", "(data->>'context')"],
-        name: :activities_context_index
+        name: :activities_context_index,
+        concurrently: true
       )
     )
   end
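Every migration hunk that follows applies the same recipe: PostgreSQL refuses to run CREATE INDEX CONCURRENTLY inside a transaction block, so each migration first opts out of Ecto's wrapping DDL transaction via `@disable_ddl_transaction true` and only then passes `concurrently: true` to `index/3`. A minimal sketch of the pattern (the module name and index are illustrative; recent ecto_sql releases also recommend `@disable_migration_lock true` alongside it):

```elixir
defmodule MyApp.Repo.Migrations.AddExampleIndexConcurrently do
  use Ecto.Migration

  # CREATE INDEX CONCURRENTLY cannot run inside a transaction block,
  # so the per-migration transaction (and the migration lock) are disabled.
  @disable_ddl_transaction true
  @disable_migration_lock true

  def change do
    create(index(:activities, [:actor], concurrently: true))
  end
end
```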
@@ -1,9 +1,11 @@
 defmodule Pleroma.Repo.Migrations.AddFTSIndexToActivities do
   use Ecto.Migration
+  @disable_ddl_transaction true
 
   def change do
     create(
       index(:activities, ["(to_tsvector('english', data->'object'->>'content'))"],
+        concurrently: true,
         using: :gin,
         name: :activities_fts
       )
@@ -1,9 +1,12 @@
 defmodule Pleroma.Repo.Migrations.AddTagIndex do
   use Ecto.Migration
 
+  @disable_ddl_transaction true
+
   def change do
     create(
       index(:activities, ["(data #> '{\"object\",\"tag\"}')"],
+        concurrently: true,
         using: :gin,
         name: :activities_tags
       )
@@ -1,6 +1,8 @@
 defmodule Pleroma.Repo.Migrations.AddSecondObjectIndexToActivty do
   use Ecto.Migration
 
+  @disable_ddl_transaction true
+
   def change do
     drop_if_exists(
       index(:activities, ["(data->'object'->>'id')", "(data->>'type')"],
@@ -10,7 +12,8 @@ defmodule Pleroma.Repo.Migrations.AddSecondObjectIndexToActivty do
 
     create(
       index(:activities, ["(coalesce(data->'object'->>'id', data->>'object'))"],
-        name: :activities_create_objects_index
+        name: :activities_create_objects_index,
+        concurrently: true
       )
     )
   end
@@ -1,7 +1,14 @@
 defmodule Pleroma.Repo.Migrations.AddObjectActorIndex do
   use Ecto.Migration
 
+  @disable_ddl_transaction true
+
   def change do
-    create(index(:objects, ["(data->>'actor')", "(data->>'type')"], name: :objects_actor_type))
+    create(
+      index(:objects, ["(data->>'actor')", "(data->>'type')"],
+        concurrently: true,
+        name: :objects_actor_type
+      )
+    )
   end
 end
@@ -1,12 +1,14 @@
 defmodule Pleroma.Repo.Migrations.AddActorToActivity do
   use Ecto.Migration
 
+  @disable_ddl_transaction true
+
   def up do
     alter table(:activities) do
       add(:actor, :string)
     end
 
-    create(index(:activities, [:actor, "id DESC NULLS LAST"]))
+    create(index(:activities, [:actor, "id DESC NULLS LAST"], concurrently: true))
   end
 
   def down do
@@ -1,7 +1,8 @@
 defmodule Pleroma.Repo.Migrations.AddSortIndexToActivities do
   use Ecto.Migration
+  @disable_ddl_transaction true
 
   def change do
-    create(index(:activities, ["id desc nulls last"]))
+    create(index(:activities, ["id desc nulls last"], concurrently: true))
   end
 end
@@ -1,8 +1,9 @@
 defmodule Pleroma.Repo.Migrations.AddFollowerAddressIndexToUsers do
   use Ecto.Migration
 
+  @disable_ddl_transaction true
   def change do
-    create(index(:users, [:follower_address]))
-    create(index(:users, [:following], using: :gin))
+    create(index(:users, [:follower_address], concurrently: true))
+    create(index(:users, [:following], concurrently: true, using: :gin))
   end
 end
@@ -1,8 +1,9 @@
 defmodule Pleroma.Repo.Migrations.ModifyActivityIndex do
   use Ecto.Migration
+  @disable_ddl_transaction true
 
   def change do
-    create(index(:activities, ["id desc nulls last", "local"]))
+    create(index(:activities, ["id desc nulls last", "local"], concurrently: true))
     drop_if_exists(index(:activities, ["id desc nulls last"]))
   end
 end
@@ -1,7 +1,13 @@
 defmodule Pleroma.Repo.Migrations.CreateApidHostExtractionIndex do
   use Ecto.Migration
+  @disable_ddl_transaction true
 
   def change do
-    create(index(:activities, ["(split_part(actor, '/', 3))"], name: :activities_hosts))
+    create(
+      index(:activities, ["(split_part(actor, '/', 3))"],
+        concurrently: true,
+        name: :activities_hosts
+      )
+    )
   end
 end
@@ -1,7 +1,13 @@
 defmodule Pleroma.Repo.Migrations.CreateActivitiesInReplyToIndex do
   use Ecto.Migration
+  @disable_ddl_transaction true
 
   def change do
-    create(index(:activities, ["(data->'object'->>'inReplyTo')"], name: :activities_in_reply_to))
+    create(
+      index(:activities, ["(data->'object'->>'inReplyTo')"],
+        concurrently: true,
+        name: :activities_in_reply_to
+      )
+    )
   end
 end
@@ -1,5 +1,6 @@
 defmodule Pleroma.Repo.Migrations.AddVisibilityFunction do
   use Ecto.Migration
+  @disable_ddl_transaction true
 
   def up do
     definition = """
@@ -29,7 +30,8 @@ defmodule Pleroma.Repo.Migrations.AddVisibilityFunction do
 
     create(
       index(:activities, ["activity_visibility(actor, recipients, data)"],
-        name: :activities_visibility_index
+        name: :activities_visibility_index,
+        concurrently: true
      )
    )
  end
Some files were not shown because too many files have changed in this diff.