Browse Source

Added the ability to bundle arbitrary patches. Current patches include a backport from master that fixes export issues on 3.1.1 for accounts with a larger export size. TODO: add a way of automatically excluding patches for specific versions.

master
root 1 month ago
parent
commit
c43ccdb654
4 changed files with 192 additions and 4 deletions
  1. +11
    -0
      deploy/3.1.1-backport.patch
  2. +7
    -3
      deploy/Dockerfile
  3. +1
    -1
      generate-patch.sh
  4. +173
    -0
      overlay-patches/app/services/backup_service.rb

+ 11
- 0
deploy/3.1.1-backport.patch View File

@@ -0,0 +1,11 @@
--- mastodon/app/services/backup_service.rb
+++ new/app/services/backup_service.rb
@@ -66,6 +66,8 @@
def dump_media_attachments!(tar)
MediaAttachment.attached.where(account: account).reorder(nil).find_in_batches do |media_attachments|
media_attachments.each do |m|
+ next unless m.file&.path
+
download_to_tar(tar, m.file, m.file.path)
end

+ 7
- 3
deploy/Dockerfile View File

@@ -1,9 +1,10 @@
ARG MASTODON_VERSION
FROM tootsuite/mastodon:v$MASTODON_VERSION

COPY queer.party.patch /
COPY pressstart2p /mastodon/app/javascript/fonts/pressstart2p
USER root
RUN mkdir /patches
COPY *.patch /patches/
COPY pressstart2p /mastodon/app/javascript/fonts/pressstart2p
#Seems like in v2.8.0 the official docker image no longer includes `patch`.
RUN test -x /usr/bin/patch || sh -c 'mkdir -p /var/lib/apt/lists/partial && \
mkdir -p /var/cache/apt/archives/partial && \
@@ -12,5 +13,8 @@ RUN test -x /usr/bin/patch || sh -c 'mkdir -p /var/lib/apt/lists/partial && \
apt-get clean -y && \
rm -rf /var/cache/apt/archives /var/lib/apt/lists'
USER mastodon
RUN cd /mastodon && /usr/bin/patch -p1 < /queer.party.patch
RUN cd /mastodon && sh -c 'for patch in `ls -ad /patches/*`;do echo "Applying patch $patch";/usr/bin/patch -p1 < $patch;done'
USER root
RUN rm -rf /patches
USER mastodon
RUN OTP_SECRET=precompile_placeholder SECRET_KEY_BASE=precompile_placeholder bundle exec rails assets:precompile

+ 1
- 1
generate-patch.sh View File

@@ -22,4 +22,4 @@ diff -ruN untainted tainted > deploy/queer.party.patch
rm -rf untainted tainted modifications
docker build --build-arg MASTODON_VERSION=$MASTODON_VERSION -t maffsie/qp-mastodon:latest -t maffsie/qp-mastodon:v$MASTODON_VERSION deploy/
rm -rf deploy/queer.party.patch deploy/pressstart2p
if [ -f ~/.docker/config.json ]; then docker push maffsie/qp-mastodon:v$MASTODON_VERSION;docker push maffsie/qp-mastodon:latest; fi
if [ "x$NOPUSH" == "x" -a -f ~/.docker/config.json ]; then docker push maffsie/qp-mastodon:v$MASTODON_VERSION;docker push maffsie/qp-mastodon:latest; fi

+ 173
- 0
overlay-patches/app/services/backup_service.rb View File

@@ -0,0 +1,173 @@
# frozen_string_literal: true

require 'rubygems/package'

# Builds a downloadable ActivityPub archive (tar.gz) of an account's data:
# statuses (outbox.json), media attachments, likes, bookmarks, and the actor
# document itself. Attaches the finished archive to the given Backup record.
class BackupService < BaseService
  include Payloadable

  attr_reader :account, :backup, :collection

  # Entry point.
  #
  # @param backup [Backup] persisted backup record; its user's account is dumped
  def call(backup)
    @backup  = backup
    @account = backup.user.account

    build_json!
    build_archive!
  end

  private

  # Serializes the outbox collection plus every status as an ActivityPub
  # activity, rewriting media attachment URLs to archive-relative paths
  # (strips the leading "/system/" prefix).
  def build_json!
    @collection = serialize(collection_presenter, ActivityPub::CollectionSerializer)

    account.statuses.with_includes.reorder(nil).find_in_batches do |statuses|
      statuses.each do |status|
        item = serialize_payload(status, ActivityPub::ActivitySerializer, signer: @account)
        item.delete(:'@context')

        unless item[:type] == 'Announce' || item[:object][:attachment].blank?
          item[:object][:attachment].each do |attachment|
            attachment[:url] = Addressable::URI.parse(attachment[:url]).path.gsub(/\A\/system\//, '')
          end
        end

        @collection[:orderedItems] << item
      end

      # Each batch retains a lot of serialized state; collect eagerly so
      # memory stays bounded on accounts with a large export size.
      GC.start
    end
  end

  # Streams every dump into a gzipped tarball, then attaches it to the backup.
  # The tempfile is always closed and unlinked, even if archiving fails.
  def build_archive!
    tmp_file = Tempfile.new(%w(archive .tar.gz))

    File.open(tmp_file, 'wb') do |file|
      Zlib::GzipWriter.wrap(file) do |gz|
        Gem::Package::TarWriter.new(gz) do |tar|
          dump_media_attachments!(tar)
          dump_outbox!(tar)
          dump_likes!(tar)
          dump_bookmarks!(tar)
          dump_actor!(tar)
        end
      end
    end

    archive_filename = ['archive', Time.now.utc.strftime('%Y%m%d%H%M%S'), SecureRandom.hex(16)].join('-') + '.tar.gz'

    @backup.dump      = ActionDispatch::Http::UploadedFile.new(tempfile: tmp_file, filename: archive_filename)
    @backup.processed = true
    @backup.save!
  ensure
    tmp_file.close
    tmp_file.unlink
  end

  # Copies every locally-present media attachment into the tar.
  def dump_media_attachments!(tar)
    MediaAttachment.attached.where(account: account).reorder(nil).find_in_batches do |media_attachments|
      media_attachments.each do |m|
        # Attachments whose file is missing locally (e.g. pruned media) have
        # no path; skip them instead of crashing the whole export.
        next unless m.file&.path

        download_to_tar(tar, m.file, m.file.path)
      end

      GC.start
    end
  end

  # Writes the pre-built outbox collection (see build_json!) as outbox.json.
  def dump_outbox!(tar)
    json = Oj.dump(collection)

    tar.add_file_simple('outbox.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  # Writes actor.json with avatar/header URLs rewritten to archive-relative
  # names, and bundles the avatar/header files themselves when present.
  def dump_actor!(tar)
    actor = serialize(account, ActivityPub::ActorSerializer)

    actor[:icon][:url]  = 'avatar' + File.extname(actor[:icon][:url]) if actor[:icon]
    actor[:image][:url] = 'header' + File.extname(actor[:image][:url]) if actor[:image]
    actor[:outbox]      = 'outbox.json'
    actor[:likes]       = 'likes.json'
    actor[:bookmarks]   = 'bookmarks.json'

    download_to_tar(tar, account.avatar, 'avatar' + File.extname(account.avatar.path)) if account.avatar.exists?
    download_to_tar(tar, account.header, 'header' + File.extname(account.header.path)) if account.header.exists?

    json = Oj.dump(actor)

    tar.add_file_simple('actor.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  # Writes likes.json: an ordered collection of URIs of favourited statuses.
  def dump_likes!(tar)
    collection = serialize(ActivityPub::CollectionPresenter.new(id: 'likes.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)

    Status.reorder(nil).joins(:favourites).includes(:account).merge(account.favourites).find_in_batches do |statuses|
      statuses.each do |status|
        collection[:totalItems] += 1
        collection[:orderedItems] << ActivityPub::TagManager.instance.uri_for(status)
      end

      GC.start
    end

    json = Oj.dump(collection)

    tar.add_file_simple('likes.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  # Writes bookmarks.json: an ordered collection of URIs of bookmarked statuses.
  def dump_bookmarks!(tar)
    collection = serialize(ActivityPub::CollectionPresenter.new(id: 'bookmarks.json', type: :ordered, size: 0, items: []), ActivityPub::CollectionSerializer)

    Status.reorder(nil).joins(:bookmarks).includes(:account).merge(account.bookmarks).find_in_batches do |statuses|
      statuses.each do |status|
        collection[:totalItems] += 1
        collection[:orderedItems] << ActivityPub::TagManager.instance.uri_for(status)
      end

      GC.start
    end

    json = Oj.dump(collection)

    tar.add_file_simple('bookmarks.json', 0o444, json.bytesize) do |io|
      io.write(json)
    end
  end

  # Presenter for the (initially empty) outbox collection; items are appended
  # batch by batch in build_json!.
  def collection_presenter
    ActivityPub::CollectionPresenter.new(
      id: 'outbox.json',
      type: :ordered,
      size: account.statuses_count,
      items: []
    )
  end

  # Serializes +object+ with the given ActivityPub serializer to a plain Hash.
  def serialize(object, serializer)
    ActiveModelSerializers::SerializableResource.new(
      object,
      serializer: serializer,
      adapter: ActivityPub::Adapter
    ).as_json
  end

  CHUNK_SIZE = 1.megabyte

  # Streams a Paperclip attachment into the tar under +filename+ in
  # CHUNK_SIZE pieces. A file missing on disk (Errno::ENOENT) or an S3
  # networking error is logged and skipped so one bad file does not abort
  # the whole export.
  def download_to_tar(tar, attachment, filename)
    adapter = Paperclip.io_adapters.for(attachment)

    tar.add_file_simple(filename, 0o444, adapter.size) do |io|
      while (buffer = adapter.read(CHUNK_SIZE))
        io.write(buffer)
      end
    end
  rescue Errno::ENOENT, Seahorse::Client::NetworkingError
    # Fixed: restore the filename interpolation (the string had been garbled
    # into the literal text "#(unknown)", losing the filename from the log).
    Rails.logger.warn "Could not backup file #{filename}: file not found"
  end
end

Loading…
Cancel
Save