# frozen_string_literal: true

require 'csv'
# NOTE: This is a deprecated service, only kept to not break ongoing imports
# on upgrade. See `BulkImportService` for its replacement.

class ImportService < BaseService
  # Hard cap on the number of CSV rows processed per import, to bound work.
  ROWS_PROCESSING_LIMIT = 20_000

  # Entry point: dispatches the given import record to the handler matching
  # its type. Unknown types are silently ignored.
  #
  # @param import [Import] the persisted import to process
  def call(import)
    @import  = import
    @account = @import.account

    case @import.type
    when 'following'
      import_follows!
    when 'blocking'
      import_blocks!
    when 'muting'
      import_mutes!
    when 'domain_blocking'
      import_domain_blocks!
    when 'bookmarks'
      import_bookmarks!
    end
  end

  private

  def import_follows!
    parse_import_data!(['Account address'])
    import_relationships!('follow', 'unfollow', @account.following, ROWS_PROCESSING_LIMIT, reblogs: { header: 'Show boosts', default: true }, notify: { header: 'Notify on new posts', default: false }, languages: { header: 'Languages', default: nil })
  end

  def import_blocks!
    parse_import_data!(['Account address'])
    import_relationships!('block', 'unblock', @account.blocking, ROWS_PROCESSING_LIMIT)
  end

  def import_mutes!
    parse_import_data!(['Account address'])
    import_relationships!('mute', 'unmute', @account.muting, ROWS_PROCESSING_LIMIT, notifications: { header: 'Hide notifications', default: true })
  end

  def import_domain_blocks!
    parse_import_data!(['#domain'])

    items = @data.take(ROWS_PROCESSING_LIMIT).map { |row| row['#domain'].strip }

    if @import.overwrite?
      presence_hash = items.index_with(true)

      @account.domain_blocks.find_each do |domain_block|
        if presence_hash[domain_block.domain]
          # Already blocked: no need to block it again below.
          items.delete(domain_block.domain)
        else
          # Not present in the import: overwrite semantics mean it gets unblocked.
          @account.unblock_domain!(domain_block.domain)
        end
      end
    end

    items.each do |domain|
      @account.block_domain!(domain)
    end

    AfterAccountDomainBlockWorker.push_bulk(items) do |domain|
      [@account.id, domain]
    end
  end

  # Shared implementation for follow/block/mute imports.
  #
  # @param action [String] worker action for accounts present in the import
  # @param undo_action [String] worker action for existing relationships
  #   absent from the import (only used when overwriting)
  # @param overwrite_scope [ActiveRecord::Relation] the account's existing
  #   relationships of this kind
  # @param limit [Integer] maximum number of CSV rows to process
  # @param extra_fields [Hash] per-relationship options; maps an option key
  #   to { header:, default: } describing the CSV column and fallback value
  def import_relationships!(action, undo_action, overwrite_scope, limit, extra_fields = {})
    local_domain_suffix = "@#{Rails.configuration.x.local_domain}"
    items = @data.take(limit).map { |row| [row['Account address']&.strip&.delete_suffix(local_domain_suffix), extra_fields.to_h { |key, field_settings| [key, row[field_settings[:header]]&.strip || field_settings[:default]] }] }.reject { |(id, _)| id.blank? }

    if @import.overwrite?
      presence_hash = items.each_with_object({}) { |(id, extra), mapping| mapping[id] = [true, extra] }

      overwrite_scope.find_each do |target_account|
        if presence_hash[target_account.acct]
          # FIX: `items` holds [acct, extra] pairs, so the previous
          # `items.delete(target_account.acct)` compared a String against
          # pairs and never removed anything — the relationship was enqueued
          # a second time by the push_bulk below.
          items.delete_if { |(id, _)| id == target_account.acct }
          extra = presence_hash[target_account.acct][1]
          Import::RelationshipWorker.perform_async(@account.id, target_account.acct, action, extra.stringify_keys)
        else
          # Existing relationship not present in the import: undo it.
          Import::RelationshipWorker.perform_async(@account.id, target_account.acct, undo_action)
        end
      end
    end

    # Enqueue one account per remote domain first, so a slow or unresponsive
    # domain does not head-of-line-block the rest of the import.
    head_items = items.uniq { |acct, _| acct.split('@')[1] }
    tail_items = items - head_items

    Import::RelationshipWorker.push_bulk(head_items + tail_items) do |acct, extra|
      [@account.id, acct, action, extra.stringify_keys]
    end
  end

  def import_bookmarks!
    parse_import_data!(['#uri'])

    items = @data.take(ROWS_PROCESSING_LIMIT).map { |row| row['#uri'].strip }

    if @import.overwrite?
      presence_hash = items.index_with(true)

      @account.bookmarks.find_each do |bookmark|
        if presence_hash[bookmark.status.uri]
          # Already bookmarked: no need to re-create it below.
          items.delete(bookmark.status.uri)
        else
          bookmark.destroy!
        end
      end
    end

    # Resolve each URI to a Status, fetching remote ones on demand; drop URIs
    # that cannot be resolved. Network failures skip the row, and any other
    # error is logged and skipped so one bad row cannot abort the import.
    statuses = items.filter_map do |uri|
      status = ActivityPub::TagManager.instance.uri_to_resource(uri, Status)
      next if status.nil? && ActivityPub::TagManager.instance.local_uri?(uri)

      status || ActivityPub::FetchRemoteStatusService.new.call(uri)
    rescue HTTP::Error, OpenSSL::SSL::SSLError, Mastodon::UnexpectedResponseError
      nil
    rescue => e
      Rails.logger.warn "Unexpected error when importing bookmark: #{e}"
      nil
    end

    account_ids         = statuses.map(&:account_id)
    preloaded_relations = @account.relations_map(account_ids, skip_blocking_and_muting: true)

    # Only keep statuses the importing account is actually allowed to see.
    statuses.keep_if { |status| StatusPolicy.new(@account, status, preloaded_relations).show? }

    statuses.each do |status|
      @account.bookmarks.find_or_create_by!(account: @account, status: status)
    end
  end

  def parse_import_data!(default_headers)
    data = CSV.parse(import_data, headers: true)
    # Heuristic: if the first header cell contains no space, assume the file
    # has no header row and re-parse using the expected positional headers.
    data = CSV.parse(import_data, headers: default_headers) unless data.headers&.first&.strip&.include?(' ')
    @data = data.compact_blank
  end

  def import_data
    # Read the uploaded attachment and tag the raw bytes as UTF-8.
    Paperclip.io_adapters.for(@import.data).read.force_encoding(Encoding::UTF_8)
  end
end