2016-11-15 16:56:29 +01:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-02-24 12:57:29 +01:00
|
|
|
class ProcessFeedService < BaseService
|
2016-02-20 22:53:20 +01:00
|
|
|
# Processes a pushed/fetched Atom feed on behalf of a remote account:
# refreshes the author profile asynchronously, then handles every entry.
#
# @param body [String] raw Atom XML
# @param account [Account] remote account the feed belongs to
# @return [Array] results of processing each entry (see #process_entries)
def call(body, account)
  document = Nokogiri::XML(body)
  document.encoding = 'utf-8'

  update_author(body, account)
  process_entries(document, account)
end
|
2016-02-20 22:53:20 +01:00
|
|
|
|
2016-03-25 02:13:30 +01:00
|
|
|
private
|
2016-02-28 14:26:26 +01:00
|
|
|
|
2017-04-08 13:26:03 +02:00
|
|
|
# Queues a background refresh of the remote account's profile from the
# feed payload. The body is coerced to UTF-8 before being handed off.
def update_author(body, account)
  utf8_body = body.force_encoding('UTF-8')
  RemoteProfileUpdateWorker.perform_async(account.id, utf8_body, true)
end
|
2016-02-24 01:28:53 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Processes every <entry> in the feed, oldest-first (reverse document
# order), so that later entries can reference earlier ones.
#
# @return [Array] non-nil results of ProcessEntry#call for each entry
def process_entries(xml, account)
  entries = xml.xpath('//xmlns:entry', xmlns: TagManager::XMLNS)
  entries.reverse_each.map { |entry| ProcessEntry.new.call(entry, account) }.compact
end
|
2016-03-16 10:46:15 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
class ProcessEntry
|
2017-05-03 17:02:18 +02:00
|
|
|
include AuthorExtractor
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Processes a single Atom entry: creates a status for post/share verbs,
# removes one for delete verbs, and skips anything unsupported.
#
# @param xml [Nokogiri::XML::Node] the <entry> node
# @param account [Account] fallback author when the entry embeds none
# @return [Status, nil]
def call(xml, account)
  @xml     = xml
  @account = account

  return if skip_unsupported_type?

  if %i(post share).include?(verb)
    create_status
  elsif verb == :delete
    delete_status
  end
rescue ActiveRecord::RecordInvalid => e
  # Validation failures are logged and swallowed so one bad entry does
  # not abort processing of the rest of the feed.
  Rails.logger.debug "Nothing was saved for #{id} because: #{e}"
  nil
end
|
2016-02-25 00:17:01 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
private
|
2016-09-09 20:04:34 +02:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
def create_status
|
2017-05-04 04:34:57 +02:00
|
|
|
if redis.exists("delete_upon_arrival:#{id}")
|
|
|
|
Rails.logger.debug "Delete for status #{id} was queued, ignoring"
|
|
|
|
return
|
|
|
|
end
|
|
|
|
|
2017-05-22 19:35:48 +02:00
|
|
|
status, just_created = nil
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
Rails.logger.debug "Creating remote status #{id}"
|
2016-11-05 15:20:05 +01:00
|
|
|
|
2017-06-07 12:28:16 +02:00
|
|
|
if verb == :share
|
|
|
|
original_status = shared_status_from_xml(@xml.at_xpath('.//activity:object', activity: TagManager::AS_XMLNS))
|
|
|
|
return nil if original_status.nil?
|
|
|
|
end
|
|
|
|
|
2017-05-22 19:35:48 +02:00
|
|
|
ApplicationRecord.transaction do
|
|
|
|
status, just_created = status_from_xml(@xml)
|
|
|
|
|
|
|
|
return if status.nil?
|
|
|
|
return status unless just_created
|
2017-01-20 18:31:49 +01:00
|
|
|
|
2017-05-22 19:35:48 +02:00
|
|
|
if verb == :share
|
2017-06-07 12:28:16 +02:00
|
|
|
if original_status.reblog?
|
2017-05-22 19:35:48 +02:00
|
|
|
status.reblog = original_status.reblog
|
2017-06-07 12:28:16 +02:00
|
|
|
else
|
|
|
|
status.reblog = original_status
|
2017-05-22 19:35:48 +02:00
|
|
|
end
|
2016-11-08 19:37:08 +01:00
|
|
|
end
|
2016-09-09 20:04:34 +02:00
|
|
|
|
2017-06-06 00:09:14 +02:00
|
|
|
status.thread = find_status(thread(@xml).first) if thread?(@xml)
|
|
|
|
|
2017-05-22 19:35:48 +02:00
|
|
|
status.save!
|
|
|
|
end
|
2016-11-21 09:56:01 +01:00
|
|
|
|
2017-06-06 00:09:14 +02:00
|
|
|
if thread?(@xml) && status.thread.nil?
|
|
|
|
Rails.logger.debug "Trying to attach #{status.id} (#{id(@xml)}) to #{thread(@xml).first}"
|
|
|
|
ThreadResolveWorker.perform_async(status.id, thread(@xml).second)
|
|
|
|
end
|
|
|
|
|
2017-03-13 16:34:15 +01:00
|
|
|
notify_about_mentions!(status) unless status.reblog?
|
|
|
|
notify_about_reblog!(status) if status.reblog? && status.reblog.account.local?
|
2017-05-17 00:41:15 +02:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
Rails.logger.debug "Queuing remote status #{status.id} (#{id}) for distribution"
|
2017-05-17 00:41:15 +02:00
|
|
|
|
2017-05-31 20:38:17 +02:00
|
|
|
LinkCrawlWorker.perform_async(status.id) unless status.spoiler_text?
|
2016-03-25 03:22:26 +01:00
|
|
|
DistributionWorker.perform_async(status.id)
|
2017-05-17 00:41:15 +02:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
status
|
2016-03-25 02:13:30 +01:00
|
|
|
end
|
2016-02-28 21:22:56 +01:00
|
|
|
|
2017-03-13 16:34:15 +01:00
|
|
|
# Delivers a notification to every local account mentioned by the status.
# Remote mentionees are skipped — their own server notifies them.
def notify_about_mentions!(status)
  status.mentions.includes(:account).each do |mention|
    recipient = mention.account
    NotifyService.new.call(recipient, mention) if recipient.local?
  end
end
|
|
|
|
|
|
|
|
# Notifies the author of the original status that it has been reblogged.
def notify_about_reblog!(status)
  reblogged_author = status.reblog.account
  NotifyService.new.call(reblogged_author, status)
end
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Removes the local copy of a remote status. If the status has not
# arrived yet, remembers the deletion for six hours so a late-arriving
# create is ignored (checked in #create_status). Always returns nil.
def delete_status
  Rails.logger.debug "Deleting remote status #{id}"

  status = Status.find_by(uri: id)

  if status
    RemoveStatusService.new.call(status)
  else
    # Status unknown (delete arrived before create) — leave a tombstone.
    redis.setex("delete_upon_arrival:#{id}", 6 * 3_600, id)
  end

  nil
end
|
2016-10-14 20:14:53 +02:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# True when the entry's verb/object-type combination is one we cannot
# handle, in which case the entry is ignored entirely.
def skip_unsupported_type?
  supported_verb = %i(post share delete).include?(verb)
  supported_type = %i(activity note comment).include?(type)

  !(supported_verb && supported_type)
end
|
2016-02-28 21:22:56 +01:00
|
|
|
|
2017-04-27 17:06:47 +02:00
|
|
|
# Resolves the status being shared (reblogged): uses the local copy when
# one exists, otherwise fetches it from its origin.
#
# @param entry [Nokogiri::XML::Node] the activity:object node
# @return [Status, nil]
def shared_status_from_xml(entry)
  known_status = find_status(id(entry))

  known_status || FetchRemoteStatusService.new.call(url(entry))
end
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Builds a Status record from an entry node.
#
# @param entry [Nokogiri::XML::Node]
# @return [Array(Status|nil, Boolean)] the status (or nil) and whether it
#   was newly created by this call
def status_from_xml(entry)
  # Return early if status already exists in db
  status = find_status(id(entry))

  return [status, false] unless status.nil?

  # If status embeds an author, find that author
  # If that author cannot be found, don't record the status (do not misattribute)
  if account?(entry)
    begin
      account = author_from_xml(entry)
      return [nil, false] if account.nil?
    rescue Goldfinger::Error
      # Webfinger lookup of the embedded author failed — drop the status.
      return [nil, false]
    end
  else
    # No embedded author: attribute the status to the feed's account.
    account = @account
  end

  return [nil, false] if account.suspended?

  status = Status.create!(
    uri: id(entry),
    url: url(entry),
    account: account,
    text: content(entry),
    spoiler_text: content_warning(entry),
    created_at: published(entry),
    reply: thread?(entry),
    language: content_language(entry),
    visibility: visibility_scope(entry),
    conversation: find_or_create_conversation(entry)
  )

  # Attach associated records parsed from the same entry.
  mentions_from_xml(status, entry)
  hashtags_from_xml(status, entry)
  media_from_xml(status, entry)

  [status, true]
end
|
2016-02-28 21:22:56 +01:00
|
|
|
|
2017-05-12 19:09:21 +02:00
|
|
|
# Looks up the conversation referenced by the entry's
# ostatus:conversation element, resolving local unique tags to ids.
# Returns nil when the entry carries no conversation reference.
def find_or_create_conversation(xml)
  uri = xml.at_xpath('./ostatus:conversation', ostatus: TagManager::OS_XMLNS)&.attribute('ref')&.content
  return if uri.nil?

  unless TagManager.instance.local_id?(uri)
    return Conversation.find_by(uri: uri)
  end

  local_id = TagManager.instance.unique_tag_to_local_id(uri, 'Conversation')
  Conversation.find_by(id: local_id)
end
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Finds a status by URI, translating local unique tags into database ids.
#
# @return [Status, nil]
def find_status(uri)
  tag_manager = TagManager.instance

  if tag_manager.local_id?(uri)
    Status.find_by(id: tag_manager.unique_tag_to_local_id(uri, 'Status'))
  else
    Status.find_by(uri: uri)
  end
end
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Creates Mention records for every account referenced by a
# rel="mentioned" link of the entry, de-duplicating repeated mentions.
def mentions_from_xml(parent, xml)
  ignored_types = [TagManager::TYPES[:group], TagManager::TYPES[:collection]]
  seen_account_ids = []

  xml.xpath('./xmlns:link[@rel="mentioned"]', xmlns: TagManager::XMLNS).each do |link|
    # Group/collection mentions are not personal mentions — skip them.
    next if ignored_types.include?(link['ostatus:object-type'])

    mentioned_account = account_from_href(link['href'])
    next if mentioned_account.nil? || seen_account_ids.include?(mentioned_account.id)

    mentioned_account.mentions.where(status: parent).first_or_create(status: parent)

    # Remember the account so duplicate mentions are skipped.
    seen_account_ids << mentioned_account.id
  end
end
|
2016-02-24 01:28:53 +01:00
|
|
|
|
2017-04-16 18:01:48 +02:00
|
|
|
# Resolves an account from a mention href: a local account when the URL
# points at this instance, otherwise a known remote account (matched by
# uri or url) or a freshly fetched one.
#
# @return [Account, nil]
def account_from_href(href)
  normalized = Addressable::URI.parse(href).normalize

  unless TagManager.instance.web_domain?(normalized.host)
    return Account.where(uri: href).or(Account.where(url: href)).first || FetchRemoteAccountService.new.call(href)
  end

  Account.find_local(normalized.path.gsub('/users/', ''))
end
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Extracts hashtag terms from the entry's <category> elements and hands
# them to ProcessHashtagsService for association with the status.
def hashtags_from_xml(parent, xml)
  terms = xml.xpath('./xmlns:category', xmlns: TagManager::XMLNS)
             .map { |category| category['term'] }
             .select(&:present?)

  ProcessHashtagsService.new.call(parent, terms)
end
|
2016-02-20 22:53:20 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Creates MediaAttachment records for each rel="enclosure" link of the
# entry and queues download of the remote files, unless the origin
# domain is blocked with reject_media.
#
# @param parent [Status] status the attachments belong to
# @param xml [Nokogiri::XML::Node] the entry node
def media_from_xml(parent, xml)
  do_not_download = DomainBlock.find_by(domain: parent.account.domain)&.reject_media?

  xml.xpath('./xmlns:link[@rel="enclosure"]', xmlns: TagManager::XMLNS).each do |link|
    next unless link['href']

    media = MediaAttachment.where(status: parent, remote_url: link['href']).first_or_initialize(account: parent.account, status: parent, remote_url: link['href'])

    begin
      parsed_url = Addressable::URI.parse(link['href']).normalize
    rescue Addressable::URI::InvalidURIError
      # A malformed enclosure URL must not abort processing of the entry.
      next
    end

    # `host` is nil for relative or scheme-only URLs; the previous
    # `parsed_url.host.empty?` raised NoMethodError in that case.
    next if !%w(http https).include?(parsed_url.scheme) || parsed_url.host.blank?

    media.save

    next if do_not_download

    begin
      media.file_remote_url = link['href']
      media.save!
    rescue ActiveRecord::RecordInvalid
      # Attachment failed validation (e.g. unsupported type) — skip it.
      next
    end
  end
end
|
2016-02-24 17:23:59 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# The entry's Atom id (used as the status URI).
def id(xml = @xml)
  id_node = xml.at_xpath('./xmlns:id', xmlns: TagManager::XMLNS)
  id_node.content
end
|
2016-02-24 01:28:53 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# The ActivityStreams verb of the entry as a symbol, defaulting to
# :post when the element is absent or unrecognized.
def verb(xml = @xml)
  verb_uri = xml.at_xpath('./activity:verb', activity: TagManager::AS_XMLNS).content
  TagManager::VERBS.key(verb_uri)
rescue
  # Missing element raises NoMethodError on nil — treat as a plain post.
  :post
end
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# The ActivityStreams object-type of the entry as a symbol, defaulting
# to :activity when the element is absent or unrecognized.
def type(xml = @xml)
  type_uri = xml.at_xpath('./activity:object-type', activity: TagManager::AS_XMLNS).content
  TagManager::TYPES.key(type_uri)
rescue
  # Missing element raises NoMethodError on nil — fall back to :activity.
  :activity
end
|
2016-02-24 01:28:53 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# The entry's alternate (HTML) URL, or nil when none is given.
def url(xml = @xml)
  alternate = xml.at_xpath('./xmlns:link[@rel="alternate"]', xmlns: TagManager::XMLNS)
  alternate['href'] unless alternate.nil?
end
|
2016-02-28 21:22:56 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# The entry's content (status text).
def content(xml = @xml)
  content_node = xml.at_xpath('./xmlns:content', xmlns: TagManager::XMLNS)
  content_node.content
end
|
2016-02-24 17:23:59 +01:00
|
|
|
|
2017-04-16 20:32:17 +02:00
|
|
|
# The xml:lang of the entry's content element, defaulting to 'en' when
# the attribute is blank or absent.
def content_language(xml = @xml)
  # Safe-navigate the node itself too: a missing <content> element would
  # otherwise raise NoMethodError on nil (cf. #content_warning).
  xml.at_xpath('./xmlns:content', xmlns: TagManager::XMLNS)&.[]('xml:lang')&.presence || 'en'
end
|
|
|
|
|
2017-01-25 00:49:08 +01:00
|
|
|
# The entry's summary element, used as the spoiler/content warning.
# Returns an empty string when no summary is present.
def content_warning(xml = @xml)
  summary_node = xml.at_xpath('./xmlns:summary', xmlns: TagManager::XMLNS)
  summary_node&.content || ''
end
|
|
|
|
|
2017-02-11 15:10:22 +01:00
|
|
|
# The Mastodon-specific visibility scope of the entry as a symbol,
# defaulting to :public when the element is absent.
def visibility_scope(xml = @xml)
  scope_node = xml.at_xpath('./mastodon:scope', mastodon: TagManager::MTDN_XMLNS)
  scope_node&.content&.to_sym || :public
end
|
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# The entry's published timestamp (raw string content).
def published(xml = @xml)
  published_node = xml.at_xpath('./xmlns:published', xmlns: TagManager::XMLNS)
  published_node.content
end
|
2016-02-24 17:23:59 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Whether the entry is a reply (carries a thr:in-reply-to element).
def thread?(xml = @xml)
  in_reply_to = xml.at_xpath('./thr:in-reply-to', thr: TagManager::THR_XMLNS)
  !in_reply_to.nil?
end
|
2016-02-24 01:28:53 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# The [ref, href] pair of the entry's thr:in-reply-to element.
# NOTE(review): assumes the element is present — callers must check
# #thread? first, otherwise this raises NoMethodError.
def thread(xml = @xml)
  in_reply_to = xml.at_xpath('./thr:in-reply-to', thr: TagManager::THR_XMLNS)
  [in_reply_to['ref'], in_reply_to['href']]
end
|
2016-02-24 03:05:40 +01:00
|
|
|
|
2016-11-08 01:32:34 +01:00
|
|
|
# Whether the entry embeds its own <author> element.
def account?(xml = @xml)
  author_node = xml.at_xpath('./xmlns:author', xmlns: TagManager::XMLNS)
  !author_node.nil?
end
|
2017-05-04 04:34:57 +02:00
|
|
|
|
|
|
|
# Shared Redis connection, used for delete-upon-arrival bookkeeping.
def redis
  Redis.current
end
|
2016-09-20 00:39:03 +02:00
|
|
|
end
|
2016-02-20 22:53:20 +01:00
|
|
|
end
|