diff --git a/app/javascript/mastodon/features/ui/components/compose_panel.js b/app/javascript/mastodon/features/ui/components/compose_panel.js
index 6cb352322414a3..05fe1892df2765 100644
--- a/app/javascript/mastodon/features/ui/components/compose_panel.js
+++ b/app/javascript/mastodon/features/ui/components/compose_panel.js
@@ -4,6 +4,7 @@ import PropTypes from 'prop-types';
import SearchContainer from 'mastodon/features/compose/containers/search_container';
import ComposeFormContainer from 'mastodon/features/compose/containers/compose_form_container';
import NavigationContainer from 'mastodon/features/compose/containers/navigation_container';
+import ModsAnnouncements from 'mastodon/features/compose/components/mods_announcements';
import LinkFooter from './link_footer';
import ServerBanner from 'mastodon/components/server_banner';
import { changeComposing, mountCompose, unmountCompose } from 'mastodon/actions/compose';
@@ -57,6 +58,7 @@ class ComposePanel extends React.PureComponent {
+        <ModsAnnouncements />
)}
diff --git a/app/javascript/mastodon/features/ui/components/header.js b/app/javascript/mastodon/features/ui/components/header.js
index 1384bebda0e1a8..3735346f83381a 100644
--- a/app/javascript/mastodon/features/ui/components/header.js
+++ b/app/javascript/mastodon/features/ui/components/header.js
@@ -4,6 +4,7 @@ import { Link, withRouter } from 'react-router-dom';
import { FormattedMessage } from 'react-intl';
import { registrationsOpen, me } from 'mastodon/initial_state';
import Avatar from 'mastodon/components/avatar';
+import Permalink from 'mastodon/components/permalink';
import PropTypes from 'prop-types';
import { connect } from 'react-redux';
import { openModal } from 'mastodon/actions/modal';
@@ -11,9 +12,9 @@ import { openModal } from 'mastodon/actions/modal';
const Account = connect(state => ({
account: state.getIn(['accounts', me]),
}))(({ account }) => (
- <Link to={`/@${account.get('acct')}`} title={account.get('acct')}>
+ <Permalink href={account.get('url')} to={`/@${account.get('acct')}`} title={account.get('acct')}>
   <Avatar account={account} size={35} />
- </Link>
+ </Permalink>
));
const mapDispatchToProps = (dispatch) => ({
@@ -22,8 +23,8 @@ const mapDispatchToProps = (dispatch) => ({
},
});
-export default @connect(null, mapDispatchToProps)
-@withRouter
+export default @withRouter
+@connect(null, mapDispatchToProps)
class Header extends React.PureComponent {
static contextTypes = {
diff --git a/app/javascript/mastodon/features/ui/components/navigation_panel.js b/app/javascript/mastodon/features/ui/components/navigation_panel.js
index 9a9309be059a61..755b19349d79e6 100644
--- a/app/javascript/mastodon/features/ui/components/navigation_panel.js
+++ b/app/javascript/mastodon/features/ui/components/navigation_panel.js
@@ -82,8 +82,8 @@ class NavigationPanel extends React.Component {
{signedIn && (
-
+
diff --git a/app/javascript/mastodon/features/ui/index.js b/app/javascript/mastodon/features/ui/index.js
index 4f0ea0450402db..2dd59f95d42b85 100644
--- a/app/javascript/mastodon/features/ui/index.js
+++ b/app/javascript/mastodon/features/ui/index.js
@@ -474,10 +474,10 @@ class UI extends React.PureComponent {
};
handleHotkeyBack = () => {
- if (window.history && window.history.length === 1) {
- this.context.router.history.push('/');
- } else {
+ if (window.history && window.history.state) {
this.context.router.history.goBack();
+ } else {
+ this.context.router.history.push('/');
}
};
diff --git a/app/javascript/mastodon/locales/en.json b/app/javascript/mastodon/locales/en.json
index ae7722635d2930..f4d0519403298a 100644
--- a/app/javascript/mastodon/locales/en.json
+++ b/app/javascript/mastodon/locales/en.json
@@ -162,6 +162,8 @@
"confirmations.discard_edit_media.message": "You have unsaved changes to the media description or preview, discard them anyway?",
"confirmations.domain_block.confirm": "Block entire domain",
"confirmations.domain_block.message": "Are you really, really sure you want to block the entire {domain}? In most cases a few targeted blocks or mutes are sufficient and preferable. You will not see content from that domain in any public timelines or your notifications. Your followers from that domain will be removed.",
+ "confirmations.edit.confirm": "Edit",
+ "confirmations.edit.message": "Editing now will overwrite the message you are currently composing. Are you sure you want to proceed?",
"confirmations.logout.confirm": "Log out",
"confirmations.logout.message": "Are you sure you want to log out?",
"confirmations.mute.confirm": "Mute",
diff --git a/app/javascript/mastodon/reducers/compose.js b/app/javascript/mastodon/reducers/compose.js
index 783d748ae3104c..842b7af5184bb4 100644
--- a/app/javascript/mastodon/reducers/compose.js
+++ b/app/javascript/mastodon/reducers/compose.js
@@ -186,11 +186,12 @@ const ignoreSuggestion = (state, position, token, completion, path) => {
};
const sortHashtagsByUse = (state, tags) => {
- const personalHistory = state.get('tagHistory');
+ const personalHistory = state.get('tagHistory').map(tag => tag.toLowerCase());
- return tags.sort((a, b) => {
- const usedA = personalHistory.includes(a.name);
- const usedB = personalHistory.includes(b.name);
+ const tagsWithLowercase = tags.map(t => ({ ...t, lowerName: t.name.toLowerCase() }));
+ const sorted = tagsWithLowercase.sort((a, b) => {
+ const usedA = personalHistory.includes(a.lowerName);
+ const usedB = personalHistory.includes(b.lowerName);
if (usedA === usedB) {
return 0;
@@ -200,6 +201,8 @@ const sortHashtagsByUse = (state, tags) => {
return 1;
}
});
+ sorted.forEach(tag => delete tag.lowerName);
+ return sorted;
};
const insertEmoji = (state, position, emojiData, needsSpace) => {
diff --git a/app/javascript/styles/application.scss b/app/javascript/styles/application.scss
index 81a040108ec504..2ea30538b8a964 100644
--- a/app/javascript/styles/application.scss
+++ b/app/javascript/styles/application.scss
@@ -23,3 +23,7 @@
@import 'mastodon/dashboard';
@import 'mastodon/rtl';
@import 'mastodon/accessibility';
+
+@import 'mods/announcements';
+@import 'mods/compact-padding';
+@import 'mods/legacy-style';
diff --git a/app/javascript/styles/mastodon-light/diff.scss b/app/javascript/styles/mastodon-light/diff.scss
index c37100a28fe61a..db564b227935cf 100644
--- a/app/javascript/styles/mastodon-light/diff.scss
+++ b/app/javascript/styles/mastodon-light/diff.scss
@@ -254,6 +254,10 @@ html {
border-color: $ui-base-color;
}
+.upload-progress__backdrop {
+ background: $ui-base-color;
+}
+
// Change the background colors of statuses
.focusable:focus {
background: $ui-base-color;
diff --git a/app/javascript/styles/mastodon/admin.scss b/app/javascript/styles/mastodon/admin.scss
index 674fafbe955a75..505f59a4654e76 100644
--- a/app/javascript/styles/mastodon/admin.scss
+++ b/app/javascript/styles/mastodon/admin.scss
@@ -384,7 +384,7 @@ $content-width: 840px;
position: fixed;
z-index: 10;
width: 100%;
- height: calc(100vh - 56px);
+ height: calc(100% - 56px);
left: 0;
bottom: 0;
overflow-y: auto;
diff --git a/app/javascript/styles/mastodon/components.scss b/app/javascript/styles/mastodon/components.scss
index 493efea3052661..e4616c1100dee7 100644
--- a/app/javascript/styles/mastodon/components.scss
+++ b/app/javascript/styles/mastodon/components.scss
@@ -4482,6 +4482,7 @@ a.status-card.compact:hover {
display: flex;
align-items: center;
justify-content: center;
+ text-align: center;
color: $secondary-text-color;
font-size: 18px;
font-weight: 500;
@@ -4516,7 +4517,7 @@ a.status-card.compact:hover {
width: 100%;
height: 6px;
border-radius: 6px;
- background: $ui-base-lighter-color;
+ background: darken($simple-background-color, 8%);
position: relative;
margin-top: 5px;
}
diff --git a/app/javascript/styles/mods/announcements.scss b/app/javascript/styles/mods/announcements.scss
new file mode 100644
index 00000000000000..50df5e86388ef5
--- /dev/null
+++ b/app/javascript/styles/mods/announcements.scss
@@ -0,0 +1,31 @@
+.mods__announcements__item {
+ display: flex;
+ padding: 10px;
+ margin: 15px;
+ background: $simple-background-color;
+ color: $inverted-text-color;
+ box-shadow: 0 0 2px rgba(0,0,0,.5);
+ border-radius: 4px;
+ text-decoration: none;
+
+ &--clickable {
+ cursor: pointer;
+ }
+}
+
+.mods__announcements__body {
+ flex: 1 1 auto;
+}
+
+.mods__announcements__icon {
+ flex: 0 0 auto;
+ height: 40px;
+ width: 40px;
+ text-align: center;
+ padding-right: 10px;
+
+ > img {
+ max-width: 100%;
+ max-height: 100%;
+ }
+}
diff --git a/app/javascript/styles/mods/compact-padding.scss b/app/javascript/styles/mods/compact-padding.scss
new file mode 100644
index 00000000000000..217a902688a53f
--- /dev/null
+++ b/app/javascript/styles/mods/compact-padding.scss
@@ -0,0 +1,40 @@
+.compact-padding {
+ .status__prepend {
+ padding-top: 6px;
+ padding-bottom: 0;
+ }
+
+ .status {
+ padding: 12px;
+ padding-bottom: 6px;
+ }
+
+ .status-reply, .muted {
+ padding-top: 6px;
+ }
+
+ .status__info {
+ margin-bottom: 6px;
+ }
+
+ .status__content {
+ padding-top: 0;
+ }
+
+ .status__action-bar {
+ margin-top: 6px;
+ justify-content: flex-start;
+ }
+
+ .status .media-gallery {
+ margin-top: 6px;
+ }
+
+ .status .attachment-list {
+ margin-top: 2px;
+ }
+
+ .notification__message {
+ padding: 12px 12px 0;
+ }
+}
diff --git a/app/javascript/styles/mods/legacy-style.scss b/app/javascript/styles/mods/legacy-style.scss
new file mode 100644
index 00000000000000..bbc94b9ee85990
--- /dev/null
+++ b/app/javascript/styles/mods/legacy-style.scss
@@ -0,0 +1,85 @@
+.legacy-style {
+ .status__prepend {
+ padding-top: 0.25em;
+ padding-left: 2.15em;
+ padding-bottom: 0;
+ }
+ .status {
+ padding: 0.5em 0.5em 0.5em 4.75em;
+ position: relative;
+ }
+ .status-reply {
+ padding-top: 0.25em;
+ }
+ .status .status__info {
+ margin-bottom: 0;
+ }
+ .status .status__relative-time {
+ height: auto;
+ line-height: normal;
+ }
+ .status .status__avatar {
+ width: 3em;
+ height: 3em;
+ position: absolute;
+ left: 0.5em;
+ top: 0.5em;
+ }
+ .status .display-name:not(.detailed-status__display-name) bdi {
+ padding-right: 0.25em;
+ }
+ .status .display-name:not(.detailed-status__display-name) {
+ display: flex;
+ }
+ .detailed-status .status-card {
+ padding: 0.25em;
+ }
+ .account {
+ padding: 0.5em;
+ }
+ .account__wrapper .display-name, .detailed-status .display-name {
+ padding-left: 0;
+ }
+ .status__action-bar {
+ margin-top: 0.25em;
+ justify-content: flex-start;
+ }
+ .status .media-gallery, .detailed-status .status-card, .detailed-status__meta {
+ margin-top: 0.5em;
+ }
+ .status .attachment-list {
+ margin-top: 0.25em;
+ }
+ .notification__message {
+ padding: 0.5em 0.5em 0 2.35em;
+ }
+ .muted {
+ padding-top: 0.5em;
+ }
+ .detailed-status__display-name {
+ margin-bottom: 0.5em;
+ }
+ .mods__announcements__item {
+ margin: 10px;
+ }
+ .drawer__inner .navigation-bar, .drawer__inner .compose-form {
+ padding: 10px;
+ }
+ .account__header__bar {
+ padding: 0 12px;
+ }
+ .account__header__tabs__name {
+ margin-top: 12px;
+ margin-bottom: 12px;
+ }
+ .account__header__bio .account__header__fields {
+ margin: 12px 0 0;
+ }
+ .account__header__bio .account__header__fields dl {
+ padding: 8px 12px;
+ }
+ .account__header__extra__links {
+ margin: 0 -8px;
+ padding-top: 12px;
+ }
+}
diff --git a/app/lib/account_reach_finder.rb b/app/lib/account_reach_finder.rb
index 706ce8c1fbbbf7..481e254396e453 100644
--- a/app/lib/account_reach_finder.rb
+++ b/app/lib/account_reach_finder.rb
@@ -6,7 +6,7 @@ def initialize(account)
end
def inboxes
- (followers_inboxes + reporters_inboxes + relay_inboxes).uniq
+ (followers_inboxes + reporters_inboxes + recently_mentioned_inboxes + relay_inboxes).uniq
end
private
@@ -19,6 +19,13 @@ def reporters_inboxes
Account.where(id: @account.targeted_reports.select(:account_id)).inboxes
end
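+ # Accounts mentioned in this account's statuses from the past two days (bounded via a snowflake ID cutoff), capped at 200 statuses and 2,000 inboxes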
+ def recently_mentioned_inboxes
+ cutoff_id = Mastodon::Snowflake.id_at(2.days.ago, with_random: false)
+ recent_statuses = @account.statuses.recent.where(id: cutoff_id...).limit(200)
+
+ Account.joins(:mentions).where(mentions: { status: recent_statuses }).inboxes.take(2000)
+ end
+
def relay_inboxes
Relay.enabled.pluck(:inbox_url)
end
diff --git a/app/models/account_statuses_filter.rb b/app/lib/account_statuses_filter.rb
similarity index 100%
rename from app/models/account_statuses_filter.rb
rename to app/lib/account_statuses_filter.rb
diff --git a/app/lib/activitypub/activity/flag.rb b/app/lib/activitypub/activity/flag.rb
index b0443849a6b8b5..7539bda422ff38 100644
--- a/app/lib/activitypub/activity/flag.rb
+++ b/app/lib/activitypub/activity/flag.rb
@@ -16,7 +16,7 @@ def perform
@account,
target_account,
status_ids: target_statuses.nil? ? [] : target_statuses.map(&:id),
- comment: @json['content'] || '',
+ comment: report_comment,
uri: report_uri
)
end
@@ -35,4 +35,8 @@ def object_uris
def report_uri
@json['id'] unless @json['id'].nil? || invalid_origin?(@json['id'])
end
+
+ def report_comment
+ (@json['content'] || '')[0...5000]
+ end
end
diff --git a/app/lib/activitypub/activity/update.rb b/app/lib/activitypub/activity/update.rb
index e7c3bc9bf83dec..91ebd3732a828b 100644
--- a/app/lib/activitypub/activity/update.rb
+++ b/app/lib/activitypub/activity/update.rb
@@ -28,6 +28,6 @@ def update_status
return if @status.nil?
- ActivityPub::ProcessStatusUpdateService.new.call(@status, @object, request_id: @options[:request_id])
+ ActivityPub::ProcessStatusUpdateService.new.call(@status, @json, @object, request_id: @options[:request_id])
end
end
diff --git a/app/lib/activitypub/tag_manager.rb b/app/lib/activitypub/tag_manager.rb
index 3d6b28ef5814d6..e05c0652268f7c 100644
--- a/app/lib/activitypub/tag_manager.rb
+++ b/app/lib/activitypub/tag_manager.rb
@@ -27,6 +27,8 @@ def url_for(target)
when :note, :comment, :activity
return activity_account_status_url(target.account, target) if target.reblog?
short_account_status_url(target.account, target)
+ when :flag
+ target.uri
end
end
@@ -41,6 +43,8 @@ def uri_for(target)
account_status_url(target.account, target)
when :emoji
emoji_url(target)
+ when :flag
+ target.uri
end
end
diff --git a/app/lib/admin/account_statuses_filter.rb b/app/lib/admin/account_statuses_filter.rb
new file mode 100644
index 00000000000000..94927e4b6806c9
--- /dev/null
+++ b/app/lib/admin/account_statuses_filter.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
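+# Variant of AccountStatusesFilter used by the moderation interface; overriding #blocked? lets moderators review statuses even when the target account blocks them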
+class Admin::AccountStatusesFilter < AccountStatusesFilter
+ private
+
+ def blocked?
+ false
+ end
+end
diff --git a/app/lib/admin/system_check.rb b/app/lib/admin/system_check.rb
index f512635abb0a8f..89dfcef9f1dcac 100644
--- a/app/lib/admin/system_check.rb
+++ b/app/lib/admin/system_check.rb
@@ -2,6 +2,7 @@
class Admin::SystemCheck
ACTIVE_CHECKS = [
+ Admin::SystemCheck::MediaPrivacyCheck,
Admin::SystemCheck::DatabaseSchemaCheck,
Admin::SystemCheck::SidekiqProcessCheck,
Admin::SystemCheck::RulesCheck,
diff --git a/app/lib/admin/system_check/elasticsearch_check.rb b/app/lib/admin/system_check/elasticsearch_check.rb
index 5b4c12399b8878..0b55be35017a62 100644
--- a/app/lib/admin/system_check/elasticsearch_check.rb
+++ b/app/lib/admin/system_check/elasticsearch_check.rb
@@ -31,7 +31,7 @@ def message
def running_version
@running_version ||= begin
Chewy.client.info['version']['number']
- rescue Faraday::ConnectionFailed
+ rescue Faraday::ConnectionFailed, Elasticsearch::Transport::Transport::Error
nil
end
end
diff --git a/app/lib/admin/system_check/media_privacy_check.rb b/app/lib/admin/system_check/media_privacy_check.rb
new file mode 100644
index 00000000000000..1df05b120ea80a
--- /dev/null
+++ b/app/lib/admin/system_check/media_privacy_check.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+class Admin::SystemCheck::MediaPrivacyCheck < Admin::SystemCheck::BaseCheck
+ include RoutingHelper
+
+ def skip?
+ !current_user.can?(:view_devops)
+ end
+
+ def pass?
+ check_media_uploads!
+ @failure_message.nil?
+ end
+
+ def message
+ Admin::SystemCheck::Message.new(@failure_message, @failure_value, @failure_action, true)
+ end
+
+ private
+
+ def check_media_uploads!
+ if Rails.configuration.x.use_s3
+ check_media_listing_inaccessible_s3!
+ else
+ check_media_listing_inaccessible!
+ end
+ end
+
+ def check_media_listing_inaccessible!
+ full_url = full_asset_url(media_attachment.file.url(:original, false))
+
+ # Check if we can list the uploaded file. If true, that's an error
+ directory_url = Addressable::URI.parse(full_url)
+ directory_url.query = nil
+ filename = directory_url.path.gsub(%r{.*/}, '')
+ directory_url.path = directory_url.path.gsub(%r{/[^/]+\Z}, '/')
+ Request.new(:get, directory_url, allow_local: true).perform do |res|
+ if res.truncated_body&.include?(filename)
+ @failure_message = use_storage? ? :upload_check_privacy_error_object_storage : :upload_check_privacy_error
+ @failure_action = 'https://docs.joinmastodon.org/admin/optional/object-storage/#FS'
+ end
+ end
+ rescue
+ nil
+ end
+
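+ # Probe both the S3 host-alias URL and the path-style bucket URL; if either returns a bucket listing, uploads are publicly enumerable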
+ def check_media_listing_inaccessible_s3!
+ urls_to_check = []
+ paperclip_options = Paperclip::Attachment.default_options
+ s3_protocol = paperclip_options[:s3_protocol]
+ s3_host_alias = paperclip_options[:s3_host_alias]
+ s3_host_name = paperclip_options[:s3_host_name]
+ bucket_name = paperclip_options.dig(:s3_credentials, :bucket)
+
+ urls_to_check << "#{s3_protocol}://#{s3_host_alias}/" if s3_host_alias.present?
+ urls_to_check << "#{s3_protocol}://#{s3_host_name}/#{bucket_name}/"
+ urls_to_check.uniq.each do |full_url|
+ check_s3_listing!(full_url)
+ break if @failure_message.present?
+ end
+ rescue
+ nil
+ end
+
+ def check_s3_listing!(full_url)
+ bucket_url = Addressable::URI.parse(full_url)
+ bucket_url.path = bucket_url.path.delete_suffix(media_attachment.file.path(:original))
+ bucket_url.query = "max-keys=1&x-random=#{SecureRandom.hex(10)}"
+ Request.new(:get, bucket_url, allow_local: true).perform do |res|
+ if res.truncated_body&.include?('ListBucketResult')
+ @failure_message = :upload_check_privacy_error_object_storage
+ @failure_action = 'https://docs.joinmastodon.org/admin/optional/object-storage/#S3'
+ end
+ end
+ end
+
+ def media_attachment
+ @media_attachment ||= begin
+ attachment = Account.representative.media_attachments.first
+ if attachment.present?
+ attachment.touch # rubocop:disable Rails/SkipsModelValidations
+ attachment
+ else
+ create_test_attachment!
+ end
+ end
+ end
+
+ def create_test_attachment!
+ Tempfile.create(%w(test-upload .jpg), binmode: true) do |tmp_file|
+ tmp_file.write(
+ Base64.decode64(
+ '/9j/4QAiRXhpZgAATU0AKgAAAAgAAQESAAMAAAABAAYAAAA' \
+ 'AAAD/2wCEAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBA' \
+ 'QEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQE' \
+ 'BAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAf/AABEIAAEAAgMBEQACEQEDEQH/x' \
+ 'ABKAAEAAAAAAAAAAAAAAAAAAAALEAEAAAAAAAAAAAAAAAAAAAAAAQEAAAAAAAAAAAAAAAA' \
+ 'AAAAAEQEAAAAAAAAAAAAAAAAAAAAA/9oADAMBAAIRAxEAPwA/8H//2Q=='
+ )
+ )
+ tmp_file.flush
+ Account.representative.media_attachments.create!(file: tmp_file)
+ end
+ end
+end
diff --git a/app/lib/admin/system_check/message.rb b/app/lib/admin/system_check/message.rb
index bfcad3bf3d00a6..ad8d4b6073872a 100644
--- a/app/lib/admin/system_check/message.rb
+++ b/app/lib/admin/system_check/message.rb
@@ -1,11 +1,12 @@
# frozen_string_literal: true
class Admin::SystemCheck::Message
- attr_reader :key, :value, :action
+ attr_reader :key, :value, :action, :critical
- def initialize(key, value = nil, action = nil)
- @key = key
- @value = value
- @action = action
+ def initialize(key, value = nil, action = nil, critical = false)
+ @key = key
+ @value = value
+ @action = action
+ @critical = critical
end
end
diff --git a/app/lib/application_extension.rb b/app/lib/application_extension.rb
index d61ec0e6e7f121..4de69c1eaddb24 100644
--- a/app/lib/application_extension.rb
+++ b/app/lib/application_extension.rb
@@ -9,10 +9,6 @@ module ApplicationExtension
validates :redirect_uri, length: { maximum: 2_000 }
end
- def most_recently_used_access_token
- @most_recently_used_access_token ||= access_tokens.where.not(last_used_at: nil).order(last_used_at: :desc).first
- end
-
def confirmation_redirect_uri
redirect_uri.lines.first.strip
end
diff --git a/app/lib/link_details_extractor.rb b/app/lib/link_details_extractor.rb
index 2e0672abe1689a..9d774842506d12 100644
--- a/app/lib/link_details_extractor.rb
+++ b/app/lib/link_details_extractor.rb
@@ -140,7 +140,7 @@ def link_type
end
def html
- player_url.present? ? content_tag(:iframe, nil, src: player_url, width: width, height: height, allowtransparency: 'true', scrolling: 'no', frameborder: '0') : nil
+ player_url.present? ? content_tag(:iframe, nil, src: player_url, width: width, height: height, allowfullscreen: 'true', allowtransparency: 'true', scrolling: 'no', frameborder: '0') : nil
end
def width
diff --git a/app/lib/plain_text_formatter.rb b/app/lib/plain_text_formatter.rb
index 08aa29696450a8..d1ff6808b2a995 100644
--- a/app/lib/plain_text_formatter.rb
+++ b/app/lib/plain_text_formatter.rb
@@ -1,9 +1,7 @@
# frozen_string_literal: true
class PlainTextFormatter
- include ActionView::Helpers::TextHelper
-
- NEWLINE_TAGS_RE = /(<br \/>|<br>|<\/p>)+/.freeze
+ NEWLINE_TAGS_RE = %r{(<br />|<br>|</p>)+}
attr_reader :text, :local
@@ -18,7 +16,10 @@ def to_s
if local?
text
else
- strip_tags(insert_newlines).chomp
+ node = Nokogiri::HTML.fragment(insert_newlines)
+ # Elements that are entirely removed with our Sanitize config
+ node.xpath('.//iframe|.//math|.//noembed|.//noframes|.//noscript|.//plaintext|.//script|.//style|.//svg|.//xmp').remove
+ node.text.chomp
end
end
diff --git a/app/lib/request.rb b/app/lib/request.rb
index 0508169dcba3c9..660dc0fa769f52 100644
--- a/app/lib/request.rb
+++ b/app/lib/request.rb
@@ -4,14 +4,60 @@
require 'socket'
require 'resolv'
-# Monkey-patch the HTTP.rb timeout class to avoid using a timeout block
+# Use our own timeout class to avoid using HTTP.rb's timeout block
# around the Socket#open method, since we use our own timeout blocks inside
# that method
-class HTTP::Timeout::PerOperation
+#
+# Also changes how the read timeout behaves so that it is cumulative (closer
+# to HTTP::Timeout::Global, but still having distinct timeouts for other
+# operation types)
+class PerOperationWithDeadline < HTTP::Timeout::PerOperation
+ READ_DEADLINE = 30
+
+ def initialize(*args)
+ super
+
+ @read_deadline = options.fetch(:read_deadline, READ_DEADLINE)
+ end
+
def connect(socket_class, host, port, nodelay = false)
@socket = socket_class.open(host, port)
@socket.setsockopt(Socket::IPPROTO_TCP, Socket::TCP_NODELAY, 1) if nodelay
end
+
+ # Reset deadline when the connection is re-used for different requests
+ def reset_counter
+ @deadline = nil
+ end
+
+ # Read data from the socket
+ def readpartial(size, buffer = nil)
+ @deadline ||= Process.clock_gettime(Process::CLOCK_MONOTONIC) + @read_deadline
+
+ timeout = false
+ loop do
+ result = @socket.read_nonblock(size, buffer, exception: false)
+
+ return :eof if result.nil?
+
+ remaining_time = @deadline - Process.clock_gettime(Process::CLOCK_MONOTONIC)
+ raise HTTP::TimeoutError, "Read timed out after #{@read_timeout} seconds" if timeout
+ raise HTTP::TimeoutError, "Read timed out after a total of #{@read_deadline} seconds" if remaining_time <= 0
+ return result if result != :wait_readable
+
+ # Mark the socket as timed out. Why is the timeout not raised immediately?
+ # There seems to be a race condition at the network level between calling
+ # #read_nonblock and #wait_readable, in which #read_nonblock signals that it is waiting
+ # for reads, but #wait_readable then returns nil before the full wait time has elapsed.
+ # In the normal case this would be a wait/read timeout, but it can also mean that the
+ # socket has been closed by the server. Therefore we "mark" the socket for timeout and
+ # try to read more bytes. If that read returns :eof, everything is fine and there was no
+ # timeout. Otherwise, the first timeout was a real timeout.
+ # This hack is needed because io/wait#wait_readable does not report when the socket has
+ # been closed by the server, and HTTP::Parser does not provide the limit for the chunks.
+ timeout = true unless @socket.to_io.wait_readable([remaining_time, @read_timeout].min)
+ end
+ end
end
class Request
@@ -20,7 +66,7 @@ class Request
# We enforce a 5s timeout on DNS resolving, 5s timeout on socket opening
# and 5s timeout on the TLS handshake, meaning the worst case should take
# about 15s in total
- TIMEOUT = { connect: 5, read: 10, write: 10 }.freeze
+ TIMEOUT = { connect_timeout: 5, read_timeout: 10, write_timeout: 10, read_deadline: 30 }.freeze
include RoutingHelper
@@ -32,6 +78,7 @@ def initialize(verb, url, **options)
@http_client = options.delete(:http_client)
@allow_local = options.delete(:allow_local)
@options = options.merge(socket_class: use_proxy? || @allow_local ? ProxySocket : Socket)
+ @options = @options.merge(timeout_class: PerOperationWithDeadline, timeout_options: TIMEOUT)
@options = @options.merge(proxy_url) if use_proxy?
@headers = {}
@@ -92,7 +139,7 @@ def valid_url?(url)
end
def http_client
- HTTP.use(:auto_inflate).timeout(TIMEOUT.dup).follow(max_hops: 3)
+ HTTP.use(:auto_inflate).follow(max_hops: 3)
end
end
@@ -238,11 +285,11 @@ def open(host, *args)
end
until socks.empty?
- _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect])
+ _, available_socks, = IO.select(nil, socks, nil, Request::TIMEOUT[:connect_timeout])
if available_socks.nil?
socks.each(&:close)
- raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect]} seconds"
+ raise HTTP::TimeoutError, "Connect timed out after #{Request::TIMEOUT[:connect_timeout]} seconds"
end
available_socks.each do |sock|
diff --git a/app/lib/scope_parser.rb b/app/lib/scope_parser.rb
index d268688c83edfd..45eb3c7b93831c 100644
--- a/app/lib/scope_parser.rb
+++ b/app/lib/scope_parser.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class ScopeParser < Parslet::Parser
- rule(:term) { match('[a-z]').repeat(1).as(:term) }
+ rule(:term) { match('[a-z_]').repeat(1).as(:term) }
rule(:colon) { str(':') }
rule(:access) { (str('write') | str('read')).as(:access) }
rule(:namespace) { str('admin').as(:namespace) }
diff --git a/app/lib/search_query_transformer.rb b/app/lib/search_query_transformer.rb
index aef05e9d9d883c..7adf1b7e73d7b6 100644
--- a/app/lib/search_query_transformer.rb
+++ b/app/lib/search_query_transformer.rb
@@ -25,7 +25,7 @@ def apply(search)
def clause_to_query(clause)
case clause
when TermClause
- { multi_match: { type: 'most_fields', query: clause.term, fields: ['text', 'text.stemmed'] } }
+ { match: { 'text.stemmed': { query: clause.term } } }
when PhraseClause
{ match_phrase: { text: { query: clause.phrase } } }
else
diff --git a/app/lib/tag_manager.rb b/app/lib/tag_manager.rb
index a1d12a654eb43a..2e929d6e3f3f36 100644
--- a/app/lib/tag_manager.rb
+++ b/app/lib/tag_manager.rb
@@ -7,18 +7,18 @@ class TagManager
include RoutingHelper
def web_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.web_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.web_domain).zero?
end
def local_domain?(domain)
- domain.nil? || domain.gsub(/[\/]/, '').casecmp(Rails.configuration.x.local_domain).zero?
+ domain.nil? || domain.delete_suffix('/').casecmp(Rails.configuration.x.local_domain).zero?
end
def normalize_domain(domain)
return if domain.nil?
uri = Addressable::URI.new
- uri.host = domain.gsub(/[\/]/, '')
+ uri.host = domain.delete_suffix('/')
uri.normalized_host
end
@@ -28,7 +28,7 @@ def local_url?(url)
domain = uri.host + (uri.port ? ":#{uri.port}" : '')
TagManager.instance.web_domain?(domain)
- rescue Addressable::URI::InvalidURIError
+ rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
false
end
end
diff --git a/app/lib/text_formatter.rb b/app/lib/text_formatter.rb
index 48e2fc2338dcc1..e51266a08ad4c1 100644
--- a/app/lib/text_formatter.rb
+++ b/app/lib/text_formatter.rb
@@ -48,6 +48,26 @@ def to_s
html.html_safe # rubocop:disable Rails/OutputSafety
end
+ class << self
+ include ERB::Util
+
+ def shortened_link(url, rel_me: false)
+ url = Addressable::URI.parse(url).to_s
+ rel = rel_me ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
+
+ prefix = url.match(URL_PREFIX_REGEX).to_s
+ display_url = url[prefix.length, 30]
+ suffix = url[prefix.length + 30..-1]
+ cutoff = url[prefix.length..-1].length > 30
+
+ <<~HTML.squish.html_safe # rubocop:disable Rails/OutputSafety
+ <a href="#{h(url)}" target="_blank" rel="#{rel.join(' ')}"><span class="invisible">#{h(prefix)}</span><span class="#{cutoff ? 'ellipsis' : ''}">#{h(display_url)}</span><span class="invisible">#{h(suffix)}</span></a>
+ HTML
+ rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
+ h(url)
+ end
+ end
+
private
def rewrite
@@ -70,19 +90,7 @@ def rewrite
end
def link_to_url(entity)
- url = Addressable::URI.parse(entity[:url]).to_s
- rel = with_rel_me? ? (DEFAULT_REL + %w(me)) : DEFAULT_REL
-
- prefix = url.match(URL_PREFIX_REGEX).to_s
- display_url = url[prefix.length, 30]
- suffix = url[prefix.length + 30..-1]
- cutoff = url[prefix.length..-1].length > 30
-
- <<~HTML.squish
- <a href="#{h(url)}" target="_blank" rel="#{rel.join(' ')}"><span class="invisible">#{h(prefix)}</span><span class="#{cutoff ? 'ellipsis' : ''}">#{h(display_url)}</span><span class="invisible">#{h(suffix)}</span></a>
- HTML
- rescue Addressable::URI::InvalidURIError, IDN::Idna::IdnaError
- h(entity[:url])
+ TextFormatter.shortened_link(entity[:url], rel_me: with_rel_me?)
end
def link_to_hashtag(entity)
diff --git a/app/lib/user_settings_decorator.rb b/app/lib/user_settings_decorator.rb
index 5fb7655a9b4837..1a8654196868fc 100644
--- a/app/lib/user_settings_decorator.rb
+++ b/app/lib/user_settings_decorator.rb
@@ -39,6 +39,7 @@ def process_update
user.settings['trends'] = trends_preference if change?('setting_trends')
user.settings['crop_images'] = crop_images_preference if change?('setting_crop_images')
user.settings['always_send_emails'] = always_send_emails_preference if change?('setting_always_send_emails')
+ user.settings['webui_styles'] = webui_styles_preference if change?('setting_webui_styles')
end
def merged_notification_emails
@@ -137,6 +138,10 @@ def always_send_emails_preference
boolean_cast_setting 'setting_always_send_emails'
end
+ def webui_styles_preference
+ settings['setting_webui_styles']
+ end
+
def boolean_cast_setting(key)
ActiveModel::Type::Boolean.new.cast(settings[key])
end
diff --git a/app/lib/vacuum/access_tokens_vacuum.rb b/app/lib/vacuum/access_tokens_vacuum.rb
index 7b91f74a512a11..a224f6d6380e58 100644
--- a/app/lib/vacuum/access_tokens_vacuum.rb
+++ b/app/lib/vacuum/access_tokens_vacuum.rb
@@ -9,10 +9,12 @@ def perform
private
def vacuum_revoked_access_tokens!
- Doorkeeper::AccessToken.where.not(revoked_at: nil).where('revoked_at < NOW()').delete_all
+ Doorkeeper::AccessToken.where.not(expires_in: nil).where('created_at + make_interval(secs => expires_in) < NOW()').in_batches.delete_all
+ Doorkeeper::AccessToken.where.not(revoked_at: nil).where('revoked_at < NOW()').in_batches.delete_all
end
def vacuum_revoked_access_grants!
- Doorkeeper::AccessGrant.where.not(revoked_at: nil).where('revoked_at < NOW()').delete_all
+ Doorkeeper::AccessGrant.where.not(expires_in: nil).where('created_at + make_interval(secs => expires_in) < NOW()').in_batches.delete_all
+ Doorkeeper::AccessGrant.where.not(revoked_at: nil).where('revoked_at < NOW()').in_batches.delete_all
end
end
diff --git a/app/lib/video_metadata_extractor.rb b/app/lib/video_metadata_extractor.rb
index 2896620cb21b09..f27d34868a2798 100644
--- a/app/lib/video_metadata_extractor.rb
+++ b/app/lib/video_metadata_extractor.rb
@@ -43,6 +43,9 @@ def parse_metadata
@height = video_stream[:height]
@frame_rate = video_stream[:avg_frame_rate] == '0/0' ? nil : Rational(video_stream[:avg_frame_rate])
@r_frame_rate = video_stream[:r_frame_rate] == '0/0' ? nil : Rational(video_stream[:r_frame_rate])
+ # For some video streams the frame_rate reported by `ffprobe` will be 0/0, but for these streams we
+ # should use `r_frame_rate` instead. Video screencasts generated by Gnome Screencast have this issue.
+ @frame_rate ||= @r_frame_rate
end
if (audio_stream = audio_streams.first)
diff --git a/app/mailers/application_mailer.rb b/app/mailers/application_mailer.rb
index a37682eca63ab6..4edcb75f31bef4 100644
--- a/app/mailers/application_mailer.rb
+++ b/app/mailers/application_mailer.rb
@@ -7,6 +7,8 @@ class ApplicationMailer < ActionMailer::Base
helper :instance
helper :formatting
+ after_action :set_autoreply_headers!
+
protected
def locale_for_account(account)
@@ -14,4 +16,10 @@ def locale_for_account(account)
yield
end
end
+
+ def set_autoreply_headers!
+ headers['Precedence'] = 'list'
+ headers['X-Auto-Response-Suppress'] = 'All'
+ headers['Auto-Submitted'] = 'auto-generated'
+ end
end
diff --git a/app/models/account.rb b/app/models/account.rb
index 262285a09ecad0..28bd828b06b874 100644
--- a/app/models/account.rb
+++ b/app/models/account.rb
@@ -107,7 +107,7 @@ class Account < ApplicationRecord
scope :bots, -> { where(actor_type: %w(Application Service)) }
scope :groups, -> { where(actor_type: 'Group') }
scope :alphabetic, -> { order(domain: :asc, username: :asc) }
- scope :matches_username, ->(value) { where(arel_table[:username].matches("#{value}%")) }
+ scope :matches_username, ->(value) { where('lower((username)::text) LIKE lower(?)', "#{value}%") }
scope :matches_display_name, ->(value) { where(arel_table[:display_name].matches("#{value}%")) }
scope :matches_domain, ->(value) { where(arel_table[:domain].matches("%#{value}%")) }
scope :without_unapproved, -> { left_outer_joins(:user).remote.or(left_outer_joins(:user).merge(User.approved.confirmed)) }
diff --git a/app/models/account_conversation.rb b/app/models/account_conversation.rb
index 45e74bbeb31920..38ee247cfdc206 100644
--- a/app/models/account_conversation.rb
+++ b/app/models/account_conversation.rb
@@ -16,34 +16,44 @@
class AccountConversation < ApplicationRecord
include Redisable
+ attr_writer :participant_accounts
+
+ before_validation :set_last_status
after_commit :push_to_streaming_api
belongs_to :account
belongs_to :conversation
belongs_to :last_status, class_name: 'Status'
- before_validation :set_last_status
-
def participant_account_ids=(arr)
self[:participant_account_ids] = arr.sort
+ @participant_accounts = nil
end
def participant_accounts
- if participant_account_ids.empty?
- [account]
- else
- participants = Account.where(id: participant_account_ids)
- participants.empty? ? [account] : participants
- end
+ @participant_accounts ||= Account.where(id: participant_account_ids).to_a
+ @participant_accounts.presence || [account]
end
class << self
def to_a_paginated_by_id(limit, options = {})
- if options[:min_id]
- paginate_by_min_id(limit, options[:min_id], options[:max_id]).reverse
- else
- paginate_by_max_id(limit, options[:max_id], options[:since_id]).to_a
+ array = begin
+ if options[:min_id]
+ paginate_by_min_id(limit, options[:min_id], options[:max_id]).reverse
+ else
+ paginate_by_max_id(limit, options[:max_id], options[:since_id]).to_a
+ end
end
+
+ # Preload participants
+ participant_ids = array.flat_map(&:participant_account_ids)
+ accounts_by_id = Account.where(id: participant_ids).index_by(&:id)
+
+ array.each do |conversation|
+ conversation.participant_accounts = conversation.participant_account_ids.filter_map { |id| accounts_by_id[id] }
+ end
+
+ array
end
def paginate_by_min_id(limit, min_id = nil, max_id = nil)
diff --git a/app/models/admin/status_batch_action.rb b/app/models/admin/status_batch_action.rb
index b8bdec7223fe36..6641688788847d 100644
--- a/app/models/admin/status_batch_action.rb
+++ b/app/models/admin/status_batch_action.rb
@@ -140,6 +140,6 @@ def report_params
end
def allowed_status_ids
- AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
+ Admin::AccountStatusesFilter.new(@report.target_account, current_account).results.with_discarded.where(id: status_ids).pluck(:id)
end
end
diff --git a/app/models/backup.rb b/app/models/backup.rb
index d242fd62c19d1d..8823e7cae56488 100644
--- a/app/models/backup.rb
+++ b/app/models/backup.rb
@@ -17,6 +17,6 @@
class Backup < ApplicationRecord
belongs_to :user, inverse_of: :backups
- has_attached_file :dump
+ has_attached_file :dump, s3_permissions: ->(*) { ENV['S3_PERMISSION'] == '' ? nil : 'private' }
do_not_validate_attachment_file_type :dump
end
diff --git a/app/models/concerns/attachmentable.rb b/app/models/concerns/attachmentable.rb
index 01fae4236fea12..662b2ef529b15e 100644
--- a/app/models/concerns/attachmentable.rb
+++ b/app/models/concerns/attachmentable.rb
@@ -22,15 +22,14 @@ module Attachmentable
included do
def self.has_attached_file(name, options = {}) # rubocop:disable Naming/PredicateName
- options = { validate_media_type: false }.merge(options)
super(name, options)
- send(:"before_#{name}_post_process") do
+
+ send(:"before_#{name}_validate", prepend: true) do
attachment = send(name)
check_image_dimension(attachment)
set_file_content_type(attachment)
obfuscate_file_name(attachment)
set_file_extension(attachment)
- Paperclip::Validators::MediaTypeSpoofDetectionValidator.new(attributes: [name]).validate(self)
end
end
end
diff --git a/app/models/concerns/ldap_authenticable.rb b/app/models/concerns/ldap_authenticable.rb
index dc5abcd5accefe..775df081764d96 100644
--- a/app/models/concerns/ldap_authenticable.rb
+++ b/app/models/concerns/ldap_authenticable.rb
@@ -6,7 +6,7 @@ module LdapAuthenticable
class_methods do
def authenticate_with_ldap(params = {})
ldap = Net::LDAP.new(ldap_options)
- filter = format(Devise.ldap_search_filter, uid: Devise.ldap_uid, mail: Devise.ldap_mail, email: params[:email])
+ filter = format(Devise.ldap_search_filter, uid: Devise.ldap_uid, mail: Devise.ldap_mail, email: Net::LDAP::Filter.escape(params[:email]))
if (user_info = ldap.bind_as(base: Devise.ldap_base, filter: filter, password: params[:password]))
ldap_get_user(user_info.first)
diff --git a/app/models/form/account_batch.rb b/app/models/form/account_batch.rb
index 473622edf4de00..4665a586798398 100644
--- a/app/models/form/account_batch.rb
+++ b/app/models/form/account_batch.rb
@@ -17,8 +17,8 @@ def save
unfollow!
when 'remove_from_followers'
remove_from_followers!
- when 'block_domains'
- block_domains!
+ when 'remove_domains_from_followers'
+ remove_domains_from_followers!
when 'approve'
approve!
when 'reject'
@@ -35,9 +35,15 @@ def save
private
def follow!
+ error = nil
+
accounts.each do |target_account|
FollowService.new.call(current_account, target_account)
+ rescue Mastodon::NotPermittedError, ActiveRecord::RecordNotFound => e
+ error ||= e
end
+
+ raise error if error.present?
end
def unfollow!
@@ -50,10 +56,8 @@ def remove_from_followers!
RemoveFromFollowersService.new.call(current_account, account_ids)
end
- def block_domains!
- AfterAccountDomainBlockWorker.push_bulk(account_domains) do |domain|
- [current_account.id, domain]
- end
+ def remove_domains_from_followers!
+ RemoveDomainsFromFollowersService.new.call(current_account, account_domains)
end
def account_domains
@@ -119,7 +123,18 @@ def suspend_account(account)
account: current_account,
action: :suspend
)
+
Admin::SuspensionWorker.perform_async(account.id)
+
+ # Suspending a single account closes their associated reports, so
+ # mass-suspending would be consistent.
+ Report.where(target_account: account).unresolved.find_each do |report|
+ authorize(report, :update?)
+ log_action(:resolve, report)
+ report.resolve!(current_account)
+ rescue Mastodon::NotPermittedError
+ # This should not happen, but just in case, do not fail early
+ end
end
def approve_account(account)
diff --git a/app/models/identity.rb b/app/models/identity.rb
index 8cc65aef413d13..869f06372dab8b 100644
--- a/app/models/identity.rb
+++ b/app/models/identity.rb
@@ -12,7 +12,7 @@
#
class Identity < ApplicationRecord
- belongs_to :user, dependent: :destroy
+ belongs_to :user
validates :uid, presence: true, uniqueness: { scope: :provider }
validates :provider, presence: true
diff --git a/app/models/report.rb b/app/models/report.rb
index 525d22ad5decd3..3ae5c10dd0bd12 100644
--- a/app/models/report.rb
+++ b/app/models/report.rb
@@ -39,7 +39,10 @@ class Report < ApplicationRecord
scope :resolved, -> { where.not(action_taken_at: nil) }
scope :with_accounts, -> { includes([:account, :target_account, :action_taken_by_account, :assigned_account].index_with({ user: [:invite_request, :invite] })) }
- validates :comment, length: { maximum: 1_000 }
+ # A report is considered local if the reporter is local
+ delegate :local?, to: :account
+
+ validates :comment, length: { maximum: 1_000 }, if: :local?
validates :rule_ids, absence: true, unless: :violation?
validate :validate_rule_ids
@@ -50,10 +53,6 @@ class Report < ApplicationRecord
violation: 2_000,
}
- def local?
- false # Force uri_for to use uri attribute
- end
-
before_validation :set_uri, only: :create
after_create_commit :trigger_webhooks
diff --git a/app/models/status.rb b/app/models/status.rb
index b1c49e99a45daf..afd3747ea36765 100644
--- a/app/models/status.rb
+++ b/app/models/status.rb
@@ -354,13 +354,25 @@ def reload_stale_associations!(cached_items)
account_ids.uniq!
+ status_ids = cached_items.map { |item| item.reblog? ? item.reblog_of_id : item.id }.uniq
+
return if account_ids.empty?
accounts = Account.where(id: account_ids).includes(:account_stat, :user).index_by(&:id)
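+ # Preload status stats in a single query so counters on cached statuses (and their reblogs) do not trigger N+1 lookups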
+ status_stats = StatusStat.where(status_id: status_ids).index_by(&:status_id)
+
cached_items.each do |item|
item.account = accounts[item.account_id]
item.reblog.account = accounts[item.reblog.account_id] if item.reblog?
+
+ if item.reblog?
+ status_stat = status_stats[item.reblog.id]
+ item.reblog.status_stat = status_stat if status_stat.present?
+ else
+ status_stat = status_stats[item.id]
+ item.status_stat = status_stat if status_stat.present?
+ end
end
end
diff --git a/app/models/user.rb b/app/models/user.rb
index c767f8984208b9..236e31a55128fb 100644
--- a/app/models/user.rb
+++ b/app/models/user.rb
@@ -136,6 +136,7 @@ class User < ApplicationRecord
:expand_spoilers, :default_language, :aggregate_reblogs, :show_application,
:advanced_layout, :use_blurhash, :use_pending_items, :trends, :crop_images,
:disable_swiping, :always_send_emails,
+ :webui_styles,
to: :settings, prefix: :setting, allow_nil: false
delegate :can?, to: :role
@@ -504,11 +505,14 @@ def sanitize_role
def prepare_new_user!
BootstrapTimelineWorker.perform_async(account_id)
ActivityTracker.increment('activity:accounts:local')
+ ActivityTracker.record('activity:logins', id)
UserMailer.welcome(self).deliver_later
TriggerWebhookWorker.perform_async('account.approved', 'Account', account_id)
end
def prepare_returning_user!
+ return unless confirmed?
+
ActivityTracker.record('activity:logins', id)
regenerate_feed! if needs_feed_update?
end
diff --git a/app/models/webhook.rb b/app/models/webhook.rb
index 4aafb1257be9db..bc1c027d50d1d1 100644
--- a/app/models/webhook.rb
+++ b/app/models/webhook.rb
@@ -20,6 +20,8 @@ class Webhook < ApplicationRecord
report.created
).freeze
+ attr_writer :current_account
+
scope :enabled, -> { where(enabled: true) }
validates :url, presence: true, url: true
@@ -27,6 +29,7 @@ class Webhook < ApplicationRecord
validates :events, presence: true
validate :validate_events
+ validate :validate_permissions
before_validation :strip_events
before_validation :generate_secret
@@ -43,12 +46,29 @@ def disable!
update!(enabled: false)
end
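+ # Maps each subscribed event to the admin permission required to receive it; used by WebhookPolicy and the validation below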
+ def required_permissions
+ events.map { |event| Webhook.permission_for_event(event) }
+ end
+
+ def self.permission_for_event(event)
+ case event
+ when 'account.approved', 'account.created', 'account.updated'
+ :manage_users
+ when 'report.created'
+ :manage_reports
+ end
+ end
+
private
def validate_events
errors.add(:events, :invalid) if events.any? { |e| !EVENTS.include?(e) }
end
+ def validate_permissions
+ errors.add(:events, :invalid_permissions) if defined?(@current_account) && required_permissions.any? { |permission| !@current_account.user_role.can?(permission) }
+ end
+
def strip_events
self.events = events.map { |str| str.strip.presence }.compact if events.present?
end
diff --git a/app/policies/admin/status_policy.rb b/app/policies/admin/status_policy.rb
index ffaa30f13de60a..e9379c25eca903 100644
--- a/app/policies/admin/status_policy.rb
+++ b/app/policies/admin/status_policy.rb
@@ -12,7 +12,7 @@ def index?
end
def show?
- role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported?)
+ role.can?(:manage_reports, :manage_users) && (record.public_visibility? || record.unlisted_visibility? || record.reported? || viewable_through_normal_policy?)
end
def destroy?
@@ -26,4 +26,10 @@ def update?
def review?
role.can?(:manage_taxonomies)
end
+
+ private
+
+ def viewable_through_normal_policy?
+ StatusPolicy.new(current_account, record, @preloaded_relations).show?
+ end
end
diff --git a/app/policies/webhook_policy.rb b/app/policies/webhook_policy.rb
index a2199a333fcf29..577e891b6698b0 100644
--- a/app/policies/webhook_policy.rb
+++ b/app/policies/webhook_policy.rb
@@ -14,7 +14,7 @@ def show?
end
def update?
- role.can?(:manage_webhooks)
+ role.can?(:manage_webhooks) && record.required_permissions.all? { |permission| role.can?(permission) }
end
def enable?
@@ -30,6 +30,6 @@ def rotate_secret?
end
def destroy?
- role.can?(:manage_webhooks)
+ role.can?(:manage_webhooks) && record.required_permissions.all? { |permission| role.can?(permission) }
end
end
diff --git a/app/serializers/initial_state_serializer.rb b/app/serializers/initial_state_serializer.rb
index 24417bca7752c8..8793a26049c60b 100644
--- a/app/serializers/initial_state_serializer.rb
+++ b/app/serializers/initial_state_serializer.rb
@@ -51,6 +51,7 @@ def meta
store[:use_pending_items] = object.current_account.user.setting_use_pending_items
store[:trends] = Setting.trends && object.current_account.user.setting_trends
store[:crop_images] = object.current_account.user.setting_crop_images
+ store[:webui_styles] = object.current_account.user.setting_webui_styles
else
store[:auto_play_gif] = Setting.auto_play_gif
store[:display_media] = Setting.display_media
diff --git a/app/serializers/rest/preview_card_serializer.rb b/app/serializers/rest/preview_card_serializer.rb
index 66ff47d22ea50e..e6d204fec3c076 100644
--- a/app/serializers/rest/preview_card_serializer.rb
+++ b/app/serializers/rest/preview_card_serializer.rb
@@ -11,4 +11,8 @@ class REST::PreviewCardSerializer < ActiveModel::Serializer
def image
object.image? ? full_asset_url(object.image.url(:original)) : nil
end
+
+ def html
+ Sanitize.fragment(object.html, Sanitize::Config::MASTODON_OEMBED)
+ end
end
diff --git a/app/services/activitypub/fetch_remote_poll_service.rb b/app/services/activitypub/fetch_remote_poll_service.rb
index 1829e791ce6eba..41b9b2f0c9be97 100644
--- a/app/services/activitypub/fetch_remote_poll_service.rb
+++ b/app/services/activitypub/fetch_remote_poll_service.rb
@@ -8,6 +8,6 @@ def call(poll, on_behalf_of = nil)
return unless supported_context?(json)
- ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json)
+ ActivityPub::ProcessStatusUpdateService.new.call(poll.status, json, json)
end
end
diff --git a/app/services/activitypub/process_account_service.rb b/app/services/activitypub/process_account_service.rb
index 2da9096c734d99..087c23fa7e2446 100644
--- a/app/services/activitypub/process_account_service.rb
+++ b/app/services/activitypub/process_account_service.rb
@@ -76,6 +76,9 @@ def create_account
@account.suspended_at = domain_block.created_at if auto_suspend?
@account.suspension_origin = :local if auto_suspend?
@account.silenced_at = domain_block.created_at if auto_silence?
+
+ set_immediate_protocol_attributes!
+
@account.save
end
diff --git a/app/services/activitypub/process_status_update_service.rb b/app/services/activitypub/process_status_update_service.rb
index 1dc393e28e9a65..ecb058bf787125 100644
--- a/app/services/activitypub/process_status_update_service.rb
+++ b/app/services/activitypub/process_status_update_service.rb
@@ -5,10 +5,11 @@ class ActivityPub::ProcessStatusUpdateService < BaseService
include Redisable
include Lockable
- def call(status, json, request_id: nil)
+ def call(status, activity_json, object_json, request_id: nil)
raise ArgumentError, 'Status has unsaved changes' if status.changed?
- @json = json
+ @activity_json = activity_json
+ @json = object_json
@status_parser = ActivityPub::Parser::StatusParser.new(@json)
@uri = @status_parser.uri
@status = status
@@ -308,6 +309,6 @@ def forward_activity!
end
def forwarder
- @forwarder ||= ActivityPub::Forwarder.new(@account, @json, @status)
+ @forwarder ||= ActivityPub::Forwarder.new(@account, @activity_json, @status)
end
end
diff --git a/app/services/follow_migration_service.rb b/app/services/follow_migration_service.rb
new file mode 100644
index 00000000000000..cfe9093cbe5d92
--- /dev/null
+++ b/app/services/follow_migration_service.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+class FollowMigrationService < FollowService
+ # Follow an account with the same settings as another account, and unfollow the old account once the request is sent
+ # @param [Account] source_account From which to follow
+ # @param [Account] target_account Account to follow
+ # @param [Account] old_target_account Account to unfollow once the follow request has been sent to the new one
+ # @option [Boolean] bypass_locked Whether to immediately follow the new account even if it is locked
+ def call(source_account, target_account, old_target_account, bypass_locked: false)
+ @old_target_account = old_target_account
+
+ follow = source_account.active_relationships.find_by(target_account: old_target_account)
+ reblogs = follow&.show_reblogs?
+ notify = follow&.notify?
+ languages = follow&.languages
+
+ super(source_account, target_account, reblogs: reblogs, notify: notify, languages: languages, bypass_locked: bypass_locked, bypass_limit: true)
+ end
+
+ private
+
+ def request_follow!
+ follow_request = @source_account.request_follow!(@target_account, **follow_options.merge(rate_limit: @options[:with_rate_limit], bypass_limit: @options[:bypass_limit]))
+
+ if @target_account.local?
+ LocalNotificationWorker.perform_async(@target_account.id, follow_request.id, follow_request.class.name, 'follow_request')
+ UnfollowService.new.call(@source_account, @old_target_account, skip_unmerge: true)
+ elsif @target_account.activitypub?
+ ActivityPub::MigratedFollowDeliveryWorker.perform_async(build_json(follow_request), @source_account.id, @target_account.inbox_url, @old_target_account.id)
+ end
+
+ follow_request
+ end
+
+ def direct_follow!
+ follow = super
+ UnfollowService.new.call(@source_account, @old_target_account, skip_unmerge: true)
+ follow
+ end
+end
diff --git a/app/services/remove_domains_from_followers_service.rb b/app/services/remove_domains_from_followers_service.rb
new file mode 100644
index 00000000000000..d76763409d3b0d
--- /dev/null
+++ b/app/services/remove_domains_from_followers_service.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+class RemoveDomainsFromFollowersService < BaseService
+ include Payloadable
+
+ def call(source_account, target_domains)
+ source_account.passive_relationships.where(account_id: Account.where(domain: target_domains)).find_each do |follow|
+ follow.destroy
+
+ create_notification(follow) if source_account.local? && !follow.account.local? && follow.account.activitypub?
+ end
+ end
+
+ private
+
+ def create_notification(follow)
+ ActivityPub::DeliveryWorker.perform_async(build_json(follow), follow.target_account_id, follow.account.inbox_url)
+ end
+
+ def build_json(follow)
+ Oj.dump(serialize_payload(follow, ActivityPub::RejectFollowSerializer))
+ end
+end
diff --git a/app/services/remove_status_service.rb b/app/services/remove_status_service.rb
index 45cfb75f47eead..c84f90726bc7e3 100644
--- a/app/services/remove_status_service.rb
+++ b/app/services/remove_status_service.rb
@@ -12,6 +12,7 @@ class RemoveStatusService < BaseService
# @option [Boolean] :immediate
# @option [Boolean] :preserve
# @option [Boolean] :original_removed
+ # @option [Boolean] :skip_streaming
def call(status, **options)
@payload = Oj.dump(event: :delete, payload: status.id.to_s)
@status = status
@@ -52,6 +53,9 @@ def call(status, **options)
private
+ # None of the following FeedManager calls results in a redis publish for
+ # streaming, as the `:update` option is false
+
def remove_from_self
FeedManager.instance.unpush_from_home(@account, @status)
end
@@ -75,6 +79,8 @@ def remove_from_mentions
# followers. Here we send a delete to actively mentioned accounts
# that may not follow the account
+ return if skip_streaming?
+
@status.active_mentions.find_each do |mention|
redis.publish("timeline:#{mention.account_id}", @payload)
end
@@ -103,7 +109,7 @@ def remove_reblogs
# without us being able to do all the fancy stuff
@status.reblogs.rewhere(deleted_at: [nil, @status.deleted_at]).includes(:account).reorder(nil).find_each do |reblog|
- RemoveStatusService.new.call(reblog, original_removed: true)
+ RemoveStatusService.new.call(reblog, original_removed: true, skip_streaming: skip_streaming?)
end
end
@@ -114,6 +120,8 @@ def remove_from_hashtags
return unless @status.public_visibility?
+ return if skip_streaming?
+
@status.tags.map(&:name).each do |hashtag|
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}", @payload)
redis.publish("timeline:hashtag:#{hashtag.mb_chars.downcase}:local", @payload) if @status.local?
@@ -123,6 +131,8 @@ def remove_from_hashtags
def remove_from_public
return unless @status.public_visibility?
+ return if skip_streaming?
+
redis.publish('timeline:public', @payload)
redis.publish(@status.local? ? 'timeline:public:local' : 'timeline:public:remote', @payload)
end
@@ -130,6 +140,8 @@ def remove_from_public
def remove_from_media
return unless @status.public_visibility?
+ return if skip_streaming?
+
redis.publish('timeline:public:media', @payload)
redis.publish(@status.local? ? 'timeline:public:local:media' : 'timeline:public:remote:media', @payload)
end
@@ -143,4 +155,8 @@ def remove_media
def permanently?
@options[:immediate] || !(@options[:preserve] || @status.reported?)
end
+
+ def skip_streaming?
+ !!@options[:skip_streaming]
+ end
end
diff --git a/app/services/resolve_url_service.rb b/app/services/resolve_url_service.rb
index d8e795f3b042ab..d6e528654fda59 100644
--- a/app/services/resolve_url_service.rb
+++ b/app/services/resolve_url_service.rb
@@ -89,13 +89,28 @@ def local_url?
def process_local_url
recognized_params = Rails.application.routes.recognize_path(@url)
- return unless recognized_params[:action] == 'show'
+ case recognized_params[:controller]
+ when 'statuses'
+ return unless recognized_params[:action] == 'show'
- if recognized_params[:controller] == 'statuses'
status = Status.find_by(id: recognized_params[:id])
check_local_status(status)
- elsif recognized_params[:controller] == 'accounts'
+ when 'accounts'
+ return unless recognized_params[:action] == 'show'
+
Account.find_local(recognized_params[:username])
+ when 'home'
+ return unless recognized_params[:action] == 'index' && recognized_params[:username_with_domain].present?
+
+ if recognized_params[:any]&.match?(/\A[0-9]+\Z/)
+ status = Status.find_by(id: recognized_params[:any])
+ check_local_status(status)
+ elsif recognized_params[:any].blank?
+ username, domain = recognized_params[:username_with_domain].gsub(/\A@/, '').split('@')
+ return unless username.present? && domain.present?
+
+ Account.find_remote(username, domain)
+ end
end
end
diff --git a/app/services/search_service.rb b/app/services/search_service.rb
index 1a76cbb388316c..d002458834229c 100644
--- a/app/services/search_service.rb
+++ b/app/services/search_service.rb
@@ -36,6 +36,7 @@ def perform_accounts_search!
def perform_statuses_search!
definition = parsed_query.apply(StatusesIndex.filter(term: { searchable_by: @account.id }))
+ .order(id: :desc)
if @options[:account_id].present?
definition = definition.filter(term: { account_id: @options[:account_id] })
@@ -124,6 +125,6 @@ def relations_map_for_account(account, account_ids, domains)
end
def parsed_query
- SearchQueryTransformer.new.apply(SearchQueryParser.new.parse(@query))
+ SearchQueryTransformer.new.apply(SearchQueryParser.new.parse(@query.gsub(/( )/," ")))
end
end
diff --git a/app/services/translate_status_service.rb b/app/services/translate_status_service.rb
index 539a0d9db5fd93..b905f8158ad3e7 100644
--- a/app/services/translate_status_service.rb
+++ b/app/services/translate_status_service.rb
@@ -12,7 +12,9 @@ def call(status, target_language)
@content = status_content_format(@status)
@target_language = target_language
- Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) { translation_backend.translate(@content, @status.language, @target_language) }
+ Rails.cache.fetch("translations/#{@status.language}/#{@target_language}/#{content_hash}", expires_in: CACHE_TTL) do
+ Sanitize.fragment(translation_backend.translate(@content, @status.language, @target_language), Sanitize::Config::MASTODON_STRICT)
+ end
end
private
diff --git a/app/validators/status_length_validator.rb b/app/validators/status_length_validator.rb
index e107912b77df63..a3cbe5123b7472 100644
--- a/app/validators/status_length_validator.rb
+++ b/app/validators/status_length_validator.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
class StatusLengthValidator < ActiveModel::Validator
- MAX_CHARS = 500
+ MAX_CHARS = 5000
URL_PLACEHOLDER_CHARS = 23
URL_PLACEHOLDER = 'x' * 23
diff --git a/app/validators/vote_validator.rb b/app/validators/vote_validator.rb
index b1692562d41598..4316c59ef09bb2 100644
--- a/app/validators/vote_validator.rb
+++ b/app/validators/vote_validator.rb
@@ -3,8 +3,8 @@
class VoteValidator < ActiveModel::Validator
def validate(vote)
vote.errors.add(:base, I18n.t('polls.errors.expired')) if vote.poll.expired?
-
vote.errors.add(:base, I18n.t('polls.errors.invalid_choice')) if invalid_choice?(vote)
+ vote.errors.add(:base, I18n.t('polls.errors.self_vote')) if self_vote?(vote)
if vote.poll.multiple? && vote.poll.votes.where(account: vote.account, choice: vote.choice).exists?
vote.errors.add(:base, I18n.t('polls.errors.already_voted'))
@@ -18,4 +18,8 @@ def validate(vote)
def invalid_choice?(vote)
vote.choice.negative? || vote.choice >= vote.poll.options.size
end
+
+ def self_vote?(vote)
+ vote.account_id == vote.poll.account_id
+ end
end
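
A minimal usage sketch of the new self-vote rule, with model names assumed from Mastodon's schema:

# Sketch: a vote cast by the poll's own author is now rejected.
vote = Vote.new(account: poll.account, poll: poll, choice: 0)
vote.valid? # => false; errors include I18n.t('polls.errors.self_vote')
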
diff --git a/app/views/admin/dashboard/index.html.haml b/app/views/admin/dashboard/index.html.haml
index 8354f0b9f5a18e..425472abdecaf0 100644
--- a/app/views/admin/dashboard/index.html.haml
+++ b/app/views/admin/dashboard/index.html.haml
@@ -12,7 +12,7 @@
- unless @system_checks.empty?
.flash-message-stack
- @system_checks.each do |message|
- .flash-message.warning
+ .flash-message{ class: message.critical ? 'alert' : 'warning' }
= t("admin.system_checks.#{message.key}.message_html", value: message.value ? content_tag(:strong, message.value) : nil)
- if message.action
= link_to t("admin.system_checks.#{message.key}.action"), message.action
diff --git a/app/views/admin/reports/actions/preview.html.haml b/app/views/admin/reports/actions/preview.html.haml
index 58745319c804f2..70edb48d80af94 100644
--- a/app/views/admin/reports/actions/preview.html.haml
+++ b/app/views/admin/reports/actions/preview.html.haml
@@ -54,15 +54,15 @@
.strike-card__statuses-list__item
- if (status = status_map[status_id.to_i])
.one-liner
- = link_to short_account_status_url(@report.target_account, status_id), class: 'emojify' do
- = one_line_preview(status)
+ .emojify= one_line_preview(status)
- - status.ordered_media_attachments.each do |media_attachment|
- %abbr{ title: media_attachment.description }
- = fa_icon 'link'
- = media_attachment.file_file_name
+ - status.ordered_media_attachments.each do |media_attachment|
+ %abbr{ title: media_attachment.description }
+ = fa_icon 'link'
+ = media_attachment.file_file_name
.strike-card__statuses-list__item__meta
- %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
+ = link_to ActivityPub::TagManager.instance.url_for(status), target: '_blank' do
+ %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
- unless status.application.nil?
·
= status.application.name
diff --git a/app/views/admin/statuses/show.html.haml b/app/views/admin/statuses/show.html.haml
index 62b49de8c8b155..4631e97f161397 100644
--- a/app/views/admin/statuses/show.html.haml
+++ b/app/views/admin/statuses/show.html.haml
@@ -34,7 +34,7 @@
%td
- if @status.trend.allowed?
%abbr{ title: t('admin.trends.tags.current_score', score: @status.trend.score) }= t('admin.trends.tags.trending_rank', rank: @status.trend.rank)
- - elsif @status.trend.requires_review?
+ - elsif @status.requires_review?
= t('admin.trends.pending_review')
- else
= t('admin.trends.not_allowed_to_trend')
diff --git a/app/views/admin/trends/links/preview_card_providers/index.html.haml b/app/views/admin/trends/links/preview_card_providers/index.html.haml
index c3648c35e97bd3..025270c128fbc9 100644
--- a/app/views/admin/trends/links/preview_card_providers/index.html.haml
+++ b/app/views/admin/trends/links/preview_card_providers/index.html.haml
@@ -29,7 +29,7 @@
- Trends::PreviewCardProviderFilter::KEYS.each do |key|
= hidden_field_tag key, params[key] if params[key].present?
- .batch-table.optional
+ .batch-table
.batch-table__toolbar
%label.batch-table__toolbar__select.batch-checkbox-all
= check_box_tag :batch_checkbox_all, nil, false
diff --git a/app/views/admin/webhooks/_form.html.haml b/app/views/admin/webhooks/_form.html.haml
index c1e8f8979bdd43..6c5ff03dd510e9 100644
--- a/app/views/admin/webhooks/_form.html.haml
+++ b/app/views/admin/webhooks/_form.html.haml
@@ -5,7 +5,7 @@
= f.input :url, wrapper: :with_block_label, input_html: { placeholder: 'https://' }
.fields-group
- = f.input :events, collection: Webhook::EVENTS, wrapper: :with_block_label, include_blank: false, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li'
+ = f.input :events, collection: Webhook::EVENTS, wrapper: :with_block_label, include_blank: false, as: :check_boxes, collection_wrapper_tag: 'ul', item_wrapper_tag: 'li', disabled: Webhook::EVENTS.filter { |event| !current_user.role.can?(Webhook.permission_for_event(event)) }
.actions
= f.button :button, @webhook.new_record? ? t('admin.webhooks.add_new') : t('generic.save_changes'), type: :submit
diff --git a/app/views/application/_sidebar.html.haml b/app/views/application/_sidebar.html.haml
index 6d18668b08f262..9d0efa7e10b7e1 100644
--- a/app/views/application/_sidebar.html.haml
+++ b/app/views/application/_sidebar.html.haml
@@ -3,7 +3,7 @@
= image_tag @instance_presenter.thumbnail&.file&.url(:'@1x') || asset_pack_path('media/images/preview.png'), alt: @instance_presenter.title
.hero-widget__text
- %p= @instance_presenter.description.html_safe.presence || t('about.about_mastodon_html')
+ %p= @instance_presenter.description.presence || t('about.about_mastodon_html')
- if Setting.trends && !(user_signed_in? && !current_user.setting_trends)
- trends = Trends.tags.query.allowed.limit(3)
diff --git a/app/views/disputes/strikes/show.html.haml b/app/views/disputes/strikes/show.html.haml
index 7797348dd72756..ce52e470d9d070 100644
--- a/app/views/disputes/strikes/show.html.haml
+++ b/app/views/disputes/strikes/show.html.haml
@@ -50,15 +50,15 @@
.strike-card__statuses-list__item
- if (status = status_map[status_id.to_i])
.one-liner
- = link_to short_account_status_url(@strike.target_account, status_id), class: 'emojify' do
- = one_line_preview(status)
+ .emojify= one_line_preview(status)
- - status.ordered_media_attachments.each do |media_attachment|
- %abbr{ title: media_attachment.description }
- = fa_icon 'link'
- = media_attachment.file_file_name
+ - status.ordered_media_attachments.each do |media_attachment|
+ %abbr{ title: media_attachment.description }
+ = fa_icon 'link'
+ = media_attachment.file_file_name
.strike-card__statuses-list__item__meta
- %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
+ = link_to ActivityPub::TagManager.instance.url_for(status), target: '_blank' do
+ %time.formatted{ datetime: status.created_at.iso8601, title: l(status.created_at) }= l(status.created_at)
- unless status.application.nil?
·
= status.application.name
diff --git a/app/views/oauth/authorized_applications/index.html.haml b/app/views/oauth/authorized_applications/index.html.haml
index 0280d8aef84a88..55d8524dbe91e8 100644
--- a/app/views/oauth/authorized_applications/index.html.haml
+++ b/app/views/oauth/authorized_applications/index.html.haml
@@ -18,8 +18,8 @@
.announcements-list__item__action-bar
.announcements-list__item__meta
- - if application.most_recently_used_access_token
- = t('doorkeeper.authorized_applications.index.last_used_at', date: l(application.most_recently_used_access_token.last_used_at.to_date))
+ - if @last_used_at_by_app[application.id]
+ = t('doorkeeper.authorized_applications.index.last_used_at', date: l(@last_used_at_by_app[application.id].to_date))
- else
= t('doorkeeper.authorized_applications.index.never_used')
diff --git a/app/views/relationships/show.html.haml b/app/views/relationships/show.html.haml
index 2899cd5140feda..f08e9c1df8efd5 100644
--- a/app/views/relationships/show.html.haml
+++ b/app/views/relationships/show.html.haml
@@ -48,7 +48,7 @@
= f.button safe_join([fa_icon('trash'), t('relationships.remove_selected_followers')]), name: :remove_from_followers, class: 'table-action-link', type: :submit, data: { confirm: t('relationships.confirm_remove_selected_followers') } unless following_relationship?
- = f.button safe_join([fa_icon('trash'), t('relationships.remove_selected_domains')]), name: :block_domains, class: 'table-action-link', type: :submit, data: { confirm: t('admin.reports.are_you_sure') } if followed_by_relationship?
+ = f.button safe_join([fa_icon('trash'), t('relationships.remove_selected_domains')]), name: :remove_domains_from_followers, class: 'table-action-link', type: :submit, data: { confirm: t('admin.reports.are_you_sure') } if followed_by_relationship?
.batch-table__body
- if @accounts.empty?
= nothing_here 'nothing-here--under-tabs'
diff --git a/app/views/settings/exports/show.html.haml b/app/views/settings/exports/show.html.haml
index c49613fdc0c61f..d7b59af270a43b 100644
--- a/app/views/settings/exports/show.html.haml
+++ b/app/views/settings/exports/show.html.haml
@@ -64,6 +64,6 @@
%td= l backup.created_at
- if backup.processed?
%td= number_to_human_size backup.dump_file_size
- %td= table_link_to 'download', t('exports.archive_takeout.download'), backup.dump.url
+ %td= table_link_to 'download', t('exports.archive_takeout.download'), download_backup_url(backup)
- else
%td{ colspan: 2 }= t('exports.archive_takeout.in_progress')
diff --git a/app/views/settings/preferences/appearance/show.html.haml b/app/views/settings/preferences/appearance/show.html.haml
index 9e3964f2176c44..fc2af98cdf74f1 100644
--- a/app/views/settings/preferences/appearance/show.html.haml
+++ b/app/views/settings/preferences/appearance/show.html.haml
@@ -15,6 +15,9 @@
.flash-message.translation-prompt
#{t 'appearance.localization.body'} #{content_tag(:a, t('appearance.localization.guide_link_text'), href: t('appearance.localization.guide_link'), target: "_blank", rel: "noopener")}
+ .fields-group
+ = f.input :setting_webui_styles, collection: ['default', 'compact', 'legacy'], label_method: lambda { |item| t("simple_form.hints.defaults.setting_webui_styles_#{item}") }, wrapper: :with_label, include_blank: false, hint: t('simple_form.hints.defaults.setting_webui_styles_hint')
+
%h4= t 'appearance.advanced_web_interface'
%p.hint= t 'appearance.advanced_web_interface_hint'
diff --git a/app/views/shared/_og.html.haml b/app/views/shared/_og.html.haml
index 2941b566e05ae5..a5d99ae33ac6f8 100644
--- a/app/views/shared/_og.html.haml
+++ b/app/views/shared/_og.html.haml
@@ -1,5 +1,5 @@
- thumbnail = @instance_presenter.thumbnail
-- description ||= strip_tags(@instance_presenter.description.presence || t('about.about_mastodon_html'))
+- description ||= @instance_presenter.description.presence || strip_tags(t('about.about_mastodon_html'))
%meta{ name: 'description', content: description }/
diff --git a/app/views/user_mailer/backup_ready.html.haml b/app/views/user_mailer/backup_ready.html.haml
index 85140b08be3b4e..465ead2c8bac23 100644
--- a/app/views/user_mailer/backup_ready.html.haml
+++ b/app/views/user_mailer/backup_ready.html.haml
@@ -55,5 +55,5 @@
%tbody
%tr
%td.button-primary
- = link_to full_asset_url(@backup.dump.url) do
+ = link_to download_backup_url(@backup) do
%span= t 'exports.archive_takeout.download'
diff --git a/app/views/user_mailer/backup_ready.text.erb b/app/views/user_mailer/backup_ready.text.erb
index eb89e7d743fba4..8ebbaae85a5b82 100644
--- a/app/views/user_mailer/backup_ready.text.erb
+++ b/app/views/user_mailer/backup_ready.text.erb
@@ -4,4 +4,4 @@
<%= t 'user_mailer.backup_ready.explanation' %>
-=> <%= full_asset_url(@backup.dump.url) %>
+=> <%= download_backup_url(@backup) %>
diff --git a/app/workers/activitypub/delivery_worker.rb b/app/workers/activitypub/delivery_worker.rb
index d9153132b3102b..7c1c14766b70a5 100644
--- a/app/workers/activitypub/delivery_worker.rb
+++ b/app/workers/activitypub/delivery_worker.rb
@@ -10,6 +10,16 @@ class ActivityPub::DeliveryWorker
sidekiq_options queue: 'push', retry: 16, dead: false
+ # Unfortunately, we cannot control Sidekiq's jitter, so add our own
+ sidekiq_retry_in do |count|
+ # This is Sidekiq's default delay
+ delay = (count**4) + 15
+ # Our custom jitter, that will be added to Sidekiq's built-in one.
+ # Sidekiq's built-in jitter is `rand(10) * (count + 1)`
+ jitter = rand(0.5 * (count**4))
+ delay + jitter
+ end
+
HEADERS = { 'Content-Type' => 'application/activity+json' }.freeze
def perform(json, source_account_id, inbox_url, options = {})
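
A rough illustration of how the combined backoff grows per retry; the values are indicative only, since both Sidekiq's built-in jitter and the custom one are random:

# Base delay plus the custom jitter defined in sidekiq_retry_in above.
(1..5).map do |count|
  base   = (count**4) + 15          # Sidekiq's default delay
  jitter = rand(0.5 * (count**4))   # custom jitter added by the patch
  (base + jitter).round
end
# e.g. [16, 36, 113, 330, 799] — actual values vary per run
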
diff --git a/app/workers/activitypub/migrated_follow_delivery_worker.rb b/app/workers/activitypub/migrated_follow_delivery_worker.rb
new file mode 100644
index 00000000000000..17a9e515efad08
--- /dev/null
+++ b/app/workers/activitypub/migrated_follow_delivery_worker.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+class ActivityPub::MigratedFollowDeliveryWorker < ActivityPub::DeliveryWorker
+ def perform(json, source_account_id, inbox_url, old_target_account_id, options = {})
+ super(json, source_account_id, inbox_url, options)
+ unfollow_old_account!(old_target_account_id)
+ end
+
+ private
+
+ def unfollow_old_account!(old_target_account_id)
+ old_target_account = Account.find(old_target_account_id)
+ UnfollowService.new.call(@source_account, old_target_account, skip_unmerge: true)
+ rescue StandardError
+ true
+ end
+end
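
For context, a hedged example of how this worker would presumably be enqueued during an account migration; the call site is not part of this hunk, so the argument names are assumptions:

# Assumed enqueue matching the perform signature above: deliver the Follow to
# the new account's inbox, then unfollow the old account afterwards.
ActivityPub::MigratedFollowDeliveryWorker.perform_async(
  payload_json, follower_account.id, new_account.inbox_url, old_account.id
)
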
diff --git a/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb b/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
index bd92fe32c40c44..a2ab31cc5d49a9 100644
--- a/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
+++ b/app/workers/scheduler/accounts_statuses_cleanup_scheduler.rb
@@ -7,52 +7,68 @@ class Scheduler::AccountsStatusesCleanupScheduler
# This limit is mostly to be nice to the fediverse at large and not
# generate too much traffic.
# This also helps limiting the running time of the scheduler itself.
- MAX_BUDGET = 50
+ MAX_BUDGET = 300
- # This is an attempt to spread the load across instances, as various
- # accounts are likely to have various followers.
+ # This is an attempt to spread the load across remote servers, as
+ # spreading deletions across diverse accounts is likely to spread
+ # the deletion across diverse followers. It also helps each individual
+ # user see some effect sooner.
PER_ACCOUNT_BUDGET = 5
# This is an attempt to limit the workload generated by status removal
- # jobs to something the particular instance can handle.
+ # jobs to something the particular server can handle.
PER_THREAD_BUDGET = 5
- # Those avoid loading an instance that is already under load
- MAX_DEFAULT_SIZE = 2
- MAX_DEFAULT_LATENCY = 5
- MAX_PUSH_SIZE = 5
- MAX_PUSH_LATENCY = 10
- # 'pull' queue has lower priority jobs, and it's unlikely that pushing
- # deletes would cause much issues with this queue if it didn't cause issues
- # with default and push. Yet, do not enqueue deletes if the instance is
- # lagging behind too much.
- MAX_PULL_SIZE = 500
- MAX_PULL_LATENCY = 300
-
- # This is less of an issue in general, but deleting old statuses is likely
- # to cause delivery errors, and thus increase the number of jobs to be retried.
- # This doesn't directly translate to load, but connection errors and a high
- # number of dead instances may lead to this spiraling out of control if
- # unchecked.
- MAX_RETRY_SIZE = 50_000
-
- sidekiq_options retry: 0, lock: :until_executed
+ # These are latency limits on various queues above which a server is
+ # considered to be under load, causing the auto-deletion to be entirely
+ # skipped for that run.
+ LOAD_LATENCY_THRESHOLDS = {
+ default: 5,
+ push: 10,
+ # The `pull` queue has lower priority jobs, and it's unlikely that
+ # pushing deletes would cause much issues with this queue if it didn't
+ # cause issues with `default` and `push`. Yet, do not enqueue deletes
+ # if the instance is lagging behind too much.
+ pull: 5.minutes.to_i,
+ }.freeze
+
+ sidekiq_options retry: 0, lock: :until_executed, lock_ttl: 1.day.to_i
def perform
return if under_load?
budget = compute_budget
- first_policy_id = last_processed_id
+
+ # If the budget allows it, we want to consider all accounts with enabled
+ # auto cleanup at least once.
+ #
+ # We start from `first_policy_id` (the last processed id in the previous
+ # run) and process each policy until we loop to `first_policy_id`,
+ # recording into `affected_policies` any policy that caused posts to be
+ # deleted.
+ #
+ # After that, we set `full_iteration` to `false` and continue looping on
+ # policies from `affected_policies`.
+ first_policy_id = last_processed_id || 0
+ first_iteration = true
+ full_iteration = true
+ affected_policies = []
loop do
num_processed_accounts = 0
- scope = AccountStatusesCleanupPolicy.where(enabled: true)
- scope.where(Account.arel_table[:id].gt(first_policy_id)) if first_policy_id.present?
+ scope = cleanup_policies(first_policy_id, affected_policies, first_iteration, full_iteration)
scope.find_each(order: :asc) do |policy|
num_deleted = AccountStatusesCleanupService.new.call(policy, [budget, PER_ACCOUNT_BUDGET].min)
- num_processed_accounts += 1 unless num_deleted.zero?
budget -= num_deleted
+
+ unless num_deleted.zero?
+ num_processed_accounts += 1
+ affected_policies << policy.id if full_iteration
+ end
+
+ full_iteration = false if !first_iteration && policy.id >= first_policy_id
+
if budget.zero?
save_last_processed_id(policy.id)
break
@@ -61,37 +77,55 @@ def perform
# The idea here is to loop through all policies at least once until the budget is exhausted
# and start back after the last processed account otherwise
- break if budget.zero? || (num_processed_accounts.zero? && first_policy_id.nil?)
- first_policy_id = nil
+ break if budget.zero? || (num_processed_accounts.zero? && !full_iteration)
+
+ full_iteration = false unless first_iteration
+ first_iteration = false
end
end
def compute_budget
- threads = Sidekiq::ProcessSet.new.select { |x| x['queues'].include?('push') }.map { |x| x['concurrency'] }.sum
+ # Each post deletion is a `RemovalWorker` job (on `default` queue), each
+ # potentially spawning many `ActivityPub::DeliveryWorker` jobs (on the `push` queue).
+ threads = Sidekiq::ProcessSet.new.select { |x| x['queues'].include?('push') }.pluck('concurrency').sum
[PER_THREAD_BUDGET * threads, MAX_BUDGET].min
end
def under_load?
- return true if Sidekiq::Stats.new.retry_size > MAX_RETRY_SIZE
- queue_under_load?('default', MAX_DEFAULT_SIZE, MAX_DEFAULT_LATENCY) || queue_under_load?('push', MAX_PUSH_SIZE, MAX_PUSH_LATENCY) || queue_under_load?('pull', MAX_PULL_SIZE, MAX_PULL_LATENCY)
+ LOAD_LATENCY_THRESHOLDS.any? { |queue, max_latency| queue_under_load?(queue, max_latency) }
end
private
- def queue_under_load?(name, max_size, max_latency)
- queue = Sidekiq::Queue.new(name)
- queue.size > max_size || queue.latency > max_latency
+ def cleanup_policies(first_policy_id, affected_policies, first_iteration, full_iteration)
+ scope = AccountStatusesCleanupPolicy.where(enabled: true)
+
+ if full_iteration
+ # If we are doing a full iteration, examine all policies we have not examined yet
+ if first_iteration
+ scope.where(id: first_policy_id...)
+ else
+ scope.where(id: ..first_policy_id).or(scope.where(id: affected_policies))
+ end
+ else
+ # Otherwise, examine only policies that previously yielded posts to delete
+ scope.where(id: affected_policies)
+ end
+ end
+
+ def queue_under_load?(name, max_latency)
+ Sidekiq::Queue.new(name).latency > max_latency
end
def last_processed_id
- redis.get('account_statuses_cleanup_scheduler:last_account_id')
+ redis.get('account_statuses_cleanup_scheduler:last_policy_id')&.to_i
end
def save_last_processed_id(id)
if id.nil?
- redis.del('account_statuses_cleanup_scheduler:last_account_id')
+ redis.del('account_statuses_cleanup_scheduler:last_policy_id')
else
- redis.set('account_statuses_cleanup_scheduler:last_account_id', id, ex: 1.hour.seconds)
+ redis.set('account_statuses_cleanup_scheduler:last_policy_id', id, ex: 1.hour.seconds)
end
end
end
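
A worked example, with assumed numbers, of the budget computation above:

# Sketch: three Sidekiq processes listening on the `push` queue at
# concurrency 25 each; the per-run budget is capped by MAX_BUDGET.
threads = [25, 25, 25].sum                     # => 75
[PER_THREAD_BUDGET * threads, MAX_BUDGET].min  # => [375, 300].min => 300
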
diff --git a/app/workers/scheduler/indexing_scheduler.rb b/app/workers/scheduler/indexing_scheduler.rb
index c423966297c310..73ddc3fc8813cd 100644
--- a/app/workers/scheduler/indexing_scheduler.rb
+++ b/app/workers/scheduler/indexing_scheduler.rb
@@ -6,17 +6,22 @@ class Scheduler::IndexingScheduler
sidekiq_options retry: 0
+ IMPORT_BATCH_SIZE = 1000
+ SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
+
def perform
return unless Chewy.enabled?
indexes.each do |type|
with_redis do |redis|
- ids = redis.smembers("chewy:queue:#{type.name}")
-
- type.import!(ids)
-
- redis.pipelined do |pipeline|
- ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
+ redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
+ type.import!(ids)
+ rescue => e
+ Rails.logger.error("Error while running #{type.name}#import!: #{e}")
+ ensure
+ redis.pipelined do |pipeline|
+ pipeline.srem("chewy:queue:#{type.name}", ids)
+ end
end
end
end
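
The batching pattern in isolation, with an assumed index name, in case the nesting above is hard to follow:

# Sketch: stream set members in SCAN-sized chunks, import in smaller batches,
# and always clear the processed ids from the queue set, even on failure.
redis.sscan_each('chewy:queue:StatusesIndex', count: 10_000).each_slice(1_000) do |ids|
  StatusesIndex.import!(ids)
ensure
  redis.pipelined { |pipeline| pipeline.srem('chewy:queue:StatusesIndex', ids) }
end
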
diff --git a/app/workers/scheduler/user_cleanup_scheduler.rb b/app/workers/scheduler/user_cleanup_scheduler.rb
index 45cfbc62e61384..4aee7935a2f39d 100644
--- a/app/workers/scheduler/user_cleanup_scheduler.rb
+++ b/app/workers/scheduler/user_cleanup_scheduler.rb
@@ -24,7 +24,7 @@ def clean_unconfirmed_accounts!
def clean_discarded_statuses!
Status.unscoped.discarded.where('deleted_at <= ?', 30.days.ago).find_in_batches do |statuses|
RemovalWorker.push_bulk(statuses) do |status|
- [status.id, { 'immediate' => true }]
+ [status.id, { 'immediate' => true, 'skip_streaming' => true }]
end
end
end
diff --git a/app/workers/unfollow_follow_worker.rb b/app/workers/unfollow_follow_worker.rb
index 7203b4888f8c80..a4d57839de96c8 100644
--- a/app/workers/unfollow_follow_worker.rb
+++ b/app/workers/unfollow_follow_worker.rb
@@ -10,13 +10,7 @@ def perform(follower_account_id, old_target_account_id, new_target_account_id, b
old_target_account = Account.find(old_target_account_id)
new_target_account = Account.find(new_target_account_id)
- follow = follower_account.active_relationships.find_by(target_account: old_target_account)
- reblogs = follow&.show_reblogs?
- notify = follow&.notify?
- languages = follow&.languages
-
- FollowService.new.call(follower_account, new_target_account, reblogs: reblogs, notify: notify, languages: languages, bypass_locked: bypass_locked, bypass_limit: true)
- UnfollowService.new.call(follower_account, old_target_account, skip_unmerge: true)
+ FollowMigrationService.new.call(follower_account, new_target_account, old_target_account, bypass_locked: bypass_locked)
rescue ActiveRecord::RecordNotFound, Mastodon::NotPermittedError
true
end
diff --git a/bin/tootctl b/bin/tootctl
index a9ebb22c6dc5ac..9c7ae8b8712104 100755
--- a/bin/tootctl
+++ b/bin/tootctl
@@ -5,7 +5,9 @@ require_relative '../config/boot'
require_relative '../lib/cli'
begin
- Mastodon::CLI.start(ARGV)
+ Chewy.strategy(:mastodon) do
+ Mastodon::CLI.start(ARGV)
+ end
rescue Interrupt
exit(130)
end
diff --git a/config/application.rb b/config/application.rb
index c51eacd6800d79..55d98ca990e7f1 100644
--- a/config/application.rb
+++ b/config/application.rb
@@ -28,6 +28,7 @@
require_relative '../lib/paperclip/attachment_extensions'
require_relative '../lib/paperclip/lazy_thumbnail'
require_relative '../lib/paperclip/gif_transcoder'
+require_relative '../lib/paperclip/media_type_spoof_detector_extensions'
require_relative '../lib/paperclip/transcoder'
require_relative '../lib/paperclip/type_corrector'
require_relative '../lib/paperclip/response_with_limit_adapter'
@@ -35,9 +36,11 @@
require_relative '../lib/mastodon/snowflake'
require_relative '../lib/mastodon/version'
require_relative '../lib/mastodon/rack_middleware'
+require_relative '../lib/public_file_server_middleware'
require_relative '../lib/devise/two_factor_ldap_authenticatable'
require_relative '../lib/devise/two_factor_pam_authenticatable'
require_relative '../lib/chewy/strategy/mastodon'
+require_relative '../lib/chewy/strategy/bypass_with_warning'
require_relative '../lib/webpacker/manifest_extensions'
require_relative '../lib/webpacker/helper_extensions'
require_relative '../lib/rails/engine_extensions'
@@ -181,6 +184,10 @@ class Application < Rails::Application
config.active_job.queue_adapter = :sidekiq
config.action_mailer.deliver_later_queue_name = 'mailers'
+ # We use our own middleware for this
+ config.public_file_server.enabled = false
+
+ config.middleware.use PublicFileServerMiddleware if Rails.env.development? || ENV['RAILS_SERVE_STATIC_FILES'] == 'true'
config.middleware.use Rack::Attack
config.middleware.use Mastodon::RackMiddleware
diff --git a/config/database.yml b/config/database.yml
index bfb53f21b4a512..34acf2f19aedfd 100644
--- a/config/database.yml
+++ b/config/database.yml
@@ -5,6 +5,7 @@ default: &default
connect_timeout: 15
encoding: unicode
sslmode: <%= ENV['DB_SSLMODE'] || "prefer" %>
+ application_name: ''
development:
<<: *default
diff --git a/config/environments/development.rb b/config/environments/development.rb
index de8762ff744902..c7b4a5d035e4b7 100644
--- a/config/environments/development.rb
+++ b/config/environments/development.rb
@@ -16,12 +16,7 @@
# Run rails dev:cache to toggle caching.
if Rails.root.join('tmp/caching-dev.txt').exist?
config.action_controller.perform_caching = true
-
config.cache_store = :redis_cache_store, REDIS_CACHE_PARAMS
-
- config.public_file_server.headers = {
- 'Cache-Control' => "public, max-age=#{2.days.to_i}",
- }
else
config.action_controller.perform_caching = false
diff --git a/config/environments/production.rb b/config/environments/production.rb
index 99c9bb40c542c2..00d783477596da 100644
--- a/config/environments/production.rb
+++ b/config/environments/production.rb
@@ -19,27 +19,16 @@
# or in config/master.key. This key is used to decrypt credentials (and other encrypted files).
# config.require_master_key = true
- # Disable serving static files from the `/public` folder by default since
- # Apache or NGINX already handles this.
- config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
-
ActiveSupport::Logger.new(STDOUT).tap do |logger|
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
- # Compress JavaScripts and CSS.
- # config.assets.js_compressor = Uglifier.new(mangle: false)
- # config.assets.css_compressor = :sass
-
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
- # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
-
# Specifies the header that your server uses for sending files.
- # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
- config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
+ config.action_dispatch.x_sendfile_header = ENV['SENDFILE_HEADER'] if ENV['SENDFILE_HEADER'].present?
# Allow to specify public IP of reverse proxy if it's needed
config.action_dispatch.trusted_proxies = ENV['TRUSTED_PROXY_IP'].split(/(?:\s*,\s*|\s+)/).map { |item| IPAddr.new(item) } if ENV['TRUSTED_PROXY_IP'].present?
@@ -67,7 +56,7 @@
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# English when a translation cannot be found).
- config.i18n.fallbacks = [:en]
+ config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
@@ -128,6 +117,7 @@
enable_starttls_auto: enable_starttls_auto,
tls: ENV['SMTP_TLS'].presence && ENV['SMTP_TLS'] == 'true',
ssl: ENV['SMTP_SSL'].presence && ENV['SMTP_SSL'] == 'true',
+ read_timeout: 20,
}
config.action_mailer.delivery_method = ENV.fetch('SMTP_DELIVERY_METHOD', 'smtp').to_sym
diff --git a/config/environments/test.rb b/config/environments/test.rb
index ef3cb2e4877039..44786962ae73ef 100644
--- a/config/environments/test.rb
+++ b/config/environments/test.rb
@@ -12,11 +12,6 @@
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
- # Configure public file server for tests with Cache-Control for performance.
- config.public_file_server.enabled = true
- config.public_file_server.headers = {
- 'Cache-Control' => "public, max-age=#{1.hour.to_i}"
- }
config.assets.digest = false
# Show full error reports and disable caching.
diff --git a/config/imagemagick/policy.xml b/config/imagemagick/policy.xml
new file mode 100644
index 00000000000000..1052476b319eb3
--- /dev/null
+++ b/config/imagemagick/policy.xml
@@ -0,0 +1,27 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/config/initializers/chewy.rb b/config/initializers/chewy.rb
index 752fc3c6dfe551..daf4a5f3260cd6 100644
--- a/config/initializers/chewy.rb
+++ b/config/initializers/chewy.rb
@@ -19,7 +19,7 @@
# cycle, which takes care of checking if Elasticsearch is enabled
# or not. However, mind that for the Rails console, the :urgent
# strategy is set automatically with no way to override it.
-Chewy.root_strategy = :mastodon
+Chewy.root_strategy = :bypass_with_warning if Rails.env.production?
Chewy.request_strategy = :mastodon
Chewy.use_after_commit_callbacks = false
diff --git a/config/initializers/content_security_policy.rb b/config/initializers/content_security_policy.rb
index cb56293376d786..ccce6d71eb713f 100644
--- a/config/initializers/content_security_policy.rb
+++ b/config/initializers/content_security_policy.rb
@@ -3,7 +3,7 @@
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy
def host_to_url(str)
- "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str}" unless str.blank?
+ "http#{Rails.configuration.x.use_https ? 's' : ''}://#{str.split('/').first}" if str.present?
end
base_host = Rails.configuration.x.web_domain
diff --git a/config/initializers/paperclip.rb b/config/initializers/paperclip.rb
index a2285427c80eea..7ed244e0c0cb7a 100644
--- a/config/initializers/paperclip.rb
+++ b/config/initializers/paperclip.rb
@@ -124,6 +124,7 @@ def copy_to_local_file(style, local_dest_path)
openstack_domain_name: ENV.fetch('SWIFT_DOMAIN_NAME') { 'default' },
openstack_region: ENV['SWIFT_REGION'],
openstack_cache_ttl: ENV.fetch('SWIFT_CACHE_TTL') { 60 },
+ openstack_temp_url_key: ENV['SWIFT_TEMP_URL_KEY'],
},
fog_file: { 'Cache-Control' => 'public, max-age=315576000, immutable' },
@@ -154,3 +155,10 @@ class NetworkingError < StandardError; end
end
end
end
+
+# Set our ImageMagick security policy, but allow admins to override it
+ENV['MAGICK_CONFIGURE_PATH'] = begin
+ imagemagick_config_paths = ENV.fetch('MAGICK_CONFIGURE_PATH', '').split(File::PATH_SEPARATOR)
+ imagemagick_config_paths << Rails.root.join('config', 'imagemagick').expand_path.to_s
+ imagemagick_config_paths.join(File::PATH_SEPARATOR)
+end
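
A worked example, with an assumed pre-existing environment value, of the path override above:

# Sketch: with MAGICK_CONFIGURE_PATH=/etc/ImageMagick-6 already set, the
# admin-supplied directory is listed first and the bundled config is appended.
ENV['MAGICK_CONFIGURE_PATH']
# => "/etc/ImageMagick-6:/path/to/mastodon/config/imagemagick"
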
diff --git a/config/initializers/sidekiq.rb b/config/initializers/sidekiq.rb
index 9d2abf0745eca9..b847e654692d16 100644
--- a/config/initializers/sidekiq.rb
+++ b/config/initializers/sidekiq.rb
@@ -3,6 +3,11 @@
require_relative '../../lib/mastodon/sidekiq_middleware'
Sidekiq.configure_server do |config|
+ if Rails.configuration.database_configuration.dig('production', 'adapter') == 'postgresql_makara'
+ STDERR.puts 'ERROR: Database replication is not currently supported in Sidekiq workers. Check your configuration.'
+ exit 1
+ end
+
config.redis = REDIS_SIDEKIQ_PARAMS
config.server_middleware do |chain|
diff --git a/config/initializers/twitter_regex.rb b/config/initializers/twitter_regex.rb
index 6a7723fd213c77..e65b05dfdea083 100644
--- a/config/initializers/twitter_regex.rb
+++ b/config/initializers/twitter_regex.rb
@@ -25,7 +25,7 @@ class Regex
\)
/iox
UCHARS = '\u{A0}-\u{D7FF}\u{F900}-\u{FDCF}\u{FDF0}-\u{FFEF}\u{10000}-\u{1FFFD}\u{20000}-\u{2FFFD}\u{30000}-\u{3FFFD}\u{40000}-\u{4FFFD}\u{50000}-\u{5FFFD}\u{60000}-\u{6FFFD}\u{70000}-\u{7FFFD}\u{80000}-\u{8FFFD}\u{90000}-\u{9FFFD}\u{A0000}-\u{AFFFD}\u{B0000}-\u{BFFFD}\u{C0000}-\u{CFFFD}\u{D0000}-\u{DFFFD}\u{E1000}-\u{EFFFD}\u{E000}-\u{F8FF}\u{F0000}-\u{FFFFD}\u{100000}-\u{10FFFD}'
- REGEXEN[:valid_url_query_chars] = /[a-z0-9!?\*'\(\);:&=\+\$\/%#\[\]\-_\.,~|@#{UCHARS}]/iou
+ REGEXEN[:valid_url_query_chars] = /[a-z0-9!?\*'\(\);:&=\+\$\/%#\[\]\-_\.,~|@\^#{UCHARS}]/iou
REGEXEN[:valid_url_query_ending_chars] = /[a-z0-9_&=#\/\-#{UCHARS}]/iou
REGEXEN[:valid_url_path] = /(?:
(?:
diff --git a/config/locales/activerecord.en.yml b/config/locales/activerecord.en.yml
index 8aee15659f27e2..a53c7c6e9e3939 100644
--- a/config/locales/activerecord.en.yml
+++ b/config/locales/activerecord.en.yml
@@ -53,3 +53,7 @@ en:
position:
elevated: cannot be higher than your current role
own_role: cannot be changed with your current role
+ webhook:
+ attributes:
+ events:
+ invalid_permissions: cannot include events you don't have the rights to
diff --git a/config/locales/en.yml b/config/locales/en.yml
index 39ff4236a155fc..ce20d5d1dec60f 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -805,6 +805,12 @@ en:
message_html: You haven't defined any server rules.
sidekiq_process_check:
message_html: No Sidekiq process running for the %{value} queue(s). Please review your Sidekiq configuration
+ upload_check_privacy_error:
+ action: Check here for more information
+ message_html: "Your web server is misconfigured. The privacy of your users is at risk."
+ upload_check_privacy_error_object_storage:
+ action: Check here for more information
+ message_html: "Your object storage is misconfigured. The privacy of your users is at risk."
tags:
review: Review status
updated_msg: Hashtag settings updated successfully
@@ -1381,6 +1387,7 @@ en:
expired: The poll has already ended
invalid_choice: The chosen vote option does not exist
over_character_limit: cannot be longer than %{max} characters each
+ self_vote: You cannot vote in your own polls
too_few_options: must have more than one item
too_many_options: can't contain more than %{max} items
preferences:
@@ -1399,6 +1406,7 @@ en:
confirm_remove_selected_followers: Are you sure you want to remove selected followers?
confirm_remove_selected_follows: Are you sure you want to remove selected follows?
dormant: Dormant
+ follow_failure: Could not follow some of the selected accounts.
follow_selected_followers: Follow selected followers
followers: Followers
following: Following
diff --git a/config/locales/simple_form.en.yml b/config/locales/simple_form.en.yml
index 96b0131efe1eb9..ecfa6e27ce9a9d 100644
--- a/config/locales/simple_form.en.yml
+++ b/config/locales/simple_form.en.yml
@@ -59,6 +59,10 @@ en:
setting_show_application: The application you use to post will be displayed in the detailed view of your posts
setting_use_blurhash: Gradients are based on the colors of the hidden visuals but obfuscate any details
setting_use_pending_items: Hide timeline updates behind a click instead of automatically scrolling the feed
+ setting_webui_styles_hint: You can make the padding and margins tighter, like in previous versions
+ setting_webui_styles_default: Default
+ setting_webui_styles_compact: Compact
+ setting_webui_styles_legacy: Legacy style
username: Your username will be unique on %{domain}
whole_word: When the keyword or phrase is alphanumeric only, it will only be applied if it matches the whole word
domain_allow:
@@ -217,6 +221,7 @@ en:
setting_unfollow_modal: Show confirmation dialog before unfollowing someone
setting_use_blurhash: Show colorful gradients for hidden media
setting_use_pending_items: Slow mode
+ setting_webui_styles: WebUI styles
severity: Severity
sign_in_token_attempt: Security code
title: Title
diff --git a/config/locales/simple_form.ja.yml b/config/locales/simple_form.ja.yml
index f7e2cb9545e3a6..19871c55722757 100644
--- a/config/locales/simple_form.ja.yml
+++ b/config/locales/simple_form.ja.yml
@@ -59,7 +59,11 @@ ja:
setting_show_application: 投稿するのに使用したアプリが投稿の詳細ビューに表示されるようになります
setting_use_blurhash: ぼかしはメディアの色を元に生成されますが、細部は見えにくくなっています
setting_use_pending_items: 新着があってもタイムラインを自動的にスクロールしないようにします
- username: あなたのユーザー名は%{domain}の中で重複していない必要があります
+ setting_webui_styles_hint: 従来のバージョンのものに近い、余白が狭めの表示に切り替えることができます
+ setting_webui_styles_default: デフォルト
+ setting_webui_styles_compact: コンパクト
+ setting_webui_styles_legacy: 旧バージョン風
+ username: あなたのユーザー名は %{domain} の中で重複していない必要があります
whole_word: キーワードまたはフレーズが英数字のみの場合、単語全体と一致する場合のみ適用されるようになります
domain_allow:
domain: 登録するとこのサーバーからデータを受信したり、このドメインから受信するデータを処理して保存できるようになります
@@ -217,6 +221,7 @@ ja:
setting_unfollow_modal: フォローを解除する前に確認ダイアログを表示する
setting_use_blurhash: 非表示のメディアを色付きのぼかしで表示する
setting_use_pending_items: 手動更新モード
+ setting_webui_styles: 表示スタイル
severity: 重大性
sign_in_token_attempt: セキュリティコード
title: タイトル
diff --git a/config/puma.rb b/config/puma.rb
index e5929544583a30..c4e2b0b85c5c42 100644
--- a/config/puma.rb
+++ b/config/puma.rb
@@ -22,3 +22,5 @@
end
plugin :tmp_restart
+
+set_remote_address(proxy_protocol: :v1) if ENV['PROXY_PROTO_V1'] == 'true'
diff --git a/config/routes.rb b/config/routes.rb
index 319f0c7d151a35..49508dc39c5ece 100644
--- a/config/routes.rb
+++ b/config/routes.rb
@@ -110,6 +110,8 @@
resource :inbox, only: [:create], module: :activitypub
+ get '/:encoded_at(*path)', to: redirect("/@%{path}"), constraints: { encoded_at: /%40/ }
+
constraints(username: /[^@\/.]+/) do
get '/@:username', to: 'accounts#show', as: :short_account
get '/@:username/with_replies', to: 'accounts#show', as: :short_account_with_replies
@@ -218,6 +220,7 @@
resource :statuses_cleanup, controller: :statuses_cleanup, only: [:show, :update]
get '/media_proxy/:id/(*any)', to: 'media_proxy#show', as: :media_proxy, format: false
+ get '/backups/:id/download', to: 'backups#download', as: :download_backup, format: false
resource :authorize_interaction, only: [:show, :create]
resource :share, only: [:show, :create]
@@ -289,7 +292,7 @@
end
end
- resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ } do
+ resources :instances, only: [:index, :show, :destroy], constraints: { id: /[^\/]+/ }, format: 'html' do
member do
post :clear_delivery_errors
post :restart_delivery
@@ -470,7 +473,9 @@
resources :list, only: :show
end
- resources :streaming, only: [:index]
+ get '/streaming', to: 'streaming#index'
+ get '/streaming/(*any)', to: 'streaming#index'
+
resources :custom_emojis, only: [:index]
resources :suggestions, only: [:index, :destroy]
resources :scheduled_statuses, only: [:index, :show, :update, :destroy]
diff --git a/config/settings.yml b/config/settings.yml
index f0b09dd5c88700..abe50b2cdc81dd 100644
--- a/config/settings.yml
+++ b/config/settings.yml
@@ -70,6 +70,7 @@ defaults: &defaults
show_domain_blocks: 'disabled'
show_domain_blocks_rationale: 'disabled'
require_invite_text: false
+ webui_styles: 'default'
backups_retention_period: 7
development:
diff --git a/db/migrate/20191008135601_add_wakuwaku_local_index_to_statuses.rb b/db/migrate/20191008135601_add_wakuwaku_local_index_to_statuses.rb
new file mode 100644
index 00000000000000..db76519f4ad6e3
--- /dev/null
+++ b/db/migrate/20191008135601_add_wakuwaku_local_index_to_statuses.rb
@@ -0,0 +1,11 @@
+class AddWakuwakuLocalIndexToStatuses < ActiveRecord::Migration[5.2]
+ disable_ddl_transaction!
+
+ def up
+ add_index :statuses, [:id, :account_id], name: :index_statuses_wakuwaku_local_20191008, algorithm: :concurrently, order: { id: :desc }, where: '(local OR (uri IS NULL)) AND deleted_at IS NULL AND (visibility = 0 OR visibility = 1) AND reblog_of_id IS NULL AND ((NOT reply) OR (in_reply_to_account_id = account_id))'
+ end
+
+ def down
+ remove_index :statuses, name: :index_statuses_wakuwaku_local_20191008
+ end
+end
diff --git a/db/migrate/20191008135602_remove_wakuwaku_local_index_to_statuses.rb b/db/migrate/20191008135602_remove_wakuwaku_local_index_to_statuses.rb
new file mode 100644
index 00000000000000..3aa4cfea161714
--- /dev/null
+++ b/db/migrate/20191008135602_remove_wakuwaku_local_index_to_statuses.rb
@@ -0,0 +1,11 @@
+class RemoveWakuwakuLocalIndexToStatuses < ActiveRecord::Migration[6.1]
+ disable_ddl_transaction!
+
+ def up
+ remove_index :statuses, name: :index_statuses_wakuwaku_local_20191008
+ end
+
+ def down
+ add_index :statuses, [:id, :account_id], name: :index_statuses_wakuwaku_local_20191008, algorithm: :concurrently, order: { id: :desc }, where: "(local OR (uri IS NULL)) AND deleted_at IS NULL AND (visibility = 0 OR visibility = 1) AND reblog_of_id IS NULL AND ((NOT reply) OR (in_reply_to_account_id = account_id))"
+ end
+end
diff --git a/db/seeds.rb b/db/seeds.rb
index 1ca300de734bec..c01e83f1d39020 100644
--- a/db/seeds.rb
+++ b/db/seeds.rb
@@ -1,5 +1,7 @@
# frozen_string_literal: true
-Dir[Rails.root.join('db', 'seeds', '*.rb')].sort.each do |seed|
- load seed
+Chewy.strategy(:mastodon) do
+ Dir[Rails.root.join('db', 'seeds', '*.rb')].sort.each do |seed|
+ load seed
+ end
end
diff --git a/dist/nginx.conf b/dist/nginx.conf
index 5bc960e2568c36..b59446403b0b4f 100644
--- a/dist/nginx.conf
+++ b/dist/nginx.conf
@@ -109,6 +109,8 @@ server {
location ~ ^/system/ {
add_header Cache-Control "public, max-age=2419200, immutable";
add_header Strict-Transport-Security "max-age=63072000; includeSubDomains";
+ add_header X-Content-Type-Options nosniff;
+ add_header Content-Security-Policy "default-src 'none'; form-action 'none'";
try_files $uri =404;
}
diff --git a/docker-compose.yml b/docker-compose.yml
index c534286c76d315..769b215737d8ff 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -56,7 +56,7 @@ services:
web:
build: .
- image: tootsuite/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.8
restart: always
env_file: .env.production
command: bash -c "rm -f /mastodon/tmp/pids/server.pid; bundle exec rails s -p 3000"
@@ -77,7 +77,7 @@ services:
streaming:
build: .
- image: tootsuite/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.8
restart: always
env_file: .env.production
command: node ./streaming
@@ -95,7 +95,7 @@ services:
sidekiq:
build: .
- image: tootsuite/mastodon
+ image: ghcr.io/mastodon/mastodon:v4.1.8
restart: always
env_file: .env.production
command: bundle exec sidekiq
diff --git a/lib/chewy/strategy/bypass_with_warning.rb b/lib/chewy/strategy/bypass_with_warning.rb
new file mode 100644
index 00000000000000..eb6fbaab167603
--- /dev/null
+++ b/lib/chewy/strategy/bypass_with_warning.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module Chewy
+ class Strategy
+ class BypassWithWarning < Base
+ def update(...)
+ Rails.logger.warn 'Chewy update without a root strategy' unless @warning_issued
+ @warning_issued = true
+ end
+ end
+ end
+end
diff --git a/lib/mastodon/accounts_cli.rb b/lib/mastodon/accounts_cli.rb
index 34afbc699dd89b..d4a6bbc1276608 100644
--- a/lib/mastodon/accounts_cli.rb
+++ b/lib/mastodon/accounts_cli.rb
@@ -372,16 +372,16 @@ def cull(*domains)
option :concurrency, type: :numeric, default: 5, aliases: [:c]
option :verbose, type: :boolean, aliases: [:v]
option :dry_run, type: :boolean
- desc 'refresh [USERNAME]', 'Fetch remote user data and files'
+ desc 'refresh [USERNAMES]', 'Fetch remote user data and files'
long_desc <<-LONG_DESC
Fetch remote user data and files for one or multiple accounts.
With the --all option, all remote accounts will be processed.
Through the --domain option, this can be narrowed down to a
- specific domain only. Otherwise, a single remote account must
- be specified with USERNAME.
+ specific domain only. Otherwise, remote accounts must be
+ specified with space-separated USERNAMES.
LONG_DESC
- def refresh(username = nil)
+ def refresh(*usernames)
dry_run = options[:dry_run] ? ' (DRY RUN)' : ''
if options[:domain] || options[:all]
@@ -397,19 +397,25 @@ def refresh(username = nil)
end
say("Refreshed #{processed} accounts#{dry_run}", :green, true)
- elsif username.present?
- username, domain = username.split('@')
- account = Account.find_remote(username, domain)
+ elsif !usernames.empty?
+ usernames.each do |user|
+ user, domain = user.split('@')
+ account = Account.find_remote(user, domain)
+
+ if account.nil?
+ say('No such account', :red)
+ exit(1)
+ end
- if account.nil?
- say('No such account', :red)
- exit(1)
- end
+ next if options[:dry_run]
- unless options[:dry_run]
- account.reset_avatar!
- account.reset_header!
- account.save
+ begin
+ account.reset_avatar!
+ account.reset_header!
+ account.save
+ rescue Mastodon::UnexpectedResponseError
+ say("Account failed: #{user}@#{domain}", :red)
+ end
end
say("OK#{dry_run}", :green)
@@ -536,7 +542,7 @@ def approve(username = nil)
User.pending.find_each(&:approve!)
say('OK', :green)
elsif options[:number]
- User.pending.limit(options[:number]).each(&:approve!)
+ User.pending.order(created_at: :asc).limit(options[:number]).each(&:approve!)
say('OK', :green)
elsif username.present?
account = Account.find_local(username)
@@ -631,7 +637,7 @@ def migrate(username)
exit(1)
end
- unless options[:force] || migration.target_acount_id == account.moved_to_account_id
+ unless options[:force] || migration.target_account_id == account.moved_to_account_id
say('The specified account is not redirecting to its last migration target. Use --force if you want to replay the migration anyway', :red)
exit(1)
end
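
A hedged usage example of the multi-username refresh change above:

# Invocation sketch (shell command shown as a comment for reference):
#   tootctl accounts refresh alice@remote.example bob@other.example --concurrency 10
# Each username is resolved with Account.find_remote; unless --dry-run is given,
# the avatar and header are re-fetched, and failures are reported per account.
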
diff --git a/lib/mastodon/cli_helper.rb b/lib/mastodon/cli_helper.rb
index a78a28e2734b2d..4e304c903539b5 100644
--- a/lib/mastodon/cli_helper.rb
+++ b/lib/mastodon/cli_helper.rb
@@ -53,14 +53,16 @@ def parallelize_with_progress(scope)
progress.log("Processing #{item.id}") if options[:verbose]
- result = ActiveRecord::Base.connection_pool.with_connection do
- yield(item)
- ensure
- RedisConfiguration.pool.checkin if Thread.current[:redis]
- Thread.current[:redis] = nil
+ Chewy.strategy(:mastodon) do
+ result = ActiveRecord::Base.connection_pool.with_connection do
+ yield(item)
+ ensure
+ RedisConfiguration.pool.checkin if Thread.current[:redis]
+ Thread.current[:redis] = nil
+ end
+
+ aggregate.increment(result) if result.is_a?(Integer)
end
-
- aggregate.increment(result) if result.is_a?(Integer)
rescue => e
progress.log pastel.red("Error processing #{item.id}: #{e}")
ensure
diff --git a/lib/mastodon/sidekiq_middleware.rb b/lib/mastodon/sidekiq_middleware.rb
index c75e8401f5fbc2..9832e1a27c96ca 100644
--- a/lib/mastodon/sidekiq_middleware.rb
+++ b/lib/mastodon/sidekiq_middleware.rb
@@ -3,8 +3,8 @@
class Mastodon::SidekiqMiddleware
BACKTRACE_LIMIT = 3
- def call(*)
- yield
+ def call(*, &block)
+ Chewy.strategy(:mastodon, &block)
rescue Mastodon::HostValidationError
# Do not retry
rescue => e
diff --git a/lib/mastodon/version.rb b/lib/mastodon/version.rb
index 8101e61dd28c1f..725ddaf613c5fe 100644
--- a/lib/mastodon/version.rb
+++ b/lib/mastodon/version.rb
@@ -13,7 +13,7 @@ def minor
end
def patch
- 0
+ 8
end
def flags
diff --git a/lib/paperclip/media_type_spoof_detector_extensions.rb b/lib/paperclip/media_type_spoof_detector_extensions.rb
new file mode 100644
index 00000000000000..a406ef312fd3c7
--- /dev/null
+++ b/lib/paperclip/media_type_spoof_detector_extensions.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Paperclip
+ module MediaTypeSpoofDetectorExtensions
+ def calculated_content_type
+ return @calculated_content_type if defined?(@calculated_content_type)
+
+ @calculated_content_type = type_from_file_command.chomp
+
+ # The `file` command fails to recognize some MP3 files as such
+ @calculated_content_type = type_from_marcel if @calculated_content_type == 'application/octet-stream' && type_from_marcel == 'audio/mpeg'
+ @calculated_content_type
+ end
+
+ def type_from_marcel
+ @type_from_marcel ||= Marcel::MimeType.for Pathname.new(@file.path),
+ name: @file.path
+ end
+ end
+end
+
+Paperclip::MediaTypeSpoofDetector.prepend(Paperclip::MediaTypeSpoofDetectorExtensions)
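
A small sketch, with an assumed file path, of the Marcel fallback used above for MP3s that `file` reports as application/octet-stream:

require 'marcel'
require 'pathname'

path = '/tmp/upload.mp3' # assumed example file
Marcel::MimeType.for(Pathname.new(path), name: path)
# => "audio/mpeg" for a typical MP3, based on magic bytes and the file name
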
diff --git a/lib/paperclip/transcoder.rb b/lib/paperclip/transcoder.rb
index afd9f58ff695c0..0f2e30f7d5e45e 100644
--- a/lib/paperclip/transcoder.rb
+++ b/lib/paperclip/transcoder.rb
@@ -19,10 +19,7 @@ def initialize(file, options = {}, attachment = nil)
def make
metadata = VideoMetadataExtractor.new(@file.path)
- unless metadata.valid?
- Paperclip.log("Unsupported file #{@file.path}")
- return File.open(@file.path)
- end
+ raise Paperclip::Error, "Error while transcoding #{@file.path}: unsupported file" unless metadata.valid?
update_attachment_type(metadata)
update_options_from_metadata(metadata)
@@ -40,12 +37,14 @@ def make
@output_options['f'] = 'image2'
@output_options['vframes'] = 1
when 'mp4'
- @output_options['acodec'] = 'aac'
- @output_options['strict'] = 'experimental'
-
- if high_vfr?(metadata) && !eligible_to_passthrough?(metadata)
- @output_options['vsync'] = 'vfr'
- @output_options['r'] = @vfr_threshold
+ unless eligible_to_passthrough?(metadata)
+ @output_options['acodec'] = 'aac'
+ @output_options['strict'] = 'experimental'
+
+ if high_vfr?(metadata)
+ @output_options['vsync'] = 'vfr'
+ @output_options['r'] = @vfr_threshold
+ end
end
end
diff --git a/lib/public_file_server_middleware.rb b/lib/public_file_server_middleware.rb
new file mode 100644
index 00000000000000..7e02e37a08464a
--- /dev/null
+++ b/lib/public_file_server_middleware.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'action_dispatch/middleware/static'
+
+class PublicFileServerMiddleware
+ SERVICE_WORKER_TTL = 7.days.to_i
+ CACHE_TTL = 28.days.to_i
+
+ def initialize(app)
+ @app = app
+ @file_handler = ActionDispatch::FileHandler.new(Rails.application.paths['public'].first)
+ end
+
+ def call(env)
+ file = @file_handler.attempt(env)
+
+ # If the request is not a static file, move on!
+ return @app.call(env) if file.nil?
+
+ status, headers, response = file
+
+ # Set cache headers on static files. Some paths require different cache headers
+ headers['Cache-Control'] = begin
+ request_path = env['REQUEST_PATH']
+
+ if request_path.start_with?('/sw.js')
+ "public, max-age=#{SERVICE_WORKER_TTL}, must-revalidate"
+ elsif request_path.start_with?(paperclip_root_url)
+ "public, max-age=#{CACHE_TTL}, immutable"
+ else
+ "public, max-age=#{CACHE_TTL}, must-revalidate"
+ end
+ end
+
+ # Override the default CSP header set by the CSP middleware
+ headers['Content-Security-Policy'] = "default-src 'none'; form-action 'none'" if request_path.start_with?(paperclip_root_url)
+
+ headers['X-Content-Type-Options'] = 'nosniff'
+
+ [status, headers, response]
+ end
+
+ private
+
+ def paperclip_root_url
+ ENV.fetch('PAPERCLIP_ROOT_URL', '/system')
+ end
+end
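
For reference, the Cache-Control values the middleware above emits for a few assumed request paths:

# Sketch (paths assumed):
#   GET /sw.js              -> "public, max-age=604800, must-revalidate"   (SERVICE_WORKER_TTL, 7 days)
#   GET /system/media/1.png -> "public, max-age=2419200, immutable"        (CACHE_TTL, 28 days) plus the restrictive CSP
#   GET /packs/app.js       -> "public, max-age=2419200, must-revalidate"  (CACHE_TTL, 28 days)
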
diff --git a/lib/sanitize_ext/sanitize_config.rb b/lib/sanitize_ext/sanitize_config.rb
index baf65266263fd8..703ba8b0598e83 100644
--- a/lib/sanitize_ext/sanitize_config.rb
+++ b/lib/sanitize_ext/sanitize_config.rb
@@ -94,26 +94,26 @@ module Config
]
)
- MASTODON_OEMBED ||= freeze_config merge(
- RELAXED,
- elements: RELAXED[:elements] + %w(audio embed iframe source video),
+ MASTODON_OEMBED ||= freeze_config(
+ elements: %w(audio embed iframe source video),
- attributes: merge(
- RELAXED[:attributes],
+ attributes: {
'audio' => %w(controls),
'embed' => %w(height src type width),
'iframe' => %w(allowfullscreen frameborder height scrolling src width),
'source' => %w(src type),
'video' => %w(controls height loop width),
- 'div' => [:data]
- ),
+ },
- protocols: merge(
- RELAXED[:protocols],
+ protocols: {
'embed' => { 'src' => HTTP_PROTOCOLS },
'iframe' => { 'src' => HTTP_PROTOCOLS },
- 'source' => { 'src' => HTTP_PROTOCOLS }
- )
+ 'source' => { 'src' => HTTP_PROTOCOLS },
+ },
+
+ add_attributes: {
+ 'iframe' => { 'sandbox' => 'allow-scripts allow-same-origin allow-popups allow-popups-to-escape-sandbox allow-forms' },
+ }
)
end
end
diff --git a/lib/tasks/branding.rake b/lib/tasks/branding.rake
index d1c1c9dede791d..d97c97c99edc71 100644
--- a/lib/tasks/branding.rake
+++ b/lib/tasks/branding.rake
@@ -40,7 +40,7 @@ namespace :branding do
output_dest = Rails.root.join('app', 'javascript', 'icons')
rsvg_convert = Terrapin::CommandLine.new('rsvg-convert', '-w :size -h :size --keep-aspect-ratio :input -o :output')
- convert = Terrapin::CommandLine.new('convert', ':input :output')
+ convert = Terrapin::CommandLine.new('convert', ':input :output', environment: { 'MAGICK_CONFIGURE_PATH' => nil })
favicon_sizes = [16, 32, 48]
apple_icon_sizes = [57, 60, 72, 76, 114, 120, 144, 152, 167, 180, 1024]
diff --git a/spec/controllers/admin/statuses_controller_spec.rb b/spec/controllers/admin/statuses_controller_spec.rb
index 7f912c1c07bb24..877c7e63ebfb30 100644
--- a/spec/controllers/admin/statuses_controller_spec.rb
+++ b/spec/controllers/admin/statuses_controller_spec.rb
@@ -40,24 +40,36 @@
end
describe 'POST #batch' do
- before do
- post :batch, params: { account_id: account.id, action => '', admin_status_batch_action: { status_ids: status_ids } }
- end
+ subject { post :batch, params: { :account_id => account.id, action => '', :admin_status_batch_action => { status_ids: status_ids } } }
let(:status_ids) { [media_attached_status.id] }
- context 'when action is report' do
+ shared_examples 'when action is report' do
let(:action) { 'report' }
it 'creates a report' do
+ subject
+
report = Report.last
expect(report.target_account_id).to eq account.id
expect(report.status_ids).to eq status_ids
end
it 'redirects to report page' do
+ subject
+
expect(response).to redirect_to(admin_report_path(Report.last.id))
end
end
+
+ it_behaves_like 'when action is report'
+
+ context 'when the moderator is blocked by the author' do
+ before do
+ account.block!(user.account)
+ end
+
+ it_behaves_like 'when action is report'
+ end
end
end
diff --git a/spec/controllers/api/v1/conversations_controller_spec.rb b/spec/controllers/api/v1/conversations_controller_spec.rb
index 5add7cf1d4214f..1ec26d52036fa6 100644
--- a/spec/controllers/api/v1/conversations_controller_spec.rb
+++ b/spec/controllers/api/v1/conversations_controller_spec.rb
@@ -16,6 +16,7 @@
before do
PostStatusService.new.call(other.account, text: 'Hey @alice', visibility: 'direct')
+ PostStatusService.new.call(user.account, text: 'Hey, nobody here', visibility: 'direct')
end
it 'returns http success' do
@@ -31,7 +32,26 @@
it 'returns conversations' do
get :index
json = body_as_json
- expect(json.size).to eq 1
+ expect(json.size).to eq 2
+ expect(json[0][:accounts].size).to eq 1
+ end
+
+ context 'with since_id' do
+ context 'when requesting old posts' do
+ it 'returns conversations' do
+ get :index, params: { since_id: Mastodon::Snowflake.id_at(1.hour.ago, with_random: false) }
+ json = body_as_json
+ expect(json.size).to eq 2
+ end
+ end
+
+ context 'when requesting posts in the future' do
+ it 'returns no conversation' do
+ get :index, params: { since_id: Mastodon::Snowflake.id_at(1.hour.from_now, with_random: false) }
+ json = body_as_json
+ expect(json.size).to eq 0
+ end
+ end
end
end
end
diff --git a/spec/controllers/api/v1/statuses/histories_controller_spec.rb b/spec/controllers/api/v1/statuses/histories_controller_spec.rb
index 00677f1d2c37c6..99384c8ed5e914 100644
--- a/spec/controllers/api/v1/statuses/histories_controller_spec.rb
+++ b/spec/controllers/api/v1/statuses/histories_controller_spec.rb
@@ -23,6 +23,7 @@
it 'returns http success' do
expect(response).to have_http_status(200)
+ expect(body_as_json.size).to_not be 0
end
end
end
diff --git a/spec/controllers/api/v1/timelines/tag_controller_spec.rb b/spec/controllers/api/v1/timelines/tag_controller_spec.rb
index 718911083362de..1c60798fcf6fed 100644
--- a/spec/controllers/api/v1/timelines/tag_controller_spec.rb
+++ b/spec/controllers/api/v1/timelines/tag_controller_spec.rb
@@ -5,36 +5,66 @@
describe Api::V1::Timelines::TagController do
render_views
- let(:user) { Fabricate(:user) }
+ let(:user) { Fabricate(:user) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: 'read:statuses') }
before do
allow(controller).to receive(:doorkeeper_token) { token }
end
- context 'with a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id) }
+ describe 'GET #show' do
+ subject do
+ get :show, params: { id: 'test' }
+ end
- describe 'GET #show' do
- before do
- PostStatusService.new.call(user.account, text: 'It is a #test')
+ before do
+ PostStatusService.new.call(user.account, text: 'It is a #test')
+ end
+
+ context 'when the instance allows public preview' do
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+
+ it 'returns http success', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
- it 'returns http success' do
- get :show, params: { id: 'test' }
- expect(response).to have_http_status(200)
- expect(response.headers['Link'].links.size).to eq(2)
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
+
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
end
- end
- context 'without a user context' do
- let(:token) { Fabricate(:accessible_access_token, resource_owner_id: nil) }
+ context 'when the instance does not allow public preview' do
+ before do
+ Form::AdminSettings.new(timeline_preview: false).save
+ end
+
+ context 'when the user is not authenticated' do
+ let(:token) { nil }
+
+ it 'returns http unauthorized' do
+ subject
+
+ expect(response).to have_http_status(401)
+ end
+ end
+
+ context 'when the user is authenticated' do
+ it 'returns http success', :aggregate_failures do
+ subject
- describe 'GET #show' do
- it 'returns http success' do
- get :show, params: { id: 'test' }
- expect(response).to have_http_status(200)
- expect(response.headers['Link']).to be_nil
+ expect(response).to have_http_status(200)
+ expect(response.headers['Link'].links.size).to eq(2)
+ end
end
end
end
diff --git a/spec/controllers/api/v2/admin/accounts_controller_spec.rb b/spec/controllers/api/v2/admin/accounts_controller_spec.rb
index 2508a9e0557cec..ebec4a13e8227d 100644
--- a/spec/controllers/api/v2/admin/accounts_controller_spec.rb
+++ b/spec/controllers/api/v2/admin/accounts_controller_spec.rb
@@ -69,5 +69,13 @@
end
end
end
+
+ context 'with limit param' do
+ let(:params) { { limit: 1 } }
+
+ it 'sets the correct pagination headers' do
+ expect(response.headers['Link'].find_link(%w(rel next)).href).to eq api_v2_admin_accounts_url(limit: 1, max_id: admin_account.id)
+ end
+ end
end
end
diff --git a/spec/controllers/concerns/cache_concern_spec.rb b/spec/controllers/concerns/cache_concern_spec.rb
index a34d7d72676964..21daa19921007e 100644
--- a/spec/controllers/concerns/cache_concern_spec.rb
+++ b/spec/controllers/concerns/cache_concern_spec.rb
@@ -13,12 +13,17 @@ def empty_array
def empty_relation
render plain: cache_collection(Status.none, Status).size
end
+
+ def account_statuses_favourites
+ render plain: cache_collection(Status.where(account_id: params[:id]), Status).map(&:favourites_count)
+ end
end
before do
routes.draw do
- get 'empty_array' => 'anonymous#empty_array'
- post 'empty_relation' => 'anonymous#empty_relation'
+ get 'empty_array' => 'anonymous#empty_array'
+ get 'empty_relation' => 'anonymous#empty_relation'
+ get 'account_statuses_favourites' => 'anonymous#account_statuses_favourites'
end
end
@@ -36,5 +41,20 @@ def empty_relation
expect(response.body).to eq '0'
end
end
+
+ context 'when given a collection of statuses' do
+ let!(:account) { Fabricate(:account) }
+ let!(:status) { Fabricate(:status, account: account) }
+
+ it 'correctly updates with new interactions' do
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[0]'
+
+ FavouriteService.new.call(account, status)
+
+ get :account_statuses_favourites, params: { id: account.id }
+ expect(response.body).to eq '[1]'
+ end
+ end
end
end
diff --git a/spec/controllers/relationships_controller_spec.rb b/spec/controllers/relationships_controller_spec.rb
index 2056a2ac294176..bcdcfa9051f3e9 100644
--- a/spec/controllers/relationships_controller_spec.rb
+++ b/spec/controllers/relationships_controller_spec.rb
@@ -55,7 +55,7 @@
end
context 'when select parameter is provided' do
- subject { patch :update, params: { form_account_batch: { account_ids: [poopfeast.id] }, block_domains: '' } }
+ subject { patch :update, params: { form_account_batch: { account_ids: [poopfeast.id] }, remove_domains_from_followers: '' } }
it 'soft-blocks followers from selected domains' do
poopfeast.follow!(user.account)
@@ -66,6 +66,15 @@
expect(poopfeast.following?(user.account)).to be false
end
+ it 'does not unfollow users from selected domains' do
+ user.account.follow!(poopfeast)
+
+ sign_in user, scope: :user
+ subject
+
+ expect(user.account.following?(poopfeast)).to be true
+ end
+
include_examples 'authenticate user'
include_examples 'redirects back to followers page'
end
diff --git a/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb b/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb
index fe53b4dfc26c17..269c4d685a3f6a 100644
--- a/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb
+++ b/spec/controllers/settings/two_factor_authentication/webauthn_credentials_controller_spec.rb
@@ -248,7 +248,7 @@ def add_webauthn_credential(user)
post :create, params: { credential: new_webauthn_credential, nickname: 'USB Key' }
- expect(response).to have_http_status(500)
+ expect(response).to have_http_status(422)
expect(flash[:error]).to be_present
end
end
@@ -268,7 +268,7 @@ def add_webauthn_credential(user)
post :create, params: { credential: new_webauthn_credential, nickname: nickname }
- expect(response).to have_http_status(500)
+ expect(response).to have_http_status(422)
expect(flash[:error]).to be_present
end
end
diff --git a/spec/controllers/well_known/webfinger_controller_spec.rb b/spec/controllers/well_known/webfinger_controller_spec.rb
index 8574d369d19d5e..0e7b34f471b0ef 100644
--- a/spec/controllers/well_known/webfinger_controller_spec.rb
+++ b/spec/controllers/well_known/webfinger_controller_spec.rb
@@ -4,6 +4,10 @@
render_views
describe 'GET #show' do
+ subject(:perform_show!) do
+ get :show, params: { resource: resource }, format: :json
+ end
+
let(:alternate_domains) { [] }
let(:alice) { Fabricate(:account, username: 'alice') }
let(:resource) { nil }
@@ -15,10 +19,6 @@
Rails.configuration.x.alternate_domains = tmp
end
- subject do
- get :show, params: { resource: resource }, format: :json
- end
-
shared_examples 'a successful response' do
it 'returns http success' do
expect(response).to have_http_status(200)
@@ -43,7 +43,7 @@
let(:resource) { alice.to_webfinger_s }
before do
- subject
+ perform_show!
end
it_behaves_like 'a successful response'
@@ -54,7 +54,7 @@
before do
alice.suspend!
- subject
+ perform_show!
end
it_behaves_like 'a successful response'
@@ -66,7 +66,7 @@
before do
alice.suspend!
alice.deletion_request.destroy
- subject
+ perform_show!
end
it 'returns http gone' do
@@ -78,7 +78,7 @@
let(:resource) { 'acct:not@existing.com' }
before do
- subject
+ perform_show!
end
it 'returns http not found' do
@@ -90,7 +90,7 @@
let(:alternate_domains) { ['foo.org'] }
before do
- subject
+ perform_show!
end
context 'when an account exists' do
@@ -114,11 +114,39 @@
end
end
+ context 'when the old name scheme is used to query the instance actor' do
+ let(:resource) do
+ "#{Rails.configuration.x.local_domain}@#{Rails.configuration.x.local_domain}"
+ end
+
+ before do
+ perform_show!
+ end
+
+ it 'returns http success' do
+ expect(response).to have_http_status(200)
+ end
+
+ it 'does not set a Vary header' do
+ expect(response.headers['Vary']).to be_nil
+ end
+
+ it 'returns application/jrd+json' do
+ expect(response.media_type).to eq 'application/jrd+json'
+ end
+
+ it 'returns links for the internal account' do
+ json = body_as_json
+ expect(json[:subject]).to eq 'acct:mastodon.internal@cb6e6126.ngrok.io'
+ expect(json[:aliases]).to eq ['https://cb6e6126.ngrok.io/actor']
+ end
+ end
+
context 'with no resource parameter' do
let(:resource) { nil }
before do
- subject
+ perform_show!
end
it 'returns http bad request' do
@@ -130,7 +158,7 @@
let(:resource) { 'df/:dfkj' }
before do
- subject
+ perform_show!
end
it 'returns http bad request' do
diff --git a/spec/fixtures/files/attachment-jpg.123456_abcd b/spec/fixtures/files/attachment-jpg.123456_abcd
new file mode 100644
index 00000000000000..f1d40539ac0484
Binary files /dev/null and b/spec/fixtures/files/attachment-jpg.123456_abcd differ
diff --git a/spec/fixtures/files/boop.mp3 b/spec/fixtures/files/boop.mp3
new file mode 100644
index 00000000000000..ba106a3a32d414
Binary files /dev/null and b/spec/fixtures/files/boop.mp3 differ
diff --git a/spec/lib/account_reach_finder_spec.rb b/spec/lib/account_reach_finder_spec.rb
new file mode 100644
index 00000000000000..1da95ba6b3a66e
--- /dev/null
+++ b/spec/lib/account_reach_finder_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe AccountReachFinder do
+ let(:account) { Fabricate(:account) }
+
+ let(:follower1) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-1') }
+ let(:follower2) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-2') }
+ let(:follower3) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://foo.bar/users/a/inbox', shared_inbox_url: 'https://foo.bar/inbox') }
+
+ let(:mentioned1) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://foo.bar/users/b/inbox', shared_inbox_url: 'https://foo.bar/inbox') }
+ let(:mentioned2) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-3') }
+ let(:mentioned3) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/inbox-4') }
+
+ let(:unrelated_account) { Fabricate(:account, protocol: :activitypub, inbox_url: 'https://example.com/unrelated-inbox') }
+
+ before do
+ follower1.follow!(account)
+ follower2.follow!(account)
+ follower3.follow!(account)
+
+ Fabricate(:status, account: account).tap do |status|
+ status.mentions << Mention.new(account: follower1)
+ status.mentions << Mention.new(account: mentioned1)
+ end
+
+ Fabricate(:status, account: account)
+
+ Fabricate(:status, account: account).tap do |status|
+ status.mentions << Mention.new(account: mentioned2)
+ status.mentions << Mention.new(account: mentioned3)
+ end
+
+ Fabricate(:status).tap do |status|
+ status.mentions << Mention.new(account: unrelated_account)
+ end
+ end
+
+ describe '#inboxes' do
+ it 'includes the preferred inbox URL of followers' do
+ expect(described_class.new(account).inboxes).to include(*[follower1, follower2, follower3].map(&:preferred_inbox_url))
+ end
+
+ it 'includes the preferred inbox URL of recently-mentioned accounts' do
+ expect(described_class.new(account).inboxes).to include(*[mentioned1, mentioned2, mentioned3].map(&:preferred_inbox_url))
+ end
+
+ it 'does not include the inbox of unrelated users' do
+ expect(described_class.new(account).inboxes).to_not include(unrelated_account.preferred_inbox_url)
+ end
+ end
+end
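For context on the inbox assertions above: the spec relies on the preferred inbox collapsing the two foo.bar accounts (follower3 and mentioned1) onto their shared inbox. The snippet below is a hedged sketch of that behaviour, not the actual Account implementation.

# Hedged sketch: prefer the shared inbox when the remote server advertises one.
def preferred_inbox_url(account)
  account.shared_inbox_url.presence || account.inbox_url
end

# With the fixtures above, both foo.bar accounts resolve to 'https://foo.bar/inbox',
# letting AccountReachFinder#inboxes deduplicate them into a single delivery target.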
diff --git a/spec/lib/activitypub/activity/flag_spec.rb b/spec/lib/activitypub/activity/flag_spec.rb
index 2f2d13876760df..6d7a8a7ec2e8dd 100644
--- a/spec/lib/activitypub/activity/flag_spec.rb
+++ b/spec/lib/activitypub/activity/flag_spec.rb
@@ -37,6 +37,37 @@
end
end
+ context 'when the report comment is excessively long' do
+ subject do
+ described_class.new({
+ '@context': 'https://www.w3.org/ns/activitystreams',
+ id: flag_id,
+ type: 'Flag',
+ content: long_comment,
+ actor: ActivityPub::TagManager.instance.uri_for(sender),
+ object: [
+ ActivityPub::TagManager.instance.uri_for(flagged),
+ ActivityPub::TagManager.instance.uri_for(status),
+ ],
+ }.with_indifferent_access, sender)
+ end
+
+ let(:long_comment) { Faker::Lorem.characters(number: 6000) }
+
+ before do
+ subject.perform
+ end
+
+ it 'creates a report but with a truncated comment' do
+ report = Report.find_by(account: sender, target_account: flagged)
+
+ expect(report).to_not be_nil
+ expect(report.comment.length).to eq 5000
+ expect(report.comment).to eq long_comment[0...5000]
+ expect(report.status_ids).to eq [status.id]
+ end
+ end
+
context 'when the reported status is private and should not be visible to the remote server' do
let(:status) { Fabricate(:status, account: flagged, uri: 'foobar', visibility: :private) }
diff --git a/spec/lib/plain_text_formatter_spec.rb b/spec/lib/plain_text_formatter_spec.rb
index c3d0ee630121ac..81e4ae286e698b 100644
--- a/spec/lib/plain_text_formatter_spec.rb
+++ b/spec/lib/plain_text_formatter_spec.rb
@@ -4,7 +4,7 @@
describe '#to_s' do
subject { described_class.new(status.text, status.local?).to_s }
- context 'given a post with local status' do
+ context 'when status is local' do
let(:status) { Fabricate(:status, text: '<p>a text by a nerd who uses an HTML tag in text</p>', uri: nil) }
it 'returns the raw text' do
@@ -12,12 +12,63 @@
end
end
- context 'given a post with remote status' do
+ context 'when status is remote' do
let(:remote_account) { Fabricate(:account, domain: 'remote.test', username: 'bob', url: 'https://remote.test/') }
- let(:status) { Fabricate(:status, account: remote_account, text: '<p>Hello</p>') }
- it 'returns tag-stripped text' do
- is_expected.to eq 'Hello'
+ context 'when text contains inline HTML tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: '<b>Lorem</b> <i>ipsum</i>') }
+
+ it 'strips the tags' do
+ expect(subject).to eq 'Lorem ipsum'
+ end
+ end
+
+ context 'when text contains <p> tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: '<p>Lorem</p><p>ipsum</p>') }
+
+ it 'inserts a newline' do
+ expect(subject).to eq "Lorem\nipsum"
+ end
+ end
+
+ context 'when text contains a single <br> tag' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem<br>ipsum') }
+
+ it 'inserts a newline' do
+ expect(subject).to eq "Lorem\nipsum"
+ end
+ end
+
+ context 'when text contains consecutive <br> tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem<br><br>ipsum') }
+
+ it 'inserts a single newline' do
+ expect(subject).to eq "Lorem\nipsum"
+ end
+ end
+
+ context 'when text contains HTML entity' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem &amp; ipsum &#x2764;') }
+
+ it 'unescapes the entity' do
+ expect(subject).to eq 'Lorem & ipsum ❤'
+ end
+ end
+
+ context 'when text contains <script> tags' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem <script>alert("Hello")</script>ipsum') }
+
+ it 'strips the tag and its contents' do
+ expect(subject).to eq 'Lorem ipsum'
+ end
+ end
+
+ context 'when text contains an HTML comment' do
+ let(:status) { Fabricate(:status, account: remote_account, text: 'Lorem <!-- comment -->ipsum') }
+
+ it 'strips the comment' do
+ expect(subject).to eq 'Lorem ipsum'
+ end
end
end
end
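The expectations above pin down the formatter's behaviour for remote statuses: <br> and paragraph boundaries become newlines, scripts and comments are dropped, other tags are stripped, and entities are unescaped. The Ruby sketch below reproduces that behaviour for illustration only; the real PlainTextFormatter may differ in detail, and the sketch only uses the standard library CGI module.

require 'cgi'

# Minimal sketch consistent with the spec expectations above; not the actual
# PlainTextFormatter implementation.
def to_plain_text(html)
  text = html
    .gsub(/<br\s*\/?>/i, "\n")                  # <br>, <br/>, <br /> -> newline
    .gsub(%r{</p>\s*<p[^>]*>}i, "\n\n")         # paragraph boundary -> blank line
    .gsub(%r{<script.*?</script>}mi, '')        # drop scripts and their contents
    .gsub(/<!--.*?-->/m, '')                    # drop HTML comments
    .gsub(/<[^>]+>/, '')                        # strip remaining tags

  CGI.unescapeHTML(text).gsub(/\n{2,}/, "\n").strip
end

to_plain_text('Lorem<br><br>ipsum')          # => "Lorem\nipsum"
to_plain_text('Lorem &amp; ipsum &#x2764;')  # => "Lorem & ipsum ❤"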
diff --git a/spec/lib/vacuum/access_tokens_vacuum_spec.rb b/spec/lib/vacuum/access_tokens_vacuum_spec.rb
index 0244c34492688a..39c8cdb3973609 100644
--- a/spec/lib/vacuum/access_tokens_vacuum_spec.rb
+++ b/spec/lib/vacuum/access_tokens_vacuum_spec.rb
@@ -5,9 +5,11 @@
describe '#perform' do
let!(:revoked_access_token) { Fabricate(:access_token, revoked_at: 1.minute.ago) }
+ let!(:expired_access_token) { Fabricate(:access_token, expires_in: 59.minutes.to_i, created_at: 1.hour.ago) }
let!(:active_access_token) { Fabricate(:access_token) }
let!(:revoked_access_grant) { Fabricate(:access_grant, revoked_at: 1.minute.ago) }
+ let!(:expired_access_grant) { Fabricate(:access_grant, expires_in: 59.minutes.to_i, created_at: 1.hour.ago) }
let!(:active_access_grant) { Fabricate(:access_grant) }
before do
@@ -18,10 +20,18 @@
expect { revoked_access_token.reload }.to raise_error ActiveRecord::RecordNotFound
end
+ it 'deletes expired access tokens' do
+ expect { expired_access_token.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+
it 'deletes revoked access grants' do
expect { revoked_access_grant.reload }.to raise_error ActiveRecord::RecordNotFound
end
+ it 'deletes expired access grants' do
+ expect { expired_access_grant.reload }.to raise_error ActiveRecord::RecordNotFound
+ end
+
it 'does not delete active access tokens' do
expect { active_access_token.reload }.to_not raise_error
end
diff --git a/spec/models/form/account_batch_spec.rb b/spec/models/form/account_batch_spec.rb
new file mode 100644
index 00000000000000..fd8e90901065ec
--- /dev/null
+++ b/spec/models/form/account_batch_spec.rb
@@ -0,0 +1,63 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe Form::AccountBatch do
+ let(:account_batch) { described_class.new }
+
+ describe '#save' do
+ subject { account_batch.save }
+
+ let(:account) { Fabricate(:user, role: UserRole.find_by(name: 'Admin')).account }
+ let(:account_ids) { [] }
+ let(:query) { Account.none }
+
+ before do
+ account_batch.assign_attributes(
+ action: action,
+ current_account: account,
+ account_ids: account_ids,
+ query: query,
+ select_all_matching: select_all_matching
+ )
+ end
+
+ context 'when action is "suspend"' do
+ let(:action) { 'suspend' }
+
+ let(:target_account) { Fabricate(:account) }
+ let(:target_account2) { Fabricate(:account) }
+
+ before do
+ Fabricate(:report, target_account: target_account)
+ Fabricate(:report, target_account: target_account2)
+ end
+
+ context 'when accounts are passed as account_ids' do
+ let(:select_all_matching) { '0' }
+ let(:account_ids) { [target_account.id, target_account2.id] }
+
+ it 'suspends the expected users' do
+ expect { subject }.to change { [target_account.reload.suspended?, target_account2.reload.suspended?] }.from([false, false]).to([true, true])
+ end
+
+ it 'closes open reports targeting the suspended users' do
+ expect { subject }.to change { Report.unresolved.where(target_account: [target_account, target_account2]).count }.from(2).to(0)
+ end
+ end
+
+ context 'when accounts are passed as a query' do
+ let(:select_all_matching) { '1' }
+ let(:query) { Account.where(id: [target_account.id, target_account2.id]) }
+
+ it 'suspends the expected users' do
+ expect { subject }.to change { [target_account.reload.suspended?, target_account2.reload.suspended?] }.from([false, false]).to([true, true])
+ end
+
+ it 'closes open reports targeting the suspended users' do
+ expect { subject }.to change { Report.unresolved.where(target_account: [target_account, target_account2]).count }.from(2).to(0)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/media_attachment_spec.rb b/spec/models/media_attachment_spec.rb
index 29fd313aec2e49..c3283ccb04f14d 100644
--- a/spec/models/media_attachment_spec.rb
+++ b/spec/models/media_attachment_spec.rb
@@ -150,6 +150,26 @@
end
end
+ describe 'mp3 with large cover art' do
+ let(:media) { described_class.create(account: Fabricate(:account), file: attachment_fixture('boop.mp3')) }
+
+ it 'detects it as an audio file' do
+ expect(media.type).to eq 'audio'
+ end
+
+ it 'sets meta for the duration' do
+ expect(media.file.meta['original']['duration']).to be_within(0.05).of(0.235102)
+ end
+
+ it 'extracts thumbnail' do
+ expect(media.thumbnail.present?).to be true
+ end
+
+ it 'gives the file a random name' do
+ expect(media.file_file_name).to_not eq 'boop.mp3'
+ end
+ end
+
describe 'jpeg' do
let(:media) { MediaAttachment.create(account: Fabricate(:account), file: attachment_fixture('attachment.jpg')) }
diff --git a/spec/models/report_spec.rb b/spec/models/report_spec.rb
index 874be41328cb50..c485a4a3c9ad12 100644
--- a/spec/models/report_spec.rb
+++ b/spec/models/report_spec.rb
@@ -125,10 +125,17 @@
expect(report).to be_valid
end
- it 'is invalid if comment is longer than 1000 characters' do
+ let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
+
+ it 'is invalid if the comment is longer than 1000 characters and the reporter is local' do
report = Fabricate.build(:report, comment: Faker::Lorem.characters(number: 1001))
- report.valid?
+ expect(report.valid?).to be false
expect(report).to model_have_error_on_field(:comment)
end
+
+ it 'is valid if comment is longer than 1000 characters and reporter is not local' do
+ report = Fabricate.build(:report, account: remote_account, comment: Faker::Lorem.characters(number: 1001))
+ expect(report.valid?).to be true
+ end
end
end
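Read together with the Flag activity spec earlier in this patch (which truncates remote report comments to 5,000 characters), these examples imply that the 1,000-character comment validation only applies when the reporting account is local. A hedged sketch of that rule follows; the actual Report model may express the condition differently.

# Hedged sketch, not the actual model code:
class Report < ApplicationRecord
  # Local reporters are held to the 1,000-character limit; remote reports arrive
  # via ActivityPub Flag activities and are truncated before persistence instead.
  validates :comment, length: { maximum: 1_000 }, if: -> { account.local? }
end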
diff --git a/spec/requests/api/v2/media_spec.rb b/spec/requests/api/v2/media_spec.rb
new file mode 100644
index 00000000000000..89384d0ca36206
--- /dev/null
+++ b/spec/requests/api/v2/media_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+RSpec.describe 'Media API', paperclip_processing: true do
+ let(:user) { Fabricate(:user) }
+ let(:token) { Fabricate(:accessible_access_token, resource_owner_id: user.id, scopes: scopes) }
+ let(:scopes) { 'write' }
+ let(:headers) { { 'Authorization' => "Bearer #{token.token}" } }
+
+ describe 'POST /api/v2/media' do
+ it 'returns http success' do
+ post '/api/v2/media', headers: headers, params: { file: fixture_file_upload('attachment-jpg.123456_abcd', 'image/jpeg') }
+ expect(File.exist?(user.account.media_attachments.first.file.path(:small))).to be true
+ expect(response).to have_http_status(200)
+ end
+ end
+end
diff --git a/spec/requests/content_security_policy_spec.rb b/spec/requests/content_security_policy_spec.rb
new file mode 100644
index 00000000000000..7eb27d61d615ca
--- /dev/null
+++ b/spec/requests/content_security_policy_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'rails_helper'
+
+describe 'Content-Security-Policy' do
+ it 'sets the expected CSP headers' do
+ allow(SecureRandom).to receive(:base64).with(16).and_return('ZbA+JmE7+bK8F5qvADZHuQ==')
+
+ get '/'
+ expect(response.headers['Content-Security-Policy'].split(';').map(&:strip)).to contain_exactly(
+ "base-uri 'none'",
+ "default-src 'none'",
+ "frame-ancestors 'none'",
+ "font-src 'self' https://cb6e6126.ngrok.io",
+ "img-src 'self' https: data: blob: https://cb6e6126.ngrok.io",
+ "style-src 'self' https://cb6e6126.ngrok.io 'nonce-ZbA+JmE7+bK8F5qvADZHuQ=='",
+ "media-src 'self' https: data: https://cb6e6126.ngrok.io",
+ "frame-src 'self' https:",
+ "manifest-src 'self' https://cb6e6126.ngrok.io",
+ "form-action 'self'",
+ "child-src 'self' blob: https://cb6e6126.ngrok.io",
+ "worker-src 'self' blob: https://cb6e6126.ngrok.io",
+ "connect-src 'self' data: blob: https://cb6e6126.ngrok.io https://cb6e6126.ngrok.io ws://localhost:4000",
+ "script-src 'self' https://cb6e6126.ngrok.io 'wasm-unsafe-eval'"
+ )
+ end
+end
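A note on the SecureRandom stub above: the CSP style-src nonce is derived from a random base64 value, so pinning SecureRandom.base64(16) makes the header assertion deterministic.

# With the stub in place (illustration only):
SecureRandom.base64(16)  # => 'ZbA+JmE7+bK8F5qvADZHuQ==' instead of a fresh random value
# ...which is why the expected header contains 'nonce-ZbA+JmE7+bK8F5qvADZHuQ=='.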
diff --git a/spec/services/activitypub/process_status_update_service_spec.rb b/spec/services/activitypub/process_status_update_service_spec.rb
index 750369d57fbfcf..09c7fe94a0c427 100644
--- a/spec/services/activitypub/process_status_update_service_spec.rb
+++ b/spec/services/activitypub/process_status_update_service_spec.rb
@@ -41,12 +41,12 @@ def poll_option_json(name, votes)
describe '#call' do
it 'updates text' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.text).to eq 'Hello universe'
end
it 'updates content warning' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.spoiler_text).to eq 'Show more'
end
@@ -64,7 +64,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -87,7 +87,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -135,7 +135,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -188,7 +188,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -216,11 +216,11 @@ def poll_option_json(name, votes)
end
it 'does not create any edits' do
- expect { subject.call(status, json) }.not_to change { status.reload.edits.pluck(&:id) }
+ expect { subject.call(status, json, json) }.to_not(change { status.reload.edits.pluck(&:id) })
end
it 'does not update the text, spoiler_text or edited_at' do
- expect { subject.call(status, json) }.not_to change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] }
+ expect { subject.call(status, json, json) }.to_not(change { s = status.reload; [s.text, s.spoiler_text, s.edited_at] })
end
end
@@ -235,7 +235,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -259,7 +259,7 @@ def poll_option_json(name, votes)
before do
status.update(ordered_media_attachment_ids: nil)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'does not create any edits' do
@@ -273,7 +273,7 @@ def poll_option_json(name, votes)
context 'originally without tags' do
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates tags' do
@@ -299,7 +299,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates tags' do
@@ -309,7 +309,7 @@ def poll_option_json(name, votes)
context 'originally without mentions' do
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates mentions' do
@@ -321,7 +321,7 @@ def poll_option_json(name, votes)
let(:mentions) { [alice, bob] }
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates mentions' do
@@ -332,7 +332,7 @@ def poll_option_json(name, votes)
context 'originally without media attachments' do
before do
stub_request(:get, 'https://example.com/foo.png').to_return(body: attachment_fixture('emojo.png'))
- subject.call(status, json)
+ subject.call(status, json, json)
end
let(:payload) do
@@ -382,7 +382,7 @@ def poll_option_json(name, votes)
before do
allow(RedownloadMediaWorker).to receive(:perform_async)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'updates the existing media attachment in-place' do
@@ -410,7 +410,7 @@ def poll_option_json(name, votes)
before do
poll = Fabricate(:poll, status: status)
status.update(preloadable_poll: poll)
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'removes poll' do
@@ -440,7 +440,7 @@ def poll_option_json(name, votes)
end
before do
- subject.call(status, json)
+ subject.call(status, json, json)
end
it 'creates a poll' do
@@ -456,12 +456,12 @@ def poll_option_json(name, votes)
end
it 'creates edit history' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.edits.reload.map(&:text)).to eq ['Hello world', 'Hello universe']
end
it 'sets edited timestamp' do
- subject.call(status, json)
+ subject.call(status, json, json)
expect(status.reload.edited_at.to_s).to eq '2021-09-08 22:39:25 UTC'
end
end
diff --git a/spec/services/fetch_link_card_service_spec.rb b/spec/services/fetch_link_card_service_spec.rb
index 4914c275326eb9..7a758f910fbf93 100644
--- a/spec/services/fetch_link_card_service_spec.rb
+++ b/spec/services/fetch_link_card_service_spec.rb
@@ -10,6 +10,7 @@
stub_request(:get, 'http://example.com/koi8-r').to_return(request_fixture('koi8-r.txt'))
stub_request(:get, 'http://example.com/日本語').to_return(request_fixture('sjis.txt'))
stub_request(:get, 'https://github.com/qbi/WannaCry').to_return(status: 404)
+ stub_request(:get, 'http://example.com/test?data=file.gpx%5E1').to_return(status: 200)
stub_request(:get, 'http://example.com/test-').to_return(request_fixture('idn.txt'))
stub_request(:get, 'http://example.com/windows-1251').to_return(request_fixture('windows-1251.txt'))
@@ -85,6 +86,15 @@
expect(a_request(:get, 'http://example.com/sjis')).to_not have_been_made
end
end
+
+ context 'with a URL that contains a caret in the query string' do
+ let(:status) { Fabricate(:status, text: 'test http://example.com/test?data=file.gpx^1') }
+
+ it 'does fetch URLs with a caret in search params' do
+ expect(a_request(:get, 'http://example.com/test?data=file.gpx')).to_not have_been_made
+ expect(a_request(:get, 'http://example.com/test?data=file.gpx%5E1')).to have_been_made.once
+ end
+ end
end
context 'in a remote status' do
diff --git a/spec/services/report_service_spec.rb b/spec/services/report_service_spec.rb
index 02bc42ac170d60..1737a05ae38101 100644
--- a/spec/services/report_service_spec.rb
+++ b/spec/services/report_service_spec.rb
@@ -4,6 +4,14 @@
subject { described_class.new }
let(:source_account) { Fabricate(:account) }
+ let(:target_account) { Fabricate(:account) }
+
+ context 'with a local account' do
+ it 'has a uri' do
+ report = subject.call(source_account, target_account)
+ expect(report.uri).to_not be_nil
+ end
+ end
context 'for a remote account' do
let(:remote_account) { Fabricate(:account, domain: 'example.com', protocol: :activitypub, inbox_url: 'http://example.com/inbox') }
diff --git a/spec/services/resolve_url_service_spec.rb b/spec/services/resolve_url_service_spec.rb
index b3e3defbff2ccd..ab5b50b765058d 100644
--- a/spec/services/resolve_url_service_spec.rb
+++ b/spec/services/resolve_url_service_spec.rb
@@ -145,5 +145,35 @@
expect(subject.call(url, on_behalf_of: account)).to eq(status)
end
end
+
+ context 'when searching for a local link of a remote private status' do
+ let(:account) { Fabricate(:account) }
+ let(:poster) { Fabricate(:account, username: 'foo', domain: 'example.com') }
+ let(:url) { 'https://example.com/@foo/42' }
+ let(:uri) { 'https://example.com/users/foo/statuses/42' }
+ let!(:status) { Fabricate(:status, url: url, uri: uri, account: poster, visibility: :private) }
+ let(:search_url) { "https://#{Rails.configuration.x.local_domain}/@foo@example.com/#{status.id}" }
+
+ before do
+ stub_request(:get, url).to_return(status: 404) if url.present?
+ stub_request(:get, uri).to_return(status: 404)
+ end
+
+ context 'when the account follows the poster' do
+ before do
+ account.follow!(poster)
+ end
+
+ it 'returns the status' do
+ expect(subject.call(search_url, on_behalf_of: account)).to eq(status)
+ end
+ end
+
+ context 'when the account does not follow the poster' do
+ it 'does not return the status' do
+ expect(subject.call(search_url, on_behalf_of: account)).to be_nil
+ end
+ end
+ end
end
end
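The two follow/no-follow contexts above hinge on the visibility check applied when resolving a local permalink to a remote private status. A hedged sketch of that rule, not the actual policy code:

# Hedged sketch: a private status is only resolvable on behalf of an account
# that follows the poster; public and unlisted statuses are visible to anyone.
def resolvable_by?(status, viewer)
  return true if %w(public unlisted).include?(status.visibility)

  viewer.present? && viewer.following?(status.account)
end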
diff --git a/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb b/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
index 8f20725c87f256..0b0c4dd487e3f6 100644
--- a/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
+++ b/spec/workers/scheduler/accounts_statuses_cleanup_scheduler_spec.rb
@@ -7,11 +7,13 @@
let!(:account2) { Fabricate(:account, domain: nil) }
let!(:account3) { Fabricate(:account, domain: nil) }
let!(:account4) { Fabricate(:account, domain: nil) }
+ let!(:account5) { Fabricate(:account, domain: nil) }
let!(:remote) { Fabricate(:account) }
let!(:policy1) { Fabricate(:account_statuses_cleanup_policy, account: account1) }
let!(:policy2) { Fabricate(:account_statuses_cleanup_policy, account: account3) }
let!(:policy3) { Fabricate(:account_statuses_cleanup_policy, account: account4, enabled: false) }
+ let!(:policy4) { Fabricate(:account_statuses_cleanup_policy, account: account5) }
let(:queue_size) { 0 }
let(:queue_latency) { 0 }
@@ -23,7 +25,6 @@
},
]
end
- let(:retry_size) { 0 }
before do
queue_stub = double
@@ -33,7 +34,6 @@
allow(Sidekiq::ProcessSet).to receive(:new).and_return(process_set_stub)
sidekiq_stats_stub = double
- allow(sidekiq_stats_stub).to receive(:retry_size).and_return(retry_size)
allow(Sidekiq::Stats).to receive(:new).and_return(sidekiq_stats_stub)
# Create a bunch of old statuses
@@ -42,6 +42,7 @@
Fabricate(:status, account: account2, created_at: 3.years.ago)
Fabricate(:status, account: account3, created_at: 3.years.ago)
Fabricate(:status, account: account4, created_at: 3.years.ago)
+ Fabricate(:status, account: account5, created_at: 3.years.ago)
Fabricate(:status, account: remote, created_at: 3.years.ago)
end
@@ -70,17 +71,9 @@
expect(subject.under_load?).to be true
end
end
-
- context 'when there is a huge amount of jobs to retry' do
- let(:retry_size) { 1_000_000 }
-
- it 'returns true' do
- expect(subject.under_load?).to be true
- end
- end
end
- describe '#get_budget' do
+ describe '#compute_budget' do
context 'on a single thread' do
let(:process_set_stub) { [ { 'concurrency' => 1, 'queues' => ['push', 'default'] } ] }
@@ -119,8 +112,48 @@
expect { subject.perform }.to_not change { account4.statuses.count }
end
- it 'eventually deletes every deletable toot' do
- expect { subject.perform; subject.perform; subject.perform; subject.perform }.to change { Status.count }.by(-20)
+ it 'eventually deletes every deletable toot given enough runs' do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 4
+
+ expect { 10.times { subject.perform } }.to change { Status.count }.by(-30)
+ end
+
+ it 'correctly round-trips between users across several runs' do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 3
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::PER_ACCOUNT_BUDGET', 2
+
+ expect { 3.times { subject.perform } }
+ .to change { Status.count }.by(-3 * 3)
+ .and change { account1.statuses.count }
+ .and change { account3.statuses.count }
+ .and change { account5.statuses.count }
+ end
+
+ context 'when given a big budget' do
+ let(:process_set_stub) { [{ 'concurrency' => 400, 'queues' => %w(push default) }] }
+
+ before do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 400
+ end
+
+ it 'correctly handles looping in a single run' do
+ expect(subject.compute_budget).to eq(400)
+ expect { subject.perform }.to change { Status.count }.by(-30)
+ end
+ end
+
+ context 'when there is no work to be done' do
+ let(:process_set_stub) { [{ 'concurrency' => 400, 'queues' => %w(push default) }] }
+
+ before do
+ stub_const 'Scheduler::AccountsStatusesCleanupScheduler::MAX_BUDGET', 400
+ subject.perform
+ end
+
+ it 'does not get stuck' do
+ expect(subject.compute_budget).to eq(400)
+ expect { subject.perform }.to_not change { Status.count }
+ end
end
end
end
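The round-trip expectation `.by(-3 * 3)` follows from the stubbed budgets: with MAX_BUDGET 3 and PER_ACCOUNT_BUDGET 2 across the three accounts with enabled policies, each run can delete at most three statuses. The helper below is hypothetical and only illustrates that arithmetic; the constant names come from the scheduler itself.

# Hypothetical helper, for illustration only.
def deletions_per_run(max_budget:, per_account_budget:, eligible_accounts:)
  [max_budget, per_account_budget * eligible_accounts].min
end

deletions_per_run(max_budget: 3, per_account_budget: 2, eligible_accounts: 3) # => 3
# Three runs therefore remove at most 3 * 3 = 9 statuses, matching `change { Status.count }.by(-3 * 3)`.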
diff --git a/streaming/index.js b/streaming/index.js
index a8708ec78bbcbb..3fea71f9445e9b 100644
--- a/streaming/index.js
+++ b/streaming/index.js
@@ -92,18 +92,31 @@ const redisUrlToClient = async (defaultConfig, redisUrl) => {
const numWorkers = +process.env.STREAMING_CLUSTER_NUM || (env === 'development' ? 1 : Math.max(os.cpus().length - 1, 1));
/**
+ * Attempts to safely parse a string as JSON. It is used both when receiving a
+ * message from redis and when receiving a message from a client over a
+ * websocket connection, which is why it accepts a `req` argument.
* @param {string} json
- * @param {any} req
- * @return {Object.<string, any>|null}
+ * @param {any?} req
+ * @returns {Object.<string, any>|null}
*/
const parseJSON = (json, req) => {
try {
return JSON.parse(json);
} catch (err) {
- if (req.accountId) {
- log.warn(req.requestId, `Error parsing message from user ${req.accountId}: ${err}`);
+ /* FIXME: This logging isn't great, and should probably be done at the
+ * call-site of parseJSON, not in the method, but this would require changing
+ * the signature of parseJSON to return something akin to a Result type:
+ * [Error|null, null|Object<string, any>]
+ */
const redisPrefix = redisNamespace ? `${redisNamespace}:` : '';
/**
- * @type {Object.<string, Array.<function(string): void>>}
+ * @type {Object.<string, Array.<function(Object<string, any>): void>>}
*/
const subs = {};
@@ -207,12 +220,21 @@ const startWorker = async (workerId) => {
return;
}
- callbacks.forEach(callback => callback(message));
+ const json = parseJSON(message, null);
+ if (!json) return;
+
+ callbacks.forEach(callback => callback(json));
};
+ /**
+ * @callback SubscriptionListener
+ * @param {ReturnType<parseJSON>} json of the message
+ * @returns void
+ */
+
/**
* @param {string} channel
- * @param {function(string): void} callback
+ * @param {SubscriptionListener} callback
*/
const subscribe = (channel, callback) => {
log.silly(`Adding listener for ${channel}`);
@@ -229,6 +251,7 @@ const startWorker = async (workerId) => {
/**
* @param {string} channel
+ * @param {SubscriptionListener} callback
*/
const unsubscribe = (channel, callback) => {
log.silly(`Removing listener for ${channel}`);
@@ -378,7 +401,7 @@ const startWorker = async (workerId) => {
/**
* @param {any} req
- * @return {string}
+ * @returns {string|undefined}
*/
const channelNameFromPath = req => {
const { path, query } = req;
@@ -487,15 +510,11 @@ const startWorker = async (workerId) => {
/**
* @param {any} req
* @param {SystemMessageHandlers} eventHandlers
- * @return {function(string): void}
+ * @returns {function(object): void}
*/
const createSystemMessageListener = (req, eventHandlers) => {
return message => {
- const json = parseJSON(message, req);
-
- if (!json) return;
-
- const { event } = json;
+ const { event } = message;
log.silly(req.requestId, `System message for ${req.accountId}: ${event}`);
@@ -610,54 +629,66 @@ const startWorker = async (workerId) => {
* @param {string[]} ids
* @param {any} req
* @param {function(string, string): void} output
- * @param {function(string[], function(string): void): void} attachCloseHandler
+ * @param {undefined | function(string[], SubscriptionListener): void} attachCloseHandler
* @param {boolean=} needsFiltering
- * @return {function(string): void}
+ * @returns {SubscriptionListener}
*/
const streamFrom = (ids, req, output, attachCloseHandler, needsFiltering = false) => {
const accountId = req.accountId || req.remoteAddress;
log.verbose(req.requestId, `Starting stream from ${ids.join(', ')} for ${accountId}`);
- const listener = message => {
- const json = parseJSON(message, req);
-
- if (!json) return;
-
- const { event, payload, queued_at } = json;
+ const transmit = (event, payload) => {
+ // TODO: Replace "string"-based delete payloads with object payloads:
+ const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
- const transmit = () => {
- const now = new Date().getTime();
- const delta = now - queued_at;
- const encodedPayload = typeof payload === 'object' ? JSON.stringify(payload) : payload;
-
- log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload} Delay: ${delta}ms`);
- output(event, encodedPayload);
- };
+ log.silly(req.requestId, `Transmitting for ${accountId}: ${event} ${encodedPayload}`);
+ output(event, encodedPayload);
+ };
- // Only messages that may require filtering are statuses, since notifications
- // are already personalized and deletes do not matter
- if (!needsFiltering || event !== 'update') {
- transmit();
+ // The listener used to process each message off the redis subscription,
+ // message here is an object with an `event` and `payload` property. Some
+ // events also include a queued_at value, but this is being removed shortly.
+ /** @type {SubscriptionListener} */
+ const listener = message => {
+ const { event, payload } = message;
+
+ // Streaming only needs to apply filtering to some channels and only to
+ // some events. This is because majority of the filtering happens on the
+ // Ruby on Rails side when producing the event for streaming.
+ //
+ // The only events that require filtering from the streaming server are
+ // `update` and `status.update`, all other events are transmitted to the
+ // client as soon as they're received (pass-through).
+ //
+ // The channels that need filtering are determined in the function
+ // `channelNameToIds` defined below:
+ if (!needsFiltering || (event !== 'update' && event !== 'status.update')) {
+ transmit(event, payload);
return;
}
- const unpackedPayload = payload;
- const targetAccountIds = [unpackedPayload.account.id].concat(unpackedPayload.mentions.map(item => item.id));
- const accountDomain = unpackedPayload.account.acct.split('@')[1];
+ // The rest of the logic from here on in this function is to handle
+ // filtering of statuses:
- if (Array.isArray(req.chosenLanguages) && unpackedPayload.language !== null && req.chosenLanguages.indexOf(unpackedPayload.language) === -1) {
- log.silly(req.requestId, `Message ${unpackedPayload.id} filtered by language (${unpackedPayload.language})`);
+ // Filter based on language:
+ if (Array.isArray(req.chosenLanguages) && payload.language !== null && req.chosenLanguages.indexOf(payload.language) === -1) {
+ log.silly(req.requestId, `Message ${payload.id} filtered by language (${payload.language})`);
return;
}
// When the account is not logged in, it is not necessary to confirm the block or mute
if (!req.accountId) {
- transmit();
+ transmit(event, payload);
return;
}
- pgPool.connect((err, client, done) => {
+ // Filter based on domain blocks, blocks, mutes, or custom filters:
+ const targetAccountIds = [payload.account.id].concat(payload.mentions.map(item => item.id));
+ const accountDomain = payload.account.acct.split('@')[1];
+
+ // TODO: Move this logic out of the message handling loop
+ pgPool.connect((err, client, releasePgConnection) => {
if (err) {
log.error(err);
return;
@@ -672,40 +703,57 @@ const startWorker = async (workerId) => {
SELECT 1
FROM mutes
WHERE account_id = $1
- AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, unpackedPayload.account.id].concat(targetAccountIds)),
+ AND target_account_id IN (${placeholders(targetAccountIds, 2)})`, [req.accountId, payload.account.id].concat(targetAccountIds)),
];
if (accountDomain) {
queries.push(client.query('SELECT 1 FROM account_domain_blocks WHERE account_id = $1 AND domain = $2', [req.accountId, accountDomain]));
}
- if (!unpackedPayload.filtered && !req.cachedFilters) {
+ if (!payload.filtered && !req.cachedFilters) {
queries.push(client.query('SELECT filter.id AS id, filter.phrase AS title, filter.context AS context, filter.expires_at AS expires_at, filter.action AS filter_action, keyword.keyword AS keyword, keyword.whole_word AS whole_word FROM custom_filter_keywords keyword JOIN custom_filters filter ON keyword.custom_filter_id = filter.id WHERE filter.account_id = $1 AND (filter.expires_at IS NULL OR filter.expires_at > NOW())', [req.accountId]));
}
Promise.all(queries).then(values => {
- done();
+ releasePgConnection();
+ // Handling blocks & mutes and domain blocks: If one of those applies,
+ // then we don't transmit the payload of the event to the client
if (values[0].rows.length > 0 || (accountDomain && values[1].rows.length > 0)) {
return;
}
- if (!unpackedPayload.filtered && !req.cachedFilters) {
+ // If the payload already contains the `filtered` property, it means
+ // that filtering has been applied on the ruby on rails side, as
+ // such, we don't need to construct or apply the filters in streaming:
+ if (Object.prototype.hasOwnProperty.call(payload, "filtered")) {
+ transmit(event, payload);
+ return;
+ }
+
+ // Handling for constructing the custom filters and caching them on the request
+ // TODO: Move this logic out of the message handling lifecycle
+ if (!req.cachedFilters) {
const filterRows = values[accountDomain ? 2 : 1].rows;
- req.cachedFilters = filterRows.reduce((cache, row) => {
- if (cache[row.id]) {
- cache[row.id].keywords.push([row.keyword, row.whole_word]);
+ req.cachedFilters = filterRows.reduce((cache, filter) => {
+ if (cache[filter.id]) {
+ cache[filter.id].keywords.push([filter.keyword, filter.whole_word]);
} else {
- cache[row.id] = {
- keywords: [[row.keyword, row.whole_word]],
- expires_at: row.expires_at,
- repr: {
- id: row.id,
- title: row.title,
- context: row.context,
- expires_at: row.expires_at,
- filter_action: ['warn', 'hide'][row.filter_action],
+ cache[filter.id] = {
+ keywords: [[filter.keyword, filter.whole_word]],
+ expires_at: filter.expires_at,
+ filter: {
+ id: filter.id,
+ title: filter.title,
+ context: filter.context,
+ expires_at: filter.expires_at,
+ // filter.filter_action is the value from the
+ // custom_filters.action database column, it is an integer
+ // representing a value in an enum defined by Ruby on Rails:
+ //
+ // enum { warn: 0, hide: 1 }
+ filter_action: ['warn', 'hide'][filter.filter_action],
},
};
}
@@ -713,6 +761,10 @@ const startWorker = async (workerId) => {
return cache;
}, {});
+ // Construct the regular expressions for the custom filters: This
+ // needs to be done in a separate loop as the database returns one
+ // filterRow per keyword, so we need all the keywords before
+ // constructing the regular expression
Object.keys(req.cachedFilters).forEach((key) => {
req.cachedFilters[key].regexp = new RegExp(req.cachedFilters[key].keywords.map(([keyword, whole_word]) => {
let expr = keyword.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
@@ -732,31 +784,58 @@ const startWorker = async (workerId) => {
});
}
- // Check filters
- if (req.cachedFilters && !unpackedPayload.filtered) {
- const status = unpackedPayload;
- const searchContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
- const searchIndex = JSDOM.fragment(searchContent).textContent;
+ // Apply cachedFilters against the payload, constructing a
+ // `filter_results` array of FilterResult entities
+ if (req.cachedFilters) {
+ const status = payload;
+ // TODO: Calculate searchableContent in Ruby on Rails:
+ const searchableContent = ([status.spoiler_text || '', status.content].concat((status.poll && status.poll.options) ? status.poll.options.map(option => option.title) : [])).concat(status.media_attachments.map(att => att.description)).join('\n\n').replace(/<br\s*\/?>/g, '\n').replace(/<\/p><p>/g, '\n\n');
+ const searchableTextContent = JSDOM.fragment(searchableContent).textContent;
const now = new Date();
- payload.filtered = [];
- Object.values(req.cachedFilters).forEach((cachedFilter) => {
- if ((cachedFilter.expires_at === null || cachedFilter.expires_at > now)) {
- const keyword_matches = searchIndex.match(cachedFilter.regexp);
- if (keyword_matches) {
- payload.filtered.push({
- filter: cachedFilter.repr,
- keyword_matches,
- });
- }
+ const filter_results = Object.values(req.cachedFilters).reduce((results, cachedFilter) => {
+ // Check the filter hasn't expired before applying:
+ if (cachedFilter.expires_at !== null && cachedFilter.expires_at < now) {
+ return results;
+ }
+
+ // Just in case JSDOM fails to find textContent in searchableContent
+ if (!searchableTextContent) {
+ return results;
}
+
+ const keyword_matches = searchableTextContent.match(cachedFilter.regexp);
+ if (keyword_matches) {
+ // results is an Array of FilterResult; status_matches is always
+ // null as we are only applying the keyword-based custom
+ // filters, not the status-based custom filters.
+ // https://docs.joinmastodon.org/entities/FilterResult/
+ results.push({
+ filter: cachedFilter.filter,
+ keyword_matches,
+ status_matches: null
+ });
+ }
+
+ return results;
+ }, []);
+
+ // Send the payload + the FilterResults as the `filtered` property
+ // to the streaming connection. To reach this code, the `event` must
+ // have been either `update` or `status.update`, meaning the
+ // `payload` is a Status entity, which has a `filtered` property:
+ //
+ // filtered: https://docs.joinmastodon.org/entities/Status/#filtered
+ transmit(event, {
+ ...payload,
+ filtered: filter_results
});
+ } else {
+ transmit(event, payload);
}
-
- transmit();
}).catch(err => {
+ releasePgConnection();
log.error(err);
- done();
});
});
};
@@ -765,7 +844,7 @@ const startWorker = async (workerId) => {
subscribe(`${redisPrefix}${id}`, listener);
});
- if (attachCloseHandler) {
+ if (typeof attachCloseHandler === 'function') {
attachCloseHandler(ids.map(id => `${redisPrefix}${id}`), listener);
}
@@ -802,12 +881,13 @@ const startWorker = async (workerId) => {
/**
* @param {any} req
* @param {function(): void} [closeHandler]
- * @return {function(string[]): void}
+ * @returns {function(string[], SubscriptionListener): void}
*/
- const streamHttpEnd = (req, closeHandler = undefined) => (ids) => {
+
+ const streamHttpEnd = (req, closeHandler = undefined) => (ids, listener) => {
req.on('close', () => {
ids.forEach(id => {
- unsubscribe(id);
+ unsubscribe(id, listener);
});
if (closeHandler) {
@@ -856,15 +936,15 @@ const startWorker = async (workerId) => {
res.write('# TYPE connected_channels gauge\n');
res.write('# HELP connected_channels The number of Redis channels the streaming server is subscribed to\n');
res.write(`connected_channels ${Object.keys(subs).length}.0\n`);
- res.write('# TYPE pg.pool.total_connections gauge \n');
- res.write('# HELP pg.pool.total_connections The total number of clients existing within the pool\n');
- res.write(`pg.pool.total_connections ${pgPool.totalCount}.0\n`);
- res.write('# TYPE pg.pool.idle_connections gauge \n');
- res.write('# HELP pg.pool.idle_connections The number of clients which are not checked out but are currently idle in the pool\n');
- res.write(`pg.pool.idle_connections ${pgPool.idleCount}.0\n`);
- res.write('# TYPE pg.pool.waiting_queries gauge \n');
- res.write('# HELP pg.pool.waiting_queries The number of queued requests waiting on a client when all clients are checked out\n');
- res.write(`pg.pool.waiting_queries ${pgPool.waitingCount}.0\n`);
+ res.write('# TYPE pg_pool_total_connections gauge\n');
+ res.write('# HELP pg_pool_total_connections The total number of clients existing within the pool\n');
+ res.write(`pg_pool_total_connections ${pgPool.totalCount}.0\n`);
+ res.write('# TYPE pg_pool_idle_connections gauge\n');
+ res.write('# HELP pg_pool_idle_connections The number of clients which are not checked out but are currently idle in the pool\n');
+ res.write(`pg_pool_idle_connections ${pgPool.idleCount}.0\n`);
+ res.write('# TYPE pg_pool_waiting_queries gauge\n');
+ res.write('# HELP pg_pool_waiting_queries The number of queued requests waiting on a client when all clients are checked out\n');
+ res.write(`pg_pool_waiting_queries ${pgPool.waitingCount}.0\n`);
res.write('# EOF\n');
res.end();
}));
@@ -1067,7 +1147,7 @@ const startWorker = async (workerId) => {
* @typedef WebSocketSession
* @property {any} socket
* @property {any} request
- * @property {Object.<string, { listener: function(string): void, stopHeartbeat: function(): void }>} subscriptions
+ * @property {Object.<string, { listener: SubscriptionListener, stopHeartbeat: function(): void }>} subscriptions
*/
/**
@@ -1207,8 +1287,15 @@ const startWorker = async (workerId) => {
ws.on('close', onEnd);
ws.on('error', onEnd);
- ws.on('message', data => {
- const json = parseJSON(data, session.request);
+ ws.on('message', (data, isBinary) => {
+ if (isBinary) {
+ log.warn('socket', 'Received binary data, closing connection');
+ ws.close(1003, 'The mastodon streaming server does not support binary messages');
+ return;
+ }
+ const message = data.toString('utf8');
+
+ const json = parseJSON(message, session.request);
if (!json) return;