Merge branch 'master' into pm-tags
commit 2b509eaa91
357 changed files with 5863 additions and 2023 deletions
@@ -5,14 +5,14 @@ require_dependency "rate_limiter"

class Auth::DefaultCurrentUserProvider

-CURRENT_USER_KEY ||= "_DISCOURSE_CURRENT_USER".freeze
-API_KEY ||= "api_key".freeze
-USER_API_KEY ||= "HTTP_USER_API_KEY".freeze
-USER_API_CLIENT_ID ||= "HTTP_USER_API_CLIENT_ID".freeze
-API_KEY_ENV ||= "_DISCOURSE_API".freeze
-USER_API_KEY_ENV ||= "_DISCOURSE_USER_API".freeze
-TOKEN_COOKIE ||= "_t".freeze
-PATH_INFO ||= "PATH_INFO".freeze
+CURRENT_USER_KEY ||= "_DISCOURSE_CURRENT_USER"
+API_KEY ||= "api_key"
+USER_API_KEY ||= "HTTP_USER_API_KEY"
+USER_API_CLIENT_ID ||= "HTTP_USER_API_CLIENT_ID"
+API_KEY_ENV ||= "_DISCOURSE_API"
+USER_API_KEY_ENV ||= "_DISCOURSE_USER_API"
+TOKEN_COOKIE ||= "_t"
+PATH_INFO ||= "PATH_INFO"
COOKIE_ATTEMPTS_PER_MIN ||= 10

# do all current user initialization here
@@ -86,8 +86,11 @@ class Auth::DefaultCurrentUserProvider
raise Discourse::InvalidAccess if current_user.suspended? || !current_user.active
@env[API_KEY_ENV] = true

-limiter_min = RateLimiter.new(nil, "admin_api_min_#{api_key}", GlobalSetting.max_admin_api_reqs_per_key_per_minute, 60)
-limiter_min.performed!
+# we do not run this rate limiter while profiling
+if Rails.env != "profile"
+limiter_min = RateLimiter.new(nil, "admin_api_min_#{api_key}", GlobalSetting.max_admin_api_reqs_per_key_per_minute, 60)
+limiter_min.performed!
+end
end

# user api key handling
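Note: the admin API limiter above is now skipped in the profiling environment. A minimal sketch of the guarded call, using only the RateLimiter and GlobalSetting calls that appear in this hunk; the api_key value is a placeholder.

# Sketch: per-key admin API rate limit, skipped while profiling.
api_key = "some_admin_key" # stands in for the key taken from the request
if Rails.env != "profile"
  limiter = RateLimiter.new(nil, "admin_api_min_#{api_key}", GlobalSetting.max_admin_api_reqs_per_key_per_minute, 60)
  limiter.performed! # raises once the per-minute budget is exhausted
end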
@@ -368,15 +368,13 @@ class CookedPostProcessor
end

def post_process_oneboxes
-args = {
-post_id: @post.id,
-invalidate_oneboxes: !!@opts[:invalidate_oneboxes],
-}
-
-# apply oneboxes
-Oneboxer.apply(@doc, topic_id: @post.topic_id) do |url|
+Oneboxer.apply(@doc) do |url|
@has_oneboxes = true
-Oneboxer.onebox(url, args)
+Oneboxer.onebox(url,
+invalidate_oneboxes: !!@opts[:invalidate_oneboxes],
+user_id: @post&.user_id,
+category_id: @post&.topic&.category_id
+)
end

oneboxed_images.each do |img|
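Note: the onebox call now passes the acting user and category instead of a pre-built args hash, so local oneboxes can run the same visibility checks as the new Oneboxer code further down. A small sketch of the new call shape; the URL is illustrative and `post` is assumed to be in scope as in the hunk above.

# Sketch: cooking a single onebox with the caller's context.
html = Oneboxer.onebox(
  "https://example.com/some-page",     # placeholder URL
  invalidate_oneboxes: false,
  user_id: post&.user_id,
  category_id: post&.topic&.category_id
)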
@@ -236,15 +236,11 @@ module Discourse
end

def self.route_for(uri)
-
-uri = URI(uri) rescue nil unless (uri.is_a?(URI))
+uri = URI(uri) rescue nil unless uri.is_a?(URI)
return unless uri

path = uri.path || ""
-if (uri.host == Discourse.current_hostname &&
-path.start_with?(Discourse.base_uri)) ||
-!uri.host
-
+if !uri.host || (uri.host == Discourse.current_hostname && path.start_with?(Discourse.base_uri))
path.slice!(Discourse.base_uri)
return Rails.application.routes.recognize_path(path)
end
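Note: `route_for` accepts a URI or a string and only recognizes host-less paths or paths on the current hostname. A hedged sketch of the result shape; the hostname is illustrative (assumed to equal Discourse.current_hostname) and the exact keys come from Rails' recognize_path.

# Sketch: resolving a local URL back to a route.
route = Discourse.route_for("https://forum.example.com/t/some-topic/123/4")
# => e.g. { controller: "topics", action: "show", topic_id: "123", post_number: "4", ... }
Discourse.route_for("not a url")  # => nil (URI parsing fails, so the method bails out)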
@@ -62,7 +62,7 @@ module Email
subject = String.new(SiteSetting.email_subject)
subject.gsub!("%{site_name}", @template_args[:email_prefix])
subject.gsub!("%{optional_re}", @opts[:add_re_to_subject] ? I18n.t('subject_re', @template_args) : '')
-subject.gsub!("%{optional_pm}", @opts[:private_reply] ? I18n.t('subject_pm', @template_args) : '')
+subject.gsub!("%{optional_pm}", @opts[:private_reply] ? @template_args[:subject_pm] : '')
subject.gsub!("%{optional_cat}", @template_args[:show_category_in_subject] ? "[#{@template_args[:show_category_in_subject]}] " : '')
subject.gsub!("%{topic_title}", @template_args[:topic_title]) if @template_args[:topic_title] # must be last for safety
else
@@ -238,11 +238,13 @@ module Email
text_content_type = @mail.text_part&.content_type
elsif @mail.content_type.to_s["text/html"]
html = fix_charset(@mail)
-else
+elsif @mail.content_type.blank? || @mail.content_type["text/plain"]
text = fix_charset(@mail)
+text_content_type = @mail.content_type
end

return unless text.present? || html.present?

if text.present?
text = trim_discourse_markers(text)
text, elided_text = trim_reply_and_extract_elided(text)
@@ -690,11 +692,17 @@ module Email
raise InvalidPostAction.new(e)
end

+def is_whitelisted_attachment?(attachment)
+attachment.content_type !~ SiteSetting.attachment_content_type_blacklist_regex &&
+attachment.filename !~ SiteSetting.attachment_filename_blacklist_regex
+end
+
def attachments
# strip blacklisted attachments (mostly signatures)
-@attachments ||= @mail.attachments.select do |attachment|
-attachment.content_type !~ SiteSetting.attachment_content_type_blacklist_regex &&
-attachment.filename !~ SiteSetting.attachment_filename_blacklist_regex
+@attachments ||= begin
+attachments = @mail.attachments.select { |attachment| is_whitelisted_attachment?(attachment) }
+attachments << @mail if @mail.attachment? && is_whitelisted_attachment?(@mail)
+attachments
end
end

@@ -73,7 +73,7 @@ class FinalDestination
"Host" => @uri.hostname
}

-result['cookie'] = @cookie if @cookie
+result['Cookie'] = @cookie if @cookie

result
end
@@ -164,7 +164,7 @@ class FinalDestination
)

location = nil
-headers = nil
+response_headers = nil

response_status = response.status.to_i

@@ -181,31 +181,29 @@ class FinalDestination
return @uri
end

-headers = {}
+response_headers = {}
if cookie_val = get_response.get_fields('set-cookie')
-headers['set-cookie'] = cookie_val.join
+response_headers[:cookies] = cookie_val
end

-# TODO this is confusing why grab location for anything not
-# between 300-400 ?
if location_val = get_response.get_fields('location')
-headers['location'] = location_val.join
+response_headers[:location] = location_val.join
end
end

-unless headers
-headers = {}
-response.headers.each do |k, v|
-headers[k.to_s.downcase] = v
-end
+unless response_headers
+response_headers = {
+cookies: response.data[:cookies] || response.headers[:"set-cookie"],
+location: response.headers[:location]
+}
end

if (300..399).include?(response_status)
-location = headers["location"]
+location = response_headers[:location]
end

-if set_cookie = headers["set-cookie"]
-@cookie = set_cookie
+if cookies = response_headers[:cookies]
+@cookie = Array.wrap(cookies).map { |c| c.split(';').first.strip }.join('; ')
end

if location
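Note: with the switch from a raw set-cookie header string to a :cookies array, redirect handling now rebuilds the Cookie request header from the individual values. A standalone sketch of that collapsing step, with made-up Set-Cookie values.

# Sketch: turning Set-Cookie response values into a single Cookie request header.
cookies = ["_t=abc123; path=/; HttpOnly", "_forum_session=xyz; path=/"] # illustrative values
cookie_header = Array.wrap(cookies).map { |c| c.split(';').first.strip }.join('; ')
# => "_t=abc123; _forum_session=xyz"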
@@ -22,7 +22,9 @@ module PostGuardian
result = if authenticated? && post && !@user.anonymous?

# post made by staff, but we don't allow staff flags
-return false if !SiteSetting.allow_flagging_staff? && post.user.staff?
+return false if is_flag &&
+(!SiteSetting.allow_flagging_staff?) &&
+post.user.staff?

return false if [:notify_user, :notify_moderators].include?(action_key) &&
!SiteSetting.enable_personal_messages?

@@ -72,4 +72,8 @@ module UserGuardian
user == @user || is_staff?
end

+def can_disable_second_factor?(user)
+user && can_administer_user?(user)
+end
+
end
@@ -51,12 +51,14 @@ module Hijack
instance.response.headers[k] = v
end

+view_start = Process.clock_gettime(Process::CLOCK_MONOTONIC)
begin
instance.instance_eval(&blk)
rescue => e
# TODO we need to reuse our exception handling in ApplicationController
Discourse.warn_exception(e, message: "Failed to process hijacked response correctly", env: env)
end
+view_runtime = Process.clock_gettime(Process::CLOCK_MONOTONIC) - view_start

unless instance.response_body || response.committed?
instance.status = 500
@@ -94,6 +96,34 @@ module Hijack
# happens if client terminated before we responded, ignore
io = nil
ensure

+if Rails.configuration.try(:lograge).try(:enabled)
+if timings
+db_runtime = 0
+if timings[:sql]
+db_runtime = timings[:sql][:duration]
+end
+
+subscriber = Lograge::RequestLogSubscriber.new
+payload = ActiveSupport::HashWithIndifferentAccess.new(
+controller: self.class.name,
+action: action_name,
+params: request.filtered_parameters,
+headers: request.headers,
+format: request.format.ref,
+method: request.request_method,
+path: request.fullpath,
+view_runtime: view_runtime * 1000.0,
+db_runtime: db_runtime * 1000.0,
+timings: timings,
+status: response.status
+)
+
+event = ActiveSupport::Notifications::Event.new("hijack", Time.now, Time.now + timings[:total_duration], "", payload)
+subscriber.process_action(event)
+end
+end
+
MethodProfiler.clear
Thread.current[Logster::Logger::LOGSTER_ENV] = nil

@@ -1,151 +0,0 @@
-# frozen_string_literal: true
-#
-# this class is used to normalize html output for internal comparisons in specs
-#
-require 'oga'
-
-class HtmlNormalize
-
-def self.normalize(html)
-parsed = Oga.parse_html(html.strip, strict: true)
-if parsed.children.length != 1
-puts parsed.children.count
-raise "expecting a single child"
-end
-new(parsed.children.first).format
-end
-
-SELF_CLOSE = Set.new(%w{area base br col command embed hr img input keygen line meta param source track wbr})
-
-BLOCK = Set.new(%w{
-html
-body
-aside
-p
-h1 h2 h3 h4 h5 h6
-ol ul
-address
-blockquote
-dl
-div
-fieldset
-form
-hr
-noscript
-table
-pre
-})
-
-def initialize(doc)
-@doc = doc
-end
-
-def format
-buffer = String.new
-dump_node(@doc, 0, buffer)
-buffer.strip!
-buffer
-end
-
-def inline?(node)
-Oga::XML::Text === node || !BLOCK.include?(node.name.downcase)
-end
-
-def dump_node(node, indent = 0, buffer)
-
-if Oga::XML::Text === node
-if node.parent&.name
-buffer << node.text
-end
-return
-end
-
-name = node.name.downcase
-
-block = BLOCK.include?(name)
-
-buffer << " " * indent * 2 if block
-
-buffer << "<" << name
-
-attrs = node&.attributes
-if (attrs && attrs.length > 0)
-attrs.sort! { |x, y| x.name <=> y.name }
-attrs.each do |a|
-buffer << " "
-buffer << a.name
-if a.value
-buffer << "='"
-buffer << a.value
-buffer << "'"
-end
-end
-end
-
-buffer << ">"
-
-if block
-buffer << "\n"
-end
-
-children = node.children
-children = trim(children) if block
-
-inline_buffer = nil
-
-children&.each do |child|
-if block && inline?(child)
-inline_buffer ||= String.new
-dump_node(child, indent + 1, inline_buffer)
-else
-if inline_buffer
-buffer << " " * (indent + 1) * 2
-buffer << inline_buffer.strip
-inline_buffer = nil
-else
-dump_node(child, indent + 1, buffer)
-end
-end
-end
-
-if inline_buffer
-buffer << " " * (indent + 1) * 2
-buffer << inline_buffer.strip
-inline_buffer = nil
-end
-
-if block
-buffer << "\n" unless buffer[-1] == "\n"
-buffer << " " * indent * 2
-end
-
-unless SELF_CLOSE.include?(name)
-buffer << "</" << name
-buffer << ">\n"
-end
-end
-
-def trim(nodes)
-start = 0
-finish = nodes.length
-
-nodes.each do |n|
-if Oga::XML::Text === n && n.text.blank?
-start += 1
-else
-break
-end
-end
-
-nodes.reverse_each do |n|
-if Oga::XML::Text === n && n.text.blank?
-finish -= 1
-else
-break
-end
-end
-
-nodes[start...finish]
-end
-
-end
@@ -24,6 +24,14 @@ class Middleware::RequestTracker
MethodProfiler.patch(Redis::Client, [
:call, :call_pipeline
], :redis)

+MethodProfiler.patch(Net::HTTP, [
+:request
+], :net)
+
+MethodProfiler.patch(Excon::Connection, [
+:request
+], :net)
@patched_instrumentation = true
end

@@ -1,129 +0,0 @@
-module Onebox
-module Engine
-class DiscourseLocalOnebox
-include Engine
-
-# Use this onebox before others
-def self.priority
-1
-end
-
-def self.===(other)
-url = other.to_s
-return false unless url[Discourse.base_url]
-
-route = Discourse.route_for(url)
-
-!!(route[:controller] =~ /topics|uploads|users/)
-rescue ActionController::RoutingError
-false
-end
-
-def to_html
-uri = URI(@url)
-path = uri.path || ""
-route = Discourse.route_for(uri)
-
-case route[:controller]
-when "uploads" then upload_html(path)
-when "topics" then topic_html(route)
-when "users" then user_html(route)
-end
-end
-
-private
-
-def upload_html(path)
-case File.extname(path)
-when /^\.(mov|mp4|webm|ogv)$/i
-"<video width='100%' height='100%' controls><source src='#{@url}'><a href='#{@url}'>#{@url}</a></video>"
-when /^\.(mp3|ogg|wav|m4a)$/i
-"<audio controls><source src='#{@url}'><a href='#{@url}'>#{@url}</a></audio>"
-end
-end
-
-def topic_html(route)
-link = "<a href='#{@url}'>#{@url}</a>"
-source_topic_id = @url[/[&?]source_topic_id=(\d+)/, 1].to_i
-source_topic = Topic.find_by(id: source_topic_id) if source_topic_id > 0
-
-if route[:post_number].present? && route[:post_number].to_i > 1
-post = Post.find_by(topic_id: route[:topic_id], post_number: route[:post_number])
-return link unless can_see_post?(post, source_topic)
-
-topic = post.topic
-slug = Slug.for(topic.title)
-excerpt = post.excerpt(SiteSetting.post_onebox_maxlength)
-excerpt.gsub!(/[\r\n]+/, " ")
-excerpt.gsub!("[/quote]", "[quote]") # don't break my quote
-
-quote = "[quote=\"#{post.user.username}, topic:#{topic.id}, slug:#{slug}, post:#{post.post_number}\"]\n#{excerpt}\n[/quote]"
-
-args = {}
-args[:topic_id] = source_topic_id if source_topic_id > 0
-
-PrettyText.cook(quote, args)
-else
-topic = Topic.find_by(id: route[:topic_id])
-return link unless can_see_topic?(topic, source_topic)
-
-first_post = topic.ordered_posts.first
-
-args = {
-topic_id: topic.id,
-avatar: PrettyText.avatar_img(topic.user.avatar_template, "tiny"),
-original_url: @url,
-title: PrettyText.unescape_emoji(CGI::escapeHTML(topic.title)),
-category_html: CategoryBadge.html_for(topic.category),
-quote: first_post.excerpt(SiteSetting.post_onebox_maxlength),
-}
-
-template = File.read("#{Rails.root}/lib/onebox/templates/discourse_topic_onebox.hbs")
-Mustache.render(template, args)
-end
-end
-
-def user_html(route)
-link = "<a href='#{@url}'>#{@url}</a>"
-username = route[:username] || ''
-user = User.find_by(username_lower: username.downcase)
-
-if user
-args = {
-user_id: user.id,
-username: user.username,
-avatar: PrettyText.avatar_img(user.avatar_template, "extra_large"),
-name: user.name,
-bio: user.user_profile.bio_excerpt(230),
-location: user.user_profile.location,
-joined: I18n.t('joined'),
-created_at: user.created_at.strftime(I18n.t('datetime_formats.formats.date_only')),
-website: user.user_profile.website,
-website_name: UserSerializer.new(user).website_name,
-original_url: @url
-}
-
-template = File.read("#{Rails.root}/lib/onebox/templates/discourse_user_onebox.hbs")
-Mustache.render(template, args)
-else
-return link
-end
-end
-
-def can_see_post?(post, source_topic)
-return false if post.nil? || post.hidden || post.trashed? || post.topic.nil?
-Guardian.new.can_see_post?(post) || same_category?(post.topic.category, source_topic)
-end
-
-def can_see_topic?(topic, source_topic)
-return false if topic.nil? || topic.trashed? || topic.private_message?
-Guardian.new.can_see_topic?(topic) || same_category?(topic.category, source_topic)
-end
-
-def same_category?(category, source_topic)
-source_topic.try(:category_id) == category.try(:id)
-end
-
-end
-end
-end
lib/oneboxer.rb (148 lines changed)
@@ -28,13 +28,13 @@ module Oneboxer
def self.preview(url, options = nil)
options ||= {}
invalidate(url) if options[:invalidate_oneboxes]
-onebox_raw(url)[:preview]
+onebox_raw(url, options)[:preview]
end

def self.onebox(url, options = nil)
options ||= {}
invalidate(url) if options[:invalidate_oneboxes]
-onebox_raw(url)[:onebox]
+onebox_raw(url, options)[:onebox]
end

def self.cached_onebox(url)
@@ -76,41 +76,22 @@ module Oneboxer
doc
end

-def self.append_source_topic_id(url, topic_id)
-# hack urls to create proper expansions
-if url =~ Regexp.new("^#{Discourse.base_url.gsub(".", "\\.")}.*$", true)
-uri = URI.parse(url) rescue nil
-if uri && uri.path
-route = Rails.application.routes.recognize_path(uri.path) rescue nil
-if route && route[:controller] == 'topics'
-url += (url =~ /\?/ ? "&" : "?") + "source_topic_id=#{topic_id}"
-end
-end
-end
-url
-end
-
def self.apply(string_or_doc, args = nil)
doc = string_or_doc
doc = Nokogiri::HTML::fragment(doc) if doc.is_a?(String)
changed = false

each_onebox_link(doc) do |url, element|
-if args && args[:topic_id]
-url = append_source_topic_id(url, args[:topic_id])
-end
-onebox, _preview = yield(url, element)
+onebox, _ = yield(url, element)
if onebox
parsed_onebox = Nokogiri::HTML::fragment(onebox)
next unless parsed_onebox.children.count > 0

# special logic to strip empty p elements
-if element.parent &&
-element.parent.node_name &&
-element.parent.node_name.downcase == "p" &&
-element.parent.children.count == 1
+if element&.parent&.node_name&.downcase == "p" && element&.parent&.children&.count == 1
element = element.parent
end

changed = true
element.swap parsed_onebox.to_html
end
@@ -149,7 +130,116 @@ module Oneboxer
"onebox__#{url}"
end

-def self.onebox_raw(url)
+def self.onebox_raw(url, opts = {})
+local_onebox(url, opts) || external_onebox(url)
+rescue => e
+# no point warning here, just cause we have an issue oneboxing a url
+# we can later hunt for failed oneboxes by searching logs if needed
+Rails.logger.info("Failed to onebox #{url} #{e} #{e.backtrace}")
+# return a blank hash, so rest of the code works
+blank_onebox
+end
+
+def self.local_onebox(url, opts = {})
+return unless route = Discourse.route_for(url)
+
+html =
+case route[:controller]
+when "uploads" then local_upload_html(url)
+when "topics" then local_topic_html(url, route, opts)
+when "users" then local_user_html(url, route)
+end
+
+html = html.presence || "<a href='#{url}'>#{url}</a>"
+{ onebox: html, preview: html }
+end
+
+def self.local_upload_html(url)
+case File.extname(URI(url).path || "")
+when /^\.(mov|mp4|webm|ogv)$/i
+"<video width='100%' height='100%' controls><source src='#{url}'><a href='#{url}'>#{url}</a></video>"
+when /^\.(mp3|ogg|wav|m4a)$/i
+"<audio controls><source src='#{url}'><a href='#{url}'>#{url}</a></audio>"
+end
+end
+
+def self.local_topic_html(url, route, opts)
+return unless current_user = User.find_by(id: opts[:user_id])
+
+if current_category = Category.find_by(id: opts[:category_id])
+return unless Guardian.new(current_user).can_see_category?(current_category)
+end
+
+if current_topic = Topic.find_by(id: opts[:topic_id])
+return unless Guardian.new(current_user).can_see_topic?(current_topic)
+end
+
+topic = Topic.find_by(id: route[:topic_id])
+
+return unless topic
+return if topic.private_message?
+
+if current_category&.id != topic.category_id
+return unless Guardian.new.can_see_topic?(topic)
+end
+
+post_number = route[:post_number].to_i
+
+post = post_number > 1 ?
+topic.posts.where(post_number: post_number).first :
+topic.ordered_posts.first
+
+return if !post || post.hidden || post.post_type != Post.types[:regular]
+
+if post_number > 1 && current_topic&.id == topic.id
+excerpt = post.excerpt(SiteSetting.post_onebox_maxlength)
+excerpt.gsub!(/[\r\n]+/, " ")
+excerpt.gsub!("[/quote]", "[quote]") # don't break my quote
+
+quote = "[quote=\"#{post.user.username}, topic:#{topic.id}, post:#{post.post_number}\"]\n#{excerpt}\n[/quote]"
+
+PrettyText.cook(quote)
+else
+args = {
+topic_id: topic.id,
+avatar: PrettyText.avatar_img(post.user.avatar_template, "tiny"),
+original_url: url,
+title: PrettyText.unescape_emoji(CGI::escapeHTML(topic.title)),
+category_html: CategoryBadge.html_for(topic.category),
+quote: post.excerpt(SiteSetting.post_onebox_maxlength),
+}
+
+template = File.read("#{Rails.root}/lib/onebox/templates/discourse_topic_onebox.hbs")
+Mustache.render(template, args)
+end
+end
+
+def self.local_user_html(url, route)
+username = route[:username] || ""
+
+if user = User.find_by(username_lower: username.downcase)
+args = {
+user_id: user.id,
+username: user.username,
+avatar: PrettyText.avatar_img(user.avatar_template, "extra_large"),
+name: user.name,
+bio: user.user_profile.bio_excerpt(230),
+location: user.user_profile.location,
+joined: I18n.t('joined'),
+created_at: user.created_at.strftime(I18n.t('datetime_formats.formats.date_only')),
+website: user.user_profile.website,
+website_name: UserSerializer.new(user).website_name,
+original_url: url
+}
+
+template = File.read("#{Rails.root}/lib/onebox/templates/discourse_user_onebox.hbs")
+Mustache.render(template, args)
+else
+nil
+end
+end
+
+def self.external_onebox(url)
Rails.cache.fetch(onebox_cache_key(url), expires_in: 1.day) do
fd = FinalDestination.new(url, ignore_redirects: ignore_redirects, force_get_hosts: force_get_hosts)
uri = fd.resolve
@@ -169,14 +259,8 @@ module Oneboxer

r = Onebox.preview(uri.to_s, options)

-{ onebox: r.to_s, preview: r.try(:placeholder_html).to_s }
+{ onebox: r.to_s, preview: r&.placeholder_html.to_s }
end
-rescue => e
-# no point warning here, just cause we have an issue oneboxing a url
-# we can later hunt for failed oneboxes by searching logs if needed
-Rails.logger.info("Failed to onebox #{url} #{e} #{e.backtrace}")
-# return a blank hash, so rest of the code works
-blank_onebox
-end

end
@@ -1,7 +1,7 @@
require_dependency 'search/grouped_search_results'

class Search
-INDEX_VERSION = 1.freeze
+INDEX_VERSION = 2.freeze

def self.per_facet
5
@@ -409,7 +409,7 @@ class Search
if match.to_s.length >= SiteSetting.min_search_term_length
posts.where("posts.id IN (
SELECT post_id FROM post_search_data pd1
-WHERE pd1.search_data @@ #{Search.ts_query("##{match}")})")
+WHERE pd1.search_data @@ #{Search.ts_query(term: "##{match}")})")
end
end
end
@@ -511,12 +511,17 @@ class Search
end
end

+@in_title = false
+
if word == 'order:latest' || word == 'l'
@order = :latest
nil
elsif word == 'order:latest_topic'
@order = :latest_topic
nil
+elsif word == 'in:title'
+@in_title = true
+nil
elsif word =~ /topic:(\d+)/
topic_id = $1.to_i
if topic_id > 1
@@ -681,7 +686,12 @@ class Search
posts = posts.joins('JOIN users u ON u.id = posts.user_id')
posts = posts.where("posts.raw || ' ' || u.username || ' ' || COALESCE(u.name, '') ilike ?", "%#{term_without_quote}%")
else
-posts = posts.where("post_search_data.search_data @@ #{ts_query}")
+# A is for title
+# B is for category
+# C is for tags
+# D is for cooked
+weights = @in_title ? 'A' : (SiteSetting.tagging_enabled ? 'ABCD' : 'ABD')
+posts = posts.where("post_search_data.search_data @@ #{ts_query(weight_filter: weights)}")
exact_terms = @term.scan(/"([^"]+)"/).flatten
exact_terms.each do |exact|
posts = posts.where("posts.raw ilike ?", "%#{exact}%")
@@ -743,11 +753,9 @@ class Search
posts = posts.order("posts.like_count DESC")
end
else
-posts = posts.order("TS_RANK_CD(TO_TSVECTOR(#{default_ts_config}, topics.title), #{ts_query}) DESC")
-
data_ranking = "TS_RANK_CD(post_search_data.search_data, #{ts_query})"
if opts[:aggregate_search]
-posts = posts.order("SUM(#{data_ranking}) DESC")
+posts = posts.order("MAX(#{data_ranking}) DESC")
else
posts = posts.order("#{data_ranking} DESC")
end
@@ -772,7 +780,7 @@ class Search
self.class.default_ts_config
end

-def self.ts_query(term, ts_config = nil, joiner = "&")
+def self.ts_query(term: , ts_config: nil, joiner: "&", weight_filter: nil)

data = Post.exec_sql("SELECT TO_TSVECTOR(:config, :term)",
config: 'simple',
@@ -786,16 +794,17 @@ class Search

query = ActiveRecord::Base.connection.quote(
all_terms
-.map { |t| "'#{PG::Connection.escape_string(t)}':*" }
+.map { |t| "'#{PG::Connection.escape_string(t)}':*#{weight_filter}" }
.join(" #{joiner} ")
)

"TO_TSQUERY(#{ts_config || default_ts_config}, #{query})"
end

-def ts_query(ts_config = nil)
+def ts_query(ts_config = nil, weight_filter: nil)
@ts_query_cache ||= {}
-@ts_query_cache["#{ts_config || default_ts_config} #{@term}"] ||= Search.ts_query(@term, ts_config)
+@ts_query_cache["#{ts_config || default_ts_config} #{@term} #{weight_filter}"] ||=
+Search.ts_query(term: @term, ts_config: ts_config, weight_filter: weight_filter)
end

def wrap_rows(query)
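Note: ts_query now takes keyword arguments and can append a tsvector weight filter to each lexeme, which is how the new in:title operator restricts matches to the 'A' (title) weight. A hedged sketch of the generated SQL fragment; the ts config shown is illustrative and depends on default_ts_config.

# Sketch: weighted full-text query fragments.
Search.ts_query(term: "ruby rails", weight_filter: 'A')
# => roughly TO_TSQUERY('english', "'ruby':*A & 'rails':*A")
Search.ts_query(term: "ruby rails")
# => roughly TO_TSQUERY('english', "'ruby':* & 'rails':*")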
@@ -34,7 +34,8 @@ class Stylesheet::Manager
theme_key = SiteSetting.default_theme_key
end

-cache_key = "#{target}_#{theme_key}"
+current_hostname = Discourse.current_hostname
+cache_key = "#{target}_#{theme_key}_#{current_hostname}"
tag = cache[cache_key]

return tag.dup.html_safe if tag
@@ -45,7 +46,7 @@ class Stylesheet::Manager
tag = ""
else
builder.compile unless File.exists?(builder.stylesheet_fullpath)
-tag = %[<link href="#{builder.stylesheet_path}" media="#{media}" rel="stylesheet" data-target="#{target}"/>]
+tag = %[<link href="#{builder.stylesheet_path(current_hostname)}" media="#{media}" rel="stylesheet" data-target="#{target}"/>]
end

cache[cache_key] = tag
@@ -181,12 +182,12 @@ class Stylesheet::Manager
"#{cache_fullpath}/#{stylesheet_filename_no_digest}"
end

-def stylesheet_cdnpath
-"#{GlobalSetting.cdn_url}#{stylesheet_relpath}?__ws=#{Discourse.current_hostname}"
+def stylesheet_cdnpath(hostname)
+"#{GlobalSetting.cdn_url}#{stylesheet_relpath}?__ws=#{hostname}"
end

-def stylesheet_path
-stylesheet_cdnpath
+def stylesheet_path(hostname)
+stylesheet_cdnpath(hostname)
end

def root_path
@@ -5,52 +5,37 @@ end
def reindex_search(db = RailsMultisite::ConnectionManagement.current_db)
puts "Reindexing '#{db}'"
puts ""
-puts "Posts:"
-Post.exec_sql("select p.id, p.cooked, c.name category, t.title, p.post_number, t.id topic_id from
-posts p
-join topics t on t.id = p.topic_id
-left join categories c on c.id = t.category_id
-").each do |p|
-post_id = p["id"]
-cooked = p["cooked"]
-title = p["title"]
-category = p["cat"]
-post_number = p["post_number"].to_i
-topic_id = p["topic_id"].to_i
-
-SearchIndexer.update_posts_index(post_id, cooked, title, category)
-SearchIndexer.update_topics_index(topic_id, title , cooked) if post_number == 1
-
+puts "Posts"
+Post.includes(topic: [:category, :tags]).find_each do |p|
+if p.post_number == 1
+SearchIndexer.index(p.topic, force: true)
+else
+SearchIndexer.index(p, force: true)
+end
putc "."
end

puts
-puts "Users:"
-User.exec_sql("select id, name, username from users").each do |u|
-id = u["id"]
-name = u["name"]
-username = u["username"]
-SearchIndexer.update_users_index(id, username, name)
-
+puts "Users"
+User.find_each do |u|
+SearchIndexer.index(u, force: true)
putc "."
end

puts
puts "Categories"

-Category.exec_sql("select id, name from categories").each do |c|
-id = c["id"]
-name = c["name"]
-SearchIndexer.update_categories_index(id, name)
-
-putc '.'
+Category.find_each do |c|
+SearchIndexer.index(c, force: true)
+putc "."
end

-puts '', 'Tags'
+puts
+puts "Tags"

-Tag.exec_sql('select id, name from tags').each do |t|
-SearchIndexer.update_tags_index(t['id'], t['name'])
-putc '.'
+Tag.find_each do |t|
+SearchIndexer.index(t, force: true)
+putc "."
end

puts
lib/tasks/site_settings.rake (new file, 62 lines)
@@ -0,0 +1,62 @@
+require 'yaml'
+
+class SiteSettingsTask
+def self.export_to_hash
+site_settings = SiteSetting.all_settings
+h = {}
+site_settings.each do |site_setting|
+h.store(site_setting[:setting].to_s, site_setting[:value])
+end
+h
+end
+end
+
+desc "Exports site settings"
+task "site_settings:export" => :environment do
+h = SiteSettingsTask.export_to_hash
+puts h.to_yaml
+end
+
+desc "Imports site settings"
+task "site_settings:import" => :environment do
+yml = (STDIN.tty?) ? '' : STDIN.read
+if yml == ''
+puts ""
+puts "Please specify a settings yml file"
+puts "Example: rake site_settings:import < settings.yml"
+exit 1
+end
+
+puts ""
+puts "starting import..."
+puts ""
+
+h = SiteSettingsTask.export_to_hash
+counts = { updated: 0, not_found: 0, errors: 0 }
+
+site_settings = YAML::load(yml)
+site_settings.each do |site_setting|
+key = site_setting[0]
+val = site_setting[1]
+if h.has_key?(key)
+if val != h[key] #only update if different
+begin
+result = SiteSetting.set_and_log(key, val)
+puts "Changed #{key} FROM: #{result.previous_value} TO: #{result.new_value}"
+counts[:updated] += 1
+rescue => e
+puts "ERROR: #{e.message}"
+counts[:errors] += 1
+end
+end
+else
+puts "NOT FOUND: existing site setting not found for #{key}"
+counts[:not_found] += 1
+end
+end
+puts ""
+puts "Results:"
+puts " Updated: #{counts[:updated]}"
+puts " Not Found: #{counts[:not_found]}"
+puts " Errors: #{counts[:errors]}"
+end
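Note: the two tasks round-trip settings through YAML on stdout/stdin, so an export from one site can be piped into an import on another (rake site_settings:export > settings.yml, then rake site_settings:import < settings.yml). A minimal sketch of the same round trip using the helper class directly; the settings.yml path is illustrative.

# Sketch: compare an exported YAML file against the current settings and apply differences.
current = SiteSettingsTask.export_to_hash            # { "title" => "...", ... }
incoming = YAML.load(File.read("settings.yml"))      # assumes a file produced by site_settings:export
incoming.each do |key, val|
  next unless current.key?(key) && current[key] != val
  SiteSetting.set_and_log(key, val)                  # same call the import task uses
end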
@@ -17,7 +17,8 @@ class TextCleaner
remove_all_periods_from_the_end: SiteSetting.title_prettify,
remove_extraneous_space: SiteSetting.title_prettify && SiteSetting.default_locale == "en",
fixes_interior_spaces: true,
-strip_whitespaces: true
+strip_whitespaces: true,
+strip_zero_width_spaces: true
}
end

@@ -47,6 +48,8 @@ class TextCleaner
text = normalize_whitespaces(text)
# Strip whitespaces
text.strip! if opts[:strip_whitespaces]
+# Strip zero width spaces
+text.gsub!(/\u200b/, '') if opts[:strip_zero_width_spaces]

text
end
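Note: the new option removes U+200B characters, which survive strip! because they are not ordinary whitespace. A tiny sketch of the effect on a title string:

# Sketch: zero width spaces are invisible but still count as characters.
title = "Hello\u200bWorld\u200b"
title.gsub!(/\u200b/, '')   # title is now "HelloWorld"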
@@ -610,7 +610,7 @@ class TopicQuery
end

if search = options[:search]
-result = result.where("topics.id in (select pp.topic_id from post_search_data pd join posts pp on pp.id = pd.post_id where pd.search_data @@ #{Search.ts_query(search.to_s)})")
+result = result.where("topics.id in (select pp.topic_id from post_search_data pd join posts pp on pp.id = pd.post_id where pd.search_data @@ #{Search.ts_query(term: search.to_s)})")
end

# NOTE protect against SYM attack can be removed with Ruby 2.2
lib/validators/max_emojis_validator.rb (new file, 11 lines)
@@ -0,0 +1,11 @@
+class MaxEmojisValidator < ActiveModel::EachValidator
+
+def validate_each(record, attribute, value)
+if Emoji.unicode_unescape(value).scan(/:([\w\-+]*(?::t\d)?):/).size > SiteSetting.max_emojis_in_title
+record.errors.add(
+attribute, :max_emojis,
+max_emojis_count: SiteSetting.max_emojis_in_title
+)
+end
+end
+end
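Note: as an ActiveModel::EachValidator, the class is attached through validates; the hook-up below is a hypothetical sketch (the model Discourse actually wires this to is not shown in this diff), relying only on the standard naming-convention lookup.

# Hypothetical sketch: the max_emojis option resolves to MaxEmojisValidator by name.
class ExampleRecord
  include ActiveModel::Validations
  attr_accessor :title
  validates :title, max_emojis: true
end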
@@ -82,8 +82,25 @@ class Validators::PostValidator < ActiveModel::Validator

# Ensure new users can not put too many images in a post
def max_images_validator(post)
-return if acting_user_is_trusted?(post) || private_message?(post)
-add_error_if_count_exceeded(post, :no_images_allowed, :too_many_images, post.image_count, SiteSetting.newuser_max_images)
+return if post.acting_user.blank?
+
+if post.acting_user.trust_level < TrustLevel[SiteSetting.min_trust_to_post_images]
+add_error_if_count_exceeded(
+post,
+:no_images_allowed_trust,
+:no_images_allowed_trust,
+post.image_count,
+0
+)
+elsif post.acting_user.trust_level == TrustLevel[0]
+add_error_if_count_exceeded(
+post,
+:no_images_allowed,
+:too_many_images,
+post.image_count,
+SiteSetting.newuser_max_images
+)
+end
end

# Ensure new users can not put too many attachments in a post
@@ -29,11 +29,11 @@ class Validators::UploadValidator < ActiveModel::Validator
end

def is_authorized?(upload, extension)
-authorized_extensions(upload, extension, authorized_uploads(upload))
+extension_authorized?(upload, extension, authorized_extensions(upload))
end

def authorized_image_extension(upload, extension)
-authorized_extensions(upload, extension, authorized_images(upload))
+extension_authorized?(upload, extension, authorized_images(upload))
end

def maximum_image_file_size(upload)
@@ -41,7 +41,7 @@ class Validators::UploadValidator < ActiveModel::Validator
end

def authorized_attachment_extension(upload, extension)
-authorized_extensions(upload, extension, authorized_attachments(upload))
+extension_authorized?(upload, extension, authorized_attachments(upload))
end

def maximum_attachment_file_size(upload)
@@ -50,38 +50,50 @@ class Validators::UploadValidator < ActiveModel::Validator

private

-def authorized_uploads(upload)
-authorized_uploads = Set.new
+def extensions_to_set(exts)
+extensions = Set.new

-extensions = upload.for_theme ? SiteSetting.theme_authorized_extensions : SiteSetting.authorized_extensions
-
-extensions
+exts
.gsub(/[\s\.]+/, "")
.downcase
.split("|")
-.each { |extension| authorized_uploads << extension unless extension.include?("*") }
+.each { |extension| extensions << extension unless extension.include?("*") }

-authorized_uploads
+extensions
end

+def authorized_extensions(upload)
+extensions = upload.for_theme ? SiteSetting.theme_authorized_extensions : SiteSetting.authorized_extensions
+extensions_to_set(extensions)
+end
+
def authorized_images(upload)
-authorized_uploads(upload) & FileHelper.images
+authorized_extensions(upload) & FileHelper.images
end

def authorized_attachments(upload)
-authorized_uploads(upload) - FileHelper.images
+authorized_extensions(upload) - FileHelper.images
end

+def authorizes_all_extensions?(upload)
+if upload.user&.staff?
+return true if SiteSetting.authorized_extensions_for_staff.include?("*")
+end
+extensions = upload.for_theme ? SiteSetting.theme_authorized_extensions : SiteSetting.authorized_extensions
+extensions.include?("*")
+end
+
-def authorized_extensions(upload, extension, extensions)
+def extension_authorized?(upload, extension, extensions)
return true if authorizes_all_extensions?(upload)

+staff_extensions = Set.new
+if upload.user&.staff?
+staff_extensions = extensions_to_set(SiteSetting.authorized_extensions_for_staff)
+return true if staff_extensions.include?(extension.downcase)
+end
+
unless authorized = extensions.include?(extension.downcase)
-message = I18n.t("upload.unauthorized", authorized_extensions: extensions.to_a.join(", "))
+message = I18n.t("upload.unauthorized", authorized_extensions: (extensions | staff_extensions).to_a.join(", "))
upload.errors.add(:original_filename, message)
end

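Note: extensions_to_set normalizes a pipe-delimited site setting into a set of bare, lower-cased extensions and drops wildcards (wildcards are handled separately by authorizes_all_extensions?). A small sketch of that normalization; the setting value is illustrative, not a real default.

# Sketch: normalizing an authorized-extensions setting string.
require 'set'
exts = ".JPG|png| gif |*"
normalized = exts.gsub(/[\s\.]+/, "").downcase.split("|").reject { |e| e.include?("*") }.to_set
# => #<Set: {"jpg", "png", "gif"}>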
@@ -5,7 +5,7 @@ module Discourse
MAJOR = 2
MINOR = 0
TINY = 0
-PRE = 'beta2'
+PRE = 'beta3'

STRING = [MAJOR, MINOR, TINY, PRE].compact.join('.')
end