2
0
Fork 0
mirror of https://github.com/discourse/discourse.git synced 2025-09-07 12:02:53 +08:00

FIX: use allowlist and blocklist terminology (#10209)

This PR renames whitelist to allowlist and blacklist to blocklist.
This commit is contained in:
Krzysztof Kotlarek 2020-07-27 10:23:54 +10:00 committed by GitHub
parent 5077cf52fd
commit e0d9232259
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
130 changed files with 676 additions and 570 deletions

View file

@ -70,13 +70,13 @@ describe CrawlerDetection do
end
describe 'allow_crawler?' do
it 'returns true if whitelist and blacklist are blank' do
it 'returns true if allowlist and blocklist are blank' do
expect(CrawlerDetection.allow_crawler?('Googlebot/2.1 (+http://www.google.com/bot.html)')).to eq(true)
end
context 'whitelist is set' do
context 'allowlist is set' do
before do
SiteSetting.whitelisted_crawler_user_agents = 'Googlebot|Twitterbot'
SiteSetting.allowed_crawler_user_agents = 'Googlebot|Twitterbot'
end
it 'returns true for matching user agents' do
@ -91,20 +91,20 @@ describe CrawlerDetection do
expect(CrawlerDetection.allow_crawler?('')).to eq(false)
end
context 'and blacklist is set' do
context 'and blocklist is set' do
before do
SiteSetting.blacklisted_crawler_user_agents = 'Googlebot-Image'
SiteSetting.blocked_crawler_user_agents = 'Googlebot-Image'
end
it 'ignores the blacklist' do
it 'ignores the blocklist' do
expect(CrawlerDetection.allow_crawler?('Googlebot-Image/1.0')).to eq(true)
end
end
end
context 'blacklist is set' do
context 'blocklist is set' do
before do
SiteSetting.blacklisted_crawler_user_agents = 'Googlebot|Twitterbot'
SiteSetting.blocked_crawler_user_agents = 'Googlebot|Twitterbot'
end
it 'returns true for crawlers that do not match' do
@ -122,47 +122,47 @@ describe CrawlerDetection do
end
describe 'is_blocked_crawler?' do
it 'is false if user agent is a crawler and no whitelist or blacklist is defined' do
it 'is false if user agent is a crawler and no allowlist or blocklist is defined' do
expect(CrawlerDetection.is_blocked_crawler?('Twitterbot')).to eq(false)
end
it 'is false if user agent is not a crawler and no whitelist or blacklist is defined' do
it 'is false if user agent is not a crawler and no allowlist or blocklist is defined' do
expect(CrawlerDetection.is_blocked_crawler?('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')).to eq(false)
end
it 'is true if user agent is a crawler and is not whitelisted' do
SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
it 'is true if user agent is a crawler and is not allowlisted' do
SiteSetting.allowed_crawler_user_agents = 'Googlebot'
expect(CrawlerDetection.is_blocked_crawler?('Twitterbot')).to eq(true)
end
it 'is false if user agent is not a crawler and there is a whitelist' do
SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
it 'is false if user agent is not a crawler and there is an allowlist' do
SiteSetting.allowed_crawler_user_agents = 'Googlebot'
expect(CrawlerDetection.is_blocked_crawler?('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')).to eq(false)
end
it 'is true if user agent is a crawler and is blacklisted' do
SiteSetting.blacklisted_crawler_user_agents = 'Twitterbot'
it 'is true if user agent is a crawler and is blocklisted' do
SiteSetting.blocked_crawler_user_agents = 'Twitterbot'
expect(CrawlerDetection.is_blocked_crawler?('Twitterbot')).to eq(true)
end
it 'is true if user agent is a crawler and is not blacklisted' do
SiteSetting.blacklisted_crawler_user_agents = 'Twitterbot'
it 'is false if user agent is a crawler and is not blocklisted' do
SiteSetting.blocked_crawler_user_agents = 'Twitterbot'
expect(CrawlerDetection.is_blocked_crawler?('Googlebot')).to eq(false)
end
it 'is false if user agent is not a crawler and blacklist is defined' do
SiteSetting.blacklisted_crawler_user_agents = 'Mozilla'
it 'is false if user agent is not a crawler and blocklist is defined' do
SiteSetting.blocked_crawler_user_agents = 'Mozilla'
expect(CrawlerDetection.is_blocked_crawler?('Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36')).to eq(false)
end
it 'is true if user agent is missing and whitelist is defined' do
SiteSetting.whitelisted_crawler_user_agents = 'Googlebot'
it 'is true if user agent is missing and allowlist is defined' do
SiteSetting.allowed_crawler_user_agents = 'Googlebot'
expect(CrawlerDetection.is_blocked_crawler?('')).to eq(true)
expect(CrawlerDetection.is_blocked_crawler?(nil)).to eq(true)
end
it 'is false if user agent is missing and blacklist is defined' do
SiteSetting.blacklisted_crawler_user_agents = 'Googlebot'
it 'is false if user agent is missing and blocklist is defined' do
SiteSetting.blocked_crawler_user_agents = 'Googlebot'
expect(CrawlerDetection.is_blocked_crawler?('')).to eq(false)
expect(CrawlerDetection.is_blocked_crawler?(nil)).to eq(false)
end