
Fix sanitizer parsing link text as HTML when stripping unsupported links (#22558)

Author: Claire, 2023-01-11 22:21:10 +01:00 (committed by GitHub)
Parent: 0c689b9d01
Commit: 15b88a83ab
2 changed files with 5 additions and 1 deletion


@@ -49,7 +49,7 @@ class Sanitize
         end
       end
 
-      current_node.replace(current_node.text) unless LINK_PROTOCOLS.include?(scheme)
+      current_node.replace(Nokogiri::XML::Text.new(current_node.text, current_node.document)) unless LINK_PROTOCOLS.include?(scheme)
     end
 
     UNSUPPORTED_ELEMENTS_TRANSFORMER = lambda do |env|
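Why the one-line change matters: Nokogiri::XML::Node#replace parses a String argument as markup, so the text of a stripped link was being re-interpreted as HTML; passing a Nokogiri::XML::Text node keeps it as plain text. A minimal standalone sketch of the difference (illustrative only, not part of the commit; the escaped <b> markup is an arbitrary example):

    require 'nokogiri'

    # String argument: Nokogiri parses it as markup, so escaped HTML becomes real elements.
    doc  = Nokogiri::HTML.fragment('<a href="foo://bar">Test&lt;b&gt;bold&lt;/b&gt;</a>')
    link = doc.at('a')
    link.replace(link.text)          # link.text is 'Test<b>bold</b>'
    doc.to_html                      # => 'Test<b>bold</b>'

    # Text node argument: the content stays a single text node and is re-escaped on output.
    doc  = Nokogiri::HTML.fragment('<a href="foo://bar">Test&lt;b&gt;bold&lt;/b&gt;</a>')
    link = doc.at('a')
    link.replace(Nokogiri::XML::Text.new(link.text, link.document))
    doc.to_html                      # => 'Test&lt;b&gt;bold&lt;/b&gt;'

The spec change below pins down exactly this behaviour against the sanitizer config.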


@@ -38,6 +38,10 @@ describe Sanitize::Config do
       expect(Sanitize.fragment('<a href="foo://bar">Test</a>', subject)).to eq 'Test'
     end
 
+    it 'does not re-interpret HTML when removing unsupported links' do
+      expect(Sanitize.fragment('<a href="foo://bar">Test&lt;a href="https://example.com"&gt;test&lt;/a&gt;</a>', subject)).to eq 'Test&lt;a href="https://example.com"&gt;test&lt;/a&gt;'
+    end
+
     it 'keeps a with href' do
       expect(Sanitize.fragment('<a href="http://example.com">Test</a>', subject)).to eq '<a href="http://example.com" rel="nofollow noopener noreferrer" target="_blank">Test</a>'
     end