# -*- coding: utf-8 -*-
#
# gPodder - A media aggregator and podcast client
# Copyright (c) 2005-2023 The gPodder Team
#
# gPodder is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# gPodder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

import io

import pytest
import requests.exceptions

from gpodder.feedcore import Fetcher, NEW_LOCATION, Result, UPDATED_FEED


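# Minimal Fetcher subclass that records the arguments passed to parse_feed(),
# so the tests below can assert on what the base class handed over.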
class MyFetcher(Fetcher):
    def parse_feed(self, url, feed_data, data_stream, headers, status, **kwargs):
        return Result(status, {
            'parse_feed': {
                'url': url,
                'feed_data': feed_data,
                'data_stream': data_stream,
                'headers': headers,
                'extra_args': dict(**kwargs),
            },
        })


SIMPLE_RSS = """
<rss>
    <channel>
        <title>Feed Name</title>
        <item>
            <title>Some Episode Title</title>
            <guid>urn:test/ep1</guid>
            <pubDate>Sun, 25 Nov 2018 17:28:03 +0000</pubDate>
        </item>
    </channel>
</rss>
"""


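# The `httpserver` fixture is provided by the pytest-httpserver plugin; it serves
# the canned responses that MyFetcher downloads in the tests below.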
def test_easy(httpserver):
    httpserver.expect_request('/feed').respond_with_data(SIMPLE_RSS, content_type='text/xml')
    res = MyFetcher().fetch(httpserver.url_for('/feed'), custom_key='value')
    assert res.status == UPDATED_FEED
    args = res.feed['parse_feed']
    assert args['headers']['content-type'] == 'text/xml'
    assert isinstance(args['data_stream'], io.BytesIO)
    assert args['data_stream'].getvalue().decode('utf-8') == SIMPLE_RSS
    assert args['url'] == httpserver.url_for('/feed')
    assert args['extra_args']['custom_key'] == 'value'


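# A temporary (302) redirect is expected to be followed transparently, while a
# permanent (301) redirect should be reported as NEW_LOCATION with the final URL.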
def test_redirect(httpserver):
    httpserver.expect_request('/endfeed').respond_with_data(SIMPLE_RSS, content_type='text/xml')
    redir_headers = {
        'Location': '/endfeed',
    }
    httpserver.expect_request('/feed').respond_with_data(status=302, headers=redir_headers)
    httpserver.expect_request('/permanentfeed').respond_with_data(status=301, headers=redir_headers)

    res = MyFetcher().fetch(httpserver.url_for('/feed'))
    assert res.status == UPDATED_FEED
    args = res.feed['parse_feed']
    assert args['headers']['content-type'] == 'text/xml'
    assert isinstance(args['data_stream'], io.BytesIO)
    assert args['data_stream'].getvalue().decode('utf-8') == SIMPLE_RSS
    assert args['url'] == httpserver.url_for('/feed')

    res = MyFetcher().fetch(httpserver.url_for('/permanentfeed'))
    assert res.status == NEW_LOCATION
    assert res.feed == httpserver.url_for('/endfeed')


def test_redirect_loop(httpserver):
    """Verify that feedcore fetching will not loop indefinitely on redirects."""
    redir_headers = {
        'Location': '/feed',  # it loops
    }
    httpserver.expect_request('/feed').respond_with_data(status=302, headers=redir_headers)

    with pytest.raises(requests.exceptions.TooManyRedirects):
        res = MyFetcher().fetch(httpserver.url_for('/feed'))
        # only reached if fetch() unexpectedly succeeds instead of raising
        assert res.status == UPDATED_FEED
        args = res.feed['parse_feed']
        assert args['headers']['content-type'] == 'text/xml'
        assert isinstance(args['data_stream'], io.BytesIO)
        assert args['data_stream'].getvalue().decode('utf-8') == SIMPLE_RSS
        assert args['url'] == httpserver.url_for('/feed')


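# expect_ordered_request() serves the responses in registration order: a 503 first,
# then the feed, so a single fetch() only succeeds if feedcore retries after the
# temporary error.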
def test_temporary_error_retry(httpserver):
    httpserver.expect_ordered_request('/feed').respond_with_data(status=503)
    httpserver.expect_ordered_request('/feed').respond_with_data(SIMPLE_RSS, content_type='text/xml')
    res = MyFetcher().fetch(httpserver.url_for('/feed'))
    assert res.status == UPDATED_FEED
    args = res.feed['parse_feed']
    assert args['headers']['content-type'] == 'text/xml'
    assert args['url'] == httpserver.url_for('/feed')