2015-11-24 11 views
6

Próbuję pobrać dane z adresu URL w Pythonie 3.5 (Anaconda) za pomocą biblioteki requests — nie mogę załadować żadnego linku: „Remote end closed connection without response”.

# Minimal reproduction: fetch the NCBI E-utilities "einfo" endpoint with requests.
# NOTE(review): bare `r.content` on the last line only displays output in a
# REPL/notebook; in a plain script it is a no-op expression.
import requests 
url ='http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi' 
r = requests.get(url)  # raises ConnectionError here (see traceback below) 
r.content 

Adres URL otwiera się bez problemów w przeglądarce.

Ale dostaję błąd (dla tego adresu URL i każdego innego, którego próbuję):

-------------------------------------------------------------------------- TypeError Traceback (most recent call last) C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw) 375 try: # Python 2.7, use buffering of HTTP responses --> 376 httplib_response = conn.getresponse(buffering=True) 377 except TypeError: # Python 2.6 and older

TypeError: getresponse() got an unexpected keyword argument 'buffering'

During handling of the above exception, another exception occurred:

RemoteDisconnected Traceback (most recent call last) C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw) 558 timeout=timeout_obj, --> 559 body=body, headers=headers) 560

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw) 377 except TypeError: # Python 2.6 and older --> 378 httplib_response = conn.getresponse() 379 except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Anaconda3\lib\http\client.py in getresponse(self) 1173
try: -> 1174 response.begin() 1175 except ConnectionError:

C:\Anaconda3\lib\http\client.py in begin(self) 281 while True: --> 282 version, status, reason = self._read_status() 283 if status != CONTINUE:

C:\Anaconda3\lib\http\client.py in _read_status(self) 250 # sending a valid response. --> 251 raise RemoteDisconnected("Remote end closed connection without" 252 " response")

RemoteDisconnected: Remote end closed connection without response

During handling of the above exception, another exception occurred:

ProtocolError Traceback (most recent call last) C:\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies) 369 retries=self.max_retries, --> 370 timeout=timeout 371 )

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw) 608 retries = retries.increment(method, url, error=e, _pool=self, --> 609 _stacktrace=sys.exc_info()[2]) 610 retries.sleep()

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\util\retry.py in increment(self, method, url, response, error, _pool, _stacktrace) 244 if read is False: --> 245 raise six.reraise(type(error), error, _stacktrace) 246 elif read is not None:

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\packages\six.py in reraise(tp, value, tb) 308 if value.traceback is not tb: --> 309 raise value.with_traceback(tb) 310 raise value

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw) 558 timeout=timeout_obj, --> 559 body=body, headers=headers) 560

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw) 377 except TypeError: # Python 2.6 and older --> 378 httplib_response = conn.getresponse() 379 except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Anaconda3\lib\http\client.py in getresponse(self) 1173
try: -> 1174 response.begin() 1175 except ConnectionError:

C:\Anaconda3\lib\http\client.py in begin(self) 281 while True: --> 282 version, status, reason = self._read_status() 283 if status != CONTINUE:

C:\Anaconda3\lib\http\client.py in _read_status(self) 250 # sending a valid response. --> 251 raise RemoteDisconnected("Remote end closed connection without" 252 " response")

ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response',))

During handling of the above exception, another exception occurred:

ConnectionError Traceback (most recent call last) in() 3 import requests 4 url =' http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi ' ----> 5 r = requests.get(url) 6 r.content

C:\Anaconda3\lib\site-packages\requests\api.py in get(url, params, **kwargs) 67 68 kwargs.setdefault('allow_redirects', True) ---> 69 return request('get', url, params=params, **kwargs) 70 71

C:\Anaconda3\lib\site-packages\requests\api.py in request(method, url, **kwargs) 48 49 session = sessions.Session() ---> 50 response = session.request(method=method, url=url, **kwargs) 51 # By explicitly closing the session, we avoid leaving sockets open which 52 # can trigger a ResourceWarning in some cases, and look like a memory leak

C:\Anaconda3\lib\site-packages\requests\sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json) 466 } 467 send_kwargs.update(settings) --> 468 resp = self.send(prep, **send_kwargs) 469 470 return resp

C:\Anaconda3\lib\site-packages\requests\sessions.py in send(self, request, **kwargs) 574 575 # Send the request --> 576 r = adapter.send(request, **kwargs) 577 578 # Total elapsed time of the request (approximately)

C:\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies) 410 411 except (ProtocolError, socket.error) as err: --> 412 raise ConnectionError(err, request=request) 413 414 except MaxRetryError as e:

ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response',))

Odpowiedz

8

Rozwiązanie: użyj sesji requests z ponawianiem prób (retry) zamontowanym na adapterze HTTP.

# Fetch the NCBI E-utilities "einfo" endpoint through a Session whose
# transport adapter retries failed connections instead of giving up at once.
import requests 

MAX_RETRIES = 20 
url ='http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi' 

# One retrying adapter, mounted for both schemes: max_retries covers failed
# DNS lookups, socket connections and connection timeouts (not requests
# whose data already reached the server).
session = requests.Session() 
retry_adapter = requests.adapters.HTTPAdapter(max_retries=MAX_RETRIES) 
for scheme in ('https://', 'http://'): 
    session.mount(scheme, retry_adapter) 

r = session.get(url) 
print(r.content) 
+11

Czy możesz wyjaśnić kod i co dokładnie robi? –

+2

max_retries - maksymalna liczba ponownych prób, które każde połączenie powinno podjąć. Uwaga: odnosi się to tylko do nieudanych wyszukiwań DNS, połączeń gniazd i limitów czasu połączeń, nigdy do żądań, w których dane trafiły na serwer. Domyślnie żądania nie ponawiają nieudanych połączeń. Jeśli potrzebujesz szczegółowej kontroli warunków, w których ponawiamy żądanie, zaimportuj klasę ponawiania Urllib3 i przekaż ją zamiast tego. źródło: http://docs.python-requests.org/en/master/api/#requests.adapters.HTTPAdapter – Baks

Powiązane problemy