
I'm trying to retrieve data from a URL with Anaconda Python 3.5, but requests can't load any URL: 'Remote end closed connection without response'.

import requests

url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'
r = requests.get(url)
r.content

The URL opens without any problem in a browser.

But I get an error (for this URL and any other URL I try):

---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw)
    375         try:  # Python 2.7, use buffering of HTTP responses
--> 376             httplib_response = conn.getresponse(buffering=True)
    377         except TypeError:  # Python 2.6 and older

TypeError: getresponse() got an unexpected keyword argument 'buffering'

During handling of the above exception, another exception occurred:

RemoteDisconnected                        Traceback (most recent call last)
C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
    558                                           timeout=timeout_obj,
--> 559                                           body=body, headers=headers)
    560

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw)
    377         except TypeError:  # Python 2.6 and older
--> 378             httplib_response = conn.getresponse()
    379         except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Anaconda3\lib\http\client.py in getresponse(self)
   1173             try:
-> 1174                 response.begin()
   1175             except ConnectionError:

C:\Anaconda3\lib\http\client.py in begin(self)
    281         while True:
--> 282             version, status, reason = self._read_status()
    283             if status != CONTINUE:

C:\Anaconda3\lib\http\client.py in _read_status(self)
    250             # sending a valid response.
--> 251             raise RemoteDisconnected("Remote end closed connection without"
    252                                      " response")

RemoteDisconnected: Remote end closed connection without response

During handling of the above exception, another exception occurred:

ProtocolError                             Traceback (most recent call last)
C:\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    369                     retries=self.max_retries,
--> 370                     timeout=timeout
    371                 )

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
    608             retries = retries.increment(method, url, error=e, _pool=self,
--> 609                                         _stacktrace=sys.exc_info()[2])
    610             retries.sleep()

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\util\retry.py in increment(self, method, url, response, error, _pool, _stacktrace)
    244         if read is False:
--> 245             raise six.reraise(type(error), error, _stacktrace)
    246         elif read is not None:

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\packages\six.py in reraise(tp, value, tb)
    308             if value.__traceback__ is not tb:
--> 309                 raise value.with_traceback(tb)
    310             raise value

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in urlopen(self, method, url, body, headers, retries, redirect, assert_same_host, timeout, pool_timeout, release_conn, **response_kw)
    558                                           timeout=timeout_obj,
--> 559                                           body=body, headers=headers)
    560

C:\Anaconda3\lib\site-packages\requests\packages\urllib3\connectionpool.py in _make_request(self, conn, method, url, timeout, **httplib_request_kw)
    377         except TypeError:  # Python 2.6 and older
--> 378             httplib_response = conn.getresponse()
    379         except (SocketTimeout, BaseSSLError, SocketError) as e:

C:\Anaconda3\lib\http\client.py in getresponse(self)
   1173             try:
-> 1174                 response.begin()
   1175             except ConnectionError:

C:\Anaconda3\lib\http\client.py in begin(self)
    281         while True:
--> 282             version, status, reason = self._read_status()
    283             if status != CONTINUE:

C:\Anaconda3\lib\http\client.py in _read_status(self)
    250             # sending a valid response.
--> 251             raise RemoteDisconnected("Remote end closed connection without"
    252                                      " response")

ProtocolError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response',))

During handling of the above exception, another exception occurred:

ConnectionError                           Traceback (most recent call last)
<ipython-input> in <module>()
      3 import requests
      4 url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'
----> 5 r = requests.get(url)
      6 r.content

C:\Anaconda3\lib\site-packages\requests\api.py in get(url, params, **kwargs)
     67
     68     kwargs.setdefault('allow_redirects', True)
---> 69     return request('get', url, params=params, **kwargs)
     70
     71

C:\Anaconda3\lib\site-packages\requests\api.py in request(method, url, **kwargs)
     48
     49     session = sessions.Session()
---> 50     response = session.request(method=method, url=url, **kwargs)
     51     # By explicitly closing the session, we avoid leaving sockets open which
     52     # can trigger a ResourceWarning in some cases, and look like a memory leak

C:\Anaconda3\lib\site-packages\requests\sessions.py in request(self, method, url, params, data, headers, cookies, files, auth, timeout, allow_redirects, proxies, hooks, stream, verify, cert, json)
    466         }
    467         send_kwargs.update(settings)
--> 468         resp = self.send(prep, **send_kwargs)
    469
    470         return resp

C:\Anaconda3\lib\site-packages\requests\sessions.py in send(self, request, **kwargs)
    574
    575         # Send the request
--> 576         r = adapter.send(request, **kwargs)
    577
    578         # Total elapsed time of the request (approximately)

C:\Anaconda3\lib\site-packages\requests\adapters.py in send(self, request, stream, timeout, verify, cert, proxies)
    410
    411         except (ProtocolError, socket.error) as err:
--> 412             raise ConnectionError(err, request=request)
    413
    414         except MaxRetryError as e:

ConnectionError: ('Connection aborted.', RemoteDisconnected('Remote end closed connection without response',))

Answer


Use a requests Session with retries.

import requests

MAX_RETRIES = 20
url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'

# Mount an HTTPAdapter with retries on the session, so failed
# connections are retried instead of raising on the first
# RemoteDisconnected error.
session = requests.Session()
adapter = requests.adapters.HTTPAdapter(max_retries=MAX_RETRIES)
session.mount('https://', adapter)
session.mount('http://', adapter)

r = session.get(url)
print(r.content)
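The retry policy has to live on an explicit Session: a bare requests.get() builds a fresh Session for every call, so there is nowhere to attach a custom adapter. Mounting the adapter for both the 'http://' and 'https://' prefixes makes the policy apply regardless of scheme.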

Can you explain the code and what exactly it does? –


max_retries - The maximum number of retries each connection should attempt. Note, this applies only to failed DNS lookups, socket connections and connection timeouts, never to requests where data has made it to the server. By default, Requests does not retry failed connections. If you need granular control over the conditions under which we retry a request, import urllib3's Retry class and pass that instead. Source: http://docs.python-requests.org/en/master/api/#requests.adapters.HTTPAdapter – Baks
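For that granular control, here is a minimal sketch of passing a Retry object instead of a plain integer. The retry count, backoff factor, and status codes below are illustrative choices, not requirements, and the import uses the urllib3 copy vendored inside requests (as in the traceback above); with a standalone urllib3 the import would be from urllib3.util.retry instead.

import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3.util.retry import Retry

url = 'http://eutils.ncbi.nlm.nih.gov/entrez/eutils/einfo.fcgi'

# Retry up to 5 times with exponential backoff (0.5 s, 1 s, 2 s, ...),
# and also retry when the server answers with one of these status codes.
retry = Retry(total=5, backoff_factor=0.5,
              status_forcelist=[500, 502, 503, 504])

session = requests.Session()
adapter = HTTPAdapter(max_retries=retry)
session.mount('https://', adapter)
session.mount('http://', adapter)

r = session.get(url, timeout=10)
print(r.status_code)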
