Hi everyone,
I'm trying to run something in a Python notebook and I'm stuck at one point with an error.
The code:
[codebox=python file=Unbenannt.txt]def load_data():
    from GPy.util.datasets import cmu_mocap
    train_motions = ['01', '02', '03', '04',  # walking
                     '17', '18', '19', '20']  # running
    test_motions = ['05', '06', '07', '08', '21', '22', '23', '24']
    data = cmu_mocap('35', train_motions, test_motions, sample_every=4, data_set='cmu_mocap')
    return data

data = load_data()
[/code]
The error is:
[codebox=python file=Unbenannt.txt]
gaierror                                  Traceback (most recent call last)
C:\ProgramData\Anaconda3\lib\urllib\request.py in do_open(self, http_class, req, **http_conn_args)
   1317                 h.request(req.get_method(), req.selector, req.data, headers,
-> 1318                           encode_chunked=req.has_header('Transfer-encoding'))
   1319             except OSError as err: # timeout error
C:\ProgramData\Anaconda3\lib\http\client.py in request(self, method, url, body, headers, encode_chunked)
   1238         """Send a complete request to the server."""
-> 1239         self._send_request(method, url, body, headers, encode_chunked)
   1240 
C:\ProgramData\Anaconda3\lib\http\client.py in _send_request(self, method, url, body, headers, encode_chunked)
   1284             body = _encode(body, 'body')
-> 1285         self.endheaders(body, encode_chunked=encode_chunked)
   1286 
C:\ProgramData\Anaconda3\lib\http\client.py in endheaders(self, message_body, encode_chunked)
   1233             raise CannotSendHeader()
-> 1234         self._send_output(message_body, encode_chunked=encode_chunked)
   1235 
C:\ProgramData\Anaconda3\lib\http\client.py in _send_output(self, message_body, encode_chunked)
   1025         del self._buffer[:]
-> 1026         self.send(msg)
   1027 
C:\ProgramData\Anaconda3\lib\http\client.py in send(self, data)
    963             if self.auto_open:
--> 964                 self.connect()
    965             else:
C:\ProgramData\Anaconda3\lib\http\client.py in connect(self)
    935         self.sock = self._create_connection(
--> 936             (self.host,self.port), self.timeout, self.source_address)
    937         self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
C:\ProgramData\Anaconda3\lib\socket.py in create_connection(address, timeout, source_address)
    703     err = None
--> 704     for res in getaddrinfo(host, port, 0, SOCK_STREAM):
    705         af, socktype, proto, canonname, sa = res
C:\ProgramData\Anaconda3\lib\socket.py in getaddrinfo(host, port, family, type, proto, flags)
    742     addrlist = []
--> 743     for res in _socket.getaddrinfo(host, port, family, type, proto, flags):
    744         af, socktype, proto, canonname, sa = res
gaierror: [Errno 11004] getaddrinfo failed
During handling of the above exception, another exception occurred:
URLError                                  Traceback (most recent call last)
<ipython-input-6-fafc48077a51> in <module>()
      1 #data = load_data()
----> 2 data = load_data()
<ipython-input-5-e606ad698102> in load_data()
      4                 '17', '18', '19', '20'] # running
      5     test_motions = ['05','06','07','08','21','22','23','24']
----> 6     data = cmu_mocap('35', train_motions, test_motions, sample_every=4, data_set='cmu_mocap')
      7     return data
      8 
C:\ProgramData\Anaconda3\lib\site-packages\GPy\util\datasets.py in cmu_mocap(subject, train_motions, test_motions, sample_every, data_set)
   1422     data_resources[data_set]['urls'] = resource['urls']
   1423     if resource['urls']:
-> 1424         download_data(data_set)
   1425 
   1426     skel = GPy.util.mocap.acclaim_skeleton(os.path.join(subject_dir, subject + '.asf'))
C:\ProgramData\Anaconda3\lib\site-packages\GPy\util\datasets.py in download_data(dataset_name)
    223     for url, files, save_names, suffices in itertools.zip_longest(*zip_urls, fillvalue=[]):
    224         for f, save_name, suffix in itertools.zip_longest(files, save_names, suffices, fillvalue=None):
--> 225             download_url(os.path.join(url,f), dataset_name, save_name, suffix=suffix)
    226 
    227     return True
C:\ProgramData\Anaconda3\lib\site-packages\GPy\util\datasets.py in download_url(url, store_directory, save_name, messages, suffix)
    128         os.makedirs(dir_name)
    129     try:
--> 130         response = urlopen(url+suffix)
    131     except URLError as e:
    132         if not hasattr(e, "code"):
C:\ProgramData\Anaconda3\lib\urllib\request.py in urlopen(url, data, timeout, cafile, capath, cadefault, context)
    221     else:
    222         opener = _opener
--> 223     return opener.open(url, data, timeout)
    224 
    225 def install_opener(opener):
C:\ProgramData\Anaconda3\lib\urllib\request.py in open(self, fullurl, data, timeout)
    524             req = meth(req)
    525 
--> 526         response = self._open(req, data)
    527 
    528         # post-process response
C:\ProgramData\Anaconda3\lib\urllib\request.py in _open(self, req, data)
    542         protocol = req.type
    543         result = self._call_chain(self.handle_open, protocol, protocol +
--> 544                                   '_open', req)
    545         if result:
    546             return result
C:\ProgramData\Anaconda3\lib\urllib\request.py in _call_chain(self, chain, kind, meth_name, *args)
    502         for handler in handlers:
    503             func = getattr(handler, meth_name)
--> 504             result = func(*args)
    505             if result is not None:
    506                 return result
C:\ProgramData\Anaconda3\lib\urllib\request.py in http_open(self, req)
   1344 
   1345     def http_open(self, req):
-> 1346         return self.do_open(http.client.HTTPConnection, req)
   1347 
   1348     http_request = AbstractHTTPHandler.do_request_
C:\ProgramData\Anaconda3\lib\urllib\request.py in do_open(self, http_class, req, **http_conn_args)
   1318                           encode_chunked=req.has_header('Transfer-encoding'))
   1319             except OSError as err: # timeout error
-> 1320                 raise URLError(err)
   1321             r = h.getresponse()
   1322         except:
URLError: <urlopen error [Errno 11004] getaddrinfo failed>[/code]
Does anyone have any advice?
Romaxx wrote: I have now identified the error myself.
Thanks for the feedback, that's nice of you. It would also be interesting to know how you solved it.
The error is caused by the proxy I use to access the Internet.
All I had to do was manually download the data that the code fetches into a directory.
Searching the web, I read that you would have to account for the proxy settings in the code.
Since it is not my code, I preferred the manual download.
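For anyone who wants to keep the automatic download while sitting behind a proxy, here is a minimal sketch of what "accounting for the proxy in the code" could look like with the standard library; the proxy address and port are placeholders, not values from this thread:
[codebox=python file=Unbenannt.txt]import urllib.request

# Placeholder proxy address -- replace with your own proxy host and port.
proxy_handler = urllib.request.ProxyHandler({
    'http':  'http://proxy.example.com:8080',
    'https': 'http://proxy.example.com:8080',
})

# install_opener() makes this opener the default for every urlopen() call,
# which includes the urlopen() inside GPy.util.datasets.download_url.
opener = urllib.request.build_opener(proxy_handler)
urllib.request.install_opener(opener)

data = load_data()  # the download should now go through the proxy
[/code]
Alternatively, urllib also picks up the HTTP_PROXY / HTTPS_PROXY environment variables if they are set before the notebook is started.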
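For the manual route, it helps to know where GPy expects the files. A small, hedged sketch for inspecting that; the names data_path and data_resources are taken from the GPy version shown in the traceback and may differ in other releases:
[codebox=python file=Unbenannt.txt]import os

# These module-level names exist in the GPy version from the traceback;
# other releases may organise this differently.
from GPy.util.datasets import data_path, data_resources

# Directory into which the automatic download would have placed the files
# (and where manually downloaded files should go):
print(os.path.join(data_path, 'cmu_mocap'))

# Registered source information for the data set:
print(data_resources['cmu_mocap'])
[/code]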