Getting `URLError` with `StormEvent()`
FariborzDaneshvar-NOAA opened this issue · comments
@SorooshMani-NOAA , the StormEvent()
function with given storm name and year (i.e. StormEvent('florence', 2018)) is failing with ConnectionRefusedError
! I've been using it for the last two weeks without an issue, but today it's giving me a headache.
Has there been a change to the function, or might there be an issue on my end?
Here is the full message:
---------------------------------------------------------------------------
ConnectionRefusedError Traceback (most recent call last)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/urllib/request.py:1348, in AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1347 try:
-> 1348 h.request(req.get_method(), req.selector, req.data, headers,
1349 encode_chunked=req.has_header('Transfer-encoding'))
1350 except OSError as err: # timeout error
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/http/client.py:1283, in HTTPConnection.request(self, method, url, body, headers, encode_chunked)
1282 """Send a complete request to the server."""
-> 1283 self._send_request(method, url, body, headers, encode_chunked)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/http/client.py:1329, in HTTPConnection._send_request(self, method, url, body, headers, encode_chunked)
1328 body = _encode(body, 'body')
-> 1329 self.endheaders(body, encode_chunked=encode_chunked)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/http/client.py:1278, in HTTPConnection.endheaders(self, message_body, encode_chunked)
1277 raise CannotSendHeader()
-> 1278 self._send_output(message_body, encode_chunked=encode_chunked)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/http/client.py:1038, in HTTPConnection._send_output(self, message_body, encode_chunked)
1037 del self._buffer[:]
-> 1038 self.send(msg)
1040 if message_body is not None:
1041
1042 # create a consistent interface to message_body
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/http/client.py:976, in HTTPConnection.send(self, data)
975 if self.auto_open:
--> 976 self.connect()
977 else:
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/http/client.py:1448, in HTTPSConnection.connect(self)
1446 "Connect to a host on a given (SSL) port."
-> 1448 super().connect()
1450 if self._tunnel_host:
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/http/client.py:942, in HTTPConnection.connect(self)
941 sys.audit("http.client.connect", self, self.host, self.port)
--> 942 self.sock = self._create_connection(
943 (self.host,self.port), self.timeout, self.source_address)
944 # Might fail in OSs that don't implement TCP_NODELAY
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/socket.py:845, in create_connection(address, timeout, source_address)
844 try:
--> 845 raise err
846 finally:
847 # Break explicitly a reference cycle
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/socket.py:833, in create_connection(address, timeout, source_address)
832 sock.bind(source_address)
--> 833 sock.connect(sa)
834 # Break explicitly a reference cycle
ConnectionRefusedError: [Errno 111] Connection refused
During handling of the above exception, another exception occurred:
URLError Traceback (most recent call last)
Cell In[8], line 1
----> 1 StormEvent(storm_name, storm_year)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/stormevents/stormevent.py:76, in StormEvent.__init__(self, name, year, start_date, end_date)
47 def __init__(
48 self,
49 name: str,
(...)
52 end_date: datetime = None,
53 ):
54 """
55 :param name: storm name
56 :param year: storm year
(...)
73 StormEvent(name='IDA', year=2021, start_date=Timestamp('2021-08-27 18:00:00'), end_date=Timestamp('2021-08-29 18:00:00'))
74 """
---> 76 storms = nhc_storms(year=year)
77 storms = storms[storms["name"].str.contains(name.upper())]
78 if len(storms) > 0:
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/stormevents/nhc/storms.py:64, in nhc_storms(year)
39 url = "https://ftp.nhc.noaa.gov/atcf/index/storm_list.txt"
41 columns = [
42 "name",
43 "basin",
(...)
62 "nhc_code",
63 ]
---> 64 storms = pandas.read_csv(
65 url,
66 header=0,
67 names=columns,
68 parse_dates=["start_date", "end_date"],
69 date_parser=lambda x: pandas.to_datetime(x.strip(), format="%Y%m%d%H")
70 if x.strip() != "9999999999"
71 else numpy.nan,
72 )
74 storms = storms.astype(
75 {"start_date": "datetime64[s]", "end_date": "datetime64[s]"},
76 copy=False,
77 )
79 storms = storms[
80 [
81 "nhc_code",
(...)
90 ]
91 ]
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/pandas/io/parsers/readers.py:912, in read_csv(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, skipfooter, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, date_format, dayfirst, cache_dates, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, doublequote, escapechar, comment, encoding, encoding_errors, dialect, on_bad_lines, delim_whitespace, low_memory, memory_map, float_precision, storage_options, dtype_backend)
899 kwds_defaults = _refine_defaults_read(
900 dialect,
901 delimiter,
(...)
908 dtype_backend=dtype_backend,
909 )
910 kwds.update(kwds_defaults)
--> 912 return _read(filepath_or_buffer, kwds)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/pandas/io/parsers/readers.py:577, in _read(filepath_or_buffer, kwds)
574 _validate_names(kwds.get("names", None))
576 # Create the parser.
--> 577 parser = TextFileReader(filepath_or_buffer, **kwds)
579 if chunksize or iterator:
580 return parser
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/pandas/io/parsers/readers.py:1407, in TextFileReader.__init__(self, f, engine, **kwds)
1404 self.options["has_index_names"] = kwds["has_index_names"]
1406 self.handles: IOHandles | None = None
-> 1407 self._engine = self._make_engine(f, self.engine)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/pandas/io/parsers/readers.py:1661, in TextFileReader._make_engine(self, f, engine)
1659 if "b" not in mode:
1660 mode += "b"
-> 1661 self.handles = get_handle(
1662 f,
1663 mode,
1664 encoding=self.options.get("encoding", None),
1665 compression=self.options.get("compression", None),
1666 memory_map=self.options.get("memory_map", False),
1667 is_text=is_text,
1668 errors=self.options.get("encoding_errors", "strict"),
1669 storage_options=self.options.get("storage_options", None),
1670 )
1671 assert self.handles is not None
1672 f = self.handles.handle
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/pandas/io/common.py:716, in get_handle(path_or_buf, mode, encoding, compression, memory_map, is_text, errors, storage_options)
713 codecs.lookup_error(errors)
715 # open URLs
--> 716 ioargs = _get_filepath_or_buffer(
717 path_or_buf,
718 encoding=encoding,
719 compression=compression,
720 mode=mode,
721 storage_options=storage_options,
722 )
724 handle = ioargs.filepath_or_buffer
725 handles: list[BaseBuffer]
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/pandas/io/common.py:368, in _get_filepath_or_buffer(filepath_or_buffer, encoding, compression, mode, storage_options)
366 # assuming storage_options is to be interpreted as headers
367 req_info = urllib.request.Request(filepath_or_buffer, headers=storage_options)
--> 368 with urlopen(req_info) as req:
369 content_encoding = req.headers.get("Content-Encoding", None)
370 if content_encoding == "gzip":
371 # Override compression based on Content-Encoding header
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/site-packages/pandas/io/common.py:270, in urlopen(*args, **kwargs)
264 """
265 Lazy-import wrapper for stdlib urlopen, as that imports a big chunk of
266 the stdlib.
267 """
268 import urllib.request
--> 270 return urllib.request.urlopen(*args, **kwargs)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/urllib/request.py:216, in urlopen(url, data, timeout, cafile, capath, cadefault, context)
214 else:
215 opener = _opener
--> 216 return opener.open(url, data, timeout)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/urllib/request.py:519, in OpenerDirector.open(self, fullurl, data, timeout)
516 req = meth(req)
518 sys.audit('urllib.Request', req.full_url, req.data, req.headers, req.get_method())
--> 519 response = self._open(req, data)
521 # post-process response
522 meth_name = protocol+"_response"
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/urllib/request.py:536, in OpenerDirector._open(self, req, data)
533 return result
535 protocol = req.type
--> 536 result = self._call_chain(self.handle_open, protocol, protocol +
537 '_open', req)
538 if result:
539 return result
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/urllib/request.py:496, in OpenerDirector._call_chain(self, chain, kind, meth_name, *args)
494 for handler in handlers:
495 func = getattr(handler, meth_name)
--> 496 result = func(*args)
497 if result is not None:
498 return result
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/urllib/request.py:1391, in HTTPSHandler.https_open(self, req)
1390 def https_open(self, req):
-> 1391 return self.do_open(http.client.HTTPSConnection, req,
1392 context=self._context, check_hostname=self._check_hostname)
File /lustre/Fariborz/miniconda3/envs/nhc_colab/lib/python3.10/urllib/request.py:1351, in AbstractHTTPHandler.do_open(self, http_class, req, **http_conn_args)
1348 h.request(req.get_method(), req.selector, req.data, headers,
1349 encode_chunked=req.has_header('Transfer-encoding'))
1350 except OSError as err: # timeout error
-> 1351 raise URLError(err)
1352 r = h.getresponse()
1353 except:
URLError: <urlopen error [Errno 111] Connection refused>
I'm getting the same error on both NHC_COLAB_2
and the local system
@FariborzDaneshvar-NOAA sometimes the CO-OPS certificate expires or the server goes down. It seems to be one of those instances. Let's try again later today and see what happens.
Yes, it's working. Thanks!