The full dataset viewer is not available; only a preview of the rows is shown below.
Couldn't get the size of the external files in `_split_generators` because a request failed: HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Read timed out. (read timeout=10.0) Please consider moving your data files into this dataset repository instead (e.g. inside a data/ folder).
Error code:   ExternalFilesSizeRequestTimeoutError
Exception:    ReadTimeout
Message:      HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Read timed out. (read timeout=10.0)
Traceback:    Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 466, in _make_request
                  six.raise_from(e, None)
                File "<string>", line 3, in raise_from
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 461, in _make_request
                  httplib_response = conn.getresponse()
                File "/usr/local/lib/python3.9/http/client.py", line 1377, in getresponse
                  response.begin()
                File "/usr/local/lib/python3.9/http/client.py", line 320, in begin
                  version, status, reason = self._read_status()
                File "/usr/local/lib/python3.9/http/client.py", line 281, in _read_status
                  line = str(self.fp.readline(_MAXLINE + 1), "iso-8859-1")
                File "/usr/local/lib/python3.9/socket.py", line 704, in readinto
                  return self._sock.recv_into(b)
                File "/usr/local/lib/python3.9/ssl.py", line 1242, in recv_into
                  return self.read(nbytes, buffer)
                File "/usr/local/lib/python3.9/ssl.py", line 1100, in read
                  return self._sslobj.read(len, buffer)
              socket.timeout: The read operation timed out
              
              During handling of the above exception, another exception occurred:
              
              Traceback (most recent call last):
                File "/src/services/worker/.venv/lib/python3.9/site-packages/requests/adapters.py", line 486, in send
                  resp = conn.urlopen(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 798, in urlopen
                  retries = retries.increment(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/util/retry.py", line 550, in increment
                  raise six.reraise(type(error), error, _stacktrace)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/packages/six.py", line 770, in reraise
                  raise value
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 714, in urlopen
                  httplib_response = self._make_request(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 468, in _make_request
                  self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/urllib3/connectionpool.py", line 357, in _raise_timeout
                  raise ReadTimeoutError(
              urllib3.exceptions.ReadTimeoutError: HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Read timed out. (read timeout=10.0)
              
              During handling of the above exception, another exception occurred:
              
              Traceback (most recent call last):
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 488, in _is_too_big_from_external_data_files
                  for i, size in enumerate(pool.imap_unordered(get_size, ext_data_files)):
                File "/usr/local/lib/python3.9/multiprocessing/pool.py", line 870, in next
                  raise value
                File "/usr/local/lib/python3.9/multiprocessing/pool.py", line 125, in worker
                  result = (True, func(*args, **kwds))
                File "/src/services/worker/src/worker/job_runners/config/parquet_and_info.py", line 386, in _request_size
                  response = http_head(url, headers=headers, max_retries=3)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/utils/file_utils.py", line 429, in http_head
                  response = _request_with_retry(
                File "/src/services/worker/.venv/lib/python3.9/site-packages/datasets/utils/file_utils.py", line 328, in _request_with_retry
                  response = requests.request(method=method.upper(), url=url, timeout=timeout, **params)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/requests/api.py", line 59, in request
                  return session.request(method=method, url=url, **kwargs)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/requests/sessions.py", line 589, in request
                  resp = self.send(prep, **send_kwargs)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/requests/sessions.py", line 703, in send
                  r = adapter.send(request, **kwargs)
                File "/src/services/worker/.venv/lib/python3.9/site-packages/requests/adapters.py", line 532, in send
                  raise ReadTimeout(e, request=request)
              requests.exceptions.ReadTimeout: HTTPSConnectionPool(host='raw.githubusercontent.com', port=443): Read timed out. (read timeout=10.0)
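
The message above also names the fix: host the data files inside the dataset repository (e.g. in a data/ folder) instead of fetching them from raw.githubusercontent.com, so the viewer never has to size external files over HTTP. Below is a minimal, hypothetical sketch of what such a loading script could look like; the class name and file path are illustrative, not this dataset's actual script.

# Minimal sketch (assumed, not this dataset's real loading script):
# _split_generators resolves a relative in-repo path, so the worker never
# issues the external HEAD request that timed out in the traceback above.
import datasets

_LOCAL_FILES = {"train": "data/train.txt"}  # hypothetical path inside the repo

class LineTextDataset(datasets.GeneratorBasedBuilder):
    def _info(self):
        # One string column named "text", matching the preview below.
        return datasets.DatasetInfo(
            features=datasets.Features({"text": datasets.Value("string")})
        )

    def _split_generators(self, dl_manager):
        # Relative paths are resolved against the dataset repository itself,
        # so no request to raw.githubusercontent.com is needed.
        paths = dl_manager.download(_LOCAL_FILES)
        return [
            datasets.SplitGenerator(
                name=datasets.Split.TRAIN,
                gen_kwargs={"filepath": paths["train"]},
            )
        ]

    def _generate_examples(self, filepath):
        with open(filepath, encoding="utf-8") as f:
            for idx, line in enumerate(f):
                yield idx, {"text": line.rstrip("\n")}

With the files hosted in the repository, sizes can be read directly from the Hub, which avoids the 10-second read timeout entirely.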

text (string)
msg += " as .tar files. See testing/test_data/fake_examples/celeb_a_hq "
adjust_spines(ax, spines=['left','bottom'])
pid['general']['WORKING_DIRECTORY'] = WORKING_DIRECTORY
node: (offset_x + x, offset_y + y)
execution = wps.execute(processid, inputs, output=outputs)
return cls(func, name=name, **attrs)
Teff = np.array((Teff,))
output += value
self.relax_core()
all_rows.append(new_row)
raise RuntimeError("Exception must be of BaseException type")
section = parse_section_name(line)
b = _compress_bytes(obj, level)
request_method = request.META['REQUEST_METHOD']
w_vec = np.zeros(x_n) # A vector of outcome weights
f_log_avg_u += dotprod - stop(dotprod) # Add zeros_like(dot_prod).
use_request_uri=use_request_uri,
subadres.huisnummer_id is None
SdotST = np.einsum('it,jt->ij', sinv, sinv)
function = function_code_to_function_map[function_code]
tr('Clothing')
import_path = os.path.join(import_root, filepath)
fields['channel'] = channel
msg = ExtendedReceive.from_raw_message(msgraw)
mmol_string = r.text
geom = trimesh.creation.box((0.5, 0.5, 0.01))
current_branch = [l for l in lines if l.startswith('* ')][0]
return EncryptedPassportElement(**data)
init_kwargs['long_description'] = str(readme_text)
config = super(MQTTHandler, self).get_default_config_help()
yvals = np.log10(model.recurrence.occur_rates)
failhard=failhard,
r.env.user = username
ret['chunk_count'] = sym[CHUNK_COUNT]
return string.format(self.name)
a = input(prompt).lower()
close = QtCore.QRegExp(close)
file_subgroup.append(file_path2)
ipam_pool = docker.types.IPAMPool(subnet=subnet_cidr)
raise ValueError("Must specify a polygon to fill points with")
v = _find_value(key, item)
lines = [l.strip() for l in output if l.strip()]
gr.send({'text': json.dumps(knock)})
figsize = (5 + (1 if num_cax > 0 else 0), 5)
return y - savgol_filter(y, win, 2) + np.nanmedian(y)
K : ndarray(n, k)
pairs_df = pairs_df[pairs_df['index_x'] > pairs_df['index_y']]
row = [i, u'en']
fpos = fpos + int(header['block9']['blocklength'])
self.log.debug("ConfigMap %s deleted", cm_key)
raise DailymotionClientError('Missing username or password in grant info for password grant type.')
pars = page.data.get('extext')
labels = self._varargs_as_labels(column_or_columns)
self.signed_in.set('\n'.join(sorted(names)))
run.url = url
youtube_id : str
StackName=stack_name, NextToken=next_token
registry_value = registry_key.GetValueByName('F')
admin_request_announcement_email(request, form, ann)
startp=startp_gls,
reader = csv.reader(eol_checker, skipinitialspace=True)
return NoCountPage(items, page_number, page_size, has_next)
log.debug("No task '%s' registration action for '%s' event", self._name, ev_type)
kwargs["task_class"] = ScrTask
tuning_count += step_method.tune(verbose=self.verbose)
matching_full_hashes = set()
config = copy.deepcopy(defaults)
i = int(s, 10)
parameters = request.get_nonoauth_parameters()
return child_message
ratio = width * 1. / bpwidth
plt.plot([-fb-(n+1)*fs, -fb-(n+1)*fs],line_ampl,'--g', linewidth=2)
self.resize(event.width, event.height)
batch = tz.get_in(("metadata", "batch"), data)
logger.debug('command websocket connected to %s', "ws://{}:{}".format(self.ip, self.port));
gs_blocks_dir = os.path.join(data_dir, GOLD_STANDARD_BLOCKS_DIRNAME)
rtn_format="json",
sfile = pmag.get_named_arg("-f", reqd=True)
obsen_lam = []
lumi_list = self.dbsUtils2.decodeLumiIntervals(lumi_list)
b.append(BedLine(m.bedline))
end_mark = attr_node.yaml_node.end_mark
print('There are orbit breaks right next to each other')
raise Dump2PolarionException("No results read from CSV file '{}'".format(csv_file))
cte = str(msg.get('content-transfer-encoding', '')).lower()
out_f.write(content)
print_warnings : bool
sub = r.pubsub(ignore_subscribe_messages=True)
return course, course.get_task(taskid)
assert self.pay_partner_id, "PAY_PARTNER_ID IS EMPTY"
logging.info('Done.')
print('ERROR IN DELETE')
savename = savename[:-4] + "_" + extra_title + ".png"
rs = np.linalg.norm(self.coords_for_computations, axis=1)
userena.send_activation_email()
supersnps[hslice:hslice+optim, :, :] = arr
break
g = rbh_network(id2desc, rbh, file_name = 'rbh.network.edges.txt')
item = (x, tuple(deps))
D = call_MediaInfo(file_name, mediainfo_path)
End of preview.