Dataset Preview Go to dataset viewer
text (string)
msg += " as .tar files. See testing/test_data/fake_examples/celeb_a_hq "
adjust_spines(ax, spines=['left','bottom'])
pid['general']['WORKING_DIRECTORY'] = WORKING_DIRECTORY
node: (offset_x + x, offset_y + y)
execution = wps.execute(processid, inputs, output=outputs)
return cls(func, name=name, **attrs)
Teff = np.array((Teff,))
output += value
self.relax_core()
all_rows.append(new_row)
raise RuntimeError("Exception must be of BaseException type")
section = parse_section_name(line)
b = _compress_bytes(obj, level)
request_method = request.META['REQUEST_METHOD']
w_vec = np.zeros(x_n) # A vector of outcome weights
f_log_avg_u += dotprod - stop(dotprod) # Add zeros_like(dot_prod).
use_request_uri=use_request_uri,
subadres.huisnummer_id is None
SdotST = np.einsum('it,jt->ij', sinv, sinv)
function = function_code_to_function_map[function_code]
tr('Clothing')
import_path = os.path.join(import_root, filepath)
fields['channel'] = channel
msg = ExtendedReceive.from_raw_message(msgraw)
mmol_string = r.text
geom = trimesh.creation.box((0.5, 0.5, 0.01))
current_branch = [l for l in lines if l.startswith('* ')][0]
return EncryptedPassportElement(**data)
init_kwargs['long_description'] = str(readme_text)
config = super(MQTTHandler, self).get_default_config_help()
yvals = np.log10(model.recurrence.occur_rates)
failhard=failhard,
r.env.user = username
ret['chunk_count'] = sym[CHUNK_COUNT]
return string.format(self.name)
a = input(prompt).lower()
close = QtCore.QRegExp(close)
file_subgroup.append(file_path2)
ipam_pool = docker.types.IPAMPool(subnet=subnet_cidr)
raise ValueError("Must specify a polygon to fill points with")
v = _find_value(key, item)
lines = [l.strip() for l in output if l.strip()]
gr.send({'text': json.dumps(knock)})
figsize = (5 + (1 if num_cax > 0 else 0), 5)
return y - savgol_filter(y, win, 2) + np.nanmedian(y)
K : ndarray(n, k)
pairs_df = pairs_df[pairs_df['index_x'] > pairs_df['index_y']]
row = [i, u'en']
fpos = fpos + int(header['block9']['blocklength'])
self.log.debug("ConfigMap %s deleted", cm_key)
raise DailymotionClientError('Missing username or password in grant info for password grant type.')
pars = page.data.get('extext')
labels = self._varargs_as_labels(column_or_columns)
self.signed_in.set('\n'.join(sorted(names)))
run.url = url
youtube_id : str
StackName=stack_name, NextToken=next_token
registry_value = registry_key.GetValueByName('F')
admin_request_announcement_email(request, form, ann)
startp=startp_gls,
reader = csv.reader(eol_checker, skipinitialspace=True)
return NoCountPage(items, page_number, page_size, has_next)
log.debug("No task '%s' registration action for '%s' event", self._name, ev_type)
kwargs["task_class"] = ScrTask
tuning_count += step_method.tune(verbose=self.verbose)
matching_full_hashes = set()
config = copy.deepcopy(defaults)
i = int(s, 10)
parameters = request.get_nonoauth_parameters()
return child_message
ratio = width * 1. / bpwidth
plt.plot([-fb-(n+1)*fs, -fb-(n+1)*fs],line_ampl,'--g', linewidth=2)
self.resize(event.width, event.height)
batch = tz.get_in(("metadata", "batch"), data)
logger.debug('command websocket connected to %s', "ws://{}:{}".format(self.ip, self.port));
gs_blocks_dir = os.path.join(data_dir, GOLD_STANDARD_BLOCKS_DIRNAME)
rtn_format="json",
sfile = pmag.get_named_arg("-f", reqd=True)
obsen_lam = []
lumi_list = self.dbsUtils2.decodeLumiIntervals(lumi_list)
b.append(BedLine(m.bedline))
end_mark = attr_node.yaml_node.end_mark
print('There are orbit breaks right next to each other')
raise Dump2PolarionException("No results read from CSV file '{}'".format(csv_file))
cte = str(msg.get('content-transfer-encoding', '')).lower()
out_f.write(content)
print_warnings : bool
sub = r.pubsub(ignore_subscribe_messages=True)
return course, course.get_task(taskid)
assert self.pay_partner_id, "PAY_PARTNER_ID IS EMPTY"
logging.info('Done.')
print('ERROR IN DELETE')
savename = savename[:-4] + "_" + extra_title + ".png"
rs = np.linalg.norm(self.coords_for_computations, axis=1)
userena.send_activation_email()
supersnps[hslice:hslice+optim, :, :] = arr
break
g = rbh_network(id2desc, rbh, file_name = 'rbh.network.edges.txt')
item = (x, tuple(deps))
D = call_MediaInfo(file_name, mediainfo_path)
End of preview (truncated to 100 rows)

Dataset of single lines of Python code taken from the CodeSearchNet dataset.

Context

This dataset allows checking the validity of Variational-Autoencoder latent spaces by testing what percentage of random/intermediate latent points can be greedily decoded into valid Python code.

Content

Each row has a parsable line of source code. {'text': '{python source code line}'}

Most lines are under 100 characters, and all are under 125 characters.

Contains 2.6 million lines.

All code is parsable into a Python 3 AST.

Edit dataset card
Evaluate models HF Leaderboard

Models trained or fine-tuned on Fraser/python-lines

Spaces using Fraser/python-lines