lines (sequence, lengths 1–383) | raw_lines (sequence, lengths 1–383) | label (sequence, lengths 1–383) | type (sequence, lengths 1–383) |
---|---|---|---|
[
"def FUNC_28(self):...\n",
"VAR_5 = self._makeContext()\n",
"self.assertIs(VAR_5.evaluate('True'), True)\n",
"self.assertIs(VAR_5.evaluate('False'), False)\n",
"self.assertIs(VAR_5.evaluate('nocall: test'), safe_builtins['test'])\n"
] | [
"def test_builtin_in_path_expr(self):...\n",
"ec = self._makeContext()\n",
"self.assertIs(ec.evaluate('True'), True)\n",
"self.assertIs(ec.evaluate('False'), False)\n",
"self.assertIs(ec.evaluate('nocall: test'), safe_builtins['test'])\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_4.route('/answer', methods=['GET', 'POST'])...\n",
"print(VAR_17)\n",
"VAR_20 = request.form.get('option', None)\n",
"if VAR_20 is None or not type(VAR_20) != int:\n",
"return abort(400)\n",
"VAR_20 = int(VAR_20)\n",
"VAR_21 = VAR_17['correct']\n",
"if VAR_21 is None:\n",
"return abort(401)\n",
"VAR_22 = VAR_17['current_munhak']\n",
"if VAR_22 is None:\n",
"return abort(401)\n",
"if VAR_21 == VAR_20:\n",
"VAR_17['quiz_count'] += 1\n",
"if 'quiz_count' not in VAR_17:\n",
"VAR_17['solved_quiz'].append(VAR_22['munhak_seq'])\n",
"VAR_17['quiz_count'] = 0\n",
"if 'solved_quiz' not in VAR_17:\n",
"VAR_17['current_munhak'] = None\n",
"VAR_17['result'] = False\n",
"return 'failed', 404\n",
"return 'success'\n"
] | [
"@app.route('/answer', methods=['GET', 'POST'])...\n",
"print(session)\n",
"option = request.form.get('option', None)\n",
"if option is None or not type(option) != int:\n",
"return abort(400)\n",
"option = int(option)\n",
"correct = session['correct']\n",
"if correct is None:\n",
"return abort(401)\n",
"current_munhak = session['current_munhak']\n",
"if current_munhak is None:\n",
"return abort(401)\n",
"if correct == option:\n",
"session['quiz_count'] += 1\n",
"if 'quiz_count' not in session:\n",
"session['solved_quiz'].append(current_munhak['munhak_seq'])\n",
"session['quiz_count'] = 0\n",
"if 'solved_quiz' not in session:\n",
"session['current_munhak'] = None\n",
"session['result'] = False\n",
"return 'failed', 404\n",
"return 'success'\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Condition",
"AugAssign'",
"Condition",
"Expr'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_35(VAR_36, VAR_41):...\n",
"if VAR_36.cover:\n",
"VAR_97 = VAR_36.cover\n",
"VAR_97 = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')\n",
"VAR_75 = os.path.join(config.config_calibre_dir, VAR_41.path, 'cover.jpg')\n",
"copyfile(VAR_97, VAR_75)\n",
"VAR_2.error('Failed to move cover file %s: %s', VAR_75, e)\n",
"if VAR_36.cover:\n",
"flash(_(u'Failed to Move Cover File %(file)s: %(error)s', file=\n new_coverpath, VAR_49=e), category='error')\n",
"os.unlink(VAR_36.cover)\n"
] | [
"def move_coverfile(meta, db_book):...\n",
"if meta.cover:\n",
"coverfile = meta.cover\n",
"coverfile = os.path.join(constants.STATIC_DIR, 'generic_cover.jpg')\n",
"new_coverpath = os.path.join(config.config_calibre_dir, db_book.path,\n 'cover.jpg')\n",
"copyfile(coverfile, new_coverpath)\n",
"log.error('Failed to move cover file %s: %s', new_coverpath, e)\n",
"if meta.cover:\n",
"flash(_(u'Failed to Move Cover File %(file)s: %(error)s', file=\n new_coverpath, error=e), category='error')\n",
"os.unlink(meta.cover)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_172(VAR_354):...\n",
"return VAR_354 + ' ' + VAR_354.replace('icon', 'glyphicon')\n"
] | [
"def rename(icon):...\n",
"return icon + ' ' + icon.replace('icon', 'glyphicon')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_22(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.client.login(username='super', password='secret')\n",
"VAR_21 = self.client.get('/widget_admin/admin_widgets/cartire/add/')\n",
"self.assertNotContains(VAR_21, 'BMW M3')\n",
"self.assertContains(VAR_21, 'Volkswagon Passat')\n"
] | [
"def testFilterChoicesByRequestUser(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self.client.login(username='super', password='secret')\n",
"response = self.client.get('/widget_admin/admin_widgets/cartire/add/')\n",
"self.assertNotContains(response, 'BMW M3')\n",
"self.assertContains(response, 'Volkswagon Passat')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_170(VAR_332, VAR_212, VAR_101=VAR_101):...\n",
"VAR_212 = VAR_332.select(VAR_175=(0, 1)).first()\n",
"VAR_101(VAR_101.wiki_tag.wiki_page == VAR_212.id).delete()\n",
"for VAR_440 in (VAR_212.tags or []):\n",
"VAR_440 = VAR_440.strip().lower()\n",
"if VAR_440:\n",
"VAR_101.wiki_tag.insert(VAR_148=tag, wiki_page=page.id)\n"
] | [
"def update_tags_update(dbset, page, db=db):...\n",
"page = dbset.select(limitby=(0, 1)).first()\n",
"db(db.wiki_tag.wiki_page == page.id).delete()\n",
"for tag in (page.tags or []):\n",
"tag = tag.strip().lower()\n",
"if tag:\n",
"db.wiki_tag.insert(name=tag, wiki_page=page.id)\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"For",
"Assign'",
"Condition",
"Expr'"
] |
[
"async def FUNC_12(VAR_16):...\n",
"return {str(u) for u in self.room_members}\n"
] | [
"async def get_users_in_room(room_id):...\n",
"return {str(u) for u in self.room_members}\n"
] | [
0,
0
] | [
"AsyncFunctionDef'",
"Return'"
] |
[
"def FUNC_9(VAR_11, VAR_12):...\n",
"VAR_22 = {'org_id': VAR_12.org_id, 'user_id': VAR_12.id, 'action': 'login',\n 'object_type': 'redash', 'timestamp': int(time.time()), 'user_agent':\n VAR_8.user_agent.string, 'ip': VAR_8.remote_addr}\n",
"record_event.delay(VAR_22)\n"
] | [
"def log_user_logged_in(app, user):...\n",
"event = {'org_id': user.org_id, 'user_id': user.id, 'action': 'login',\n 'object_type': 'redash', 'timestamp': int(time.time()), 'user_agent':\n request.user_agent.string, 'ip': request.remote_addr}\n",
"record_event.delay(event)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"def FUNC_8(self, VAR_8, *VAR_9, **VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.isAuthorized():\n",
"VAR_53.session.release_lock()\n",
"if cherry.config['media.transcode'] and VAR_9:\n",
"VAR_93 = VAR_10.pop('bitrate', None) or None\n",
"if VAR_93:\n",
"VAR_9 = os.path.sep.join(VAR_9)\n",
"VAR_93 = max(0, int(VAR_93)) or None\n",
"if sys.version_info < (3, 0):\n",
"VAR_9 = VAR_9.decode('utf-8')\n",
"VAR_9 = codecs.decode(codecs.encode(VAR_9, 'latin1'), 'utf-8')\n",
"VAR_94 = os.path.join(cherry.config['media.basedir'], VAR_9)\n",
"VAR_95 = int(VAR_10.pop('starttime', 0))\n",
"VAR_96 = audiotranscode.AudioTranscode()\n",
"VAR_97 = audiotranscode.mime_type(VAR_8)\n",
"VAR_53.response.headers['Content-Type'] = VAR_97\n",
"return VAR_96.transcode_stream(VAR_94, VAR_8, VAR_93=bitrate, VAR_95=starttime)\n"
] | [
"def trans(self, newformat, *path, **params):...\n",
"\"\"\"docstring\"\"\"\n",
"if not self.isAuthorized():\n",
"cherrypy.session.release_lock()\n",
"if cherry.config['media.transcode'] and path:\n",
"bitrate = params.pop('bitrate', None) or None\n",
"if bitrate:\n",
"path = os.path.sep.join(path)\n",
"bitrate = max(0, int(bitrate)) or None\n",
"if sys.version_info < (3, 0):\n",
"path = path.decode('utf-8')\n",
"path = codecs.decode(codecs.encode(path, 'latin1'), 'utf-8')\n",
"fullpath = os.path.join(cherry.config['media.basedir'], path)\n",
"starttime = int(params.pop('starttime', 0))\n",
"transcoder = audiotranscode.AudioTranscode()\n",
"mimetype = audiotranscode.mime_type(newformat)\n",
"cherrypy.response.headers['Content-Type'] = mimetype\n",
"return transcoder.transcode_stream(fullpath, newformat, bitrate=bitrate,\n starttime=starttime)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@pytest.mark.parametrize('inp,out', [('\">alert(1)', '">alert(1)'),...\n",
"assert escaping.escape_attribute(VAR_1) == VAR_2\n"
] | [
"@pytest.mark.parametrize('inp,out', [('\">alert(1)', '">alert(1)'),...\n",
"assert escaping.escape_attribute(inp) == out\n"
] | [
0,
0
] | [
"Condition",
"Assert'"
] |
[
"@VAR_4.route('/update')...\n",
"if request.args.get('key', None) != VAR_4.config['SECRET_KEY']:\n",
"return 'error'\n",
"FUNC_0()\n",
"VAR_17.clear()\n",
"return f'success! {len(VAR_3)}'\n"
] | [
"@app.route('/update')...\n",
"if request.args.get('key', None) != app.config['SECRET_KEY']:\n",
"return 'error'\n",
"update()\n",
"session.clear()\n",
"return f'success! {len(munhak_rows_data)}'\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_0(self, VAR_10):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_10:\n",
"self._start_processing()\n"
] | [
"def on_started(self, should_check_for_notifs):...\n",
"\"\"\"docstring\"\"\"\n",
"if should_check_for_notifs:\n",
"self._start_processing()\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Expr'"
] |
[
"def FUNC_76(VAR_212):...\n",
"if VAR_178(VAR_212.getId()) == VAR_178(VAR_6):\n",
"return False\n",
"VAR_285 = VAR_212.getPrimaryPixels()\n",
"if VAR_285 is None or VAR_212.getPrimaryPixels().getPixelsType().getValue(\n",
"return False\n",
"VAR_286 = [VAR_30.getLabel() for VAR_30 in VAR_212.getChannels()]\n",
"VAR_286.sort()\n",
"if VAR_286 != VAR_220:\n",
"return False\n",
"return True\n"
] | [
"def compat(i):...\n",
"if long(i.getId()) == long(iid):\n",
"return False\n",
"pp = i.getPrimaryPixels()\n",
"if pp is None or i.getPrimaryPixels().getPixelsType().getValue(\n",
"return False\n",
"ew = [x.getLabel() for x in i.getChannels()]\n",
"ew.sort()\n",
"if ew != img_ew:\n",
"return False\n",
"return True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_85(VAR_144, VAR_145=1):...\n",
"from frappe.utils.response import json_handler\n",
"return json.dumps(VAR_144, VAR_145=indent, sort_keys=True, VAR_47=\n json_handler, separators=(',', ': '))\n"
] | [
"def as_json(obj, indent=1):...\n",
"from frappe.utils.response import json_handler\n",
"return json.dumps(obj, indent=indent, sort_keys=True, default=json_handler,\n separators=(',', ': '))\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"ImportFrom'",
"Return'"
] |
[
"def FUNC_14(self, VAR_0, VAR_8, VAR_5, VAR_7):...\n",
"VAR_0.get(VAR_8 + self.url)\n",
"VAR_16 = VAR_0.find_element(By.XPATH, \"//input[@name='file']\")\n",
"VAR_16.send_keys(VAR_5)\n",
"assert VAR_16.get_attribute('name') == 'file'\n",
"VAR_17 = VAR_0.find_element(By.XPATH, \"//input[@name='save']\")\n",
"VAR_17.click()\n",
"assert 'save' in VAR_0.page_source\n",
"VAR_0.get(VAR_8 + self.url)\n",
"VAR_16 = VAR_0.find_element(By.XPATH, \"//input[@name='file']\")\n",
"VAR_16.send_keys(VAR_5)\n",
"assert VAR_16.get_attribute('name') == 'file'\n",
"VAR_17 = VAR_0.find_element(By.XPATH, \"//button[@name='save_continue']\")\n",
"VAR_17.click()\n",
"assert 'save_continue' in VAR_0.page_source\n",
"assert 'continue_value' in VAR_0.page_source\n"
] | [
"def test_file_insert_submit_value(self, driver, live_server, upload_file,...\n",
"driver.get(live_server + self.url)\n",
"file_input = driver.find_element(By.XPATH, \"//input[@name='file']\")\n",
"file_input.send_keys(upload_file)\n",
"assert file_input.get_attribute('name') == 'file'\n",
"save_button = driver.find_element(By.XPATH, \"//input[@name='save']\")\n",
"save_button.click()\n",
"assert 'save' in driver.page_source\n",
"driver.get(live_server + self.url)\n",
"file_input = driver.find_element(By.XPATH, \"//input[@name='file']\")\n",
"file_input.send_keys(upload_file)\n",
"assert file_input.get_attribute('name') == 'file'\n",
"save_button = driver.find_element(By.XPATH, \"//button[@name='save_continue']\")\n",
"save_button.click()\n",
"assert 'save_continue' in driver.page_source\n",
"assert 'continue_value' in driver.page_source\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Expr'",
"Assert'",
"Assign'",
"Expr'",
"Assert'",
"Expr'",
"Assign'",
"Expr'",
"Assert'",
"Assign'",
"Expr'",
"Assert'",
"Assert'"
] |
[
"def FUNC_4(self):...\n",
"VAR_12 = self.get_success(self.handler.check_device_registered(VAR_5=\n '@boris:foo', VAR_6='fco', initial_device_display_name='display name'))\n",
"self.assertEqual(VAR_12, 'fco')\n",
"VAR_13 = self.get_success(self.handler.check_device_registered(VAR_5=\n '@boris:foo', VAR_6='fco', initial_device_display_name='new display name'))\n",
"self.assertEqual(VAR_13, 'fco')\n",
"VAR_11 = self.get_success(self.handler.store.get_device('@boris:foo', 'fco'))\n",
"self.assertEqual(VAR_11['display_name'], 'display name')\n"
] | [
"def test_device_is_preserved_if_exists(self):...\n",
"res1 = self.get_success(self.handler.check_device_registered(user_id=\n '@boris:foo', device_id='fco', initial_device_display_name='display name'))\n",
"self.assertEqual(res1, 'fco')\n",
"res2 = self.get_success(self.handler.check_device_registered(user_id=\n '@boris:foo', device_id='fco', initial_device_display_name=\n 'new display name'))\n",
"self.assertEqual(res2, 'fco')\n",
"dev = self.get_success(self.handler.store.get_device('@boris:foo', 'fco'))\n",
"self.assertEqual(dev['display_name'], 'display name')\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_28():...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_60 = argparse.ArgumentParser(description=\n 'saved_model_cli: Command-line interface for SavedModel')\n",
"VAR_60.add_argument('-v', '--version', action='version', version='0.1.0')\n",
"VAR_21 = VAR_60.add_subparsers(title='commands', description=\n 'valid commands', help='additional help')\n",
"FUNC_23(VAR_21)\n",
"FUNC_24(VAR_21)\n",
"FUNC_25(VAR_21)\n",
"FUNC_26(VAR_21)\n",
"FUNC_27(VAR_21)\n",
"return VAR_60\n"
] | [
"def create_parser():...\n",
"\"\"\"docstring\"\"\"\n",
"parser = argparse.ArgumentParser(description=\n 'saved_model_cli: Command-line interface for SavedModel')\n",
"parser.add_argument('-v', '--version', action='version', version='0.1.0')\n",
"subparsers = parser.add_subparsers(title='commands', description=\n 'valid commands', help='additional help')\n",
"add_show_subparser(subparsers)\n",
"add_run_subparser(subparsers)\n",
"add_scan_subparser(subparsers)\n",
"add_convert_subparser(subparsers)\n",
"add_aot_compile_cpu_subparser(subparsers)\n",
"return parser\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def __repr__(self):...\n",
"return u\"<Registration('{0}')>\".format(self.domain)\n"
] | [
"def __repr__(self):...\n",
"return u\"<Registration('{0}')>\".format(self.domain)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_10(self, VAR_15, VAR_57):...\n",
"if self.whitelist_tags is not None and VAR_15.tag not in self.whitelist_tags:\n",
"return False\n",
"VAR_76, VAR_77, VAR_78, VAR_79, VAR_80 = urlsplit(VAR_57)\n",
"VAR_77 = VAR_77.lower().split(':', 1)[0]\n",
"if VAR_76 not in ('http', 'https'):\n",
"return False\n",
"if VAR_77 in self.host_whitelist:\n",
"return True\n",
"return False\n"
] | [
"def allow_embedded_url(self, el, url):...\n",
"if self.whitelist_tags is not None and el.tag not in self.whitelist_tags:\n",
"return False\n",
"scheme, netloc, path, query, fragment = urlsplit(url)\n",
"netloc = netloc.lower().split(':', 1)[0]\n",
"if scheme not in ('http', 'https'):\n",
"return False\n",
"if netloc in self.host_whitelist:\n",
"return True\n",
"return False\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'",
"Return'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_143 = FUNC_42(VAR_2, VAR_5)\n",
"VAR_144 = []\n",
"VAR_145 = []\n",
"VAR_146 = set()\n",
"VAR_147 = False\n",
"for VAR_310 in VAR_143:\n",
"VAR_144 += [('%s=%s' % (VAR_310, o.id)) for o in VAR_143[VAR_310]]\n",
"VAR_148 = '&'.join(VAR_144)\n",
"for o in VAR_143[VAR_310]:\n",
"VAR_149 = '|'.join(VAR_144).replace('=', '-')\n",
"VAR_146.add(o.getDetails().group.id.val)\n",
"if len(VAR_146) == 0:\n",
"if not o.canAnnotate():\n",
"if len(VAR_2.GET.getlist('tag')) > 0 or len(VAR_2.GET.getlist('tagset')) > 0:\n",
"VAR_150 = list(VAR_146)[0]\n",
"VAR_147 = \"Can't add annotations because you don't have permissions\"\n",
"VAR_145.append({'type': VAR_310.title(), 'id': o.id, 'name': o.getName()})\n",
"return HttpResponse(\"<h2>Can't batch annotate tags</h2>\")\n",
"return handlerInternalError(VAR_2, 'No objects found')\n",
"VAR_5.SERVICE_OPTS.setOmeroGroup(VAR_150)\n",
"VAR_104 = BaseContainer(VAR_5)\n",
"VAR_117 = VAR_104.listFigureScripts(VAR_143)\n",
"VAR_151 = VAR_104.canExportAsJpg(VAR_2, VAR_143)\n",
"VAR_152 = None\n",
"VAR_153 = []\n",
"if 'image' in VAR_143 and len(VAR_143['image']) > 0:\n",
"VAR_153 = [VAR_318.getId() for VAR_318 in VAR_143['image']]\n",
"if len(VAR_153) > 0:\n",
"VAR_152 = VAR_5.getFilesetFilesInfo(VAR_153)\n",
"VAR_53 = {'iids': VAR_153, 'obj_string': VAR_148, 'link_string': VAR_149,\n 'obj_labels': VAR_145, 'batch_ann': True, 'figScripts': VAR_117,\n 'canExportAsJpg': VAR_151, 'filesetInfo': VAR_152, 'annotationBlocked':\n VAR_147, 'differentGroups': False}\n",
"VAR_283 = VAR_5.getArchivedFilesInfo(VAR_153)\n",
"if len(VAR_146) > 1:\n",
"VAR_152['count'] += VAR_283['count']\n",
"VAR_53['annotationBlocked'\n ] = \"Can't add annotations because objects are in different groups\"\n",
"VAR_53['canDownload'] = VAR_104.canDownload(VAR_143)\n",
"VAR_152['size'] += VAR_283['size']\n",
"VAR_53['differentGroups'] = True\n",
"VAR_53['template'] = 'webclient/annotations/batch_annotate.html'\n",
"VAR_53['webclient_path'] = VAR_350('webindex')\n",
"VAR_53['annotationCounts'] = VAR_104.getBatchAnnotationCounts(FUNC_42(VAR_2,\n VAR_5))\n",
"return VAR_53\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"objs = getObjects(request, conn)\n",
"obj_ids = []\n",
"obj_labels = []\n",
"groupIds = set()\n",
"annotationBlocked = False\n",
"for key in objs:\n",
"obj_ids += [('%s=%s' % (key, o.id)) for o in objs[key]]\n",
"obj_string = '&'.join(obj_ids)\n",
"for o in objs[key]:\n",
"link_string = '|'.join(obj_ids).replace('=', '-')\n",
"groupIds.add(o.getDetails().group.id.val)\n",
"if len(groupIds) == 0:\n",
"if not o.canAnnotate():\n",
"if len(request.GET.getlist('tag')) > 0 or len(request.GET.getlist('tagset')\n",
"groupId = list(groupIds)[0]\n",
"annotationBlocked = \"Can't add annotations because you don't have permissions\"\n",
"obj_labels.append({'type': key.title(), 'id': o.id, 'name': o.getName()})\n",
"return HttpResponse(\"<h2>Can't batch annotate tags</h2>\")\n",
"return handlerInternalError(request, 'No objects found')\n",
"conn.SERVICE_OPTS.setOmeroGroup(groupId)\n",
"manager = BaseContainer(conn)\n",
"figScripts = manager.listFigureScripts(objs)\n",
"canExportAsJpg = manager.canExportAsJpg(request, objs)\n",
"filesetInfo = None\n",
"iids = []\n",
"if 'image' in objs and len(objs['image']) > 0:\n",
"iids = [i.getId() for i in objs['image']]\n",
"if len(iids) > 0:\n",
"filesetInfo = conn.getFilesetFilesInfo(iids)\n",
"context = {'iids': iids, 'obj_string': obj_string, 'link_string':\n link_string, 'obj_labels': obj_labels, 'batch_ann': True, 'figScripts':\n figScripts, 'canExportAsJpg': canExportAsJpg, 'filesetInfo':\n filesetInfo, 'annotationBlocked': annotationBlocked, 'differentGroups':\n False}\n",
"archivedInfo = conn.getArchivedFilesInfo(iids)\n",
"if len(groupIds) > 1:\n",
"filesetInfo['count'] += archivedInfo['count']\n",
"context['annotationBlocked'\n ] = \"Can't add annotations because objects are in different groups\"\n",
"context['canDownload'] = manager.canDownload(objs)\n",
"filesetInfo['size'] += archivedInfo['size']\n",
"context['differentGroups'] = True\n",
"context['template'] = 'webclient/annotations/batch_annotate.html'\n",
"context['webclient_path'] = reverse('webindex')\n",
"context['annotationCounts'] = manager.getBatchAnnotationCounts(getObjects(\n request, conn))\n",
"return context\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"For",
"AugAssign'",
"Assign'",
"For",
"Assign'",
"Expr'",
"Condition",
"Condition",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"AugAssign'",
"Assign'",
"Assign'",
"AugAssign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@VAR_8.route('/', methods=['GET'])...\n",
"VAR_43['state'] = None\n",
"return render_template('frontend/index.html', VAR_78=app.interface.config)\n"
] | [
"@app.route('/', methods=['GET'])...\n",
"session['state'] = None\n",
"return render_template('frontend/index.html', config=app.interface.config)\n"
] | [
0,
0,
0
] | [
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_14(VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_39 = {}\n",
"for input_raw in filter(bool, VAR_17.split(';')):\n",
"if '=' not in VAR_17:\n",
"return VAR_39\n",
"VAR_68, VAR_69 = input_raw.split('=', 1)\n",
"VAR_39[VAR_68] = eval(VAR_69)\n"
] | [
"def preprocess_input_exprs_arg_string(input_exprs_str):...\n",
"\"\"\"docstring\"\"\"\n",
"input_dict = {}\n",
"for input_raw in filter(bool, input_exprs_str.split(';')):\n",
"if '=' not in input_exprs_str:\n",
"return input_dict\n",
"input_key, expr = input_raw.split('=', 1)\n",
"input_dict[input_key] = eval(expr)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"@VAR_0.route('/delete/<int:book_id>', defaults={'book_format': ''})...\n",
"return FUNC_13(VAR_14, VAR_15, False)\n"
] | [
"@editbook.route('/delete/<int:book_id>', defaults={'book_format': ''})...\n",
"return delete_book_from_table(book_id, book_format, False)\n"
] | [
0,
0
] | [
"For",
"Return'"
] |
[
"def FUNC_46(VAR_2):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_161 = [(state_key, int(VAR_1.depth)) for (e_type, state_key), VAR_1 in\n VAR_2.items() if e_type == VAR_188.Member and VAR_1.membership ==\n Membership.JOIN]\n",
"VAR_162 = {}\n",
"for u, d in VAR_161:\n",
"return sorted(VAR_162.items(), VAR_144=lambda d: d[1])\n",
"VAR_196 = get_domain_from_id(u)\n",
"VAR_197 = VAR_162.get(VAR_196)\n",
"if VAR_197:\n",
"VAR_162[VAR_196] = min(d, VAR_197)\n",
"VAR_162[VAR_196] = d\n"
] | [
"def get_domains_from_state(state):...\n",
"\"\"\"docstring\"\"\"\n",
"joined_users = [(state_key, int(event.depth)) for (e_type, state_key),\n event in state.items() if e_type == EventTypes.Member and event.\n membership == Membership.JOIN]\n",
"joined_domains = {}\n",
"for u, d in joined_users:\n",
"return sorted(joined_domains.items(), key=lambda d: d[1])\n",
"dom = get_domain_from_id(u)\n",
"old_d = joined_domains.get(dom)\n",
"if old_d:\n",
"joined_domains[dom] = min(d, old_d)\n",
"joined_domains[dom] = d\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"For",
"Return'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"@def_function.function(input_signature=[tensor_spec.TensorSpec(shape=(2048,...\n",
"return {'res': VAR_44 + self.var}\n"
] | [
"@def_function.function(input_signature=[tensor_spec.TensorSpec(shape=(2048,...\n",
"return {'res': x + self.var}\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_16(self):...\n",
"self.assert_expected(self.folder.t, 'CheckNotExpression.html')\n"
] | [
"def testNotExpression(self):...\n",
"self.assert_expected(self.folder.t, 'CheckNotExpression.html')\n"
] | [
0,
0
] | [
"FunctionDef'",
"Expr'"
] |
[
"def FUNC_15(VAR_3, **VAR_4):...\n",
"VAR_15 = Deferred()\n",
"VAR_15.addCallback(FUNC_16)\n",
"self.reactor.callLater(1, VAR_15.callback, True)\n",
"return make_deferred_yieldable(VAR_15)\n"
] | [
"def _callback(request, **kwargs):...\n",
"d = Deferred()\n",
"d.addCallback(_throw)\n",
"self.reactor.callLater(1, d.callback, True)\n",
"return make_deferred_yieldable(d)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_18(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_10 = copy.copy(self.data)\n",
"VAR_10['offset'] = '1'\n",
"VAR_10['checksum'] = ''\n",
"self.data = VAR_10\n",
"return self\n"
] | [
"def reset_offset(self):...\n",
"\"\"\"docstring\"\"\"\n",
"data = copy.copy(self.data)\n",
"data['offset'] = '1'\n",
"data['checksum'] = ''\n",
"self.data = data\n",
"return self\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@wraps(VAR_0)...\n",
"VAR_4 = VAR_1.user\n",
"if not VAR_4.is_authenticated:\n",
"return redirect_to_login(next=request.get_full_path(), login_url=settings.\n LOGIN_URL)\n",
"if not VAR_4.st.is_moderator:\n",
"return VAR_0(VAR_1, *VAR_2, **kwargs)\n"
] | [
"@wraps(view_func)...\n",
"user = request.user\n",
"if not user.is_authenticated:\n",
"return redirect_to_login(next=request.get_full_path(), login_url=settings.\n LOGIN_URL)\n",
"if not user.st.is_moderator:\n",
"return view_func(request, *args, **kwargs)\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Return'",
"Condition",
"Return'"
] |
[
"async def FUNC_49(VAR_102: _NewEventInfo):...\n",
"VAR_1 = VAR_102.event\n",
"VAR_168 = await self._prep_event(VAR_5, VAR_1, VAR_2=ev_info.state, VAR_3=\n ev_info.auth_events, VAR_33=backfilled)\n",
"return VAR_168\n"
] | [
"async def prep(ev_info: _NewEventInfo):...\n",
"event = ev_info.event\n",
"res = await self._prep_event(origin, event, state=ev_info.state,\n auth_events=ev_info.auth_events, backfilled=backfilled)\n",
"return res\n"
] | [
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_25(self):...\n",
"VAR_5 = self._makeContext()\n",
"self.assertEqual(VAR_5.evaluate(' \\n'), None)\n"
] | [
"def test_empty_path_expression_implicit_with_trailing_whitespace(self):...\n",
"ec = self._makeContext()\n",
"self.assertEqual(ec.evaluate(' \\n'), None)\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'"
] |
[
"@defer.inlineCallbacks...\n",
"yield defer.ensureDeferred(self.store.set_profile_avatar_url(self.frank.\n localpart, 'http://my.server/me.png'))\n",
"VAR_6 = yield defer.ensureDeferred(self.handler.get_avatar_url(self.frank))\n",
"self.assertEquals('http://my.server/me.png', VAR_6)\n"
] | [
"@defer.inlineCallbacks...\n",
"yield defer.ensureDeferred(self.store.set_profile_avatar_url(self.frank.\n localpart, 'http://my.server/me.png'))\n",
"avatar_url = yield defer.ensureDeferred(self.handler.get_avatar_url(self.frank)\n )\n",
"self.assertEquals('http://my.server/me.png', avatar_url)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Expr'",
"Assign'",
"Expr'"
] |
[
"def FUNC_4(VAR_0):...\n",
"VAR_8 = VAR_0.values.get('pagename', '')\n",
"if VAR_8:\n",
"from MoinMoin import search\n",
"VAR_32 = ''\n",
"VAR_30 = search.searchPages(VAR_0, 't:\"%s\"' % VAR_8)\n",
"VAR_9 = wikiutil.load_wikimap(VAR_0)\n",
"VAR_31 = [p.page_name for p in VAR_30.hits]\n",
"VAR_10 = VAR_9.keys()\n",
"VAR_31.sort()\n",
"VAR_10.sort()\n",
"VAR_31[0:0] = [VAR_8]\n",
"VAR_11 = VAR_0.cfg.interwiki_preferred[:]\n",
"VAR_32 = 'string' % '\\n'.join([('<option value=\"%s\">%s</option>' % (\n wikiutil.escape(page), wikiutil.escape(page))) for page in VAR_31])\n",
"if not VAR_11 or VAR_11 and VAR_11[-1] is not None:\n",
"VAR_33 = VAR_11\n",
"VAR_33 = VAR_11[:-1]\n",
"for iw in VAR_10:\n",
"VAR_10 = '\\n'.join([('<option value=\"%s\">%s</option>' % (wikiutil.escape(\n key), wikiutil.escape(key))) for key in VAR_33])\n",
"if not iw in VAR_11:\n",
"VAR_12 = VAR_0.cfg.url_prefix_static\n",
"VAR_33.append(iw)\n",
"VAR_13 = VAR_0.script_root + '/'\n",
"VAR_14 = VAR_13\n",
"VAR_15 = wikiutil.escape(VAR_0.page.page_name)\n",
"VAR_0.write(u'string' % locals())\n"
] | [
"def link_dialog(request):...\n",
"name = request.values.get('pagename', '')\n",
"if name:\n",
"from MoinMoin import search\n",
"page_list = ''\n",
"searchresult = search.searchPages(request, 't:\"%s\"' % name)\n",
"interwiki_list = wikiutil.load_wikimap(request)\n",
"pages = [p.page_name for p in searchresult.hits]\n",
"interwiki = interwiki_list.keys()\n",
"pages.sort()\n",
"interwiki.sort()\n",
"pages[0:0] = [name]\n",
"iwpreferred = request.cfg.interwiki_preferred[:]\n",
"page_list = (\n \"\"\"\n <tr>\n <td colspan=2>\n <select id=\"sctPagename\" size=\"1\" onchange=\"OnChangePagename(this.value);\">\n %s\n </select>\n <td>\n </tr>\n\"\"\"\n % '\\n'.join([('<option value=\"%s\">%s</option>' % (wikiutil.escape(page\n ), wikiutil.escape(page))) for page in pages]))\n",
"if not iwpreferred or iwpreferred and iwpreferred[-1] is not None:\n",
"resultlist = iwpreferred\n",
"resultlist = iwpreferred[:-1]\n",
"for iw in interwiki:\n",
"interwiki = '\\n'.join([('<option value=\"%s\">%s</option>' % (wikiutil.escape\n (key), wikiutil.escape(key))) for key in resultlist])\n",
"if not iw in iwpreferred:\n",
"url_prefix_static = request.cfg.url_prefix_static\n",
"resultlist.append(iw)\n",
"scriptname = request.script_root + '/'\n",
"action = scriptname\n",
"basepage = wikiutil.escape(request.page.page_name)\n",
"request.write(\n u\"\"\"\n<!--\n * FCKeditor - The text editor for internet\n * Copyright (C) 2003-2004 Frederico Caldeira Knabben\n *\n * Licensed under the terms of the GNU Lesser General Public License:\n * http://www.opensource.org/licenses/lgpl-license.php\n *\n * For further information visit:\n * http://www.fckeditor.net/\n *\n * File Name: fck_link.html\n * Link dialog window.\n *\n * Version: 2.0 FC (Preview)\n * Modified: 2005-02-18 23:55:22\n *\n * File Authors:\n * Frederico Caldeira Knabben (fredck@fckeditor.net)\n-->\n<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0 Transitional//EN\">\n<meta http-equiv=\"Content-Type\" content=\"text/html;charset=utf-8\">\n<meta name=\"robots\" content=\"index,nofollow\">\n<html>\n <head>\n <title>Link Properties</title>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" />\n <meta name=\"robots\" content=\"noindex,nofollow\" />\n <script src=\"%(url_prefix_static)s/applets/FCKeditor/editor/dialog/common/fck_dialog_common.js\" type=\"text/javascript\"></script>\n <script src=\"%(url_prefix_static)s/applets/moinFCKplugins/moinlink/fck_link.js\" type=\"text/javascript\"></script>\n <script src=\"%(url_prefix_static)s/applets/moinFCKplugins/moinurllib.js\" type=\"text/javascript\"></script>\n </head>\n <body scroll=\"no\" style=\"OVERFLOW: hidden\">\n <div id=\"divInfo\" style=\"DISPLAY: none\">\n <span fckLang=\"DlgLnkType\">Link Type</span><br />\n <select id=\"cmbLinkType\" onchange=\"SetLinkType(this.value);\">\n <option value=\"wiki\" selected=\"selected\">WikiPage</option>\n <option value=\"interwiki\">Interwiki</option>\n <option value=\"url\" fckLang=\"DlgLnkTypeURL\">URL</option>\n </select>\n <br />\n <br />\n <div id=\"divLinkTypeWiki\">\n <table height=\"100%%\" cellSpacing=\"0\" cellPadding=\"0\" width=\"100%%\" border=\"0\">\n <tr>\n <td>\n <form action=%(action)s method=\"GET\">\n <input type=\"hidden\" name=\"action\" value=\"fckdialog\">\n <input type=\"hidden\" name=\"dialog\" value=\"link\">\n <input type=\"hidden\" id=\"basepage\" name=\"basepage\" value=\"%(basepage)s\">\n <table cellSpacing=\"0\" cellPadding=\"0\" align=\"center\" border=\"0\">\n <tr>\n <td>\n <span fckLang=\"PageDlgName\">Page Name</span><br>\n <input id=\"txtPagename\" name=\"pagename\" size=\"30\" value=\"%(name)s\">\n </td>\n <td valign=\"bottom\">\n <input id=btnSearchpage type=\"submit\" value=\"Search\">\n </td>\n </tr>\n %(page_list)s\n </table>\n </form>\n </td>\n </tr>\n </table>\n </div>\n <div id=\"divLinkTypeInterwiki\">\n <table height=\"100%%\" cellSpacing=\"0\" cellPadding=\"0\" width=\"100%%\" border=\"0\">\n <tr>\n <td>\n <table cellSpacing=\"0\" cellPadding=\"0\" align=\"center\" border=\"0\">\n <tr>\n <td>\n <span fckLang=\"WikiDlgName\">Wiki:PageName</span><br>\n <select id=\"sctInterwiki\" size=\"1\">\n %(interwiki)s\n </select>:\n <input id=\"txtInterwikipagename\"></input>\n </td>\n </tr>\n </table>\n </td>\n </tr>\n </table>\n </div>\n <div id=\"divLinkTypeUrl\">\n <table cellspacing=\"0\" cellpadding=\"0\" width=\"100%%\" border=\"0\">\n <tr>\n <td nowrap=\"nowrap\">\n <span fckLang=\"DlgLnkProto\">Protocol</span><br />\n <select id=\"cmbLinkProtocol\">\n <option value=\"http://\" selected=\"selected\">http://</option>\n <option value=\"https://\">https://</option>\n <option value=\"ftp://\">ftp://</option>\n <option value=\"file://\">file://</option>\n <option value=\"news://\">news://</option>\n <option value=\"mailto:\">mailto:</option>\n <option value=\"\" fckLang=\"DlgLnkProtoOther\"><other></option>\n 
</select>\n </td>\n <td nowrap=\"nowrap\"> </td>\n <td nowrap=\"nowrap\" width=\"100%%\">\n <span fckLang=\"DlgLnkURL\">URL</span><br />\n <input id=\"txtUrl\" style=\"WIDTH: 100%%\" type=\"text\" onkeyup=\"OnUrlChange();\" onchange=\"OnUrlChange();\" />\n </td>\n </tr>\n </table>\n <br />\n </div>\n </div>\n </body>\n</html>\n\"\"\"\n % locals())\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2
] | [
"FunctionDef'",
"Assign'",
"Condition",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"For",
"Assign'",
"Condition",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"def FUNC_1(VAR_1, VAR_2=False):...\n",
"\"\"\"docstring\"\"\"\n",
"for hook in VAR_25.get_hooks('override_whitelisted_methods', {}).get(VAR_1, []\n",
"VAR_1 = hook\n",
"if run_server_script_api(VAR_1):\n",
"return None\n",
"VAR_3 = FUNC_11(VAR_1)\n",
"if VAR_25.local.conf.developer_mode:\n",
"if VAR_2:\n",
"VAR_25.respond_as_web_page(title='Invalid Method', html='Method not found',\n indicator_color='red', http_status_code=404)\n",
"return\n",
"VAR_3 = VAR_3.queue\n",
"FUNC_3(VAR_3)\n",
"FUNC_2(VAR_3)\n",
"return VAR_25.call(VAR_3, **frappe.form_dict)\n"
] | [
"def execute_cmd(cmd, from_async=False):...\n",
"\"\"\"docstring\"\"\"\n",
"for hook in frappe.get_hooks('override_whitelisted_methods', {}).get(cmd, []):\n",
"cmd = hook\n",
"if run_server_script_api(cmd):\n",
"return None\n",
"method = get_attr(cmd)\n",
"if frappe.local.conf.developer_mode:\n",
"if from_async:\n",
"frappe.respond_as_web_page(title='Invalid Method', html='Method not found',\n indicator_color='red', http_status_code=404)\n",
"return\n",
"method = method.queue\n",
"is_whitelisted(method)\n",
"is_valid_http_method(method)\n",
"return frappe.call(method, **frappe.form_dict)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
2,
2,
0
] | [
"FunctionDef'",
"Docstring",
"For",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Condition",
"Condition",
"Expr'",
"Return'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_5(self, VAR_24, VAR_25=None):...\n",
"if VAR_25:\n",
"return b''.join(list(self.serialize(VAR_24, VAR_25)))\n",
"return ''.join(list(self.serialize(VAR_24)))\n"
] | [
"def render(self, treewalker, encoding=None):...\n",
"if encoding:\n",
"return b''.join(list(self.serialize(treewalker, encoding)))\n",
"return ''.join(list(self.serialize(treewalker)))\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_11():...\n",
"logout_user()\n",
"if settings.MULTI_ORG and current_org == None:\n",
"VAR_31 = '/'\n",
"if settings.MULTI_ORG:\n",
"return redirect(VAR_31)\n",
"VAR_31 = url_for('redash.index', org_slug=current_org.slug, _external=False)\n",
"VAR_31 = url_for('redash.index', _external=False)\n"
] | [
"def logout_and_redirect_to_index():...\n",
"logout_user()\n",
"if settings.MULTI_ORG and current_org == None:\n",
"index_url = '/'\n",
"if settings.MULTI_ORG:\n",
"return redirect(index_url)\n",
"index_url = url_for('redash.index', org_slug=current_org.slug, _external=False)\n",
"index_url = url_for('redash.index', _external=False)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Condition",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"def __init__(self, VAR_1: str):...\n",
"assert VAR_1 == '-'\n",
"self.type = 'stdin'\n",
"self.sourceName = VAR_1\n",
"self.content = None\n"
] | [
"def __init__(self, sourceName: str):...\n",
"assert sourceName == '-'\n",
"self.type = 'stdin'\n",
"self.sourceName = sourceName\n",
"self.content = None\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"async def FUNC_6(self):...\n",
"if not hasattr(self, 'resolved_ref'):\n",
"self.resolved_ref = await self.get_resolved_ref()\n",
"return f'{self.user}/{self.repo}/{self.resolved_ref}'\n"
] | [
"async def get_resolved_spec(self):...\n",
"if not hasattr(self, 'resolved_ref'):\n",
"self.resolved_ref = await self.get_resolved_ref()\n",
"return f'{self.user}/{self.repo}/{self.resolved_ref}'\n"
] | [
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"@VAR_0.route('/login', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"if current_user.is_authenticated:\n",
"return redirect(url_for('dashboard'))\n",
"if request.method == 'POST':\n",
"VAR_85 = request.form.get('username')\n",
"VAR_43 = CLASS_0.query.all()\n",
"VAR_41 = request.form.get('password')\n",
"return render_template('login.html', title='Login', VAR_43=users)\n",
"VAR_37 = CLASS_0.query.filter_by(VAR_37=username).first()\n",
"if VAR_37 and VAR_37.verify_password(VAR_41):\n",
"login_user(VAR_37, True)\n",
"flash('Error identifying the user', 'danger')\n",
"flash('Login successful', 'success')\n",
"return redirect(url_for('dashboard'))\n"
] | [
"@gui.route('/login', methods=['GET', 'POST'])...\n",
"\"\"\"docstring\"\"\"\n",
"if current_user.is_authenticated:\n",
"return redirect(url_for('dashboard'))\n",
"if request.method == 'POST':\n",
"username = request.form.get('username')\n",
"users = User.query.all()\n",
"password = request.form.get('password')\n",
"return render_template('login.html', title='Login', users=users)\n",
"user = User.query.filter_by(user=username).first()\n",
"if user and user.verify_password(password):\n",
"login_user(user, True)\n",
"flash('Error identifying the user', 'danger')\n",
"flash('Login successful', 'success')\n",
"return redirect(url_for('dashboard'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"if self._client_transport:\n",
"self._client_transport = None\n",
"if self._server_transport:\n",
"self.client.close()\n",
"self._server_transport = None\n",
"self.server.close()\n"
] | [
"def disconnect(self):...\n",
"if self._client_transport:\n",
"self._client_transport = None\n",
"if self._server_transport:\n",
"self.client.close()\n",
"self._server_transport = None\n",
"self.server.close()\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'"
] |
[
"from __future__ import unicode_literals\n",
"from datetime import datetime, timedelta\n",
"from unittest import TestCase\n",
"from django import forms\n",
"from django.conf import settings\n",
"from django.contrib import admin\n",
"from django.contrib.admin import widgets\n",
"from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase\n",
"from django.core.files.storage import default_storage\n",
"from django.core.files.uploadedfile import SimpleUploadedFile\n",
"from django.db.models import CharField, DateField\n",
"from django.test import TestCase as DjangoTestCase\n",
"from django.test.utils import override_settings\n",
"from django.utils import six\n",
"from django.utils import translation\n",
"from django.utils.html import conditional_escape\n",
"from . import models\n",
"from .widgetadmin import site as widget_admin_site\n",
"VAR_0 = lambda : {'ADMIN_STATIC_PREFIX': '%sadmin/' % settings.STATIC_URL}\n",
"\"\"\"\n Tests for correct behavior of ModelAdmin.formfield_for_dbfield\n \"\"\"\n",
"def FUNC_0(self, VAR_1, VAR_2, VAR_3, **VAR_4):...\n",
"\"\"\"docstring\"\"\"\n",
"for k in VAR_4:\n",
"setattr(CLASS_25, k, VAR_4[k])\n",
"VAR_16 = CLASS_25(VAR_1, admin.site)\n",
"VAR_17 = VAR_16.formfield_for_dbfield(VAR_1._meta.get_field(VAR_2), request\n =None)\n",
"if isinstance(VAR_17.widget, widgets.RelatedFieldWidgetWrapper):\n",
"VAR_53 = VAR_17.widget.widget\n",
"VAR_53 = VAR_17.widget\n",
"self.assertTrue(isinstance(VAR_53, VAR_3), \n 'Wrong widget for %s.%s: expected %s, got %s' % (VAR_1.__class__.\n __name__, VAR_2, VAR_3, type(VAR_53)))\n",
"return VAR_17\n"
] | [
"from __future__ import unicode_literals\n",
"from datetime import datetime, timedelta\n",
"from unittest import TestCase\n",
"from django import forms\n",
"from django.conf import settings\n",
"from django.contrib import admin\n",
"from django.contrib.admin import widgets\n",
"from django.contrib.admin.tests import AdminSeleniumWebDriverTestCase\n",
"from django.core.files.storage import default_storage\n",
"from django.core.files.uploadedfile import SimpleUploadedFile\n",
"from django.db.models import CharField, DateField\n",
"from django.test import TestCase as DjangoTestCase\n",
"from django.test.utils import override_settings\n",
"from django.utils import six\n",
"from django.utils import translation\n",
"from django.utils.html import conditional_escape\n",
"from . import models\n",
"from .widgetadmin import site as widget_admin_site\n",
"admin_static_prefix = lambda : {'ADMIN_STATIC_PREFIX': '%sadmin/' %\n settings.STATIC_URL}\n",
"\"\"\"\n Tests for correct behavior of ModelAdmin.formfield_for_dbfield\n \"\"\"\n",
"def assertFormfield(self, model, fieldname, widgetclass, **admin_overrides):...\n",
"\"\"\"docstring\"\"\"\n",
"for k in admin_overrides:\n",
"setattr(MyModelAdmin, k, admin_overrides[k])\n",
"ma = MyModelAdmin(model, admin.site)\n",
"ff = ma.formfield_for_dbfield(model._meta.get_field(fieldname), request=None)\n",
"if isinstance(ff.widget, widgets.RelatedFieldWidgetWrapper):\n",
"widget = ff.widget.widget\n",
"widget = ff.widget\n",
"self.assertTrue(isinstance(widget, widgetclass), \n 'Wrong widget for %s.%s: expected %s, got %s' % (model.__class__.\n __name__, fieldname, widgetclass, type(widget)))\n",
"return ff\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Expr'",
"FunctionDef'",
"Docstring",
"For",
"Expr'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_13 = 'remote.server'\n",
"VAR_14 = signedjson.key.generate_signing_key('ver1')\n",
"self.expect_outgoing_key_request(VAR_13, VAR_14)\n",
"VAR_12 = self.make_notary_request(VAR_13, 'ed25519:ver1')\n",
"VAR_15 = VAR_12['server_keys']\n",
"self.assertEqual(len(VAR_15), 1)\n",
"self.assertIn('ed25519:ver1', VAR_15[0]['verify_keys'])\n",
"self.assertEqual(len(VAR_15[0]['verify_keys']), 1)\n",
"self.assertIn(VAR_13, VAR_15[0]['signatures'])\n",
"self.assertIn(self.hs.hostname, VAR_15[0]['signatures'])\n"
] | [
"def test_get_key(self):...\n",
"\"\"\"docstring\"\"\"\n",
"SERVER_NAME = 'remote.server'\n",
"testkey = signedjson.key.generate_signing_key('ver1')\n",
"self.expect_outgoing_key_request(SERVER_NAME, testkey)\n",
"resp = self.make_notary_request(SERVER_NAME, 'ed25519:ver1')\n",
"keys = resp['server_keys']\n",
"self.assertEqual(len(keys), 1)\n",
"self.assertIn('ed25519:ver1', keys[0]['verify_keys'])\n",
"self.assertEqual(len(keys[0]['verify_keys']), 1)\n",
"self.assertIn(SERVER_NAME, keys[0]['signatures'])\n",
"self.assertIn(self.hs.hostname, keys[0]['signatures'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def __init__(self, VAR_5, VAR_4, *VAR_6, **VAR_7):...\n",
"super().__init__(*VAR_6, **kwargs)\n",
"self.translation = VAR_5\n",
"self.fields['variant'].queryset = VAR_5.unit_set.all()\n",
"self.user = VAR_4\n"
] | [
"def __init__(self, translation, user, *args, **kwargs):...\n",
"super().__init__(*args, **kwargs)\n",
"self.translation = translation\n",
"self.fields['variant'].queryset = translation.unit_set.all()\n",
"self.user = user\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'"
] |
[
"def FUNC_1(VAR_2):...\n",
"return CLASS_0(VAR_2, VAR_3)\n"
] | [
"def cache_filter(app):...\n",
"return MemcacheMiddleware(app, conf)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_30(self, *VAR_13, **VAR_14):...\n",
"if self.directory is None:\n",
"return\n",
"return super(CLASS_2, self).get(*VAR_13, **kwargs)\n"
] | [
"def get(self, *args, **kwargs):...\n",
"if self.directory is None:\n",
"return\n",
"return super(SafeFileCache, self).get(*args, **kwargs)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_11(self):...\n",
"super().after_script_executed()\n",
"VAR_36, VAR_34 = tempfile.mkstemp(dir=self.temp_dir(), prefix=self.key)\n",
"VAR_37 = self.file_path\n",
"self.file_path = VAR_34\n",
"VAR_0.info(f'Zipping {self.key} to {VAR_34}')\n",
"self.file_path = shutil.make_archive(self.file_path, 'zip', VAR_37)\n",
"VAR_0.info(f'Zip file created {self.file_path}')\n",
"self.generate_download_link = True\n"
] | [
"def after_script_executed(self):...\n",
"super().after_script_executed()\n",
"fd, filename = tempfile.mkstemp(dir=self.temp_dir(), prefix=self.key)\n",
"folder_path = self.file_path\n",
"self.file_path = filename\n",
"logger.info(f'Zipping {self.key} to {filename}')\n",
"self.file_path = shutil.make_archive(self.file_path, 'zip', folder_path)\n",
"logger.info(f'Zip file created {self.file_path}')\n",
"self.generate_download_link = True\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Assign'"
] |
[
"async def FUNC_9(self, VAR_4: str, VAR_6: str, VAR_7: str, VAR_18: str...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_49 = {'client_secret': VAR_6, 'sid': VAR_7, 'token': VAR_18}\n",
"return await self.http_client.post_json_get_json(VAR_4 +\n '/_matrix/identity/api/v1/validate/msisdn/submitToken', VAR_49)\n",
"VAR_0.warning('Error contacting msisdn account_threepid_delegate: %s', e)\n"
] | [
"async def proxy_msisdn_submit_token(self, id_server: str, client_secret:...\n",
"\"\"\"docstring\"\"\"\n",
"body = {'client_secret': client_secret, 'sid': sid, 'token': token}\n",
"return await self.http_client.post_json_get_json(id_server +\n '/_matrix/identity/api/v1/validate/msisdn/submitToken', body)\n",
"logger.warning('Error contacting msisdn account_threepid_delegate: %s', e)\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Return'",
"Expr'"
] |
[
"def FUNC_58(self):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked\n",
"if not self.flags.ignore_links:\n",
"check_if_doc_is_linked(self, VAR_25='Cancel')\n",
"check_if_doc_is_dynamically_linked(self, VAR_25='Cancel')\n"
] | [
"def check_no_back_links_exist(self):...\n",
"\"\"\"docstring\"\"\"\n",
"from frappe.model.delete_doc import check_if_doc_is_linked, check_if_doc_is_dynamically_linked\n",
"if not self.flags.ignore_links:\n",
"check_if_doc_is_linked(self, method='Cancel')\n",
"check_if_doc_is_dynamically_linked(self, method='Cancel')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"ImportFrom'",
"Condition",
"Expr'",
"Expr'"
] |
[
"def FUNC_48(self):...\n",
"VAR_86 = {'transcodingenabled': cherry.config['media.transcode'],\n 'fetchalbumart': cherry.config['media.fetch_album_art'], 'isadmin':\n VAR_53.session['admin'], 'username': VAR_53.session['username'],\n 'servepath': 'serve/', 'transcodepath': 'trans/', 'auto_login': self.\n autoLoginActive(), 'version': cherry.REPO_VERSION or cherry.VERSION}\n",
"if cherry.config['media.transcode']:\n",
"VAR_112 = list(self.model.transcoder.available_decoder_formats())\n",
"VAR_86['getdecoders'] = []\n",
"VAR_86['getdecoders'] = VAR_112\n",
"VAR_86['getencoders'] = []\n",
"VAR_113 = list(self.model.transcoder.available_encoder_formats())\n",
"return VAR_86\n",
"VAR_86['getencoders'] = VAR_113\n"
] | [
"def api_getconfiguration(self):...\n",
"clientconfigkeys = {'transcodingenabled': cherry.config['media.transcode'],\n 'fetchalbumart': cherry.config['media.fetch_album_art'], 'isadmin':\n cherrypy.session['admin'], 'username': cherrypy.session['username'],\n 'servepath': 'serve/', 'transcodepath': 'trans/', 'auto_login': self.\n autoLoginActive(), 'version': cherry.REPO_VERSION or cherry.VERSION}\n",
"if cherry.config['media.transcode']:\n",
"decoders = list(self.model.transcoder.available_decoder_formats())\n",
"clientconfigkeys['getdecoders'] = []\n",
"clientconfigkeys['getdecoders'] = decoders\n",
"clientconfigkeys['getencoders'] = []\n",
"encoders = list(self.model.transcoder.available_encoder_formats())\n",
"return clientconfigkeys\n",
"clientconfigkeys['getencoders'] = encoders\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'",
"Assign'"
] |
[
"def FUNC_11(VAR_11: str) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_23 = g.ledger.fava_options.collapse_pattern\n",
"return any(pattern.match(VAR_11) for pattern in VAR_23)\n"
] | [
"def collapse_account(account_name: str) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"collapse_patterns = g.ledger.fava_options.collapse_pattern\n",
"return any(pattern.match(account_name) for pattern in collapse_patterns)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Return'"
] |
[
"async def FUNC_8(self):...\n",
"if not hasattr(self, 'resolved_ref'):\n",
"self.resolved_ref = await self.get_resolved_ref()\n",
"return f'https://{self.hostname}/{self.user}/{self.gist_id}/{self.resolved_ref}'\n"
] | [
"async def get_resolved_ref_url(self):...\n",
"if not hasattr(self, 'resolved_ref'):\n",
"self.resolved_ref = await self.get_resolved_ref()\n",
"return f'https://{self.hostname}/{self.user}/{self.gist_id}/{self.resolved_ref}'\n"
] | [
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Condition",
"Assign'",
"Return'"
] |
[
"from builtins import next\n",
"from builtins import range\n",
"import os\n",
"import datetime\n",
"from xml.sax.saxutils import quoteattr\n",
"import sys\n",
"import logging\n",
"import random\n",
"import glob\n",
"from itertools import cycle\n",
"from flask import Blueprint, url_for, Response, stream_with_context, send_file, jsonify\n",
"from werkzeug.datastructures import Headers\n",
"VAR_0 = 'cocktail'\n",
"VAR_1 = False\n",
"VAR_2 = True\n",
"VAR_3 = VAR_4 = None\n",
"VAR_5 = int(10000.0)\n",
"\"\"\"string\"\"\"\n",
"def FUNC_0(VAR_6):...\n",
"VAR_3 = VAR_6.indexdir\n",
"VAR_4 = VAR_6.dataroot\n",
"VAR_7 = Blueprint('mixer_store', __name__)\n",
"VAR_8 = logging.getLogger(__name__)\n",
"@VAR_7.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')...\n",
"VAR_17 = FUNC_4(VAR_10, VAR_14)\n",
"VAR_21 = int((VAR_11 - 1) * (1.0 / VAR_12) * len(VAR_17))\n",
"VAR_22 = int(VAR_11 * (1.0 / VAR_12) * len(VAR_17))\n",
"VAR_17 = VAR_17[VAR_21:VAR_22]\n",
"print('Mixer Size {}'.format(len(VAR_17)))\n",
"sys.stdout.flush()\n",
"return FUNC_2(VAR_9, VAR_13, VAR_17, VAR_15, VAR_16)\n"
] | [
"from builtins import next\n",
"from builtins import range\n",
"import os\n",
"import datetime\n",
"from xml.sax.saxutils import quoteattr\n",
"import sys\n",
"import logging\n",
"import random\n",
"import glob\n",
"from itertools import cycle\n",
"from flask import Blueprint, url_for, Response, stream_with_context, send_file, jsonify\n",
"from werkzeug.datastructures import Headers\n",
"BASEURL = 'cocktail'\n",
"STYLE = False\n",
"LOCAL_OBJ_URI = True\n",
"INDEXDIR = DATAROOT = None\n",
"ITEMS_PER_ITERATION = int(10000.0)\n",
"\"\"\"\n Example cocktail url:\n /cocktail/base/FFFFFFFFFFFFFFFF/distrbuted/1of2/ mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0/classes/gull,cardinal\n /cocktail/base/\"0\"/mixers/FFFFFFFFFFFFFFFC/keywords/d_42_1.0\n /cocktail/base/FFFFFFFFFFFFFFFF/keywords/d_42_1.0\n\"\"\"\n",
"def init(config):...\n",
"INDEXDIR = config.indexdir\n",
"DATAROOT = config.dataroot\n",
"scope_blueprint = Blueprint('mixer_store', __name__)\n",
"_log = logging.getLogger(__name__)\n",
"@scope_blueprint.route('/base/<baseidx>/mixers/<mixeridx>/keywords/<params>')...\n",
"mixer_list = get_mixer_list(mixeridx, classes)\n",
"start_idx = int((index - 1) * (1.0 / total) * len(mixer_list))\n",
"end_idx = int(index * (1.0 / total) * len(mixer_list))\n",
"mixer_list = mixer_list[start_idx:end_idx]\n",
"print('Mixer Size {}'.format(len(mixer_list)))\n",
"sys.stdout.flush()\n",
"return get_scope(baseidx, params, mixer_list, start, limit)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"ImportFrom'",
"ImportFrom'",
"Import'",
"Import'",
"ImportFrom'",
"Import'",
"Import'",
"Import'",
"Import'",
"ImportFrom'",
"ImportFrom'",
"ImportFrom'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_35(self, VAR_6, VAR_7, VAR_30):...\n",
"if VAR_53.session['admin']:\n",
"if self.userdb.addUser(VAR_6, VAR_7, VAR_30):\n",
"return \"You didn't think that would work, did you?\"\n",
"return 'added new user: %s' % VAR_6\n",
"return 'error, cannot add new user!' % VAR_6\n"
] | [
"def api_adduser(self, username, password, isadmin):...\n",
"if cherrypy.session['admin']:\n",
"if self.userdb.addUser(username, password, isadmin):\n",
"return \"You didn't think that would work, did you?\"\n",
"return 'added new user: %s' % username\n",
"return 'error, cannot add new user!' % username\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Return'",
"Return'",
"Return'"
] |
[
"def FUNC_3(self):...\n",
"VAR_6 = Mock()\n",
"VAR_3 = '@foo:bar'\n",
"VAR_7 = 5000000\n",
"VAR_8 = UserPresenceState.default(VAR_3)\n",
"VAR_8 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE, last_active_ts=\n now - LAST_ACTIVE_GRANULARITY - 1, currently_active=True)\n",
"VAR_9 = VAR_8.copy_and_replace(VAR_10=PresenceState.ONLINE)\n",
"VAR_10, VAR_11, VAR_12 = handle_update(VAR_8, VAR_9, is_mine=True, VAR_6=\n wheel_timer, VAR_7=now)\n",
"self.assertTrue(VAR_11)\n",
"self.assertFalse(VAR_10.currently_active)\n",
"self.assertEquals(VAR_9.state, VAR_10.state)\n",
"self.assertEquals(VAR_9.status_msg, VAR_10.status_msg)\n",
"self.assertEquals(VAR_10.last_federation_update_ts, VAR_7)\n",
"self.assertEquals(VAR_6.insert.call_count, 2)\n",
"VAR_6.insert.assert_has_calls([call(VAR_7=now, obj=user_id, then=new_state.\n last_active_ts + IDLE_TIMER), call(VAR_7=now, obj=user_id, then=\n new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT)], any_order=True)\n"
] | [
"def test_online_to_online_last_active(self):...\n",
"wheel_timer = Mock()\n",
"user_id = '@foo:bar'\n",
"now = 5000000\n",
"prev_state = UserPresenceState.default(user_id)\n",
"prev_state = prev_state.copy_and_replace(state=PresenceState.ONLINE,\n last_active_ts=now - LAST_ACTIVE_GRANULARITY - 1, currently_active=True)\n",
"new_state = prev_state.copy_and_replace(state=PresenceState.ONLINE)\n",
"state, persist_and_notify, federation_ping = handle_update(prev_state,\n new_state, is_mine=True, wheel_timer=wheel_timer, now=now)\n",
"self.assertTrue(persist_and_notify)\n",
"self.assertFalse(state.currently_active)\n",
"self.assertEquals(new_state.state, state.state)\n",
"self.assertEquals(new_state.status_msg, state.status_msg)\n",
"self.assertEquals(state.last_federation_update_ts, now)\n",
"self.assertEquals(wheel_timer.insert.call_count, 2)\n",
"wheel_timer.insert.assert_has_calls([call(now=now, obj=user_id, then=\n new_state.last_active_ts + IDLE_TIMER), call(now=now, obj=user_id, then\n =new_state.last_user_sync_ts + SYNC_ONLINE_TIMEOUT)], any_order=True)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_19 = VAR_2.session['connector'].server_id\n",
"VAR_66 = omero.sys.Parameters()\n",
"VAR_66.map = {'id': rlong(VAR_14)}\n",
"VAR_65 = VAR_8.getQueryService().findByQuery(\n 'select s from Shape s join fetch s.roi where s.id = :id', VAR_66,\n VAR_8.SERVICE_OPTS)\n",
"if VAR_65 is None:\n",
"VAR_36 = VAR_65.roi.image.id.val\n",
"VAR_64 = FUNC_13(VAR_2, VAR_36, VAR_19=server_id, VAR_8=conn)\n",
"if VAR_64 is None:\n",
"VAR_15, VAR_16 = VAR_64\n",
"return FUNC_9(VAR_2, VAR_8, VAR_15, VAR_65, VAR_16)\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"server_id = request.session['connector'].server_id\n",
"params = omero.sys.Parameters()\n",
"params.map = {'id': rlong(shapeId)}\n",
"shape = conn.getQueryService().findByQuery(\n 'select s from Shape s join fetch s.roi where s.id = :id', params, conn\n .SERVICE_OPTS)\n",
"if shape is None:\n",
"imageId = shape.roi.image.id.val\n",
"pi = _get_prepared_image(request, imageId, server_id=server_id, conn=conn)\n",
"if pi is None:\n",
"image, compress_quality = pi\n",
"return get_shape_thumbnail(request, conn, image, shape, compress_quality)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Return'"
] |
[
"@VAR_0.route('/dl/<dl_type>/<path:filename>')...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_11 == 'log':\n",
"return send_from_directory(LOG_PATH, VAR_3, as_attachment=True)\n",
"return '', 204\n"
] | [
"@blueprint.route('/dl/<dl_type>/<path:filename>')...\n",
"\"\"\"docstring\"\"\"\n",
"if dl_type == 'log':\n",
"return send_from_directory(LOG_PATH, filename, as_attachment=True)\n",
"return '', 204\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_4(VAR_1):...\n",
"\"\"\"docstring\"\"\"\n",
"def FUNC_83(VAR_9, *VAR_2, **VAR_3):...\n",
"if GlobalStaff().has_user(VAR_9.user):\n",
"return VAR_1(VAR_9, *VAR_2, **kwargs)\n",
"return HttpResponseForbidden(\n u'Must be {platform_name} staff to perform this action.'.format(\n platform_name=settings.PLATFORM_NAME))\n"
] | [
"def require_global_staff(func):...\n",
"\"\"\"docstring\"\"\"\n",
"def wrapped(request, *args, **kwargs):...\n",
"if GlobalStaff().has_user(request.user):\n",
"return func(request, *args, **kwargs)\n",
"return HttpResponseForbidden(\n u'Must be {platform_name} staff to perform this action.'.format(\n platform_name=settings.PLATFORM_NAME))\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"FunctionDef'",
"Condition",
"Return'",
"Return'"
] |
[
"@FUNC_0...\n",
"if self.config.worker_app:\n",
"return RoomMemberWorkerHandler(self)\n",
"return RoomMemberMasterHandler(self)\n"
] | [
"@cache_in_self...\n",
"if self.config.worker_app:\n",
"return RoomMemberWorkerHandler(self)\n",
"return RoomMemberMasterHandler(self)\n"
] | [
0,
0,
0,
0
] | [
"Condition",
"Condition",
"Return'",
"Return'"
] |
[
"async def FUNC_15(self, VAR_5, VAR_6):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_1 = VAR_6\n",
"VAR_0.debug('on_send_join_request from %s: Got event: %s, signatures: %s',\n VAR_5, VAR_1.event_id, VAR_1.signatures)\n",
"if get_domain_from_id(VAR_1.sender) != VAR_5:\n",
"VAR_0.info('Got /send_join request for user %r from different origin %s',\n VAR_1.sender, VAR_5)\n",
"VAR_1.internal_metadata.outlier = False\n",
"VAR_1.internal_metadata.send_on_behalf_of = VAR_5\n",
"VAR_32 = await self._handle_new_event(VAR_5, VAR_1)\n",
"VAR_0.debug('on_send_join_request: After _handle_new_event: %s, sigs: %s',\n VAR_1.event_id, VAR_1.signatures)\n",
"VAR_94 = await VAR_32.get_prev_state_ids()\n",
"VAR_95 = list(VAR_94.values())\n",
"VAR_63 = await self.store.get_auth_chain(VAR_95)\n",
"VAR_2 = await self.store.get_events(list(VAR_94.values()))\n",
"return {'state': list(VAR_2.values()), 'auth_chain': VAR_63}\n"
] | [
"async def on_send_join_request(self, origin, pdu):...\n",
"\"\"\"docstring\"\"\"\n",
"event = pdu\n",
"logger.debug('on_send_join_request from %s: Got event: %s, signatures: %s',\n origin, event.event_id, event.signatures)\n",
"if get_domain_from_id(event.sender) != origin:\n",
"logger.info('Got /send_join request for user %r from different origin %s',\n event.sender, origin)\n",
"event.internal_metadata.outlier = False\n",
"event.internal_metadata.send_on_behalf_of = origin\n",
"context = await self._handle_new_event(origin, event)\n",
"logger.debug('on_send_join_request: After _handle_new_event: %s, sigs: %s',\n event.event_id, event.signatures)\n",
"prev_state_ids = await context.get_prev_state_ids()\n",
"state_ids = list(prev_state_ids.values())\n",
"auth_chain = await self.store.get_auth_chain(state_ids)\n",
"state = await self.store.get_events(list(prev_state_ids.values()))\n",
"return {'state': list(state.values()), 'auth_chain': auth_chain}\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Docstring",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_17(self, VAR_16):...\n",
"if not VAR_53.session['admin']:\n",
"FUNC_0()\n",
"VAR_64 = albumartfetcher.AlbumArtFetcher()\n",
"VAR_65 = VAR_64.fetchurls(VAR_16)\n",
"return VAR_65[:min(len(VAR_65), 10)]\n"
] | [
"def api_fetchalbumarturls(self, searchterm):...\n",
"if not cherrypy.session['admin']:\n",
"_save_and_release_session()\n",
"fetcher = albumartfetcher.AlbumArtFetcher()\n",
"imgurls = fetcher.fetchurls(searchterm)\n",
"return imgurls[:min(len(imgurls), 10)]\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_101(VAR_6):...\n",
"return '<b>{0}</b>'.format(VAR_6)\n"
] | [
"def bold(text):...\n",
"return '<b>{0}</b>'.format(text)\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_11(VAR_18, VAR_19):...\n",
"VAR_48 = []\n",
"VAR_49 = []\n",
"if VAR_19 == 'input':\n",
"for cls in VAR_18:\n",
"for cls in VAR_18:\n",
"VAR_92 = inspect.getdoc(cls.preprocess)\n",
"return VAR_48, VAR_49\n",
"VAR_92 = inspect.getdoc(cls.postprocess)\n",
"VAR_93 = VAR_92.split('\\n')\n",
"VAR_93 = VAR_92.split('\\n')\n",
"VAR_48.append(VAR_93[1].split(':')[-1])\n",
"VAR_48.append(VAR_93[-1].split(':')[-1])\n",
"VAR_49.append(VAR_93[1].split(')')[0].split('(')[-1])\n",
"VAR_49.append(VAR_93[-1].split(')')[0].split('(')[-1])\n"
] | [
"def get_types(cls_set, component):...\n",
"docset = []\n",
"types = []\n",
"if component == 'input':\n",
"for cls in cls_set:\n",
"for cls in cls_set:\n",
"doc = inspect.getdoc(cls.preprocess)\n",
"return docset, types\n",
"doc = inspect.getdoc(cls.postprocess)\n",
"doc_lines = doc.split('\\n')\n",
"doc_lines = doc.split('\\n')\n",
"docset.append(doc_lines[1].split(':')[-1])\n",
"docset.append(doc_lines[-1].split(':')[-1])\n",
"types.append(doc_lines[1].split(')')[0].split('(')[-1])\n",
"types.append(doc_lines[-1].split(')')[0].split('(')[-1])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Condition",
"For",
"For",
"Assign'",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_43(self, VAR_124, VAR_132):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_222 = self._get_login_settings()\n",
"VAR_141 = VAR_222.table_user(**{settings.userfield: username})\n",
"if VAR_141 and VAR_141.get(VAR_222.passfield, False):\n",
"VAR_132 = VAR_222.table_user[VAR_222.passfield].validate(VAR_132)[0]\n",
"for login_method in self.settings.login_methods:\n",
"if (VAR_141.registration_key is None or not VAR_141.registration_key.strip()\n",
"if login_method != self and login_method(VAR_124, VAR_132):\n",
"return False\n",
"self.login_user(VAR_141)\n",
"self.user = VAR_141\n",
"return VAR_141\n",
"return VAR_141\n"
] | [
"def login_bare(self, username, password):...\n",
"\"\"\"docstring\"\"\"\n",
"settings = self._get_login_settings()\n",
"user = settings.table_user(**{settings.userfield: username})\n",
"if user and user.get(settings.passfield, False):\n",
"password = settings.table_user[settings.passfield].validate(password)[0]\n",
"for login_method in self.settings.login_methods:\n",
"if (user.registration_key is None or not user.registration_key.strip()\n",
"if login_method != self and login_method(username, password):\n",
"return False\n",
"self.login_user(user)\n",
"self.user = user\n",
"return user\n",
"return user\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"For",
"Condition",
"Condition",
"Return'",
"Expr'",
"Assign'",
"Return'",
"Return'"
] |
[
"def FUNC_30(VAR_91):...\n",
""
] | [
"def get_domain_data(module):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_159(VAR_82):...\n",
"VAR_366 = to_native(TAG['cas:serviceResponse'](VAR_82, **{'_xmlns:cas':\n 'http://www.yale.edu/tp/cas'}).xml())\n",
"return '<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n' + VAR_366\n"
] | [
"def build_response(body):...\n",
"xml_body = to_native(TAG['cas:serviceResponse'](body, **{'_xmlns:cas':\n 'http://www.yale.edu/tp/cas'}).xml())\n",
"return '<?xml version=\"1.0\" encoding=\"UTF-8\"?>\\n' + xml_body\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Return'"
] |
[
"def __ne__(self, VAR_8) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"return not self == VAR_8\n"
] | [
"def __ne__(self, other) ->bool:...\n",
"\"\"\"docstring\"\"\"\n",
"return not self == other\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def FUNC_37():...\n",
"if callable(VAR_63):\n",
"VAR_33 = FUNC_21()\n",
"VAR_33 = VAR_63()\n",
"VAR_0.exception('Error while trying to retrieve tracked files for plugin {}'\n .format(VAR_8))\n",
"VAR_33 += FUNC_22()\n",
"if VAR_33:\n",
"VAR_33 += FUNC_23(g.locale.language if g.locale else 'en', 'messages')\n",
"return VAR_33\n",
"if callable(VAR_61):\n",
"return sorted(set(VAR_33))\n",
"VAR_142 = VAR_61()\n",
"VAR_0.exception(\n 'Error while trying to retrieve additional tracked files for plugin {}'\n .format(VAR_8))\n",
"if VAR_142:\n",
"VAR_33 += VAR_142\n"
] | [
"def collect_files():...\n",
"if callable(custom_files):\n",
"files = _get_all_templates()\n",
"files = custom_files()\n",
"_logger.exception('Error while trying to retrieve tracked files for plugin {}'\n .format(key))\n",
"files += _get_all_assets()\n",
"if files:\n",
"files += _get_all_translationfiles(g.locale.language if g.locale else 'en',\n 'messages')\n",
"return files\n",
"if callable(additional_files):\n",
"return sorted(set(files))\n",
"af = additional_files()\n",
"_logger.exception(\n 'Error while trying to retrieve additional tracked files for plugin {}'\n .format(key))\n",
"if af:\n",
"files += af\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"AugAssign'",
"Condition",
"AugAssign'",
"Return'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Condition",
"AugAssign'"
] |
[
"def FUNC_7(VAR_0, VAR_5, VAR_6, VAR_3=False):...\n",
"VAR_15 = current_user.kobo_only_shelves_sync\n",
"if request.method == 'POST':\n",
"VAR_20 = request.form.to_dict()\n",
"return render_title_template('shelf_edit.html', VAR_0=shelf, VAR_7=\n page_title, VAR_6=page, kobo_sync_enabled=config.config_kobo_sync,\n VAR_15=sync_only_selected_shelves)\n",
"if not current_user.role_edit_shelfs() and VAR_20.get('is_public') == 'on':\n",
"flash(_(u'Sorry you are not allowed to create a public shelf'), category=\n 'error')\n",
"VAR_0.is_public = 1 if VAR_20.get('is_public') else 0\n",
"return redirect(url_for('web.index'))\n",
"if config.config_kobo_sync:\n",
"VAR_0.kobo_sync = True if VAR_20.get('kobo_sync') else False\n",
"VAR_21 = VAR_20.get('title', '')\n",
"if FUNC_8(VAR_0, VAR_21, VAR_3):\n",
"VAR_0.name = VAR_21\n",
"if not VAR_3:\n",
"VAR_0.user_id = int(current_user.id)\n",
"VAR_31 = 'changed'\n",
"ub.session.add(VAR_0)\n",
"VAR_32 = _(u'Shelf %(title)s changed', VAR_7=shelf_title)\n",
"VAR_31 = 'created'\n",
"ub.session.commit()\n",
"ub.session.rollback()\n",
"VAR_32 = _(u'Shelf %(title)s created', VAR_7=shelf_title)\n",
"VAR_1.info(u'Shelf {} {}'.format(VAR_21, VAR_31))\n",
"VAR_1.debug_or_exception(ex)\n",
"flash(VAR_32, category='success')\n",
"VAR_1.error('Settings DB is not Writeable')\n",
"return redirect(url_for('shelf.show_shelf', VAR_3=shelf.id))\n",
"flash(_('Settings DB is not Writeable'), category='error')\n",
"ub.session.rollback()\n",
"VAR_1.debug_or_exception(ex)\n",
"flash(_(u'There was an error'), category='error')\n"
] | [
"def create_edit_shelf(shelf, page_title, page, shelf_id=False):...\n",
"sync_only_selected_shelves = current_user.kobo_only_shelves_sync\n",
"if request.method == 'POST':\n",
"to_save = request.form.to_dict()\n",
"return render_title_template('shelf_edit.html', shelf=shelf, title=\n page_title, page=page, kobo_sync_enabled=config.config_kobo_sync,\n sync_only_selected_shelves=sync_only_selected_shelves)\n",
"if not current_user.role_edit_shelfs() and to_save.get('is_public') == 'on':\n",
"flash(_(u'Sorry you are not allowed to create a public shelf'), category=\n 'error')\n",
"shelf.is_public = 1 if to_save.get('is_public') else 0\n",
"return redirect(url_for('web.index'))\n",
"if config.config_kobo_sync:\n",
"shelf.kobo_sync = True if to_save.get('kobo_sync') else False\n",
"shelf_title = to_save.get('title', '')\n",
"if check_shelf_is_unique(shelf, shelf_title, shelf_id):\n",
"shelf.name = shelf_title\n",
"if not shelf_id:\n",
"shelf.user_id = int(current_user.id)\n",
"shelf_action = 'changed'\n",
"ub.session.add(shelf)\n",
"flash_text = _(u'Shelf %(title)s changed', title=shelf_title)\n",
"shelf_action = 'created'\n",
"ub.session.commit()\n",
"ub.session.rollback()\n",
"flash_text = _(u'Shelf %(title)s created', title=shelf_title)\n",
"log.info(u'Shelf {} {}'.format(shelf_title, shelf_action))\n",
"log.debug_or_exception(ex)\n",
"flash(flash_text, category='success')\n",
"log.error('Settings DB is not Writeable')\n",
"return redirect(url_for('shelf.show_shelf', shelf_id=shelf.id))\n",
"flash(_('Settings DB is not Writeable'), category='error')\n",
"ub.session.rollback()\n",
"log.debug_or_exception(ex)\n",
"flash(_(u'There was an error'), category='error')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Assign'",
"Return'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"@VAR_1.route('/register', methods=['POST'])...\n",
"VAR_6 = request.form['vId']\n",
"if VAR_6 == '':\n",
"VAR_6 = utils.generateToken(5)\n",
"VAR_7 = victim(VAR_6, request.environ['REMOTE_ADDR'], request.user_agent.\n platform, request.user_agent.browser, request.user_agent.version, utils\n .portScanner(request.environ['REMOTE_ADDR']), request.form['cpu'], time\n .strftime('%Y-%m-%d - %H:%M:%S'))\n",
"VAR_8 = victim_geo(VAR_6, 'city', request.form['countryCode'], request.form\n ['country'], request.form['query'], request.form['lat'], request.form[\n 'lon'], request.form['org'], request.form['region'], request.form[\n 'regionName'], request.form['timezone'], request.form['zip'], request.\n form['isp'], str(request.user_agent))\n",
"utils.Go(utils.Color['white'] + '[' + utils.Color['blueBold'] + '*' + utils\n .Color['white'] + ']' + ' A victim has been connected from ' + utils.\n Color['blue'] + VAR_8.ip + utils.Color['white'] +\n ' with the following identifier: ' + utils.Color['green'] + VAR_6 +\n utils.Color['white'])\n",
"VAR_9 = int(VAR_2.sentences_victim('count_times', VAR_6, 3, 0))\n",
"VAR_2.sentences_victim('insert_click', [VAR_6, VAR_0.url_to_clone, time.\n strftime('%Y-%m-%d - %H:%M:%S')], 2)\n",
"VAR_2.sentences_victim('delete_networks', [VAR_6], 2)\n",
"if VAR_9 > 0:\n",
"utils.Go(utils.Color['white'] + '[' + utils.Color['blueBold'] + '*' + utils\n .Color['white'] + ']' + ' ' + \"It's his \" + str(VAR_9 + 1) + ' time')\n",
"utils.Go(utils.Color['white'] + '[' + utils.Color['blueBold'] + '*' + utils\n .Color['white'] + ']' + ' ' + \"It's his first time\")\n",
"VAR_2.sentences_victim('update_victim', [VAR_7, VAR_6, time.time()], 2)\n",
"VAR_2.sentences_victim('insert_victim', [VAR_7, VAR_6, time.time()], 2)\n",
"VAR_2.sentences_victim('update_victim_geo', [VAR_8, VAR_6], 2)\n",
"VAR_2.sentences_victim('insert_victim_geo', [VAR_8, VAR_6], 2)\n",
"return json.dumps({'status': 'OK', 'vId': VAR_6})\n"
] | [
"@app.route('/register', methods=['POST'])...\n",
"vId = request.form['vId']\n",
"if vId == '':\n",
"vId = utils.generateToken(5)\n",
"victimConnect = victim(vId, request.environ['REMOTE_ADDR'], request.\n user_agent.platform, request.user_agent.browser, request.user_agent.\n version, utils.portScanner(request.environ['REMOTE_ADDR']), request.\n form['cpu'], time.strftime('%Y-%m-%d - %H:%M:%S'))\n",
"victimGeo = victim_geo(vId, 'city', request.form['countryCode'], request.\n form['country'], request.form['query'], request.form['lat'], request.\n form['lon'], request.form['org'], request.form['region'], request.form[\n 'regionName'], request.form['timezone'], request.form['zip'], request.\n form['isp'], str(request.user_agent))\n",
"utils.Go(utils.Color['white'] + '[' + utils.Color['blueBold'] + '*' + utils\n .Color['white'] + ']' + ' A victim has been connected from ' + utils.\n Color['blue'] + victimGeo.ip + utils.Color['white'] +\n ' with the following identifier: ' + utils.Color['green'] + vId + utils\n .Color['white'])\n",
"cant = int(db.sentences_victim('count_times', vId, 3, 0))\n",
"db.sentences_victim('insert_click', [vId, trape.url_to_clone, time.strftime\n ('%Y-%m-%d - %H:%M:%S')], 2)\n",
"db.sentences_victim('delete_networks', [vId], 2)\n",
"if cant > 0:\n",
"utils.Go(utils.Color['white'] + '[' + utils.Color['blueBold'] + '*' + utils\n .Color['white'] + ']' + ' ' + \"It's his \" + str(cant + 1) + ' time')\n",
"utils.Go(utils.Color['white'] + '[' + utils.Color['blueBold'] + '*' + utils\n .Color['white'] + ']' + ' ' + \"It's his first time\")\n",
"db.sentences_victim('update_victim', [victimConnect, vId, time.time()], 2)\n",
"db.sentences_victim('insert_victim', [victimConnect, vId, time.time()], 2)\n",
"db.sentences_victim('update_victim_geo', [victimGeo, vId], 2)\n",
"db.sentences_victim('insert_victim_geo', [victimGeo, vId], 2)\n",
"return json.dumps({'status': 'OK', 'vId': vId})\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Condition",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Return'"
] |
[
"def FUNC_8() ->Any:...\n",
"\"\"\"docstring\"\"\"\n",
"return get_translations()._catalog\n"
] | [
"def translations() ->Any:...\n",
"\"\"\"docstring\"\"\"\n",
"return get_translations()._catalog\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Return'"
] |
[
"def __init__(self, *VAR_17, **VAR_18):...\n",
"super().__init__(*VAR_17, **kwargs)\n",
"self.quoted_namespace, VAR_10 = self.spec.split('/', 1)\n",
"self.namespace = urllib.parse.unquote(self.quoted_namespace)\n",
"self.unresolved_ref = urllib.parse.unquote(VAR_10)\n",
"if not self.unresolved_ref:\n"
] | [
"def __init__(self, *args, **kwargs):...\n",
"super().__init__(*args, **kwargs)\n",
"self.quoted_namespace, unresolved_ref = self.spec.split('/', 1)\n",
"self.namespace = urllib.parse.unquote(self.quoted_namespace)\n",
"self.unresolved_ref = urllib.parse.unquote(unresolved_ref)\n",
"if not self.unresolved_ref:\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Condition"
] |
[
"def FUNC_12(VAR_16, VAR_18, VAR_19, VAR_15):...\n",
"if VAR_16:\n",
"if VAR_18:\n",
"if VAR_18:\n",
"return json.dumps([VAR_19, {'location': url_for('editbook.edit_book',\n VAR_15=book_id), 'type': 'success', 'format': VAR_16, 'message': _(\n 'Book Format Successfully Deleted')}])\n",
"flash(_('Book Format Successfully Deleted'), category='success')\n",
"return json.dumps([VAR_19, {'location': url_for('web.index'), 'type':\n 'success', 'format': VAR_16, 'message': _('Book Successfully Deleted')}])\n",
"flash(_('Book Successfully Deleted'), category='success')\n",
"return redirect(url_for('editbook.edit_book', VAR_15=book_id))\n",
"return redirect(url_for('web.index'))\n"
] | [
"def render_delete_book_result(book_format, jsonResponse, warning, book_id):...\n",
"if book_format:\n",
"if jsonResponse:\n",
"if jsonResponse:\n",
"return json.dumps([warning, {'location': url_for('editbook.edit_book',\n book_id=book_id), 'type': 'success', 'format': book_format, 'message':\n _('Book Format Successfully Deleted')}])\n",
"flash(_('Book Format Successfully Deleted'), category='success')\n",
"return json.dumps([warning, {'location': url_for('web.index'), 'type':\n 'success', 'format': book_format, 'message': _(\n 'Book Successfully Deleted')}])\n",
"flash(_('Book Successfully Deleted'), category='success')\n",
"return redirect(url_for('editbook.edit_book', book_id=book_id))\n",
"return redirect(url_for('web.index'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Condition",
"Condition",
"Return'",
"Expr'",
"Return'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_63():...\n",
"VAR_81 = 1024 * 20\n",
"while True:\n",
"VAR_3.sleep(0.01)\n",
"if VAR_0.config['FD']:\n",
"VAR_127 = 0\n",
"VAR_128, VAR_129, VAR_129 = select.select([VAR_0.config['FD']], [], [], VAR_127\n )\n",
"if VAR_128:\n",
"VAR_131 = os.read(VAR_0.config['FD'], VAR_81).decode()\n",
"VAR_3.emit('pty-output', {'output': VAR_131}, namespace='/pty')\n"
] | [
"def read_and_forward_pty_output():...\n",
"max_read_bytes = 1024 * 20\n",
"while True:\n",
"socketio.sleep(0.01)\n",
"if gui.config['FD']:\n",
"timeout_sec = 0\n",
"data_ready, _, _ = select.select([gui.config['FD']], [], [], timeout_sec)\n",
"if data_ready:\n",
"output = os.read(gui.config['FD'], max_read_bytes).decode()\n",
"socketio.emit('pty-output', {'output': output}, namespace='/pty')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Expr'"
] |
[
"@app.route('/')...\n",
"VAR_7 = request.args.get('path', '').lstrip('/')\n",
"VAR_26 = data.get_items(VAR_7=path)\n",
"flash('Directory does not exist.', 'error')\n",
"return render_template('home.html', title=path or 'root', search_enabled=\n app.config['SEARCH_CONF']['enabled'], dir=files, current_path=path,\n new_folder_form=forms.NewFolderForm(), delete_form=forms.\n DeleteFolderForm(), rename_form=forms.RenameDirectoryForm(), view_only=\n 0, search_engine=app.config['SEARCH_CONF']['engine'])\n",
"return redirect('/')\n"
] | [
"@app.route('/')...\n",
"path = request.args.get('path', '').lstrip('/')\n",
"files = data.get_items(path=path)\n",
"flash('Directory does not exist.', 'error')\n",
"return render_template('home.html', title=path or 'root', search_enabled=\n app.config['SEARCH_CONF']['enabled'], dir=files, current_path=path,\n new_folder_form=forms.NewFolderForm(), delete_form=forms.\n DeleteFolderForm(), rename_form=forms.RenameDirectoryForm(), view_only=\n 0, search_engine=app.config['SEARCH_CONF']['engine'])\n",
"return redirect('/')\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Expr'",
"Return'",
"Return'"
] |
[
"def FUNC_30(self):...\n",
"VAR_32 = '/rooms/%s/send/m.room.message/mid1' % urlparse.quote(self.room_id)\n",
"VAR_19 = b'{\"body\":\"test\",\"msgtype\":{\"type\":\"a\"}}'\n",
"VAR_22, VAR_23 = self.make_request('PUT', VAR_32, VAR_19)\n",
"self.assertEquals(400, VAR_23.code, msg=channel.result['body'])\n",
"VAR_19 = b'{\"body\":\"test\",\"msgtype\":\"test.custom.text\"}'\n",
"VAR_22, VAR_23 = self.make_request('PUT', VAR_32, VAR_19)\n",
"self.assertEquals(200, VAR_23.code, msg=channel.result['body'])\n",
"VAR_32 = '/rooms/%s/send/m.room.message/mid2' % urlparse.quote(self.room_id)\n",
"VAR_19 = b'{\"body\":\"test2\",\"msgtype\":\"m.text\"}'\n",
"VAR_22, VAR_23 = self.make_request('PUT', VAR_32, VAR_19)\n",
"self.assertEquals(200, VAR_23.code, msg=channel.result['body'])\n"
] | [
"def test_rooms_messages_sent(self):...\n",
"path = '/rooms/%s/send/m.room.message/mid1' % urlparse.quote(self.room_id)\n",
"content = b'{\"body\":\"test\",\"msgtype\":{\"type\":\"a\"}}'\n",
"request, channel = self.make_request('PUT', path, content)\n",
"self.assertEquals(400, channel.code, msg=channel.result['body'])\n",
"content = b'{\"body\":\"test\",\"msgtype\":\"test.custom.text\"}'\n",
"request, channel = self.make_request('PUT', path, content)\n",
"self.assertEquals(200, channel.code, msg=channel.result['body'])\n",
"path = '/rooms/%s/send/m.room.message/mid2' % urlparse.quote(self.room_id)\n",
"content = b'{\"body\":\"test2\",\"msgtype\":\"m.text\"}'\n",
"request, channel = self.make_request('PUT', path, content)\n",
"self.assertEquals(200, channel.code, msg=channel.result['body'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Expr'",
"Assign'",
"Assign'",
"Assign'",
"Expr'"
] |
[
"async def FUNC_6(self, VAR_4: str, VAR_15: str, VAR_6: str, VAR_12: int,...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_45 = {'email': VAR_15, 'client_secret': VAR_6, 'send_attempt': VAR_12}\n",
"if VAR_14:\n",
"VAR_45['next_link'] = VAR_14\n",
"if self.hs.config.using_identity_server_from_trusted_list:\n",
"VAR_0.warning('string')\n",
"VAR_46 = await self.http_client.post_json_get_json(VAR_4 +\n '/_matrix/identity/api/v1/validate/email/requestToken', VAR_45)\n",
"VAR_0.info('Proxied requestToken failed: %r', e)\n",
"return VAR_46\n"
] | [
"async def requestEmailToken(self, id_server: str, email: str, client_secret:...\n",
"\"\"\"docstring\"\"\"\n",
"params = {'email': email, 'client_secret': client_secret, 'send_attempt':\n send_attempt}\n",
"if next_link:\n",
"params['next_link'] = next_link\n",
"if self.hs.config.using_identity_server_from_trusted_list:\n",
"logger.warning(\n 'The config option \"trust_identity_server_for_password_resets\" has been replaced by \"account_threepid_delegate\". Please consult the sample config at docs/sample_config.yaml for details and update your config file.'\n )\n",
"data = await self.http_client.post_json_get_json(id_server +\n '/_matrix/identity/api/v1/validate/email/requestToken', params)\n",
"logger.info('Proxied requestToken failed: %r', e)\n",
"return data\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_6(self, VAR_16, VAR_15):...\n",
"if VAR_16.direction != 'rtl':\n",
"return ''\n",
"VAR_103 = f'rtl-{VAR_15}'\n",
"VAR_104 = [VAR_1.format('direction-toggle active', gettext(\n 'Toggle text direction'), VAR_103, 'rtl', 'checked=\"checked\"', 'RTL'),\n VAR_1.format('direction-toggle', gettext('Toggle text direction'),\n VAR_103, 'ltr', '', 'LTR')]\n",
"VAR_101 = [VAR_2.format('data-toggle=\"buttons\"', '\\n'.join(VAR_104))]\n",
"return mark_safe(VAR_3.format('\\n'.join(VAR_101)))\n"
] | [
"def get_rtl_toggle(self, language, fieldname):...\n",
"if language.direction != 'rtl':\n",
"return ''\n",
"rtl_name = f'rtl-{fieldname}'\n",
"rtl_switch = [RADIO_TEMPLATE.format('direction-toggle active', gettext(\n 'Toggle text direction'), rtl_name, 'rtl', 'checked=\"checked\"', 'RTL'),\n RADIO_TEMPLATE.format('direction-toggle', gettext(\n 'Toggle text direction'), rtl_name, 'ltr', '', 'LTR')]\n",
"groups = [GROUP_TEMPLATE.format('data-toggle=\"buttons\"', '\\n'.join(rtl_switch))\n ]\n",
"return mark_safe(TOOLBAR_TEMPLATE.format('\\n'.join(groups)))\n"
] | [
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_131 = VAR_8.getObject('Project', VAR_32)\n",
"if VAR_131 is None:\n",
"return HttpJavascriptResponse('[]')\n",
"return [VAR_30.simpleMarshal(VAR_121={'childCount': 0}) for VAR_30 in\n VAR_131.listChildren()]\n"
] | [
"@login_required()...\n",
"\"\"\"docstring\"\"\"\n",
"project = conn.getObject('Project', pid)\n",
"if project is None:\n",
"return HttpJavascriptResponse('[]')\n",
"return [x.simpleMarshal(xtra={'childCount': 0}) for x in project.listChildren()\n ]\n"
] | [
0,
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Assign'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_0():...\n",
"assert utils.snake_case('HTTP') == 'http'\n",
"assert utils.snake_case('HTTP RESPONSE') == 'http_response'\n"
] | [
"def test_snake_case_uppercase_str():...\n",
"assert utils.snake_case('HTTP') == 'http'\n",
"assert utils.snake_case('HTTP RESPONSE') == 'http_response'\n"
] | [
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Assert'"
] |
[
"def FUNC_16(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': self.test_alias, 'alt_aliases': [self.\n test_alias]})\n",
"VAR_14 = self._get_canonical_alias()\n",
"self.assertEqual(VAR_14['content']['alias'], self.test_alias)\n",
"self.assertEqual(VAR_14['content']['alt_aliases'], [self.test_alias])\n",
"self.get_success(self.handler.delete_association(create_requester(self.\n admin_user), self.room_alias))\n",
"VAR_14 = self._get_canonical_alias()\n",
"self.assertNotIn('alias', VAR_14['content'])\n",
"self.assertNotIn('alt_aliases', VAR_14['content'])\n"
] | [
"def test_remove_alias(self):...\n",
"\"\"\"docstring\"\"\"\n",
"self._set_canonical_alias({'alias': self.test_alias, 'alt_aliases': [self.\n test_alias]})\n",
"data = self._get_canonical_alias()\n",
"self.assertEqual(data['content']['alias'], self.test_alias)\n",
"self.assertEqual(data['content']['alt_aliases'], [self.test_alias])\n",
"self.get_success(self.handler.delete_association(create_requester(self.\n admin_user), self.room_alias))\n",
"data = self._get_canonical_alias()\n",
"self.assertNotIn('alias', data['content'])\n",
"self.assertNotIn('alt_aliases', data['content'])\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Expr'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Assign'",
"Expr'",
"Expr'"
] |
[
"def FUNC_14(VAR_17):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_39 = {}\n",
"for input_raw in filter(bool, VAR_17.split(';')):\n",
"if '=' not in VAR_17:\n",
"return VAR_39\n",
"VAR_68, VAR_69 = input_raw.split('=', 1)\n",
"VAR_39[VAR_68] = eval(VAR_69)\n"
] | [
"def preprocess_input_exprs_arg_string(input_exprs_str):...\n",
"\"\"\"docstring\"\"\"\n",
"input_dict = {}\n",
"for input_raw in filter(bool, input_exprs_str.split(';')):\n",
"if '=' not in input_exprs_str:\n",
"return input_dict\n",
"input_key, expr = input_raw.split('=', 1)\n",
"input_dict[input_key] = eval(expr)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"For",
"Condition",
"Return'",
"Assign'",
"Assign'"
] |
[
"def FUNC_48(self, VAR_112, VAR_139):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.is_logged_in():\n",
"if VAR_112 == VAR_139._auth_next:\n",
"redirect(VAR_112, client_side=self.settings.client_side)\n"
] | [
"def when_is_logged_in_bypass_next_in_url(self, next, session):...\n",
"\"\"\"docstring\"\"\"\n",
"if self.is_logged_in():\n",
"if next == session._auth_next:\n",
"redirect(next, client_side=self.settings.client_side)\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Condition",
"Expr'"
] |
[
"@VAR_2.route('/ajax/bookmark/<int:book_id>/<book_format>', methods=['POST'])...\n",
"VAR_52 = request.form['bookmark']\n",
"ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(VAR_87\n .id), ub.Bookmark.book_id == VAR_5, ub.Bookmark.format == VAR_6)).delete()\n",
"if not VAR_52:\n",
"ub.session_commit()\n",
"VAR_53 = ub.Bookmark(VAR_11=current_user.id, VAR_5=book_id, format=\n book_format, VAR_52=bookmark_key)\n",
"return '', 204\n",
"ub.session.merge(VAR_53)\n",
"ub.session_commit('Bookmark for user {} in book {} created'.format(VAR_87.\n id, VAR_5))\n",
"return '', 201\n"
] | [
"@web.route('/ajax/bookmark/<int:book_id>/<book_format>', methods=['POST'])...\n",
"bookmark_key = request.form['bookmark']\n",
"ub.session.query(ub.Bookmark).filter(and_(ub.Bookmark.user_id == int(\n current_user.id), ub.Bookmark.book_id == book_id, ub.Bookmark.format ==\n book_format)).delete()\n",
"if not bookmark_key:\n",
"ub.session_commit()\n",
"lbookmark = ub.Bookmark(user_id=current_user.id, book_id=book_id, format=\n book_format, bookmark_key=bookmark_key)\n",
"return '', 204\n",
"ub.session.merge(lbookmark)\n",
"ub.session_commit('Bookmark for user {} in book {} created'.format(\n current_user.id, book_id))\n",
"return '', 201\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"For",
"Assign'",
"Expr'",
"Condition",
"Expr'",
"Assign'",
"Return'",
"Expr'",
"Expr'",
"Return'"
] |
[
"@VAR_0.filter...\n",
"\"\"\"docstring\"\"\"\n",
"if len(VAR_32) > 160 and ',' in VAR_32:\n",
"VAR_32 = mark_safe('<br> ' + ', <br>'.join(VAR_32.split(',')))\n",
"return VAR_32\n"
] | [
"@register.filter...\n",
"\"\"\"docstring\"\"\"\n",
"if len(header) > 160 and ',' in header:\n",
"header = mark_safe('<br> ' + ', <br>'.join(header.split(',')))\n",
"return header\n"
] | [
0,
0,
0,
0,
0
] | [
"Condition",
"Docstring",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_2(self, VAR_23):...\n",
"assert isinstance(VAR_23, text_type)\n",
"if self.encoding:\n",
"return VAR_23.encode(self.encoding, VAR_3)\n",
"return VAR_23\n"
] | [
"def encode(self, string):...\n",
"assert isinstance(string, text_type)\n",
"if self.encoding:\n",
"return string.encode(self.encoding, unicode_encode_errors)\n",
"return string\n"
] | [
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Assert'",
"Condition",
"Return'",
"Return'"
] |
[
"def FUNC_7(VAR_12, VAR_1):...\n",
""
] | [
"def migrate_registration_table(engine, session):...\n",
""
] | [
0,
0
] | [
"FunctionDef'",
"Condition"
] |
[
"def FUNC_3(VAR_3):...\n",
"if VAR_25.session['user'] == 'Guest':\n",
"if VAR_3 not in VAR_25.guest_methods:\n",
"if not VAR_3 in VAR_25.whitelisted:\n",
"VAR_25.throw(_('Not permitted'), VAR_25.PermissionError)\n",
"if VAR_3 not in VAR_25.xss_safe_methods:\n",
"VAR_25.throw(_('Not permitted'), VAR_25.PermissionError)\n",
"for VAR_29, value in VAR_25.form_dict.items():\n",
"if isinstance(value, string_types):\n",
"VAR_25.form_dict[VAR_29] = VAR_25.utils.sanitize_html(value)\n"
] | [
"def is_whitelisted(method):...\n",
"if frappe.session['user'] == 'Guest':\n",
"if method not in frappe.guest_methods:\n",
"if not method in frappe.whitelisted:\n",
"frappe.throw(_('Not permitted'), frappe.PermissionError)\n",
"if method not in frappe.xss_safe_methods:\n",
"frappe.throw(_('Not permitted'), frappe.PermissionError)\n",
"for key, value in frappe.form_dict.items():\n",
"if isinstance(value, string_types):\n",
"frappe.form_dict[key] = frappe.utils.sanitize_html(value)\n"
] | [
0,
2,
2,
2,
2,
2,
2,
2,
2,
2
] | [
"FunctionDef'",
"Condition",
"Condition",
"Condition",
"Expr'",
"Condition",
"Expr'",
"For",
"Condition",
"Assign'"
] |
[
"@FUNC_0...\n",
"return FederationServer(self)\n"
] | [
"@cache_in_self...\n",
"return FederationServer(self)\n"
] | [
0,
0
] | [
"Condition",
"Return'"
] |
[
"def FUNC_49(VAR_21, VAR_22, VAR_23):...\n",
"if VAR_87.filter_language() != 'all':\n",
"VAR_21 = VAR_21.filter(db.Books.languages.any(db.Languages.lang_code ==\n VAR_87.filter_language()))\n",
"for language in VAR_22:\n",
"return VAR_21\n",
"VAR_21 = VAR_21.filter(db.Books.languages.any(db.Languages.id == language))\n",
"for language in VAR_23:\n",
"VAR_21 = VAR_21.filter(not_(db.Books.series.any(db.Languages.id == language)))\n"
] | [
"def adv_search_language(q, include_languages_inputs, exclude_languages_inputs):...\n",
"if current_user.filter_language() != 'all':\n",
"q = q.filter(db.Books.languages.any(db.Languages.lang_code == current_user.\n filter_language()))\n",
"for language in include_languages_inputs:\n",
"return q\n",
"q = q.filter(db.Books.languages.any(db.Languages.id == language))\n",
"for language in exclude_languages_inputs:\n",
"q = q.filter(not_(db.Books.series.any(db.Languages.id == language)))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Assign'",
"For",
"Return'",
"Assign'",
"For",
"Assign'"
] |
[
"def FUNC_158(VAR_7, VAR_135=VAR_135):...\n",
"if VAR_135 is not VAR_3:\n",
"VAR_135(VAR_7)\n",
"return FUNC_157(VAR_262=True)\n"
] | [
"def cas_onaccept(form, onaccept=onaccept):...\n",
"if onaccept is not DEFAULT:\n",
"onaccept(form)\n",
"return allow_access(interactivelogin=True)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Expr'",
"Return'"
] |
[
"@VAR_1.route('/get_preview', methods=['POST'])...\n",
"VAR_10 = request.form['vId']\n",
"VAR_4 = VAR_2.sentences_stats('get_preview', VAR_10)\n",
"VAR_5 = VAR_2.sentences_stats('id_networks', VAR_10)\n",
"return json.dumps({'status': 'OK', 'vId': VAR_10, 'd': VAR_4, 'n': VAR_5})\n"
] | [
"@app.route('/get_preview', methods=['POST'])...\n",
"vId = request.form['vId']\n",
"d = db.sentences_stats('get_preview', vId)\n",
"n = db.sentences_stats('id_networks', vId)\n",
"return json.dumps({'status': 'OK', 'vId': vId, 'd': d, 'n': n})\n"
] | [
0,
0,
2,
2,
0
] | [
"Condition",
"Assign'",
"Assign'",
"Assign'",
"Return'"
] |
[
"def FUNC_7(VAR_2, VAR_8=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if VAR_8 is None:\n",
"VAR_8 = VAR_2.GET.get('active_group')\n",
"VAR_8 = int(VAR_8)\n",
"if 'active_group' not in VAR_2.session or VAR_8 != VAR_2.session['active_group'\n",
"VAR_2.session.modified = True\n",
"VAR_2.session['active_group'] = VAR_8\n"
] | [
"def switch_active_group(request, active_group=None):...\n",
"\"\"\"docstring\"\"\"\n",
"if active_group is None:\n",
"active_group = request.GET.get('active_group')\n",
"active_group = int(active_group)\n",
"if 'active_group' not in request.session or active_group != request.session[\n",
"request.session.modified = True\n",
"request.session['active_group'] = active_group\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Condition",
"Assign'",
"Assign'",
"Condition",
"Assign'",
"Assign'"
] |
[
"def FUNC_23(VAR_22):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_50 = VAR_22.checkpoint_path or os.path.join(VAR_22.dir,\n 'variables/variables')\n",
"if not VAR_22.variables_to_feed:\n",
"VAR_78 = []\n",
"if VAR_22.variables_to_feed.lower() == 'all':\n",
"saved_model_aot_compile.aot_compile_cpu_meta_graph_def(VAR_50=\n checkpoint_path, VAR_4=saved_model_utils.get_meta_graph_def(args.dir,\n args.tag_set), VAR_5=args.signature_def_key, VAR_78=variables_to_feed,\n output_prefix=args.output_prefix, target_triple=args.target_triple,\n target_cpu=args.target_cpu, cpp_class=args.cpp_class, multithreading=\n args.multithreading.lower() not in ('f', 'false', '0'))\n",
"VAR_78 = None\n",
"VAR_78 = VAR_22.variables_to_feed.split(',')\n"
] | [
"def aot_compile_cpu(args):...\n",
"\"\"\"docstring\"\"\"\n",
"checkpoint_path = args.checkpoint_path or os.path.join(args.dir,\n 'variables/variables')\n",
"if not args.variables_to_feed:\n",
"variables_to_feed = []\n",
"if args.variables_to_feed.lower() == 'all':\n",
"saved_model_aot_compile.aot_compile_cpu_meta_graph_def(checkpoint_path=\n checkpoint_path, meta_graph_def=saved_model_utils.get_meta_graph_def(\n args.dir, args.tag_set), signature_def_key=args.signature_def_key,\n variables_to_feed=variables_to_feed, output_prefix=args.output_prefix,\n target_triple=args.target_triple, target_cpu=args.target_cpu, cpp_class\n =args.cpp_class, multithreading=args.multithreading.lower() not in ('f',\n 'false', '0'))\n",
"variables_to_feed = None\n",
"variables_to_feed = args.variables_to_feed.split(',')\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Assign'",
"Condition",
"Expr'",
"Assign'",
"Assign'"
] |
[
"def FUNC_39(self):...\n",
"if self.flags.ignore_mandatory:\n",
"return\n",
"VAR_62 = self._get_missing_mandatory_fields()\n",
"for VAR_21 in self.get_all_children():\n",
"VAR_62.extend(VAR_21._get_missing_mandatory_fields())\n",
"if not VAR_62:\n",
"return\n",
"for VAR_18, VAR_87 in VAR_62:\n",
"msgprint(VAR_87)\n",
"if frappe.flags.print_messages:\n",
"print(self.as_json().encode('utf-8'))\n"
] | [
"def _validate_mandatory(self):...\n",
"if self.flags.ignore_mandatory:\n",
"return\n",
"missing = self._get_missing_mandatory_fields()\n",
"for d in self.get_all_children():\n",
"missing.extend(d._get_missing_mandatory_fields())\n",
"if not missing:\n",
"return\n",
"for fieldname, msg in missing:\n",
"msgprint(msg)\n",
"if frappe.flags.print_messages:\n",
"print(self.as_json().encode('utf-8'))\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Condition",
"Return'",
"Assign'",
"For",
"Expr'",
"Condition",
"Return'",
"For",
"Expr'",
"Condition",
"Expr'"
] |
[
"def FUNC_12(self):...\n",
"VAR_9, VAR_15 = self.make_request('GET', '/profile/%s/displayname' % (self.\n owner,))\n",
"self.assertEqual(VAR_15.code, 200, VAR_15.result)\n",
"return VAR_15.json_body['displayname']\n"
] | [
"def get_displayname(self):...\n",
"request, channel = self.make_request('GET', '/profile/%s/displayname' % (\n self.owner,))\n",
"self.assertEqual(channel.code, 200, channel.result)\n",
"return channel.json_body['displayname']\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Assign'",
"Expr'",
"Return'"
] |
[
"def FUNC_4(self, VAR_15=None):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_28 = [self.shutting_down]\n",
"if VAR_15 is not None:\n",
"VAR_28.append(VAR_15.destroyed)\n",
"if self._tab_id is not None:\n",
"assert self._win_id is not None\n",
"return VAR_28\n",
"VAR_37 = objreg.get('tab', scope='tab', window=self._win_id, VAR_37=self.\n _tab_id)\n",
"VAR_28.append(VAR_37.load_started)\n"
] | [
"def _get_abort_signals(self, owner=None):...\n",
"\"\"\"docstring\"\"\"\n",
"abort_on = [self.shutting_down]\n",
"if owner is not None:\n",
"abort_on.append(owner.destroyed)\n",
"if self._tab_id is not None:\n",
"assert self._win_id is not None\n",
"return abort_on\n",
"tab = objreg.get('tab', scope='tab', window=self._win_id, tab=self._tab_id)\n",
"abort_on.append(tab.load_started)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Condition",
"Expr'",
"Condition",
"Assert'",
"Return'",
"Assign'",
"Expr'"
] |
[
"async def FUNC_11(self, VAR_15, VAR_16, VAR_17):...\n",
"VAR_25 = await self._build_notification_dict(VAR_15, VAR_16, VAR_17)\n",
"if not VAR_25:\n",
"return []\n",
"VAR_26 = await self.http_client.post_json_get_json(self.url, VAR_25)\n",
"VAR_0.warning('Failed to push event %s to %s: %s %s', VAR_15.event_id, self\n .name, type(e), e)\n",
"VAR_21 = []\n",
"return False\n",
"if 'rejected' in VAR_26:\n",
"VAR_21 = VAR_26['rejected']\n",
"return VAR_21\n"
] | [
"async def dispatch_push(self, event, tweaks, badge):...\n",
"notification_dict = await self._build_notification_dict(event, tweaks, badge)\n",
"if not notification_dict:\n",
"return []\n",
"resp = await self.http_client.post_json_get_json(self.url, notification_dict)\n",
"logger.warning('Failed to push event %s to %s: %s %s', event.event_id, self\n .name, type(e), e)\n",
"rejected = []\n",
"return False\n",
"if 'rejected' in resp:\n",
"rejected = resp['rejected']\n",
"return rejected\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"AsyncFunctionDef'",
"Assign'",
"Condition",
"Return'",
"Assign'",
"Expr'",
"Assign'",
"Return'",
"Condition",
"Assign'",
"Return'"
] |
[
"def FUNC_5(self, VAR_5) ->CLASS_3:...\n",
"return CLASS_3(urllib.parse.urljoin(self.sourceName, VAR_5))\n"
] | [
"def relative(self, relativePath) ->UrlInputSource:...\n",
"return UrlInputSource(urllib.parse.urljoin(self.sourceName, relativePath))\n"
] | [
0,
0
] | [
"FunctionDef'",
"Return'"
] |
[
"def FUNC_23(VAR_21):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_47 = 'string'\n",
"VAR_48 = VAR_21.add_parser('show', description=show_msg, formatter_class=\n argparse.RawTextHelpFormatter)\n",
"VAR_48.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to inspect')\n",
"VAR_48.add_argument('--all', action='store_true', help=\n 'if set, will output all information in given SavedModel')\n",
"VAR_48.add_argument('--tag_set', type=str, default=None, help=\n \"tag-set of graph in SavedModel to show, separated by ','\")\n",
"VAR_48.add_argument('--signature_def', type=str, default=None, metavar=\n 'SIGNATURE_DEF_KEY', help=\n 'key of SignatureDef to display input(s) and output(s) for')\n",
"VAR_48.set_defaults(func=show)\n"
] | [
"def add_show_subparser(subparsers):...\n",
"\"\"\"docstring\"\"\"\n",
"show_msg = \"\"\"Usage examples:\nTo show all tag-sets in a SavedModel:\n$saved_model_cli show --dir /tmp/saved_model\n\nTo show all available SignatureDef keys in a MetaGraphDef specified by its tag-set:\n$saved_model_cli show --dir /tmp/saved_model --tag_set serve\n\nFor a MetaGraphDef with multiple tags in the tag-set, all tags must be passed in, separated by ';':\n$saved_model_cli show --dir /tmp/saved_model --tag_set serve,gpu\n\nTo show all inputs and outputs TensorInfo for a specific SignatureDef specified by the SignatureDef key in a MetaGraph.\n$saved_model_cli show --dir /tmp/saved_model --tag_set serve --signature_def serving_default\n\nTo show all available information in the SavedModel:\n$saved_model_cli show --dir /tmp/saved_model --all\"\"\"\n",
"parser_show = subparsers.add_parser('show', description=show_msg,\n formatter_class=argparse.RawTextHelpFormatter)\n",
"parser_show.add_argument('--dir', type=str, required=True, help=\n 'directory containing the SavedModel to inspect')\n",
"parser_show.add_argument('--all', action='store_true', help=\n 'if set, will output all information in given SavedModel')\n",
"parser_show.add_argument('--tag_set', type=str, default=None, help=\n \"tag-set of graph in SavedModel to show, separated by ','\")\n",
"parser_show.add_argument('--signature_def', type=str, default=None, metavar\n ='SIGNATURE_DEF_KEY', help=\n 'key of SignatureDef to display input(s) and output(s) for')\n",
"parser_show.set_defaults(func=show)\n"
] | [
0,
0,
0,
0,
0,
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Assign'",
"Expr'",
"Expr'",
"Expr'",
"Expr'",
"Expr'"
] |
[
"def FUNC_7(self):...\n",
"\"\"\"docstring\"\"\"\n",
"VAR_14 = 'this is an example of a string'\n",
"self.assertEqual(format_value(VAR_14), VAR_14)\n"
] | [
"def test_format_value_simple_string(self):...\n",
"\"\"\"docstring\"\"\"\n",
"simple_string = 'this is an example of a string'\n",
"self.assertEqual(format_value(simple_string), simple_string)\n"
] | [
0,
0,
0,
0
] | [
"FunctionDef'",
"Docstring",
"Assign'",
"Expr'"
] |