lines: sequence
raw_lines: sequence
label: sequence
type: sequence
[ "@FUNC_0...\n", "return StateHandler(self)\n" ]
[ "@cache_in_self...\n", "return StateHandler(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_4(VAR_16, VAR_28=40, VAR_18=VAR_26, VAR_20=VAR_27, VAR_29=VAR_70(8203)...\n", "\"\"\"docstring\"\"\"\n", "if VAR_16.tag in VAR_26:\n", "return\n", "VAR_63 = VAR_16.get('class')\n", "if VAR_63:\n", "VAR_91 = False\n", "if VAR_16.text:\n", "VAR_63 = VAR_63.split()\n", "VAR_16.text = FUNC_6(VAR_16.text, VAR_28, VAR_29)\n", "for child in VAR_16:\n", "for avoid in VAR_20:\n", "FUNC_4(child, VAR_28=max_width, VAR_18=avoid_elements, VAR_20=avoid_classes,\n VAR_29=break_character)\n", "if avoid in VAR_63:\n", "if VAR_91:\n", "if child.tail:\n", "VAR_91 = True\n", "return\n", "child.tail = FUNC_6(child.tail, VAR_28, VAR_29)\n" ]
[ "def word_break(el, max_width=40, avoid_elements=_avoid_word_break_elements,...\n", "\"\"\"docstring\"\"\"\n", "if el.tag in _avoid_word_break_elements:\n", "return\n", "class_name = el.get('class')\n", "if class_name:\n", "dont_break = False\n", "if el.text:\n", "class_name = class_name.split()\n", "el.text = _break_text(el.text, max_width, break_character)\n", "for child in el:\n", "for avoid in avoid_classes:\n", "word_break(child, max_width=max_width, avoid_elements=avoid_elements,\n avoid_classes=avoid_classes, break_character=break_character)\n", "if avoid in class_name:\n", "if dont_break:\n", "if child.tail:\n", "dont_break = True\n", "return\n", "child.tail = _break_text(child.tail, max_width, break_character)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "For", "For", "Expr'", "Condition", "Condition", "Condition", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_34(self):...\n", "if VAR_53.session['admin']:\n", "VAR_105 = self.userdb.getUserList()\n", "return json.dumps({'time': 0, 'userlist': []})\n", "for VAR_51 in VAR_105:\n", "if VAR_51['id'] == VAR_53.session['userid']:\n", "VAR_106 = lambda VAR_51: VAR_51['last_time_online']\n", "VAR_51['deletable'] = False\n", "VAR_119 = self.useroptions.forUser(VAR_51['id'])\n", "VAR_105 = sorted(VAR_105, key=sortfunc, reverse=True)\n", "VAR_120 = VAR_119.getOptionValue('last_time_online')\n", "return json.dumps({'time': int(time.time()), 'userlist': VAR_105})\n", "VAR_121 = VAR_119.getOptionValue('media.may_download')\n", "VAR_51['last_time_online'] = VAR_120\n", "VAR_51['may_download'] = VAR_121\n" ]
[ "def api_getuserlist(self):...\n", "if cherrypy.session['admin']:\n", "userlist = self.userdb.getUserList()\n", "return json.dumps({'time': 0, 'userlist': []})\n", "for user in userlist:\n", "if user['id'] == cherrypy.session['userid']:\n", "sortfunc = lambda user: user['last_time_online']\n", "user['deletable'] = False\n", "user_options = self.useroptions.forUser(user['id'])\n", "userlist = sorted(userlist, key=sortfunc, reverse=True)\n", "t = user_options.getOptionValue('last_time_online')\n", "return json.dumps({'time': int(time.time()), 'userlist': userlist})\n", "may_download = user_options.getOptionValue('media.may_download')\n", "user['last_time_online'] = t\n", "user['may_download'] = may_download\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Return'", "For", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_25(self, VAR_60, VAR_61):...\n", "self.footnote_index = 0\n", "self.links = VAR_60 or {}\n", "self.footnotes = VAR_61 or {}\n" ]
[ "def setup(self, links, footnotes):...\n", "self.footnote_index = 0\n", "self.links = links or {}\n", "self.footnotes = footnotes or {}\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_4(self) ->str:...\n", "return os.path.dirname(os.path.abspath(self.sourceName))\n" ]
[ "def directory(self) ->str:...\n", "return os.path.dirname(os.path.abspath(self.sourceName))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_47(self):...\n", "self.model.updateLibrary()\n", "return 'success'\n" ]
[ "def api_updatedb(self):...\n", "self.model.updateLibrary()\n", "return 'success'\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "def FUNC_13(self):...\n", "self.register_user('user_admin', 'pass', admin=True)\n", "VAR_8 = self.login('user_admin', 'pass')\n", "VAR_27 = self.register_user('user_nonadmin', 'pass', admin=False)\n", "VAR_22 = self.login('user_nonadmin', 'pass')\n", "VAR_29 = self.helper.upload_media(self.upload_resource, self.image_data,\n tok=non_admin_user_tok)\n", "VAR_30 = self.helper.upload_media(self.upload_resource, self.image_data,\n tok=non_admin_user_tok)\n", "VAR_33 = VAR_29['content_uri'][6:]\n", "VAR_34 = VAR_30['content_uri'][6:]\n", "VAR_35, VAR_36 = VAR_34.split('/')\n", "self.get_success(self.store.mark_local_media_as_safe(VAR_36))\n", "VAR_0 = '/_synapse/admin/v1/user/%s/media/quarantine' % urllib.parse.quote(\n VAR_27)\n", "VAR_12, VAR_13 = self.make_request('POST', VAR_0.encode('ascii'), VAR_7=\n admin_user_tok)\n", "self.pump(1.0)\n", "self.assertEqual(200, int(VAR_13.result['code']), msg=channel.result['body'])\n", "self.assertEqual(json.loads(VAR_13.result['body'].decode('utf-8')), {\n 'num_quarantined': 1}, 'Expected 1 quarantined item')\n", "self._ensure_quarantined(VAR_8, VAR_33)\n", "VAR_12, VAR_13 = make_request(self.reactor, FakeSite(self.download_resource\n ), 'GET', VAR_34, shorthand=False, VAR_7=non_admin_user_tok)\n", "self.assertEqual(200, int(VAR_13.code), msg=\n 'Expected to receive a 200 on accessing not-quarantined media: %s' %\n server_and_media_id_2)\n" ]
[ "def test_cannot_quarantine_safe_media(self):...\n", "self.register_user('user_admin', 'pass', admin=True)\n", "admin_user_tok = self.login('user_admin', 'pass')\n", "non_admin_user = self.register_user('user_nonadmin', 'pass', admin=False)\n", "non_admin_user_tok = self.login('user_nonadmin', 'pass')\n", "response_1 = self.helper.upload_media(self.upload_resource, self.image_data,\n tok=non_admin_user_tok)\n", "response_2 = self.helper.upload_media(self.upload_resource, self.image_data,\n tok=non_admin_user_tok)\n", "server_and_media_id_1 = response_1['content_uri'][6:]\n", "server_and_media_id_2 = response_2['content_uri'][6:]\n", "_, media_id_2 = server_and_media_id_2.split('/')\n", "self.get_success(self.store.mark_local_media_as_safe(media_id_2))\n", "url = '/_synapse/admin/v1/user/%s/media/quarantine' % urllib.parse.quote(\n non_admin_user)\n", "request, channel = self.make_request('POST', url.encode('ascii'),\n access_token=admin_user_tok)\n", "self.pump(1.0)\n", "self.assertEqual(200, int(channel.result['code']), msg=channel.result['body'])\n", "self.assertEqual(json.loads(channel.result['body'].decode('utf-8')), {\n 'num_quarantined': 1}, 'Expected 1 quarantined item')\n", "self._ensure_quarantined(admin_user_tok, server_and_media_id_1)\n", "request, channel = make_request(self.reactor, FakeSite(self.\n download_resource), 'GET', server_and_media_id_2, shorthand=False,\n access_token=non_admin_user_tok)\n", "self.assertEqual(200, int(channel.code), msg=\n 'Expected to receive a 200 on accessing not-quarantined media: %s' %\n server_and_media_id_2)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_8(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = self.register_user('user', 'pass')\n", "VAR_10 = self.login('user', 'pass')\n", "VAR_11 = self.register_user('other_user', 'pass')\n", "VAR_12 = self.login('other_user', 'pass')\n", "VAR_20 = self.helper.create_room_as(VAR_11, tok=other_access_token)\n", "self.helper.join(VAR_15=room_id, user=user_id, tok=access_token)\n", "VAR_13 = self.get_success(self.hs.get_datastore().get_user_by_access_token(\n VAR_10))\n", "VAR_14 = VAR_13.token_id\n", "self.get_success(self.hs.get_pusherpool().add_pusher(VAR_1=user_id, VAR_10=\n token_id, kind='http', app_id='m.http', app_display_name=\n 'HTTP Push Notifications', device_display_name='pushy push', pushkey=\n 'a@example.com', lang=None, data={'url': 'example.com'}))\n", "VAR_21 = self.helper.send(VAR_20, VAR_7='Hello there!', tok=other_access_token)\n", "VAR_22 = VAR_21['event_id']\n", "self.push_attempts[0][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.assertEqual(self.push_attempts[0][1], 'example.com')\n", "self.assertEqual(self.push_attempts[0][2]['notification']['counts'][\n 'unread'], 0)\n", "VAR_23, VAR_24 = self.make_request('POST', '/rooms/%s/receipt/m.read/%s' %\n (VAR_20, VAR_22), {}, VAR_10=access_token)\n", "self.assertEqual(VAR_24.code, 200, VAR_24.json_body)\n", "self.push_attempts[1][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 2)\n", "self.assertEqual(self.push_attempts[1][2]['notification']['counts'][\n 'unread'], 0)\n", "self.helper.send(VAR_20, VAR_7=\"How's the weather today?\", tok=\n other_access_token)\n", "self.push_attempts[2][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 3)\n", "self.assertEqual(self.push_attempts[2][2]['notification']['counts'][\n 'unread'], 1)\n", "self.helper.send(VAR_20, VAR_7='Hello?', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[3][0].callback({})\n", "self.helper.send(VAR_20, VAR_7='Hello??', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[4][0].callback({})\n", "self.helper.send(VAR_20, VAR_7='HELLO???', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[5][0].callback({})\n", "self.assertEqual(len(self.push_attempts), 6)\n" ]
[ "def _test_push_unread_count(self):...\n", "\"\"\"docstring\"\"\"\n", "user_id = self.register_user('user', 'pass')\n", "access_token = self.login('user', 'pass')\n", "other_user_id = self.register_user('other_user', 'pass')\n", "other_access_token = self.login('other_user', 'pass')\n", "room_id = self.helper.create_room_as(other_user_id, tok=other_access_token)\n", "self.helper.join(room=room_id, user=user_id, tok=access_token)\n", "user_tuple = self.get_success(self.hs.get_datastore().\n get_user_by_access_token(access_token))\n", "token_id = user_tuple.token_id\n", "self.get_success(self.hs.get_pusherpool().add_pusher(user_id=user_id,\n access_token=token_id, kind='http', app_id='m.http', app_display_name=\n 'HTTP Push Notifications', device_display_name='pushy push', pushkey=\n 'a@example.com', lang=None, data={'url': 'example.com'}))\n", "response = self.helper.send(room_id, body='Hello there!', tok=\n other_access_token)\n", "first_message_event_id = response['event_id']\n", "self.push_attempts[0][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.assertEqual(self.push_attempts[0][1], 'example.com')\n", "self.assertEqual(self.push_attempts[0][2]['notification']['counts'][\n 'unread'], 0)\n", "request, channel = self.make_request('POST', '/rooms/%s/receipt/m.read/%s' %\n (room_id, first_message_event_id), {}, access_token=access_token)\n", "self.assertEqual(channel.code, 200, channel.json_body)\n", "self.push_attempts[1][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 2)\n", "self.assertEqual(self.push_attempts[1][2]['notification']['counts'][\n 'unread'], 0)\n", "self.helper.send(room_id, body=\"How's the weather today?\", tok=\n other_access_token)\n", "self.push_attempts[2][0].callback({})\n", "self.pump()\n", "self.assertEqual(len(self.push_attempts), 3)\n", "self.assertEqual(self.push_attempts[2][2]['notification']['counts'][\n 'unread'], 1)\n", "self.helper.send(room_id, body='Hello?', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[3][0].callback({})\n", "self.helper.send(room_id, body='Hello??', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[4][0].callback({})\n", "self.helper.send(room_id, body='HELLO???', tok=other_access_token)\n", "self.pump()\n", "self.push_attempts[5][0].callback({})\n", "self.assertEqual(len(self.push_attempts), 6)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def __post_init__(self) ->None:...\n", "super().__post_init__()\n", "if self.default is not None:\n", "self.default = (\n f'field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))'\n )\n" ]
[ "def __post_init__(self) ->None:...\n", "super().__post_init__()\n", "if self.default is not None:\n", "self.default = (\n f'field(default_factory=lambda: cast({self.get_type_string()}, {self.default}))'\n )\n" ]
[ 0, 5, 5, 5 ]
[ "FunctionDef'", "Expr'", "Condition", "Assign'" ]
[ "def FUNC_5(*VAR_8):...\n", "return scriptPath('boilerplate', *VAR_8)\n" ]
[ "def boilerplatePath(*segs):...\n", "return scriptPath('boilerplate', *segs)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_18(VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_20.all:\n", "FUNC_8(VAR_20.dir)\n", "if VAR_20.tag_set is None:\n", "FUNC_0(VAR_20.dir)\n", "if VAR_20.signature_def is None:\n", "FUNC_1(VAR_20.dir, VAR_20.tag_set)\n", "FUNC_4(VAR_20.dir, VAR_20.tag_set, VAR_20.signature_def)\n" ]
[ "def show(args):...\n", "\"\"\"docstring\"\"\"\n", "if args.all:\n", "_show_all(args.dir)\n", "if args.tag_set is None:\n", "_show_tag_sets(args.dir)\n", "if args.signature_def is None:\n", "_show_signature_def_map_keys(args.dir, args.tag_set)\n", "_show_inputs_outputs(args.dir, args.tag_set, args.signature_def)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Condition", "Expr'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_69(VAR_43, VAR_126=False, VAR_127=True):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.utils\n", "VAR_61 = FUNC_71(VAR_43, VAR_126=raise_not_found)\n", "if VAR_61:\n", "VAR_61 = frappe.utils.strip(VAR_61)\n", "return []\n", "return [p.strip() for p in VAR_61.splitlines() if not VAR_127 or p.strip() and\n not p.startswith('#')]\n" ]
[ "def get_file_items(path, raise_not_found=False, ignore_empty_lines=True):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.utils\n", "content = read_file(path, raise_not_found=raise_not_found)\n", "if content:\n", "content = frappe.utils.strip(content)\n", "return []\n", "return [p.strip() for p in content.splitlines() if not ignore_empty_lines or\n p.strip() and not p.startswith('#')]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Assign'", "Condition", "Assign'", "Return'", "Return'" ]
[ "def FUNC_9(self):...\n", "VAR_3 = '@foo:bar'\n", "VAR_7 = 5000000\n", "VAR_10 = UserPresenceState.default(VAR_3)\n", "VAR_10 = VAR_10.copy_and_replace(VAR_10=PresenceState.ONLINE,\n last_active_ts=now - SYNC_ONLINE_TIMEOUT - 1, last_user_sync_ts=now -\n SYNC_ONLINE_TIMEOUT - 1)\n", "VAR_9 = handle_timeout(VAR_10, is_mine=True, syncing_user_ids={user_id},\n VAR_7=now)\n", "self.assertIsNotNone(VAR_9)\n", "self.assertEquals(VAR_9.state, PresenceState.ONLINE)\n" ]
[ "def test_sync_online(self):...\n", "user_id = '@foo:bar'\n", "now = 5000000\n", "state = UserPresenceState.default(user_id)\n", "state = state.copy_and_replace(state=PresenceState.ONLINE, last_active_ts=\n now - SYNC_ONLINE_TIMEOUT - 1, last_user_sync_ts=now -\n SYNC_ONLINE_TIMEOUT - 1)\n", "new_state = handle_timeout(state, is_mine=True, syncing_user_ids={user_id},\n now=now)\n", "self.assertIsNotNone(new_state)\n", "self.assertEquals(new_state.state, PresenceState.ONLINE)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "@login_required()...\n", "" ]
[ "@login_required()...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_23(VAR_14, VAR_31):...\n", "from flask import _request_ctx_stack\n", "def FUNC_33(VAR_91, VAR_14, VAR_31):...\n", "return os.path.join(VAR_91, VAR_14, 'LC_MESSAGES', f'{VAR_31}.po')\n" ]
[ "def _get_all_translationfiles(locale, domain):...\n", "from flask import _request_ctx_stack\n", "def get_po_path(basedir, locale, domain):...\n", "return os.path.join(basedir, locale, 'LC_MESSAGES', f'{domain}.po')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "FunctionDef'", "Return'" ]
[ "def __init__(self, VAR_1):...\n", "self.get_response = VAR_1\n" ]
[ "def __init__(self, get_response):...\n", "self.get_response = get_response\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Assign'" ]
[ "def __str__(self) ->str:...\n", "return self.sourceName\n" ]
[ "def __str__(self) ->str:...\n", "return self.sourceName\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_15(VAR_10, VAR_11, VAR_20=None, VAR_6=None, VAR_17=None, VAR_18='on'):...\n", "if VAR_6 is None:\n", "VAR_56 = None\n", "if VAR_20:\n", "VAR_56 = FUNC_19(VAR_10, VAR_20, VAR_17)\n", "if VAR_56:\n", "VAR_87 = VAR_56\n", "VAR_87, VAR_57 = FUNC_18(VAR_10, VAR_6, VAR_22.path, VAR_17, VAR_18)\n", "VAR_57 = mimetypes.guess_type(VAR_87)[0]\n", "unpack_file(VAR_87, VAR_11, VAR_57, VAR_10)\n", "if VAR_20 and not VAR_56:\n", "FUNC_14(VAR_87, VAR_20, VAR_10)\n", "if not VAR_56:\n", "os.unlink(VAR_87)\n" ]
[ "def unpack_http_url(link, location, download_dir=None, session=None, hashes...\n", "if session is None:\n", "already_downloaded_path = None\n", "if download_dir:\n", "already_downloaded_path = _check_download_dir(link, download_dir, hashes)\n", "if already_downloaded_path:\n", "from_path = already_downloaded_path\n", "from_path, content_type = _download_http_url(link, session, temp_dir.path,\n hashes, progress_bar)\n", "content_type = mimetypes.guess_type(from_path)[0]\n", "unpack_file(from_path, location, content_type, link)\n", "if download_dir and not already_downloaded_path:\n", "_copy_file(from_path, download_dir, link)\n", "if not already_downloaded_path:\n", "os.unlink(from_path)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Condition", "Expr'", "Condition", "Expr'" ]
[ "def FUNC_33(self, VAR_44):...\n", "VAR_13 = FUNC_1(VAR_44.group(2) or VAR_44.group(1))\n", "if VAR_13 not in self.links:\n", "return None\n", "VAR_84 = self.links[VAR_13]\n", "return self._process_link(VAR_44, VAR_84['link'], VAR_84['title'])\n" ]
[ "def output_reflink(self, m):...\n", "key = _keyify(m.group(2) or m.group(1))\n", "if key not in self.links:\n", "return None\n", "ret = self.links[key]\n", "return self._process_link(m, ret['link'], ret['title'])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Assign'", "Return'" ]
[ "def FUNC_22(VAR_45):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1.session.user = VAR_45\n", "VAR_1.session.sid = VAR_45\n", "VAR_1.cache = {}\n", "VAR_1.form_dict = CLASS_0()\n", "VAR_1.jenv = None\n", "VAR_1.session.data = CLASS_0()\n", "VAR_1.role_permissions = {}\n", "VAR_1.new_doc_templates = {}\n", "VAR_1.user_perms = None\n" ]
[ "def set_user(username):...\n", "\"\"\"docstring\"\"\"\n", "local.session.user = username\n", "local.session.sid = username\n", "local.cache = {}\n", "local.form_dict = _dict()\n", "local.jenv = None\n", "local.session.data = _dict()\n", "local.role_permissions = {}\n", "local.new_doc_templates = {}\n", "local.user_perms = None\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'" ]
[ "def FUNC_29(self):...\n", "return self._has_role(constants.ROLE_EDIT_SHELFS)\n" ]
[ "def role_edit_shelfs(self):...\n", "return self._has_role(constants.ROLE_EDIT_SHELFS)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __repr__(self) ->str:...\n", "return '{}({!r})'.format(self.__class__.__name__, str(self))\n" ]
[ "def __repr__(self) ->str:...\n", "return '{}({!r})'.format(self.__class__.__name__, str(self))\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(self, VAR_1):...\n", "from openapi_python_client.parser.properties import ListProperty\n", "VAR_10 = VAR_1.MagicMock()\n", "VAR_11 = VAR_1.MagicMock()\n", "VAR_10.get_type_string.return_value = VAR_11\n", "VAR_4 = VAR_31(VAR_5='test', VAR_26=True, default=None, VAR_10=inner_property)\n", "assert VAR_4.get_type_string() == f'List[{VAR_11}]'\n", "VAR_4.required = False\n", "assert VAR_4.get_type_string() == f'Optional[List[{VAR_11}]]'\n", "VAR_4 = VAR_31(VAR_5='test', VAR_26=True, default=[], VAR_10=inner_property)\n", "assert VAR_4.default == f'field(default_factory=lambda: cast(List[{VAR_11}], []))'\n" ]
[ "def test_get_type_string(self, mocker):...\n", "from openapi_python_client.parser.properties import ListProperty\n", "inner_property = mocker.MagicMock()\n", "inner_type_string = mocker.MagicMock()\n", "inner_property.get_type_string.return_value = inner_type_string\n", "p = ListProperty(name='test', required=True, default=None, inner_property=\n inner_property)\n", "assert p.get_type_string() == f'List[{inner_type_string}]'\n", "p.required = False\n", "assert p.get_type_string() == f'Optional[List[{inner_type_string}]]'\n", "p = ListProperty(name='test', required=True, default=[], inner_property=\n inner_property)\n", "assert p.default == f'field(default_factory=lambda: cast(List[{inner_type_string}], []))'\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'" ]
[ "def FUNC_19(VAR_2, *VAR_23, **VAR_24):...\n", "VAR_3 = VAR_2.GET.get('url')\n", "if VAR_3 is None or len(VAR_3) == 0:\n", "VAR_3 = VAR_2.get_full_path()\n", "VAR_9 = False\n", "VAR_14 = VAR_24.get('conn', None)\n", "VAR_16 = None\n", "VAR_19 = VAR_24.get('server_id', None)\n", "if VAR_14 is None:\n", "VAR_9 = VAR_20.doConnectionCleanup\n", "VAR_36 = None\n", "VAR_0.debug('Connection not provided, attempting to get one.')\n", "VAR_36 = VAR_21(VAR_2, *VAR_23, **kwargs)\n", "VAR_39 = isinstance(VAR_36, CLASS_0)\n", "return VAR_36\n", "VAR_14 = VAR_20.get_connection(VAR_19, VAR_2)\n", "VAR_0.error('Error retrieving connection.', exc_info=True)\n", "if VAR_14 is None:\n", "if VAR_9 and VAR_39:\n", "VAR_16 = str(x)\n", "return VAR_20.on_not_logged_in(VAR_2, VAR_3, VAR_16)\n", "VAR_20.on_logged_in(VAR_2, VAR_14)\n", "VAR_9 = not VAR_39\n", "VAR_20.verify_is_admin(VAR_14)\n", "VAR_0.debug('Doing connection cleanup? %s' % VAR_9)\n", "VAR_20.verify_is_group_owner(VAR_14, VAR_24.get('gid'))\n", "if VAR_9:\n", "VAR_0.warn('Failed to clean up connection', exc_info=True)\n", "VAR_20.load_server_settings(VAR_14, VAR_2)\n", "if VAR_14 is not None and VAR_14.c is not None:\n", "VAR_15 = VAR_24.get('share_id')\n", "VAR_14.close(hard=False)\n", "VAR_17 = VAR_20.prepare_share_connection(VAR_2, VAR_14, VAR_15)\n", "if VAR_17 is not None:\n", "VAR_20.on_share_connection_prepared(VAR_2, VAR_17)\n", "VAR_24['conn'] = VAR_14\n", "VAR_24['conn'] = VAR_17\n", "VAR_24['url'] = VAR_3\n" ]
[ "def wrapped(request, *args, **kwargs):...\n", "url = request.GET.get('url')\n", "if url is None or len(url) == 0:\n", "url = request.get_full_path()\n", "doConnectionCleanup = False\n", "conn = kwargs.get('conn', None)\n", "error = None\n", "server_id = kwargs.get('server_id', None)\n", "if conn is None:\n", "doConnectionCleanup = ctx.doConnectionCleanup\n", "retval = None\n", "logger.debug('Connection not provided, attempting to get one.')\n", "retval = f(request, *args, **kwargs)\n", "delayConnectionCleanup = isinstance(retval, ConnCleaningHttpResponse)\n", "return retval\n", "conn = ctx.get_connection(server_id, request)\n", "logger.error('Error retrieving connection.', exc_info=True)\n", "if conn is None:\n", "if doConnectionCleanup and delayConnectionCleanup:\n", "error = str(x)\n", "return ctx.on_not_logged_in(request, url, error)\n", "ctx.on_logged_in(request, conn)\n", "doConnectionCleanup = not delayConnectionCleanup\n", "ctx.verify_is_admin(conn)\n", "logger.debug('Doing connection cleanup? %s' % doConnectionCleanup)\n", "ctx.verify_is_group_owner(conn, kwargs.get('gid'))\n", "if doConnectionCleanup:\n", "logger.warn('Failed to clean up connection', exc_info=True)\n", "ctx.load_server_settings(conn, request)\n", "if conn is not None and conn.c is not None:\n", "share_id = kwargs.get('share_id')\n", "conn.close(hard=False)\n", "conn_share = ctx.prepare_share_connection(request, conn, share_id)\n", "if conn_share is not None:\n", "ctx.on_share_connection_prepared(request, conn_share)\n", "kwargs['conn'] = conn\n", "kwargs['conn'] = conn_share\n", "kwargs['url'] = url\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Return'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Return'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'" ]
[ "@VAR_0.route('/api/query/unstar', methods=['POST'])...\n", "if get_user() is None:\n", "return 'Unauthorized access', 403\n", "VAR_14 = g.conn.session.query(Query).get(request.form['query_id'])\n", "if VAR_14:\n", "VAR_34 = g.conn.session.query(Star).filter(Star.query_id == request.form[\n 'query_id']).filter(Star.user_id == get_user().id).one()\n", "return 'Query not found', 404\n", "g.conn.session.delete(VAR_34)\n", "g.conn.session.commit()\n", "return ''\n" ]
[ "@app.route('/api/query/unstar', methods=['POST'])...\n", "if get_user() is None:\n", "return 'Unauthorized access', 403\n", "query = g.conn.session.query(Query).get(request.form['query_id'])\n", "if query:\n", "star = g.conn.session.query(Star).filter(Star.query_id == request.form[\n 'query_id']).filter(Star.user_id == get_user().id).one()\n", "return 'Query not found', 404\n", "g.conn.session.delete(star)\n", "g.conn.session.commit()\n", "return ''\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Return'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_2(VAR_3: str, VAR_4: Callable, VAR_5: Optional[str]=None...\n", "\"\"\"docstring\"\"\"\n", "return re_path(f'^{VAR_3}/?(?:[?#].*)?$', VAR_4, VAR_5=name)\n" ]
[ "def opt_slash_path(route: str, view: Callable, name: Optional[str]=None...\n", "\"\"\"docstring\"\"\"\n", "return re_path(f'^{route}/?(?:[?#].*)?$', view, name=name)\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "def FUNC_81(self):...\n", "VAR_14 = self.token['text']\n", "return self.renderer.block_html(VAR_14)\n" ]
[ "def output_close_html(self):...\n", "text = self.token['text']\n", "return self.renderer.block_html(text)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_4(self):...\n", "super().clean()\n", "VAR_113 = {'target', 'contentsum', 'translationsum'}\n", "if not VAR_113.issubset(self.cleaned_data):\n", "return\n", "VAR_17 = self.unit\n", "if self.cleaned_data['contentsum'] != VAR_17.content_hash:\n", "if self.cleaned_data['translationsum'] != VAR_17.get_target_hash():\n", "VAR_24 = VAR_17.get_max_length()\n", "for text in self.cleaned_data['target']:\n", "if len(text) > VAR_24:\n", "if self.user.has_perm('unit.review', VAR_17.translation\n", "self.cleaned_data['state'] = int(self.cleaned_data['review'])\n", "if self.cleaned_data['fuzzy']:\n", "self.cleaned_data['state'] = STATE_FUZZY\n", "self.cleaned_data['state'] = STATE_TRANSLATED\n" ]
[ "def clean(self):...\n", "super().clean()\n", "required = {'target', 'contentsum', 'translationsum'}\n", "if not required.issubset(self.cleaned_data):\n", "return\n", "unit = self.unit\n", "if self.cleaned_data['contentsum'] != unit.content_hash:\n", "if self.cleaned_data['translationsum'] != unit.get_target_hash():\n", "max_length = unit.get_max_length()\n", "for text in self.cleaned_data['target']:\n", "if len(text) > max_length:\n", "if self.user.has_perm('unit.review', unit.translation\n", "self.cleaned_data['state'] = int(self.cleaned_data['review'])\n", "if self.cleaned_data['fuzzy']:\n", "self.cleaned_data['state'] = STATE_FUZZY\n", "self.cleaned_data['state'] = STATE_TRANSLATED\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Condition", "Return'", "Assign'", "Condition", "Condition", "Assign'", "For", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'" ]
[ "def FUNC_0(self, VAR_2, **VAR_3):...\n", "self.federation_domain_whitelist = None\n", "VAR_6 = VAR_2.get('federation_domain_whitelist', None)\n", "if VAR_6 is not None:\n", "self.federation_domain_whitelist = {}\n", "self.federation_ip_range_blacklist = VAR_2.get('federation_ip_range_blacklist',\n [])\n", "for VAR_8 in VAR_6:\n", "self.federation_ip_range_blacklist = IPSet(self.federation_ip_range_blacklist)\n", "VAR_7 = VAR_2.get('federation_metrics_domains') or []\n", "self.federation_domain_whitelist[VAR_8] = True\n", "self.federation_ip_range_blacklist.update(['0.0.0.0', '::'])\n", "validate_config(VAR_0, VAR_7, ('federation_metrics_domains',))\n", "self.federation_metrics_domains = set(VAR_7)\n" ]
[ "def read_config(self, config, **kwargs):...\n", "self.federation_domain_whitelist = None\n", "federation_domain_whitelist = config.get('federation_domain_whitelist', None)\n", "if federation_domain_whitelist is not None:\n", "self.federation_domain_whitelist = {}\n", "self.federation_ip_range_blacklist = config.get('federation_ip_range_blacklist'\n , [])\n", "for domain in federation_domain_whitelist:\n", "self.federation_ip_range_blacklist = IPSet(self.federation_ip_range_blacklist)\n", "federation_metrics_domains = config.get('federation_metrics_domains') or []\n", "self.federation_domain_whitelist[domain] = True\n", "self.federation_ip_range_blacklist.update(['0.0.0.0', '::'])\n", "validate_config(_METRICS_FOR_DOMAINS_SCHEMA, federation_metrics_domains, (\n 'federation_metrics_domains',))\n", "self.federation_metrics_domains = set(federation_metrics_domains)\n" ]
[ 0, 0, 0, 0, 0, 4, 0, 4, 0, 0, 4, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'" ]
[ "@wraps(VAR_0)...\n", "if VAR_1.user.is_authenticated:\n", "return redirect(VAR_1.GET.get('next', VAR_1.user.st.get_absolute_url()))\n", "return VAR_0(VAR_1, *VAR_2, **kwargs)\n" ]
[ "@wraps(view_func)...\n", "if request.user.is_authenticated:\n", "return redirect(request.GET.get('next', request.user.st.get_absolute_url()))\n", "return view_func(request, *args, **kwargs)\n" ]
[ 0, 0, 4, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_160(self=self, VAR_147=VAR_147, VAR_146=VAR_146):...\n", "return self.has_membership(VAR_147=group_id, VAR_146=role)\n" ]
[ "def has_membership(self=self, group_id=group_id, role=role):...\n", "return self.has_membership(group_id=group_id, role=role)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_123(self, VAR_10):...\n", "\"\"\"docstring\"\"\"\n", "return self.__in_base(self.normalize_path(VAR_10), self.base)\n" ]
[ "def in_base(self, f):...\n", "\"\"\"docstring\"\"\"\n", "return self.__in_base(self.normalize_path(f), self.base)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_13(self, VAR_25: Optional[str], VAR_14: str):...\n", "\"\"\"docstring\"\"\"\n", "for VAR_47, val in self._configs.items():\n", "if VAR_47 == VAR_25:\n", "if VAR_14 in dir(val):\n", "return getattr(val, VAR_14)\n" ]
[ "def _get_unclassed_config(self, asking_section: Optional[str], item: str):...\n", "\"\"\"docstring\"\"\"\n", "for key, val in self._configs.items():\n", "if key == asking_section:\n", "if item in dir(val):\n", "return getattr(val, item)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "For", "Condition", "Condition", "Return'" ]
[ "@default('git_credentials')...\n", "if self.private_token:\n", "return 'username=binderhub\\\\npassword={token}'.format(token=self.private_token)\n", "return ''\n" ]
[ "@default('git_credentials')...\n", "if self.private_token:\n", "return 'username=binderhub\\\\npassword={token}'.format(token=self.private_token)\n", "return ''\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Return'" ]
[ "def FUNC_21(self):...\n", "VAR_27 = np.array([[1], [2]])\n", "VAR_28 = np.array(range(5))\n", "VAR_32 = os.path.join(test.get_temp_dir(), 'input.npz')\n", "np.savez(VAR_32, VAR_54=x0, VAR_55=x1)\n", "VAR_21 = 'x=' + VAR_32\n", "saved_model_cli.load_inputs_from_input_arg_string(VAR_21, '', '')\n" ]
[ "def testInputParserErrorNoName(self):...\n", "x0 = np.array([[1], [2]])\n", "x1 = np.array(range(5))\n", "input_path = os.path.join(test.get_temp_dir(), 'input.npz')\n", "np.savez(input_path, a=x0, b=x1)\n", "input_str = 'x=' + input_path\n", "saved_model_cli.load_inputs_from_input_arg_string(input_str, '', '')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "@app.route('/bookmarks/new', methods=['GET', 'POST'])...\n", "VAR_8 = app.config.get('DEFAULT_BOOKMARKS_DIR', 'root directory')\n", "VAR_9 = forms.NewBookmarkForm(VAR_7=default_dir)\n", "VAR_9.path.choices = [('', 'root directory')] + [(pathname, pathname) for\n pathname in data.get_dirs()]\n", "if VAR_9.validate_on_submit():\n", "VAR_7 = VAR_9.path.data\n", "VAR_9.url.data = request.args.get('url', '')\n", "VAR_10 = VAR_9.tags.data.split(',') if VAR_9.tags.data != '' else []\n", "VAR_7 = request.args.get('path', VAR_8).strip('/')\n", "VAR_10 = [tag.strip() for tag in VAR_10]\n", "VAR_9.path.data = VAR_7\n", "VAR_28 = DataObj(url=form.url.data, VAR_10=tags, VAR_7=path, type='bookmark')\n", "return render_template('dataobjs/new.html', title='New Bookmark', VAR_9=form)\n", "VAR_28.process_bookmark_url()\n", "VAR_30 = VAR_28.insert()\n", "if VAR_30:\n", "flash('Bookmark Saved!', 'success')\n", "flash(VAR_28.error, 'error')\n", "return redirect(f'/dataobj/{VAR_30}')\n", "return redirect('/bookmarks/new')\n" ]
[ "@app.route('/bookmarks/new', methods=['GET', 'POST'])...\n", "default_dir = app.config.get('DEFAULT_BOOKMARKS_DIR', 'root directory')\n", "form = forms.NewBookmarkForm(path=default_dir)\n", "form.path.choices = [('', 'root directory')] + [(pathname, pathname) for\n pathname in data.get_dirs()]\n", "if form.validate_on_submit():\n", "path = form.path.data\n", "form.url.data = request.args.get('url', '')\n", "tags = form.tags.data.split(',') if form.tags.data != '' else []\n", "path = request.args.get('path', default_dir).strip('/')\n", "tags = [tag.strip() for tag in tags]\n", "form.path.data = path\n", "bookmark = DataObj(url=form.url.data, tags=tags, path=path, type='bookmark')\n", "return render_template('dataobjs/new.html', title='New Bookmark', form=form)\n", "bookmark.process_bookmark_url()\n", "bookmark_id = bookmark.insert()\n", "if bookmark_id:\n", "flash('Bookmark Saved!', 'success')\n", "flash(bookmark.error, 'error')\n", "return redirect(f'/dataobj/{bookmark_id}')\n", "return redirect('/bookmarks/new')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Return'", "Return'" ]
[ "\"\"\"string\"\"\"\n", "import logging\n", "import warnings\n", "import OFS.interfaces\n", "from AccessControl import safe_builtins\n", "from Acquisition import aq_base\n", "from MultiMapping import MultiMapping\n", "from zExceptions import NotFound\n", "from zExceptions import Unauthorized\n", "from zope.component import queryUtility\n", "from zope.contentprovider.tales import TALESProviderExpression\n", "from zope.i18n import translate\n", "from zope.interface import implementer\n", "from zope.pagetemplate.engine import ZopeEngine as Z3Engine\n", "from zope.proxy import removeAllProxies\n", "from zope.tales.expressions import DeferExpr\n", "from zope.tales.expressions import LazyExpr\n", "from zope.tales.expressions import NotExpr\n", "from zope.tales.expressions import PathExpr\n", "from zope.tales.expressions import StringExpr\n", "from zope.tales.expressions import SubPathExpr\n", "from zope.tales.expressions import Undefs\n", "from zope.tales.pythonexpr import PythonExpr\n", "from zope.tales.tales import Context\n", "from zope.tales.tales import ErrorInfo as BaseErrorInfo\n", "from zope.tales.tales import Iterator\n", "from zope.traversing.adapters import traversePathElement\n", "from zope.traversing.interfaces import ITraversable\n", "from . import ZRPythonExpr\n", "from .interfaces import IUnicodeEncodingConflictResolver\n", "from .interfaces import IZopeAwareEngine\n", "VAR_0 = ZRPythonExpr._SecureModuleImporter()\n", "VAR_1 = logging.getLogger('Expressions')\n", "VAR_2 = Undefs + (NotFound, Unauthorized)\n", "def FUNC_0(VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = getattr(VAR_5, 'request', None)\n", "VAR_4 = list(VAR_4)\n", "VAR_4.reverse()\n", "while VAR_4:\n", "VAR_19 = VAR_4.pop()\n", "return VAR_3\n", "if VAR_19 == '_':\n", "warnings.warn(\n 'Traversing to the name `_` is deprecated and will be removed in Zope 6.',\n DeprecationWarning)\n", "if VAR_19.startswith('_'):\n", "if OFS.interfaces.ITraversable.providedBy(VAR_3):\n", "VAR_3 = VAR_3.restrictedTraverse(VAR_19)\n", "VAR_3 = traversePathElement(VAR_3, VAR_19, VAR_4, VAR_12=request)\n" ]
[ "\"\"\"Page Template Expression Engine\n\nPage Template-specific implementation of TALES, with handlers\nfor Python expressions, string literals, and paths.\n\"\"\"\n", "import logging\n", "import warnings\n", "import OFS.interfaces\n", "from AccessControl import safe_builtins\n", "from Acquisition import aq_base\n", "from MultiMapping import MultiMapping\n", "from zExceptions import NotFound\n", "from zExceptions import Unauthorized\n", "from zope.component import queryUtility\n", "from zope.contentprovider.tales import TALESProviderExpression\n", "from zope.i18n import translate\n", "from zope.interface import implementer\n", "from zope.pagetemplate.engine import ZopeEngine as Z3Engine\n", "from zope.proxy import removeAllProxies\n", "from zope.tales.expressions import DeferExpr\n", "from zope.tales.expressions import LazyExpr\n", "from zope.tales.expressions import NotExpr\n", "from zope.tales.expressions import PathExpr\n", "from zope.tales.expressions import StringExpr\n", "from zope.tales.expressions import SubPathExpr\n", "from zope.tales.expressions import Undefs\n", "from zope.tales.pythonexpr import PythonExpr\n", "from zope.tales.tales import Context\n", "from zope.tales.tales import ErrorInfo as BaseErrorInfo\n", "from zope.tales.tales import Iterator\n", "from zope.traversing.adapters import traversePathElement\n", "from zope.traversing.interfaces import ITraversable\n", "from . import ZRPythonExpr\n", "from .interfaces import IUnicodeEncodingConflictResolver\n", "from .interfaces import IZopeAwareEngine\n", "SecureModuleImporter = ZRPythonExpr._SecureModuleImporter()\n", "LOG = logging.getLogger('Expressions')\n", "ZopeUndefs = Undefs + (NotFound, Unauthorized)\n", "def boboAwareZopeTraverse(object, path_items, econtext):...\n", "\"\"\"docstring\"\"\"\n", "request = getattr(econtext, 'request', None)\n", "path_items = list(path_items)\n", "path_items.reverse()\n", "while path_items:\n", "name = path_items.pop()\n", "return object\n", "if name == '_':\n", "warnings.warn(\n 'Traversing to the name `_` is deprecated and will be removed in Zope 6.',\n DeprecationWarning)\n", "if name.startswith('_'):\n", "if OFS.interfaces.ITraversable.providedBy(object):\n", "object = object.restrictedTraverse(name)\n", "object = traversePathElement(object, name, path_items, request=request)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1 ]
[ "Expr'", "Import'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Return'", "Condition", "Expr'", "Condition", "Condition", "Assign'", "Assign'" ]
[ "def __new__(VAR_197, *VAR_9, **VAR_13):...\n", "VAR_215 = thread.get_ident()\n", "VAR_323 = thread.allocate_lock()\n", "VAR_323.acquire()\n", "VAR_323.release()\n", "return VAR_197.instances[VAR_215]\n", "VAR_458 = object.__new__(VAR_197, *VAR_9, **b)\n", "VAR_197.instances[VAR_215] = VAR_458\n", "return VAR_458\n" ]
[ "def __new__(cls, *a, **b):...\n", "id = thread.get_ident()\n", "lock = thread.allocate_lock()\n", "lock.acquire()\n", "lock.release()\n", "return cls.instances[id]\n", "instance = object.__new__(cls, *a, **b)\n", "cls.instances[id] = instance\n", "return instance\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_5(VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "return re.sub('\\\\s+', ' ', VAR_3).strip()\n" ]
[ "def strip_plus(text):...\n", "\"\"\"docstring\"\"\"\n", "return re.sub('\\\\s+', ' ', text).strip()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_29():...\n", "return self.build_perspectives_response(VAR_34, VAR_36, VAR_39)\n" ]
[ "def build_response():...\n", "return self.build_perspectives_response(SERVER_NAME, testkey, VALID_UNTIL_TS)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_107(self, VAR_11=None):...\n", "VAR_56 = VAR_263.request\n", "VAR_244 = VAR_263.response\n", "VAR_244.headers['Content-Type'] = 'text/xml'\n", "if not VAR_11:\n", "VAR_11 = VAR_56.args\n", "if VAR_11 and VAR_11[0] in self.xml_procedures:\n", "VAR_278 = self.call_service_function(self.xml_procedures[VAR_11[0]], *\n VAR_11[1:], **dict(request.vars))\n", "self.error()\n", "if hasattr(VAR_278, 'as_list'):\n", "VAR_278 = VAR_278.as_list()\n", "return serializers.xml(VAR_278, quote=False)\n" ]
[ "def serve_xml(self, args=None):...\n", "request = current.request\n", "response = current.response\n", "response.headers['Content-Type'] = 'text/xml'\n", "if not args:\n", "args = request.args\n", "if args and args[0] in self.xml_procedures:\n", "s = self.call_service_function(self.xml_procedures[args[0]], *args[1:], **\n dict(request.vars))\n", "self.error()\n", "if hasattr(s, 'as_list'):\n", "s = s.as_list()\n", "return serializers.xml(s, quote=False)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_22():...\n", "from octoprint.util.jinja import get_all_asset_paths\n", "return get_all_asset_paths(app.jinja_env.assets_environment, verifyExist=False)\n" ]
[ "def _get_all_assets():...\n", "from octoprint.util.jinja import get_all_asset_paths\n", "return get_all_asset_paths(app.jinja_env.assets_environment, verifyExist=False)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "ImportFrom'", "Return'" ]
[ "@VAR_2.route('/get_publishers_json', methods=['GET'])...\n", "return calibre_db.get_typeahead(db.Publishers, request.args.get('q'), ('|',\n ','))\n" ]
[ "@web.route('/get_publishers_json', methods=['GET'])...\n", "return calibre_db.get_typeahead(db.Publishers, request.args.get('q'), ('|',\n ','))\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_19(self):...\n", "VAR_5, VAR_15 = self._test_confirm_start()\n", "VAR_3 = self.client.post(VAR_15, {'new_password1': 'anewpassword',\n 'new_password2': 'anewpassword'})\n", "VAR_16 = User.objects.get(VAR_8='staffmember@example.com')\n", "self.assertTrue(VAR_16.check_password('anewpassword'))\n", "VAR_3 = self.client.get(VAR_15)\n", "self.assertContains(VAR_3, 'The password reset link was invalid')\n" ]
[ "def test_confirm_complete(self):...\n", "url, path = self._test_confirm_start()\n", "response = self.client.post(path, {'new_password1': 'anewpassword',\n 'new_password2': 'anewpassword'})\n", "u = User.objects.get(email='staffmember@example.com')\n", "self.assertTrue(u.check_password('anewpassword'))\n", "response = self.client.get(path)\n", "self.assertContains(response, 'The password reset link was invalid')\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_92(*VAR_79, **VAR_42):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.realtime\n", "return frappe.realtime.publish_progress(*VAR_79, **kwargs)\n" ]
[ "def publish_progress(*args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "import frappe.realtime\n", "return frappe.realtime.publish_progress(*args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Import'", "Return'" ]
[ "def FUNC_3(VAR_3):...\n", "@wraps(VAR_3)...\n", "VAR_12 = FUNC_2()\n", "if VAR_12 is not None:\n", "VAR_16 = ub.session.query(ub.User).join(ub.RemoteAuthToken).filter(ub.\n RemoteAuthToken.auth_token == VAR_12).filter(ub.RemoteAuthToken.\n token_type == 1).first()\n", "return FUNC_7\n", "if VAR_16 is not None:\n", "login_user(VAR_16)\n", "VAR_0.debug('Received Kobo request without a recognizable auth token.')\n", "return VAR_3(*VAR_9, **kwargs)\n", "return abort(401)\n" ]
[ "def requires_kobo_auth(f):...\n", "@wraps(f)...\n", "auth_token = get_auth_token()\n", "if auth_token is not None:\n", "user = ub.session.query(ub.User).join(ub.RemoteAuthToken).filter(ub.\n RemoteAuthToken.auth_token == auth_token).filter(ub.RemoteAuthToken.\n token_type == 1).first()\n", "return inner\n", "if user is not None:\n", "login_user(user)\n", "log.debug('Received Kobo request without a recognizable auth token.')\n", "return f(*args, **kwargs)\n", "return abort(401)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Condition", "Expr'", "Expr'", "Return'", "Return'" ]
[ "def FUNC_11(VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "if '.' in VAR_1:\n", "VAR_3 = VAR_25.get_attr(VAR_1)\n", "VAR_3 = globals()[VAR_1]\n", "VAR_25.log('method:' + VAR_1)\n", "return VAR_3\n" ]
[ "def get_attr(cmd):...\n", "\"\"\"docstring\"\"\"\n", "if '.' in cmd:\n", "method = frappe.get_attr(cmd)\n", "method = globals()[cmd]\n", "frappe.log('method:' + cmd)\n", "return method\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_1(self, VAR_1, VAR_2, VAR_4):...\n", "self.store = VAR_4.get_datastore()\n", "self.room_creator = VAR_4.get_room_creation_handler()\n" ]
[ "def prepare(self, reactor, clock, homeserver):...\n", "self.store = homeserver.get_datastore()\n", "self.room_creator = homeserver.get_room_creation_handler()\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_31(self):...\n", "return self._has_role(constants.ROLE_VIEWER)\n" ]
[ "def role_viewer(self):...\n", "return self._has_role(constants.ROLE_VIEWER)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@staticmethod...\n", "return os.path.abspath(VAR_4) if VAR_4 else VAR_4\n" ]
[ "@staticmethod...\n", "return os.path.abspath(file_path) if file_path else file_path\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def __init__(self, *VAR_0, **VAR_1):...\n", "\"\"\"docstring\"\"\"\n", "self.doctype = self.name = None\n", "self._default_new_docs = {}\n", "self.flags = frappe._dict()\n", "if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], string_types):\n", "if len(VAR_0) == 1:\n", "if VAR_0 and VAR_0[0] and isinstance(VAR_0[0], dict):\n", "self.doctype = self.name = VAR_0[0]\n", "self.doctype = VAR_0[0]\n", "VAR_1 = VAR_0[0]\n", "if VAR_1:\n", "self.load_from_db()\n", "if isinstance(VAR_0[1], dict):\n", "super(CLASS_0, self).__init__(VAR_1)\n", "return\n", "self.name = frappe.db.get_value(VAR_0[0], VAR_0[1], 'name')\n", "self.name = VAR_0[1]\n", "self.init_valid_columns()\n", "if self.name is None:\n", "if 'for_update' in VAR_1:\n", "frappe.throw(_('{0} {1} not found').format(_(VAR_0[0]), VAR_0[1]), frappe.\n DoesNotExistError)\n", "self.flags.for_update = VAR_1.get('for_update')\n" ]
[ "def __init__(self, *args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "self.doctype = self.name = None\n", "self._default_new_docs = {}\n", "self.flags = frappe._dict()\n", "if args and args[0] and isinstance(args[0], string_types):\n", "if len(args) == 1:\n", "if args and args[0] and isinstance(args[0], dict):\n", "self.doctype = self.name = args[0]\n", "self.doctype = args[0]\n", "kwargs = args[0]\n", "if kwargs:\n", "self.load_from_db()\n", "if isinstance(args[1], dict):\n", "super(Document, self).__init__(kwargs)\n", "return\n", "self.name = frappe.db.get_value(args[0], args[1], 'name')\n", "self.name = args[1]\n", "self.init_valid_columns()\n", "if self.name is None:\n", "if 'for_update' in kwargs:\n", "frappe.throw(_('{0} {1} not found').format(_(args[0]), args[1]), frappe.\n DoesNotExistError)\n", "self.flags.for_update = kwargs.get('for_update')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Expr'", "Return'", "Assign'", "Assign'", "Expr'", "Condition", "For", "Expr'", "Assign'" ]
[ "def FUNC_64(self, VAR_32, VAR_35=None):...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_35:\n", "VAR_35 = (VAR_19.fieldname for VAR_19 in VAR_32.meta.get('fields', {\n 'fieldtype': ['in', ['Currency', 'Float', 'Percent']]}))\n", "for VAR_18 in VAR_35:\n", "VAR_32.set(VAR_18, flt(VAR_32.get(VAR_18), self.precision(VAR_18, VAR_32.\n parentfield)))\n" ]
[ "def round_floats_in(self, doc, fieldnames=None):...\n", "\"\"\"docstring\"\"\"\n", "if not fieldnames:\n", "fieldnames = (df.fieldname for df in doc.meta.get('fields', {'fieldtype': [\n 'in', ['Currency', 'Float', 'Percent']]}))\n", "for fieldname in fieldnames:\n", "doc.set(fieldname, flt(doc.get(fieldname), self.precision(fieldname, doc.\n parentfield)))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Assign'", "For", "Expr'" ]
[ "def FUNC_19(self, VAR_82, VAR_83, VAR_84):...\n", "VAR_238 = hmac.new(VAR_199=secret, msg=body, digestmod=self.digestmod)\n", "return compare(self.jwt_b64e(VAR_238.digest()), VAR_83)\n" ]
[ "def verify_signature(self, body, signature, secret):...\n", "mauth = hmac.new(key=secret, msg=body, digestmod=self.digestmod)\n", "return compare(self.jwt_b64e(mauth.digest()), signature)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_0(VAR_2):...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = saved_model_utils.get_saved_model_tag_sets(VAR_2)\n", "print('The given SavedModel contains the following tag-sets:')\n", "for VAR_3 in sorted(VAR_22):\n", "print('%r' % ', '.join(sorted(VAR_3)))\n" ]
[ "def _show_tag_sets(saved_model_dir):...\n", "\"\"\"docstring\"\"\"\n", "tag_sets = saved_model_utils.get_saved_model_tag_sets(saved_model_dir)\n", "print('The given SavedModel contains the following tag-sets:')\n", "for tag_set in sorted(tag_sets):\n", "print('%r' % ', '.join(sorted(tag_set)))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "For", "Expr'" ]
[ "def FUNC_1(self, VAR_2):...\n", "return parse.urlparse(VAR_2).scheme == 'https'\n" ]
[ "def issecure(self, url):...\n", "return parse.urlparse(url).scheme == 'https'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_4(VAR_18, VAR_14=None):...\n", "\"\"\"docstring\"\"\"\n", "VAR_32 = FUNC_11('GIDIDX' + VAR_18.upper())\n", "VAR_33 = '*.jpg', '*.JPG', '*.jpeg', '.png'\n", "VAR_34 = []\n", "if VAR_14:\n", "VAR_14.replace('%2C', ',')\n", "def FUNC_14():...\n", "VAR_14.replace('%20', ' ')\n", "VAR_47 = f.readline()\n", "VAR_34 = VAR_14.split(',')\n", "VAR_47 = '/'.join(VAR_47.split('/')[:-2])\n", "VAR_35 = []\n", "for c in VAR_34:\n", "VAR_35.append(FUNC_10(VAR_47 + '/' + c.strip()))\n", "return VAR_35\n" ]
[ "def get_mixer_list(idx, classes=None):...\n", "\"\"\"docstring\"\"\"\n", "mixer_index = _get_index_absolute_path('GIDIDX' + idx.upper())\n", "image_types = '*.jpg', '*.JPG', '*.jpeg', '.png'\n", "classes_list = []\n", "if classes:\n", "classes.replace('%2C', ',')\n", "def get_class_path():...\n", "classes.replace('%20', ' ')\n", "dataset_path = f.readline()\n", "classes_list = classes.split(',')\n", "dataset_path = '/'.join(dataset_path.split('/')[:-2])\n", "class_paths = []\n", "for c in classes_list:\n", "class_paths.append(_get_obj_absolute_path(dataset_path + '/' + c.strip()))\n", "return class_paths\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Expr'", "Return'" ]
[ "def FUNC_8(VAR_13, VAR_14, VAR_7):...\n", "\"\"\"docstring\"\"\"\n", "VAR_48 = False\n", "VAR_49 = False\n", "VAR_50 = dict([(identifier.type.lower(), identifier) for identifier in VAR_13])\n", "if len(VAR_13) != len(VAR_50):\n", "VAR_49 = True\n", "VAR_51 = dict([(identifier.type.lower(), identifier) for identifier in VAR_14])\n", "for identifier_type, identifier in VAR_51.items():\n", "if identifier_type not in VAR_50.keys():\n", "for identifier_type, identifier in VAR_50.items():\n", "VAR_7.delete(identifier)\n", "VAR_103 = VAR_50[identifier_type]\n", "if identifier_type not in VAR_51.keys():\n", "return VAR_48, VAR_49\n", "VAR_48 = True\n", "identifier.type = VAR_103.type\n", "VAR_7.add(identifier)\n", "identifier.val = VAR_103.val\n", "VAR_48 = True\n" ]
[ "def modify_identifiers(input_identifiers, db_identifiers, db_session):...\n", "\"\"\"docstring\"\"\"\n", "changed = False\n", "error = False\n", "input_dict = dict([(identifier.type.lower(), identifier) for identifier in\n input_identifiers])\n", "if len(input_identifiers) != len(input_dict):\n", "error = True\n", "db_dict = dict([(identifier.type.lower(), identifier) for identifier in\n db_identifiers])\n", "for identifier_type, identifier in db_dict.items():\n", "if identifier_type not in input_dict.keys():\n", "for identifier_type, identifier in input_dict.items():\n", "db_session.delete(identifier)\n", "input_identifier = input_dict[identifier_type]\n", "if identifier_type not in db_dict.keys():\n", "return changed, error\n", "changed = True\n", "identifier.type = input_identifier.type\n", "db_session.add(identifier)\n", "identifier.val = input_identifier.val\n", "changed = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "For", "Condition", "For", "Expr'", "Assign'", "Condition", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_36():...\n", "\"\"\"docstring\"\"\"\n", "VAR_3 = FUNC_5()\n", "VAR_96 = request.args(1)\n", "if not VAR_43.flash and VAR_3 == request.application:\n", "VAR_155 = T('ATTENTION: you cannot edit the running application!')\n", "if os.path.exists(apath('%s/compiled' % VAR_3, VAR_122=request)):\n", "VAR_43.flash = VAR_155\n", "session.flash = T('application is compiled and cannot be designed')\n", "VAR_81 = listdir(apath('%s/models/' % VAR_3, VAR_122=request), '.*\\\\.py$')\n", "redirect(URL('site'))\n", "VAR_81 = [x.replace('\\\\', '/') for x in VAR_81]\n", "VAR_82 = {}\n", "for VAR_156 in VAR_81:\n", "VAR_2 = FUNC_3(apath('%s/models/%s' % (VAR_3, VAR_156), VAR_122=request))\n", "VAR_51 = sorted(listdir(apath('%s/controllers/' % VAR_3, VAR_122=request),\n '.*\\\\.py$'))\n", "VAR_82[VAR_156] = re.findall(REGEX_DEFINE_TABLE, VAR_2, re.MULTILINE)\n", "VAR_51 = [x.replace('\\\\', '/') for x in VAR_51]\n", "VAR_82[VAR_156].sort()\n", "VAR_83 = {}\n", "for VAR_48 in VAR_51:\n", "VAR_2 = FUNC_3(apath('%s/controllers/%s' % (VAR_3, VAR_48), VAR_122=request))\n", "VAR_84 = sorted(listdir(apath('%s/views/' % VAR_3, VAR_122=request),\n '[\\\\w/\\\\-]+\\\\.\\\\w+$'))\n", "VAR_95 = find_exposed_functions(VAR_2)\n", "VAR_83[VAR_48] = ['SyntaxError:Line:%d' % err.lineno]\n", "VAR_84 = [x.replace('\\\\', '/') for x in VAR_84]\n", "VAR_83[VAR_48] = VAR_95 and sorted(VAR_95) or []\n", "VAR_85 = {}\n", "VAR_86 = {}\n", "for VAR_48 in VAR_84:\n", "VAR_2 = FUNC_3(apath('%s/views/%s' % (VAR_3, VAR_48), VAR_122=request))\n", "VAR_87 = listdir(apath('%s/modules/' % VAR_3, VAR_122=request), '.*\\\\.py$')\n", "VAR_95 = re.findall(REGEX_EXTEND, VAR_2, re.MULTILINE)\n", "VAR_87 = VAR_87 = [x.replace('\\\\', '/') for x in VAR_87]\n", "if VAR_95:\n", "VAR_87.sort()\n", "VAR_85[VAR_48] = VAR_95[0][1]\n", "VAR_95 = re.findall(REGEX_INCLUDE, VAR_2)\n", "VAR_88 = listdir(apath('%s/private/' % VAR_3, VAR_122=request), '[^\\\\.#].*')\n", "VAR_86[VAR_48] = [VAR_111[1] for VAR_111 in VAR_95]\n", "VAR_88 = [x.replace('\\\\', '/') for x in VAR_88]\n", "VAR_88.sort()\n", "VAR_89 = listdir(apath('%s/static/' % VAR_3, VAR_122=request), '[^\\\\.#].*',\n maxnum=MAXNFILES)\n", "VAR_89 = [x.replace(os.path.sep, '/') for x in VAR_89]\n", "VAR_89.sort()\n", "VAR_91 = sorted([(VAR_188 + '.py') for VAR_188, info in iteritems(T.\n get_possible_languages_info()) if info[2] != 0])\n", "VAR_93 = apath('%s/cron/crontab' % VAR_3, VAR_122=request)\n", "if not os.path.exists(VAR_93):\n", "FUNC_4(VAR_93, '#crontab')\n", "def FUNC_62(VAR_95):...\n", "VAR_32 = re.compile('^plugin_' + VAR_96 + '(/.*|\\\\..*)?$')\n", "return [VAR_70 for VAR_70 in VAR_95 if VAR_70 and VAR_32.match(VAR_70)]\n" ]
[ "def plugin():...\n", "\"\"\"docstring\"\"\"\n", "app = get_app()\n", "plugin = request.args(1)\n", "if not response.flash and app == request.application:\n", "msg = T('ATTENTION: you cannot edit the running application!')\n", "if os.path.exists(apath('%s/compiled' % app, r=request)):\n", "response.flash = msg\n", "session.flash = T('application is compiled and cannot be designed')\n", "models = listdir(apath('%s/models/' % app, r=request), '.*\\\\.py$')\n", "redirect(URL('site'))\n", "models = [x.replace('\\\\', '/') for x in models]\n", "defines = {}\n", "for m in models:\n", "data = safe_read(apath('%s/models/%s' % (app, m), r=request))\n", "controllers = sorted(listdir(apath('%s/controllers/' % app, r=request),\n '.*\\\\.py$'))\n", "defines[m] = re.findall(REGEX_DEFINE_TABLE, data, re.MULTILINE)\n", "controllers = [x.replace('\\\\', '/') for x in controllers]\n", "defines[m].sort()\n", "functions = {}\n", "for c in controllers:\n", "data = safe_read(apath('%s/controllers/%s' % (app, c), r=request))\n", "views = sorted(listdir(apath('%s/views/' % app, r=request),\n '[\\\\w/\\\\-]+\\\\.\\\\w+$'))\n", "items = find_exposed_functions(data)\n", "functions[c] = ['SyntaxError:Line:%d' % err.lineno]\n", "views = [x.replace('\\\\', '/') for x in views]\n", "functions[c] = items and sorted(items) or []\n", "extend = {}\n", "include = {}\n", "for c in views:\n", "data = safe_read(apath('%s/views/%s' % (app, c), r=request))\n", "modules = listdir(apath('%s/modules/' % app, r=request), '.*\\\\.py$')\n", "items = re.findall(REGEX_EXTEND, data, re.MULTILINE)\n", "modules = modules = [x.replace('\\\\', '/') for x in modules]\n", "if items:\n", "modules.sort()\n", "extend[c] = items[0][1]\n", "items = re.findall(REGEX_INCLUDE, data)\n", "privates = listdir(apath('%s/private/' % app, r=request), '[^\\\\.#].*')\n", "include[c] = [i[1] for i in items]\n", "privates = [x.replace('\\\\', '/') for x in privates]\n", "privates.sort()\n", "statics = listdir(apath('%s/static/' % app, r=request), '[^\\\\.#].*', maxnum\n =MAXNFILES)\n", "statics = [x.replace(os.path.sep, '/') for x in statics]\n", "statics.sort()\n", "languages = sorted([(lang + '.py') for lang, info in iteritems(T.\n get_possible_languages_info()) if info[2] != 0])\n", "crontab = apath('%s/cron/crontab' % app, r=request)\n", "if not os.path.exists(crontab):\n", "safe_write(crontab, '#crontab')\n", "def filter_plugins(items):...\n", "regex = re.compile('^plugin_' + plugin + '(/.*|\\\\..*)?$')\n", "return [item for item in items if item and regex.match(item)]\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Expr'", "FunctionDef'", "Assign'", "Return'" ]
[ "def FUNC_37(self):...\n", "VAR_32 = models.Honeycomb.objects.create(location='Old tree')\n", "VAR_32.bee_set.create()\n", "VAR_29 = models.Bee._meta.get_field('honeycomb').rel\n", "VAR_27 = widgets.ForeignKeyRawIdWidget(VAR_29, widget_admin_site)\n", "self.assertHTMLEqual(conditional_escape(VAR_27.render('honeycomb_widget',\n VAR_32.pk, attrs={})), 'string' % {'hcombpk': VAR_32.pk})\n" ]
[ "def test_fk_related_model_not_in_admin(self):...\n", "big_honeycomb = models.Honeycomb.objects.create(location='Old tree')\n", "big_honeycomb.bee_set.create()\n", "rel = models.Bee._meta.get_field('honeycomb').rel\n", "w = widgets.ForeignKeyRawIdWidget(rel, widget_admin_site)\n", "self.assertHTMLEqual(conditional_escape(w.render('honeycomb_widget',\n big_honeycomb.pk, attrs={})), \n '<input type=\"text\" name=\"honeycomb_widget\" value=\"%(hcombpk)s\" />&nbsp;<strong>Honeycomb object</strong>'\n % {'hcombpk': big_honeycomb.pk})\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_29(VAR_12, VAR_13, VAR_14, VAR_15):...\n", "if 'tags' in VAR_14:\n", "VAR_40 = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(VAR_15)).all()\n", "VAR_40 = calibre_db.session.query(db.cc_classes[config.\n config_restricted_column]).filter(db.cc_classes[config.\n config_restricted_column].id.in_(VAR_15)).all()\n", "if not VAR_40:\n", "VAR_102 = [VAR_17.value for VAR_17 in VAR_40]\n", "VAR_102 = [VAR_17.name for VAR_17 in VAR_40]\n", "VAR_70 = VAR_12.__dict__[VAR_14].split(',') if len(VAR_12.__dict__[VAR_14]\n ) else []\n", "if VAR_13 == 'remove':\n", "VAR_70 = [VAR_17 for VAR_17 in VAR_70 if VAR_17 not in VAR_102]\n", "if VAR_13 == 'add':\n", "return ','.join(VAR_70)\n", "VAR_70.extend(VAR_17 for VAR_17 in VAR_102 if VAR_17 not in VAR_70)\n" ]
[ "def prepare_tags(user, action, tags_name, id_list):...\n", "if 'tags' in tags_name:\n", "tags = calibre_db.session.query(db.Tags).filter(db.Tags.id.in_(id_list)).all()\n", "tags = calibre_db.session.query(db.cc_classes[config.config_restricted_column]\n ).filter(db.cc_classes[config.config_restricted_column].id.in_(id_list)\n ).all()\n", "if not tags:\n", "new_tags_list = [x.value for x in tags]\n", "new_tags_list = [x.name for x in tags]\n", "saved_tags_list = user.__dict__[tags_name].split(',') if len(user.__dict__[\n tags_name]) else []\n", "if action == 'remove':\n", "saved_tags_list = [x for x in saved_tags_list if x not in new_tags_list]\n", "if action == 'add':\n", "return ','.join(saved_tags_list)\n", "saved_tags_list.extend(x for x in new_tags_list if x not in saved_tags_list)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_16(self):...\n", "VAR_115 = []\n", "VAR_116 = {'offset', 'checksum'}\n", "for param in sorted(self.cleaned_data):\n", "VAR_13 = self.cleaned_data[param]\n", "return VAR_115\n", "if VAR_13 is None or param in VAR_116:\n", "if isinstance(VAR_13, bool):\n", "if VAR_13:\n", "if isinstance(VAR_13, int):\n", "VAR_115.append((param, '1'))\n", "if VAR_13 > 0:\n", "if isinstance(VAR_13, datetime):\n", "VAR_115.append((param, str(VAR_13)))\n", "VAR_115.append((param, VAR_13.date().isoformat()))\n", "if isinstance(VAR_13, list):\n", "for val in VAR_13:\n", "if isinstance(VAR_13, User):\n", "VAR_115.append((param, val))\n", "VAR_115.append((param, VAR_13.username))\n", "if VAR_13:\n", "VAR_115.append((param, VAR_13))\n" ]
[ "def items(self):...\n", "items = []\n", "ignored = {'offset', 'checksum'}\n", "for param in sorted(self.cleaned_data):\n", "value = self.cleaned_data[param]\n", "return items\n", "if value is None or param in ignored:\n", "if isinstance(value, bool):\n", "if value:\n", "if isinstance(value, int):\n", "items.append((param, '1'))\n", "if value > 0:\n", "if isinstance(value, datetime):\n", "items.append((param, str(value)))\n", "items.append((param, value.date().isoformat()))\n", "if isinstance(value, list):\n", "for val in value:\n", "if isinstance(value, User):\n", "items.append((param, val))\n", "items.append((param, value.username))\n", "if value:\n", "items.append((param, value))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Return'", "Condition", "Condition", "Condition", "Condition", "Expr'", "Condition", "Condition", "Expr'", "Expr'", "Condition", "For", "Condition", "Expr'", "Expr'", "Condition", "Expr'" ]
[ "@FUNC_0...\n", "return FederationClient(self)\n" ]
[ "@cache_in_self...\n", "return FederationClient(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_13(self, VAR_5, VAR_6, VAR_7, VAR_8=None, VAR_9=None):...\n", "VAR_6 = self.get_success(self.handler.check_device_registered(VAR_5=user_id,\n VAR_6=device_id, initial_device_display_name=display_name))\n", "if VAR_9 is not None:\n", "self.get_success(self.store.insert_client_ip(VAR_5, VAR_8, VAR_9,\n 'user_agent', VAR_6))\n", "self.reactor.advance(1000)\n" ]
[ "def _record_user(self, user_id, device_id, display_name, access_token=None,...\n", "device_id = self.get_success(self.handler.check_device_registered(user_id=\n user_id, device_id=device_id, initial_device_display_name=display_name))\n", "if ip is not None:\n", "self.get_success(self.store.insert_client_ip(user_id, access_token, ip,\n 'user_agent', device_id))\n", "self.reactor.advance(1000)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "def FUNC_4(VAR_10):...\n", "VAR_10 = VAR_10.strip().lower()\n", "VAR_10 = re.sub('\\\\(\\\\)', '', VAR_10)\n", "VAR_10 = re.sub('[\\\\s/(,]+', '-', VAR_10)\n", "VAR_10 = re.sub('[^a-z0-9_-]', '', VAR_10)\n", "VAR_10 = VAR_10.rstrip('-')\n", "return VAR_10\n" ]
[ "def simplifyText(text):...\n", "text = text.strip().lower()\n", "text = re.sub('\\\\(\\\\)', '', text)\n", "text = re.sub('[\\\\s/(,]+', '-', text)\n", "text = re.sub('[^a-z0-9_-]', '', text)\n", "text = text.rstrip('-')\n", "return text\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_19 = VAR_2.session['connector'].server_id\n", "VAR_115 = []\n", "if VAR_24 == 'p':\n", "VAR_202 = VAR_8.getObject('Project', VAR_25)\n", "if VAR_24 == 'd':\n", "if VAR_202 is None:\n", "VAR_202 = VAR_8.getObject('Dataset', VAR_25)\n", "if VAR_24 == 'w':\n", "for VAR_213 in VAR_202.listChildren():\n", "if VAR_202 is None:\n", "VAR_202 = VAR_8.getObject('Well', VAR_25)\n", "VAR_202 = VAR_8.getObject('Image', VAR_25)\n", "VAR_115.extend(list(VAR_213.listChildren()))\n", "VAR_17 = VAR_202.getName()\n", "VAR_115.extend(list(VAR_202.listChildren()))\n", "if VAR_202 is None:\n", "if VAR_202 is None:\n", "VAR_115 = [VAR_30 for VAR_30 in VAR_115 if not VAR_30.requiresPixelsPyramid()]\n", "VAR_273 = list(filter(None, VAR_2.GET.get('selection', '').split(',')))\n", "VAR_115.extend([VAR_30.getImage() for VAR_30 in VAR_202.listChildren()])\n", "VAR_115.append(VAR_202)\n", "if VAR_2.GET.get('dryrun', False):\n", "if len(VAR_273) > 0:\n", "VAR_124 = VAR_202.getParent()\n", "VAR_54 = json.dumps(len(VAR_115))\n", "if len(VAR_115) == 0:\n", "VAR_1.debug(VAR_273)\n", "VAR_17 = '%s-%s' % (VAR_202.getParent().getName(), VAR_202.getName())\n", "VAR_313 = '%s%s' % (VAR_124.getRowLabels()[VAR_202.row], VAR_124.\n getColumnLabels()[VAR_202.column])\n", "VAR_203 = VAR_2.GET.get('callback', None)\n", "if len(VAR_115) == 1:\n", "VAR_1.debug(VAR_115)\n", "VAR_17 = '%s-%s-%s' % (VAR_124.getParent().getName(), VAR_124.getName(),\n VAR_313)\n", "if VAR_203 is not None and not VAR_9.get('_internal', False):\n", "VAR_202 = VAR_115[0]\n", "VAR_274 = '+'.join(str(VAR_30.getId()) for VAR_30 in VAR_115).encode('utf-8')\n", "VAR_1.debug(traceback.format_exc())\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' +\n VAR_207)\n", "VAR_115 = [VAR_30 for VAR_30 in VAR_115 if str(VAR_30.getId()) in VAR_273]\n", "VAR_54 = '%s(%s)' % (VAR_203, VAR_54)\n", "return HttpJavascriptResponse(VAR_54)\n", "VAR_79 = '_'.join(str(VAR_30.getId()) for VAR_30 in VAR_202.getAncestry()\n ) + '_' + str(VAR_202.getId()) + '_ome_tiff'\n", "VAR_79 = '_'.join(str(VAR_30.getId()) for VAR_30 in VAR_115[0].getAncestry()\n ) + '_' + md5(VAR_274).hexdigest() + '_ome_tiff_zip'\n", "VAR_1.debug(VAR_115)\n", "VAR_204 = 255 - len(str(VAR_202.getId())) - 10\n", "VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(VAR_17 + '.zip', VAR_79=key\n )\n", "if len(VAR_115) == 0:\n", "VAR_205 = VAR_202.getName()[:VAR_204]\n", "if VAR_208 is True:\n", "VAR_206, VAR_207, VAR_208 = webgateway_tempfile.new(str(VAR_202.getId()) +\n '-' + VAR_205 + '.ome.tiff', VAR_79=key)\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' +\n VAR_207)\n", "VAR_1.debug(VAR_206)\n", "if VAR_208 is True:\n", "if VAR_208 is None:\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' +\n VAR_207)\n", "VAR_209 = webgateway_cache.getOmeTiffImage(VAR_2, VAR_19, VAR_115[0])\n", "VAR_208 = BytesIO()\n", "VAR_275 = zipfile.ZipFile(VAR_208, 'w', zipfile.ZIP_STORED)\n", "if VAR_209 is None:\n", "for VAR_202 in VAR_115:\n", "if VAR_208 is None:\n", "VAR_209 = VAR_115[0].exportOmeTiff()\n", "VAR_1.debug('Failed to export image (2)', exc_info=True)\n", "if VAR_209 is None:\n", "VAR_209 = webgateway_cache.getOmeTiffImage(VAR_2, VAR_19, VAR_202)\n", "VAR_275.close()\n", "VAR_61 = HttpResponse(VAR_209, content_type='image/tiff')\n", "VAR_208.write(VAR_209)\n", "VAR_209 = None\n", "webgateway_tempfile.abort(VAR_206)\n", "webgateway_cache.setOmeTiffImage(VAR_2, VAR_19, VAR_115[0], 
VAR_209)\n", "if VAR_209 is None:\n", "if VAR_206 is None:\n", "VAR_61['Content-Disposition'] = 'attachment; filename=\"%s.ome.tiff\"' % (str\n (VAR_202.getId()) + '-' + VAR_205)\n", "VAR_208.close()\n", "VAR_209 = VAR_202.exportOmeTiff()\n", "VAR_204 = 255 - len(str(VAR_202.getId())) - 10\n", "VAR_314 = VAR_208.getvalue()\n", "VAR_61['Content-Length'] = len(VAR_209)\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' +\n VAR_207)\n", "if VAR_209 is None:\n", "VAR_205 = VAR_202.getName()[:VAR_204]\n", "VAR_61 = HttpResponse(VAR_314, content_type='application/zip')\n", "return VAR_61\n", "webgateway_cache.setOmeTiffImage(VAR_2, VAR_19, VAR_202, VAR_209)\n", "VAR_275.writestr(str(VAR_202.getId()) + '-' + VAR_205 + '.ome.tiff', VAR_209)\n", "VAR_61['Content-Disposition'] = 'attachment; filename=\"%s.zip\"' % VAR_17\n", "VAR_61['Content-Length'] = len(VAR_314)\n", "return VAR_61\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "server_id = request.session['connector'].server_id\n", "imgs = []\n", "if ctx == 'p':\n", "obj = conn.getObject('Project', cid)\n", "if ctx == 'd':\n", "if obj is None:\n", "obj = conn.getObject('Dataset', cid)\n", "if ctx == 'w':\n", "for d in obj.listChildren():\n", "if obj is None:\n", "obj = conn.getObject('Well', cid)\n", "obj = conn.getObject('Image', cid)\n", "imgs.extend(list(d.listChildren()))\n", "name = obj.getName()\n", "imgs.extend(list(obj.listChildren()))\n", "if obj is None:\n", "if obj is None:\n", "imgs = [x for x in imgs if not x.requiresPixelsPyramid()]\n", "selection = list(filter(None, request.GET.get('selection', '').split(',')))\n", "imgs.extend([x.getImage() for x in obj.listChildren()])\n", "imgs.append(obj)\n", "if request.GET.get('dryrun', False):\n", "if len(selection) > 0:\n", "plate = obj.getParent()\n", "rv = json.dumps(len(imgs))\n", "if len(imgs) == 0:\n", "logger.debug(selection)\n", "name = '%s-%s' % (obj.getParent().getName(), obj.getName())\n", "coord = '%s%s' % (plate.getRowLabels()[obj.row], plate.getColumnLabels()[\n obj.column])\n", "c = request.GET.get('callback', None)\n", "if len(imgs) == 1:\n", "logger.debug(imgs)\n", "name = '%s-%s-%s' % (plate.getParent().getName(), plate.getName(), coord)\n", "if c is not None and not kwargs.get('_internal', False):\n", "obj = imgs[0]\n", "img_ids = '+'.join(str(x.getId()) for x in imgs).encode('utf-8')\n", "logger.debug(traceback.format_exc())\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' + rpath)\n", "imgs = [x for x in imgs if str(x.getId()) in selection]\n", "rv = '%s(%s)' % (c, rv)\n", "return HttpJavascriptResponse(rv)\n", "key = '_'.join(str(x.getId()) for x in obj.getAncestry()) + '_' + str(obj.\n getId()) + '_ome_tiff'\n", "key = '_'.join(str(x.getId()) for x in imgs[0].getAncestry()) + '_' + md5(\n img_ids).hexdigest() + '_ome_tiff_zip'\n", "logger.debug(imgs)\n", "fnamemax = 255 - len(str(obj.getId())) - 10\n", "fpath, rpath, fobj = webgateway_tempfile.new(name + '.zip', key=key)\n", "if len(imgs) == 0:\n", "objname = obj.getName()[:fnamemax]\n", "if fobj is True:\n", "fpath, rpath, fobj = webgateway_tempfile.new(str(obj.getId()) + '-' +\n objname + '.ome.tiff', key=key)\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' + rpath)\n", "logger.debug(fpath)\n", "if fobj is True:\n", "if fobj is None:\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' + rpath)\n", "tiff_data = webgateway_cache.getOmeTiffImage(request, server_id, imgs[0])\n", "fobj = BytesIO()\n", "zobj = zipfile.ZipFile(fobj, 'w', zipfile.ZIP_STORED)\n", "if tiff_data is None:\n", "for obj in imgs:\n", "if fobj is None:\n", "tiff_data = imgs[0].exportOmeTiff()\n", "logger.debug('Failed to export image (2)', exc_info=True)\n", "if tiff_data is None:\n", "tiff_data = webgateway_cache.getOmeTiffImage(request, server_id, obj)\n", "zobj.close()\n", "rsp = HttpResponse(tiff_data, content_type='image/tiff')\n", "fobj.write(tiff_data)\n", "tiff_data = None\n", "webgateway_tempfile.abort(fpath)\n", "webgateway_cache.setOmeTiffImage(request, server_id, imgs[0], tiff_data)\n", "if tiff_data is None:\n", "if fpath is None:\n", "rsp['Content-Disposition'] = 'attachment; filename=\"%s.ome.tiff\"' % (str(\n obj.getId()) + '-' + objname)\n", "fobj.close()\n", "tiff_data = obj.exportOmeTiff()\n", "fnamemax = 255 - len(str(obj.getId())) - 10\n", "zip_data = fobj.getvalue()\n", "rsp['Content-Length'] = 
len(tiff_data)\n", "return HttpResponseRedirect(settings.STATIC_URL + 'webgateway/tfiles/' + rpath)\n", "if tiff_data is None:\n", "objname = obj.getName()[:fnamemax]\n", "rsp = HttpResponse(zip_data, content_type='application/zip')\n", "return rsp\n", "webgateway_cache.setOmeTiffImage(request, server_id, obj, tiff_data)\n", "zobj.writestr(str(obj.getId()) + '-' + objname + '.ome.tiff', tiff_data)\n", "rsp['Content-Disposition'] = 'attachment; filename=\"%s.zip\"' % name\n", "rsp['Content-Length'] = len(zip_data)\n", "return rsp\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "For", "Condition", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Assign'", "Assign'", "Return'", "Assign'", "Assign'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Return'", "Expr'", "Condition", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Condition", "For", "Condition", "Assign'", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Assign'", "Return'", "Expr'", "Expr'", "Assign'", "Assign'", "Return'" ]
[ "@staticmethod...\n", "return '`tab{0}`.`{1}`'.format(*VAR_11)\n" ]
[ "@staticmethod...\n", "return '`tab{0}`.`{1}`'.format(*parts)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_7(VAR_19):...\n", "VAR_34 = FUNC_10(VAR_19)\n", "VAR_36 = '/'.join(VAR_34.split('/')[:-2]) + '/classes.txt'\n", "if os.path.isfile(VAR_36):\n", "return '<object id={} src={} meta={} />'.format(quoteattr(url_for(\n '.get_object_id', VAR_19=object_path)), quoteattr(FUNC_8(VAR_19)),\n quoteattr(url_for('.get_object_meta', VAR_19=object_path)))\n", "return '<object id={} src={} />'.format(quoteattr(url_for('.get_object_id',\n VAR_19=object_path)), quoteattr(FUNC_8(VAR_19)))\n" ]
[ "def _get_object_element(object_path):...\n", "path = _get_obj_absolute_path(object_path)\n", "class_text = '/'.join(path.split('/')[:-2]) + '/classes.txt'\n", "if os.path.isfile(class_text):\n", "return '<object id={} src={} meta={} />'.format(quoteattr(url_for(\n '.get_object_id', object_path=object_path)), quoteattr(\n _get_object_src_uri(object_path)), quoteattr(url_for('.get_object_meta',\n object_path=object_path)))\n", "return '<object id={} src={} />'.format(quoteattr(url_for('.get_object_id',\n object_path=object_path)), quoteattr(_get_object_src_uri(object_path)))\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Return'", "Return'" ]
[ "@moderator_required...\n", "VAR_8 = get_object_or_404(Comment, VAR_2=pk)\n", "if is_post(VAR_0):\n", "Comment.objects.filter(VAR_2=pk).update(is_removed=remove)\n", "return render(VAR_0=request, template_name='spirit/comment/moderate.html',\n context={'comment': comment})\n", "return redirect(VAR_0.GET.get('next', VAR_8.get_absolute_url()))\n" ]
[ "@moderator_required...\n", "comment = get_object_or_404(Comment, pk=pk)\n", "if is_post(request):\n", "Comment.objects.filter(pk=pk).update(is_removed=remove)\n", "return render(request=request, template_name='spirit/comment/moderate.html',\n context={'comment': comment})\n", "return redirect(request.GET.get('next', comment.get_absolute_url()))\n" ]
[ 0, 0, 0, 0, 0, 4 ]
[ "Condition", "Assign'", "Condition", "Expr'", "Return'", "Return'" ]
[ "def FUNC_49(self, VAR_38, VAR_39):...\n", "VAR_87 = 'attachment; filename=\"' + VAR_39 + '\"'\n", "VAR_53.response.headers['Content-Type'] = 'application/x-download'\n", "VAR_53.response.headers['Content-Disposition'] = VAR_87\n", "return codecs.encode(VAR_38, 'UTF-8')\n" ]
[ "def serve_string_as_file(self, string, filename):...\n", "content_disposition = 'attachment; filename=\"' + filename + '\"'\n", "cherrypy.response.headers['Content-Type'] = 'application/x-download'\n", "cherrypy.response.headers['Content-Disposition'] = content_disposition\n", "return codecs.encode(string, 'UTF-8')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def __init__(self, VAR_7, VAR_8):...\n", "self.user_id = VAR_7\n", "self.session_key = VAR_8\n" ]
[ "def __init__(self, user_id, session_key):...\n", "self.user_id = user_id\n", "self.session_key = session_key\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_87():...\n", "VAR_364 = ','.join(VAR_301.get('columns'))\n", "yield VAR_364\n", "for rows in VAR_301.get('lazy_rows'):\n", "yield '\\n' + '\\n'.join([','.join([VAR_345(VAR_362) for VAR_362 in VAR_378]) for\n VAR_378 in rows])\n" ]
[ "def csv_gen():...\n", "csv_cols = ','.join(table_data.get('columns'))\n", "yield csv_cols\n", "for rows in table_data.get('lazy_rows'):\n", "yield '\\n' + '\\n'.join([','.join([str(d) for d in row]) for row in rows])\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "For", "Expr'" ]
[ "def FUNC_42(VAR_16):...\n", "VAR_21 = None\n", "if VAR_16.get('config_use_google_drive'):\n", "VAR_106 = {}\n", "VAR_81 = not VAR_21 and 'config_use_google_drive' in VAR_16\n", "if not os.path.isfile(gdriveutils.SETTINGS_YAML):\n", "if config.config_use_google_drive and not VAR_81:\n", "config.config_use_google_drive = False\n", "if gdrive_support:\n", "config.config_google_drive_watch_changes_response = {}\n", "config.config_use_google_drive = VAR_81\n", "VAR_21 = gdriveutils.get_error_text(VAR_106)\n", "if 'config_use_google_drive' in VAR_16 and not config.config_use_google_drive and not VAR_21:\n", "if FUNC_41(VAR_16, 'config_google_drive_folder'):\n", "VAR_106 = json.load(settings)['web']\n", "gdriveutils.deleteDatabaseOnChange()\n", "return VAR_21\n", "if not VAR_106:\n", "return FUNC_50(_('client_secrets.json Is Not Configured For Web Application'))\n", "gdriveutils.update_settings(VAR_106['client_id'], VAR_106['client_secret'],\n VAR_106['redirect_uris'][0])\n" ]
[ "def _configuration_gdrive_helper(to_save):...\n", "gdrive_error = None\n", "if to_save.get('config_use_google_drive'):\n", "gdrive_secrets = {}\n", "new_gdrive_value = not gdrive_error and 'config_use_google_drive' in to_save\n", "if not os.path.isfile(gdriveutils.SETTINGS_YAML):\n", "if config.config_use_google_drive and not new_gdrive_value:\n", "config.config_use_google_drive = False\n", "if gdrive_support:\n", "config.config_google_drive_watch_changes_response = {}\n", "config.config_use_google_drive = new_gdrive_value\n", "gdrive_error = gdriveutils.get_error_text(gdrive_secrets)\n", "if 'config_use_google_drive' in to_save and not config.config_use_google_drive and not gdrive_error:\n", "if _config_string(to_save, 'config_google_drive_folder'):\n", "gdrive_secrets = json.load(settings)['web']\n", "gdriveutils.deleteDatabaseOnChange()\n", "return gdrive_error\n", "if not gdrive_secrets:\n", "return _configuration_result(_(\n 'client_secrets.json Is Not Configured For Web Application'))\n", "gdriveutils.update_settings(gdrive_secrets['client_id'], gdrive_secrets[\n 'client_secret'], gdrive_secrets['redirect_uris'][0])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Return'", "Condition", "Return'", "Expr'" ]
[ "def FUNC_43(self, VAR_25, *VAR_0, **VAR_1):...\n", "return self.run_method(VAR_25, *VAR_0, **kwargs)\n" ]
[ "def run_trigger(self, method, *args, **kwargs):...\n", "return self.run_method(method, *args, **kwargs)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "@VAR_0.filter...\n", "if getattr(VAR_17, 'is_hyperlink', False):\n", "VAR_55 = six.text_type(VAR_17.obj)\n", "if VAR_17 is None or isinstance(VAR_17, bool):\n", "return mark_safe('<a href=%s>%s</a>' % (VAR_17, escape(VAR_55)))\n", "return mark_safe('<code>%s</code>' % {(True): 'true', (False): 'false',\n None: 'null'}[VAR_17])\n", "if isinstance(VAR_17, list):\n", "if any([isinstance(item, (list, dict)) for item in VAR_17]):\n", "if isinstance(VAR_17, dict):\n", "VAR_64 = loader.get_template('rest_framework/admin/list_value.html')\n", "VAR_64 = loader.get_template('rest_framework/admin/simple_list_value.html')\n", "VAR_64 = loader.get_template('rest_framework/admin/dict_value.html')\n", "if isinstance(VAR_17, six.string_types):\n", "VAR_36 = {'value': VAR_17}\n", "VAR_36 = {'value': VAR_17}\n", "if (VAR_17.startswith('http:') or VAR_17.startswith('https:')\n", "return six.text_type(VAR_17)\n", "return VAR_64.render(VAR_36)\n", "return VAR_64.render(VAR_36)\n", "return mark_safe('<a href=\"{value}\">{value}</a>'.format(VAR_17=escape(value)))\n", "if '@' in VAR_17 and not re.search('\\\\s', VAR_17):\n", "return mark_safe('<a href=\"mailto:{value}\">{value}</a>'.format(VAR_17=\n escape(value)))\n", "if '\\n' in VAR_17:\n", "return mark_safe('<pre>%s</pre>' % escape(VAR_17))\n" ]
[ "@register.filter...\n", "if getattr(value, 'is_hyperlink', False):\n", "name = six.text_type(value.obj)\n", "if value is None or isinstance(value, bool):\n", "return mark_safe('<a href=%s>%s</a>' % (value, escape(name)))\n", "return mark_safe('<code>%s</code>' % {(True): 'true', (False): 'false',\n None: 'null'}[value])\n", "if isinstance(value, list):\n", "if any([isinstance(item, (list, dict)) for item in value]):\n", "if isinstance(value, dict):\n", "template = loader.get_template('rest_framework/admin/list_value.html')\n", "template = loader.get_template('rest_framework/admin/simple_list_value.html')\n", "template = loader.get_template('rest_framework/admin/dict_value.html')\n", "if isinstance(value, six.string_types):\n", "context = {'value': value}\n", "context = {'value': value}\n", "if (value.startswith('http:') or value.startswith('https:')) and not re.search(\n", "return six.text_type(value)\n", "return template.render(context)\n", "return template.render(context)\n", "return mark_safe('<a href=\"{value}\">{value}</a>'.format(value=escape(value)))\n", "if '@' in value and not re.search('\\\\s', value):\n", "return mark_safe('<a href=\"mailto:{value}\">{value}</a>'.format(value=escape\n (value)))\n", "if '\\n' in value:\n", "return mark_safe('<pre>%s</pre>' % escape(value))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Assign'", "Condition", "Return'", "Return'", "Condition", "For", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'", "Return'", "Return'", "Return'", "Condition", "Return'", "Condition", "Return'" ]
[ "def FUNC_71(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.is_logged_in():\n", "redirect(self.settings.login_url)\n", "VAR_282 = self.table_membership()\n", "VAR_283 = self.db(VAR_282.user_id == self.user.id).select()\n", "VAR_153 = TABLE()\n", "for VAR_284 in VAR_283:\n", "VAR_381 = self.table_group()\n", "if not VAR_283:\n", "VAR_167 = self.db(VAR_381.id == VAR_284.group_id).select()\n", "return None\n", "return VAR_153\n", "if VAR_167:\n", "VAR_405 = VAR_167[0]\n", "VAR_153.append(TR(H3(VAR_405.role, '(%s)' % VAR_405.id)))\n", "VAR_153.append(TR(P(VAR_405.description)))\n" ]
[ "def groups(self):...\n", "\"\"\"docstring\"\"\"\n", "if not self.is_logged_in():\n", "redirect(self.settings.login_url)\n", "table_membership = self.table_membership()\n", "memberships = self.db(table_membership.user_id == self.user.id).select()\n", "table = TABLE()\n", "for membership in memberships:\n", "table_group = self.table_group()\n", "if not memberships:\n", "groups = self.db(table_group.id == membership.group_id).select()\n", "return None\n", "return table\n", "if groups:\n", "group = groups[0]\n", "table.append(TR(H3(group.role, '(%s)' % group.id)))\n", "table.append(TR(P(group.description)))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Assign'", "Return'", "Return'", "Condition", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_3(self) ->str:...\n", "\"\"\"docstring\"\"\"\n", "return self._instance_name\n" ]
[ "def get_instance_name(self) ->str:...\n", "\"\"\"docstring\"\"\"\n", "return self._instance_name\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@classmethod...\n", "return FUNC_0(VAR_4)\n" ]
[ "@classmethod...\n", "return path_exists(file_path)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_9(self):...\n", "return '{user}-{repo}'.format(user=self.user, repo=self.repo)\n" ]
[ "def get_build_slug(self):...\n", "return '{user}-{repo}'.format(user=self.user, repo=self.repo)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_84(VAR_72, *VAR_73):...\n", "def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n", "FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n", "for VAR_6 in VAR_73:\n", "FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n", "return self._return_value\n" ]
[ "def compose(fn, *hooks):...\n", "def runner(self, method, *args, **kwargs):...\n", "add_to_return_value(self, fn(self, *args, **kwargs))\n", "for f in hooks:\n", "add_to_return_value(self, f(self, method, *args, **kwargs))\n", "return self._return_value\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "FunctionDef'", "Expr'", "For", "Expr'", "Return'" ]
[ "def FUNC_2(self, VAR_3, VAR_4):...\n", "return django.forms.DateField(**options)\n" ]
[ "def create_date_field(self, field, options):...\n", "return django.forms.DateField(**options)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_12(self, VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "VAR_30 = super().evaluateStructure(VAR_20)\n", "return self._handleText(VAR_30, VAR_20)\n" ]
[ "def evaluateStructure(self, expr):...\n", "\"\"\"docstring\"\"\"\n", "text = super().evaluateStructure(expr)\n", "return self._handleText(text, expr)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Return'" ]
[ "@VAR_0.route('/api/jobs/<int:job_id>/<action>', methods=['PUT'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_71 = request.json\n", "VAR_98 = FUNC_58(f'/internal/jobs/{VAR_9}/{VAR_17}', 'put', json=request_data)\n", "return jsonify({'success': False, 'message': str(err)}), 400\n", "return jsonify(VAR_98)\n" ]
[ "@gui.route('/api/jobs/<int:job_id>/<action>', methods=['PUT'])...\n", "\"\"\"docstring\"\"\"\n", "request_data = request.json\n", "response_info = query_internal_api(f'/internal/jobs/{job_id}/{action}',\n 'put', json=request_data)\n", "return jsonify({'success': False, 'message': str(err)}), 400\n", "return jsonify(response_info)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Return'", "Return'" ]
[ "def FUNC_82(VAR_46, VAR_38):...\n", "\"\"\"docstring\"\"\"\n", "VAR_19 = VAR_46.get('user')\n", "if not VAR_19:\n", "VAR_40 = FUNC_76(VAR_19, VAR_38)\n", "VAR_163 = GeneratedCertificate.certificate_for_student(VAR_40, VAR_38)\n", "if not VAR_163:\n", "return VAR_163\n" ]
[ "def validate_request_data_and_get_certificate(certificate_invalidation,...\n", "\"\"\"docstring\"\"\"\n", "user = certificate_invalidation.get('user')\n", "if not user:\n", "student = get_student(user, course_key)\n", "certificate = GeneratedCertificate.certificate_for_student(student, course_key)\n", "if not certificate:\n", "return certificate\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Return'" ]
[ "@VAR_2.route('/ajax/togglearchived/<int:book_id>', methods=['POST'])...\n", "VAR_54 = ub.session.query(ub.ArchivedBook).filter(and_(ub.ArchivedBook.\n user_id == int(VAR_87.id), ub.ArchivedBook.book_id == VAR_5)).first()\n", "if VAR_54:\n", "VAR_54.is_archived = not VAR_54.is_archived\n", "VAR_54 = ub.ArchivedBook(VAR_11=current_user.id, VAR_5=book_id)\n", "VAR_54.last_modified = datetime.utcnow()\n", "VAR_54.is_archived = True\n", "ub.session.merge(VAR_54)\n", "ub.session_commit('Book {} archivebit toggled'.format(VAR_5))\n", "return ''\n" ]
[ "@web.route('/ajax/togglearchived/<int:book_id>', methods=['POST'])...\n", "archived_book = ub.session.query(ub.ArchivedBook).filter(and_(ub.\n ArchivedBook.user_id == int(current_user.id), ub.ArchivedBook.book_id ==\n book_id)).first()\n", "if archived_book:\n", "archived_book.is_archived = not archived_book.is_archived\n", "archived_book = ub.ArchivedBook(user_id=current_user.id, book_id=book_id)\n", "archived_book.last_modified = datetime.utcnow()\n", "archived_book.is_archived = True\n", "ub.session.merge(archived_book)\n", "ub.session_commit('Book {} archivebit toggled'.format(book_id))\n", "return ''\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_7(VAR_22):...\n", "" ]
[ "def geocode(address):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_18() ->None:...\n", "VAR_41.resolutionBegan(None)\n", "VAR_51 = False\n", "for i in VAR_42:\n", "VAR_7 = IPAddress(i.host)\n", "if not VAR_51:\n", "if FUNC_0(VAR_7, self._ip_whitelist, self._ip_blacklist):\n", "for i in VAR_42:\n", "VAR_41.resolutionComplete()\n", "VAR_0.info('Dropped %s from DNS resolution to %s due to blacklist' % (VAR_7,\n VAR_19))\n", "VAR_41.addressResolved(i)\n", "VAR_51 = True\n" ]
[ "def _callback() ->None:...\n", "r.resolutionBegan(None)\n", "has_bad_ip = False\n", "for i in addresses:\n", "ip_address = IPAddress(i.host)\n", "if not has_bad_ip:\n", "if check_against_blacklist(ip_address, self._ip_whitelist, self._ip_blacklist):\n", "for i in addresses:\n", "r.resolutionComplete()\n", "logger.info('Dropped %s from DNS resolution to %s due to blacklist' % (\n ip_address, hostname))\n", "r.addressResolved(i)\n", "has_bad_ip = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "For", "Assign'", "Condition", "Condition", "For", "Expr'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_18() ->None:...\n", "VAR_41.resolutionBegan(None)\n", "VAR_51 = False\n", "for i in VAR_42:\n", "VAR_7 = IPAddress(i.host)\n", "if not VAR_51:\n", "if FUNC_0(VAR_7, self._ip_whitelist, self._ip_blacklist):\n", "for i in VAR_42:\n", "VAR_41.resolutionComplete()\n", "VAR_0.info('Dropped %s from DNS resolution to %s due to blacklist' % (VAR_7,\n VAR_19))\n", "VAR_41.addressResolved(i)\n", "VAR_51 = True\n" ]
[ "def _callback() ->None:...\n", "r.resolutionBegan(None)\n", "has_bad_ip = False\n", "for i in addresses:\n", "ip_address = IPAddress(i.host)\n", "if not has_bad_ip:\n", "if check_against_blacklist(ip_address, self._ip_whitelist, self._ip_blacklist):\n", "for i in addresses:\n", "r.resolutionComplete()\n", "logger.info('Dropped %s from DNS resolution to %s due to blacklist' % (\n ip_address, hostname))\n", "r.addressResolved(i)\n", "has_bad_ip = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "For", "Assign'", "Condition", "Condition", "For", "Expr'", "Expr'", "Expr'", "Assign'" ]
[ "def FUNC_0(self):...\n", "return self.post()\n" ]
[ "def get(self):...\n", "return self.post()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_22(VAR_9, VAR_5, VAR_10):...\n", "VAR_13 = calibre_db.session.query(db.Series).filter(db.Series.id == VAR_5\n ).first()\n", "if VAR_13:\n", "VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage(VAR_9, 0, db.Books, db.\n Books.series.any(db.Series.id == VAR_5), [VAR_10[0][0]])\n", "abort(404)\n", "return render_title_template('index.html', VAR_68=random, VAR_65=pagination,\n VAR_63=entries, id=book_id, VAR_150=_(u'Series: %(serie)s', serie=name.\n name), VAR_9='series', VAR_10=order[1])\n" ]
[ "def render_series_books(page, book_id, order):...\n", "name = calibre_db.session.query(db.Series).filter(db.Series.id == book_id\n ).first()\n", "if name:\n", "entries, random, pagination = calibre_db.fill_indexpage(page, 0, db.Books,\n db.Books.series.any(db.Series.id == book_id), [order[0][0]])\n", "abort(404)\n", "return render_title_template('index.html', random=random, pagination=\n pagination, entries=entries, id=book_id, title=_(u'Series: %(serie)s',\n serie=name.name), page='series', order=order[1])\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Expr'", "Return'" ]
[ "def __init__(self, VAR_17: Unit, *VAR_6, **VAR_7):...\n", "self.unit = VAR_17\n", "super().__init__(*VAR_6, **kwargs)\n" ]
[ "def __init__(self, unit: Unit, *args, **kwargs):...\n", "self.unit = unit\n", "super().__init__(*args, **kwargs)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "from mock import Mock\n", "import synapse\n", "import synapse.api.errors\n", "from synapse.api.constants import EventTypes\n", "from synapse.config.room_directory import RoomDirectoryConfig\n", "from synapse.rest.client.v1 import directory, login, room\n", "from synapse.types import RoomAlias, create_requester\n", "from tests import unittest\n", "from tests.test_utils import make_awaitable\n", "\"\"\" Tests the directory service. \"\"\"\n", "def FUNC_0(self, VAR_0, VAR_1):...\n", "self.mock_federation = Mock()\n", "self.mock_registry = Mock()\n", "self.query_handlers = {}\n", "def FUNC_21(VAR_8, VAR_9):...\n", "self.query_handlers[VAR_8] = VAR_9\n", "self.mock_registry.register_query_handler = FUNC_21\n", "VAR_3 = self.setup_test_homeserver(http_client=None,\n resource_for_federation=Mock(), federation_client=self.mock_federation,\n federation_registry=self.mock_registry)\n", "self.handler = VAR_3.get_directory_handler()\n", "self.store = VAR_3.get_datastore()\n", "self.my_room = RoomAlias.from_string('#my-room:test')\n", "self.your_room = RoomAlias.from_string('#your-room:test')\n", "self.remote_room = RoomAlias.from_string('#another:remote')\n", "return VAR_3\n" ]
[ "from mock import Mock\n", "import synapse\n", "import synapse.api.errors\n", "from synapse.api.constants import EventTypes\n", "from synapse.config.room_directory import RoomDirectoryConfig\n", "from synapse.rest.client.v1 import directory, login, room\n", "from synapse.types import RoomAlias, create_requester\n", "from tests import unittest\n", "from tests.test_utils import make_awaitable\n", "\"\"\" Tests the directory service. \"\"\"\n", "def make_homeserver(self, reactor, clock):...\n", "self.mock_federation = Mock()\n", "self.mock_registry = Mock()\n", "self.query_handlers = {}\n", "def register_query_handler(query_type, handler):...\n", "self.query_handlers[query_type] = handler\n", "self.mock_registry.register_query_handler = register_query_handler\n", "hs = self.setup_test_homeserver(http_client=None, resource_for_federation=\n Mock(), federation_client=self.mock_federation, federation_registry=\n self.mock_registry)\n", "self.handler = hs.get_directory_handler()\n", "self.store = hs.get_datastore()\n", "self.my_room = RoomAlias.from_string('#my-room:test')\n", "self.your_room = RoomAlias.from_string('#your-room:test')\n", "self.remote_room = RoomAlias.from_string('#another:remote')\n", "return hs\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0 ]
[ "ImportFrom'", "Import'", "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_26(VAR_66):...\n", "\"\"\"docstring\"\"\"\n", "VAR_81 = StringIO()\n", "VAR_82 = csv.writer(VAR_81)\n", "VAR_82.writerow([VAR_63, VAR_64])\n", "for csv_line in VAR_66:\n", "VAR_82.writerow([str(csv_line[0][:-4]).replace('T', ' '), csv_line[1]])\n", "VAR_81.seek(0)\n", "yield VAR_81.read()\n", "VAR_81.truncate(0)\n", "VAR_81.seek(0)\n" ]
[ "def iter_csv(data):...\n", "\"\"\"docstring\"\"\"\n", "line = StringIO()\n", "writer = csv.writer(line)\n", "writer.writerow([col_1, col_2])\n", "for csv_line in data:\n", "writer.writerow([str(csv_line[0][:-4]).replace('T', ' '), csv_line[1]])\n", "line.seek(0)\n", "yield line.read()\n", "line.truncate(0)\n", "line.seek(0)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "For", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_107(VAR_169, VAR_7='utf-8'):...\n", "" ]
[ "def safe_encode(param, encoding='utf-8'):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "import os\n", "from django.core.files.base import ContentFile\n", "from django.core.files.uploadedfile import SimpleUploadedFile\n", "from s3file.middleware import S3FileMiddleware\n", "from s3file.storages import storage\n", "def FUNC_0(self):...\n", "VAR_3 = b'test_get_files_from_storage'\n", "VAR_4 = storage.save('tmp/s3file/test_get_files_from_storage', ContentFile(\n VAR_3))\n", "VAR_5 = S3FileMiddleware.get_files_from_storage([os.path.join(storage.\n aws_location, VAR_4)])\n", "VAR_6 = next(VAR_5)\n", "assert VAR_6.read() == VAR_3\n", "def FUNC_1(self, VAR_0):...\n", "VAR_7 = SimpleUploadedFile('uploaded_file.txt', b'uploaded')\n", "VAR_8 = VAR_0.post('/', data={'file': uploaded_file})\n", "S3FileMiddleware(lambda x: None)(VAR_8)\n", "assert VAR_8.FILES.getlist('file')\n", "assert VAR_8.FILES.get('file').read() == b'uploaded'\n", "storage.save('tmp/s3file/s3_file.txt', ContentFile(b's3file'))\n", "VAR_8 = VAR_0.post('/', data={'file':\n 'custom/location/tmp/s3file/s3_file.txt', 's3file': 'file'})\n", "S3FileMiddleware(lambda x: None)(VAR_8)\n", "assert VAR_8.FILES.getlist('file')\n", "assert VAR_8.FILES.get('file').read() == b's3file'\n", "def FUNC_2(self, VAR_0):...\n", "storage.save('tmp/s3file/s3_file.txt', ContentFile(b's3file'))\n", "storage.save('tmp/s3file/s3_other_file.txt', ContentFile(b'other s3file'))\n", "VAR_8 = VAR_0.post('/', data={'file': [\n 'custom/location/tmp/s3file/s3_file.txt',\n 'custom/location/tmp/s3file/s3_other_file.txt'], 's3file': ['file',\n 'other_file']})\n", "S3FileMiddleware(lambda x: None)(VAR_8)\n", "VAR_5 = VAR_8.FILES.getlist('file')\n", "assert VAR_5[0].read() == b's3file'\n", "assert VAR_5[1].read() == b'other s3file'\n", "def FUNC_3(self, VAR_0, VAR_1):...\n", "VAR_1.AWS_LOCATION = ''\n", "VAR_7 = SimpleUploadedFile('uploaded_file.txt', b'uploaded')\n", "VAR_8 = VAR_0.post('/', data={'file': uploaded_file})\n", "S3FileMiddleware(lambda x: None)(VAR_8)\n", "assert VAR_8.FILES.getlist('file')\n", "assert VAR_8.FILES.get('file').read() == b'uploaded'\n", "storage.save('tmp/s3file/s3_file.txt', ContentFile(b's3file'))\n", "VAR_8 = VAR_0.post('/', data={'file': 'tmp/s3file/s3_file.txt', 's3file':\n 'file'})\n", "S3FileMiddleware(lambda x: None)(VAR_8)\n", "assert VAR_8.FILES.getlist('file')\n", "assert VAR_8.FILES.get('file').read() == b's3file'\n", "def FUNC_4(self, VAR_0, VAR_2):...\n", "VAR_8 = VAR_0.post('/', data={'file': 'does_not_exist.txt', 's3file': 'file'})\n", "S3FileMiddleware(lambda x: None)(VAR_8)\n", "assert not VAR_8.FILES.getlist('file')\n", "assert 'File not found: does_not_exist.txt' in VAR_2.text\n" ]
[ "import os\n", "from django.core.files.base import ContentFile\n", "from django.core.files.uploadedfile import SimpleUploadedFile\n", "from s3file.middleware import S3FileMiddleware\n", "from s3file.storages import storage\n", "def test_get_files_from_storage(self):...\n", "content = b'test_get_files_from_storage'\n", "name = storage.save('tmp/s3file/test_get_files_from_storage', ContentFile(\n content))\n", "files = S3FileMiddleware.get_files_from_storage([os.path.join(storage.\n aws_location, name)])\n", "file = next(files)\n", "assert file.read() == content\n", "def test_process_request(self, rf):...\n", "uploaded_file = SimpleUploadedFile('uploaded_file.txt', b'uploaded')\n", "request = rf.post('/', data={'file': uploaded_file})\n", "S3FileMiddleware(lambda x: None)(request)\n", "assert request.FILES.getlist('file')\n", "assert request.FILES.get('file').read() == b'uploaded'\n", "storage.save('tmp/s3file/s3_file.txt', ContentFile(b's3file'))\n", "request = rf.post('/', data={'file':\n 'custom/location/tmp/s3file/s3_file.txt', 's3file': 'file'})\n", "S3FileMiddleware(lambda x: None)(request)\n", "assert request.FILES.getlist('file')\n", "assert request.FILES.get('file').read() == b's3file'\n", "def test_process_request__multiple_files(self, rf):...\n", "storage.save('tmp/s3file/s3_file.txt', ContentFile(b's3file'))\n", "storage.save('tmp/s3file/s3_other_file.txt', ContentFile(b'other s3file'))\n", "request = rf.post('/', data={'file': [\n 'custom/location/tmp/s3file/s3_file.txt',\n 'custom/location/tmp/s3file/s3_other_file.txt'], 's3file': ['file',\n 'other_file']})\n", "S3FileMiddleware(lambda x: None)(request)\n", "files = request.FILES.getlist('file')\n", "assert files[0].read() == b's3file'\n", "assert files[1].read() == b'other s3file'\n", "def test_process_request__no_location(self, rf, settings):...\n", "settings.AWS_LOCATION = ''\n", "uploaded_file = SimpleUploadedFile('uploaded_file.txt', b'uploaded')\n", "request = rf.post('/', data={'file': uploaded_file})\n", "S3FileMiddleware(lambda x: None)(request)\n", "assert request.FILES.getlist('file')\n", "assert request.FILES.get('file').read() == b'uploaded'\n", "storage.save('tmp/s3file/s3_file.txt', ContentFile(b's3file'))\n", "request = rf.post('/', data={'file': 'tmp/s3file/s3_file.txt', 's3file':\n 'file'})\n", "S3FileMiddleware(lambda x: None)(request)\n", "assert request.FILES.getlist('file')\n", "assert request.FILES.get('file').read() == b's3file'\n", "def test_process_request__no_file(self, rf, caplog):...\n", "request = rf.post('/', data={'file': 'does_not_exist.txt', 's3file': 'file'})\n", "S3FileMiddleware(lambda x: None)(request)\n", "assert not request.FILES.getlist('file')\n", "assert 'File not found: does_not_exist.txt' in caplog.text\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 1 ]
[ "Import'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "ImportFrom'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Expr'", "Assert'", "Assert'", "Expr'", "Assign'", "Expr'", "Assert'", "Assert'", "FunctionDef'", "Expr'", "Expr'", "Assign'", "Expr'", "Assign'", "Assert'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Expr'", "Assert'", "Assert'", "Expr'", "Assign'", "Expr'", "Assert'", "Assert'", "FunctionDef'", "Assign'", "Expr'", "Assert'", "Assert'" ]
[ "@parameterized.named_parameters(('non_tfrt', False))...\n", "self.parser = saved_model_cli.create_parser()\n", "VAR_10 = test.test_src_dir_path(VAR_0)\n", "VAR_45 = np.array([[1], [2]])\n", "VAR_46 = np.zeros((6, 3))\n", "VAR_32 = os.path.join(test.get_temp_dir(),\n 'testRunCommandOutOverwrite_inputs.npz')\n", "np.savez(VAR_32, VAR_27=x, VAR_28=x_notused)\n", "VAR_47 = os.path.join(test.get_temp_dir(), 'y.npy')\n", "open(VAR_47, 'a').close()\n", "VAR_11 = self.parser.parse_args(['run', '--dir', VAR_10, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n VAR_32 + '[x0]', '--outdir', test.get_temp_dir()] + (['--use_tfrt'] if\n VAR_5 else []))\n", "saved_model_cli.run(VAR_11)\n" ]
[ "@parameterized.named_parameters(('non_tfrt', False))...\n", "self.parser = saved_model_cli.create_parser()\n", "base_path = test.test_src_dir_path(SAVED_MODEL_PATH)\n", "x = np.array([[1], [2]])\n", "x_notused = np.zeros((6, 3))\n", "input_path = os.path.join(test.get_temp_dir(),\n 'testRunCommandOutOverwrite_inputs.npz')\n", "np.savez(input_path, x0=x, x1=x_notused)\n", "output_file = os.path.join(test.get_temp_dir(), 'y.npy')\n", "open(output_file, 'a').close()\n", "args = self.parser.parse_args(['run', '--dir', base_path, '--tag_set',\n 'serve', '--signature_def', 'serving_default', '--inputs', 'x=' +\n input_path + '[x0]', '--outdir', test.get_temp_dir()] + (['--use_tfrt'] if\n use_tfrt else []))\n", "saved_model_cli.run(args)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Expr'" ]
[ "def FUNC_4(VAR_7, VAR_9, VAR_8='w'):...\n", "VAR_22 = FUNC_2(VAR_7, VAR_8)\n", "VAR_22.write(VAR_9)\n", "VAR_22.close()\n" ]
[ "def safe_write(a, value, b='w'):...\n", "safe_file = safe_open(a, b)\n", "safe_file.write(value)\n", "safe_file.close()\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_61(self, VAR_25):...\n", "VAR_72 = getattr(self, VAR_25, None)\n", "if not VAR_72:\n", "if not getattr(VAR_72, 'whitelisted', False):\n" ]
[ "def is_whitelisted(self, method):...\n", "fn = getattr(self, method, None)\n", "if not fn:\n", "if not getattr(fn, 'whitelisted', False):\n" ]
[ 0, 2, 0, 2 ]
[ "FunctionDef'", "Assign'", "Condition", "Condition" ]
[ "@app.route('/robots.txt')...\n", "return send_from_directory(app.static_folder, 'robots.txt')\n" ]
[ "@app.route('/robots.txt')...\n", "return send_from_directory(app.static_folder, 'robots.txt')\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_22(self, VAR_19):...\n", "return constants.has_flag(self.role, VAR_19)\n" ]
[ "def _has_role(self, role_flag):...\n", "return constants.has_flag(self.role, role_flag)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_4(self, VAR_11, VAR_12, VAR_13):...\n", "if VAR_13:\n", "if VAR_12:\n", "return ''\n", "return io.StringIO('')\n" ]
[ "def _fail(self, location, str, okayToFail):...\n", "if okayToFail:\n", "if str:\n", "return ''\n", "return io.StringIO('')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Condition", "Return'", "Return'" ]

Dataset Card for "CVEFixes_Python_with_norm_vul_lines"

From the preview rows, each record pairs an identifier-normalized view of a Python code snippet (names replaced with placeholders such as VAR_n and FUNC_n) with the corresponding raw source lines, one integer label per line, and the statement type of each line (for example FunctionDef', Assign', Return'). The non-zero labels appear to flag the vulnerability-relevant lines suggested by the dataset name. More information is still needed on provenance (presumably the CVEfixes corpus), label semantics, splits, and licensing.
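
Since the card does not yet include a usage example, below is a minimal sketch of how the per-line columns visible in the preview could be loaded and aligned. The Hub repository id (and its namespace), the "train" split name, the column names, and the reading of non-zero labels as vulnerability-relevant lines are all assumptions rather than facts stated by this card.

```python
# Minimal sketch: load the dataset and pair each raw source line with its label
# and statement type. Assumptions (not stated in the card): the repository id
# below, the "train" split, the column names, and that non-zero labels mark
# vulnerability-relevant lines.
from datasets import load_dataset

repo_id = "CVEFixes_Python_with_norm_vul_lines"  # prepend the owning namespace if required

ds = load_dataset(repo_id, split="train")

example = ds[0]
# Each record carries parallel per-line sequences:
#   "lines"     - identifier-normalized code (VAR_n / FUNC_n placeholders)
#   "raw_lines" - the original source lines
#   "label"     - one integer per line
#   "type"      - the statement kind of each line (FunctionDef', Assign', ...)
for raw, label, kind in zip(example["raw_lines"], example["label"], example["type"]):
    marker = "VULN" if label != 0 else "    "
    print(f"{marker} [{kind:>12}] {raw.rstrip()}")
```

Aligning raw_lines with label this way makes it straightforward to extract only the flagged lines for line-level vulnerability experiments.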
