lines (sequence)
raw_lines (sequence)
label (sequence)
type (sequence)
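Each record below consists of four parallel sequences of equal length: `lines` (an identifier-anonymized version of a Python snippet), `raw_lines` (the original source lines), `label` (a per-line integer label), and `type` (a per-line node-type tag such as "Assign'" or "Return'"). The sketch below shows one way such a record could be represented and iterated in Python; the field names follow the schema above, while the `Record` dataclass and the usage example are illustrative assumptions, not part of the dataset itself.

from dataclasses import dataclass
from typing import List

@dataclass
class Record:
    # One dataset record: four parallel sequences of equal length.
    # Field names follow the schema above; the class itself is an
    # illustrative assumption, not part of the dataset release.
    lines: List[str]      # identifier-anonymized source lines
    raw_lines: List[str]  # original source lines
    label: List[int]      # per-line integer label
    type: List[str]       # per-line node-type tag (e.g. "Assign'", "Return'")

    def rows(self):
        # Zip the parallel sequences so each line can be inspected together.
        return zip(self.lines, self.raw_lines, self.label, self.type)

# Hypothetical usage, built from the first record below.
example = Record(
    lines=["def FUNC_6(VAR_14):...\n", '"""docstring"""\n',
           "return os.stat(VAR_14).st_mtime\n"],
    raw_lines=["def mtime(path):...\n", '"""docstring"""\n',
               "return os.stat(path).st_mtime\n"],
    label=[0, 0, 0],
    type=["FunctionDef'", "Docstring", "Return'"],
)
for anon, raw, lab, node in example.rows():
    print(lab, node, raw.rstrip())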
[ "def FUNC_6(VAR_14):...\n", "\"\"\"docstring\"\"\"\n", "return os.stat(VAR_14).st_mtime\n" ]
[ "def mtime(path):...\n", "\"\"\"docstring\"\"\"\n", "return os.stat(path).st_mtime\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "@internationalizeDocstring...\n", "\"\"\"docstring\"\"\"\n", "if not VAR_8:\n", "VAR_8 = str(VAR_7)\n", "VAR_3.reply(self._convertBaseToBase(VAR_8, VAR_7, VAR_6))\n", "VAR_3.error(VAR_0('Invalid <number> for base %s: %s') % (VAR_6, VAR_8))\n", "VAR_7 = 10\n" ]
[ "@internationalizeDocstring...\n", "\"\"\"docstring\"\"\"\n", "if not number:\n", "number = str(to)\n", "irc.reply(self._convertBaseToBase(number, to, frm))\n", "irc.error(_('Invalid <number> for base %s: %s') % (frm, number))\n", "to = 10\n" ]
[ 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Assign'", "Expr'", "Expr'", "Assign'" ]
[ "@frappe.whitelist()...\n", "return frappe.db.get_value('Report', VAR_0, 'disable_prepared_report') or 0\n" ]
[ "@frappe.whitelist()...\n", "return frappe.db.get_value('Report', report, 'disable_prepared_report') or 0\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_32(self, *VAR_13, **VAR_14):...\n", "if self.directory is None:\n", "return\n", "return super(CLASS_2, self).delete(*VAR_13, **kwargs)\n" ]
[ "def delete(self, *args, **kwargs):...\n", "if self.directory is None:\n", "return\n", "return super(SafeFileCache, self).delete(*args, **kwargs)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_6(self, VAR_8, VAR_9):...\n", "self.assertEquals(getattr(current_context(), 'request', None), VAR_9)\n", "return VAR_8\n" ]
[ "def check_context(self, val, expected):...\n", "self.assertEquals(getattr(current_context(), 'request', None), expected)\n", "return val\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Return'" ]
[ "@staticmethod...\n", "" ]
[ "@staticmethod...\n", "" ]
[ 0, 0 ]
[ "Condition", "Condition" ]
[ "def FUNC_49(VAR_62):...\n", "import frappe.modules\n", "return frappe.modules.load_doctype_module(VAR_62)\n" ]
[ "def get_meta_module(doctype):...\n", "import frappe.modules\n", "return frappe.modules.load_doctype_module(doctype)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Import'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "VAR_6.whitelisted = True\n", "return VAR_6\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "f.whitelisted = True\n", "return f\n" ]
[ 2, 0, 2, 2 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "@FUNC_0...\n", "return ActionGenerator(self)\n" ]
[ "@cache_in_self...\n", "return ActionGenerator(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@FUNC_0...\n", "return RoomCreationHandler(self)\n" ]
[ "@cache_in_self...\n", "return RoomCreationHandler(self)\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@staticmethod...\n", "return 'admin/projects', f'component-{VAR_39.name}'\n" ]
[ "@staticmethod...\n", "return 'admin/projects', f'component-{field.name}'\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_24(self):...\n", "self.assert_expected(self.folder.t, 'RepeatVariable.html')\n" ]
[ "def testRepeatVariable(self):...\n", "self.assert_expected(self.folder.t, 'RepeatVariable.html')\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_5(VAR_5, VAR_0):...\n", "if VAR_0 == VAR_12:\n", "self.resync_attempts += 1\n" ]
[ "def query_user_devices(destination, user_id):...\n", "if user_id == remote_user_id:\n", "self.resync_attempts += 1\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Condition", "AugAssign'" ]
[ "def __repr__(self):...\n", "return '<Token %r>' % self.id\n" ]
[ "def __repr__(self):...\n", "return '<Token %r>' % self.id\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(VAR_3, VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_12 = getattr(VAR_5, 'request', None)\n", "VAR_4 = list(VAR_4)\n", "VAR_4.reverse()\n", "while VAR_4:\n", "VAR_19 = VAR_4.pop()\n", "return VAR_3\n", "if VAR_19 == '_':\n", "warnings.warn(\n 'Traversing to the name `_` is deprecated and will be removed in Zope 6.',\n DeprecationWarning)\n", "if VAR_19.startswith('_'):\n", "if OFS.interfaces.ITraversable.providedBy(VAR_3):\n", "VAR_3 = VAR_3.restrictedTraverse(VAR_19)\n", "VAR_3 = traversePathElement(VAR_3, VAR_19, VAR_4, VAR_12=request)\n" ]
[ "def boboAwareZopeTraverse(object, path_items, econtext):...\n", "\"\"\"docstring\"\"\"\n", "request = getattr(econtext, 'request', None)\n", "path_items = list(path_items)\n", "path_items.reverse()\n", "while path_items:\n", "name = path_items.pop()\n", "return object\n", "if name == '_':\n", "warnings.warn(\n 'Traversing to the name `_` is deprecated and will be removed in Zope 6.',\n DeprecationWarning)\n", "if name.startswith('_'):\n", "if OFS.interfaces.ITraversable.providedBy(object):\n", "object = object.restrictedTraverse(name)\n", "object = traversePathElement(object, name, path_items, request=request)\n" ]
[ 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 1 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Expr'", "Condition", "Assign'", "Return'", "Condition", "Expr'", "Condition", "Condition", "Assign'", "Assign'" ]
[ "@keep_lazy_text...\n", "\"\"\"docstring\"\"\"\n", "return force_text(unquote_plus(force_str(VAR_16)))\n" ]
[ "@keep_lazy_text...\n", "\"\"\"docstring\"\"\"\n", "return force_text(unquote_plus(force_str(quoted_url)))\n" ]
[ 0, 0, 0 ]
[ "Condition", "Docstring", "Return'" ]
[ "@CLASS_4('help')...\n", "\"\"\"docstring\"\"\"\n", "VAR_23 = VAR_3.path()\n", "if not VAR_23 or VAR_23 == '/':\n", "VAR_23 = 'index.html'\n", "VAR_23 = VAR_23.lstrip('/')\n", "if not docutils.docs_up_to_date(VAR_23):\n", "message.error(\n 'Your documentation is outdated! Please re-run scripts/asciidoc2html.py.')\n", "VAR_15 = 'html/doc/{}'.format(VAR_23)\n", "if not VAR_23.endswith('.html'):\n", "VAR_42 = utils.read_file(VAR_15, binary=True)\n", "VAR_31, VAR_39 = mimetypes.guess_type(VAR_23)\n", "VAR_32 = utils.read_file(VAR_15)\n", "VAR_43 = VAR_15.replace('.html', '.asciidoc')\n", "return 'text/html', VAR_32\n", "assert VAR_31 is not None, VAR_3\n", "if VAR_43.startswith('html/doc/'):\n", "return VAR_31, VAR_42\n", "VAR_43 = VAR_43.replace('html/doc/', '../doc/help/')\n", "VAR_46 = utils.read_file(VAR_43)\n", "VAR_46 = None\n", "if VAR_46 is None:\n", "VAR_44 = textwrap.dedent('string')\n", "return 'text/plain', (VAR_44 + VAR_46).encode('utf-8')\n" ]
[ "@add_handler('help')...\n", "\"\"\"docstring\"\"\"\n", "urlpath = url.path()\n", "if not urlpath or urlpath == '/':\n", "urlpath = 'index.html'\n", "urlpath = urlpath.lstrip('/')\n", "if not docutils.docs_up_to_date(urlpath):\n", "message.error(\n 'Your documentation is outdated! Please re-run scripts/asciidoc2html.py.')\n", "path = 'html/doc/{}'.format(urlpath)\n", "if not urlpath.endswith('.html'):\n", "bdata = utils.read_file(path, binary=True)\n", "mimetype, _encoding = mimetypes.guess_type(urlpath)\n", "data = utils.read_file(path)\n", "asciidoc_path = path.replace('.html', '.asciidoc')\n", "return 'text/html', data\n", "assert mimetype is not None, url\n", "if asciidoc_path.startswith('html/doc/'):\n", "return mimetype, bdata\n", "asciidoc_path = asciidoc_path.replace('html/doc/', '../doc/help/')\n", "asciidoc = utils.read_file(asciidoc_path)\n", "asciidoc = None\n", "if asciidoc is None:\n", "preamble = textwrap.dedent(\n \"\"\"\n There was an error loading the documentation!\n\n This most likely means the documentation was not generated\n properly. If you are running qutebrowser from the git repository,\n please (re)run scripts/asciidoc2html.py and reload this page.\n\n If you're running a released version this is a bug, please use\n :report to report it.\n\n Falling back to the plaintext version.\n\n ---------------------------------------------------------------\n\n\n \"\"\"\n )\n", "return 'text/plain', (preamble + asciidoc).encode('utf-8')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Return'", "Assert'", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Return'" ]
[ "def FUNC_22(VAR_20):...\n", "\"\"\"docstring\"\"\"\n", "VAR_46 = VAR_20.checkpoint_path or os.path.join(VAR_20.dir,\n 'variables/variables')\n", "if not VAR_20.variables_to_feed:\n", "VAR_75 = []\n", "if VAR_20.variables_to_feed.lower() == 'all':\n", "saved_model_aot_compile.aot_compile_cpu_meta_graph_def(VAR_46=\n checkpoint_path, VAR_4=saved_model_utils.get_meta_graph_def(args.dir,\n args.tag_set), VAR_5=args.signature_def_key, VAR_75=variables_to_feed,\n output_prefix=args.output_prefix, target_triple=args.target_triple,\n target_cpu=args.target_cpu, cpp_class=args.cpp_class, multithreading=\n args.multithreading.lower() not in ('f', 'false', '0'))\n", "VAR_75 = None\n", "VAR_75 = VAR_20.variables_to_feed.split(',')\n" ]
[ "def aot_compile_cpu(args):...\n", "\"\"\"docstring\"\"\"\n", "checkpoint_path = args.checkpoint_path or os.path.join(args.dir,\n 'variables/variables')\n", "if not args.variables_to_feed:\n", "variables_to_feed = []\n", "if args.variables_to_feed.lower() == 'all':\n", "saved_model_aot_compile.aot_compile_cpu_meta_graph_def(checkpoint_path=\n checkpoint_path, meta_graph_def=saved_model_utils.get_meta_graph_def(\n args.dir, args.tag_set), signature_def_key=args.signature_def_key,\n variables_to_feed=variables_to_feed, output_prefix=args.output_prefix,\n target_triple=args.target_triple, target_cpu=args.target_cpu, cpp_class\n =args.cpp_class, multithreading=args.multithreading.lower() not in ('f',\n 'false', '0'))\n", "variables_to_feed = None\n", "variables_to_feed = args.variables_to_feed.split(',')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Assign'" ]
[ "def FUNC_24(self, VAR_13):...\n", "VAR_13 = super().format_value(VAR_13)\n", "if isinstance(VAR_13, str):\n", "return VAR_13\n", "return json.dumps(VAR_13)\n" ]
[ "def format_value(self, value):...\n", "value = super().format_value(value)\n", "if isinstance(value, str):\n", "return value\n", "return json.dumps(value)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Return'", "Return'" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "def FUNC_83(self, VAR_71):...\n", "if isinstance(VAR_71, dict):\n", "if not self.get('_return_value'):\n", "self._return_value = VAR_71 or self.get('_return_value')\n", "self._return_value = {}\n", "self._return_value.update(VAR_71)\n", "def FUNC_84(VAR_72, *VAR_73):...\n", "def FUNC_86(self, VAR_25, *VAR_0, **VAR_1):...\n", "FUNC_83(self, VAR_72(self, *VAR_0, **kwargs))\n", "for VAR_6 in VAR_73:\n", "FUNC_83(self, VAR_6(self, VAR_25, *VAR_0, **kwargs))\n", "return self._return_value\n" ]
[ "@staticmethod...\n", "\"\"\"docstring\"\"\"\n", "def add_to_return_value(self, new_return_value):...\n", "if isinstance(new_return_value, dict):\n", "if not self.get('_return_value'):\n", "self._return_value = new_return_value or self.get('_return_value')\n", "self._return_value = {}\n", "self._return_value.update(new_return_value)\n", "def compose(fn, *hooks):...\n", "def runner(self, method, *args, **kwargs):...\n", "add_to_return_value(self, fn(self, *args, **kwargs))\n", "for f in hooks:\n", "add_to_return_value(self, f(self, method, *args, **kwargs))\n", "return self._return_value\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "FunctionDef'", "Condition", "Condition", "Assign'", "Assign'", "Expr'", "FunctionDef'", "FunctionDef'", "Expr'", "For", "Expr'", "Return'" ]
[ "def FUNC_0(VAR_1: Type[ParseArgparserDataType], VAR_2: Union[Namespace,...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(VAR_2, ArgumentParser):\n", "VAR_2 = VAR_1.parse_argparser(VAR_2)\n", "VAR_13 = vars(VAR_2)\n", "VAR_14 = inspect.signature(VAR_1.__init__).parameters\n", "VAR_15 = {name: VAR_13[name] for name in VAR_14 if name in VAR_13}\n", "VAR_15.update(**kwargs)\n", "return VAR_1(**trainer_kwargs)\n" ]
[ "def from_argparse_args(cls: Type[ParseArgparserDataType], args: Union[...\n", "\"\"\"docstring\"\"\"\n", "if isinstance(args, ArgumentParser):\n", "args = cls.parse_argparser(args)\n", "params = vars(args)\n", "valid_kwargs = inspect.signature(cls.__init__).parameters\n", "trainer_kwargs = {name: params[name] for name in valid_kwargs if name in params\n }\n", "trainer_kwargs.update(**kwargs)\n", "return cls(**trainer_kwargs)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Return'" ]
[ "def FUNC_4(VAR_0: str) ->str:...\n", "return stringcase.spinalcase(FUNC_1(FUNC_0(VAR_0)))\n" ]
[ "def kebab_case(value: str) ->str:...\n", "return stringcase.spinalcase(group_title(_sanitize(value)))\n" ]
[ 0, 5 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_2(**VAR_5):...\n", "VAR_27 = dict(python=expressions.PythonExpr, path=TrustedPathExpr)\n", "VAR_27.update(VAR_5)\n", "return FUNC_1(VAR_4=False, **ovr)\n" ]
[ "def createTrustedChameleonEngine(**overrides):...\n", "ovr = dict(python=expressions.PythonExpr, path=TrustedPathExpr)\n", "ovr.update(overrides)\n", "return createChameleonEngine(untrusted=False, **ovr)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Return'" ]
[ "@VAR_0.route('/api/query/meta', methods=['POST'])...\n", "if get_user() is None:\n", "return 'Authentication required', 401\n", "VAR_14 = g.conn.session.query(Query).filter(Query.id == request.form[\n 'query_id']).one()\n", "if VAR_14.user_id != get_user().id:\n", "return 'Authorization denied', 403\n", "if 'title' in request.form:\n", "VAR_14.title = request.form['title']\n", "if 'published' in request.form:\n", "VAR_14.published = request.form['published'] == '1'\n", "if 'description' in request.form:\n", "VAR_14.description = request.form['description']\n", "g.conn.session.add(VAR_14)\n", "g.conn.session.commit()\n", "return json.dumps({'id': VAR_14.id})\n" ]
[ "@app.route('/api/query/meta', methods=['POST'])...\n", "if get_user() is None:\n", "return 'Authentication required', 401\n", "query = g.conn.session.query(Query).filter(Query.id == request.form['query_id']\n ).one()\n", "if query.user_id != get_user().id:\n", "return 'Authorization denied', 403\n", "if 'title' in request.form:\n", "query.title = request.form['title']\n", "if 'published' in request.form:\n", "query.published = request.form['published'] == '1'\n", "if 'description' in request.form:\n", "query.description = request.form['description']\n", "g.conn.session.add(query)\n", "g.conn.session.commit()\n", "return json.dumps({'id': query.id})\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Assign'", "Condition", "Return'", "For", "Assign'", "For", "Assign'", "For", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "def FUNC_18(self):...\n", "VAR_5 = self._makeContext()\n", "self.assertTrue(VAR_5.evaluate('x | not:exists:x'))\n" ]
[ "def test_hybrid_with_compound_expression_int_value(self):...\n", "ec = self._makeContext()\n", "self.assertTrue(ec.evaluate('x | not:exists:x'))\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_8(self):...\n", "VAR_7 = self.created_rmid\n", "self.helper.invite(VAR_7=room, src=self.rmcreator_id, targ=self.user_id)\n", "self.helper.invite(VAR_7=room, src=self.user_id, targ=self.rmcreator_id,\n VAR_9=403)\n", "self.helper.change_membership(VAR_7=room, src=self.user_id, targ=self.\n rmcreator_id, membership=Membership.JOIN, VAR_9=403)\n", "self.helper.change_membership(VAR_7=room, src=self.user_id, targ=self.\n rmcreator_id, membership=Membership.LEAVE, VAR_9=403)\n" ]
[ "def test_invited_permissions(self):...\n", "room = self.created_rmid\n", "self.helper.invite(room=room, src=self.rmcreator_id, targ=self.user_id)\n", "self.helper.invite(room=room, src=self.user_id, targ=self.rmcreator_id,\n expect_code=403)\n", "self.helper.change_membership(room=room, src=self.user_id, targ=self.\n rmcreator_id, membership=Membership.JOIN, expect_code=403)\n", "self.helper.change_membership(room=room, src=self.user_id, targ=self.\n rmcreator_id, membership=Membership.LEAVE, expect_code=403)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@VAR_2.route('/oauth/google', endpoint='authorize')...\n", "VAR_12 = url_for('.callback', _external=True)\n", "VAR_13 = request.args.get('next', url_for('redash.index', VAR_6=session.get\n ('org_slug')))\n", "VAR_0.debug('Callback url: %s', VAR_12)\n", "VAR_0.debug('Next is: %s', VAR_13)\n", "return FUNC_0().authorize(VAR_12=callback, state=next_path)\n" ]
[ "@blueprint.route('/oauth/google', endpoint='authorize')...\n", "callback = url_for('.callback', _external=True)\n", "next_path = request.args.get('next', url_for('redash.index', org_slug=\n session.get('org_slug')))\n", "logger.debug('Callback url: %s', callback)\n", "logger.debug('Next is: %s', next_path)\n", "return google_remote_app().authorize(callback=callback, state=next_path)\n" ]
[ 4, 4, 4, 4, 4, 4 ]
[ "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Return'" ]
[ "@VAR_2.route('/login', methods=['GET', 'POST'])...\n", "if VAR_87 is not None and VAR_87.is_authenticated:\n", "return redirect(url_for('web.index'))\n", "if config.config_login_type == constants.LOGIN_LDAP and not services.ldap:\n", "VAR_3.error(u'Cannot activate LDAP authentication')\n", "if request.method == 'POST':\n", "flash(_(u'Cannot activate LDAP authentication'), category='error')\n", "VAR_117 = request.form.to_dict()\n", "VAR_97 = request.args.get('next', default=url_for('web.index'), type=str)\n", "VAR_104 = ub.session.query(ub.User).filter(func.lower(ub.User.name) ==\n VAR_117['username'].strip().lower()).first()\n", "if url_for('web.logout') == VAR_97:\n", "if config.config_login_type == constants.LOGIN_LDAP and services.ldap and VAR_104 and VAR_117[\n", "VAR_97 = url_for('web.index')\n", "return render_title_template('login.html', VAR_149=_(u'Login'), VAR_97=\n next_url, config=config, VAR_100=oauth_check, mail=config.\n get_mail_server_configured(), VAR_9='login')\n", "VAR_138, VAR_139 = services.ldap.bind_user(VAR_117['username'], VAR_117[\n 'password'])\n", "VAR_140 = request.headers.get('X-Forwarded-For', request.remote_addr)\n", "if VAR_138:\n", "if 'forgot' in VAR_117 and VAR_117['forgot'] == 'forgot':\n", "login_user(VAR_104, remember=bool(form.get('remember_me')))\n", "if VAR_138 is None and VAR_104 and check_password_hash(str(VAR_104.password\n", "if VAR_104 != None and VAR_104.name != 'Guest':\n", "if VAR_104 and check_password_hash(str(VAR_104.password), VAR_117['password']\n", "ub.store_user_session()\n", "login_user(VAR_104, remember=bool(form.get('remember_me')))\n", "if VAR_138 is None:\n", "VAR_147, VAR_64 = reset_password(VAR_104.id)\n", "flash(_(u'Please enter valid username to reset password'), category='error')\n", "login_user(VAR_104, remember=bool(form.get('remember_me')))\n", "VAR_3.warning('Login failed for user \"%s\" IP-address: %s', VAR_117[\n 'username'], VAR_140)\n", "VAR_3.debug(u\"You are now logged in as: '%s'\", VAR_104.name)\n", "ub.store_user_session()\n", "VAR_3.info(VAR_139)\n", "VAR_140 = request.headers.get('X-Forwarded-For', request.remote_addr)\n", "if VAR_147 == 1:\n", "VAR_3.warning('Username missing for password reset IP-address: %s', VAR_140)\n", "ub.store_user_session()\n", "flash(_(u'Wrong Username or Password'), category='error')\n", "flash(_(u\"you are now logged in as: '%(nickname)s'\", VAR_115=user.name),\n category='success')\n", "VAR_3.info(\"Local Fallback Login as: '%s'\", VAR_104.name)\n", "flash(_(u'Could not login: %(message)s', message=error), category='error')\n", "VAR_3.warning('LDAP Login failed for user \"%s\" IP-address: %s', VAR_117[\n 'username'], VAR_140)\n", "flash(_(u'New Password was send to your email address'), category='info')\n", "VAR_3.error(u'An unknown error occurred. Please try again later')\n", "VAR_3.debug(u\"You are now logged in as: '%s'\", VAR_104.name)\n", "return redirect_back(url_for('web.index'))\n", "flash(_(\n u\"Fallback Login as: '%(nickname)s', LDAP Server not reachable, or user not known\"\n , VAR_115=user.name), category='warning')\n", "flash(_(u'Wrong Username or Password'), category='error')\n", "VAR_3.info('Password reset for user \"%s\" IP-address: %s', VAR_117[\n 'username'], VAR_140)\n", "flash(_(u'An unknown error occurred. 
Please try again later.'), category=\n 'error')\n", "flash(_(u\"You are now logged in as: '%(nickname)s'\", VAR_115=user.name),\n category='success')\n", "return redirect_back(url_for('web.index'))\n", "config.config_is_initial = False\n", "return redirect_back(url_for('web.index'))\n" ]
[ "@web.route('/login', methods=['GET', 'POST'])...\n", "if current_user is not None and current_user.is_authenticated:\n", "return redirect(url_for('web.index'))\n", "if config.config_login_type == constants.LOGIN_LDAP and not services.ldap:\n", "log.error(u'Cannot activate LDAP authentication')\n", "if request.method == 'POST':\n", "flash(_(u'Cannot activate LDAP authentication'), category='error')\n", "form = request.form.to_dict()\n", "next_url = request.args.get('next', default=url_for('web.index'), type=str)\n", "user = ub.session.query(ub.User).filter(func.lower(ub.User.name) == form[\n 'username'].strip().lower()).first()\n", "if url_for('web.logout') == next_url:\n", "if config.config_login_type == constants.LOGIN_LDAP and services.ldap and user and form[\n", "next_url = url_for('web.index')\n", "return render_title_template('login.html', title=_(u'Login'), next_url=\n next_url, config=config, oauth_check=oauth_check, mail=config.\n get_mail_server_configured(), page='login')\n", "login_result, error = services.ldap.bind_user(form['username'], form[\n 'password'])\n", "ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)\n", "if login_result:\n", "if 'forgot' in form and form['forgot'] == 'forgot':\n", "login_user(user, remember=bool(form.get('remember_me')))\n", "if login_result is None and user and check_password_hash(str(user.password),\n", "if user != None and user.name != 'Guest':\n", "if user and check_password_hash(str(user.password), form['password']\n", "ub.store_user_session()\n", "login_user(user, remember=bool(form.get('remember_me')))\n", "if login_result is None:\n", "ret, __ = reset_password(user.id)\n", "flash(_(u'Please enter valid username to reset password'), category='error')\n", "login_user(user, remember=bool(form.get('remember_me')))\n", "log.warning('Login failed for user \"%s\" IP-address: %s', form['username'],\n ip_Address)\n", "log.debug(u\"You are now logged in as: '%s'\", user.name)\n", "ub.store_user_session()\n", "log.info(error)\n", "ip_Address = request.headers.get('X-Forwarded-For', request.remote_addr)\n", "if ret == 1:\n", "log.warning('Username missing for password reset IP-address: %s', ip_Address)\n", "ub.store_user_session()\n", "flash(_(u'Wrong Username or Password'), category='error')\n", "flash(_(u\"you are now logged in as: '%(nickname)s'\", nickname=user.name),\n category='success')\n", "log.info(\"Local Fallback Login as: '%s'\", user.name)\n", "flash(_(u'Could not login: %(message)s', message=error), category='error')\n", "log.warning('LDAP Login failed for user \"%s\" IP-address: %s', form[\n 'username'], ip_Address)\n", "flash(_(u'New Password was send to your email address'), category='info')\n", "log.error(u'An unknown error occurred. Please try again later')\n", "log.debug(u\"You are now logged in as: '%s'\", user.name)\n", "return redirect_back(url_for('web.index'))\n", "flash(_(\n u\"Fallback Login as: '%(nickname)s', LDAP Server not reachable, or user not known\"\n , nickname=user.name), category='warning')\n", "flash(_(u'Wrong Username or Password'), category='error')\n", "log.info('Password reset for user \"%s\" IP-address: %s', form['username'],\n ip_Address)\n", "flash(_(u'An unknown error occurred. Please try again later.'), category=\n 'error')\n", "flash(_(u\"You are now logged in as: '%(nickname)s'\", nickname=user.name),\n category='success')\n", "return redirect_back(url_for('web.index'))\n", "config.config_is_initial = False\n", "return redirect_back(url_for('web.index'))\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Condition", "Return'", "Condition", "Expr'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "For", "Condition", "Assign'", "Return'", "Assign'", "Assign'", "Condition", "For", "Expr'", "Condition", "Condition", "Condition", "Expr'", "Expr'", "Condition", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Return'", "Assign'", "Return'" ]
[ "def FUNC_10(VAR_22):...\n", "\"\"\"docstring\"\"\"\n", "VAR_29 = '0123456789abcdefghijklmnopqrstuvwxyz'\n", "if VAR_22 < 0:\n", "if six.PY2:\n", "if not isinstance(VAR_22, six.integer_types):\n", "if VAR_22 < 36:\n", "if VAR_22 > sys.maxint:\n", "return VAR_29[VAR_22]\n", "VAR_30 = ''\n", "while VAR_22 != 0:\n", "VAR_22, VAR_41 = divmod(VAR_22, 36)\n", "return VAR_30\n", "VAR_30 = VAR_29[VAR_41] + VAR_30\n" ]
[ "def int_to_base36(i):...\n", "\"\"\"docstring\"\"\"\n", "char_set = '0123456789abcdefghijklmnopqrstuvwxyz'\n", "if i < 0:\n", "if six.PY2:\n", "if not isinstance(i, six.integer_types):\n", "if i < 36:\n", "if i > sys.maxint:\n", "return char_set[i]\n", "b36 = ''\n", "while i != 0:\n", "i, n = divmod(i, 36)\n", "return b36\n", "b36 = char_set[n] + b36\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Condition", "Condition", "Condition", "Condition", "Condition", "Return'", "Assign'", "Condition", "Assign'", "Return'", "Assign'" ]
[ "def FUNC_42(self, VAR_36, VAR_37=False, VAR_35=''):...\n", "VAR_15 = self.getUserId()\n", "if not VAR_15:\n", "VAR_35 = (VAR_35.strip().rstrip('/') + cherry.config['server.rootpath']\n ).rstrip('/')\n", "VAR_36 = VAR_36.lower()\n", "if VAR_36 == 'm3u':\n", "VAR_111 = self.playlistdb.createM3U\n", "if VAR_36 == 'pls':\n", "VAR_77 = self.playlistdb.showPlaylists(VAR_15, include_public=all)\n", "VAR_111 = self.playlistdb.createPLS\n", "if not VAR_77:\n", "for VAR_110 in VAR_77:\n", "VAR_28 = VAR_110['plid']\n", "VAR_82 = 'application/x-zip-compressed'\n", "VAR_122 = VAR_111(VAR_28=plid, VAR_15=userid, addrstr=hostaddr)\n", "VAR_83 = 'attachment; filename=\"playlists.zip\"'\n", "VAR_81 = self.playlistdb.getName(VAR_28, VAR_15) + '.' + VAR_36\n", "VAR_53.response.headers['Content-Type'] = VAR_82\n", "if not VAR_110['owner']:\n", "VAR_53.response.headers['Content-Disposition'] = VAR_83\n", "VAR_6 = self.userdb.getNameById(VAR_110['userid'])\n", "zip.writestr(VAR_81, VAR_122)\n", "return zip.getbytes()\n", "VAR_81 = VAR_6 + '/' + VAR_81\n" ]
[ "def export_playlists(self, format, all=False, hostaddr=''):...\n", "userid = self.getUserId()\n", "if not userid:\n", "hostaddr = (hostaddr.strip().rstrip('/') + cherry.config['server.rootpath']\n ).rstrip('/')\n", "format = format.lower()\n", "if format == 'm3u':\n", "filemaker = self.playlistdb.createM3U\n", "if format == 'pls':\n", "playlists = self.playlistdb.showPlaylists(userid, include_public=all)\n", "filemaker = self.playlistdb.createPLS\n", "if not playlists:\n", "for pl in playlists:\n", "plid = pl['plid']\n", "zipmime = 'application/x-zip-compressed'\n", "plstr = filemaker(plid=plid, userid=userid, addrstr=hostaddr)\n", "zipname = 'attachment; filename=\"playlists.zip\"'\n", "name = self.playlistdb.getName(plid, userid) + '.' + format\n", "cherrypy.response.headers['Content-Type'] = zipmime\n", "if not pl['owner']:\n", "cherrypy.response.headers['Content-Disposition'] = zipname\n", "username = self.userdb.getNameById(pl['userid'])\n", "zip.writestr(name, plstr)\n", "return zip.getbytes()\n", "name = username + '/' + name\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "For", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Return'", "Assign'" ]
[ "def FUNC_6(VAR_19=None):...\n", "\"\"\"docstring\"\"\"\n", "return formatdate(VAR_19, usegmt=True)\n" ]
[ "def http_date(epoch_seconds=None):...\n", "\"\"\"docstring\"\"\"\n", "return formatdate(epoch_seconds, usegmt=True)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Return'" ]
[ "def FUNC_9(VAR_0, VAR_1: FlaskClient, VAR_3: RequestsMock):...\n", "VAR_3.add(GET, 'https://example.com/', body=\n \"\"\"<html>\n <head><title>Random</title></head><body><p>\n Lorem ipsum dolor sit amet, consectetur adipiscing elit\n </p></body></html>\n \"\"\"\n )\n", "VAR_9 = {'url': 'https://example.com', 'tags': 'testing,bookmark', 'path':\n '', 'submit': 'true'}\n", "VAR_8 = VAR_1.post('/bookmarks/new', data=bookmark_data)\n", "assert VAR_8.status_code == 302\n", "assert not b'invalid' in VAR_8.data\n", "VAR_8 = VAR_1.post('/bookmarks/new', data=bookmark_data, follow_redirects=True)\n", "assert VAR_8.status_code == 200\n", "assert b'<span class=\"post-tag\">bookmark</span>' in VAR_8.data\n", "assert b'<span class=\"post-tag\">testing</span>' in VAR_8.data\n", "assert b'https://example.com' in VAR_8.data\n", "assert b'Random' in VAR_8.data\n" ]
[ "def test_create_new_bookmark(test_app, client: FlaskClient,...\n", "mocked_responses.add(GET, 'https://example.com/', body=\n \"\"\"<html>\n <head><title>Random</title></head><body><p>\n Lorem ipsum dolor sit amet, consectetur adipiscing elit\n </p></body></html>\n \"\"\"\n )\n", "bookmark_data = {'url': 'https://example.com', 'tags': 'testing,bookmark',\n 'path': '', 'submit': 'true'}\n", "resp = client.post('/bookmarks/new', data=bookmark_data)\n", "assert resp.status_code == 302\n", "assert not b'invalid' in resp.data\n", "resp = client.post('/bookmarks/new', data=bookmark_data, follow_redirects=True)\n", "assert resp.status_code == 200\n", "assert b'<span class=\"post-tag\">bookmark</span>' in resp.data\n", "assert b'<span class=\"post-tag\">testing</span>' in resp.data\n", "assert b'https://example.com' in resp.data\n", "assert b'Random' in resp.data\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Assign'", "Assign'", "Assert'", "Assert'", "Assign'", "Assert'", "Assert'", "Assert'", "Assert'", "Assert'" ]
[ "def FUNC_6(VAR_10: realization.RealAccount, VAR_11: str...\n", "\"\"\"docstring\"\"\"\n", "if VAR_10.account == VAR_11:\n", "return VAR_10\n", "return realization.get_or_create(VAR_10, VAR_11)\n" ]
[ "def get_or_create(account: realization.RealAccount, account_name: str...\n", "\"\"\"docstring\"\"\"\n", "if account.account == account_name:\n", "return account\n", "return realization.get_or_create(account, account_name)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Return'" ]
[ "def FUNC_4(VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "VAR_13 = Path(VAR_5.file_path).relative_to(click_web.OUTPUT_FOLDER)\n", "VAR_14 = f'/static/results/{VAR_13.as_posix()}'\n", "return f'<a href=\"{VAR_14}\">{VAR_5.link_name}</a>'\n" ]
[ "def _get_download_link(field_info):...\n", "\"\"\"docstring\"\"\"\n", "rel_file_path = Path(field_info.file_path).relative_to(click_web.OUTPUT_FOLDER)\n", "uri = f'/static/results/{rel_file_path.as_posix()}'\n", "return f'<a href=\"{uri}\">{field_info.link_name}</a>'\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_2(VAR_4, VAR_5):...\n", "\"\"\"docstring\"\"\"\n", "if VAR_5 not in VAR_4.signature_def:\n", "return VAR_4.signature_def[VAR_5].inputs\n" ]
[ "def _get_inputs_tensor_info_from_meta_graph_def(meta_graph_def,...\n", "\"\"\"docstring\"\"\"\n", "if signature_def_key not in meta_graph_def.signature_def:\n", "return meta_graph_def.signature_def[signature_def_key].inputs\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Condition", "Return'" ]
[ "def FUNC_15(VAR_3, **VAR_4):...\n", "" ]
[ "def _callback(request, **kwargs):...\n", "" ]
[ 0, 0 ]
[ "FunctionDef'", "Condition" ]
[ "def FUNC_44(self):...\n", "if self.user:\n", "return self.user\n", "return self.email\n" ]
[ "def _display(self):...\n", "if self.user:\n", "return self.user\n", "return self.email\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Condition", "Return'", "Return'" ]
[ "def FUNC_34(self):...\n", "return ''\n" ]
[ "def get_glossary_flags(self):...\n", "return ''\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def __exit__(self, VAR_171, VAR_105, VAR_178):...\n", "FUNC_9()\n" ]
[ "def __exit__(self, type, value, traceback):...\n", "destroy()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_78(self, VAR_47):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.desk.doctype.tag.tag import DocTags\n", "DocTags(self.doctype).add(self.name, VAR_47)\n" ]
[ "def add_tag(self, tag):...\n", "\"\"\"docstring\"\"\"\n", "from frappe.desk.doctype.tag.tag import DocTags\n", "DocTags(self.doctype).add(self.name, tag)\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "ImportFrom'", "Expr'" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get\n", "return HttpResponseForbidden()\n", "VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n", "VAR_48 = []\n", "VAR_49 = []\n", "VAR_50 = []\n", "if CourseMode.is_white_label(VAR_10):\n", "VAR_24 = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG\n", "VAR_24 = None\n", "if 'students_list' in VAR_9.FILES:\n", "VAR_125 = []\n", "VAR_50.append({'username': '', 'email': '', 'response': _(\n 'File is not attached.')})\n", "VAR_194 = VAR_9.FILES.get('students_list')\n", "VAR_50.append({'username': '', 'email': '', 'response': _(\n 'Could not read uploaded file.')})\n", "VAR_194.close()\n", "VAR_12 = []\n", "VAR_51 = {'row_errors': VAR_49, 'general_errors': VAR_50, 'warnings': VAR_48}\n", "if VAR_194.name.endswith('.csv'):\n", "VAR_166 = 0\n", "return JsonResponse(VAR_51)\n", "VAR_125 = [row for row in VAR_26.reader(VAR_194.read().splitlines())]\n", "VAR_50.append({'username': '', 'email': '', 'response': _(\n 'Make sure that the file you upload is in CSV format with no extraneous characters or rows.'\n )})\n", "for VAR_40 in VAR_125:\n", "VAR_65 = get_course_by_id(VAR_10)\n", "VAR_166 = VAR_166 + 1\n", "if len(VAR_40) != 4:\n", "if len(VAR_40) > 0:\n", "VAR_14 = VAR_40[VAR_5]\n", "VAR_50.append({'username': '', 'email': '', 'response': _(\n 'Data in row #{row_num} must have exactly four columns: email, username, full name, and country'\n ).format(VAR_166=row_num)})\n", "VAR_15 = VAR_40[VAR_6]\n", "VAR_16 = VAR_40[VAR_7]\n", "VAR_17 = VAR_40[VAR_8][:2]\n", "VAR_25 = get_email_params(VAR_65, True, VAR_168=request.is_secure())\n", "validate_email(VAR_14)\n", "VAR_49.append({'username': VAR_15, 'email': VAR_14, 'response': _(\n 'Invalid email {email_address}.').format(email_address=email)})\n", "if User.objects.filter(VAR_14=email).exists():\n", "VAR_19 = User.objects.get(VAR_14=email)\n", "VAR_18 = FUNC_9(VAR_12)\n", "if not User.objects.filter(VAR_14=email, VAR_15=username).exists():\n", "VAR_56 = FUNC_12(VAR_14, VAR_15, VAR_16, VAR_17, VAR_18, VAR_10, VAR_24,\n VAR_9.user, VAR_25)\n", "VAR_211 = _('string').format(VAR_14=email, VAR_15=username)\n", "VAR_0.info(u\"user already exists with username '%s' and email '%s'\", VAR_15,\n VAR_14)\n", "VAR_49.extend(VAR_56)\n", "VAR_48.append({'username': VAR_15, 'email': VAR_14, 'response': VAR_211})\n", "if not CourseEnrollment.is_enrolled(VAR_19, VAR_10):\n", "VAR_0.warning(u'email %s already exist', VAR_14)\n", "FUNC_11(VAR_19=user, VAR_10=course_id, VAR_20=course_mode, VAR_21=request.\n user, VAR_22='Enrolling via csv upload', VAR_23=UNENROLLED_TO_ENROLLED)\n", "enroll_email(VAR_10=course_id, student_email=email, VAR_60=True, VAR_61=\n True, VAR_25=email_params)\n" ]
[ "@ensure_csrf_cookie...\n", "\"\"\"docstring\"\"\"\n", "if not microsite.get_value('ALLOW_AUTOMATED_SIGNUPS', settings.FEATURES.get\n", "return HttpResponseForbidden()\n", "course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n", "warnings = []\n", "row_errors = []\n", "general_errors = []\n", "if CourseMode.is_white_label(course_id):\n", "course_mode = CourseMode.DEFAULT_SHOPPINGCART_MODE_SLUG\n", "course_mode = None\n", "if 'students_list' in request.FILES:\n", "students = []\n", "general_errors.append({'username': '', 'email': '', 'response': _(\n 'File is not attached.')})\n", "upload_file = request.FILES.get('students_list')\n", "general_errors.append({'username': '', 'email': '', 'response': _(\n 'Could not read uploaded file.')})\n", "upload_file.close()\n", "generated_passwords = []\n", "results = {'row_errors': row_errors, 'general_errors': general_errors,\n 'warnings': warnings}\n", "if upload_file.name.endswith('.csv'):\n", "row_num = 0\n", "return JsonResponse(results)\n", "students = [row for row in csv.reader(upload_file.read().splitlines())]\n", "general_errors.append({'username': '', 'email': '', 'response': _(\n 'Make sure that the file you upload is in CSV format with no extraneous characters or rows.'\n )})\n", "for student in students:\n", "course = get_course_by_id(course_id)\n", "row_num = row_num + 1\n", "if len(student) != 4:\n", "if len(student) > 0:\n", "email = student[EMAIL_INDEX]\n", "general_errors.append({'username': '', 'email': '', 'response': _(\n 'Data in row #{row_num} must have exactly four columns: email, username, full name, and country'\n ).format(row_num=row_num)})\n", "username = student[USERNAME_INDEX]\n", "name = student[NAME_INDEX]\n", "country = student[COUNTRY_INDEX][:2]\n", "email_params = get_email_params(course, True, secure=request.is_secure())\n", "validate_email(email)\n", "row_errors.append({'username': username, 'email': email, 'response': _(\n 'Invalid email {email_address}.').format(email_address=email)})\n", "if User.objects.filter(email=email).exists():\n", "user = User.objects.get(email=email)\n", "password = generate_unique_password(generated_passwords)\n", "if not User.objects.filter(email=email, username=username).exists():\n", "errors = create_and_enroll_user(email, username, name, country, password,\n course_id, course_mode, request.user, email_params)\n", "warning_message = _(\n 'An account with email {email} exists but the provided username {username} is different. Enrolling anyway with {email}.'\n ).format(email=email, username=username)\n", "log.info(u\"user already exists with username '%s' and email '%s'\", username,\n email)\n", "row_errors.extend(errors)\n", "warnings.append({'username': username, 'email': email, 'response':\n warning_message})\n", "if not CourseEnrollment.is_enrolled(user, course_id):\n", "log.warning(u'email %s already exist', email)\n", "create_manual_course_enrollment(user=user, course_id=course_id, mode=\n course_mode, enrolled_by=request.user, reason=\n 'Enrolling via csv upload', state_transition=UNENROLLED_TO_ENROLLED)\n", "enroll_email(course_id=course_id, student_email=email, auto_enroll=True,\n email_students=True, email_params=email_params)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Condition", "Return'", "Assign'", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Assign'", "Assign'", "Condition", "Assign'", "Return'", "Assign'", "Expr'", "For", "Assign'", "Assign'", "Condition", "Condition", "Assign'", "Expr'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Expr'", "Condition", "Assign'", "Assign'", "Condition", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_14(self):...\n", "VAR_5, VAR_15 = self._test_confirm_start()\n", "VAR_3 = self.client.get(VAR_15)\n", "self.assertContains(VAR_3, 'Please enter your new password')\n" ]
[ "def test_confirm_valid(self):...\n", "url, path = self._test_confirm_start()\n", "response = self.client.get(path)\n", "self.assertContains(response, 'Please enter your new password')\n" ]
[ 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Expr'" ]
[ "def FUNC_1(self):...\n", "\"\"\"docstring\"\"\"\n", "VAR_1 = self.register_user('user', 'pass')\n", "VAR_10 = self.login('user', 'pass')\n", "VAR_11 = self.register_user('otheruser', 'pass')\n", "VAR_12 = self.login('otheruser', 'pass')\n", "VAR_13 = self.get_success(self.hs.get_datastore().get_user_by_access_token(\n VAR_10))\n", "VAR_14 = VAR_13.token_id\n", "self.get_success(self.hs.get_pusherpool().add_pusher(VAR_1=user_id, VAR_10=\n token_id, kind='http', app_id='m.http', app_display_name=\n 'HTTP Push Notifications', device_display_name='pushy push', pushkey=\n 'a@example.com', lang=None, data={'url': 'example.com'}))\n", "VAR_15 = self.helper.create_room_as(VAR_1, tok=access_token)\n", "self.helper.join(VAR_15=room, user=other_user_id, tok=other_access_token)\n", "self.helper.send(VAR_15, VAR_7='Hi!', tok=other_access_token)\n", "self.helper.send(VAR_15, VAR_7='There!', tok=other_access_token)\n", "VAR_16 = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': VAR_1}))\n", "VAR_16 = list(VAR_16)\n", "self.assertEqual(len(VAR_16), 1)\n", "VAR_17 = VAR_16[0]['last_stream_ordering']\n", "self.pump()\n", "VAR_16 = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': VAR_1}))\n", "VAR_16 = list(VAR_16)\n", "self.assertEqual(len(VAR_16), 1)\n", "self.assertEqual(VAR_17, VAR_16[0]['last_stream_ordering'])\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.assertEqual(self.push_attempts[0][1], 'example.com')\n", "self.assertEqual(self.push_attempts[0][2]['notification']['content']['body'\n ], 'Hi!')\n", "self.push_attempts[0][0].callback({})\n", "self.pump()\n", "VAR_16 = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': VAR_1}))\n", "VAR_16 = list(VAR_16)\n", "self.assertEqual(len(VAR_16), 1)\n", "self.assertTrue(VAR_16[0]['last_stream_ordering'] > VAR_17)\n", "VAR_17 = VAR_16[0]['last_stream_ordering']\n", "self.assertEqual(len(self.push_attempts), 2)\n", "self.assertEqual(self.push_attempts[1][1], 'example.com')\n", "self.assertEqual(self.push_attempts[1][2]['notification']['content']['body'\n ], 'There!')\n", "self.push_attempts[1][0].callback({})\n", "self.pump()\n", "VAR_16 = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': VAR_1}))\n", "VAR_16 = list(VAR_16)\n", "self.assertEqual(len(VAR_16), 1)\n", "self.assertTrue(VAR_16[0]['last_stream_ordering'] > VAR_17)\n" ]
[ "def test_sends_http(self):...\n", "\"\"\"docstring\"\"\"\n", "user_id = self.register_user('user', 'pass')\n", "access_token = self.login('user', 'pass')\n", "other_user_id = self.register_user('otheruser', 'pass')\n", "other_access_token = self.login('otheruser', 'pass')\n", "user_tuple = self.get_success(self.hs.get_datastore().\n get_user_by_access_token(access_token))\n", "token_id = user_tuple.token_id\n", "self.get_success(self.hs.get_pusherpool().add_pusher(user_id=user_id,\n access_token=token_id, kind='http', app_id='m.http', app_display_name=\n 'HTTP Push Notifications', device_display_name='pushy push', pushkey=\n 'a@example.com', lang=None, data={'url': 'example.com'}))\n", "room = self.helper.create_room_as(user_id, tok=access_token)\n", "self.helper.join(room=room, user=other_user_id, tok=other_access_token)\n", "self.helper.send(room, body='Hi!', tok=other_access_token)\n", "self.helper.send(room, body='There!', tok=other_access_token)\n", "pushers = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': user_id}))\n", "pushers = list(pushers)\n", "self.assertEqual(len(pushers), 1)\n", "last_stream_ordering = pushers[0]['last_stream_ordering']\n", "self.pump()\n", "pushers = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': user_id}))\n", "pushers = list(pushers)\n", "self.assertEqual(len(pushers), 1)\n", "self.assertEqual(last_stream_ordering, pushers[0]['last_stream_ordering'])\n", "self.assertEqual(len(self.push_attempts), 1)\n", "self.assertEqual(self.push_attempts[0][1], 'example.com')\n", "self.assertEqual(self.push_attempts[0][2]['notification']['content']['body'\n ], 'Hi!')\n", "self.push_attempts[0][0].callback({})\n", "self.pump()\n", "pushers = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': user_id}))\n", "pushers = list(pushers)\n", "self.assertEqual(len(pushers), 1)\n", "self.assertTrue(pushers[0]['last_stream_ordering'] > last_stream_ordering)\n", "last_stream_ordering = pushers[0]['last_stream_ordering']\n", "self.assertEqual(len(self.push_attempts), 2)\n", "self.assertEqual(self.push_attempts[1][1], 'example.com')\n", "self.assertEqual(self.push_attempts[1][2]['notification']['content']['body'\n ], 'There!')\n", "self.push_attempts[1][0].callback({})\n", "self.pump()\n", "pushers = self.get_success(self.hs.get_datastore().get_pushers_by({\n 'user_name': user_id}))\n", "pushers = list(pushers)\n", "self.assertEqual(len(pushers), 1)\n", "self.assertTrue(pushers[0]['last_stream_ordering'] > last_stream_ordering)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def FUNC_7(self, VAR_14, VAR_15):...\n", "for m in self.mappings:\n", "VAR_43 = m.get(VAR_14, self)\n", "return VAR_15\n", "if VAR_43 is not self:\n", "return VAR_43\n" ]
[ "def get(self, key, default):...\n", "for m in self.mappings:\n", "value = m.get(key, self)\n", "return default\n", "if value is not self:\n", "return value\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "For", "Assign'", "Return'", "Condition", "Return'" ]
[ "@property...\n", "VAR_15 = CLASS_10.get_enum(self.reference.class_name)\n", "if VAR_15:\n", "return 'enum_property.pyi'\n", "return 'ref_property.pyi'\n" ]
[ "@property...\n", "enum = EnumProperty.get_enum(self.reference.class_name)\n", "if enum:\n", "return 'enum_property.pyi'\n", "return 'ref_property.pyi'\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Condition", "Return'", "Return'" ]
[ "@VAR_25.whitelist(allow_guest=True)...\n", "\"\"\"docstring\"\"\"\n", "VAR_14 = VAR_25.get_doc(VAR_9, VAR_10)\n", "if getattr(VAR_14, VAR_11, VAR_25._dict()).is_whitelisted:\n", "VAR_25.call(getattr(VAR_14, VAR_11), **frappe.local.form_dict)\n", "VAR_25.throw(_('Not permitted'), VAR_25.PermissionError)\n" ]
[ "@frappe.whitelist(allow_guest=True)...\n", "\"\"\"docstring\"\"\"\n", "doc = frappe.get_doc(doctype, name)\n", "if getattr(doc, custom_method, frappe._dict()).is_whitelisted:\n", "frappe.call(getattr(doc, custom_method), **frappe.local.form_dict)\n", "frappe.throw(_('Not permitted'), frappe.PermissionError)\n" ]
[ 2, 0, 2, 2, 2, 2 ]
[ "Condition", "Docstring", "Assign'", "Condition", "Expr'", "Expr'" ]
[ "@VAR_0.route('/outputstate_unique_id/<unique_id>/<channel_id>')...\n", "\"\"\"docstring\"\"\"\n", "VAR_22 = OutputChannel.query.filter(OutputChannel.unique_id == VAR_10).first()\n", "VAR_42 = DaemonControl()\n", "VAR_24 = VAR_42.output_state(VAR_9, VAR_22.channel)\n", "return jsonify(VAR_24)\n" ]
[ "@blueprint.route('/outputstate_unique_id/<unique_id>/<channel_id>')...\n", "\"\"\"docstring\"\"\"\n", "channel = OutputChannel.query.filter(OutputChannel.unique_id == channel_id\n ).first()\n", "daemon_control = DaemonControl()\n", "state = daemon_control.output_state(unique_id, channel.channel)\n", "return jsonify(state)\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "@FUNC_3('staff')...\n", "\"\"\"docstring\"\"\"\n", "VAR_173 = VAR_9.POST['invoice_number']\n", "return HttpResponseBadRequest('Missing required invoice_number parameter')\n", "VAR_173 = int(VAR_173)\n", "return HttpResponseBadRequest(\n 'invoice_number must be an integer, {value} provided'.format(value=\n invoice_number))\n", "VAR_174 = VAR_9.POST['event_type']\n", "return HttpResponseBadRequest('Missing required event_type parameter')\n", "VAR_10 = SlashSeparatedCourseKey.from_deprecated_string(VAR_10)\n", "VAR_27 = CourseRegistrationCodeInvoiceItem.objects.select_related('invoice'\n ).get(invoice_id=invoice_number, VAR_10=course_id)\n", "return HttpResponseNotFound(_(\"Invoice number '{num}' does not exist.\").\n format(num=invoice_number))\n", "if VAR_174 == 'invalidate':\n", "VAR_27 = VAR_27.invoice\n", "return FUNC_22(VAR_27)\n", "return FUNC_23(VAR_27)\n" ]
[ "@require_level('staff')...\n", "\"\"\"docstring\"\"\"\n", "invoice_number = request.POST['invoice_number']\n", "return HttpResponseBadRequest('Missing required invoice_number parameter')\n", "invoice_number = int(invoice_number)\n", "return HttpResponseBadRequest(\n 'invoice_number must be an integer, {value} provided'.format(value=\n invoice_number))\n", "event_type = request.POST['event_type']\n", "return HttpResponseBadRequest('Missing required event_type parameter')\n", "course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id)\n", "obj_invoice = CourseRegistrationCodeInvoiceItem.objects.select_related(\n 'invoice').get(invoice_id=invoice_number, course_id=course_id)\n", "return HttpResponseNotFound(_(\"Invoice number '{num}' does not exist.\").\n format(num=invoice_number))\n", "if event_type == 'invalidate':\n", "obj_invoice = obj_invoice.invoice\n", "return invalidate_invoice(obj_invoice)\n", "return re_validate_invoice(obj_invoice)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'", "Assign'", "Return'", "Assign'", "Return'", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Return'", "Return'" ]
[ "async def FUNC_14():...\n", "" ]
[ "async def do_iterations():...\n", "" ]
[ 0, 0 ]
[ "AsyncFunctionDef'", "Condition" ]
[ "@action('resetpasswords', lazy_gettext('Reset Password'), '', 'fa-lock',...\n", "return redirect(url_for(self.appbuilder.sm.resetpasswordview.__name__ +\n '.this_form_get', VAR_23=item.id))\n" ]
[ "@action('resetpasswords', lazy_gettext('Reset Password'), '', 'fa-lock',...\n", "return redirect(url_for(self.appbuilder.sm.resetpasswordview.__name__ +\n '.this_form_get', pk=item.id))\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "@CLASS_4('pyeval')...\n", "\"\"\"docstring\"\"\"\n", "VAR_20 = jinja.render('pre.html', title='pyeval', content=pyeval_output)\n", "return 'text/html', VAR_20\n" ]
[ "@add_handler('pyeval')...\n", "\"\"\"docstring\"\"\"\n", "html = jinja.render('pre.html', title='pyeval', content=pyeval_output)\n", "return 'text/html', html\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "@override_settings(LOGIN_URL='login')...\n", "self.assertLoginURLEquals('/login/?next=/login_required/')\n" ]
[ "@override_settings(LOGIN_URL='login')...\n", "self.assertLoginURLEquals('/login/?next=/login_required/')\n" ]
[ 0, 0 ]
[ "Condition", "Expr'" ]
[ "def __call__(self):...\n", "return 'dummy'\n" ]
[ "def __call__(self):...\n", "return 'dummy'\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_13(self) ->HomeServerConfig:...\n", "return self.config\n" ]
[ "def get_config(self) ->HomeServerConfig:...\n", "return self.config\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Return'" ]
[ "def FUNC_0(self, VAR_2, VAR_3):...\n", "VAR_7 = self.setup_test_homeserver(http_client=None)\n", "self.handler = VAR_7.get_federation_handler()\n", "self.store = VAR_7.get_datastore()\n", "return VAR_7\n" ]
[ "def make_homeserver(self, reactor, clock):...\n", "hs = self.setup_test_homeserver(http_client=None)\n", "self.handler = hs.get_federation_handler()\n", "self.store = hs.get_datastore()\n", "return hs\n" ]
[ 0, 4, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_28(VAR_9, VAR_10):...\n", "VAR_10 = VAR_10 or []\n", "VAR_69 = ub.session.query(ub.ArchivedBook).filter(ub.ArchivedBook.user_id ==\n int(VAR_87.id)).filter(ub.ArchivedBook.is_archived == True).all()\n", "VAR_70 = [VAR_54.book_id for VAR_54 in VAR_69]\n", "VAR_71 = db.Books.id.in_(VAR_70)\n", "VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage_with_archived_books(VAR_9,\n 0, db.Books, VAR_71, VAR_10, allow_show_archived=True)\n", "VAR_13 = _(u'Archived Books') + ' (' + str(len(VAR_70)) + ')'\n", "VAR_72 = 'archived'\n", "return render_title_template('index.html', VAR_68=random, VAR_63=entries,\n VAR_65=pagination, VAR_149=name, VAR_9=pagename)\n" ]
[ "def render_archived_books(page, order):...\n", "order = order or []\n", "archived_books = ub.session.query(ub.ArchivedBook).filter(ub.ArchivedBook.\n user_id == int(current_user.id)).filter(ub.ArchivedBook.is_archived == True\n ).all()\n", "archived_book_ids = [archived_book.book_id for archived_book in archived_books]\n", "archived_filter = db.Books.id.in_(archived_book_ids)\n", "entries, random, pagination = calibre_db.fill_indexpage_with_archived_books(\n page, 0, db.Books, archived_filter, order, allow_show_archived=True)\n", "name = _(u'Archived Books') + ' (' + str(len(archived_book_ids)) + ')'\n", "pagename = 'archived'\n", "return render_title_template('index.html', random=random, entries=entries,\n pagination=pagination, title=name, page=pagename)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_62(VAR_26, VAR_27, VAR_28, VAR_29=0, VAR_30=0):...\n", "VAR_80 = struct.pack('HHHH', VAR_27, VAR_28, VAR_29, VAR_30)\n", "fcntl.ioctl(VAR_26, termios.TIOCSWINSZ, VAR_80)\n" ]
[ "def set_windowsize(fd, row, col, xpix=0, ypix=0):...\n", "winsize = struct.pack('HHHH', row, col, xpix, ypix)\n", "fcntl.ioctl(fd, termios.TIOCSWINSZ, winsize)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Expr'" ]
[ "def FUNC_40(self, VAR_28, VAR_35):...\n", "VAR_15 = self.getUserId()\n", "VAR_80 = self.playlistdb.createPLS(VAR_28=plid, VAR_15=userid, addrstr=hostaddr\n )\n", "VAR_81 = self.playlistdb.getName(VAR_28, VAR_15)\n", "if VAR_80 and VAR_81:\n", "return self.serve_string_as_file(VAR_80, VAR_81 + '.pls')\n" ]
[ "def api_downloadpls(self, plid, hostaddr):...\n", "userid = self.getUserId()\n", "pls = self.playlistdb.createPLS(plid=plid, userid=userid, addrstr=hostaddr)\n", "name = self.playlistdb.getName(plid, userid)\n", "if pls and name:\n", "return self.serve_string_as_file(pls, name + '.pls')\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Condition", "Return'" ]
[ "def FUNC_11(VAR_15, VAR_17):...\n", "ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == VAR_15).delete()\n", "ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == VAR_15).delete()\n", "ub.delete_download(VAR_15)\n", "ub.session_commit()\n", "FUNC_7([u''], VAR_17.authors, db.Authors, calibre_db.session, 'author')\n", "FUNC_7([u''], VAR_17.tags, db.Tags, calibre_db.session, 'tags')\n", "FUNC_7([u''], VAR_17.series, db.Series, calibre_db.session, 'series')\n", "FUNC_7([u''], VAR_17.languages, db.Languages, calibre_db.session, 'languages')\n", "FUNC_7([u''], VAR_17.publishers, db.Publishers, calibre_db.session,\n 'publishers')\n", "VAR_52 = calibre_db.session.query(db.Custom_Columns).filter(db.\n Custom_Columns.datatype.notin_(db.cc_exceptions)).all()\n", "for VAR_29 in VAR_52:\n", "VAR_31 = 'custom_column_' + str(VAR_29.id)\n", "calibre_db.session.query(db.Books).filter(db.Books.id == VAR_15).delete()\n", "if not VAR_29.is_multiple:\n", "if len(getattr(VAR_17, VAR_31)) > 0:\n", "FUNC_7([u''], getattr(VAR_17, VAR_31), db.cc_classes[VAR_29.id], calibre_db\n .session, 'custom')\n", "if VAR_29.datatype == 'bool' or VAR_29.datatype == 'integer' or VAR_29.datatype == 'float':\n", "VAR_105 = getattr(VAR_17, VAR_31)[0]\n", "if VAR_29.datatype == 'rating':\n", "getattr(VAR_17, VAR_31).remove(VAR_105)\n", "VAR_105 = getattr(VAR_17, VAR_31)[0]\n", "VAR_105 = getattr(VAR_17, VAR_31)[0]\n", "VAR_2.debug('remove ' + str(VAR_29.id))\n", "getattr(VAR_17, VAR_31).remove(VAR_105)\n", "getattr(VAR_17, VAR_31).remove(VAR_105)\n", "calibre_db.session.delete(VAR_105)\n", "if len(VAR_105.books) == 0:\n", "VAR_2.debug('remove ' + str(VAR_29.id))\n", "calibre_db.session.commit()\n", "VAR_2.debug('remove ' + str(VAR_29.id))\n", "calibre_db.session.delete(VAR_105)\n", "calibre_db.session.delete(VAR_105)\n", "calibre_db.session.commit()\n", "calibre_db.session.commit()\n" ]
[ "def delete_whole_book(book_id, book):...\n", "ub.session.query(ub.BookShelf).filter(ub.BookShelf.book_id == book_id).delete()\n", "ub.session.query(ub.ReadBook).filter(ub.ReadBook.book_id == book_id).delete()\n", "ub.delete_download(book_id)\n", "ub.session_commit()\n", "modify_database_object([u''], book.authors, db.Authors, calibre_db.session,\n 'author')\n", "modify_database_object([u''], book.tags, db.Tags, calibre_db.session, 'tags')\n", "modify_database_object([u''], book.series, db.Series, calibre_db.session,\n 'series')\n", "modify_database_object([u''], book.languages, db.Languages, calibre_db.\n session, 'languages')\n", "modify_database_object([u''], book.publishers, db.Publishers, calibre_db.\n session, 'publishers')\n", "cc = calibre_db.session.query(db.Custom_Columns).filter(db.Custom_Columns.\n datatype.notin_(db.cc_exceptions)).all()\n", "for c in cc:\n", "cc_string = 'custom_column_' + str(c.id)\n", "calibre_db.session.query(db.Books).filter(db.Books.id == book_id).delete()\n", "if not c.is_multiple:\n", "if len(getattr(book, cc_string)) > 0:\n", "modify_database_object([u''], getattr(book, cc_string), db.cc_classes[c.id],\n calibre_db.session, 'custom')\n", "if c.datatype == 'bool' or c.datatype == 'integer' or c.datatype == 'float':\n", "del_cc = getattr(book, cc_string)[0]\n", "if c.datatype == 'rating':\n", "getattr(book, cc_string).remove(del_cc)\n", "del_cc = getattr(book, cc_string)[0]\n", "del_cc = getattr(book, cc_string)[0]\n", "log.debug('remove ' + str(c.id))\n", "getattr(book, cc_string).remove(del_cc)\n", "getattr(book, cc_string).remove(del_cc)\n", "calibre_db.session.delete(del_cc)\n", "if len(del_cc.books) == 0:\n", "log.debug('remove ' + str(c.id))\n", "calibre_db.session.commit()\n", "log.debug('remove ' + str(c.id))\n", "calibre_db.session.delete(del_cc)\n", "calibre_db.session.delete(del_cc)\n", "calibre_db.session.commit()\n", "calibre_db.session.commit()\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Assign'", "For", "Assign'", "Expr'", "Condition", "Condition", "Expr'", "Condition", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "Expr'", "Expr'", "Condition", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "VAR_2 = FUNC_1('/groups/%s/profile', VAR_30)\n", "return self.client.get_json(VAR_5=destination, VAR_2=path, VAR_3={\n 'requester_user_id': requester_user_id}, VAR_15=True)\n" ]
[ "@log_function...\n", "\"\"\"docstring\"\"\"\n", "path = _create_v1_path('/groups/%s/profile', group_id)\n", "return self.client.get_json(destination=destination, path=path, args={\n 'requester_user_id': requester_user_id}, ignore_backoff=True)\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'" ]
[ "def FUNC_6(self):...\n", "self.export_doc()\n" ]
[ "def on_update(self):...\n", "self.export_doc()\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def __setstate__(self, VAR_174):...\n", "self.update(VAR_174)\n" ]
[ "def __setstate__(self, d):...\n", "self.update(d)\n" ]
[ 0, 0 ]
[ "FunctionDef'", "Expr'" ]
[ "def FUNC_84(VAR_206):...\n", "VAR_317 = []\n", "for VAR_3, VAR_375 in VAR_206.items():\n", "if isinstance(VAR_375, dict):\n", "VAR_317.sort(VAR_310=lambda x: x['name'].lower())\n", "VAR_317.append({'name': VAR_3, 'ul': FUNC_84(VAR_375)})\n", "VAR_317.append({'name': VAR_3, 'id': VAR_375})\n", "return VAR_317\n" ]
[ "def ul_to_list(ul):...\n", "dir_list = []\n", "for name, value in ul.items():\n", "if isinstance(value, dict):\n", "dir_list.sort(key=lambda x: x['name'].lower())\n", "dir_list.append({'name': name, 'ul': ul_to_list(value)})\n", "dir_list.append({'name': name, 'id': value})\n", "return dir_list\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "For", "Condition", "Expr'", "Expr'", "Expr'", "Return'" ]
[ "@VAR_0.route('/api/plugins/<plugin_name>', methods=['GET'])...\n", "\"\"\"docstring\"\"\"\n", "VAR_111 = FUNC_58(f'/internal/plugins/{VAR_12}', 'get')\n", "return jsonify({'success': False, 'message': str(err)}), 400\n", "return jsonify(VAR_111)\n" ]
[ "@gui.route('/api/plugins/<plugin_name>', methods=['GET'])...\n", "\"\"\"docstring\"\"\"\n", "plugin_info = query_internal_api(f'/internal/plugins/{plugin_name}', 'get')\n", "return jsonify({'success': False, 'message': str(err)}), 400\n", "return jsonify(plugin_info)\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Return'", "Return'" ]
[ "@property...\n", "VAR_26 = ['calendar.js', 'admin/DateTimeShortcuts.js']\n", "return forms.Media(VAR_26=[static('admin/js/%s' % path) for path in js])\n" ]
[ "@property...\n", "js = ['calendar.js', 'admin/DateTimeShortcuts.js']\n", "return forms.Media(js=[static('admin/js/%s' % path) for path in js])\n" ]
[ 0, 0, 0 ]
[ "Condition", "Assign'", "Return'" ]
[ "def FUNC_1(VAR_3):...\n", "VAR_7 = {'Authorization': 'OAuth {}'.format(VAR_3)}\n", "VAR_8 = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', VAR_7\n =headers)\n", "if VAR_8.status_code == 401:\n", "VAR_0.warning('Failed getting user profile (response code 401).')\n", "return VAR_8.json()\n", "return None\n" ]
[ "def get_user_profile(access_token):...\n", "headers = {'Authorization': 'OAuth {}'.format(access_token)}\n", "response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo',\n headers=headers)\n", "if response.status_code == 401:\n", "logger.warning('Failed getting user profile (response code 401).')\n", "return response.json()\n", "return None\n" ]
[ 0, 4, 4, 4, 4, 4, 4 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Expr'", "Return'", "Return'" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "VAR_60 = FUNC_5(VAR_2=request, VAR_6=iid, VAR_10=w, VAR_11=h, VAR_8=conn,\n VAR_12=_defcb, **kwargs)\n", "VAR_61 = HttpResponse(VAR_60, content_type='image/jpeg')\n", "return VAR_61\n" ]
[ "@login_required()...\n", "\"\"\"docstring\"\"\"\n", "jpeg_data = _render_thumbnail(request=request, iid=iid, w=w, h=h, conn=conn,\n _defcb=_defcb, **kwargs)\n", "rsp = HttpResponse(jpeg_data, content_type='image/jpeg')\n", "return rsp\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Docstring", "Assign'", "Assign'", "Return'" ]
[ "@def_function.function(input_signature=[tensor_spec.TensorSpec(shape=(2, 2),...\n", "return VAR_44 + 2\n" ]
[ "@def_function.function(input_signature=[tensor_spec.TensorSpec(shape=(2, 2),...\n", "return x + 2\n" ]
[ 0, 0 ]
[ "Condition", "Return'" ]
[ "def FUNC_29(self, VAR_21):...\n", "\"\"\"docstring\"\"\"\n", "VAR_48 = self.encode(VAR_21).encode('utf-8')\n", "self.transport.write(VAR_48)\n", "self.transport.flush()\n" ]
[ "def send(self, msg):...\n", "\"\"\"docstring\"\"\"\n", "raw = self.encode(msg).encode('utf-8')\n", "self.transport.write(raw)\n", "self.transport.flush()\n" ]
[ 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Expr'", "Expr'" ]
[ "@log_function...\n", "VAR_2 = FUNC_1('/exchange_third_party_invite/%s', VAR_6)\n", "VAR_37 = await self.client.put_json(VAR_5=destination, VAR_2=path, VAR_39=\n event_dict)\n", "return VAR_37\n" ]
[ "@log_function...\n", "path = _create_v1_path('/exchange_third_party_invite/%s', room_id)\n", "response = await self.client.put_json(destination=destination, path=path,\n data=event_dict)\n", "return response\n" ]
[ 0, 0, 0, 0 ]
[ "Condition", "Assign'", "Assign'", "Return'" ]
[ "@VAR_2.route('/show/<int:book_id>/<book_format>', defaults={'anyname': 'None'})...\n", "VAR_6 = VAR_6.split('.')[0]\n", "VAR_95 = calibre_db.get_book(VAR_5)\n", "VAR_8 = calibre_db.get_book_format(VAR_5, VAR_6.upper())\n", "if not VAR_8:\n", "return 'File not in Database'\n", "VAR_3.info('Serving book: %s', VAR_8.name)\n", "if config.config_use_google_drive:\n", "if VAR_6.upper() == 'TXT':\n", "VAR_134 = Headers()\n", "VAR_3.debug_or_exception(ex)\n", "return send_from_directory(os.path.join(config.config_calibre_dir, VAR_95.\n path), VAR_8.name + '.' + VAR_6)\n", "VAR_146 = open(os.path.join(config.config_calibre_dir, VAR_95.path, VAR_8.\n name + '.' + VAR_6), 'rb').read()\n", "VAR_3.error('File Not Found')\n", "VAR_134['Content-Type'] = mimetypes.types_map.get('.' + VAR_6,\n 'application/octet-stream')\n", "return 'File Not Found'\n", "VAR_136 = chardet.detect(VAR_146)\n", "return 'File Not Found'\n", "VAR_135 = getFileFromEbooksFolder(VAR_95.path, VAR_8.name + '.' + VAR_6)\n", "return make_response(VAR_146.decode(VAR_136['encoding'], 'surrogatepass').\n encode('utf-8', 'surrogatepass'))\n", "return do_gdrive_download(VAR_135, VAR_134, VAR_6.upper() == 'TXT')\n" ]
[ "@web.route('/show/<int:book_id>/<book_format>', defaults={'anyname': 'None'})...\n", "book_format = book_format.split('.')[0]\n", "book = calibre_db.get_book(book_id)\n", "data = calibre_db.get_book_format(book_id, book_format.upper())\n", "if not data:\n", "return 'File not in Database'\n", "log.info('Serving book: %s', data.name)\n", "if config.config_use_google_drive:\n", "if book_format.upper() == 'TXT':\n", "headers = Headers()\n", "log.debug_or_exception(ex)\n", "return send_from_directory(os.path.join(config.config_calibre_dir, book.\n path), data.name + '.' + book_format)\n", "rawdata = open(os.path.join(config.config_calibre_dir, book.path, data.name +\n '.' + book_format), 'rb').read()\n", "log.error('File Not Found')\n", "headers['Content-Type'] = mimetypes.types_map.get('.' + book_format,\n 'application/octet-stream')\n", "return 'File Not Found'\n", "result = chardet.detect(rawdata)\n", "return 'File Not Found'\n", "df = getFileFromEbooksFolder(book.path, data.name + '.' + book_format)\n", "return make_response(rawdata.decode(result['encoding'], 'surrogatepass').\n encode('utf-8', 'surrogatepass'))\n", "return do_gdrive_download(df, headers, book_format.upper() == 'TXT')\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "For", "Assign'", "Assign'", "Assign'", "Condition", "Return'", "Expr'", "Condition", "Condition", "Assign'", "Expr'", "Return'", "Assign'", "Expr'", "Assign'", "Return'", "Assign'", "Return'", "Assign'", "Return'", "Return'" ]
[ "@pytest.mark.not_frozen...\n", "VAR_6.start(VAR_11=['--temp-basedir'] + _base_args(request.config), VAR_16=\n {'PYTHONOPTIMIZE': level})\n", "if VAR_10 == '2':\n", "VAR_23 = (\n 'Running on optimize level higher than 1, unexpected behavior may occur.')\n", "VAR_6.send_cmd(':quit')\n", "VAR_18 = VAR_6.wait_for(message=msg)\n", "VAR_6.wait_for_quit()\n", "VAR_18.expected = True\n" ]
[ "@pytest.mark.not_frozen...\n", "quteproc_new.start(args=['--temp-basedir'] + _base_args(request.config),\n env={'PYTHONOPTIMIZE': level})\n", "if level == '2':\n", "msg = 'Running on optimize level higher than 1, unexpected behavior may occur.'\n", "quteproc_new.send_cmd(':quit')\n", "line = quteproc_new.wait_for(message=msg)\n", "quteproc_new.wait_for_quit()\n", "line.expected = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Condition", "Assign'", "Expr'", "Assign'", "Expr'", "Assign'" ]
[ "import logging\n", "import pathlib\n", "from s3file.storages import local_dev, storage\n", "from . import views\n", "VAR_0 = logging.getLogger('s3file')\n", "def __init__(self, VAR_1):...\n", "self.get_response = VAR_1\n", "def __call__(self, VAR_2):...\n", "VAR_4 = VAR_2.POST.getlist('s3file')\n", "for field_name in VAR_4:\n", "VAR_3 = VAR_2.POST.getlist(field_name)\n", "if local_dev and VAR_2.path == '/__s3_mock__/':\n", "VAR_2.FILES.setlist(field_name, list(self.get_files_from_storage(VAR_3)))\n", "return views.S3MockView.as_view()(VAR_2)\n", "return self.get_response(VAR_2)\n" ]
[ "import logging\n", "import pathlib\n", "from s3file.storages import local_dev, storage\n", "from . import views\n", "logger = logging.getLogger('s3file')\n", "def __init__(self, get_response):...\n", "self.get_response = get_response\n", "def __call__(self, request):...\n", "file_fields = request.POST.getlist('s3file')\n", "for field_name in file_fields:\n", "paths = request.POST.getlist(field_name)\n", "if local_dev and request.path == '/__s3_mock__/':\n", "request.FILES.setlist(field_name, list(self.get_files_from_storage(paths)))\n", "return views.S3MockView.as_view()(request)\n", "return self.get_response(request)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0 ]
[ "Import'", "Import'", "ImportFrom'", "ImportFrom'", "Assign'", "FunctionDef'", "Assign'", "FunctionDef'", "Assign'", "For", "Assign'", "Condition", "Expr'", "Return'", "Return'" ]
[ "def FUNC_79(VAR_325):...\n", "VAR_30, VAR_29 = VAR_325\n", "return int((VAR_30 - VAR_84 + VAR_88) * VAR_95), int((VAR_29 - VAR_85 +\n VAR_90) * VAR_95)\n" ]
[ "def resizeXY(xy):...\n", "x, y = xy\n", "return int((x - newX + left_xs) * factor), int((y - newY + top_xs) * factor)\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Return'" ]
[ "@defer.inlineCallbacks...\n", "yield defer.ensureDeferred(self.handler.set_avatar_url(self.frank, synapse.\n types.create_requester(self.frank), 'http://my.server/pic.gif'))\n", "self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_avatar_url(self.frank.localpart))), 'http://my.server/pic.gif')\n", "yield defer.ensureDeferred(self.handler.set_avatar_url(self.frank, synapse.\n types.create_requester(self.frank), 'http://my.server/me.png'))\n", "self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_avatar_url(self.frank.localpart))), 'http://my.server/me.png')\n" ]
[ "@defer.inlineCallbacks...\n", "yield defer.ensureDeferred(self.handler.set_avatar_url(self.frank, synapse.\n types.create_requester(self.frank), 'http://my.server/pic.gif'))\n", "self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_avatar_url(self.frank.localpart))), 'http://my.server/pic.gif')\n", "yield defer.ensureDeferred(self.handler.set_avatar_url(self.frank, synapse.\n types.create_requester(self.frank), 'http://my.server/me.png'))\n", "self.assertEquals((yield defer.ensureDeferred(self.store.\n get_profile_avatar_url(self.frank.localpart))), 'http://my.server/me.png')\n" ]
[ 0, 0, 0, 0, 0 ]
[ "Condition", "Expr'", "Expr'", "Expr'", "Expr'" ]
[ "def FUNC_2(VAR_0):...\n", "VAR_9 = GeneratorError()\n", "VAR_2 = VAR_0.patch('openapi_python_client._get_document', return_value=error)\n", "VAR_4 = VAR_0.patch('openapi_python_client.parser.GeneratorData.from_dict')\n", "VAR_6 = VAR_0.MagicMock()\n", "VAR_7 = VAR_0.MagicMock()\n", "from openapi_python_client import _get_project_for_url_or_path\n", "VAR_8 = VAR_10(VAR_6=url, VAR_7=path)\n", "VAR_2.assert_called_once_with(VAR_6=url, VAR_7=path)\n", "VAR_4.assert_not_called()\n", "assert VAR_8 == VAR_9\n" ]
[ "def test__get_project_for_url_or_path_document_error(mocker):...\n", "error = GeneratorError()\n", "_get_document = mocker.patch('openapi_python_client._get_document',\n return_value=error)\n", "from_dict = mocker.patch('openapi_python_client.parser.GeneratorData.from_dict'\n )\n", "url = mocker.MagicMock()\n", "path = mocker.MagicMock()\n", "from openapi_python_client import _get_project_for_url_or_path\n", "project = _get_project_for_url_or_path(url=url, path=path)\n", "_get_document.assert_called_once_with(url=url, path=path)\n", "from_dict.assert_not_called()\n", "assert project == error\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "Assert'" ]
[ "async def FUNC_5(self):...\n", "VAR_33 = AsyncHTTPClient()\n", "VAR_34 = HTTPRequest('https://doi.org/{}'.format(self.spec), user_agent=\n 'BinderHub')\n", "VAR_35 = await VAR_33.fetch(VAR_34)\n", "self.record_id = VAR_35.effective_url.rsplit('/', maxsplit=1)[1]\n", "return self.record_id\n" ]
[ "async def get_resolved_ref(self):...\n", "client = AsyncHTTPClient()\n", "req = HTTPRequest('https://doi.org/{}'.format(self.spec), user_agent=\n 'BinderHub')\n", "r = await client.fetch(req)\n", "self.record_id = r.effective_url.rsplit('/', maxsplit=1)[1]\n", "return self.record_id\n" ]
[ 0, 0, 0, 0, 0, 0 ]
[ "AsyncFunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_28(VAR_9, VAR_7):...\n", "VAR_10 = VAR_7[0] or []\n", "VAR_69 = ub.session.query(ub.ArchivedBook).filter(ub.ArchivedBook.user_id ==\n int(VAR_87.id)).filter(ub.ArchivedBook.is_archived == True).all()\n", "VAR_70 = [VAR_142.book_id for VAR_142 in VAR_69]\n", "VAR_71 = db.Books.id.in_(VAR_70)\n", "VAR_63, VAR_68, VAR_65 = calibre_db.fill_indexpage_with_archived_books(VAR_9,\n 0, db.Books, VAR_71, VAR_10, allow_show_archived=True)\n", "VAR_13 = _(u'Archived Books') + ' (' + str(len(VAR_70)) + ')'\n", "VAR_72 = 'archived'\n", "return render_title_template('index.html', VAR_68=random, VAR_63=entries,\n VAR_65=pagination, VAR_150=name, VAR_9=pagename, VAR_10=sort[1])\n" ]
[ "def render_archived_books(page, sort):...\n", "order = sort[0] or []\n", "archived_books = ub.session.query(ub.ArchivedBook).filter(ub.ArchivedBook.\n user_id == int(current_user.id)).filter(ub.ArchivedBook.is_archived == True\n ).all()\n", "archived_book_ids = [archived_book.book_id for archived_book in archived_books]\n", "archived_filter = db.Books.id.in_(archived_book_ids)\n", "entries, random, pagination = calibre_db.fill_indexpage_with_archived_books(\n page, 0, db.Books, archived_filter, order, allow_show_archived=True)\n", "name = _(u'Archived Books') + ' (' + str(len(archived_book_ids)) + ')'\n", "pagename = 'archived'\n", "return render_title_template('index.html', random=random, entries=entries,\n pagination=pagination, title=name, page=pagename, order=sort[1])\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Return'" ]
[ "def FUNC_18(VAR_23, VAR_17):...\n", "VAR_39 = False\n", "VAR_23 = VAR_23 or '1'\n", "if not VAR_23.replace('.', '', 1).isdigit():\n", "flash(_('%(seriesindex)s is not a valid number, skipping', seriesindex=\n series_index), category='warning')\n", "if VAR_17.series_index != VAR_23:\n", "return False\n", "VAR_17.series_index = VAR_23\n", "return VAR_39\n", "VAR_39 = True\n" ]
[ "def edit_book_series_index(series_index, book):...\n", "modif_date = False\n", "series_index = series_index or '1'\n", "if not series_index.replace('.', '', 1).isdigit():\n", "flash(_('%(seriesindex)s is not a valid number, skipping', seriesindex=\n series_index), category='warning')\n", "if book.series_index != series_index:\n", "return False\n", "book.series_index = series_index\n", "return modif_date\n", "modif_date = True\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'", "Condition", "Expr'", "Condition", "Return'", "Assign'", "Return'", "Assign'" ]
[ "import pytest\n", "import openapi_python_client.schema as oai\n", "from openapi_python_client.parser.errors import PropertyError\n", "VAR_0 = 'openapi_python_client.parser.properties'\n", "def FUNC_0(self):...\n", "from openapi_python_client.parser.properties import Property\n", "VAR_4 = Property(VAR_5='test', VAR_26=True, default=None)\n", "VAR_4._type_string = 'TestType'\n", "assert VAR_4.get_type_string() == 'TestType'\n", "VAR_4.required = False\n", "assert VAR_4.get_type_string() == 'Optional[TestType]'\n", "def FUNC_1(self, VAR_1):...\n", "from openapi_python_client.parser.properties import Property\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_6 = VAR_1.patch('openapi_python_client.utils.snake_case')\n", "VAR_4 = Property(VAR_5=name, VAR_26=True, default=None)\n", "VAR_7 = VAR_1.patch.object(VAR_4, 'get_type_string')\n", "assert VAR_4.to_string() == f'{VAR_6(VAR_5)}: {VAR_7()}'\n", "VAR_4.required = False\n", "assert VAR_4.to_string() == f'{VAR_6(VAR_5)}: {VAR_7()} = None'\n", "VAR_4.default = 'TEST'\n", "assert VAR_4.to_string() == f'{VAR_6(VAR_5)}: {VAR_7()} = TEST'\n", "def FUNC_2(self, VAR_1):...\n", "from openapi_python_client.parser.properties import Property\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_1.patch('openapi_python_client.utils.snake_case')\n", "VAR_4 = Property(VAR_5=name, VAR_26=True, default=None)\n", "assert VAR_4.get_imports(VAR_9='') == set()\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9='') == {'from typing import Optional'}\n", "def test___post_init__(self):...\n", "from openapi_python_client.parser.properties import StringProperty\n", "VAR_8 = VAR_35(VAR_5='test', VAR_26=True, default='A Default Value')\n", "assert VAR_8.default == '\"A Default Value\"'\n", "def FUNC_0(self):...\n", "from openapi_python_client.parser.properties import StringProperty\n", "VAR_4 = VAR_35(VAR_5='test', VAR_26=True, default=None)\n", "assert VAR_4.get_type_string() == 'str'\n", "VAR_4.required = False\n", "assert VAR_4.get_type_string() == 'Optional[str]'\n", "def FUNC_2(self, VAR_1):...\n", "from openapi_python_client.parser.properties import DateTimeProperty\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_1.patch('openapi_python_client.utils.snake_case')\n", "VAR_4 = VAR_36(VAR_5=name, VAR_26=True, default=None)\n", "assert VAR_4.get_imports(VAR_9='') == {'from datetime import datetime',\n    'from typing import cast'}\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9='') == {'from typing import Optional',\n    'from datetime import datetime', 'from typing import cast'}\n", "def FUNC_2(self, VAR_1):...\n", "from openapi_python_client.parser.properties import DateProperty\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_1.patch('openapi_python_client.utils.snake_case')\n", "VAR_4 = VAR_37(VAR_5=name, VAR_26=True, default=None)\n", "assert VAR_4.get_imports(VAR_9='') == {'from datetime import date',\n    'from typing import cast'}\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9='') == {'from typing import Optional',\n    'from datetime import date', 'from typing import cast'}\n", "def FUNC_2(self, VAR_1):...\n", "from openapi_python_client.parser.properties import FileProperty\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_1.patch('openapi_python_client.utils.snake_case')\n", "VAR_9 = 'blah'\n", "VAR_4 = VAR_38(VAR_5=name, VAR_26=True, default=None)\n", "assert VAR_4.get_imports(VAR_9=prefix) == {f'from {VAR_9}.types import File',\n    'from dataclasses import astuple'}\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9=prefix) == {'from typing import Optional',\n    f'from {VAR_9}.types import File', 'from dataclasses import astuple'}\n", "def FUNC_0(self, VAR_1):...\n", "from openapi_python_client.parser.properties import ListProperty\n", "VAR_10 = VAR_1.MagicMock()\n", "VAR_11 = VAR_1.MagicMock()\n", "VAR_10.get_type_string.return_value = VAR_11\n", "VAR_4 = VAR_31(VAR_5='test', VAR_26=True, default=None, VAR_10=inner_property)\n", "assert VAR_4.get_type_string() == f'List[{VAR_11}]'\n", "VAR_4.required = False\n", "assert VAR_4.get_type_string() == f'Optional[List[{VAR_11}]]'\n", "VAR_4 = VAR_31(VAR_5='test', VAR_26=True, default=[], VAR_10=inner_property)\n", "assert VAR_4.default == f'field(default_factory=lambda: cast(List[{VAR_11}], []))'\n", "def FUNC_3(self, VAR_1):...\n", "from openapi_python_client.parser.properties import ListProperty\n", "VAR_10 = VAR_1.MagicMock()\n", "VAR_12 = VAR_1.MagicMock()\n", "VAR_10.get_imports.return_value = {VAR_12}\n", "VAR_9 = VAR_1.MagicMock()\n", "VAR_4 = VAR_31(VAR_5='test', VAR_26=True, default=None, VAR_10=inner_property)\n", "assert VAR_4.get_imports(VAR_9=prefix) == {VAR_12, 'from typing import List'}\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9=prefix) == {VAR_12,\n    'from typing import List', 'from typing import Optional'}\n", "VAR_4.default = VAR_1.MagicMock()\n", "assert VAR_4.get_imports(VAR_9=prefix) == {VAR_12,\n    'from typing import Optional', 'from typing import List',\n    'from typing import cast', 'from dataclasses import field'}\n", "def FUNC_0(self, VAR_1):...\n", "from openapi_python_client.parser.properties import UnionProperty\n", "VAR_13 = VAR_1.MagicMock()\n", "VAR_13.get_type_string.return_value = 'inner_type_string_1'\n", "VAR_14 = VAR_1.MagicMock()\n", "VAR_14.get_type_string.return_value = 'inner_type_string_2'\n", "VAR_4 = VAR_33(VAR_5='test', VAR_26=True, default=None, inner_properties=[\n    inner_property_1, inner_property_2])\n", "assert VAR_4.get_type_string(\n    ) == 'Union[inner_type_string_1, inner_type_string_2]'\n", "VAR_4.required = False\n", "assert VAR_4.get_type_string(\n    ) == 'Optional[Union[inner_type_string_1, inner_type_string_2]]'\n", "def FUNC_3(self, VAR_1):...\n", "from openapi_python_client.parser.properties import UnionProperty\n", "VAR_13 = VAR_1.MagicMock()\n", "VAR_15 = VAR_1.MagicMock()\n", "VAR_13.get_imports.return_value = {VAR_15}\n", "VAR_14 = VAR_1.MagicMock()\n", "VAR_16 = VAR_1.MagicMock()\n", "VAR_14.get_imports.return_value = {VAR_16}\n", "VAR_9 = VAR_1.MagicMock()\n", "VAR_4 = VAR_33(VAR_5='test', VAR_26=True, default=None, inner_properties=[\n    inner_property_1, inner_property_2])\n", "assert VAR_4.get_imports(VAR_9=prefix) == {VAR_15, VAR_16,\n    'from typing import Union'}\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9=prefix) == {VAR_15, VAR_16,\n    'from typing import Union', 'from typing import Optional'}\n", "def test___post_init__(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_6 = VAR_1.patch('openapi_python_client.utils.snake_case')\n", "VAR_17 = VAR_1.MagicMock(class_name='MyTestEnum')\n", "VAR_18 = VAR_1.MagicMock(class_name='Deduped')\n", "VAR_19 = VAR_1.patch(f'{VAR_0}.Reference.from_ref', side_effect=[\n    fake_reference, deduped_reference, deduped_reference])\n", "from openapi_python_client.parser import properties\n", "VAR_20 = VAR_1.MagicMock()\n", "properties._existing_enums = {'MyTestEnum': VAR_20}\n", "VAR_21 = {'FIRST': 'first', 'SECOND': 'second'}\n", "VAR_22 = properties.EnumProperty(VAR_5=name, VAR_26=True, default='second',\n    VAR_21=values, title='a_title')\n", "assert VAR_22.default == 'Deduped.SECOND'\n", "assert VAR_22.python_name == VAR_6(VAR_5)\n", "VAR_19.assert_has_calls([VAR_1.call('a_title'), VAR_1.call('MyTestEnum1')])\n", "assert VAR_22.reference == VAR_18\n", "assert properties._existing_enums == {'MyTestEnum': VAR_20, 'Deduped': VAR_22}\n", "assert properties.EnumProperty(VAR_5=name, VAR_26=True, default='second',\n    VAR_21=values, title='a_title') == VAR_22\n", "assert properties._existing_enums == {'MyTestEnum': VAR_20, 'Deduped': VAR_22}\n", "VAR_20.values = VAR_21\n", "VAR_19.reset_mock()\n", "VAR_19.side_effect = [VAR_17]\n", "VAR_22 = properties.EnumProperty(VAR_5=name, VAR_26=True, default='second',\n    VAR_21=values, title='a_title')\n", "assert VAR_22.default == 'MyTestEnum.SECOND'\n", "assert VAR_22.python_name == VAR_6(VAR_5)\n", "VAR_19.assert_called_once_with('a_title')\n", "assert VAR_22.reference == VAR_17\n", "assert len(properties._existing_enums) == 2\n", "properties._existing_enums = {}\n", "def FUNC_0(self, VAR_1):...\n", "VAR_17 = VAR_1.MagicMock(class_name='MyTestEnum')\n", "VAR_1.patch(f'{VAR_0}.Reference.from_ref', return_value=fake_reference)\n", "from openapi_python_client.parser import properties\n", "VAR_22 = properties.EnumProperty(VAR_5='test', VAR_26=True, default=None,\n    VAR_21={}, title='a_title')\n", "assert VAR_22.get_type_string() == 'MyTestEnum'\n", "VAR_22.required = False\n", "assert VAR_22.get_type_string() == 'Optional[MyTestEnum]'\n", "properties._existing_enums = {}\n", "def FUNC_2(self, VAR_1):...\n", "VAR_17 = VAR_1.MagicMock(class_name='MyTestEnum', module_name='my_test_enum')\n", "VAR_1.patch(f'{VAR_0}.Reference.from_ref', return_value=fake_reference)\n", "VAR_9 = VAR_1.MagicMock()\n", "from openapi_python_client.parser import properties\n", "VAR_22 = properties.EnumProperty(VAR_5='test', VAR_26=True, default=None,\n    VAR_21={}, title='a_title')\n", "assert VAR_22.get_imports(VAR_9=prefix) == {\n    f'from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}'}\n", "VAR_22.required = False\n", "assert VAR_22.get_imports(VAR_9=prefix) == {\n    f'from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}',\n    'from typing import Optional'}\n", "properties._existing_enums = {}\n", "def FUNC_4(self):...\n", "from openapi_python_client.parser.properties import EnumProperty\n", "VAR_23 = ['abc', '123', 'a23', '1bc']\n", "VAR_24 = VAR_27.values_from_list(VAR_23)\n", "assert VAR_24 == {'ABC': 'abc', 'VALUE_1': '123', 'A23': 'a23', 'VALUE_3':\n    '1bc'}\n", "def FUNC_5(self):...\n", "from openapi_python_client.parser.properties import EnumProperty\n", "VAR_23 = ['abc', '123', 'a23', 'abc']\n", "VAR_27.values_from_list(VAR_23)\n", "def FUNC_6(self, VAR_1):...\n", "from openapi_python_client.parser import properties\n", "properties._existing_enums = VAR_1.MagicMock()\n", "assert properties.EnumProperty.get_all_enums() == properties._existing_enums\n", "properties._existing_enums = {}\n", "def FUNC_7(self):...\n", "from openapi_python_client.parser import properties\n", "properties._existing_enums = {'test': 'an enum'}\n", "assert properties.EnumProperty.get_enum('test') == 'an enum'\n", "properties._existing_enums = {}\n", "def FUNC_8(self, VAR_1):...\n", "from openapi_python_client.parser.properties import RefProperty\n", "VAR_25 = VAR_28(VAR_5='test', VAR_26=True, default=None, reference=mocker.\n    MagicMock(class_name='MyRefClass'))\n", "assert VAR_25.template == 'ref_property.pyi'\n", "VAR_1.patch(f'{VAR_0}.EnumProperty.get_enum', return_value='an enum')\n", "assert VAR_25.template == 'enum_property.pyi'\n", "def FUNC_0(self, VAR_1):...\n", "from openapi_python_client.parser.properties import RefProperty\n", "VAR_25 = VAR_28(VAR_5='test', VAR_26=True, default=None, reference=mocker.\n    MagicMock(class_name='MyRefClass'))\n", "assert VAR_25.get_type_string() == 'MyRefClass'\n", "VAR_25.required = False\n", "assert VAR_25.get_type_string() == 'Optional[MyRefClass]'\n", "def FUNC_2(self, VAR_1):...\n", "VAR_17 = VAR_1.MagicMock(class_name='MyRefClass', module_name='my_test_enum')\n", "VAR_9 = VAR_1.MagicMock()\n", "from openapi_python_client.parser.properties import RefProperty\n", "VAR_4 = VAR_28(VAR_5='test', VAR_26=True, default=None, reference=\n    fake_reference)\n", "assert VAR_4.get_imports(VAR_9=prefix) == {\n    f'from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}',\n    'from typing import Dict', 'from typing import cast'}\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9=prefix) == {\n    f'from {VAR_9}.{VAR_17.module_name} import {VAR_17.class_name}',\n    'from typing import Dict', 'from typing import cast',\n    'from typing import Optional'}\n", "def test___post_init__(self):...\n", "from openapi_python_client.parser.properties import DictProperty\n", "VAR_4 = DictProperty(VAR_5='blah', VAR_26=True, default={})\n", "assert VAR_4.default == 'field(default_factory=lambda: cast(Dict[Any, Any], {}))'\n", "def FUNC_2(self, VAR_1):...\n", "from openapi_python_client.parser.properties import DictProperty\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_1.patch('openapi_python_client.utils.snake_case')\n", "VAR_9 = VAR_1.MagicMock()\n", "VAR_4 = DictProperty(VAR_5=name, VAR_26=True, default=None)\n", "assert VAR_4.get_imports(VAR_9=prefix) == {'from typing import Dict'}\n", "VAR_4.required = False\n", "assert VAR_4.get_imports(VAR_9=prefix) == {'from typing import Optional',\n    'from typing import Dict'}\n", "VAR_4.default = VAR_1.MagicMock()\n", "assert VAR_4.get_imports(VAR_9=prefix) == {'from typing import Optional',\n    'from typing import Dict', 'from typing import cast',\n    'from dataclasses import field'}\n", "def FUNC_9(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = VAR_1.MagicMock(title=None)\n", "VAR_27 = VAR_1.patch(f'{VAR_0}.EnumProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_27.values_from_list.assert_called_once_with(VAR_23.enum)\n", "VAR_27.assert_called_once_with(VAR_5=name, VAR_26=required, VAR_21=\n    EnumProperty.values_from_list(), default=data.default, title=name)\n", "assert VAR_4 == VAR_27()\n", "VAR_27.reset_mock()\n", "VAR_23.title = VAR_1.MagicMock()\n", "property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_27.assert_called_once_with(VAR_5=name, VAR_26=required, VAR_21=\n    EnumProperty.values_from_list(), default=data.default, title=data.title)\n", "def FUNC_10(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Reference.construct(ref=mocker.MagicMock())\n", "VAR_19 = VAR_1.patch(f'{VAR_0}.Reference.from_ref')\n", "VAR_28 = VAR_1.patch(f'{VAR_0}.RefProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_19.assert_called_once_with(VAR_23.ref)\n", "VAR_28.assert_called_once_with(VAR_5=name, VAR_26=required, reference=\n    from_ref(), default=None)\n", "assert VAR_4 == VAR_28()\n", "def FUNC_11(self, VAR_1):...\n", "VAR_29 = VAR_1.patch(f'{VAR_0}._string_based_property')\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type='string')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "assert VAR_4 == VAR_29.return_value\n", "VAR_29.assert_called_once_with(VAR_5=name, VAR_26=required, VAR_23=data)\n", "@pytest.mark.parametrize('openapi_type,python_type', [('number',...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type=openapi_type)\n", "VAR_30 = VAR_1.patch(f'{VAR_0}.{VAR_3}')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_30.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)\n", "assert VAR_4 == VAR_30()\n", "VAR_30.reset_mock()\n", "VAR_23.default = VAR_1.MagicMock()\n", "property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_30.assert_called_once_with(VAR_5=name, VAR_26=required, default=data.\n    default)\n", "def FUNC_13(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema(type='array', items={'type': 'number', 'default': '0.0'})\n", "VAR_31 = VAR_1.patch(f'{VAR_0}.ListProperty')\n", "VAR_32 = VAR_1.patch(f'{VAR_0}.FloatProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_32.assert_called_once_with(VAR_5=f'{name}_item', VAR_26=True, default='0.0'\n    )\n", "VAR_31.assert_called_once_with(VAR_5=name, VAR_26=required, default=None,\n    VAR_10=FloatProperty.return_value)\n", "assert VAR_4 == VAR_31.return_value\n", "def FUNC_14(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema(type='array')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "assert VAR_4 == PropertyError(VAR_23=data, detail=\n    'type array must have items defined')\n", "def FUNC_15(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema(type='array', items={})\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "assert VAR_4 == PropertyError(VAR_23=oai.Schema(), detail=\n    f'invalid data in items of array {name}')\n", "def FUNC_16(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema(anyOf=[{'type': 'number', 'default': '0.0'}, {'type':\n    'integer', 'default': '0'}])\n", "VAR_33 = VAR_1.patch(f'{VAR_0}.UnionProperty')\n", "VAR_32 = VAR_1.patch(f'{VAR_0}.FloatProperty')\n", "VAR_34 = VAR_1.patch(f'{VAR_0}.IntProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_32.assert_called_once_with(VAR_5=name, VAR_26=required, default='0.0')\n", "VAR_34.assert_called_once_with(VAR_5=name, VAR_26=required, default='0')\n", "VAR_33.assert_called_once_with(VAR_5=name, VAR_26=required, default=None,\n    inner_properties=[FloatProperty.return_value, IntProperty.return_value])\n", "assert VAR_4 == VAR_33.return_value\n", "def FUNC_17(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema(anyOf=[{}])\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_4 = property_from_data(VAR_5=name, VAR_26=required, VAR_23=data)\n", "assert VAR_4 == PropertyError(detail=f'Invalid property in union {name}',\n    VAR_23=oai.Schema())\n", "def FUNC_18(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type=mocker.MagicMock())\n", "from openapi_python_client.parser.errors import PropertyError\n", "from openapi_python_client.parser.properties import property_from_data\n", "assert property_from_data(VAR_5=name, VAR_26=required, VAR_23=data\n    ) == PropertyError(VAR_23=data, detail=f'unknown type {data.type}')\n", "def FUNC_19(self):...\n", "from openapi_python_client.parser.errors import PropertyError\n", "from openapi_python_client.parser.properties import property_from_data\n", "VAR_23 = oai.Schema()\n", "assert property_from_data(VAR_5='blah', VAR_26=True, VAR_23=data\n    ) == PropertyError(VAR_23=data, detail=\n    'Schemas must either have one of enum, anyOf, or type defined.')\n", "def FUNC_20(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type='string')\n", "VAR_35 = VAR_1.patch(f'{VAR_0}.StringProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=None,\n    default=None)\n", "assert VAR_4 == VAR_35.return_value\n", "VAR_35.reset_mock()\n", "VAR_23.default = VAR_1.MagicMock()\n", "VAR_23.pattern = VAR_1.MagicMock()\n", "VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=data.\n    pattern, default=data.default)\n", "def FUNC_21(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type='string', schema_format='date-time')\n", "VAR_36 = VAR_1.patch(f'{VAR_0}.DateTimeProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_36.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)\n", "assert VAR_4 == VAR_36.return_value\n", "VAR_36.reset_mock()\n", "VAR_23.default = VAR_1.MagicMock()\n", "VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_36.assert_called_once_with(VAR_5=name, VAR_26=required, default=data.\n    default)\n", "def FUNC_22(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type='string', schema_format='date')\n", "VAR_37 = VAR_1.patch(f'{VAR_0}.DateProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_37.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)\n", "assert VAR_4 == VAR_37.return_value\n", "VAR_37.reset_mock()\n", "VAR_23.default = VAR_1.MagicMock()\n", "VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_37.assert_called_once_with(VAR_5=name, VAR_26=required, default=data.\n    default)\n", "def FUNC_23(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type='string', schema_format='binary')\n", "VAR_38 = VAR_1.patch(f'{VAR_0}.FileProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_38.assert_called_once_with(VAR_5=name, VAR_26=required, default=None)\n", "assert VAR_4 == VAR_38.return_value\n", "VAR_38.reset_mock()\n", "VAR_23.default = VAR_1.MagicMock()\n", "VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_38.assert_called_once_with(VAR_5=name, VAR_26=required, default=data.\n    default)\n", "def FUNC_24(self, VAR_1):...\n", "VAR_5 = VAR_1.MagicMock()\n", "VAR_26 = VAR_1.MagicMock()\n", "VAR_23 = oai.Schema.construct(type='string', schema_format=mocker.MagicMock())\n", "VAR_35 = VAR_1.patch(f'{VAR_0}.StringProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "VAR_4 = VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=None,\n    default=None)\n", "assert VAR_4 == VAR_35.return_value\n", "VAR_35.reset_mock()\n", "VAR_23.default = VAR_1.MagicMock()\n", "VAR_23.pattern = VAR_1.MagicMock()\n", "VAR_29(VAR_5=name, VAR_26=required, VAR_23=data)\n", "VAR_35.assert_called_once_with(VAR_5=name, VAR_26=required, pattern=data.\n    pattern, default=data.default)\n" ]
[ "import pytest\n", "import openapi_python_client.schema as oai\n", "from openapi_python_client.parser.errors import PropertyError\n", "MODULE_NAME = 'openapi_python_client.parser.properties'\n", "def test_get_type_string(self):...\n", "from openapi_python_client.parser.properties import Property\n", "p = Property(name='test', required=True, default=None)\n", "p._type_string = 'TestType'\n", "assert p.get_type_string() == 'TestType'\n", "p.required = False\n", "assert p.get_type_string() == 'Optional[TestType]'\n", "def test_to_string(self, mocker):...\n", "from openapi_python_client.parser.properties import Property\n", "name = mocker.MagicMock()\n", "snake_case = mocker.patch('openapi_python_client.utils.snake_case')\n", "p = Property(name=name, required=True, default=None)\n", "get_type_string = mocker.patch.object(p, 'get_type_string')\n", "assert p.to_string() == f'{snake_case(name)}: {get_type_string()}'\n", "p.required = False\n", "assert p.to_string() == f'{snake_case(name)}: {get_type_string()} = None'\n", "p.default = 'TEST'\n", "assert p.to_string() == f'{snake_case(name)}: {get_type_string()} = TEST'\n", "def test_get_imports(self, mocker):...\n", "from openapi_python_client.parser.properties import Property\n", "name = mocker.MagicMock()\n", "mocker.patch('openapi_python_client.utils.snake_case')\n", "p = Property(name=name, required=True, default=None)\n", "assert p.get_imports(prefix='') == set()\n", "p.required = False\n", "assert p.get_imports(prefix='') == {'from typing import Optional'}\n", "def test___post_init__(self):...\n", "from openapi_python_client.parser.properties import StringProperty\n", "sp = StringProperty(name='test', required=True, default='A Default Value')\n", "assert sp.default == '\"A Default Value\"'\n", "def test_get_type_string(self):...\n", "from openapi_python_client.parser.properties import StringProperty\n", "p = StringProperty(name='test', required=True, default=None)\n", "assert p.get_type_string() == 'str'\n", "p.required = False\n", "assert p.get_type_string() == 'Optional[str]'\n", "def test_get_imports(self, mocker):...\n", "from openapi_python_client.parser.properties import DateTimeProperty\n", "name = mocker.MagicMock()\n", "mocker.patch('openapi_python_client.utils.snake_case')\n", "p = DateTimeProperty(name=name, required=True, default=None)\n", "assert p.get_imports(prefix='') == {'from datetime import datetime',\n    'from typing import cast'}\n", "p.required = False\n", "assert p.get_imports(prefix='') == {'from typing import Optional',\n    'from datetime import datetime', 'from typing import cast'}\n", "def test_get_imports(self, mocker):...\n", "from openapi_python_client.parser.properties import DateProperty\n", "name = mocker.MagicMock()\n", "mocker.patch('openapi_python_client.utils.snake_case')\n", "p = DateProperty(name=name, required=True, default=None)\n", "assert p.get_imports(prefix='') == {'from datetime import date',\n    'from typing import cast'}\n", "p.required = False\n", "assert p.get_imports(prefix='') == {'from typing import Optional',\n    'from datetime import date', 'from typing import cast'}\n", "def test_get_imports(self, mocker):...\n", "from openapi_python_client.parser.properties import FileProperty\n", "name = mocker.MagicMock()\n", "mocker.patch('openapi_python_client.utils.snake_case')\n", "prefix = 'blah'\n", "p = FileProperty(name=name, required=True, default=None)\n", "assert p.get_imports(prefix=prefix) == {f'from {prefix}.types import File',\n    'from dataclasses import astuple'}\n", "p.required = False\n", "assert p.get_imports(prefix=prefix) == {'from typing import Optional',\n    f'from {prefix}.types import File', 'from dataclasses import astuple'}\n", "def test_get_type_string(self, mocker):...\n", "from openapi_python_client.parser.properties import ListProperty\n", "inner_property = mocker.MagicMock()\n", "inner_type_string = mocker.MagicMock()\n", "inner_property.get_type_string.return_value = inner_type_string\n", "p = ListProperty(name='test', required=True, default=None, inner_property=\n    inner_property)\n", "assert p.get_type_string() == f'List[{inner_type_string}]'\n", "p.required = False\n", "assert p.get_type_string() == f'Optional[List[{inner_type_string}]]'\n", "p = ListProperty(name='test', required=True, default=[], inner_property=\n    inner_property)\n", "assert p.default == f'field(default_factory=lambda: cast(List[{inner_type_string}], []))'\n", "def test_get_type_imports(self, mocker):...\n", "from openapi_python_client.parser.properties import ListProperty\n", "inner_property = mocker.MagicMock()\n", "inner_import = mocker.MagicMock()\n", "inner_property.get_imports.return_value = {inner_import}\n", "prefix = mocker.MagicMock()\n", "p = ListProperty(name='test', required=True, default=None, inner_property=\n    inner_property)\n", "assert p.get_imports(prefix=prefix) == {inner_import, 'from typing import List'\n    }\n", "p.required = False\n", "assert p.get_imports(prefix=prefix) == {inner_import,\n    'from typing import List', 'from typing import Optional'}\n", "p.default = mocker.MagicMock()\n", "assert p.get_imports(prefix=prefix) == {inner_import,\n    'from typing import Optional', 'from typing import List',\n    'from typing import cast', 'from dataclasses import field'}\n", "def test_get_type_string(self, mocker):...\n", "from openapi_python_client.parser.properties import UnionProperty\n", "inner_property_1 = mocker.MagicMock()\n", "inner_property_1.get_type_string.return_value = 'inner_type_string_1'\n", "inner_property_2 = mocker.MagicMock()\n", "inner_property_2.get_type_string.return_value = 'inner_type_string_2'\n", "p = UnionProperty(name='test', required=True, default=None,\n    inner_properties=[inner_property_1, inner_property_2])\n", "assert p.get_type_string() == 'Union[inner_type_string_1, inner_type_string_2]'\n", "p.required = False\n", "assert p.get_type_string(\n    ) == 'Optional[Union[inner_type_string_1, inner_type_string_2]]'\n", "def test_get_type_imports(self, mocker):...\n", "from openapi_python_client.parser.properties import UnionProperty\n", "inner_property_1 = mocker.MagicMock()\n", "inner_import_1 = mocker.MagicMock()\n", "inner_property_1.get_imports.return_value = {inner_import_1}\n", "inner_property_2 = mocker.MagicMock()\n", "inner_import_2 = mocker.MagicMock()\n", "inner_property_2.get_imports.return_value = {inner_import_2}\n", "prefix = mocker.MagicMock()\n", "p = UnionProperty(name='test', required=True, default=None,\n    inner_properties=[inner_property_1, inner_property_2])\n", "assert p.get_imports(prefix=prefix) == {inner_import_1, inner_import_2,\n    'from typing import Union'}\n", "p.required = False\n", "assert p.get_imports(prefix=prefix) == {inner_import_1, inner_import_2,\n    'from typing import Union', 'from typing import Optional'}\n", "def test___post_init__(self, mocker):...\n", "name = mocker.MagicMock()\n", "snake_case = mocker.patch('openapi_python_client.utils.snake_case')\n", "fake_reference = mocker.MagicMock(class_name='MyTestEnum')\n", "deduped_reference = mocker.MagicMock(class_name='Deduped')\n", "from_ref = mocker.patch(f'{MODULE_NAME}.Reference.from_ref', side_effect=[\n    fake_reference, deduped_reference, deduped_reference])\n", "from openapi_python_client.parser import properties\n", "fake_dup_enum = mocker.MagicMock()\n", "properties._existing_enums = {'MyTestEnum': fake_dup_enum}\n", "values = {'FIRST': 'first', 'SECOND': 'second'}\n", "enum_property = properties.EnumProperty(name=name, required=True, default=\n    'second', values=values, title='a_title')\n", "assert enum_property.default == 'Deduped.SECOND'\n", "assert enum_property.python_name == snake_case(name)\n", "from_ref.assert_has_calls([mocker.call('a_title'), mocker.call('MyTestEnum1')])\n", "assert enum_property.reference == deduped_reference\n", "assert properties._existing_enums == {'MyTestEnum': fake_dup_enum,\n    'Deduped': enum_property}\n", "assert properties.EnumProperty(name=name, required=True, default='second',\n    values=values, title='a_title') == enum_property\n", "assert properties._existing_enums == {'MyTestEnum': fake_dup_enum,\n    'Deduped': enum_property}\n", "fake_dup_enum.values = values\n", "from_ref.reset_mock()\n", "from_ref.side_effect = [fake_reference]\n", "enum_property = properties.EnumProperty(name=name, required=True, default=\n    'second', values=values, title='a_title')\n", "assert enum_property.default == 'MyTestEnum.SECOND'\n", "assert enum_property.python_name == snake_case(name)\n", "from_ref.assert_called_once_with('a_title')\n", "assert enum_property.reference == fake_reference\n", "assert len(properties._existing_enums) == 2\n", "properties._existing_enums = {}\n", "def test_get_type_string(self, mocker):...\n", "fake_reference = mocker.MagicMock(class_name='MyTestEnum')\n", "mocker.patch(f'{MODULE_NAME}.Reference.from_ref', return_value=fake_reference)\n", "from openapi_python_client.parser import properties\n", "enum_property = properties.EnumProperty(name='test', required=True, default\n    =None, values={}, title='a_title')\n", "assert enum_property.get_type_string() == 'MyTestEnum'\n", "enum_property.required = False\n", "assert enum_property.get_type_string() == 'Optional[MyTestEnum]'\n", "properties._existing_enums = {}\n", "def test_get_imports(self, mocker):...\n", "fake_reference = mocker.MagicMock(class_name='MyTestEnum', module_name=\n    'my_test_enum')\n", "mocker.patch(f'{MODULE_NAME}.Reference.from_ref', return_value=fake_reference)\n", "prefix = mocker.MagicMock()\n", "from openapi_python_client.parser import properties\n", "enum_property = properties.EnumProperty(name='test', required=True, default\n    =None, values={}, title='a_title')\n", "assert enum_property.get_imports(prefix=prefix) == {\n    f'from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}'\n    }\n", "enum_property.required = False\n", "assert enum_property.get_imports(prefix=prefix) == {\n    f'from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}'\n    , 'from typing import Optional'}\n", "properties._existing_enums = {}\n", "def test_values_from_list(self):...\n", "from openapi_python_client.parser.properties import EnumProperty\n", "data = ['abc', '123', 'a23', '1bc']\n", "result = EnumProperty.values_from_list(data)\n", "assert result == {'ABC': 'abc', 'VALUE_1': '123', 'A23': 'a23', 'VALUE_3':\n    '1bc'}\n", "def test_values_from_list_duplicate(self):...\n", "from openapi_python_client.parser.properties import EnumProperty\n", "data = ['abc', '123', 'a23', 'abc']\n", "EnumProperty.values_from_list(data)\n", "def test_get_all_enums(self, mocker):...\n", "from openapi_python_client.parser import properties\n", "properties._existing_enums = mocker.MagicMock()\n", "assert properties.EnumProperty.get_all_enums() == properties._existing_enums\n", "properties._existing_enums = {}\n", "def test_get_enum(self):...\n", "from openapi_python_client.parser import properties\n", "properties._existing_enums = {'test': 'an enum'}\n", "assert properties.EnumProperty.get_enum('test') == 'an enum'\n", "properties._existing_enums = {}\n", "def test_template(self, mocker):...\n", "from openapi_python_client.parser.properties import RefProperty\n", "ref_property = RefProperty(name='test', required=True, default=None,\n    reference=mocker.MagicMock(class_name='MyRefClass'))\n", "assert ref_property.template == 'ref_property.pyi'\n", "mocker.patch(f'{MODULE_NAME}.EnumProperty.get_enum', return_value='an enum')\n", "assert ref_property.template == 'enum_property.pyi'\n", "def test_get_type_string(self, mocker):...\n", "from openapi_python_client.parser.properties import RefProperty\n", "ref_property = RefProperty(name='test', required=True, default=None,\n    reference=mocker.MagicMock(class_name='MyRefClass'))\n", "assert ref_property.get_type_string() == 'MyRefClass'\n", "ref_property.required = False\n", "assert ref_property.get_type_string() == 'Optional[MyRefClass]'\n", "def test_get_imports(self, mocker):...\n", "fake_reference = mocker.MagicMock(class_name='MyRefClass', module_name=\n    'my_test_enum')\n", "prefix = mocker.MagicMock()\n", "from openapi_python_client.parser.properties import RefProperty\n", "p = RefProperty(name='test', required=True, default=None, reference=\n    fake_reference)\n", "assert p.get_imports(prefix=prefix) == {\n    f'from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}'\n    , 'from typing import Dict', 'from typing import cast'}\n", "p.required = False\n", "assert p.get_imports(prefix=prefix) == {\n    f'from {prefix}.{fake_reference.module_name} import {fake_reference.class_name}'\n    , 'from typing import Dict', 'from typing import cast',\n    'from typing import Optional'}\n", "def test___post_init__(self):...\n", "from openapi_python_client.parser.properties import DictProperty\n", "p = DictProperty(name='blah', required=True, default={})\n", "assert p.default == 'field(default_factory=lambda: cast(Dict[Any, Any], {}))'\n", "def test_get_imports(self, mocker):...\n", "from openapi_python_client.parser.properties import DictProperty\n", "name = mocker.MagicMock()\n", "mocker.patch('openapi_python_client.utils.snake_case')\n", "prefix = mocker.MagicMock()\n", "p = DictProperty(name=name, required=True, default=None)\n", "assert p.get_imports(prefix=prefix) == {'from typing import Dict'}\n", "p.required = False\n", "assert p.get_imports(prefix=prefix) == {'from typing import Optional',\n    'from typing import Dict'}\n", "p.default = mocker.MagicMock()\n", "assert p.get_imports(prefix=prefix) == {'from typing import Optional',\n    'from typing import Dict', 'from typing import cast',\n    'from dataclasses import field'}\n", "def test_property_from_data_enum(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = mocker.MagicMock(title=None)\n", "EnumProperty = mocker.patch(f'{MODULE_NAME}.EnumProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "EnumProperty.values_from_list.assert_called_once_with(data.enum)\n", "EnumProperty.assert_called_once_with(name=name, required=required, values=\n    EnumProperty.values_from_list(), default=data.default, title=name)\n", "assert p == EnumProperty()\n", "EnumProperty.reset_mock()\n", "data.title = mocker.MagicMock()\n", "property_from_data(name=name, required=required, data=data)\n", "EnumProperty.assert_called_once_with(name=name, required=required, values=\n    EnumProperty.values_from_list(), default=data.default, title=data.title)\n", "def test_property_from_data_ref(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Reference.construct(ref=mocker.MagicMock())\n", "from_ref = mocker.patch(f'{MODULE_NAME}.Reference.from_ref')\n", "RefProperty = mocker.patch(f'{MODULE_NAME}.RefProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "from_ref.assert_called_once_with(data.ref)\n", "RefProperty.assert_called_once_with(name=name, required=required, reference\n    =from_ref(), default=None)\n", "assert p == RefProperty()\n", "def test_property_from_data_string(self, mocker):...\n", "_string_based_property = mocker.patch(f'{MODULE_NAME}._string_based_property')\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type='string')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "assert p == _string_based_property.return_value\n", "_string_based_property.assert_called_once_with(name=name, required=required,\n    data=data)\n", "@pytest.mark.parametrize('openapi_type,python_type', [('number',...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type=openapi_type)\n", "clazz = mocker.patch(f'{MODULE_NAME}.{python_type}')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "clazz.assert_called_once_with(name=name, required=required, default=None)\n", "assert p == clazz()\n", "clazz.reset_mock()\n", "data.default = mocker.MagicMock()\n", "property_from_data(name=name, required=required, data=data)\n", "clazz.assert_called_once_with(name=name, required=required, default=data.\n    default)\n", "def test_property_from_data_array(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema(type='array', items={'type': 'number', 'default': '0.0'})\n", "ListProperty = mocker.patch(f'{MODULE_NAME}.ListProperty')\n", "FloatProperty = mocker.patch(f'{MODULE_NAME}.FloatProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "FloatProperty.assert_called_once_with(name=f'{name}_item', required=True,\n    default='0.0')\n", "ListProperty.assert_called_once_with(name=name, required=required, default=\n    None, inner_property=FloatProperty.return_value)\n", "assert p == ListProperty.return_value\n", "def test_property_from_data_array_no_items(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema(type='array')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "assert p == PropertyError(data=data, detail=\n    'type array must have items defined')\n", "def test_property_from_data_array_invalid_items(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema(type='array', items={})\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "assert p == PropertyError(data=oai.Schema(), detail=\n    f'invalid data in items of array {name}')\n", "def test_property_from_data_union(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema(anyOf=[{'type': 'number', 'default': '0.0'}, {'type':\n    'integer', 'default': '0'}])\n", "UnionProperty = mocker.patch(f'{MODULE_NAME}.UnionProperty')\n", "FloatProperty = mocker.patch(f'{MODULE_NAME}.FloatProperty')\n", "IntProperty = mocker.patch(f'{MODULE_NAME}.IntProperty')\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "FloatProperty.assert_called_once_with(name=name, required=required, default\n    ='0.0')\n", "IntProperty.assert_called_once_with(name=name, required=required, default='0')\n", "UnionProperty.assert_called_once_with(name=name, required=required, default\n    =None, inner_properties=[FloatProperty.return_value, IntProperty.\n    return_value])\n", "assert p == UnionProperty.return_value\n", "def test_property_from_data_union_bad_type(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema(anyOf=[{}])\n", "from openapi_python_client.parser.properties import property_from_data\n", "p = property_from_data(name=name, required=required, data=data)\n", "assert p == PropertyError(detail=f'Invalid property in union {name}', data=\n    oai.Schema())\n", "def test_property_from_data_unsupported_type(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type=mocker.MagicMock())\n", "from openapi_python_client.parser.errors import PropertyError\n", "from openapi_python_client.parser.properties import property_from_data\n", "assert property_from_data(name=name, required=required, data=data\n    ) == PropertyError(data=data, detail=f'unknown type {data.type}')\n", "def test_property_from_data_no_valid_props_in_data(self):...\n", "from openapi_python_client.parser.errors import PropertyError\n", "from openapi_python_client.parser.properties import property_from_data\n", "data = oai.Schema()\n", "assert property_from_data(name='blah', required=True, data=data\n    ) == PropertyError(data=data, detail=\n    'Schemas must either have one of enum, anyOf, or type defined.')\n", "def test__string_based_property_no_format(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type='string')\n", "StringProperty = mocker.patch(f'{MODULE_NAME}.StringProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "p = _string_based_property(name=name, required=required, data=data)\n", "StringProperty.assert_called_once_with(name=name, required=required,\n    pattern=None, default=None)\n", "assert p == StringProperty.return_value\n", "StringProperty.reset_mock()\n", "data.default = mocker.MagicMock()\n", "data.pattern = mocker.MagicMock()\n", "_string_based_property(name=name, required=required, data=data)\n", "StringProperty.assert_called_once_with(name=name, required=required,\n    pattern=data.pattern, default=data.default)\n", "def test__string_based_property_datetime_format(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type='string', schema_format='date-time')\n", "DateTimeProperty = mocker.patch(f'{MODULE_NAME}.DateTimeProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "p = _string_based_property(name=name, required=required, data=data)\n", "DateTimeProperty.assert_called_once_with(name=name, required=required,\n    default=None)\n", "assert p == DateTimeProperty.return_value\n", "DateTimeProperty.reset_mock()\n", "data.default = mocker.MagicMock()\n", "_string_based_property(name=name, required=required, data=data)\n", "DateTimeProperty.assert_called_once_with(name=name, required=required,\n    default=data.default)\n", "def test__string_based_property_date_format(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type='string', schema_format='date')\n", "DateProperty = mocker.patch(f'{MODULE_NAME}.DateProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "p = _string_based_property(name=name, required=required, data=data)\n", "DateProperty.assert_called_once_with(name=name, required=required, default=None\n    )\n", "assert p == DateProperty.return_value\n", "DateProperty.reset_mock()\n", "data.default = mocker.MagicMock()\n", "_string_based_property(name=name, required=required, data=data)\n", "DateProperty.assert_called_once_with(name=name, required=required, default=\n    data.default)\n", "def test__string_based_property_binary_format(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type='string', schema_format='binary')\n", "FileProperty = mocker.patch(f'{MODULE_NAME}.FileProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "p = _string_based_property(name=name, required=required, data=data)\n", "FileProperty.assert_called_once_with(name=name, required=required, default=None\n    )\n", "assert p == FileProperty.return_value\n", "FileProperty.reset_mock()\n", "data.default = mocker.MagicMock()\n", "_string_based_property(name=name, required=required, data=data)\n", "FileProperty.assert_called_once_with(name=name, required=required, default=\n    data.default)\n", "def test__string_based_property_unsupported_format(self, mocker):...\n", "name = mocker.MagicMock()\n", "required = mocker.MagicMock()\n", "data = oai.Schema.construct(type='string', schema_format=mocker.MagicMock())\n", "StringProperty = mocker.patch(f'{MODULE_NAME}.StringProperty')\n", "from openapi_python_client.parser.properties import _string_based_property\n", "p = _string_based_property(name=name, required=required, data=data)\n", "StringProperty.assert_called_once_with(name=name, required=required,\n    pattern=None, default=None)\n", "assert p == StringProperty.return_value\n", "StringProperty.reset_mock()\n", "data.default = mocker.MagicMock()\n", "data.pattern = mocker.MagicMock()\n", "_string_based_property(name=name, required=required, data=data)\n", "StringProperty.assert_called_once_with(name=name, required=required,\n    pattern=data.pattern, default=data.default)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "Import'", "Import'", "ImportFrom'", "Assign'", "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assign'", "Assign'", "Assign'", "Assert'", "Assert'", "Expr'", "Assert'", "Assert'", "Assert'", "Assert'", "Assign'", "Expr'", "Assign'", "Assign'", "Assert'", "Assert'", "Expr'", "Assert'", "Assert'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "ImportFrom'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "FunctionDef'", "Assign'", "Expr'", "Assign'", "ImportFrom'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "FunctionDef'", "ImportFrom'", "Assign'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Expr'", "FunctionDef'", "ImportFrom'", "Assign'", "Assert'", "Assign'", "FunctionDef'", "ImportFrom'", "Assign'", "Assert'", "Assign'", "FunctionDef'", "ImportFrom'", "Assign'", "Assert'", "Expr'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Assert'", "FunctionDef'", "ImportFrom'", "Assign'", "Expr'", "Assign'", "Assign'", "Assert'", "Assign'", "Assert'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "Assert'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assert'", "Expr'", "Condition", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Expr'", "Expr'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", 
"ImportFrom'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "ImportFrom'", "Assert'", "FunctionDef'", "ImportFrom'", "ImportFrom'", "Assign'", "Assert'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'", "Expr'", "Assign'", "Expr'", "Expr'", "FunctionDef'", "Assign'", "Assign'", "Assign'", "Assign'", "ImportFrom'", "Assign'", "Expr'", "Assert'", "Expr'", "Assign'", "Assign'", "Expr'", "Expr'" ]
[ "def __init__(self, VAR_13=None):...\n", "self.root = VAR_13\n", "self.default_template_dir = pkg_resources.resource_filename('synapse',\n 'res/templates')\n" ]
[ "def __init__(self, root_config=None):...\n", "self.root = root_config\n", "self.default_template_dir = pkg_resources.resource_filename('synapse',\n 'res/templates')\n" ]
[ 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Assign'" ]
[ "def FUNC_27(self, VAR_18):...\n", "\"\"\"docstring\"\"\"\n", "VAR_26 = self.get(VAR_18)\n", "VAR_53 = self._doc_before_save.get(VAR_18)\n", "VAR_54 = True\n", "if len(VAR_53) != len(VAR_26):\n", "VAR_54 = False\n", "for i, VAR_21 in enumerate(VAR_53):\n", "return VAR_54\n", "VAR_98 = VAR_26[i].as_dict(convert_dates_to_str=True)\n", "VAR_99 = VAR_21.as_dict(convert_dates_to_str=True)\n", "for VAR_43 in ('modified', 'modified_by', 'creation'):\n", "if VAR_99 != VAR_98:\n", "VAR_54 = False\n" ]
[ "def is_child_table_same(self, fieldname):...\n", "\"\"\"docstring\"\"\"\n", "value = self.get(fieldname)\n", "original_value = self._doc_before_save.get(fieldname)\n", "same = True\n", "if len(original_value) != len(value):\n", "same = False\n", "for i, d in enumerate(original_value):\n", "return same\n", "new_child = value[i].as_dict(convert_dates_to_str=True)\n", "original_child = d.as_dict(convert_dates_to_str=True)\n", "for key in ('modified', 'modified_by', 'creation'):\n", "if original_child != new_child:\n", "same = False\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "Assign'", "Assign'", "Condition", "Assign'", "For", "Return'", "Assign'", "Assign'", "For", "Condition", "Assign'" ]
[ "def FUNC_2(*VAR_2, **VAR_3):...\n", "\"\"\"docstring\"\"\"\n", "VAR_47 = []\n", "VAR_47 += [(arg, None) for arg in VAR_2]\n", "VAR_47 += [(VAR_160, VAR_3[VAR_160]) for VAR_160 in VAR_3]\n", "def FUNC_84(VAR_1):...\n", "def FUNC_83(*VAR_2, **VAR_3):...\n", "VAR_9 = VAR_2[0]\n", "VAR_193 = {'error': 'Missing required query parameter(s)', 'parameters': [],\n 'info': {}}\n", "for VAR_209, extra in VAR_47:\n", "VAR_199 = object()\n", "if len(VAR_193['parameters']) > 0:\n", "if VAR_9.POST.get(VAR_209, VAR_199) == VAR_199:\n", "return JsonResponse(VAR_193, status=400)\n", "return VAR_1(*VAR_2, **kwargs)\n", "VAR_193['parameters'].append(VAR_209)\n", "VAR_193['info'][VAR_209] = extra\n" ]
[ "def require_post_params(*args, **kwargs):...\n", "\"\"\"docstring\"\"\"\n", "required_params = []\n", "required_params += [(arg, None) for arg in args]\n", "required_params += [(key, kwargs[key]) for key in kwargs]\n", "def decorator(func):...\n", "def wrapped(*args, **kwargs):...\n", "request = args[0]\n", "error_response_data = {'error': 'Missing required query parameter(s)',\n 'parameters': [], 'info': {}}\n", "for param, extra in required_params:\n", "default = object()\n", "if len(error_response_data['parameters']) > 0:\n", "if request.POST.get(param, default) == default:\n", "return JsonResponse(error_response_data, status=400)\n", "return func(*args, **kwargs)\n", "error_response_data['parameters'].append(param)\n", "error_response_data['info'][param] = extra\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Docstring", "Assign'", "AugAssign'", "AugAssign'", "FunctionDef'", "FunctionDef'", "Assign'", "Assign'", "For", "Assign'", "Condition", "Condition", "Return'", "Return'", "Expr'", "Assign'" ]
[ "def FUNC_5(VAR_3, VAR_8, VAR_6, VAR_4, VAR_9):...\n", "VAR_50 = False\n", "if VAR_4 == 'languages':\n", "VAR_84 = VAR_8.lang_code\n", "if VAR_4 == 'custom':\n", "for VAR_11 in VAR_9:\n", "VAR_84 = VAR_8.value\n", "VAR_84 = VAR_8.name\n", "VAR_10 = VAR_6.query(VAR_8).filter(VAR_84 == VAR_11).first()\n", "return VAR_50\n", "if VAR_4 == 'author':\n", "VAR_101 = VAR_8(VAR_11, helper.get_sorted_author(VAR_11.replace('|', ',')), '')\n", "if VAR_4 == 'series':\n", "if VAR_10 is None:\n", "VAR_101 = VAR_8(VAR_11, VAR_11)\n", "if VAR_4 == 'custom':\n", "VAR_50 = True\n", "VAR_10 = FUNC_6(VAR_10, VAR_11, VAR_4)\n", "VAR_101 = VAR_8(value=add_element)\n", "if VAR_4 == 'publisher':\n", "VAR_6.add(VAR_101)\n", "VAR_50 = True\n", "VAR_101 = VAR_8(VAR_11, None)\n", "VAR_101 = VAR_8(VAR_11)\n", "VAR_3.append(VAR_101)\n", "VAR_50 = True\n", "VAR_3.append(VAR_10)\n" ]
[ "def add_objects(db_book_object, db_object, db_session, db_type, add_elements):...\n", "changed = False\n", "if db_type == 'languages':\n", "db_filter = db_object.lang_code\n", "if db_type == 'custom':\n", "for add_element in add_elements:\n", "db_filter = db_object.value\n", "db_filter = db_object.name\n", "db_element = db_session.query(db_object).filter(db_filter == add_element\n ).first()\n", "return changed\n", "if db_type == 'author':\n", "new_element = db_object(add_element, helper.get_sorted_author(add_element.\n replace('|', ',')), '')\n", "if db_type == 'series':\n", "if db_element is None:\n", "new_element = db_object(add_element, add_element)\n", "if db_type == 'custom':\n", "changed = True\n", "db_element = create_objects_for_addition(db_element, add_element, db_type)\n", "new_element = db_object(value=add_element)\n", "if db_type == 'publisher':\n", "db_session.add(new_element)\n", "changed = True\n", "new_element = db_object(add_element, None)\n", "new_element = db_object(add_element)\n", "db_book_object.append(new_element)\n", "changed = True\n", "db_book_object.append(db_element)\n" ]
[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
[ "FunctionDef'", "Assign'", "Condition", "Assign'", "Condition", "For", "Assign'", "Assign'", "Assign'", "Return'", "Condition", "Assign'", "Condition", "Condition", "Assign'", "Condition", "Assign'", "Assign'", "Assign'", "Condition", "Expr'", "Assign'", "Assign'", "Assign'", "Expr'", "Assign'", "Expr'" ]