text
stringlengths
2
6.14k
import React from 'react'; import { ErrorField } from 'uniforms-bootstrap5'; import createContext from './_createContext'; import mount from './_mount'; const error = { error: 'validation-error', reason: 'X is required', details: [{ name: 'x', type: 'required', details: { value: null } }], message: 'X is required [validation-error]', }; test('<ErrorField> - works', () => { const element = <ErrorField name="x" />; const wrapper = mount(element, createContext({ x: { type: String } })); expect(wrapper.find(ErrorField)).toHaveLength(1); }); test('<ErrorField> - renders correct error message (context)', () => { const element = <ErrorField name="x" />; const wrapper = mount( element, createContext({ x: { type: String } }, { error }), ); expect(wrapper.find(ErrorField)).toHaveLength(1); expect(wrapper.find(ErrorField).text()).toBe('X is required'); }); test('<ErrorField> - renders correct error message (specified)', () => { const element = ( <ErrorField name="x" error={error.details[0]} errorMessage="X is required" /> ); const wrapper = mount(element, createContext({ x: { type: String } })); expect(wrapper.find(ErrorField)).toHaveLength(1); expect(wrapper.find(ErrorField).text()).toBe('X is required'); }); test('<ErrorField> - renders correct children if specified', () => { const element = <ErrorField name="x">Error</ErrorField>; const wrapper = mount( element, createContext({ x: { type: String } }, { error }), ); expect(wrapper.find(ErrorField)).toHaveLength(1); expect(wrapper.find(ErrorField).text()).toBe('Error'); });
package com.bitdubai.fermat_dmp_plugin.layer.world.blockchain_info.developer.bitdubai.version_1.structure.api_v_1.wallet; import junit.framework.TestCase; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.runners.MockitoJUnitRunner; import java.util.ArrayList; import java.util.List; /** * Created by leon on 5/6/15. */ @RunWith(MockitoJUnitRunner.class) public class GetAddressesTest extends TestCase { String apiCode = "91c646ef-c3fd-4dd0-9dc9-eba5c5600549"; Wallet wallet; @Before public void setUp() { wallet = new Wallet("7a9dc256-0e67-441f-886a-c4364fec9369", "Blockchain91-"); wallet.setApiCode(apiCode); } @Test public void testGetBalance_NotNull() { List<Address> addresses = new ArrayList<>(); try { addresses = wallet.listAddresses(0); } catch (Exception e) { System.out.println(e); } for (Address address : addresses){ System.out.println("i'm an address: "+address.getAddress()); } } }
<?php /** * @testCase * @dataProvider ../../dbals.ini */ namespace NextrasTests\Migrations; use Mockery; use Nextras\Migrations\Engine\Runner; use Tester; use Tester\Assert; require __DIR__ . '/../../bootstrap.php'; class FirstRunTest extends IntegrationTestCase { public function testReset() { $this->runner->run(Runner::MODE_RESET); Assert::same([ 'Nextras Migrations', 'RESET', '5 migrations need to be executed.', '- structures/001.sql; 1 queries; XX ms', '- structures/002.sql; 1 queries; XX ms', '- basic-data/003.sql; 2 queries; XX ms', '- dummy-data/004.sql; 1 queries; XX ms', '- structures/005.sql; 1 queries; XX ms', 'OK', ], $this->printer->lines); $migrations = $this->driver->getAllMigrations(); Assert::count(5, $migrations); Assert::same('001.sql', $migrations[0]->filename); Assert::type('string', $migrations[0]->checksum); Assert::same(TRUE, $migrations[0]->completed); Assert::type('DateTime', $migrations[0]->executedAt); Assert::same('structures', $migrations[0]->group); Assert::same('002.sql', $migrations[1]->filename); Assert::type('string', $migrations[1]->checksum); Assert::same(TRUE, $migrations[1]->completed); Assert::type('DateTime', $migrations[1]->executedAt); Assert::same('structures', $migrations[1]->group); Assert::same('003.sql', $migrations[2]->filename); Assert::type('string', $migrations[2]->checksum); Assert::same(TRUE, $migrations[2]->completed); Assert::type('DateTime', $migrations[2]->executedAt); Assert::same('basic-data', $migrations[2]->group); } public function testContinue() { $this->runner->run(Runner::MODE_CONTINUE); Assert::same([ 'Nextras Migrations', 'CONTINUE', '5 migrations need to be executed.', '- structures/001.sql; 1 queries; XX ms', '- structures/002.sql; 1 queries; XX ms', '- basic-data/003.sql; 2 queries; XX ms', '- dummy-data/004.sql; 1 queries; XX ms', '- structures/005.sql; 1 queries; XX ms', 'OK', ], $this->printer->lines); $migrations = $this->driver->getAllMigrations(); Assert::count(5, $migrations); 
Assert::same('001.sql', $migrations[0]->filename); Assert::type('string', $migrations[0]->checksum); Assert::same(TRUE, $migrations[0]->completed); Assert::type('DateTime', $migrations[0]->executedAt); Assert::same('structures', $migrations[0]->group); Assert::same('002.sql', $migrations[1]->filename); Assert::type('string', $migrations[1]->checksum); Assert::same(TRUE, $migrations[1]->completed); Assert::type('DateTime', $migrations[1]->executedAt); Assert::same('structures', $migrations[1]->group); Assert::same('003.sql', $migrations[2]->filename); Assert::type('string', $migrations[2]->checksum); Assert::same(TRUE, $migrations[2]->completed); Assert::type('DateTime', $migrations[2]->executedAt); Assert::same('basic-data', $migrations[2]->group); } public function testInit() { $options = Tester\Environment::loadData(); $this->runner->run(Runner::MODE_INIT); $files = [ __DIR__ . "/Runner.FirstRun.init.$options[driver].$options[dbal].txt", __DIR__ . "/Runner.FirstRun.init.$options[driver].txt", ]; foreach ($files as $file) { if (is_file($file)) { Assert::matchFile($file, $this->printer->out); break; } } } } (new FirstRunTest)->run();
require("chai") .use(require("chai-as-promised")) .should() const { expect } = require("chai") // Contracts const TransactionRecorder = artifacts.require("./TransactionRecorder.sol") const TransactionRequestCore = artifacts.require("./TransactionRequestCore.sol") const { waitUntilBlock } = require("@digix/tempo")(web3) // Bring in config.web3 (v1.0.0) const config = require("../../config") const { RequestData, parseAbortData, wasAborted } = require("../dataHelpers.js") contract("tests execution rejected if cancelled", async (accounts) => { it("will reject the execution if it was cancelled", async () => { const Owner = accounts[0] const gasPrice = config.web3.utils.toWei("66", "gwei") const requiredDeposit = config.web3.utils.toWei("66", "kwei") // TransactionRequest constants const claimWindowSize = 25 // blocks const freezePeriod = 5 // blocks const reservedWindowSize = 10 // blocks const executionWindow = 10 // blocks const curBlockNum = await config.web3.eth.getBlockNumber() const windowStart = curBlockNum + 38 const txRecorder = await TransactionRecorder.new() const txRequest = await TransactionRequestCore.new() await txRequest.initialize( [ Owner, // createdBy Owner, // owner accounts[1], // fee recipient txRecorder.address, // toAddress ], [ 0, // fee 0, // bounty claimWindowSize, freezePeriod, reservedWindowSize, 1, // temporalUnit = 1, aka blocks executionWindow, windowStart, 2000000, // callGas 0, // callValue gasPrice, requiredDeposit, ], "some-call-data-could-be-anything", { value: config.web3.utils.toWei("1") } ) const requestData = await RequestData.from(txRequest) expect(await txRecorder.wasCalled()).to.be.false expect(requestData.meta.wasCalled).to.be.false expect(requestData.meta.isCancelled).to.be.false const cancelTx = await txRequest.cancel({ from: Owner }) expect(cancelTx.receipt).to.exist await requestData.refresh() expect(requestData.meta.isCancelled).to.be.true await waitUntilBlock(0, windowStart) const executeTx = await txRequest.execute({ 
gas: 3000000, gasPrice, }) await requestData.refresh() expect(await txRecorder.wasCalled()).to.be.false expect(requestData.meta.wasCalled).to.be.false expect(wasAborted(executeTx)).to.be.true expect(parseAbortData(executeTx).find(reason => reason === "WasCancelled")).to.exist }) })
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Utility functions.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function # pylint: disable=unused-import,line-too-long,wildcard-import from tensorflow.contrib.kfac.python.ops.utils import * from tensorflow.python.util.all_util import remove_undocumented # pylint: enable=unused-import,line-too-long,wildcard-import _allowed_symbols = [ "set_global_constants", "SequenceDict", "tensors_to_column", "column_to_tensors", "kronecker_product", "layer_params_to_mat2d", "mat2d_to_layer_params", "posdef_inv", "posdef_inv_matrix_inverse", "posdef_inv_cholesky", "posdef_inv_funcs", "SubGraph", "generate_random_signs", "fwd_gradients", "ensure_sequence", "batch_execute", ] remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
#ifndef CONFIGUREVEHICLE_H #define CONFIGUREVEHICLE_H #include <QWidget> #include <QSqlTableModel> #include "vehicledisplaymodel.h" #include "vehiclemodel.h" #include "ui_configurevehiclebase.h" class ConfigureVehicle : public QWidget,private Ui_configureVehicleBase { Q_OBJECT public: explicit ConfigureVehicle(QWidget *parent = nullptr); ~ConfigureVehicle(); signals: public slots: void saveVehicle(); void refreshTable(); //public: //void createModel(); //void deleteModel(); private: Ui_configureVehicleBase vehicleBase; VehicleModel *model; }; #endif // CONFIGUREVEHICLE_H
import { Attribute } from './Attribute'; import { VertexAttribPointer } from './VertexAttribPointer'; /** * @hidden */ export declare function computePointers(attributes: { [name: string]: Attribute; }, aNames: string[]): VertexAttribPointer[];
#!/usr/bin/python -tt # Copyright 2010 Google Inc. # Licensed under the Apache License, Version 2.0 # http://www.apache.org/licenses/LICENSE-2.0 # Google's Python Class # http://code.google.com/edu/languages/google-python-class/ # Basic list exercises # Fill in the code for the functions below. main() is already set up # to call the functions with a few different inputs, # printing 'OK' when each function is correct. # The starter code for each function includes a 'return' # which is just a placeholder for your code. # It's ok if you do not complete all the functions, and there # are some additional functions to try in list2.py. # A. match_ends # Given a list of strings, return the count of the number of # strings where the string length is 2 or more and the first # and last chars of the string are the same. # Note: python does not have a ++ operator, but += works. def match_ends(words): # +++your code here+++ count = 0 for word in words: if len(word)>=2 and word[0]==word[-1]: count +=1 return count # B. front_x # Given a list of strings, return a list with the strings # in sorted order, except group all the strings that begin with 'x' first. # e.g. ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields # ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'] # Hint: this can be done by making 2 lists and sorting each of them # before combining them. def front_x(words): # +++your code here+++ x_list=[] n_list=[] for word in words: if word[0]=='x': x_list.append(word) else: n_list.append(word) return sorted(x_list) + sorted(n_list) # C. sort_last # Given a list of non-empty tuples, return a list sorted in increasing # order by the last element in each tuple. # e.g. [(1, 7), (1, 3), (3, 4, 5), (2, 2)] yields # [(2, 2), (1, 3), (3, 4, 5), (1, 7)] # Hint: use a custom key= function to extract the last element form each tuple. 
def sort_last(tuples): # +++your code here+++ def last_in_tuple(elem): return elem[1] return sorted(tuples, key=last_in_tuple) # Simple provided test() function used in main() to print # what each function returns vs. what it's supposed to return. def test(got, expected): if got == expected: prefix = ' OK ' else: prefix = ' X ' print '%s got: %s expected: %s' % (prefix, repr(got), repr(expected)) # Calls the above functions with interesting inputs. def main(): print 'match_ends' test(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3) test(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2) test(match_ends(['aaa', 'be', 'abc', 'hello']), 1) print print 'front_x' test(front_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']), ['xaa', 'xzz', 'axx', 'bbb', 'ccc']) test(front_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']), ['xaa', 'xcc', 'aaa', 'bbb', 'ccc']) test(front_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']), ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']) print print 'sort_last' test(sort_last([(1, 3), (3, 2), (2, 1)]), [(2, 1), (3, 2), (1, 3)]) test(sort_last([(2, 3), (1, 2), (3, 1)]), [(3, 1), (1, 2), (2, 3)]) test(sort_last([(1, 7), (1, 3), (3, 4, 5), (2, 2)]), [(2, 2), (1, 3), (3, 4, 5), (1, 7)]) if __name__ == '__main__': main()
/* * See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. */ package org.xwiki.store.filesystem.internal.migration; import java.io.File; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import javax.inject.Named; import javax.inject.Singleton; import org.hibernate.HibernateException; import org.xwiki.component.annotation.Component; import org.xwiki.model.reference.AttachmentReference; import com.xpn.xwiki.store.migration.XWikiDBVersion; /** * Migration for XWIKI-14697. Make sure all attachments have the right content store id. * * @version $Id$ * @since 9.10RC1 */ @Component @Named("R910000XWIKI14697") @Singleton public class R910000XWIKI14697DataMigration extends AbstractXWIKI14697DataMigration { /** * The default constructor. 
*/ public R910000XWIKI14697DataMigration() { super("XWikiAttachmentContent", "contentStore"); } @Override public String getDescription() { return "Make sure all attachments have the right content store id."; } @Override public XWikiDBVersion getVersion() { return new XWikiDBVersion(910000); } @Override protected boolean isFile(AttachmentReference attachmentReference) { File attachmentFolder = getAttachmentDir(attachmentReference); try { return new File(attachmentFolder, URLEncoder.encode(attachmentReference.getName(), "UTF8")).exists(); } catch (UnsupportedEncodingException e) { throw new HibernateException("UTF8 is unknown", e); } } }
package org.cache2k.core.eviction; /* * #%L * cache2k core implementation * %% * Copyright (C) 2000 - 2021 headissue GmbH, Munich * %% * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * #L% */ import org.cache2k.core.Entry; import org.cache2k.core.IntegrityState; import java.util.function.Supplier; /** * Interface to the eviction data structure. * * @author Jens Wilke */ @SuppressWarnings("rawtypes") public interface Eviction { /** * Submit to eviction for inserting or removing from the replacement list. * However, eviction should not be triggered (which in turn triggers a hash table * update) since the hash segment lock is hold at the moment. */ boolean submitWithoutTriggeringEviction(Entry e); /** * Updates the weight on the entry and recalculates the total weight if needed. * * <p>Expected not to hold the entry lock, which means, that this does not run * in sync with the actual update. That is okay as long as it runs after every * update. * * <p>Since we need to lock the eviction structure, this can happen in a separate thread. * * @return hint whether eviction should be run. for a bulk operation we want to do * eviction once, so not do it within this method */ boolean updateWeight(Entry e); /** * Evict if needed, focused on the segment addressed by the hash code. * Called before a new entry is inserted (changed from after in v1.4) */ void evictEventuallyBeforeInsertOnSegment(int hashCodeHint); /** * Evict if needed, checks all segments. 
* Called before a new entry is inserted (changed from after in v1.4) */ void evictEventuallyBeforeInsert(); /** * Evict if needed, checks all segments. */ void evictEventually(); /** * Remove all entries from the eviction data structure. * * @return entry count */ long removeAll(); /** * Drain eviction queue and do updates in the eviction data structures. * Does no eviction when size limit is reached. * * @return true, if eviction is needed */ boolean drain(); /** * Start concurrent eviction threads. */ void start(); /** * Stop concurrent threads that may access the eviction data structures. * Needs to be called before checkIntegrity or accessing the counter * values. */ void stop(); /** * Free resources, for example thread pool or queue. */ void close(); /** * Runs job making sure concurrent evictions operations pause. */ <T> T runLocked(Supplier<T> j); void checkIntegrity(IntegrityState integrityState); /** * Get metrics related to the eviction. No lock is needed before calling this method. * Best efforts are mad to extract consistent statistics and interruption short. */ EvictionMetrics getMetrics(); boolean isWeigherPresent(); /** * Change the capacity. If capacity is reduced, it will evict entries * before returning. */ void changeCapacity(long entryCountOrWeight); }
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals from __future__ import print_function import apache_beam as beam import pandas as pd import dialog_pb2 import movie_lens_rating_pb2 as rating_pb2 def encode_line_as_rating_proto(line): """ Parses a comma separated value of movielens rating as MovieLensRating proto message""" tokens = line.split(",") rating = rating_pb2.MovieLensRating( user_id=int(tokens[0]), movie_id=int(tokens[1]), rating=float(tokens[2]), timestamp=int(tokens[3])) return rating def map_seq_to_dialog(user_id_and_watch_seq, movies_dict, substitute_movie_id_with_title): """ Generates a Dialog proto message from the given sequence of liked movies. """ user_id = user_id_and_watch_seq[0] watch_seq = user_id_and_watch_seq[1] if substitute_movie_id_with_title: input_movie_titles_seq = " @ ".join( movies_dict[int(id)] for id in watch_seq[:-1]) target_movie_title = movies_dict[int(watch_seq[-1])] else: input_movie_titles_seq = " ".join("@" + str(id) for id in watch_seq[:-1]) target_movie_title = "@" + str(watch_seq[-1]) dialog_events = [] dialog_events.append( dialog_pb2.DialogEvent( speaker="USER", utterance=input_movie_titles_seq.encode("utf-8"), time_ms=1)) dialog_events.append( dialog_pb2.DialogEvent( speaker="ASSISTANT", utterance=target_movie_title.encode("utf-8"), time_ms=2)) dialog = dialog_pb2.Dialog( source="conversation_{}".format(user_id), events=dialog_events) return dialog def create_movies_dict(movies_path): """ returns a dictionary of movie Id to movie title.""" with open(movies_path) as fh: movies_df = pd.read_csv(fh) movies_df = movies_df.set_index(["movieId"]) return movies_df["title"].to_dict() def create_pipeline(input_ratings_path, movies_dict_path, liked_threshold, num_ratings_per_user, substitute_movie_id_with_title): movies_dict = create_movies_dict(movies_dict_path) def pipeline(root): user_watch_seq = ( root | "CreateRatings" >> beam.io.ReadFromText( input_ratings_path, 
skip_header_lines=1) | "EncodeAsProto" >> beam.Map(lambda line: encode_line_as_rating_proto(line)) | "FilterByRatings" >> beam.Filter(lambda rating: rating.rating >= liked_threshold) | "SetUserIdAsKey" >> beam.Map(lambda rating: (rating.user_id, rating)) | "GroupByUser" >> beam.GroupByKey() | "ConvertValueToList" >> beam.Map(lambda kv: (kv[0], list(kv[1]))) | "FilterByUserRatingsCount" >> beam.Filter(lambda kv: len(kv[1]) >= num_ratings_per_user) | "SortRatingsByTimestamp" >> beam.Map(lambda kv: (kv[ 0 ], list(sorted(kv[1], key=lambda rating_proto: rating_proto.timestamp))) ) | "KeepLatestRatings" >> beam.Map(lambda kv: (kv[0], kv[1][-num_ratings_per_user:])) | "KeepOnlyMovieIdList" >> beam.Map(lambda kv: (kv[0], [rating.movie_id for rating in kv[1]]))) user_watch_dialog = ( user_watch_seq | "WatchSeqToDialog" >> beam.Map(map_seq_to_dialog, movies_dict, substitute_movie_id_with_title)) return user_watch_seq, user_watch_dialog return pipeline
const header = '[TSD-JSDoc]'; let isVerbose = false; export function setVerbose(value: boolean) { isVerbose = value; } export function warn(msg: string, data?: any) { if (typeof(console) === 'undefined') return; let prefix = header; if (data && data.meta) { const meta = data.meta; prefix = `${prefix} ${meta.filename}:${meta.lineno}:${meta.columnno}`; } console.warn(`${prefix} ${msg}`); if (isVerbose && arguments.length > 1) { console.warn(data); } if (isDebug) { // `console.warn()` pushes in stderr. // Let's push the message in stdout with `console.log()` as well, as `debug()` does for a better debugging experience. console.log(`${header} WARN: ${msg}`); if (arguments.length > 1) { console.log(data); } } } let isDebug = false; export function setDebug(value: boolean) { isDebug = value; } export function debug(msg: string, data?: any) { if (typeof(console) === 'undefined') return; if (isDebug) { // Mix of tsd-jsdoc header with the jsdoc pattern on 'debug' option activated. console.log(`${header} DEBUG: ${msg}`); if (arguments.length > 1) { console.log(data); } } } export function docletDebugInfo(doclet: TAnyDoclet) : string { let debugInfo = `{longname='${doclet.longname}', kind='${doclet.kind}'`; if ((doclet.kind !== 'package') && doclet.meta) { if (doclet.meta.code.id) debugInfo += `, id='${doclet.meta.code.id}'`; if (doclet.meta.range) debugInfo += `, range=[${doclet.meta.range[0]}-${doclet.meta.range[1]}]`; else if (doclet.meta.lineno) debugInfo += `, lineno=${doclet.meta.lineno}`; } debugInfo += `}`; return debugInfo; }
// ********************************************************************** // // Copyright (c) 2003-2015 ZeroC, Inc. All rights reserved. // // This copy of Ice is licensed to you under the terms described in the // ICE_LICENSE file included in this distribution. // // ********************************************************************** #ifndef ICE_RESPONSE_HANDLER_H #define ICE_RESPONSE_HANDLER_H #include <IceUtil/Shared.h> #include <IceUtil/Handle.h> #include <Ice/Config.h> #include <Ice/LocalException.h> #include <Ice/ResponseHandlerF.h> namespace IceInternal { class BasicStream; class ResponseHandler : virtual public ::IceUtil::Shared { public: virtual ~ResponseHandler(); virtual void sendResponse(Ice::Int, BasicStream*, Ice::Byte, bool) = 0; virtual void sendNoResponse() = 0; virtual bool systemException(Ice::Int, const Ice::SystemException&, bool) = 0; virtual void invokeException(Ice::Int, const Ice::LocalException&, int, bool) = 0; }; } #endif
/* * Copyright (C) 2016 TIBCO Jaspersoft Corporation. All rights reserved. * http://community.jaspersoft.com/project/mobile-sdk-android * * Unless you have purchased a commercial license agreement from TIBCO Jaspersoft, * the following license terms apply: * * This program is part of TIBCO Jaspersoft Mobile SDK for Android. * * TIBCO Jaspersoft Mobile SDK is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * TIBCO Jaspersoft Mobile SDK is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with TIBCO Jaspersoft Mobile SDK for Android. If not, see * <http://www.gnu.org/licenses/lgpl>. 
*/ package com.jaspersoft.android.sdk.service.report; import com.jaspersoft.android.sdk.env.JrsEnvironmentRule; import com.jaspersoft.android.sdk.env.ReportTestBundle; import com.jaspersoft.android.sdk.service.data.report.PageRange; import com.jaspersoft.android.sdk.service.exception.ServiceException; import junitparams.JUnitParamsRunner; import junitparams.Parameters; import org.junit.ClassRule; import org.junit.Test; import org.junit.runner.RunWith; import java.util.List; /** * @author Tom Koptel * @since 2.3 */ @RunWith(JUnitParamsRunner.class) public class ReportServiceTest { @ClassRule public static JrsEnvironmentRule sEnv = new JrsEnvironmentRule(); @Test @Parameters(method = "reports") public void report_service_should_export(ReportTestBundle bundle) throws Exception { ReportExecution execution = runReport(bundle); ReportExportOptions exportOptions = ReportExportOptions.builder() .withPageRange(PageRange.parse("1")) .withFormat(ReportFormat.HTML) .build(); ReportExport export = execution.export(exportOptions); List<ReportAttachment> attachments = export.getAttachments(); for (ReportAttachment reportAttachment : attachments) { reportAttachment.download(); } export.download(); } @Test @Parameters(method = "reports") public void report_service_should_await_complete_event(ReportTestBundle bundle) throws Exception { ReportExecution execution = runReport(bundle); execution.waitForReportCompletion(); } @Test @Parameters(method = "reports") public void report_service_should_update_execution(ReportTestBundle bundle) throws Exception { if (bundle.hasParams()) { ReportExecution execution = runReport(bundle); execution.updateExecution(bundle.getParams()); } } private ReportExecution runReport(ReportTestBundle bundle) throws ServiceException { ReportService reportService = ReportService.newService(bundle.getClient()); ReportExecutionOptions executionOptions = ReportExecutionOptions.builder() .withFormat(ReportFormat.HTML) .withFreshData(true) .withInteractive(true) 
.build(); return reportService.run(bundle.getUri(), executionOptions); } private Object[] reports() { return sEnv.listReports(); } }
/* * Copyright (c) 2011-2022, The DART development contributors * All rights reserved. * * The list of contributors can be found at: * https://github.com/dartsim/dart/blob/master/LICENSE * * This file is provided under the following "BSD-style" License: * Redistribution and use in source and binary forms, with or * without modification, are permitted provided that the following * conditions are met: * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above * copyright notice, this list of conditions and the following * disclaimer in the documentation and/or other materials provided * with the distribution. * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND * CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, * INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ #ifndef DART_CONSTRAINT_PGSBOXEDLCPSOLVER_HPP_ #define DART_CONSTRAINT_PGSBOXEDLCPSOLVER_HPP_ #include <vector> #include "dart/constraint/BoxedLcpSolver.hpp" namespace dart { namespace constraint { /// Implementation of projected Gauss-Seidel (PGS) LCP solver. 
class PgsBoxedLcpSolver : public BoxedLcpSolver { public: struct Option { int mMaxIteration; double mDeltaXThreshold; double mRelativeDeltaXTolerance; double mEpsilonForDivision; bool mRandomizeConstraintOrder; Option( int maxIteration = 30, double deltaXTolerance = 1e-6, double relativeDeltaXTolerance = 1e-3, double epsilonForDivision = 1e-9, bool randomizeConstraintOrder = false); }; // Documentation inherited. const std::string& getType() const override; /// Returns type for this class static const std::string& getStaticType(); // Documentation inherited. bool solve( int n, double* A, double* x, double* b, int nub, double* lo, double* hi, int* findex, bool earlyTermination) override; #ifndef NDEBUG // Documentation inherited. bool canSolve(int n, const double* A) override; #endif /// Sets options void setOption(const Option& option); /// Returns options. const Option& getOption() const; protected: Option mOption; mutable std::vector<int> mCacheOrder; mutable std::vector<double> mCacheD; mutable Eigen::VectorXd mCachedNormalizedA; mutable Eigen::MatrixXd mCachedNormalizedB; mutable Eigen::VectorXd mCacheZ; mutable Eigen::VectorXd mCacheOldX; }; } // namespace constraint } // namespace dart #endif // DART_CONSTRAINT_PGSBOXEDLCPSOLVER_HPP_
from django.contrib.gis.geos.collections import MultiPolygon
from django.contrib.gis.geos.polygon import Polygon

# EPSG SRID for the WGS-84 geographic coordinate system.
WGS_84 = 4326

MIN_LONGITUDE_DEGREES = -180
MAX_LONGITUDE_DEGREES = +180


class UTMZone(object):
    """
    Universal Transverse Mercator (UTM) zone.

    A zone is identified by a hemisphere ('north'/'south') and a zone number
    (1..60); its EPSG SRID is ``326xx`` (north) or ``327xx`` (south).
    """

    # EPSG SRID prefixes per hemisphere (e.g. north zone 33 -> SRID 32633).
    HEMISPHERE_PREFIXES = dict(north=326, south=327)
    NUMBER_OF_ZONES_PER_HEMISPHERE = 60
    VALID_ZONE_NUMBERS = range(1, NUMBER_OF_ZONES_PER_HEMISPHERE + 1)
    ZONE_WIDTH_DEGREES = 360 / NUMBER_OF_ZONES_PER_HEMISPHERE

    # How far (in degrees) from an UTM zone's central meridian the re-projection
    # (coordinate transformation) from WGS-84 to the UTM zone in question still works:
    MAX_LONGITUDE_OFFSET = 90.0 - 9.9e-14  # Found experimentally to work with both PostGIS and GeoDjango.

    def __init__(self, hemisphere, utm_zone_number):
        assert hemisphere in self.HEMISPHERE_PREFIXES
        assert utm_zone_number in self.VALID_ZONE_NUMBERS
        self.hemisphere = hemisphere
        self.utm_zone_number = utm_zone_number
        self._prepared_domain = None  # Will be lazily set by self.domain

    def can_represent(self, geom):
        """Return True if `geom` (any SRID) lies entirely in this zone's usable domain."""
        return self.domain.covers(geom.transform(WGS_84, clone=True))

    @property
    def domain(self):
        # Prepared geometry is cached because covers() is called per zone per geom.
        if self._prepared_domain is None:
            self._prepared_domain = self._computed_domain.prepared
        return self._prepared_domain

    @property
    def _computed_domain(self):
        xmin, ymin, xmax, ymax = (
            wrap_longitude_degrees(self.central_meridian_longitude_degrees - self.MAX_LONGITUDE_OFFSET),
            -90,
            wrap_longitude_degrees(self.central_meridian_longitude_degrees + self.MAX_LONGITUDE_OFFSET),
            90,
        )
        if xmin <= xmax:
            domain = Polygon.from_bbox((xmin, ymin, xmax, ymax))
            domain.srid = WGS_84
            return domain
        else:
            # cut at idealized international date line
            return MultiPolygon(
                Polygon.from_bbox((xmin, ymin, MAX_LONGITUDE_DEGREES, ymax)),
                Polygon.from_bbox((MIN_LONGITUDE_DEGREES, ymin, xmax, ymax)),
                srid=WGS_84,
            )

    @property
    def srid(self):
        return self.HEMISPHERE_PREFIXES[self.hemisphere] * 100 + self.utm_zone_number

    @property
    def central_meridian_longitude_degrees(self):
        # Zone n spans [MIN + (n-1)*width, MIN + n*width]; its meridian is the midpoint.
        return MIN_LONGITUDE_DEGREES + (self.utm_zone_number - 0.5) * self.ZONE_WIDTH_DEGREES

    def __eq__(self, other):
        # BUG FIX: the original read
        #   return self.hemisphere, self.utm_zone_number == other.hemisphere, other.utm_zone_number
        # which builds a 3-tuple (always truthy) instead of comparing the two
        # (hemisphere, zone_number) pairs, so *every* zone compared equal.
        # This also violated the __eq__/__hash__ contract used by ALL_UTM_ZONES.
        return (self.hemisphere, self.utm_zone_number) == (other.hemisphere, other.utm_zone_number)

    def __hash__(self):
        return hash((self.hemisphere, self.utm_zone_number))

    def __str__(self):
        return "UTM Zone {zone_number}, {hemisphere}ern hemisphere".format(
            zone_number=self.utm_zone_number,
            hemisphere=self.hemisphere,
        )

    def __repr__(self):
        return "{cls}({hemisphere}, {zone_number})".format(
            cls=type(self).__name__,
            zone_number=self.utm_zone_number,
            hemisphere=repr(self.hemisphere),
        )


UTM_ZONE_NUMBERS = UTMZone.VALID_ZONE_NUMBERS

ALL_UTM_ZONES = frozenset(
    UTMZone(hs, nr)
    for hs in UTMZone.HEMISPHERE_PREFIXES
    for nr in UTM_ZONE_NUMBERS
)


def utm_zones_for_representing(geom):
    """Return the frozenset of UTM zones whose domain fully covers `geom`."""
    return frozenset(zone for zone in ALL_UTM_ZONES if zone.can_represent(geom))


def wrap_longitude_degrees(longitude_degrees):
    """Wrap a longitude into [-180, +180] degrees."""
    return confine(longitude_degrees, MIN_LONGITUDE_DEGREES, MAX_LONGITUDE_DEGREES)


def confine(value, lower_bound, upper_bound):
    """Map `value` into [lower_bound, upper_bound] by modular wrapping."""
    modulus = upper_bound - lower_bound
    result = (value - lower_bound) % modulus + lower_bound
    assert lower_bound <= result <= upper_bound
    return result
/*
 *	ChartAxis.cpp
 *
 *	Written by Cédric Moonen (cedric_moonen@hotmail.com)
 *
 *	This code may be used for any non-commercial and commercial purposes in a compiled form.
 *	The code may be redistributed as long as it remains unmodified and providing that the
 *	author name and this disclaimer remain intact. The sources can be modified WITH the author
 *	consent only.
 *
 *	This code is provided without any garanties. I cannot be held responsible for the damage or
 *	the loss of time it causes. Use it at your own risks
 *
 *	An e-mail to notify me that you are using this code is appreciated also.
 */

#include "stdafx.h"
#include "ChartStandardAxis.h"
#include "ChartCtrl.h"

#include <sstream>
#include <iostream>
#include <iomanip>
#include <math.h>

using namespace std;

// Default construction: first tick at 0, increment 1, no decimal places.
CChartStandardAxis::CChartStandardAxis()
 : CChartAxis(), m_dFirstTickValue(0), m_dTickIncrement(1.0),
   m_uDecCount(0)
{
}

CChartStandardAxis::~CChartStandardAxis()
{
}

// Switches between automatic and manual tick spacing. In manual mode the
// number of displayed decimals is derived from the increment's magnitude.
void CChartStandardAxis::SetTickIncrement(bool bAuto, double newIncrement)
{
	m_bAutoTicks = bAuto;
	if (!m_bAutoTicks)
	{
		m_dTickIncrement = newIncrement;
		// Zeros is the base-10 exponent of the increment (negative for < 1).
		int Zeros = (int)floor(log10(m_dTickIncrement));
		int Digits = 0;
		if (Zeros<0)
		{
			//We must set decimal places. In the other cases, Digits will be 0.
			Digits = (int)fabs(Zeros*1.0);
		}
		SetDecimals(Digits);
	}
}

// Returns the value of the first tick; in discrete mode the axis starts one
// increment earlier so the first interval is fully drawn.
double CChartStandardAxis::GetFirstTickValue() const
{
	double dRetVal = m_dFirstTickValue;
	if (m_bDiscrete)
	{
		dRetVal = m_dFirstTickValue - m_dTickIncrement;
	}
	return dRetVal;
}

// Advances to the next tick; returns false past the axis maximum or when the
// increment is 0 (degenerate axis, prevents an infinite loop in the caller).
bool CChartStandardAxis::GetNextTickValue(double dCurrentTick, double& dNextTick) const
{
	if (m_dTickIncrement==0)
		return false;
	dNextTick = dCurrentTick + m_dTickIncrement;
	if (dNextTick <= m_MaxValue)
		return true;
	else
		return false;
}

long CChartStandardAxis::ValueToScreenDiscrete(double dValue) const
{
	// In discrete mode, all values between two ticks are "directed"
	// to the middle of the interval.
	// The epsilon compensates floating-point noise so values sitting exactly
	// on a tick are binned into the expected interval (sign follows dValue).
	double precision = 0.0000000001;
	if (dValue < 0)
		precision = -0.0000000001;
	int tickNr = (int)((dValue+precision)/m_dTickIncrement);
	dValue = tickNr * m_dTickIncrement;
	dValue += m_dTickIncrement/2.0;
	return ValueToScreenStandard(dValue);
}

long CChartStandardAxis::GetTickPos(double TickVal) const
{
	// The tick is always at the same position,
	// even if the axis is discrete.
	return ValueToScreenStandard(TickVal);
}

// Formats a tick value in fixed notation with the configured decimal count.
TChartString CChartStandardAxis::GetTickLabel(double TickValue) const
{
	TChartStringStream ssLabel;
	ssLabel << fixed << setprecision(m_uDecCount) << TickValue;
	return ssLabel.str();
}

// Recomputes the tick increment in automatic mode so that ticks land on
// "round" values (1, 2 or 5 times a power of ten), roughly one tick per
// 30 px (horizontal) or 20 px (vertical).
void CChartStandardAxis::RefreshTickIncrement()
{
	if (!m_bAutoTicks)
		return;

	if (m_MaxValue == m_MinValue)
	{
		m_dTickIncrement = 0;
		return;
	}

	int PixelSpace;
	if (m_bIsHorizontal)
		PixelSpace = 30;
	else
		PixelSpace = 20;

	// NOTE(review): if the axis is shorter than PixelSpace pixels,
	// MaxTickNumber is 0 and the division below divides by zero — confirm
	// callers guarantee a minimum axis length.
	int MaxTickNumber = (int)fabs((m_EndPos-m_StartPos)/PixelSpace * 1.0);

	//Calculate the appropriate TickSpace (1 tick every 30 pixel +/-)

	//Temporary tick increment
	double TempTickIncrement = (m_MaxValue-m_MinValue)/MaxTickNumber;

	// Calculate appropriate tickSpace (not rounded on 'strange values' but
	// on something like 1, 2 or 5*10^X where X is optimalized for showing the most
	// significant digits)
	int Zeros = (int)floor(log10(TempTickIncrement));
	double MinTickIncrement = pow(10.0,Zeros);
	int Digits = 0;
	if (Zeros<0)
	{
		//We must set decimal places. In the other cases, Digits will be 0.
		Digits = (int)fabs(Zeros*1.0);
	}

	// Snap the raw increment up to the nearest of 1x, 2x, 5x or 10x the
	// power of ten just below it.
	if (MinTickIncrement>=TempTickIncrement)
	{
		m_dTickIncrement = MinTickIncrement;
		SetDecimals(Digits);
	}
	else if (MinTickIncrement*2>=TempTickIncrement)
	{
		m_dTickIncrement = MinTickIncrement*2;
		SetDecimals(Digits);
	}
	else if (MinTickIncrement*5>=TempTickIncrement)
	{
		m_dTickIncrement = MinTickIncrement*5;
		SetDecimals(Digits);
	}
	else if (MinTickIncrement*10>=TempTickIncrement)
	{
		m_dTickIncrement = MinTickIncrement*10;
		// 10x bumps the increment to the next power of ten, so one fewer
		// decimal place is needed.
		if (Digits)
			SetDecimals(Digits-1);
		else
			SetDecimals(Digits);
	}
}

// Positions the first tick on the smallest multiple of the increment that is
// >= m_MinValue (handles positive, zero and negative minima separately).
void CChartStandardAxis::RefreshFirstTick()
{
	if (m_dTickIncrement!=0)
	{
		if (m_MinValue == 0)
			m_dFirstTickValue = 0;
		else if (m_MinValue>0)
		{
			m_dFirstTickValue = (int)(m_MinValue/m_dTickIncrement) * m_dTickIncrement;
			while (m_dFirstTickValue<m_MinValue)
				m_dFirstTickValue += m_dTickIncrement;
		}
		else
		{
			m_dFirstTickValue = (int)(m_MinValue/m_dTickIncrement) * m_dTickIncrement;
			while (m_dFirstTickValue>m_MinValue)
				m_dFirstTickValue -= m_dTickIncrement;
			if (!(m_dFirstTickValue == m_MinValue))
				m_dFirstTickValue += m_dTickIncrement;
		}
	}
	else		// m_TickIncrement!=0
	{
		m_dFirstTickValue = m_MinValue;
	}
}
/* globals $ */

// Factory returning a renderer that injects a hard-coded animal list into
// the element matched by `selector`.
function solve() {
    return function (selector) {
        if (typeof selector === 'undefined') {
            throw new Error('Selector can not be undefined!');
        }

        // Handlebars template: one <li> per animal; animals without a `url`
        // fall back to a fixed Batman image link.
        var template =
            '<div class="container">' +
                '<h1>Animals</h1>' +
                '<ul class="animals-list">' +
                    '{{#each animals}}' +
                    '<li>' +
                        '{{#if this.url}}' +
                        '<a href="{{this.url}}">' +
                            'See a {{this.name}}' +
                        '</a>' +
                        '{{else}}' +
                        '<a href="http://cdn.playbuzz.com/cdn/3170bee8-985c-47bc-bbb5-2bcb41e85fe9/d8aa4750-deef-44ac-83a1-f2b5e6ee029a.jpg">' +
                            'No link for {{this.name}} , here is Batman!' +
                        '</a>' +
                        '{{/if}}' +
                    '</li>' +
                    '{{/each}}' +
                '</ul>' +
            '</div>'
        ;

        // Data the template is meant to be rendered with.
        var data = {
            animals: [{
                name: 'Lion',
                url: "https://susanmcmovies.files.wordpress.com/2014/12/the-lion-king-wallpaper-the-lion-king-2-simbas-pride-4685023-1024-768.jpg"
            }, {
                name: 'Turtle',
                url: 'http://www.enkivillage.com/s/upload/images/a231e4349b9e3f28c740d802d4565eaf.jpg'
            }, {
                name: 'Dog'
            }, {
                name: 'Cat',
                url: 'http://i.imgur.com/Ruuef.jpg'
            }, {
                name: 'Dog Again'
            }]
        };

        // NOTE(review): the handlebars compilation step is commented out, so
        // the RAW template string (with {{...}} markers) is inserted into the
        // DOM and `data` is never used — presumably intentional for this
        // exercise's checker, but confirm before "fixing".
        //var templateCompiler = handlebars.compile(template),
        //    result = templateCompiler(data);

        //document.getElementById(selector).innerHTML = result;
        $(selector).html(template);
    };
};

module.exports = solve;
# DC sweep of an NPN VBIC transistor's transport current over base-emitter /
# base-collector voltage offsets, plotted as a 3D wireframe.
import matplotlib.pyplot as plt

from OSIM.Modeling.Components.NPN_Vertical_Bipolar_Intercompany_Model.VBIC_Currents.IRCI import *
from OSIM.Modeling.Components.Resistor import Resistor
from OSIM.Modeling.Components.VoltageSource import VoltageSource
from OSIM.Modeling.CircuitSystemEquations import CircuitSystemEquations
from OSIM.Modeling.Components.NPN_Vertical_Bipolar_Intercompany_Model.NPN_VBIC import NPN_VBIC
from OSIM.Simulation.CircuitAnalysis.CircuitAnalysis import CircuitAnalysis
from OSIM.Simulation.NRConvergenceException import NRConvergenceException
import numpy as np

fig = plt.figure()
# NOTE(review): fig.gca(projection='3d') is removed in matplotlib >= 3.6;
# newer code would use fig.add_subplot(projection='3d'). Left as-is for the
# matplotlib version this project pins.
ax = fig.gca(projection='3d')

# Node names: base, collector, emitter and the two source-side nodes.
nb = 'b'
nc = 'c'
ne = 'e'
m1 = 'm1'
m2 = 'm2'

# Display / sweep parameters (original comment: "Anzeigeparamter").
raster = 0.05   # sweep step size in volts
BFIX = 0.9      # fixed base drive voltage
CMIN = -0.5
CMAX = 0.5
EMIN = -0.5
EMAX = 0.5

# Small series resistors decouple the sources from base and collector.
r1 = Resistor([m1,nb],"R1",0.001,None)
r2 = Resistor([m2,nc],"R2",0.001,None)
v1 = VoltageSource([m1,'0'],"V1",0,None)   # base drive (fixed at BFIX)
v2 = VoltageSource([m2,'0'],"V2",0,None)   # collector sweep
v3 = VoltageSource([ne,'0'],"V3",0,None)   # emitter sweep
npn = NPN_VBIC([nc, nb, ne, '0'], "Q", 0, None, pParams="../../__Parameter/NPN_VBIC_npn13G2.comp")

TBSys = CircuitSystemEquations([npn,r1,r2,v3,v1,v2])
print(TBSys.compDict)
ca = CircuitAnalysis(TBSys)
TBSys.atype = CircuitSystemEquations.ATYPE_DC

# Grid of emitter (x) and collector (y) voltages; I holds the solved
# transport current at each grid point.
xE = np.arange(EMIN, EMAX, raster)
yC = np.arange(CMIN, CMAX, raster)
B, C = np.meshgrid(xE, yC)
I = np.zeros((len(yC),len(xE)))

v1.changeMyVoltageInSys(BFIX)

for cidx, c in enumerate(yC):
    for eidx,e in enumerate(xE):
        v3.changeMyVoltageInSys(e)
        v2.changeMyVoltageInSys(c)
        try:
            ca.newtonRaphson(TBSys)
            sol = npn.getTransportCurrent()
        except NRConvergenceException:
            # Non-converged operating points are reported and recorded as 0.
            print("Convergence problem at: ")
            print("E: %G"%(e))
            print("C: %G"%(c))
            npn.IT.debugPrint()
            #x = raw_input()
            sol = 0

        #print(TBSys.curNewtonIteration)
        #a = raw_input()
        I[cidx][eidx] = sol

# ax.plot_surface(B, C, I, rstride=8, cstride=8, alpha=0.3)
ax.plot_wireframe(B, C, I, rstride=5, cstride=5, alpha=0.3)
#cset = ax.contour(B, C, I, zdir='x', offset=BMAX, cmap=cm.coolwarm)

ax.set_xlabel('E')
ax.set_xlim(EMIN, EMAX)
ax.set_ylabel('C')
ax.set_ylim(CMIN, CMAX)
ax.set_zlabel('I')
ax.set_zlim(np.amin(I), np.amax(I))

plt.show()
# -*- coding: utf-8 -*-
"""
github3.auths
=============

This module contains the Authorization object.

"""
from __future__ import unicode_literals

from .decorators import requires_basic_auth
from .models import GitHubCore


class Authorization(GitHubCore):

    """The :class:`Authorization <Authorization>` object.

    Two authorization instances can be checked like so::

        a1 == a2
        a1 != a2

    And is equivalent to::

        a1.id == a2.id
        a1.id != a2.id

    See also: http://developer.github.com/v3/oauth/#oauth-authorizations-api

    """

    def __init__(self, auth, session=None):
        super(Authorization, self).__init__(auth, session)
        #: Details about the application (name, url)
        self.app = auth.get('app', {})
        #: Returns the Authorization token
        self.token = auth.get('token', '')
        #: App name
        self.name = self.app.get('name', '')
        #: URL about the note
        self.note_url = auth.get('note_url') or ''
        #: Note about the authorization
        self.note = auth.get('note') or ''
        #: List of scopes this applies to
        self.scopes = auth.get('scopes', [])
        #: Unique id of the authorization
        self.id = auth.get('id', 0)
        self._api = self._build_url('authorizations', str(self.id))
        #: datetime object representing when the authorization was created.
        self.created_at = self._strptime(auth.get('created_at'))
        #: datetime object representing when the authorization was updated.
        self.updated_at = self._strptime(auth.get('updated_at'))

    def _repr(self):
        return '<Authorization [{0}]>'.format(self.name)

    def _update_(self, auth):
        # Re-run __init__ with the fresh JSON payload to refresh all fields.
        self.__init__(auth, self._session)

    @requires_basic_auth
    def delete(self):
        """delete this authorization"""
        return self._boolean(self._delete(self._api), 204, 404)

    @requires_basic_auth
    def update(self, scopes=None, add_scopes=None, rm_scopes=None, note='',
               note_url=''):
        """Update this authorization.

        :param list scopes: (optional), replaces the authorization scopes with
            these
        :param list add_scopes: (optional), scopes to be added
        :param list rm_scopes: (optional), scopes to be removed
        :param str note: (optional), new note about authorization
        :param str note_url: (optional), new note URL about this authorization
        :returns: bool

        """
        # FIX: the defaults were mutable lists (scopes=[], ...). They were
        # never mutated here, but mutable defaults are a latent hazard; None
        # sentinels behave identically under the truthiness checks below.
        success = False
        json = None
        # Each change is POSTed separately; the last response wins for the
        # subsequent local refresh (matches the original behavior).
        if scopes:
            d = {'scopes': scopes}
            json = self._json(self._post(self._api, data=d), 200)
        if add_scopes:
            d = {'add_scopes': add_scopes}
            json = self._json(self._post(self._api, data=d), 200)
        if rm_scopes:
            d = {'remove_scopes': rm_scopes}
            json = self._json(self._post(self._api, data=d), 200)
        if note or note_url:
            d = {'note': note, 'note_url': note_url}
            json = self._json(self._post(self._api, data=d), 200)

        if json:
            self._update_(json)
            success = True

        return success
import time

from bittrader.api import API


class bot(API):
    """
    Parent Bot class for automated trading.

    Specific strategies are implemented as a subclass. See simpleBot for
    example.
    """

    def __init__(self, keyfile, exchange):
        # BUG FIX: the original called ``api.__init__(self, ...)`` but the
        # imported base class name is ``API`` — that line raised NameError.
        API.__init__(self, keyfile, exchange)
        self.orders = []      # orders placed by this bot
        self.profit = 0      # realized trading profit
        self.basis = 0       # average cost basis
        self.positions = self.get_positions()

    def calc_profit(self):
        # TODO: compute profit from filled orders vs. cost basis.
        pass


class simpleBot(bot):
    """
    Simple trading bot. Samples the ticker on a fixed interval, keeps a
    rolling one-hour window of prices and derives mean/variance signals.

    NOTE(review): this __init__ does not invoke bot.__init__, so API state
    (keyfile/exchange/positions) is never set up — kept as-is because
    changing the no-argument constructor would break existing callers, but
    run() cannot work until this is resolved.
    """

    def __init__(self):
        self.prices = []      # rolling window of sampled prices
        self.price_avg = 0    # mean of self.prices
        self.price_sd = 0    # variance of self.prices (despite the "sd" name)
        self.interval = 20.0  # seconds between samples; for campbx, don't make this smaller than 500 milliseconds

    def average_price(self):
        pass

    def run(self):
        """
        Runs the automatic bot. We want to calculate both the profit
        (difference in value in USD) due to trading and profit if just buy
        and hold strategy was employed for comparison.
        """
        while True:
            tick = self.ticker()
            price = float(tick['Last Trade'])
            self.prices.append(price)  # add the new price
            # keep 1 hour worth of prices
            if len(self.prices) == int((60 / self.interval) * 60):
                self.prices.pop(0)

            # BUG FIX: the original referenced the undefined bare name
            # ``prices`` (NameError) — the window lives on self.prices.
            n_prices = len(self.prices)
            self.price_avg = sum(self.prices) / n_prices
            # NOTE(review): this is the population *variance*, not the
            # standard deviation the attribute name suggests — confirm intent.
            self.price_sd = sum((a - self.price_avg) ** 2 for a in self.prices) / n_prices

            # TODO(review): the original tested an undefined name
            # ``criteria`` (and an undefined ``size``) — the actual trading
            # rules were never implemented. Placeholders keep the loop alive.
            should_buy = False
            should_sell = False
            if should_buy:      # if stats indicate that the bot should buy
                order = self.buy(size, price)
            elif should_sell:   # if stats indicate that the bot should sell
                order = self.sell(size, price)

            # BUG FIX: sleep was hard-coded to 20.0, ignoring self.interval.
            time.sleep(self.interval)
// this import should be first in order to load some required settings (like globals and reflect-metadata)
import { platformNativeScriptDynamic } from "nativescript-angular/platform";

import { AppModule } from "./modules/app.module";

// Start the Angular app on the NativeScript runtime using JIT compilation.
platformNativeScriptDynamic().bootstrapModule(AppModule);
# Performs actions requested by the user

import builtins
from subprocess import Popen, PIPE
import urllib.request
import os.path
from includes.util import *
import time
from datetime import datetime, timedelta
import shutil

from includes.defines import *
from includes.output import *
from objects.tredly.container import *
from objects.tredly.tredlyfile import *
from objects.tredly.unboundfile import *
from objects.tredly.tredlyhost import TredlyHost
from objects.layer4proxy.layer4proxyfile import *


class ActionStop():
    """Dispatches 'stop' commands: stop one container by UUID, or all running
    containers on a partition (or the whole host)."""

    def __init__(self, subject, target, identifier, actionArgs):
        # subject selects the operation; target is a UUID (container) or a
        # partition name / None (containers). identifier/actionArgs are
        # accepted for interface parity with other actions but unused here.
        if (subject == "container"):
            self.stopContainer(target)
        elif (subject == "containers"):
            # target can be a partition name or None (all containers on host)
            self.stopContainers(target)
        else:
            e_error("No command " + subject + " found.")
            exit(1)

    # stop a container
    def stopContainer(self, uuid):
        """Stop the single container identified by `uuid`; exits the process
        on any validation failure."""
        startTime = time.time()
        tredlyHost = TredlyHost()

        ###############################
        # Start Pre flight checks

        # make sure the uuid exists
        if (uuid is None):
            e_error("No UUID specified.")
            exit(1)

        # make sure the container exists
        if (not tredlyHost.containerExists(uuid)):
            e_error("No container with UUID " + uuid + " exists.")
            exit(1)

        # End pre flight checks
        ###############################

        # find which partition this container resides on
        partitionName = tredlyHost.getContainerPartition(uuid)

        # load the container from ZFS
        container = Container()

        # set up the dataset name that contains this containers data
        containerDataset = ZFS_TREDLY_PARTITIONS_DATASET + "/" + partitionName + "/" + TREDLY_CONTAINER_DIR_NAME + "/" + uuid

        # make the container populate itself from zfs
        container.loadFromZFS(containerDataset)
        # fall back to the requested uuid if ZFS metadata lacked one
        if (container.uuid is None):
            container.uuid = uuid

        zfsContainer = ZFSDataset(container.dataset, container.mountPoint)

        e_header("Stopping Container - " + container.name)

        # check if its already running
        if (not container.isRunning()):
            e_error("Container already stopped")
            exit(1)

        # run through the stop process
        container.stop()

        endTime = time.time()

        e_success("Stop container completed at " + time.strftime('%Y-%m-%d %H:%M:%S %z', time.localtime(endTime)))

        timeTaken = int(endTime) - int(startTime)

        # 0 seconds doesnt sound right
        if (timeTaken == 0):
            timeTaken = 1

        e_success("Total time taken: " + str(timeTaken) + " seconds")

    # stop a container
    def stopContainers(self, partitionName):
        """Interactively stop every running container on `partitionName`
        (or on the whole host when partitionName is None)."""
        startTime = time.time()
        tredlyHost = TredlyHost()

        ###############################
        # Start Pre flight checks

        # End pre flight checks
        ###############################

        containers = []
        # if partitionName was set then get that partitions containers
        if (partitionName is not None):
            # get a list of containers on this partition
            containers = tredlyHost.getPartitionContainerUUIDs(partitionName)
        else:
            # get a list of all containers on host
            containers = tredlyHost.getAllContainerUUIDs()

        # form a list of running containers
        runningContainers = []
        for uuid in containers:
            # check if its running
            if (tredlyHost.containerIsRunning(uuid)):
                # append it to the running containers list
                runningContainers.append(uuid);

        if (len(runningContainers) == 0):
            e_note("No containers to stop.")
            exit(0)

        # prompt the user since there are many containers to stop
        e_note("The following containers will be stopped:")
        for uuid in runningContainers:
            print("   " + tredlyHost.getContainerNameFromUUID(uuid, partitionName))

        userInput = input("Are you sure you wish to stop these containers? (y/n) ")

        # if the user said yes then stop all containers
        if (userInput.lower() == 'y'):
            # loop over and stop the containers
            for uuid in runningContainers:
                # load the container from ZFS
                container = Container()

                # if partitionName was none then find this containers partitionName
                if (partitionName is None):
                    containerPartitionName = tredlyHost.getContainerPartition(uuid)
                else:
                    containerPartitionName = partitionName

                # set up the dataset name that contains this containers data
                containerDataset = ZFS_TREDLY_PARTITIONS_DATASET + "/" + containerPartitionName + "/" + TREDLY_CONTAINER_DIR_NAME + "/" + uuid

                # make the container populate itself from zfs
                container.loadFromZFS(containerDataset)
                if (container.uuid is None):
                    container.uuid = uuid

                zfsContainer = ZFSDataset(container.dataset, container.mountPoint)

                e_header("Stopping Container - " + container.name)

                # check if its already running
                # NOTE(review): a container that stops between the earlier
                # isRunning() scan and this loop aborts the whole batch via
                # exit(1), leaving later containers running — confirm intent.
                if (not container.isRunning()):
                    e_error("Container already stopped")
                    exit(1)

                # run through the stop process
                container.stop()

            # show the user how long this took
            endTime = time.time()
            e_success("Stop containers completed at " + time.strftime('%Y-%m-%d %H:%M:%S %z', time.localtime(endTime)))

            timeTaken = int(endTime) - int(startTime)

            # 0 seconds doesnt sound right
            if (timeTaken == 0):
                timeTaken = 1

            e_success("Total time taken: " + str(timeTaken) + " seconds")
/*
Copyright (c) 2015 Paolo Patierno

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

using Amqp;
using ppatierno.AzureSBLite.Utility;
using System;
using System.Text;

namespace ppatierno.AzureSBLite
{
    /// <summary>
    /// SAS token provider
    /// </summary>
    public class SharedAccessSignatureTokenProvider : TokenProvider
    {
        /// <summary>
        /// Key name
        /// </summary>
        internal string KeyName { get; private set; }

        /// <summary>
        /// Shared access key
        /// </summary>
        internal string SharedAccessKey { get; private set; }

        /// <summary>
        /// Shared access signature
        /// </summary>
        internal string ShareAccessSignature { get; private set; }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="keyName">Key name</param>
        /// <param name="sharedAccessKey">Shared access key</param>
        internal SharedAccessSignatureTokenProvider(string keyName, string sharedAccessKey)
        {
            this.KeyName = keyName;
            this.SharedAccessKey = sharedAccessKey;
        }

        /// <summary>
        /// Constructor
        /// </summary>
        /// <param name="shareAccessSignature">Shared access signature</param>
        internal SharedAccessSignatureTokenProvider(string shareAccessSignature)
        {
            this.ShareAccessSignature = shareAccessSignature;
        }

        // Ticks of the Unix epoch (1970-01-01T00:00:00); SAS expiry is
        // expressed in whole seconds since this reference.
        private static readonly long UtcReference = (new DateTime(1970, 1, 1, 0, 0, 0, 0)).Ticks;

        /// <summary>
        /// Builds a SAS token string for the given resource.
        /// </summary>
        /// <param name="keyName">Shared access key name</param>
        /// <param name="sharedAccessKey">Shared access key value</param>
        /// <param name="resource">Resource URI to sign</param>
        /// <param name="tokenTimeToLive">Token validity duration</param>
        /// <returns>SAS token of the form "SharedAccessSignature sr=...&amp;sig=...&amp;se=...&amp;skn=..."</returns>
        public static string GetSharedAccessSignature(string keyName, string sharedAccessKey, string resource, TimeSpan tokenTimeToLive)
        {
            // http://msdn.microsoft.com/en-us/library/azure/dn170477.aspx
            // the canonical Uri scheme is http because the token is not amqp specific
            // signature is computed from joined encoded request Uri string and expiry string

#if NETMF
            // needed in .Net Micro Framework to use standard RFC4648 Base64 encoding alphabet
            System.Convert.UseRFC4648Encoding = true;
#endif
            // BUG FIX: TimeSpan.TotalSeconds is a property, not a method —
            // the original ".TotalSeconds()" did not compile.
            string expiry = ((long)(DateTime.UtcNow - new DateTime(UtcReference, DateTimeKind.Utc) + tokenTimeToLive).TotalSeconds).ToString();
            string encodedUri = HttpUtility.UrlEncode(resource);

            // HMAC-SHA256 over "<encoded-uri>\n<expiry>" keyed by the shared access key.
            byte[] hmac = SHA.computeHMAC_SHA256(Encoding.UTF8.GetBytes(sharedAccessKey), Encoding.UTF8.GetBytes(encodedUri + "\n" + expiry));
            string sig = Convert.ToBase64String(hmac);

            return Fx.Format(
                "SharedAccessSignature sr={0}&sig={1}&se={2}&skn={3}",
                encodedUri,
                HttpUtility.UrlEncode(sig),
                HttpUtility.UrlEncode(expiry),
                HttpUtility.UrlEncode(keyName));
        }

        /// <summary>
        /// Builds a SAS token scoped to an Event Hubs publisher endpoint.
        /// </summary>
        public static string GetPublisherSharedAccessSignature(Uri endpoint, string entityPath, string publisher, string keyName, string key, TimeSpan tokenTimeToLive)
        {
            string publisherPath = Fx.Format("http://{0}/{1}/Publishers/{2}", endpoint.Host, entityPath, publisher);
            return GetSharedAccessSignature(keyName, key, publisherPath, tokenTimeToLive);
        }
    }
}
# Tests for moto's CloudWatch Logs mock. `sure` is imported for its side
# effect of monkeypatching `.should` onto objects.
import boto3
import sure  # noqa
import six
from botocore.exceptions import ClientError

from moto import mock_logs, settings
from nose.tools import assert_raises

_logs_region = 'us-east-1' if settings.TEST_SERVER_MODE else 'us-west-2'


@mock_logs
def test_log_group_create():
    """A log group can be created, listed by prefix, and deleted."""
    conn = boto3.client('logs', 'us-west-2')
    log_group_name = 'dummy'
    response = conn.create_log_group(logGroupName=log_group_name)

    response = conn.describe_log_groups(logGroupNamePrefix=log_group_name)
    assert len(response['logGroups']) == 1

    response = conn.delete_log_group(logGroupName=log_group_name)


@mock_logs
def test_exceptions():
    """Duplicate creations and writes to unknown streams raise ClientError."""
    conn = boto3.client('logs', 'us-west-2')
    log_group_name = 'dummy'
    log_stream_name = 'dummp-stream'
    conn.create_log_group(logGroupName=log_group_name)
    # creating the same group twice must fail
    with assert_raises(ClientError):
        conn.create_log_group(logGroupName=log_group_name)

    # describe_log_groups is not implemented yet
    conn.create_log_stream(
        logGroupName=log_group_name,
        logStreamName=log_stream_name
    )
    # creating the same stream twice must fail
    with assert_raises(ClientError):
        conn.create_log_stream(
            logGroupName=log_group_name,
            logStreamName=log_stream_name
        )

    conn.put_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
        logEvents=[
            {
                'timestamp': 0,
                'message': 'line'
            },
        ],
    )

    # writing to a stream that does not exist must fail
    with assert_raises(ClientError):
        conn.put_log_events(
            logGroupName=log_group_name,
            logStreamName="invalid-stream",
            logEvents=[
                {
                    'timestamp': 0,
                    'message': 'line'
                },
            ],
        )


@mock_logs
def test_put_logs():
    """put_log_events returns a sequence token and the events round-trip."""
    conn = boto3.client('logs', 'us-west-2')
    log_group_name = 'dummy'
    log_stream_name = 'stream'
    conn.create_log_group(logGroupName=log_group_name)
    conn.create_log_stream(
        logGroupName=log_group_name,
        logStreamName=log_stream_name
    )
    messages = [
        {'timestamp': 0, 'message': 'hello'},
        {'timestamp': 0, 'message': 'world'}
    ]
    putRes = conn.put_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
        logEvents=messages
    )
    res = conn.get_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name
    )
    events = res['events']
    # the mock's sequence token is a 56-character numeric string
    nextSequenceToken = putRes['nextSequenceToken']
    assert isinstance(nextSequenceToken, six.string_types) == True
    assert len(nextSequenceToken) == 56
    events.should.have.length_of(2)


@mock_logs
def test_filter_logs_interleaved():
    """filter_log_events(interleaved=True) preserves timestamps/messages."""
    conn = boto3.client('logs', 'us-west-2')
    log_group_name = 'dummy'
    log_stream_name = 'stream'
    conn.create_log_group(logGroupName=log_group_name)
    conn.create_log_stream(
        logGroupName=log_group_name,
        logStreamName=log_stream_name
    )
    messages = [
        {'timestamp': 0, 'message': 'hello'},
        {'timestamp': 0, 'message': 'world'}
    ]
    conn.put_log_events(
        logGroupName=log_group_name,
        logStreamName=log_stream_name,
        logEvents=messages
    )
    res = conn.filter_log_events(
        logGroupName=log_group_name,
        logStreamNames=[log_stream_name],
        interleaved=True,
    )
    events = res['events']
    for original_message, resulting_event in zip(messages, events):
        # NOTE(review): this only asserts eventId is string-valued (it is
        # compared against str() of itself) — likely intended to compare
        # against an expected id; confirm upstream.
        resulting_event['eventId'].should.equal(str(resulting_event['eventId']))
        resulting_event['timestamp'].should.equal(original_message['timestamp'])
        resulting_event['message'].should.equal(original_message['message'])
'''
    Copyright (c) 2012 Alexander Abbott

    This file is part of the Cheshire Cyber Defense Scoring Engine (henceforth
    referred to as Cheshire).

    Cheshire is free software: you can redistribute it and/or modify it under
    the terms of the GNU Affero General Public License as published by the
    Free Software Foundation, either version 3 of the License, or (at your
    option) any later version.

    Cheshire is distributed in the hope that it will be useful, but WITHOUT
    ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
    FITNESS FOR A PARTICULAR PURPOSE.  See the GNU Affero General Public
    License for more details.

    You should have received a copy of the GNU Affero General Public License
    along with Cheshire.  If not, see <http://www.gnu.org/licenses/>.
'''

from flask import Response, g
from . import blueprint
from flask.ext.login import login_required
from CheshireCat.utils import requires_no_parameters, requires_roles, convert_datetime_to_timestamp
from bson import json_util
import json


@blueprint.route("/scores", methods=['GET'])
@login_required
@requires_roles('administrator', 'organizer')
@requires_no_parameters
def get_all_scores_for_teams():
    """Return the scores of every team as a JSON array (admins/organizers only)."""
    scores = g.db.get_scores_for_all_teams()
    # Timestamps are stored as datetimes; the API exposes numeric timestamps.
    for entry in scores:
        entry['timestamp'] = convert_datetime_to_timestamp(entry['timestamp'])
    payload = json.dumps(scores, default=json_util.default)
    return Response(payload, status=200, mimetype='application/json')


@blueprint.route("/<team_id>/score", methods=['GET'])
@login_required
@requires_roles('administrator', 'organizer')
@requires_no_parameters
def get_score_for_team(team_id):
    """Return the score document for a single team, or 404 if unknown."""
    results = g.db.get_score_for_team(team_id)
    if len(results) == 0:
        return Response(status=404)
    record = results[0]
    record['timestamp'] = convert_datetime_to_timestamp(record['timestamp'])
    payload = json.dumps(record, default=json_util.default)
    return Response(payload, status=200, mimetype='application/json')
from nose.tools import assert_true, assert_equal, assert_not_equal
import tempfile
from pathlib import Path
import os

from .schema_external import Attach


def test_attach_attributes():
    """ test saving files in attachments """
    # create a mock file
    table = Attach()
    source_folder = tempfile.mkdtemp()
    # Insert two rows that reuse the same source filenames but carry
    # different random contents, so the fetched copies must be renamed
    # rather than overwritten on download.
    for i in range(2):
        attach1 = Path(source_folder, 'attach1.img')
        data1 = os.urandom(100)
        with attach1.open('wb') as f:
            f.write(data1)
        attach2 = Path(source_folder, 'attach2.txt')
        data2 = os.urandom(200)
        with attach2.open('wb') as f:
            f.write(data2)
        table.insert1(dict(attach=i, img=attach1, txt=attach2))

    download_folder = Path(tempfile.mkdtemp())
    keys, path1, path2 = table.fetch("KEY", 'img', 'txt', download_path=download_folder, order_by="KEY")

    # verify that different attachment are renamed if their filenames collide
    assert_not_equal(path1[0], path2[0])
    assert_not_equal(path1[0], path1[1])
    assert_equal(Path(path1[0]).parent, download_folder)
    # The last inserted row's files must round-trip with the last-written
    # random payloads (data1/data2 from the final loop iteration).
    with Path(path1[-1]).open('rb') as f:
        check1 = f.read()
    with Path(path2[-1]).open('rb') as f:
        check2 = f.read()
    assert_equal(data1, check1)
    assert_equal(data2, check2)

    # verify that existing files are not duplicated if their filename matches issue #592
    p1, p2 = (Attach & keys[0]).fetch1('img', 'txt', download_path=download_folder)
    assert_equal(p1, path1[0])
    assert_equal(p2, path2[0])


def test_return_string():
    """ test returning string on fetch """
    # create a mock file
    table = Attach()
    source_folder = tempfile.mkdtemp()
    attach1 = Path(source_folder, 'attach1.img')
    data1 = os.urandom(100)
    with attach1.open('wb') as f:
        f.write(data1)
    attach2 = Path(source_folder, 'attach2.txt')
    data2 = os.urandom(200)
    with attach2.open('wb') as f:
        f.write(data2)
    table.insert1(dict(attach=2, img=attach1, txt=attach2))

    download_folder = Path(tempfile.mkdtemp())
    keys, path1, path2 = table.fetch(
        "KEY", 'img', 'txt', download_path=download_folder, order_by="KEY")
    # Fetched attachment paths are plain strings, not Path objects.
    assert_true(isinstance(path1[0], str))
"""July URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.10/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.conf.urls import url, include 2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls')) """ from django.conf.urls import url, include from users.views import RegisterView, LoginView, LogoutView, PasswordView, RestPasswordView,MideaAuthenticateView from blog.views import * from blog.feeds import RssSiteNewsFeed, AtomSiteNewsFeed from django.contrib.sitemaps.views import sitemap from blog.sitemap import BlogSitemap from werobot.contrib.django import make_view from .robot import robot from django.views.generic.base import RedirectView from django.conf import settings from django.conf.urls.static import static sitemaps = { 'static': BlogSitemap, } urlpatterns = [ # ========== Blog Home ========= url(r'^$', ArticleListView.as_view(), name='index'), url(r'^about/$', AboutView.as_view(), name='about'), url(r'^article/(?P<article_url>.*)$', ArticleDetailView.as_view(), name='article'), url(r'^blog/', include('blog.urls', namespace='blog')), # ========== User ========= url(r'^login/$', LoginView.as_view(), name='login'), url(r'^logout/$', LogoutView.as_view(), name='logout'), url(r'^register/$', RegisterView.as_view(), name='register'), url(r'^password/$', PasswordView.as_view(), name='password'), url(r'^rest_password/$', RestPasswordView.as_view(), name='restpassword'), url(r'^midea_authenticate/$', MideaAuthenticateView.as_view(), name='mideaauthenticate'), # ========== Admin ========= url(r'^admin/', include('admin.urls', namespace='admin')), url(r'^sitemap\.xml$', 
sitemap, {'sitemaps': sitemaps}, name='django.contrib.sitemaps.views.sitemap'), url(r'^rss\.xml$', RssSiteNewsFeed()), url(r'^atom\.xml$', AtomSiteNewsFeed()), # ========== WechatRobot ========= url(r'^robot/',make_view(robot)), url(r'^favicon.ico$',RedirectView.as_view(url=r'static/favicon.ico')), ]+ static(settings.STATIC_URL, document_root=settings.STATIC_URL)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
<?php

use Illuminate\Database\Schema\Blueprint;
use Illuminate\Database\Migrations\Migration;

/**
 * Migration that creates the `comments` table.
 */
class CreateCommentsTable extends Migration
{
    /**
     * Run the migrations.
     *
     * Creates the `comments` table with a body, explicit timestamp
     * columns, and a reference to the owning post.
     *
     * @return void
     */
    public function up()
    {
        Schema::create('comments', function (Blueprint $table) {
            $table->increments('id');
            $table->string('body');
            $table->dateTime('updated_at');
            $table->dateTime('created_at');
            $table->integer('post_id');
        });
    }

    /**
     * Reverse the migrations.
     *
     * Bug fix: this previously dropped the unrelated `posts` table;
     * rolling back must drop the `comments` table created by up().
     *
     * @return void
     */
    public function down()
    {
        Schema::drop('comments');
    }
}
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // General Information about an assembly is controlled through the following // set of attributes. Change these attribute values to modify the information // associated with an assembly. [assembly: AssemblyTitle("jab")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("jab")] [assembly: AssemblyCopyright("Copyright © 2016")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Setting ComVisible to false makes the types in this assembly not visible // to COM components. If you need to access a type in this assembly from // COM, set the ComVisible attribute to true on that type. [assembly: ComVisible(false)] // The following GUID is for the ID of the typelib if this project is exposed to COM [assembly: Guid("83130d42-aca4-451f-8783-bd69216e3bf9")] // Version information for an assembly consists of the following four values: // // Major Version // Minor Version // Build Number // Revision // // You can specify all the values or you can default the Build and Revision Numbers // by using the '*' as shown below: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
(function () { "use strict"; var controllerId = "endpointsController"; angular .module("app-owinapigateway-admin") .controller("endpointsController", endpointsController); endpointsController.$inject = ["$scope", "common", "context", "config"]; function endpointsController($scope, common, context, config) { var vm = this; vm.model = { configuration: null, availableStatuses: [{ code: 0, name: "Unknown" }, { code: 1, name: "Up" }, { code: 2, name: "Down" } ], }; vm.getStatusText = getStatusText; vm.getSubstatusText = getSubstatusText; vm.startEndpointEditing = startEndpointEditing; vm.cancelEndpointEditing = cancelEndpointEditing; vm.updatedEditedEndpoint = updatedEditedEndpoint; vm.addNewEndpoint = addNewEndpoint; vm.deleteEndpoint = deleteEndpoint; vm.deleteInstance = deleteInstance; vm.addNewInstance = addNewInstance; activate(); function activate() { vm.isBusy = true; var promises = []; promises.push(getCurrentConfiguration()); common.activateController(promises, controllerId) .then(function () { // setView(); }) .catch(function () { }) .then(function () { vm.isBusy = false; }); } function deleteInstance(endpoint, instanceToDel) { var index = endpoint.instances.instance.indexOf(instanceToDel); if (index != -1) { endpoint.instances.instance.splice(index, 1); } } function addNewInstance(endpoint) { if (endpoint.instances == null) { endpoint.instances = {}; } if (endpoint.instances.instance == null) { endpoint.instances.instance = []; } endpoint.instances.instance.push({}); } function addNewEndpoint() { var newEndpoint = {}; vm.editedEndpoint = newEndpoint; } function deleteEndpoint(endpoint) { var index = vm.model.configuration.endpoints.indexOf(endpoint); if (index != -1) { vm.model.configuration.endpoints.splice(index, 1); udateConfiguration(vm.model.configuration); } } function updatedEditedEndpoint() { // add new endpoint to the configuration if (vm.model.configuration.endpoints == null) { vm.model.configuration.endpoints = []; } if 
(vm.model.configuration.endpoints.indexOf(vm.editedEndpoint) == -1) { vm.model.configuration.endpoints.push(vm.editedEndpoint); } udateConfiguration(vm.model.configuration); } function udateConfiguration(configuration) { context.updateCurrentConfiguration(configuration).then(function (response) { if (response.status === 200) { cancelEndpointEditing(); } else { logError("Sorry, something went wrong during..."); } }); } function cancelEndpointEditing() { vm.editedEndpoint = null; } function startEndpointEditing(endpoint) { vm.editedEndpoint = endpoint; } function getSubstatusText(endpoint) { var ins = endpoint.instances.instance; var isAtLeastOneDown = false; var isAtLeastOneUnknown = false; if (ins) { for (var i = 0; i < ins.length; i++) { var instance = ins[i]; if (instance.status == 0) { isAtLeastOneUnknown = true; } if (instance.status == 2) { isAtLeastOneDown = true; } } } var subStatusText = ""; if (isAtLeastOneDown) { subStatusText += "[down]"; } if (isAtLeastOneUnknown) { subStatusText += "[unknown]"; } return subStatusText; } function getStatusText(endpoint) { var ins = endpoint.instances.instance; var isAtLeastOneUp = false; if (ins) { for (var i = 0; i < ins.length; i++) { var instance = ins[i]; if (instance.status == 1) { isAtLeastOneUp = true; } } } return isAtLeastOneUp ? "OK" : "NOT OK"; } function getCurrentConfiguration() { vm.modelPromise = context.getCurrentConfiguration() .then(function (response) { if (response.status === 200) { vm.model.configuration = response.data; } else { logError("Sorry, something went wrong during..."); vm.model = null; } return vm.model; }); return common.$q.when(vm.modelPromise); } } })();
import json from urllib2 import (Request, urlopen) RESULT_COUNT = 3 CODE_LIST_FILE = 'code_list.txt' QPX_ENDPOINT = 'https://www.googleapis.com/qpxExpress/v1/trips/search?key=AIzaSyDUl8LR3B0b6_TODUTO62xRvC7BLZZ_o0c' USD_PREFIX = 'USD' def get_connections(origin, destination, departureDate, arrivalDate): with open(CODE_LIST_FILE, 'r') as code_file: codes = code_file.readlines() direct_early = format_request(origin, destination, departureDate, 1) direct_late = format_request(origin, destination, arrivalDate, 1) stopover_options = [] for code in codes: first_leg_request = format_request(origin, code, departureDate, 0) second_leg_request = format_request(code, destination, arrivalDate, 0) first_leg_response = format_response(post_request(first_leg_request)) second_leg_response = format_response(post_request(second_leg_request)) best_combo = get_best_combinations(first_leg_response, second_leg_response) for combination in best_combo: obj = {'saleTotal': '%s%0.2f' % (USD_PREFIX, get_total_price(combination))} obj['slice'] = [] for leg in combination: obj['slice'].extend(leg['slice']) stopover_options.append(obj) stopover_options.sort(key=lambda x: get_price_number(x['saleTotal'])) direct_options = format_response(post_request(direct_early))['response']['options'] direct_options.extend(format_response(post_request(direct_late))['response']['options']) direct_options.sort(key=lambda x: get_price_number(x['saleTotal'])) response = {'response': {'stopoverOptions': stopover_options[0:RESULT_COUNT], 'directOptions': direct_options[0:RESULT_COUNT]}} response['request'] = {'origin': origin, 'destination': destination, 'departureDate': departureDate, 'arrivalDate': arrivalDate} return response def format_request(origin, destination, date, max_stops): return json.dumps({ 'request': { 'passengers': {'adultCount': 1}, 'slice': [ { 'origin': origin, 'destination': destination, 'date': date } ], 'solutions': RESULT_COUNT, 'maxStops': max_stops } }) def post_request(json_request): 
headers = {'Content-Type': 'application/json'} search_request = Request(QPX_ENDPOINT, json_request, headers) return json.loads(urlopen(search_request).read()) def format_response(raw_response): response_data = {'response': {'options': []}} trip_options = raw_response['trips']['tripOption'] cities = raw_response['trips']['data']['city'] airports = raw_response['trips']['data']['airport'] code_city_map = {} for city in cities: code_city_map[city['code']] = city['name'] airport_city_map = {} for airport in airports: airport_city_map[airport['code']] = code_city_map[airport['city']] for trip in trip_options: option_data = {'saleTotal': trip['saleTotal'], 'slice': []} for sl in trip['slice']: for leg in sl['segment']: leg_obj = leg['leg'][0] leg_data = {} leg_data['origin'] = leg_obj['origin'] leg_data['originName'] = airport_city_map[leg_obj['origin']] leg_data['departureTime'] = leg_obj['departureTime'] leg_data['destination'] = leg_obj['destination'] leg_data['destinationName'] = airport_city_map[leg_obj['destination']] leg_data['arrivalTime'] = leg_obj['arrivalTime'] option_data['slice'].append(leg_data) response_data['response']['options'].append(option_data) return response_data def combine_results(combined_arrays, all_arrays, start_index, output): if start_index < len(all_arrays): new_arrays = [] for combined_array in combined_arrays: for item in all_arrays[start_index]: new_array = [i for i in combined_array] new_array.append(item) new_arrays.append(new_array) output[0] = new_arrays combine_results(new_arrays, all_arrays, start_index + 1, output) def get_best_combinations(first_leg, second_leg): output = [[]] combine_results([[]], [first_leg['response']['options'], second_leg['response']['options']], 0, output) combos = output[0] combos.sort(key=lambda x: get_total_price(x)) return combos[0:RESULT_COUNT] def get_total_price(legs): total = 0 for leg in legs: total += get_price_number(leg['saleTotal']) return total def get_price_number(price): return 
float(price[3:])
"use strict"; /* tslint:disable:no-unused-variable */ var testing_1 = require("@angular/core/testing"); var forms_1 = require("@angular/forms"); var ng2_bootstrap_modal_1 = require("ng2-bootstrap-modal"); var dist_1 = require("angular2-notifications/dist"); var router_1 = require("@angular/router"); var goal_1 = require("../../models/goal"); var goals_list_page_component_1 = require("./goals-list-page.component"); var goals_list_page_service_1 = require("./goals-list-page.service"); var subgoal_1 = require("../../models/subgoal"); describe('GoalsListPageComponent', function () { var component; var fixture; var redirectedUrl; var notificationSuccess; var mockData; var dialogCallback; var mockSubgoal = new subgoal_1.Subgoal(); mockSubgoal._id = 'test_subgoal_id'; mockSubgoal.goalId = 'test_goal_id'; mockSubgoal.description = 'test_subgoal_description'; var mockGoal = new goal_1.Goal(); mockGoal._id = 'test_goal_id'; mockGoal.description = 'test_goal_description'; mockGoal.subgoals = [mockSubgoal]; var MockGoalsListPageService = (function () { function MockGoalsListPageService() { } MockGoalsListPageService.prototype.deleteGoal = function () { return Promise.resolve(); }; MockGoalsListPageService.prototype.getGoals = function () { return Promise.resolve([]); }; return MockGoalsListPageService; }()); var MockActivatedRoute = (function () { function MockActivatedRoute() { this.snapshot = { data: { goals: { data: [mockGoal] } } }; } return MockActivatedRoute; }()); var MockRouter = (function () { function MockRouter() { } MockRouter.prototype.navigate = function (url) { redirectedUrl = url[0]; }; return MockRouter; }()); var MockDialogService = (function () { function MockDialogService() { } MockDialogService.prototype.addDialog = function (component, data) { expect(data.title).toBe('Delete Goal'); expect(data.cancelText).toBe('Cancel'); expect(data.message).toBe('Are you sure you want to delete this goal?'); data.confirmFunction().then(function () { 
dialogCallback(mockData); }); return { subscribe: function (cb) { dialogCallback = cb; } }; }; return MockDialogService; }()); var MockNotificationsService = (function () { function MockNotificationsService() { } MockNotificationsService.prototype.success = function () { notificationSuccess = true; }; return MockNotificationsService; }()); beforeEach(testing_1.async(function () { testing_1.TestBed.configureTestingModule({ imports: [forms_1.FormsModule, router_1.RouterModule], declarations: [goals_list_page_component_1.GoalsListPageComponent], providers: [ { provide: router_1.ActivatedRoute, useClass: MockActivatedRoute }, { provide: router_1.Router, useClass: MockRouter }, { provide: ng2_bootstrap_modal_1.DialogService, useClass: MockDialogService }, { provide: dist_1.NotificationsService, useClass: MockNotificationsService } ] }).overrideComponent(goals_list_page_component_1.GoalsListPageComponent, { set: { providers: [{ provide: goals_list_page_service_1.GoalsListPageService, useClass: MockGoalsListPageService }] } }) .compileComponents(); })); beforeEach(function () { redirectedUrl = null; notificationSuccess = null; mockData = null; fixture = testing_1.TestBed.createComponent(goals_list_page_component_1.GoalsListPageComponent); component = fixture.componentInstance; fixture.detectChanges(); }); it('should test ngOnInit', function () { component.ngOnInit(); expect(component.goals.length).toBe(1); }); it('should test addNewGoal', function () { component.addNewGoal('test'); expect(redirectedUrl).toBe('/goals/test'); }); it('should test getProgressClass', function () { component.ngOnInit(); expect(component.getProgressClass(0)).toBeNull(); component.goals[0].progress = 50; expect(component.getProgressClass(0)).toBe('in-progress'); component.goals[0].progress = 100; expect(component.getProgressClass(0)).toBe('complete'); }); it('should test removeGoal', testing_1.fakeAsync(function () { mockData = { data: [] }; component.ngOnInit(); 
expect(component.goals.length).toBe(1); component.removeGoal(0); testing_1.tick(); expect(component.goals.length).toBe(0); })); });
import { Server, IncomingMessage, ServerResponse } from 'http' import { FastifyInstance, FastifyPluginOptions } from 'fastify' export default ( fastify: FastifyInstance<Server, IncomingMessage, ServerResponse>, _: FastifyPluginOptions, next: (error?: Error) => void, ) => { fastify.get('/health', (_, reply) => { reply.send({ status: 'UP' }) }) next() }
using Microsoft.Xna.Framework;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;

namespace LostSoul
{
    /// <summary>
    /// Base class for per-entity behaviors. Concrete subclasses implement
    /// <see cref="Run"/>, which is invoked with the current frame time and
    /// the entity the behavior is attached to.
    /// </summary>
    public abstract class Behavior
    {
        /// <summary>
        /// Executes this behavior for one update step.
        /// </summary>
        /// <param name="gameTime">Timing values for the current frame.</param>
        /// <param name="entity">The entity this behavior acts on.</param>
        public abstract void Run(GameTime gameTime, Entity entity);
    }
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information
namespace DotNetNuke.Collections
{
    using System.Collections.Generic;

    // Taken from Rob Conery's Blog post on the ASP.Net MVC PagedList Helper
    // http://blog.wekeroad.com/2007/12/10/aspnet-mvc-pagedlistt/

    /// <summary>
    /// Provides an interface to a paged list, which contains a snapshot
    /// of a single page of data from the data store.
    /// </summary>
    /// <typeparam name = "T">The type of objects stored in the list.</typeparam>
    public interface IPagedList<T> : IList<T>
    {
        /// <summary>
        /// Gets a value indicating whether there is a next page available.
        /// </summary>
        bool HasNextPage { get; }

        /// <summary>
        /// Gets a value indicating whether there is a previous page available.
        /// </summary>
        bool HasPreviousPage { get; }

        /// <summary>
        /// Gets a value indicating whether this is the first page.
        /// </summary>
        bool IsFirstPage { get; }

        /// <summary>
        /// Gets a value indicating whether this is the last page.
        /// </summary>
        bool IsLastPage { get; }

        /// <summary>
        /// Gets or sets the no of pages in this list.
        /// </summary>
        int PageCount { get; set; }

        /// <summary>
        /// Gets or sets the index of the page contained in this list.
        /// </summary>
        int PageIndex { get; set; }

        /// <summary>
        /// Gets or sets the size of the page in this list.
        /// </summary>
        int PageSize { get; set; }

        /// <summary>
        /// Gets or sets the total number of objects in the data store.
        /// </summary>
        int TotalCount { get; set; }
    }
}
import sys from unittest import TestCase import plotly.graph_objs as go if sys.version_info >= (3, 3): from unittest.mock import MagicMock else: from mock import MagicMock class TestRestyleMessage(TestCase): def setUp(self): # Construct with mocked _send_restyle_msg method self.figure = go.Figure( data=[ go.Scatter(), go.Bar(), go.Parcoords(dimensions=[{}, {"label": "dim 2"}, {}]), ] ) # Mock out the message method self.figure._send_restyle_msg = MagicMock() def test_property_assignment_toplevel(self): # Set bar marker self.figure.data[1].marker = {"color": "green"} self.figure._send_restyle_msg.assert_called_once_with( {"marker": [{"color": "green"}]}, trace_indexes=1 ) def test_property_assignment_nested(self): # Set scatter marker color self.figure.data[0].marker.color = "green" self.figure._send_restyle_msg.assert_called_once_with( {"marker.color": ["green"]}, trace_indexes=0 ) def test_property_assignment_nested_array(self): # Set parcoords dimension self.figure.data[2].dimensions[0].label = "dim 1" self.figure._send_restyle_msg.assert_called_once_with( {"dimensions.0.label": ["dim 1"]}, trace_indexes=2 ) # plotly_restyle def test_plotly_restyle_toplevel(self): # Set bar marker self.figure.plotly_restyle({"marker": {"color": "green"}}, trace_indexes=1) self.figure._send_restyle_msg.assert_called_once_with( {"marker": {"color": "green"}}, trace_indexes=[1] ) def test_plotly_restyle_nested(self): # Set scatter marker color self.figure.plotly_restyle({"marker.color": "green"}, trace_indexes=0) self.figure._send_restyle_msg.assert_called_once_with( {"marker.color": "green"}, trace_indexes=[0] ) def test_plotly_restyle_nested_array(self): # Set parcoords dimension self.figure.plotly_restyle({"dimensions[0].label": "dim 1"}, trace_indexes=2) self.figure._send_restyle_msg.assert_called_once_with( {"dimensions[0].label": "dim 1"}, trace_indexes=[2] ) def test_plotly_restyle_multi_prop(self): self.figure.plotly_restyle( {"marker": {"color": "green"}, "name": "MARKER 
1"}, trace_indexes=1 ) self.figure._send_restyle_msg.assert_called_once_with( {"marker": {"color": "green"}, "name": "MARKER 1"}, trace_indexes=[1] ) def test_plotly_restyle_multi_trace(self): self.figure.plotly_restyle( {"marker": {"color": "green"}, "name": "MARKER 1"}, trace_indexes=[0, 1] ) self.figure._send_restyle_msg.assert_called_once_with( {"marker": {"color": "green"}, "name": "MARKER 1"}, trace_indexes=[0, 1] )
from __future__ import absolute_import, print_function, division
import numpy
import theano
from theano.compat import izip
from theano.gof.utils import hash_from_code


def hash_from_ndarray(data):
    """
    Return a hash from an ndarray.

    It takes care of the data, shapes, strides and dtype.

    """
    # We need to hash the shapes and strides as hash_from_code only hashes
    # the data buffer. Otherwise, this will cause problem with shapes like:
    # (1, 0) and (2, 0) and problem with inplace transpose.
    # We also need to add the dtype to make the distinction between
    # uint32 and int32 of zeros with the same shape and strides.

    # python hash are not strong, so I always use md5 in order not to have a
    # too long hash, I call it again on the concatenation of all parts.
    if not data.flags["C_CONTIGUOUS"]:
        # hash_from_code needs a C-contiguous array.
        data = numpy.ascontiguousarray(data)
    return hash_from_code(hash_from_code(data) +
                          hash_from_code(str(data.shape)) +
                          hash_from_code(str(data.strides)) +
                          hash_from_code(str(data.dtype)))


def shape_of_variables(fgraph, input_shapes):
    """
    Compute the numeric shape of all intermediate variables given input shapes.

    Parameters
    ----------
    fgraph
        The theano.FunctionGraph in question.
    input_shapes : dict
        A dict mapping input to shape.

    Returns
    -------
    shapes : dict
        A dict mapping variable to shape

    .. warning:: This modifies the fgraph. Not pure.

    Examples
    --------
    >>> import theano
    >>> x = theano.tensor.matrix('x')
    >>> y = x[512:]; y.name = 'y'
    >>> fgraph = theano.FunctionGraph([x], [y], clone=False)
    >>> d = shape_of_variables(fgraph, {x: (1024, 1024)})
    >>> d[y]
    (array(512), array(1024))
    >>> d[x]
    (array(1024), array(1024))

    """
    # ShapeFeature records a symbolic shape tuple for every variable in the
    # graph; attaching it is the side effect warned about above.
    if not hasattr(fgraph, 'shape_feature'):
        fgraph.attach_feature(theano.tensor.opt.ShapeFeature())

    # Flatten the symbolic per-dimension variables of inputs and of every
    # recorded variable into two parallel lists.
    input_dims = [dimension for inp in fgraph.inputs
                  for dimension in fgraph.shape_feature.shape_of[inp]]

    output_dims = [dimension for shape in fgraph.shape_feature.shape_of.values()
                   for dimension in shape]

    # Compile a function mapping concrete input dimensions to all
    # intermediate dimensions at once.
    compute_shapes = theano.function(input_dims, output_dims)

    if any([i not in fgraph.inputs for i in input_shapes.keys()]):
        raise ValueError(
            "input_shapes keys aren't in the fgraph.inputs. FunctionGraph()"
            " interface changed. Now by default, it clones the graph it receives."
            " To have the old behavior, give it this new parameter `clone=False`.")

    # Evaluate once with the caller-provided numeric input shapes.
    numeric_input_dims = [dim for inp in fgraph.inputs
                          for dim in input_shapes[inp]]
    numeric_output_dims = compute_shapes(*numeric_input_dims)

    sym_to_num_dict = dict(izip(output_dims, numeric_output_dims))

    # Re-assemble the flat numeric dimensions into per-variable shape tuples.
    l = {}
    for var in fgraph.shape_feature.shape_of:
        l[var] = tuple(sym_to_num_dict[sym]
                       for sym in fgraph.shape_feature.shape_of[var])
    return l
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*- # # Copyright (C) 2016-2018 Canonical Ltd # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import os from testtools.matchers import ( DirExists, FileExists, Not ) from tests import integration class LibraryPrecedenceTestCase(integration.TestCase): def test_snapped_library_takes_precedence_over_system(self): self.run_snapcraft('stage', 'fake-curl-library') self.run_snapcraft(['prime', 'main', 'fake-curl']) # We will have everything in, given that we require dependencies # to be primed. self.assertThat( os.path.join(self.prime_dir, 'bin', 'main'), FileExists()) self.assertThat( os.path.join(self.prime_dir, 'lib'), DirExists()) # If this exist, snapcraft brought libbcurl in from the host. self.assertThat( os.path.join(self.prime_dir, 'usr'), Not(DirExists())) # Prime the rest of the way. self.run_snapcraft('prime') # Now verify the lib we got was the one from the snap, not from the # system. self.assertThat( os.path.join(self.prime_dir, 'lib', 'libcurl.so'), FileExists()) self.assertThat( os.path.join(self.prime_dir, 'usr'), Not(DirExists()))
import auf = require("aurelia-framework"); //import {ViewCompiler, ViewResources, ViewSlot} from 'aurelia-templating'; //import {ResourcePool} from '../services/resource-pool'; import rp = require("../services/resource-pool"); var hasTemplateElement = ('content' in document.createElement('template')); export class ResourcePooled { static metadata() { return auf.Behavior .customElement('resource-pooled') .withProperty('pool') .skipContentProcessing() .noView(); } public template: HTMLElement; public pool: string; public view: auf.View; public viewFactory: auf.ViewFactory; static inject = [Element, rp.ResourcePool, auf.ViewCompiler, auf.ViewResources, auf.ViewSlot, auf.Container]; constructor( public element: Element, public resourcePool: rp.ResourcePool, public viewCompiler: auf.ViewCompiler, public viewResources: auf.ViewResources, public viewSlot: auf.ViewSlot, public container: auf.Container ) { var template = document.createElement('template'); if (hasTemplateElement) { template["content"] = document.createDocumentFragment(); } while (element.firstChild) { template["content"].appendChild(element.firstChild); } this.template = template; } bind(context) { if (!this.pool) { this.pool = this.viewResources.viewUrl; } this.viewFactory = this.resourcePool.get(this.pool, 'viewFactory',() => this.viewCompiler.compile(this.template, this.viewResources)); this.resourcePool.free(this.pool, 'viewFactory'); this.view = this.resourcePool.get(this.pool, 'view', () => { console.log("Creating pooled view: " + this.pool); return this.viewFactory.create(this.container, null, { suppressBind: true }); }); this.view.bind(context); this.viewSlot.add(this.view); } unbind() { this.viewSlot.remove(this.view); this.view.unbind(); this.resourcePool.free(this.pool, 'view', this.view); this.view = null; } }
"""Crypto-Balances Usage: run.py [(address [(--add|--remove) <address_type> <address>...])] [(exclusion [(--add|--remove) <asset>...])] [base <currency> [--precision <n>]] [-m --minimum <balance>] [-i --itemize] Options: -h --help Show this screen -v --version Show version -a --add -r --remove -i --itemize Show asset balance for individual addresses -b --base <currency> Asset value base denomination -p --precision <n> Base currency decimal places -m --minimum <balance> Threshold asset balance for print """ from docopt import docopt from src import config, util, portfolio def main(argv): config_manip = [argv['--add'], argv['--remove']] if argv['address']: if argv['--add']: config.add_address(argv['<address_type>'], argv['<address>']) if argv['--remove']: config.remove_address(argv['<address_type>'], argv['<address>']) if not any(config_manip): config.display_addresses() if argv['exclusion']: if argv['--add']: config.add_exclusion(argv['<asset>']) if argv['--remove']: config.remove_exclusion(argv['<asset>']) if not any(config_manip): config.display_exclusions() base_currency = argv['<currency>'] or 'BTC' base_precision = argv['--precision'] or 8 min_balance = argv['--minimum'] or 0 addr_data = util.json_from_file(config.addr_data_file) addr_config = util.json_from_file(config.addr_config_file) excluded_assets = util.list_from_file(config.excluded_assets_file) P = portfolio.Portfolio(addr_data, addr_config, excluded_assets) P.filter_addr_assets(min_balance) P.retrieve_asset_prices(base_currency) if P.isempty(): print('No addresses have been added') elif argv['--itemize']: P.print_address_balances(8, base_precision) else: P.print_total_balances(8, base_precision) if __name__ == '__main__': args = docopt(__doc__, version='Crypto-Balances v1.0.0') main(args) # """ # # KNOWN BUGS # -- # # TO DO # change config file so that absolute paths to json values can be used # # """
// ---------------------------------------------------------------------------------- // // Copyright Microsoft Corporation // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // ---------------------------------------------------------------------------------- namespace Microsoft.WindowsAzure.Commands.Utilities.Scheduler.Model { public class PSJobHistoryStorageDetail: PSJobHistoryDetail { public string StorageAccountName { get; internal set; } public string StorageQueueName { get; internal set; } public string ResponseStatus { get; internal set; } public string ResponseBody { get; internal set; } } }
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""

ELVIS variables dictionaries.

Three parallel key lists used elsewhere to parse/validate ELVIS products:
scripts (``script_keys``), exposure logs (``explog_keys``) and FITS headers
(``FITS_keys``).

:History:
Created on Fri Sep 22 12:04:09 2017

:author: Ruyman Azzollini

"""

# Columns expected in an ELVIS test script.
script_keys = [
    'frames', 'program', 'test',
    'IDL', 'IDH',
    'IG1_1_T', 'IG1_2_T', 'IG1_3_T',
    'IG1_1_B', 'IG1_2_B', 'IG1_3_B',
    'IG2_T', 'IG2_B',
    'OD_1_T', 'OD_1_B', 'OD_2_T', 'OD_2_B', 'OD_3_T', 'OD_3_B',
    'RD_T', 'RD_B',
    'IPHI1', 'IPHI2', 'IPHI3', 'IPHI4',
    'rdmode', 'flushes', 'siflsh', 'siflsh_p',
    'swellw', 'swelldly', 'inisweep',
    'vstart', 'vend',
    'toi_fl', 'toi_tp', 'toi_ro', 'toi_ch',
    'chinj', 'chinj_on', 'chinj_of', 'id_wid', 'id_dly', 'chin_dly',
    's_tpump', 's_tpmod', 'v_tpump', 'v_tpmod', 's_tp_cnt', 'v_tp_cnt',
    'dwell_v', 'dwell_s',
    'exptime', 'shuttr', 'e_shuttr', 'pos_mirr', 'wave',
    'motr', 'motr_cnt', 'motr_siz',
    'source', 'operator',
    'sn_ccd1', 'sn_ccd2', 'sn_ccd3', 'sn_roe', 'sn_rpsu',
    'comments']

# Columns expected in the ELVIS exposure log.
explog_keys = [
    'ObsID', 'File_name', 'CCD', 'ROE', 'date', 'program', 'test',
    'calscrpt',
    'sn_ccd1', 'sn_ccd2', 'sn_ccd3', 'sn_roe', 'sn_rpsu',
    'BUNIT', 'operator', 'con_file', 'exptime',
    'fl_rdout', 'ci_rdout', 'IPHI',
    'vstart', 'vend', 'rdmode', 'flushes', 'siflsh', 'siflsh_p',
    'swellw', 'swelldly', 'inisweep', 'spw_clk',
    'chinj', 'chinj_on', 'chinj_of', 'id_wid', 'id_dly', 'chin_dly',
    'v_tpump', 's_tpump', 'v_tp_mod', 's_tp_mod', 'v_tp_cnt', 's_tp_cnt',
    'dwell_v', 'dwell_s',
    'toi_fl', 'toi_tp', 'toi_ro', 'toi_ch',
    'fpga_ver', 'egse_ver',
    'motr', 'motr_cnt', 'motr_siz',
    'source', 'wave', 'pos_mirr',
    # CCD temperature pairs: top (_TT) / bottom (_TB) per CCD slot.
    'R1C1_TT', 'R1C1_TB', 'R1C2_TT', 'R1C2_TB', 'R1C3_TT', 'R1C3_TB',
    'IDL', 'IDH',
    'IG1_1_T', 'IG1_2_T', 'IG1_3_T',
    'IG1_1_B', 'IG1_2_B', 'IG1_3_B',
    'IG2_T', 'IG2_B',
    'OD_1_T', 'OD_2_T', 'OD_3_T',
    'OD_1_B', 'OD_2_B', 'OD_3_B',
    'RD_T', 'RD_B']

# Keywords expected in ELVIS FITS extension headers.
FITS_keys = [
    'XTENSION', 'BITPIX', 'NAXIS', 'NAXIS1', 'NAXIS2',
    'PCOUNT', 'GCOUNT', 'BZERO', 'BSCALE', 'EXTNAME', 'BUNIT',
    'PROGRAM', 'OBJECT', 'OBSID', 'OPERATOR', 'FULLPATH', 'LAB_VER',
    'CON_FILE', 'DATE', 'EXPTIME', 'FL_RDOUT', 'CI_RDOUT', 'IPHI',
    # NOTE(review): 'chin_dly' is lower-case unlike its neighbours --
    # possibly matching the header card as actually written; confirm.
    'CHINJ', 'CHINJ_ON', 'CHINJ_OF', 'ID_WID', 'ID_DLY', 'chin_dly',
    'V_TPUMP', 'S_TPUMP', 'V_TP_MOD', 'S_TP_MOD', 'V_TP_CNT', 'S_TP_CNT',
    'DWELL_V', 'DWELL_S',
    'TOI_FL', 'TOI_TP', 'TOI_RO', 'TOI_CH',
    'SIFLSH', 'SIFLSH_P', 'FLUSHES', 'VSTART', 'VEND', 'RDMODE',
    'SWELLW', 'SWELLDLY', 'INISWEEP', 'SPW_CLK',
    'FPGA_VER', 'EGSE_VER',
    'MOTR', 'MOTR_CNT', 'MOTR_SIZ',
    'SOURCE', 'WAVE', 'POS_MIRR',
    'SN_CCD1', 'SN_CCD2', 'SN_CCD3', 'SN_ROE', 'SN_RPSU',
    'CALSCRPT', 'COMMENTS',
    # FIX: 'R1C3_TT' was listed twice; the second entry should be the
    # bottom-sensor counterpart 'R1C3_TB', matching the TT/TB pairing of
    # R1C1/R1C2 here and of all three slots in explog_keys above.
    'R1C1_TT', 'R1C1_TB', 'R1C2_TT', 'R1C2_TB', 'R1C3_TT', 'R1C3_TB',
    'IDL', 'IDH',
    'IG1_1_T', 'IG1_2_T', 'IG1_3_T',
    'IG1_1_B', 'IG1_2_B', 'IG1_3_B',
    'IG2_T', 'IG2_B',
    'OD_1_T', 'OD_2_T', 'OD_3_T',
    'OD_1_B', 'OD_2_B', 'OD_3_B',
    'RD_T', 'RD_B']
<html>
<head>
<title>Input tag parsed</title>
</head>
<body>
This page can only be reached through following the input tag's "value" attribute.
</body>
</html>
#!/usr/bin/env python2.7 # # Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """The standard window / viewport mapping used in graphics.""" import logging import geom import utils # Get logger logger = logging.getLogger("wms_maps") class WindowViewportMapping(object): """Well-known graphics window <-> viewport transformation. Many explanations on the web, one is: http://www.siggraph.org/education/materials/HyperGraph/viewing/ view2d/pwint.htm Window (here 'logical') is the projection space, viewport ('physical') is tilepixel space. """ def __init__(self, log_rect, phys_rect): """Initialization. The corners of <log_rect> and <phys_rect> have to correspond. Args: log_rect: the logical rectangle, the 'window'. phys_rect: the physical rectangle, the 'viewport' or 'device'. """ logger.debug("Initializing xform: log: %s phys: %s", str(log_rect), str(phys_rect)) self.log_rect = log_rect self.phys_rect = phys_rect # These are not necessarily positive, is why we don't use width or height. 
self._w_log = self.log_rect.SignedWidth() self._h_log = self.log_rect.SignedHeight() self._w_phys = self.phys_rect.SignedWidth() self._h_phys = self.phys_rect.SignedHeight() self._logical_to_physical_x_scale = ( phys_rect.SignedWidth() / log_rect.SignedWidth()) self._logical_to_physical_y_scale = ( phys_rect.SignedHeight() / log_rect.SignedHeight()) self._physical_x_offset = ( self._logical_to_physical_x_scale * -self.log_rect.x0 + self.phys_rect.x0) self._physical_y_offset = ( self._logical_to_physical_y_scale * -self.log_rect.y0 + self.phys_rect.y0) def LogPtToPhys(self, log_pt): """Find tilepixel space point, of <log_pt>. Args: log_pt: window point. Returns: Corresponding viewport, for us tilepixel space, point. """ utils.Assert(isinstance(log_pt, geom.Pair), "logpt is not a geom.Pair") phys_x = (log_pt.x * self._logical_to_physical_x_scale + self._physical_x_offset) phys_y = (log_pt.y * self._logical_to_physical_y_scale + self._physical_y_offset) return geom.Pair(phys_x, phys_y) def LogRectToPhys(self, log_rect): """Transforms a window rectangle -> viewport. Args: log_rect: window rect. Returns: viewport rect. """ physxy0 = self.LogPtToPhys(log_rect.xy0) physxy1 = self.LogPtToPhys(log_rect.xy1) return geom.Rect.FromLowerLeftAndUpperRight(physxy0, physxy1) def PhysPtToLog(self, phys_pt): """Transforms 'back', from a pixel to the map-projected space. Args: phys_pt: Physical point. Returns: viewport rect. """ log_x = ((phys_pt.x - self.phys_rect.x0) / self._w_phys * self._w_log + self.log_rect.x0) log_y = ((phys_pt.y - self.phys_rect.y0) / self._h_phys * self._h_log + self.log_rect.y0) return geom.Pair(log_x, log_y) def main(): phy = WindowViewportMapping((-3, 3), (-10, 10)) print phy.PhysPtToLog((-5, 5)) if __name__ == "__main__": main()
#include <cstdio>
#include <cstring>

// Counts the number of jump paths from the top-left cell (0, 0) to the
// bottom-right cell (n-1, n-1) of an n x n board.  From each cell you may
// jump board[r][c] cells down or right.  Memoised recursion; path counts
// can exceed 32 bits, hence long long.

int board[100][100];        // jump length per cell; n <= 100
long long cache[100][100];  // memo: paths from (r, c) to goal; -1 = unknown
int n;

long long solve(int row, int col);

int main()
{
    memset(cache, -1, sizeof(cache));
    scanf("%d", &n);
    for (int i = 0; i < n; i++) {
        for (int j = 0; j < n; j++) {
            scanf("%d", &board[i][j]);
        }
    }
    printf("%lld\n", solve(0, 0));
    return 0;
}

long long solve(int row, int col)
{
    // A jump that leaves the board in EITHER direction is a dead end.
    // (The original tested `row >= n && col >= n`, so e.g. row == n with
    // col < n fell through and read board[row][col] past the valid region;
    // once an index exceeded 99 that was an out-of-bounds read -- UB.)
    if (row >= n || col >= n)
        return 0;
    if (row == n - 1 && col == n - 1)
        return 1;  // reached the goal: one complete path

    long long& ret = cache[row][col];
    if (ret != -1)
        return ret;

    ret = 0;
    int jump = board[row][col];
    if (jump == 0)
        return ret;  // non-goal cell with jump 0: stuck, contributes no paths
    ret += solve(row + jump, col);  // jump down
    ret += solve(row, col + jump);  // jump right
    return ret;
}
<?php namespace App\DataTables; use App\CourseTable; use Yajra\Datatables\Services\DataTable; use Illuminate\Database\Eloquent\Builder; class CourseTablesDataTable extends DataTable { /** * Display ajax response. * * @return \Illuminate\Http\JsonResponse */ public function ajax() { return $this->datatables ->eloquent($this->query()) ->addColumn('action', 'courseTable.datatables.action') ->editColumn('user_id', 'courseTable.datatables.user-name') ->filterColumn('user_id', function ($query, $keyword) { /* @var Builder $query */ $query->whereIn('user_id', function ($query) use ($keyword) { /* @var Builder $query */ $query->select('users.id') ->from('users') ->join('course_tables', 'users.id', '=', 'course_tables.user_id') ->whereRaw('users.name LIKE ?', ['%' . $keyword . '%']); }); }) ->make(true); } /** * Get the query object to be processed by dataTables. * * @return \Illuminate\Database\Eloquent\Builder|\Illuminate\Database\Query\Builder|\Illuminate\Support\Collection */ public function query() { $user = auth()->user(); /* @var Builder $query */ $query = CourseTable::with('user')->where('public', true); return $this->applyScopes($query); } /** * Optional method if you want to use html builder. * * @return \Yajra\Datatables\Html\Builder */ public function html() { return $this->builder() ->columns($this->getColumns()) ->ajax('') ->addAction(['title' => '操作']) ->parameters($this->getBuilderParameters()) ->parameters([ 'order' => [[0, 'asc']], 'pageLength' => 50, ]); } /** * Get columns. * * @return array */ protected function getColumns() { return [ 'id' => ['title' => '#'], 'user_id' => ['title' => '擁有者'], 'name' => ['title' => '課表名稱'], ]; } /** * Get filename for export. * * @return string */ protected function filename() { return 'coursetables_' . time(); } }
# ! /usr/bin/env python
# _*_ coding:utf-8 _*_
"""
@author = lucas.wang
@create_time = 2018-01-12

Polls three SVN working copies; when a copy has received new commits the
script rebuilds the apk with gradle, archives it under /var/ftp/apk/ and
mails the team on failure.
"""
import os
import time
import smtplib
from email.mime.text import MIMEText

# Recipients of build-failure notifications.
mailto_list = ['xxoo.qin@fantasee.cn', 'xx@fantasee.cn', 'oo@fantasee.cn']
mail_host = "smtp.163.com"       # SMTP server
mail_user = "xianyin0@163.com"   # account name
mail_pass = "5213344"            # NOTE(review): hard-coded credential -- move to env/config


def send_mail(to_list, sub, content):
    """Send a plain-text notification mail.

    :param to_list: list of recipient addresses
    :param sub: subject line
    :param content: message body
    """
    me = 'xianyin0@163.com'
    # Build the message once, as UTF-8 plain text.  (The previous code
    # passed the *built-in function* `format` as the MIME subtype, then
    # re-created the message anyway, discarding the Accept-* headers.)
    msg = MIMEText(content, 'plain', 'utf-8')
    msg["Accept-Language"] = "zh-CN"
    msg["Accept-Charset"] = "utf-8"
    msg['Subject'] = sub
    msg['From'] = me
    msg['To'] = ",".join(to_list)
    try:
        server = smtplib.SMTP()
        server.connect(mail_host)
        server.login(mail_user, mail_pass)
        server.sendmail(me, to_list, msg.as_string())
        server.close()
    except Exception as e:
        print(str(e))


def logger(content):
    """Append a timestamped line to /root/log.txt."""
    date = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime())
    with open('/root/log.txt', 'a') as f:
        f.write('[%s]:%s\n' % (date, content))


def _release_name(filename):
    """Return the first four '_'-separated fields of an apk file name,
    re-joined with '_'; used as the archive zip name.  Raises IndexError
    for names with fewer fields (handled by the callers' except blocks)."""
    parts = filename.split('_', 4)
    return '%s_%s_%s_%s' % (parts[0], parts[1], parts[2], parts[3])


def yunpos2():
    """Build and archive the yunpos2 apk if its SVN copy was updated."""
    os.chdir(r'/usr/local/yunpos2/pos/yunpos')
    data = os.popen('svn up').read()
    if len(data) > 50:  # heuristic: verbose 'svn up' output => new commits
        try:
            # os.popen('gradle aR') did not wait for gradle to finish
            # before zipping; use blocking os.system like the siblings.
            os.system(r'gradle aR')
            os.chdir(r'/usr/local/yunpos2/pos/yunpos/app/build/outputs/apk/')
            names = os.popen('ls').readlines()
            # NOTE(review): historically this used the LAST listing entry
            # while possdk()/yunpos() use the first -- preserved; confirm.
            date2 = _release_name(names[-1])
            os.system(r'zip -r /var/ftp/apk/%s.zip yunpos2*.apk' % date2)
            os.system('rm -rf yunpos2*.apk')
        except Exception:
            logger('yunpos2打包失败')
            send_mail(mailto_list, "yunpos2 packaging failure", data)
    else:
        logger('yunpos2代码没有更新')


def possdk():
    """Build and archive the possdk apk if its SVN copy was updated."""
    os.chdir(r'/usr/local/yunpos2/pos/possdk')
    data = os.popen('svn up').read()
    if len(data) > 50:
        try:
            os.system(r'gradle aR')
            os.chdir(r'/usr/local/yunpos2/pos/possdk/app/build/outputs/apk')
            names = os.popen('ls').readlines()
            date2 = _release_name(names[0])
            os.system(r'zip -r /var/ftp/apk/%s.zip yunpos*.apk' % date2)
            os.system('rm -rf yunpos*.apk')
        except Exception:
            logger('possdk打包失败')
            send_mail(mailto_list, "possdk packaging failure", data)
    else:
        logger('possdk代码没有更新')


def yunpos():
    """Build and archive the yunpos apk if its SVN copy was updated."""
    os.chdir(r'/usr/local/yunpos/android/yunpos')
    data = os.popen('svn up').read()
    if len(data) > 50:
        try:
            os.system(r'gradle aR')
            os.chdir(r'/usr/local/yunpos/android/yunpos/build/outputs/apk')
            names = os.popen('ls').readlines()
            date2 = _release_name(names[0])
            os.system(r'zip -r /var/ftp/apk/%s.zip yunpos*.apk' % date2)
            os.system('rm -rf yunpos*.apk')
        except Exception:
            logger('yunpos打包失败')
            send_mail(mailto_list, "yunpos packaging failure", data)
    else:
        logger('yunpos代码没有更新')


if __name__ == '__main__':
    yunpos2()
    possdk()
    yunpos()
<!doctype html> <html> <head> <title>TAPE Example</title> <script src="/testem.js"></script> <script src="bundle.js"></script> </head> <body> </body> </html>
// Bridges the ambient global `environmentName` into the module system so
// consumers can import it instead of relying on the global directly.
declare const environmentName: string;

// Use the primitive type `string`, not the `String` wrapper-object type,
// per TypeScript convention (primitives are what string literals produce).
const env: string = environmentName;

export { env as environmentName };
# downscale cru data in a CLI way if __name__ == '__main__': import glob, os, itertools, rasterio from downscale import DeltaDownscale, Baseline, Dataset, utils, Mask from functools import partial import numpy as np import argparse # parse the commandline arguments parser = argparse.ArgumentParser( description='downscale the AR5-CMIP5 data to the AKCAN extent required by SNAP' ) parser.add_argument( "-ts", "--ts", action='store', dest='cru_ts', type=str, help="path to the cru file to use in downscaling (.nc)" ) parser.add_argument( "-cl", "--clim_path", action='store', dest='clim_path', type=str, help="path to the directory where the 12 geotiff climatology files are stored" ) parser.add_argument( "-o", "--output_path", action='store', dest='output_path', type=str, help="path to the output directory" ) parser.add_argument( "-m", "--model", action='store', dest='model', type=str, help="cmip5 model name (exact)" ) parser.add_argument( "-v", "--variable", action='store', dest='variable', type=str, help="cmip5 variable name (exact)" ) parser.add_argument( "-u", "--units", action='store', dest='units', type=str, help="string name of the units data are in" ) parser.add_argument( "-met", "--metric", action='store', dest='metric', type=str, help="string name of the metric data are in" ) parser.add_argument( "-nc", "--ncpus", action='store', dest='ncpus', type=int, help="number of cpus to use in multiprocessing" ) parser.add_argument( "-ov", "--out_varname", action='store', dest='out_varname', type=str, help="string name of output name to use instead of variable in file" ) args = parser.parse_args() # unpack args cru_ts = args.cru_ts clim_path = args.clim_path output_path = args.output_path model = args.model variable = args.variable units = args.units metric = args.metric ncpus = args.ncpus out_varname = args.out_varname # standard args clim_begin = '01-1961' clim_end = '12-1990' scenario = 'historical' project = 'cru' anom = True # write out anoms (True) or not (False) 
interp = True # interpolate across space -- Low Res # AOI MASK -- HARDWIRE -- GCLL for CRU aoi_mask_fn = '/workspace/Shared/Tech_Projects/EPSCoR_Southcentral/project_data/akcan_template/akcan_aoi_mask_GCLL.shp' # RUN 2.0 filelist = glob.glob( os.path.join( clim_path, '*.tif' ) ) filelist = [ i for i in filelist if '_14_' not in i ] # remove the GD ANNUAL _14_ file. baseline = Baseline( filelist ) # DOWNSCALE mask = rasterio.open( baseline.filelist[0] ).read_masks( 1 ) historical = Dataset( cru_ts, variable, model, scenario, project, units, metric, method='linear', ncpus=32 ) # post_downscale_function -- rounding if variable == 'pr' or variable == 'pre': # print variable rounder = np.rint downscaling_operation = 'mult' find_bounds = True fix_clim = True # make AOI_Mask at input resolution for computing 95th percentiles... if aoi_mask_fn is not None: aoi_mask = Mask( aoi_mask_fn, historical, 1, 0 ) else: aoi_mask = None else: rounder = partial( np.around, decimals=1 ) downscaling_operation = 'add' find_bounds = False fix_clim = False aoi_mask = None def round_it( arr ): return rounder( arr ) # FOR CRU WE PASS THE interp=True so we interpolate across space first when creating the Dataset() ar5 = DeltaDownscale( baseline, clim_begin, clim_end, historical, future=None, downscaling_operation=downscaling_operation, mask=mask, mask_value=0, ncpus=32, src_crs={'init':'epsg:4326'}, src_nodata=None, dst_nodata=None, post_downscale_function=round_it, varname=out_varname, modelname=None, anom=anom, interp=interp, find_bounds=find_bounds, fix_clim=fix_clim, aoi_mask=aoi_mask ) if not os.path.exists( output_path ): os.makedirs( output_path ) ar5.downscale( output_dir=output_path )
<?php namespace TYPO3\Flow\Tests\Functional\Aop\Fixtures; /* * * This script belongs to the TYPO3 Flow framework. * * * * It is free software; you can redistribute it and/or modify it under * * the terms of the GNU Lesser General Public License, either version 3 * * of the License, or (at your option) any later version. * * * * The TYPO3 project - inspiring people to share! * * */ /** * An abstract class with an abstract and a concrete method * */ abstract class AbstractClass { /** * @param $foo * @return string */ abstract public function abstractMethod($foo); /** * @param $foo * @return string */ public function concreteMethod($foo) { return "foo: $foo"; } } ?>
# This file is part of Indico. # Copyright (C) 2002 - 2021 CERN # # Indico is free software; you can redistribute it and/or # modify it under the terms of the MIT License; see the # LICENSE file for more details. from indico.modules.designer.controllers import (RHAddCategoryTemplate, RHAddEventTemplate, RHCloneCategoryTemplate, RHCloneEventTemplate, RHDeleteDesignerTemplate, RHDownloadTemplateImage, RHEditDesignerTemplate, RHGetTemplateData, RHListBacksideTemplates, RHListCategoryTemplates, RHListEventTemplates, RHToggleBadgeDefaultOnCategory, RHToggleTicketDefaultOnCategory, RHUploadBackgroundImage) from indico.util.caching import memoize from indico.web.flask.util import make_view_func from indico.web.flask.wrappers import IndicoBlueprint _bp = IndicoBlueprint('designer', __name__, template_folder='templates', virtual_template_folder='designer') @memoize def _dispatch(event_rh, category_rh): event_view = make_view_func(event_rh) categ_view = make_view_func(category_rh) def view_func(**kwargs): return categ_view(**kwargs) if kwargs['object_type'] == 'category' else event_view(**kwargs) return view_func _bp.add_url_rule('/category/<int:category_id>/manage/designer/<int:template_id>/toggle-default-ticket', 'toggle_category_default_ticket', RHToggleTicketDefaultOnCategory, methods=('POST',)) _bp.add_url_rule('/category/<int:category_id>/manage/designer/<int:template_id>/toggle-default-badge', 'toggle_category_default_badge', RHToggleBadgeDefaultOnCategory, methods=('POST',)) for object_type in ('event', 'category'): if object_type == 'category': prefix = '/category/<int:category_id>' else: prefix = '/event/<int:event_id>' prefix += '/manage/designer' _bp.add_url_rule(prefix + '/', 'template_list', _dispatch(RHListEventTemplates, RHListCategoryTemplates), defaults={'object_type': object_type}) _bp.add_url_rule(prefix + '/<int:template_id>/backsides', 'backside_template_list', RHListBacksideTemplates, defaults={'object_type': object_type}) _bp.add_url_rule(prefix + 
'/add', 'add_template', _dispatch(RHAddEventTemplate, RHAddCategoryTemplate), defaults={'object_type': object_type}, methods=('GET', 'POST')) _bp.add_url_rule(prefix + '/<int:template_id>/', 'edit_template', RHEditDesignerTemplate, defaults={'object_type': object_type}, methods=('GET', 'POST')) _bp.add_url_rule(prefix + '/<int:template_id>/', 'delete_template', RHDeleteDesignerTemplate, defaults={'object_type': object_type}, methods=('DELETE',)) _bp.add_url_rule(prefix + '/<int:template_id>/clone', 'clone_template', _dispatch(RHCloneEventTemplate, RHCloneCategoryTemplate), defaults={'object_type': object_type}, methods=('POST',)) _bp.add_url_rule(prefix + '/<int:template_id>/data', 'get_template_data', RHGetTemplateData, defaults={'object_type': object_type}) _bp.add_url_rule(prefix + '/<int:template_id>/images/<int:image_id>/<filename>', 'download_image', RHDownloadTemplateImage, defaults={'object_type': object_type}) _bp.add_url_rule(prefix + '/<int:template_id>/images', 'upload_image', RHUploadBackgroundImage, defaults={'object_type': object_type}, methods=('POST',))
<?php

namespace Noherczeg\RestExt\Providers;

/**
 * Enumeration-style container of the character-set identifiers supported
 * by the package.  Values are the lowercase IANA charset names as used in
 * HTTP Content-Type headers.  `final` so the set of constants is closed.
 */
final class Charset {

    /** Charset used when none is specified explicitly. */
    const __default = self::UTF8;

    const UTF8 = 'utf-8';
    const ISO_8859_1 = 'iso-8859-1';
    const ISO_8859_2 = 'iso-8859-2';
}
#! /usr/bin/env python # # Dependency: # pip install keyring # # Fred C. http://github.com/0x9900/ # """ Generate a passworg base on the domain name of a website, your userid on this website and a key. The first time you use this program it will as you a key. This key will be stored in your Mac keychain. Examples: ]$ ./genpassword fred www.yahoo.com yahoo.com: Password: vk61-borA-wlIu-BYSK ]$ ./genpassword fred yahoo.com yahoo.com: Password: vk61-borA-wlIu-BYSK ]$ ./genpassword fred YAHOO.COM yahoo.com: Password: vk61-borA-wlIu-BYSK """ from __future__ import print_function import getpass import os import random import sys from argparse import ArgumentParser from argparse import RawDescriptionHelpFormatter from hashlib import sha256 from urlparse import urlparse import json import keyring PASSWD_LEN = 19 ALPHABET = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz' GENPASSWORD_DAT = '~/.genpassword.dat' class IJSONEncoder(json.JSONEncoder): """Special JSON encoder capable of encoding sets""" def default(self, obj): if isinstance(obj, set): return {'__type__': 'set', 'value': list(obj)} else: return super(IJSONEncoder, self).default(obj) class IJSONDecoder(json.JSONDecoder): """Special JSON decoder capable of decoding sets encodes by IJSONEncoder""" def __init__(self): super(IJSONDecoder, self).__init__(object_hook=self.dict_to_object) def dict_to_object(self, json_obj): if '__type__' not in json_obj: return json_obj if json_obj['__type__'] == 'set': return set(json_obj['value']) return json_obj IENCODE = IJSONEncoder(indent=2).encode IDECODE = IJSONDecoder().decode def normalize_url(url): """Extract the domain name from the url, remove the leading www and return the domain name in lower case""" url = urlparse(url) url = url.netloc or url.path url = url.lower().replace('www.', '') return url def get_key(program, token='Password Generator'): """Try to find the encryption key for that token in the keyring. If the key cannot be found prompt the user. 
""" key = keyring.get_password(program, token) # the key hasn't been found in the keyring. Request for a new one. if not key: key = getpass.getpass('Encryption key: ') try: keyring.set_password(program, token, key) except keyring.errors.PasswordSetError as exp: print(exp, file=sys.stderr) return str(key) def save_pwinfo(username, url): try: pifd = open(os.path.expanduser(GENPASSWORD_DAT), 'r+') pwinfo = IDECODE(pifd.read()) except IOError: pifd = open(os.path.expanduser(GENPASSWORD_DAT), 'a') pwinfo = dict() pwinfo.setdefault(url, set()).add(username) pifd.seek(0L) pifd.write(IENCODE(pwinfo)) pifd.close() def parse_arguments(): """Parse the command arguments""" parser = ArgumentParser( description="Password generator", epilog=globals()['__doc__'], formatter_class=RawDescriptionHelpFormatter ) parser.add_argument('-i', '--interactive', action="store_true", default=False, help="Do not use key stored in the keychain") parser.add_argument('username', nargs=1, help="Site username") parser.add_argument('url', nargs=1, help="Site's domain name http://example.com/") opts = parser.parse_args() opts.username = opts.username.pop() opts.url = normalize_url(opts.url.pop()) return opts def main(): """This is where everything happens""" opts = parse_arguments() if opts.interactive: key = getpass.getpass('Encryption key: ') else: program_name = os.path.basename(sys.argv[0]) key = get_key(program_name) seed = sha256(opts.url + key + opts.username).hexdigest() random.seed(seed) charlist = [c for c in ALPHABET] random.shuffle(charlist) password = ''.join([c if i%5 else '-' for i, c in enumerate(charlist[:PASSWD_LEN], 1)]) save_pwinfo(opts.username, opts.url) if os.isatty(sys.stdout.fileno()): print("Site: {}: Password: {}".format(opts.url, password)) else: sys.stdout.write(password) sys.stdout.flush() if __name__ == '__main__': main()
using System; using System.ComponentModel; using System.IO; using OfficeDevPnP.Core; using OfficeDevPnP.Core.Entities; using OfficeDevPnP.Core.Utilities; using LanguageTemplateHash = System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>>; namespace Microsoft.SharePoint.Client { /// <summary> /// Class that deals with branding features /// </summary> public static partial class BrandingExtensions { #region TO BE DEPRECATED IN MARCH 2016 RELEASE - Long deprecation time to avoid issues /// <summary> /// Disables the Responsive UI on a Classic SharePoint Site /// </summary> /// <param name="site">The Site to disable the Responsive UI on</param> [Obsolete("Use DisableResponsiveUI(site)")] public static void DisableReponsiveUI(this Site site) { try { site.DeleteJsLink("PnPResponsiveUI"); } catch { // Swallow exception as responsive UI might not be active. } } /// <summary> /// Disables the Responsive UI on a Classic SharePoint Web /// </summary> /// <param name="web">The Web to disable the Responsive UI on</param> [Obsolete("Use DisableResponsiveUI(web)")] public static void DisableReponsiveUI(this Web web) { try { web.DeleteJsLink("PnPResponsiveUI"); } catch { // Swallow exception as responsive UI might not be active. } } #endregion } }
/*
 * This file is part of reportpp.
 *
 * reportpp is free software: you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation, either version 3 of the License, or
 * (at your option) any later version.
 *
 * reportpp is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with reportpp.
 * If not, see <http://www.gnu.org/licenses/>.
 *
 * ---
 * Copyright (C) 2014, mickey <mickey.mouse-1985@libero.it>
 */
#ifndef DATAPAGE_EXECUTOR_HPP
#define DATAPAGE_EXECUTOR_HPP

// PageExecutor (the base class) and ReportGlobals are expected to come in
// via this header -- NOTE(review): confirm, they are not visible here.
//
#include "BlockExecutor.hpp"

namespace reportpp {
namespace executors {

// Page executor for data pages.  As written it accepts every record
// unconditionally and performs no work -- NOTE(review): looks like a
// stub/placeholder implementation; confirm intended behaviour.
class DataPageExecutor: public PageExecutor {
public:

    DataPageExecutor()
    { }

    ~DataPageExecutor()
    { }

    // Always reports success; both `globals` and `record` are ignored.
    bool addRecord(ReportGlobals &globals, const std::list< std::string > &record) {
        return true;
    }
};

}} //namespace reportpp::executors

#endif /* ifndef DATAPAGE_EXECUTOR_HPP */
# Time: O(n) on average # Space: O(1) from random import randint # Quick select solution. class Solution(object): def minMoves2(self, nums): """ :type nums: List[int] :rtype: int """ def kthElement(nums, k): def PartitionAroundPivot(left, right, pivot_idx, nums): pivot_value = nums[pivot_idx] new_pivot_idx = left nums[pivot_idx], nums[right] = nums[right], nums[pivot_idx] for i in xrange(left, right): if nums[i] > pivot_value: nums[i], nums[new_pivot_idx] = nums[new_pivot_idx], nums[i] new_pivot_idx += 1 nums[right], nums[new_pivot_idx] = nums[new_pivot_idx], nums[right] return new_pivot_idx left, right = 0, len(nums) - 1 while left <= right: pivot_idx = randint(left, right) new_pivot_idx = PartitionAroundPivot(left, right, pivot_idx, nums) if new_pivot_idx == k - 1: return nums[new_pivot_idx] elif new_pivot_idx > k - 1: right = new_pivot_idx - 1 else: # new_pivot_idx < k - 1. left = new_pivot_idx + 1 median = kthElement(nums, len(nums)/2 + 1) return sum(abs(num - median) for num in nums) def minMoves22(self, nums): """ :type nums: List[int] :rtype: int """ median = sorted(nums)[len(nums) / 2] return sum(abs(num - median) for num in nums)
from django.conf.urls import url from django.contrib import admin from django.contrib.auth import get_user_model from django.contrib.auth.admin import UserAdmin from findingaids.fa_admin import views from findingaids.fa_admin.models import Archivist # Define an inline admin descriptor for Employee model # which acts a bit like a singleton class ArchivistInline(admin.StackedInline): model = Archivist can_delete = False verbose_name_plural = 'archivist' fields = ('archives', ) # patch some custom properties onto User for display in admin site User = get_user_model() def group_list(self): return ', '.join(group.name for group in self.groups.all()) group_list.short_description = 'Groups' def archive_list(self): return ', '.join(archive.label for archive in self.archivist.archives.all()) archive_list.short_description = 'Archives' def is_admin(self): return self.is_superuser is_admin.short_description = 'Admin' is_admin.boolean = True def staff_status(self): return self.is_staff staff_status.short_description = 'Staff' staff_status.boolean = True User.group_list = group_list User.archive_list = archive_list User.is_admin = is_admin User.staff_status = staff_status # Customize user admin to include archivist information class ArchivistUserAdmin(UserAdmin): inlines = (ArchivistInline, ) list_filter = ('archivist__archives', 'is_staff', 'is_superuser', 'is_active', 'groups') list_display = ('username', 'first_name', 'last_name', 'group_list', 'archive_list', 'is_active', 'staff_status', 'is_admin') def get_urls(self): return [ url(r'ldap-user/$', views.init_ldap_user, name='init-ldap-user') ] + super(ArchivistUserAdmin, self).get_urls() # Re-register UserAdmin admin.site.unregister(User) admin.site.register(User, ArchivistUserAdmin)
using System; using System.Collections.Generic; using System.Linq; using System.Text; using Cosmos.HAL; namespace Cosmos.System { public static class Global { public static readonly Cosmos.Debug.Kernel.Debugger Dbg = new Cosmos.Debug.Kernel.Debugger("System", ""); public static Console Console = new Console(null); public static bool NumLock { get { return HAL.Global.NumLock; } set { HAL.Global.NumLock = value; } } public static bool CapsLock { get { return HAL.Global.CapsLock; } set { HAL.Global.CapsLock = value; } } public static bool ScrollLock { get { return HAL.Global.ScrollLock; } set { HAL.Global.ScrollLock = value; } } public static void Init(TextScreenBase textScreen, Keyboard keyboard) { // We must init Console before calling Inits. This is part of the // "minimal" boot to allow output Global.Dbg.Send("Creating Console"); if (textScreen != null) { Console = new Console(textScreen); } Global.Dbg.Send("HW Init"); Cosmos.HAL.Global.Init(textScreen, keyboard); NumLock = false; CapsLock = false; ScrollLock = false; //Network.NetworkStack.Init(); } } }
/** * flagger_switch.js - BackBone View for Flagger on/off switch * * This file is part of Flagger, which is licensed under version 3 of the GNU * General Public License as published by the Free Software Foundation. * * You should have received a copy of the GNU General Public License * along with this program; if not, see <http://www.gnu.org/licenses/>. */ var FlaggerSwitchView = Backbone.View.extend({ el: '#flagger_switch', events: { 'click #toggle_switch': 'toggle' }, initialize: function() { this.render(); }, render: function() { if (SETTINGS.flagger_active) document.getElementById('toggle_switch').checked = true; this.handle_switch_state(); window.setTimeout(function() { $('#flagger_switch').css('opacity', 1); }, 50); return this; }, toggle: function() { SETTINGS.flagger_active = $('#toggle_switch').prop('checked'); addon_io.call('set_setting', {setting: 'flagger_active', val: SETTINGS.flagger_active}, function(data) { this.handle_switch_state(); }.bind(this)); }, handle_switch_state: function() { $('body').toggleClass('inactive', !SETTINGS.flagger_active); $('#toggle_switch').prop('checked', SETTINGS.flagger_active); if (typeof VIEWS.options == 'undefined') return; if (SETTINGS.flagger_active) VIEWS.options.show(); else VIEWS.options.hide(); } });
package com.alorma.github.sdk.services.content;

import com.alorma.github.sdk.bean.dto.request.NewContentRequest;
import com.alorma.github.sdk.bean.dto.response.NewContentResponse;
import com.alorma.github.sdk.bean.info.RepoInfo;
import com.alorma.github.sdk.services.client.GithubClient;

import retrofit.RestAdapter;
import rx.Observable;

/**
 * Retrofit-backed client that creates a new file in a GitHub repository
 * through the {@link ContentService} contents endpoint.
 */
public class NewFileClient extends GithubClient<NewContentResponse> {

    /** Request payload describing the file to create. */
    private NewContentRequest newContentRequest;
    /** Target repository (owner + name). */
    private final RepoInfo repoInfo;
    /** Repository-relative path of the file to create. */
    private final String path;

    /**
     * @param newContentRequest request body for the create-file call
     * @param repoInfo          owner/name of the target repository
     * @param path              repository-relative path for the new file
     */
    public NewFileClient(NewContentRequest newContentRequest, RepoInfo repoInfo, String path) {
        this.newContentRequest = newContentRequest;
        this.repoInfo = repoInfo;
        this.path = path;
    }

    /** Builds the {@link ContentService} and issues the create-file request. */
    @Override
    protected Observable<NewContentResponse> getApiObservable(RestAdapter restAdapter) {
        ContentService contentService = restAdapter.create(ContentService.class);
        return contentService.createFile(repoInfo.owner, repoInfo.name, path, newContentRequest);
    }
}
""" Download the VGG and deep residual model to extract image features. Version: 1.0 Contributor: Jiasen Lu """ import os import argparse import json def download_VGG(): print('Downloading VGG model from http://www.robots.ox.ac.uk/~vgg/software/very_deep/caffe/VGG_ILSVRC_19_layers.caffemodel') os.system('wget http://www.robots.ox.ac.uk/~vgg/software/very_deep/caffe/VGG_ILSVRC_19_layers.caffemodel') os.system('wget https://gist.githubusercontent.com/ksimonyan/3785162f95cd2d5fee77/raw/bb2b4fe0a9bb0669211cf3d0bc949dfdda173e9e/VGG_ILSVRC_19_layers_deploy.prototxt') def download_deep_residual(): print('Downloading deep residual model from https://d2j0dndfm35trm.cloudfront.net/resnet-200.t7') os.system('wget https://d2j0dndfm35trm.cloudfront.net/resnet-200.t7') os.system('wget https://raw.githubusercontent.com/facebook/fb.resnet.torch/master/datasets/transforms.lua') def main(params): if params['download'] == 'VGG': download_VGG() else: download_deep_residual() if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument('--download', default='VGG', help='VGG or Residual') # input json args = parser.parse_args() params = vars(args) print 'parsed input parameters:' print json.dumps(params, indent = 2) main(params)
import zmq
from zmq.utils.strtypes import cast_bytes
import threading

from . import env
from .security import Authenticator


class ServiceServer(object):
    '''ZeroMQ REP-based service server: receives a request, runs the
    callback, and sends its return value back as the reply.

    Example:

    >>> def callback(req):
    ...     return 'req = {req}'.format(req=req)
    ...
    >>> service = jps.ServiceServer(callback)
    >>> service.spin()
    '''

    def __init__(self, callback, host=None, res_port=None, use_security=False):
        if host is None:
            host = env.get_master_host()
        context = zmq.Context()
        self._socket = context.socket(zmq.REP)
        self._auth = None
        if use_security:
            # CURVE security: this side holds the server secret key.
            self._auth = Authenticator.instance(
                env.get_server_public_key_dir())
            self._auth.set_server_key(
                self._socket, env.get_server_secret_key_path())
        if res_port is None:
            res_port = env.get_res_port()
        self._socket.connect(
            'tcp://{host}:{port}'.format(host=host, port=res_port))
        self._callback = callback
        self._thread = None
        self._lock = threading.Lock()

    def spin(self, use_thread=False):
        r'''call callback for all data forever (until \C-c)

        :param use_thread: use thread for spin (do not block)
        :raises RuntimeError: if a spin thread is already running
        '''
        if use_thread:
            if self._thread is not None:
                # Fixed: was ``raise 'spin called twice'`` -- raising a bare
                # string is a TypeError on Python 3.
                raise RuntimeError('spin called twice')
            self._thread = threading.Thread(target=self._spin_internal)
            # Fixed: Thread.setDaemon() is deprecated; use the attribute.
            self._thread.daemon = True
            self._thread.start()
        else:
            self._spin_internal()

    def _spin_internal(self):
        # Serve forever; each iteration handles exactly one request.
        while True:
            self.spin_once()

    def spin_once(self):
        '''Handle a single request/reply cycle (blocking).'''
        with self._lock:
            request = self._socket.recv()
            self._socket.send(cast_bytes(self._callback(request)))

    def _stop_if_running(self):
        # Tear down the authenticator and join the spin thread, if any.
        if self._auth is not None:
            self._auth.stop()
            self._auth = None
        if self._thread is not None:
            self._thread.join(1.0)
            self._thread = None

    def close(self):
        '''Stop serving and close the underlying socket.'''
        self._stop_if_running()
        with self._lock:
            self._socket.close()

    def __del__(self):
        self._stop_if_running()


class ServiceClient(object):
    '''ZeroMQ REQ-based client for :class:`ServiceServer`.'''

    def __init__(self, host=None, req_port=None, use_security=False):
        if host is None:
            host = env.get_master_host()
        context = zmq.Context()
        self._socket = context.socket(zmq.REQ)
        self._auth = None
        if use_security:
            # CURVE security: client secret key plus the server public key.
            self._auth = Authenticator.instance(
                env.get_server_public_key_dir())
            self._auth.set_client_key(self._socket,
                                      env.get_client_secret_key_path(),
                                      env.get_server_public_key_path())
        if req_port is None:
            req_port = env.get_req_port()
        self._socket.connect(
            'tcp://{host}:{port}'.format(host=host, port=req_port))

    def call(self, request):
        '''Send ``request`` and block until the reply arrives.'''
        self._socket.send(request)
        return self._socket.recv()

    __call__ = call

    def __del__(self):
        if self._auth is not None:
            self._auth.stop()
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ /* ** File: prmem.h ** Description: API to NSPR memory management functions ** */ #ifndef prmem_h___ #define prmem_h___ #include "prtypes.h" #include <stdlib.h> PR_BEGIN_EXTERN_C #define PR_Malloc malloc #define PR_Calloc calloc #define PR_Realloc realloc #define PR_Free free /* ** Thread safe memory allocation. ** ** NOTE: pr wraps up malloc, free, calloc, realloc so they are already ** thread safe (and are not declared here - look in stdlib.h). */ /* ** PR_Malloc, PR_Calloc, PR_Realloc, and PR_Free have the same signatures ** as their libc equivalent malloc, calloc, realloc, and free, and have ** the same semantics. (Note that the argument type size_t is replaced ** by PRUint32.) Memory allocated by PR_Malloc, PR_Calloc, or PR_Realloc ** must be freed by PR_Free. */ NSPR_API(void *) PR_Malloc(size_t size); NSPR_API(void *) PR_Calloc(size_t nelem, size_t elsize); NSPR_API(void *) PR_Realloc(void *ptr, size_t size); NSPR_API(void) PR_Free(void *ptr); /* ** The following are some convenience macros defined in terms of ** PR_Malloc, PR_Calloc, PR_Realloc, and PR_Free. */ /*********************************************************************** ** FUNCTION: PR_MALLOC() ** DESCRIPTION: ** PR_NEW() allocates an untyped item of size _size from the heap. ** INPUTS: _size: size in bytes of item to be allocated ** OUTPUTS: untyped pointer to the node allocated ** RETURN: pointer to node or error returned from malloc(). 
***********************************************************************/ #define PR_MALLOC(_bytes) (PR_Malloc((_bytes))) /*********************************************************************** ** FUNCTION: PR_NEW() ** DESCRIPTION: ** PR_NEW() allocates an item of type _struct from the heap. ** INPUTS: _struct: a data type ** OUTPUTS: pointer to _struct ** RETURN: pointer to _struct or error returns from malloc(). ***********************************************************************/ #define PR_NEW(_struct) ((_struct *) PR_MALLOC(sizeof(_struct))) /*********************************************************************** ** FUNCTION: PR_REALLOC() ** DESCRIPTION: ** PR_REALLOC() re-allocates _ptr bytes from the heap as a _size ** untyped item. ** INPUTS: _ptr: pointer to node to reallocate ** _size: size of node to allocate ** OUTPUTS: pointer to node allocated ** RETURN: pointer to node allocated ***********************************************************************/ #define PR_REALLOC(_ptr, _size) (PR_Realloc((_ptr), (_size))) /*********************************************************************** ** FUNCTION: PR_CALLOC() ** DESCRIPTION: ** PR_CALLOC() allocates a _size bytes untyped item from the heap ** and sets the allocated memory to all 0x00. ** INPUTS: _size: size of node to allocate ** OUTPUTS: pointer to node allocated ** RETURN: pointer to node allocated ***********************************************************************/ #define PR_CALLOC(_size) (PR_Calloc(1, (_size))) /*********************************************************************** ** FUNCTION: PR_NEWZAP() ** DESCRIPTION: ** PR_NEWZAP() allocates an item of type _struct from the heap ** and sets the allocated memory to all 0x00. 
** INPUTS: _struct: a data type ** OUTPUTS: pointer to _struct ** RETURN: pointer to _struct ***********************************************************************/ #define PR_NEWZAP(_struct) ((_struct*)PR_Calloc(1, sizeof(_struct))) /*********************************************************************** ** FUNCTION: PR_DELETE() ** DESCRIPTION: ** PR_DELETE() unallocates an object previosly allocated via PR_NEW() ** or PR_NEWZAP() to the heap. ** INPUTS: pointer to previously allocated object ** OUTPUTS: the referenced object is returned to the heap ** RETURN: void ***********************************************************************/ #define PR_DELETE(_ptr) { PR_Free(_ptr); (_ptr) = NULL; } /*********************************************************************** ** FUNCTION: PR_FREEIF() ** DESCRIPTION: ** PR_FREEIF() conditionally unallocates an object previously allocated ** vial PR_NEW() or PR_NEWZAP(). If the pointer to the object is ** equal to zero (0), the object is not released. ** INPUTS: pointer to previously allocated object ** OUTPUTS: the referenced object is conditionally returned to the heap ** RETURN: void ***********************************************************************/ #define PR_FREEIF(_ptr) if (_ptr) PR_DELETE(_ptr) PR_END_EXTERN_C #endif /* prmem_h___ */
#!/usr/bin/env python
#
# Test cases for tournament.py
#
# NOTE: the original used Python-2-only bare `print` statements; they are
# converted to the parenthesized form, which runs on both Python 2 and 3.

from tournament import *


def test_delete_matches():
    """Matches can be deleted without error."""
    delete_matches()
    print("1. Old matches can be deleted.")


def test_delete():
    """Player records can be deleted without error."""
    delete_matches()
    delete_players()
    print("2. Player records can be deleted.")


def test_count():
    """count_players() returns numeric zero after deleting everyone."""
    delete_matches()
    delete_players()
    c = count_players()
    if c == '0':
        # Guard against implementations returning the string '0'.
        raise TypeError(
            "count_players() should return numeric zero, not string '0'.")
    if c != 0:
        raise ValueError(
            "After deleting, countPlayers should return zero.")
    print("3. After deleting, count_players() returns zero.")


def test_register():
    """Registering a single player bumps the count to one."""
    delete_matches()
    delete_players()
    register_player("Chandra Nalaar")
    c = count_players()
    if c != 1:
        raise ValueError(
            "After one player registers, count_players() should be 1.")
    print("4. After registering a player, count_players() returns 1.")


def test_register_count_delete():
    """Multiple registrations count correctly and deletion resets to zero."""
    delete_matches()
    delete_players()
    register_player("Markov Chaney")
    register_player("Joe Malik")
    register_player("Mao Tsu-hsi")
    register_player("Atlanta Hope")
    c = count_players()
    if c != 4:
        raise ValueError(
            "After registering four players, countPlayers should be 4.")
    delete_players()
    c = count_players()
    if c != 0:
        raise ValueError(
            "After deleting, countPlayers should return zero.")
    print("5. Players can be registered and deleted.")


def test_standings_before_matches():
    """Newly registered players appear in standings with zero wins/matches."""
    delete_matches()
    delete_players()
    register_player("Melpomene Murray")
    register_player("Randy Schwartz")
    standings = player_standings()
    if len(standings) < 2:
        raise ValueError(
            "Players should appear in playerStandings even before they "
            "have played any matches.")
    elif len(standings) > 2:
        raise ValueError(
            "Only registered players should appear in standings.")
    if len(standings[0]) != 4:
        raise ValueError(
            "Each playerStandings row should have four columns.")
    [(id1, name1, wins1, matches1), (id2, name2, wins2, matches2)] \
        = standings
    if matches1 != 0 or matches2 != 0 or wins1 != 0 or wins2 != 0:
        raise ValueError(
            "Newly registered players should have no matches or wins.")
    if set([name1, name2]) != set(["Melpomene Murray", "Randy Schwartz"]):
        raise ValueError(
            "Registered players' names should appear in standings, even "
            "if they have no matches played.")
    print("6. Newly registered players appear in the standings with no "
          "matches.")


def test_report_matches():
    """Reporting matches updates win/match counts for all players."""
    delete_matches()
    delete_players()
    register_player("Bruno Walton")
    register_player("Boots O'Neal")
    register_player("Cathy Burton")
    register_player("Diane Grant")
    standings = player_standings()
    [id1, id2, id3, id4] = [row[0] for row in standings]
    report_match(id1, id2)
    report_match(id3, id4)
    standings = player_standings()
    for (i, n, w, m) in standings:
        if m != 1:
            raise ValueError(
                "Each player should have one match recorded.")
        if i in (id1, id3) and w != 1:
            raise ValueError(
                "Each match winner should have one win recorded.")
        elif i in (id2, id4) and w != 0:
            raise ValueError(
                "Each match loser should have zero wins recorded.")
    print("7. After a match, players have updated standings.")


def test_pairings():
    """After one round, swiss_pairings() pairs players with equal wins."""
    delete_matches()
    delete_players()
    register_player("Twilight Sparkle")
    register_player("Fluttershy")
    register_player("Applejack")
    register_player("Pinkie Pie")
    standings = player_standings()
    [id1, id2, id3, id4] = [row[0] for row in standings]
    report_match(id1, id2)
    report_match(id3, id4)
    pairings = swiss_pairings()
    if len(pairings) != 2:
        raise ValueError(
            "For four players, swissPairings should return two pairs.")
    [(pid1, pname1, pid2, pname2), (pid3, pname3, pid4, pname4)] = \
        pairings
    # frozensets make the comparison order-independent within each pair.
    correct_pairs = set([frozenset([id1, id3]), frozenset([id2, id4])])
    actual_pairs = set([frozenset([pid1, pid2]), frozenset([pid3, pid4])])
    if correct_pairs != actual_pairs:
        raise ValueError(
            "After one match, players with one win should be paired.")
    print("8. After one match, players with one win are paired.")


if __name__ == '__main__':
    test_delete_matches()
    test_delete()
    test_count()
    test_register()
    test_register_count_delete()
    test_standings_before_matches()
    test_report_matches()
    test_pairings()
    print("Success!  All tests pass!")
from django.core.exceptions import PermissionDenied
from django.template.response import TemplateResponse
from django.test import SimpleTestCase, modify_settings, override_settings
from django.urls import path


class MiddlewareAccessingContent:
    # Minimal middleware proving that the error handler's TemplateResponse
    # is already rendered by the time middleware sees it.
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        # Response.content should be available in the middleware even with a
        # TemplateResponse-based exception response.
        assert response.content
        return response


def template_response_error_handler(request, exception=None):
    # Custom handler403 returning an *unrendered* TemplateResponse;
    # BaseHandler is expected to render it before returning.
    return TemplateResponse(request, 'test_handler.html', status=403)


def permission_denied_view(request):
    # Always raises, so every request is routed to handler403.
    raise PermissionDenied


urlpatterns = [
    path('', permission_denied_view),
]

# This module doubles as a URLconf; handler403 points at our custom handler.
handler403 = template_response_error_handler


@override_settings(ROOT_URLCONF='handlers.tests_custom_error_handlers')
@modify_settings(MIDDLEWARE={'append': 'handlers.tests_custom_error_handlers.MiddlewareAccessingContent'})
class CustomErrorHandlerTests(SimpleTestCase):

    def test_handler_renders_template_response(self):
        """
        BaseHandler should render TemplateResponse if necessary.
        """
        response = self.client.get('/')
        self.assertContains(response, 'Error handler content', status_code=403)
"""Run this test with MPI as: mpirun -np 4 python test_mpi.py """ import numpy as np import healpy as hp import pysm from pysm.nominal import models import sys try: from mpi4py import MPI except ImportError: print("Skipping MPI test as mpi4py is missing") sys.exit(0) def is_power2(num): return num != 0 and ((num & (num - 1)) == 0) def build_sky_config(pysm_model, nside, pixel_indices=None, mpi_comm=None): """Build a PySM sky configuration dict from a model string""" sky_components = [ 'synchrotron', 'dust', 'freefree', 'cmb', 'ame', ] sky_config = dict() for component_model in pysm_model.split(','): full_component_name = [ each for each in sky_components if each.startswith(component_model[0])][0] sky_config[full_component_name] = \ models(component_model, nside=nside, pixel_indices=pixel_indices, mpi_comm=mpi_comm) return sky_config def test_mpi_read(): comm = MPI.COMM_WORLD assert is_power2(comm.size), "Run with a number of MPI processes which is power of 2" nside = 64 npix = hp.nside2npix(nside) num_local_pixels = npix // comm.size if comm.size == 1: pixel_indices = None comm = None else: pixel_indices = np.arange(comm.rank * num_local_pixels, (comm.rank+1) * num_local_pixels, dtype=np.int) pysm_model = "s3,d7,f1,c1,a2" sky_config = build_sky_config(pysm_model, nside, pixel_indices, comm) sky = pysm.Sky(sky_config, mpi_comm=comm) instrument_bpass = { 'use_smoothing': False, 'nside': nside, 'add_noise': False, 'use_bandpass': True, 'channels': [(np.linspace(20, 25, 10), np.ones(10))], 'channel_names': ['channel_1'], 'output_units': 'uK_RJ', 'output_directory': './', 'output_prefix': 'test', 'noise_seed': 1234, 'pixel_indices': pixel_indices } instrument = pysm.Instrument(instrument_bpass) local_map = instrument.observe(sky, write_outputs=False) # Run PySM again locally on each process on the full map sky_config = build_sky_config(pysm_model, nside) sky = pysm.Sky(sky_config, mpi_comm=comm) instrument_bpass["pixel_indices"] = None instrument = 
pysm.Instrument(instrument_bpass) complete_map = instrument.observe(sky, write_outputs=False) if pixel_indices is None: pixel_indices = np.arange(npix) np.testing.assert_array_almost_equal( local_map[0], complete_map[0][:, :, pixel_indices]) if __name__ == "__main__": test_mpi_read()
from gluon.fileutils import read_file, write_file

# Guard: the mercurial admin UI is disabled in demo/multi-user mode and
# requires a working mercurial installation.
if DEMO_MODE or MULTI_USER_MODE:
    session.flash = T('disabled in demo mode')
    redirect(URL('default', 'site'))
if not have_mercurial:
    session.flash = T("Sorry, could not find mercurial installed")
    redirect(URL('default', 'design', args=request.args(0)))

# Default .hgignore written into newly created repositories: ignores editor
# backups, bytecode, and web2py runtime directories.
_hgignore_content = """\
syntax: glob
*~
*.pyc
*.pyo
*.bak
*.bak2
cache/*
private/*
uploads/*
databases/*
sessions/*
errors/*
"""


def hg_repo(path):
    # Open (or create) the mercurial repository at ``path``.  Uses a quiet
    # UI and defaults HGUSER so commits never prompt for a username.
    import os
    uio = ui.ui()
    uio.quiet = True
    if not os.environ.get('HGUSER') and not uio.config("ui", "username"):
        os.environ['HGUSER'] = 'web2py@localhost'
    try:
        repo = hg.repository(ui=uio, path=path)
    except:
        # NOTE(review): bare except treats *any* failure as "repository does
        # not exist yet" and creates one -- other errors are masked.
        repo = hg.repository(ui=uio, path=path, create=True)
    hgignore = os.path.join(path, '.hgignore')
    if not os.path.exists(hgignore):
        write_file(hgignore, _hgignore_content)
    return repo


def commit():
    # Controller action: commit all pending changes of the application named
    # by request.args(0), then render the file list and revision history.
    app = request.args(0)
    path = apath(app, r=request)
    repo = hg_repo(path)
    form = FORM('Comment:',
                INPUT(_name='comment', requires=IS_NOT_EMPTY()),
                INPUT(_type='submit', _value=T('Commit')))
    if form.accepts(request.vars, session):
        oldid = repo[repo.lookup('.')]
        addremove(repo)
        repo.commit(text=form.vars.comment)
        # If the tip did not move, nothing was actually committed.
        if repo[repo.lookup('.')] == oldid:
            response.flash = 'no changes'
    try:
        files = TABLE(*[TR(file) for file in repo[repo.lookup('.')].files()])
        changes = TABLE(TR(TH('revision'), TH('description')))
        for change in repo.changelog:
            ctx = repo.changectx(change)
            revision, description = ctx.rev(), ctx.description()
            changes.append(TR(A(revision,
                                _href=URL('revision', args=(app, revision))),
                              description))
    except:
        # NOTE(review): bare except silently empties the history view on any
        # changelog error.
        files = []
        changes = []
    return dict(form=form, files=files, changes=changes, repo=repo)


def revision():
    # Controller action: show a single revision (request.args(1)) of the
    # application (request.args(0)) and offer a "Revert" button that updates
    # the working copy to that revision.
    app = request.args(0)
    path = apath(app, r=request)
    repo = hg_repo(path)
    revision = request.args(1)
    ctx = repo.changectx(revision)
    form = FORM(INPUT(_type='submit', _value=T('Revert')))
    if form.accepts(request.vars):
        hg.update(repo, revision)
        session.flash = "reverted to revision %s" % ctx.rev()
        redirect(URL('default', 'design', args=app))
    return dict(
        files=ctx.files(),
        rev=str(ctx.rev()),
        desc=ctx.description(),
        form=form
    )
"""Add mods and media Revision ID: 2ef141f5132 Revises: 1fb18596264 Create Date: 2014-06-06 00:00:28.128296 """ # revision identifiers, used by Alembic. revision = '2ef141f5132' down_revision = '1fb18596264' from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.add_column('media', sa.Column('mod_id', sa.Integer(), nullable=True)) op.add_column('mod', sa.Column('approved', sa.Boolean(), nullable=True)) op.add_column('mod', sa.Column('created', sa.DateTime(), nullable=True)) op.add_column('mod', sa.Column('description', sa.Unicode(length=100000), nullable=True)) op.add_column('mod', sa.Column('donation_link', sa.String(length=128), nullable=True)) op.add_column('mod', sa.Column('external_link', sa.String(length=128), nullable=True)) op.add_column('mod', sa.Column('installation', sa.Unicode(length=100000), nullable=True)) op.add_column('mod', sa.Column('keywords', sa.String(length=256), nullable=True)) op.add_column('mod', sa.Column('license', sa.String(length=128), nullable=True)) op.add_column('mod', sa.Column('published', sa.Boolean(), nullable=True)) op.add_column('mod', sa.Column('user_id', sa.Integer(), nullable=True)) op.add_column('mod', sa.Column('votes', sa.Integer(), nullable=True)) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_column('mod', 'votes') op.drop_column('mod', 'user_id') op.drop_column('mod', 'published') op.drop_column('mod', 'license') op.drop_column('mod', 'keywords') op.drop_column('mod', 'installation') op.drop_column('mod', 'external_link') op.drop_column('mod', 'donation_link') op.drop_column('mod', 'description') op.drop_column('mod', 'created') op.drop_column('mod', 'approved') op.drop_column('media', 'mod_id') ### end Alembic commands ###
(function(){
	/**
	 * Object Bubble()
	 * Defines a plugin that takes the responsibility to manage bubble
	 * @param {Object} config
	 */
	JenScript.Bubble = function(config) {
		this.init(config);
	};
	JenScript.Model.inheritPrototype(JenScript.Bubble, JenScript.Plugin);
	JenScript.Model.addMethods(JenScript.Bubble, {

		/**
		 * Initialize Bubble
		 * Defines bubble geometry and paint attributes, falling back to
		 * defaults for any option not supplied in config.
		 * @param {Object} config
		 */
		init : function(config){
			config = config || {};
			config.priority = 100;
			this.Id = 'bubble'+JenScript.sequenceId++;
			this.center = (config.center !== undefined)?config.center : new JenScript.Point2D(0,0);
			this.radius = (config.radius !== undefined)?config.radius : 50;
			this.fillColor = (config.fillColor !== undefined)?config.fillColor : 'black';
			this.fillOpacity = (config.fillOpacity !== undefined)?config.fillOpacity : 1;
			this.strokeColor = config.strokeColor;
			this.strokeWidth = (config.strokeWidth !== undefined)?config.strokeWidth : 1;
			this.strokeOpacity = (config.strokeOpacity !== undefined)?config.strokeOpacity : 1;
		},

		/**
		 * get bubble center in user coordinate
		 * @returns bubble center
		 */
		getCenter : function(){
			return this.center;
		},

		/**
		 * set bubble center in user coordinate
		 * @param {Object} bubble center
		 */
		setCenter : function(center){
			this.center = center;
		},

		/**
		 * get bubble radius in pixel
		 * @returns bubble radius
		 */
		getRadius : function(){
			return this.radius;
		},

		/**
		 * set bubble radius in pixel
		 * @param {Number} bubble radius
		 */
		setRadius : function(radius){
			this.radius = radius;
		},

		/**
		 * equals bubble if this bubble id match with the given bubble o
		 * Fixed: the original fell through and returned undefined when the
		 * Ids differed; now always returns an explicit boolean (same
		 * truthiness for existing callers).
		 * @param {Object} o
		 * @returns {Boolean} true iff o is a Bubble with the same Id
		 */
		equals : function(o){
			if(!(o instanceof JenScript.Bubble))
				return false;
			return (o.Id === this.Id);
		}
	});
})();
'use strict';

// eslint-disable-next-line node/no-unpublished-require
const EmberApp = require('ember-cli/lib/broccoli/ember-app');
const Plugin = require('./index');
const merge = require('lodash.merge');
const debug = require('debug')('ember-css-modules:plugin-registry');
const normalizePostcssPlugins = require('../utils/normalize-postcss-plugins');

/**
 * Discovers, instantiates, and notifies ember-css-modules plugins exposed
 * by addons, and merges their configuration into the includer's options.
 */
module.exports = class PluginRegistry {
  constructor(parent) {
    this.parent = parent;
    // Lazily populated by getPlugins() on first use.
    this._plugins = null;
  }

  /**
   * Merge plugin-contributed options into a copy of the includer's options.
   * Includer options take precedence over plugin-supplied ones.
   */
  computeOptions(includerOptions) {
    let env = EmberApp.env();
    let baseOptions = merge({}, includerOptions);
    baseOptions.plugins = normalizePostcssPlugins(baseOptions.plugins);
    let pluginOptions = this._computePluginOptions(env, baseOptions);
    return merge(pluginOptions, baseOptions);
  }

  // Broadcast a lifecycle event to every plugin implementing that hook.
  notify(event) {
    for (let plugin of this.getPlugins()) {
      if (typeof plugin[event] === 'function') {
        plugin[event]();
      }
    }
  }

  // Lazily instantiate and cache the plugin list.
  getPlugins() {
    if (this._plugins === null) {
      this._plugins = this._instantiatePlugins();
    }

    return this._plugins;
  }

  _instantiatePlugins() {
    let plugins = this._discoverPlugins(
      this.parent.addons,
      'ember-css-modules-plugin'
    );

    // For addons under development, crawl the host app's available plugins for linting tools so they can be devDependencies
    if (
      typeof this.parent.isDevelopingAddon === 'function' &&
      this.parent.isDevelopingAddon()
    ) {
      let parentAddonNames = new Set(
        this.parent.addons.map((addon) => addon.name)
      );
      let hostAddons = this.parent.project.addons.filter(
        (addon) => !parentAddonNames.has(addon.name)
      );
      plugins = plugins.concat(
        this._discoverPlugins(hostAddons, 'ember-css-modules-lint-plugin')
      );
    }

    return plugins;
  }

  // Instantiate every addon in `addons` whose package.json carries `keyword`.
  _discoverPlugins(addons, keyword) {
    return addons
      .filter((addon) => this._isPlugin(addon, keyword))
      .map((addon) => this._instantiatePluginFor(addon));
  }

  _isPlugin(addon, keyword) {
    return (
      addon.pkg &&
      addon.pkg.keywords &&
      addon.pkg.keywords.indexOf(keyword) >= 0
    );
  }

  _instantiatePluginFor(addon) {
    debug('instantiating plugin %s', addon.name);
    const plugin = addon.createCssModulesPlugin(this.parent);
    if (!(plugin instanceof Plugin)) {
      // NOTE(review): only warns -- the non-Plugin value is still returned
      // and will be used by the registry.
      this.parent.ui.writeWarnLine(
        `Addon ${addon.name} did not return a Plugin instance from its createCssModulesPlugin hook`
      );
    }
    return plugin;
  }

  // Fold each plugin's config(env, baseOptions) into a single options hash.
  _computePluginOptions(env, baseOptions) {
    let options = merge({}, baseOptions);
    for (let plugin of this.getPlugins()) {
      if (plugin.config) {
        merge(options, plugin.config(env, baseOptions));
      }
    }
    return options;
  }
};
// 2000-12-19 bkoz // Copyright (C) 2000-2014 Free Software Foundation, Inc. // // This file is part of the GNU ISO C++ Library. This library is free // software; you can redistribute it and/or modify it under the // terms of the GNU General Public License as published by the // Free Software Foundation; either version 3, or (at your option) // any later version. // This library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // You should have received a copy of the GNU General Public License along // with this library; see the file COPYING3. If not see // <http://www.gnu.org/licenses/>. // 27.4.2.5 ios_base storage functions #include <sstream> #include <iostream> #include <testsuite_hooks.h> class derived : public std::ios_base { public: derived() {} }; void test03() { derived d; d.pword(0) = &d; d.iword(0) = 1; } int main(void) { __gnu_test::set_memory_limits(); test03(); return 0; }
""" The acscteforwardmodel module contains a function `acscteforwardmodel` that calls the ACSCTE forward model executable. Use this function to facilitate batch runs of the forward model. Only WFC full-frame and some 2K subarrays are currently supported. See `ACS Data Handbook <http://www.stsci.edu/hst/acs/documents/handbooks/currentDHB/>`_ for more details. For guidance on running the CTE forward model, see the Jupyter notebook `ACS CTE Forward Model Example <https://github.com/spacetelescope/acs-notebook/blob/master/acs_cte_forward_model/acs_cte_forward_model_example.ipynb>`_. .. note:: Calibration flags are controlled by primary header. Examples -------- >>> from acstools import acscteforwardmodel >>> acscteforwardmodel.acscteforwardmodel('*blc_tmp.fits') For help usage use ``exe_args=['--help']`` """ # STDLIB import os import subprocess # nosec __taskname__ = "acscteforwardmodel" __version__ = "1.0" __vdate__ = "19-Jul-2018" __all__ = ['acscteforwardmodel'] def acscteforwardmodel(input, exec_path='', time_stamps=False, verbose=False, quiet=False, single_core=False, exe_args=None): r""" Run the acscteforwardmodel.e executable as from the shell. Expect input to be ``*_blc_tmp.fits`` or ``*_flc.fits``. Output is automatically named ``*_ctefmod.fits``. Parameters ---------- input : str or list of str Input filenames in one of these formats: * a single filename ('j1234567q_blc_tmp.fits') * a Python list of filenames * a partial filename with wildcards ('\*blc_tmp.fits') * filename of an ASN table ('j12345670_asn.fits') * an at-file (``@input``) exec_path : str, optional The complete path to ACSCTE forward model executable. If not given, run ACSCTE given by 'acscteforwardmodel.e'. time_stamps : bool, optional Set to True to turn on the printing of time stamps. verbose : bool, optional Set to True for verbose output. quiet : bool, optional Set to True for quiet output. 
single_core : bool, optional CTE correction in the ACSCTE forward model will by default try to use all available CPUs on your computer. Set this to True to force the use of just one CPU. exe_args : list, optional Arbitrary arguments passed to underlying executable call. Note: Implementation uses subprocess.call and whitespace is not permitted. E.g. use exe_args=['--nThreads', '1'] """ from stsci.tools import parseinput # Optional package dependency if exec_path: if not os.path.exists(exec_path): raise OSError('Executable not found: ' + exec_path) call_list = [exec_path] else: call_list = ['acscteforwardmodel.e'] # Parse input to get list of filenames to process. # acscte.e only takes 'file1,file2,...' infiles, dummy_out = parseinput.parseinput(input) call_list.append(','.join(infiles)) if time_stamps: call_list.append('-t') if verbose: call_list.append('-v') if quiet: call_list.append('-q') if single_core: call_list.append('-1') if exe_args: call_list.extend(exe_args) subprocess.check_call(call_list) # nosec
// Fade out feedback messages after a severity-dependent delay, and make
// any ".timeshift-blink" element blink by toggling its CSS visibility.
$(document).ready(function () {
    // More severe alerts stay on screen longer before the 500ms fade.
    var fadeDelays = {
        '.feedback .alert-success': 2000,
        '.feedback .alert-info': 2500,
        '.feedback .alert-warning': 3000,
        '.feedback .alert-danger': 4500
    };
    $.each(fadeDelays, function (selector, delay) {
        $(selector).delay(delay).fadeOut(500);
    });

    // Blink by flipping visibility every 500ms; visibility (rather than
    // display) keeps the element's layout space reserved.
    $('.timeshift-blink').each(function () {
        var elem = $(this);
        setInterval(function () {
            var isHidden = elem.css('visibility') == 'hidden';
            elem.css('visibility', isHidden ? 'visible' : 'hidden');
        }, 500);
    });
});
<?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html lang="ja" xml:lang="ja"> <!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. --> <head> <meta content="text/html; charset=utf-8" http-equiv="Content-Type" /> <meta name="copyright" content="(C) Copyright 2005" /> <meta name="DC.rights.owner" content="(C) Copyright 2005" /> <meta content="public" name="security" /> <meta content="index,follow" name="Robots" /> <meta http-equiv="PICS-Label" content='(PICS-1.1 "http://www.icra.org/ratingsv02.html" l gen true r (cz 1 lz 1 nz 1 oz 1 vz 1) "http://www.rsac.org/ratingsv01.html" l gen true r (n 0 s 0 v 0 l 0) "http://www.classify.org/safesurf/" l gen true r (SS~~000 1))' /> <meta content="reference" name="DC.Type" /> <meta name="DC.Title" content="getProcedureColumnsの引数" /> <meta scheme="URI" name="DC.Relation" content="rrefjdbc15905.html" /> <meta scheme="URI" name="DC.Relation" content="rrefdmrs.html" /> <meta scheme="URI" name="DC.Relation" content="rrefgpc1.html" /> <meta scheme="URI" name="DC.Relation" content="rrefcrsrgpc1.html" /> <meta scheme="URI" name="DC.Relation" content="rrefdmdfns1.html" /> <meta content="XHTML" name="DC.Format" /> <meta content="rrefpgc1" 
name="DC.Identifier" /> <meta content="ja" name="DC.Language" /> <link href="commonltr.css" type="text/css" rel="stylesheet" /> <title>getProcedureColumnsの引数</title> </head> <body id="rrefpgc1"><a name="rrefpgc1"><!-- --></a> <h1 class="topictitle1">getProcedureColumnsの引数</h1> <div> <div class="section"><p>JDBC APIで定められたこのメソッドの引数は、以下のとおりです。</p> </div> <div class="section"> <ul> <li><em>catalog</em> <p><span>Derby</span>では、この引数には<em>null</em>を渡してください。</p> </li> <li><em>schemaPattern</em> <p>Javaの手続きはスキーマを持ちます。</p> </li> <li><em>procedureNamePattern</em> <p>手続き名のパターンを表す文字列オブジェクトです。</p> </li> <li id="rrefpgc1__i1024753"><a name="rrefpgc1__i1024753"><!-- --></a><em id="rrefpgc1__jdbc95532"><a name="rrefpgc1__jdbc95532"><!-- --></a>column-Name-Pattern</em> <p>引数名もしくは返り値の名前パターンを表す文字列オブジェクトです。この値に一致する名前が、CREATE PROCEDURE文にてJavaの手続きに定義されています。全ての引数名を見つけるためには「%」を使ってください。</p> </li> </ul> </div> </div> <div> <div class="familylinks"> <div class="parentlink"><strong>親トピック:</strong> <a href="rrefjdbc15905.html" title="">java.sql.DatabaseMetaData インターフェイス</a></div> </div> <div class="relref"><strong>関連資料</strong><br /> <div><a href="rrefdmrs.html" title="">DatabaseMetaData 結果セット</a></div> <div><a href="rrefgpc1.html" title="">java.sql.DatabaseMetaData.getProcedureColumnsメソッド</a></div> <div><a href="rrefcrsrgpc1.html" title="">getProcedureColumnsから返る結果セットにある列について</a></div> <div><a href="rrefdmdfns1.html" title="java.sql.DatabaseMetaData.getBestRowIdentifierメソッドは決められた優先順位で識別子を探します。必ずしも行を特定できる情報が見つかるわけではありません。">java.sql.DatabaseMetaData.getBestRowIdentifierメソッド</a></div> </div> </div> </body> </html>
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""
Domain Remap Middleware

Middleware that translates container and account parts of a domain to
path parameters that the proxy server understands.

container.account.storageurl/object gets translated to
container.account.storageurl/path_root/account/container/object

account.storageurl/path_root/container/object gets translated to
account.storageurl/path_root/account/container/object

Browsers can convert a host header to lowercase, so check that reseller
prefix on the account is the correct case. This is done by comparing the
items in the reseller_prefixes config option to the found prefix. If they
match except for case, the item from reseller_prefixes will be used
instead of the found reseller prefix. The reseller_prefixes list is
exclusive. If defined, any request with an account prefix not in that list
will be ignored by this middleware. reseller_prefixes defaults to 'AUTH'.

Note that this middleware requires that container names and account names
(except as described above) must be DNS-compatible. This means that the
account name created in the system and the containers created by users
cannot exceed 63 characters or have UTF-8 characters. These are
restrictions over and above what swift requires and are not explicitly
checked. Simply put, this middleware will do a best-effort attempt to
derive account and container names from elements in the domain name and
put those derived values into the URL path (leaving the Host header
unchanged).

Also note that using container sync with remapped domain names is not
advised. With container sync, you should use the true storage end points
as sync destinations.
"""

from swift.common.swob import Request, HTTPBadRequest


class DomainRemapMiddleware(object):
    """
    Domain Remap Middleware

    See above for a full description.

    :param app: The next WSGI filter or app in the paste.deploy chain.
    :param conf: The configuration dict for the middleware.
    """

    def __init__(self, app, conf):
        self.app = app
        # Ensure the configured domain starts with '.' so a simple
        # endswith() check matches only whole subdomain labels.
        self.storage_domain = conf.get('storage_domain', 'example.com')
        if self.storage_domain and self.storage_domain[0] != '.':
            self.storage_domain = '.' + self.storage_domain
        self.path_root = conf.get('path_root', 'v1').strip('/')
        prefixes = conf.get('reseller_prefixes', 'AUTH')
        self.reseller_prefixes = [x.strip() for x in prefixes.split(',')
                                  if x.strip()]
        # Lowercased copy for case-insensitive matching of browser-mangled
        # Host headers; indices line up with self.reseller_prefixes.
        self.reseller_prefixes_lower = [x.lower()
                                        for x in self.reseller_prefixes]

    def __call__(self, env, start_response):
        # Disabled entirely when no storage domain is configured.
        if not self.storage_domain:
            return self.app(env, start_response)

        if 'HTTP_HOST' in env:
            host = env['HTTP_HOST']
        else:
            host = env['SERVER_NAME']
        # Strip an optional ':port' suffix before matching the domain.
        port = ''
        if ':' in host:
            host, port = host.rsplit(':', 1)

        # Requests for other domains pass through untouched.
        if not host.endswith(self.storage_domain):
            return self.app(env, start_response)

        subdomain = host[:-len(self.storage_domain)]
        labels = subdomain.strip('.').split('.')
        if len(labels) == 2:
            container, account = labels
        elif len(labels) == 1:
            container, account = None, labels[0]
        else:
            resp = HTTPBadRequest(request=Request(env),
                                  body='Bad domain in host header',
                                  content_type='text/plain')
            return resp(env, start_response)

        if self.reseller_prefixes:
            # DNS labels cannot contain '_', so the prefix separator may
            # arrive as '-'; restore the first one.
            if '_' not in account and '-' in account:
                account = account.replace('-', '_', 1)
            found_prefix = account.split('_', 1)[0].lower()
            if found_prefix not in self.reseller_prefixes_lower:
                # Account prefix is not in config list. Bail.
                return self.app(env, start_response)
            # Normalize the prefix to the exact case from configuration.
            real_prefix = self.reseller_prefixes[
                self.reseller_prefixes_lower.index(found_prefix)]
            if not account.startswith(real_prefix):
                account = real_prefix + account[len(real_prefix):]

        request_path = env['PATH_INFO'].strip('/')
        new_parts = ['', self.path_root, account]
        if container:
            new_parts.append(container)
        # Avoid doubling the path root when the client already sent it.
        if request_path.startswith(self.path_root):
            request_path = request_path[len(self.path_root):].lstrip('/')
        if request_path:
            new_parts.append(request_path)
        env['PATH_INFO'] = '/'.join(new_parts)

        return self.app(env, start_response)


def filter_factory(global_conf, **local_conf):
    """paste.deploy entry point: build a DomainRemapMiddleware filter."""
    conf = dict(global_conf, **local_conf)

    def domain_filter(app):
        return DomainRemapMiddleware(app, conf)
    return domain_filter
#!/usr/bin/env python3
import argparse
from itertools import chain

from pipeline import Pipeline
from pipeline.cmdline import (
    get_shared_optional_arguments,
    get_shared_positional_arguments,
    logger,
)
from pipeline.io import BBBinaryRepoSink, video_generator
from pipeline.objects import Image, PipelineResult, Timestamp
from pipeline.pipeline import GeneratorProcessor, get_auto_config

from bb_binary import Repository, parse_video_fname


def process_video(args):
    """Run the BeesBook detection pipeline over one video and store the
    results in a bb_binary repository.

    ``args`` is the parsed argparse namespace; it must provide
    ``num_threads``, ``repo_output_path``, ``video_path``,
    ``timestamp_format``, ``text_root_path`` and ``progressbar``.
    """
    config = get_auto_config()

    logger.info(f"Initializing {args.num_threads} pipeline(s)")
    # One independent pipeline instance per worker thread.
    pipelines = [
        Pipeline([Image, Timestamp], [PipelineResult], **config)
        for _ in range(args.num_threads)
    ]

    logger.info(f"Loading bb_binary repository {args.repo_output_path}")
    repo = Repository(args.repo_output_path)

    # The camera id is encoded in the video filename.
    camId, _, _ = parse_video_fname(args.video_path)
    logger.info(f"Parsed camId = {camId}")
    processor = GeneratorProcessor(
        pipelines,
        lambda: BBBinaryRepoSink(repo, camId=camId),
        use_tqdm=args.progressbar,
    )

    logger.info(f"Processing video frames from {args.video_path}")
    processor(
        video_generator(args.video_path, args.timestamp_format,
                        args.text_root_path)
    )


def main():  # pragma: no cover
    """Parse command-line arguments and hand off to process_video()."""
    parser = argparse.ArgumentParser(
        prog="BeesBook pipeline",
        description="Process a video using the beesbook pipeline",
    )
    parser.add_argument("video_path", help="path of input video", type=str)
    # Arguments shared with the other pipeline entry points.
    shared_args = chain(
        get_shared_positional_arguments(), get_shared_optional_arguments()
    )
    for name, kwargs in shared_args:
        parser.add_argument(name, **kwargs)

    args = parser.parse_args()
    logger.info(f"Processing video: {args.video_path}")
    logger.info(f"Config: {args}")
    process_video(args)


if __name__ == "__main__":  # pragma: no cover
    main()
#!/usr/bin/env python3 import os import gzip import sys # Run this scrpipt under 33208_Vertebrate folder, mirroring the following site. # http://eggnog5.embl.de/download/eggnog_5.0/per_tax_level/33208/ dirname_curr = os.path.dirname(os.path.realpath(__file__)) dirname_output = 'MODtree_ENOG50.raw_alg' filename_out_base = 'MODtree_ENOG50' # Run under 33208_Metazoa with following file. filename_members = '33208_members.tsv.gz' # Directory generated from 33208_raw_algs.tar dirname_align = '33208' # Make it by grep 'BLAST_UniProt_GN' e5.sequence_aliases.tsv filename_GN = os.path.join(dirname_curr, 'MODtree_ENOG50.gene_names.tsv.gz') filename_species = os.path.join(dirname_curr, 'MODtree_species.txt') # UniProt_ID UP_taxId EN_taxId sp_code sp_name GOA_name # UP000005640 9606 9606 HUMAN homo_sapiens 25.H_sapiens.goa species_list = dict() f_species = open(filename_species, 'r') for line in f_species: if line.startswith('#'): continue tokens = line.strip().split("\t") tax_id = tokens[2] sp_code = tokens[3] species_list[tax_id] = sp_code f_species.close() sys.stderr.write('Read gene names...') gene_names = dict() # 9541.XP_005587739.1 RFX2 BLAST_KEGG_NAME BLAST_UniProt_GN RefSeq_gene f_GN = gzip.open(filename_GN, 'rt') for line in f_GN: tokens = line.strip().split("\t") seq_id = tokens[0] tmp_name = tokens[1] tax_id = seq_id.split('.')[0] if tax_id in species_list: gene_names[seq_id] = tmp_name f_GN.close() sys.stderr.write('Done.\n') f_out_members = open('%s.members.tsv' % filename_out_base, 'w') f_out_members.write('# %s\t%s\t%s\t%s\t%s\t%s\n' % ('EN_Id', 'TotalSpecies', 'TotalSeqs', 'MODtreeSpecies', 'MODtreeSeqs', 'MODtreeSpeciesList')) family2seq = dict() exclude_family = dict() f_members = open(filename_members, 'r') if filename_members.endswith('.gz'): f_members = gzip.open(filename_members, 'rt') for line in f_members: tokens = line.strip().split("\t") family_id = tokens[1] total_seqs = int(tokens[2]) total_species = int(tokens[3]) seq_list = [] sp_code_list = [] 
for tmp_id in tokens[4].split(','): tmp_tax_id = tmp_id.split('.')[0] if tmp_tax_id in species_list: sp_code_list.append(species_list[tmp_tax_id]) seq_list.append(tmp_id) count_seqs = len(seq_list) if count_seqs == 0: continue sp_code_list = sorted(list(set(sp_code_list))) count_species = len(sp_code_list) species_str = ','.join(sp_code_list) f_out_members.write('%s\t%d\t%d\t%d\t%d\t%s\n' % (family_id, total_species, total_seqs, count_species, count_seqs, species_str)) if count_seqs > 1: family2seq[family_id] = seq_list if count_seqs > 150: exclude_family[family_id] = 1 f_members.close() sys.stderr.write('Processed members.tsv.\n') f_out_combined = open('%s.combined.faa' % filename_out_base, 'w') for tmp_family_id in family2seq.keys(): tmp_filename_fa = os.path.join(dirname_align, '%s.raw_alg.faa.gz' % tmp_family_id) tmp_seq_list = dict() f_fa = gzip.open(tmp_filename_fa, 'rt') for line in f_fa: if line.startswith('>'): tmp_h = line.strip().lstrip('>') tmp_seq_list[tmp_h] = [] else: tmp_seq_list[tmp_h].append(line.strip()) f_fa.close() tmp_filename_out = os.path.join(dirname_output, '%s.raw_alg.faa' % tmp_family_id) if tmp_family_id in exclude_family: tmp_filename_out = os.path.join('excluded_%s.raw_alg.faa' % tmp_family_id) f_fa_out = open(tmp_filename_out, 'w') for tmp_seq_id in family2seq[tmp_family_id]: tmp_tax_id = tmp_seq_id.split('.')[0] if tmp_seq_id not in tmp_seq_list: sys.stderr.write('%s has no sequences. 
(%s)\n' % (tmp_seq_id, tmp_filename_fa)) continue tmp_sp_code = species_list[tmp_tax_id] tmp_name = 'NotAvail' if tmp_seq_id in gene_names: tmp_name = gene_names[tmp_seq_id] # Refine the gene name # because of DANRE name like si:ch211-151m7.6 tmp_name = tmp_name.replace(':', '_') tmp_name = tmp_name.replace(' ', '_') # because of CIOIN name like zf(cchc)-22 tmp_name = tmp_name.replace('(', '_').replace(')', '_') tmp_name = tmp_name.replace('/', '_') tmp_id = tmp_seq_id.split('.')[1] tmp_new_h = '%s|%s|%s|%s' % (tmp_name, tmp_sp_code, tmp_family_id, tmp_id) tmp_seq = ''.join(tmp_seq_list[tmp_seq_id]) tmp_new_seq = tmp_seq.replace('-', '') if tmp_family_id in exclude_family: tmp_new_h = '%s|excluded' % tmp_new_h f_fa_out.write('>%s\n%s\n' % (tmp_new_h, tmp_seq)) f_out_combined.write('>%s\n%s\n' % (tmp_new_h, tmp_new_seq)) f_fa_out.close() f_out_combined.write
/*
 * WengoPhone, a voice over Internet phone
 * Copyright (C) 2004-2007 Wengo
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
 */

#include <webcam_test.h>

#include <util/Logger.h>
#include <util/SafeDelete.h>

#include <iostream>
#include <fstream>

using namespace std;

/** Fixed capture resolution requested from the driver (QCIF). */
const unsigned DEST_W = 176;
const unsigned DEST_H = 144;

/**
 * Driver callback invoked whenever a frame is available.
 * Re-emits the frame as the Qt signal frameCaptured so conversion and
 * painting happen via the connected slot.
 */
void WebcamTest::frameCapturedEventHandler(IWebcamDriver * sender, piximage * image) {
	frameCaptured(image);
}

/**
 * Converts the captured frame to RGB32 and copies it into _image,
 * then schedules a repaint.
 */
void WebcamTest::frameCapturedSlot(piximage * image) {
	Mutex::ScopedLock lock(_mutex);

	// The RGB buffer is re-allocated every frame because the widget's
	// width()/height() may have changed since the previous frame.
	pix_free(_rgbImage);
	_rgbImage = pix_alloc(PIX_OSI_RGB32, width(), height());
	pix_convert(PIX_NO_FLAG, _rgbImage, image);

	OWSAFE_DELETE(_image);
	_image = new QImage(QSize(width(), height()), QImage::Format_ARGB32);
	// 4 bytes per pixel for RGB32/ARGB32.
	memcpy(_image->bits(), _rgbImage->data, _rgbImage->width * _rgbImage->height * 4);
	update();
}

/** Paints the most recently converted frame, if any. */
void WebcamTest::paintEvent(QPaintEvent *) {
	Mutex::ScopedLock lock(_mutex);

	QPainter painter(this);
	if (_image) {
		painter.drawImage(0, 0, *_image, 0, 0, _image->width(), _image->height());
	}
}

/**
 * Sets up the webcam driver, connects the capture signal/slot pair and
 * starts capturing if a webcam is available.
 */
WebcamTest::WebcamTest()
	: _image(NULL) {

	_driver = WebcamDriver::getInstance();

	connect(this, SIGNAL(frameCaptured(piximage *)), SLOT(frameCapturedSlot(piximage*)));

	_rgbImage = pix_alloc(PIX_OSI_RGB32, DEST_W, DEST_H);

	LOG_INFO("Device list:");
	StringList deviceList = _driver->getDeviceList();
	// FIX: dropped the 'register' storage class, deprecated since C++11
	// and removed in C++17; it was a no-op on modern compilers anyway.
	for (unsigned i = 0; i < deviceList.size(); i++) {
		LOG_INFO("- " + deviceList[i]);
	}

	string device = _driver->getDefaultDevice();
	string title = "Webcam test: using " + device;
	LOG_INFO(title);

	_driver->frameCapturedEvent += boost::bind(&WebcamTest::frameCapturedEventHandler, this, _1, _2);
	_driver->setDevice(device);
	_driver->setPalette(PIX_OSI_YUV420P);
	_driver->setResolution(DEST_W, DEST_H);

	setWindowTitle(title.c_str());
	cout << "Using width: " << _driver->getWidth() << ", height: " << _driver->getHeight() << " FPS: " << _driver->getFPS() << endl;
	show();

	if (_driver->isOpen()) {
		cout << "**Starting capture..." << endl;
		_driver->startCapture();
	} else {
		QMessageBox::critical(0, "Webcam test", "No webcam found");
	}
}

WebcamTest::~WebcamTest() {
	pix_free(_rgbImage);
	OWSAFE_DELETE(_image);
	// NOTE(review): _driver comes from WebcamDriver::getInstance(); deleting
	// a singleton here looks suspicious but is kept for behavior parity.
	OWSAFE_DELETE(_driver);
}

int main(int argc, char **argv) {
	QApplication app(argc, argv);
	// Intentionally not freed: the widget lives for the whole application run.
	new WebcamTest();
	return app.exec();
}
# -*- coding: utf-8 -*-

###############################################################################
#
# Insert
# Inserts a permission for a file.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################

from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution

import json


class Insert(Choreography):
    """Choreo that inserts a permission for a Google Drive file."""

    def __init__(self, temboo_session):
        """Create a new Insert Choreo bound to `temboo_session`, which must
        contain a valid set of Temboo credentials."""
        super(Insert, self).__init__(
            temboo_session, '/Library/Google/Drive/Permissions/Insert')

    def new_input_set(self):
        return InsertInputSet()

    def _make_result_set(self, result, path):
        return InsertResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        return InsertChoreographyExecution(session, exec_id, path)


class InsertInputSet(InputSet):
    """Input parameters for the Insert Choreo; one setter per input.
    Setters are grouped alphabetically."""

    def set_AccessToken(self, value):
        """(optional, string) A valid OAuth2 access token. Required unless
        ClientID, ClientSecret and RefreshToken are supplied to generate
        a new one."""
        super(InsertInputSet, self)._set_input('AccessToken', value)

    def set_ClientID(self, value):
        """(conditional, string) The Client ID provided by Google. Required
        unless a valid AccessToken is provided."""
        super(InsertInputSet, self)._set_input('ClientID', value)

    def set_ClientSecret(self, value):
        """(conditional, string) The Client Secret provided by Google.
        Required unless a valid AccessToken is provided."""
        super(InsertInputSet, self)._set_input('ClientSecret', value)

    def set_Fields(self, value):
        """(optional, string) Selector restricting which fields appear in
        the response."""
        super(InsertInputSet, self)._set_input('Fields', value)

    def set_FileID(self, value):
        """(required, string) The ID of the file."""
        super(InsertInputSet, self)._set_input('FileID', value)

    def set_RefreshToken(self, value):
        """(conditional, string) OAuth refresh token used to mint a new
        access token when the original is expired. Required unless a valid
        AccessToken is provided."""
        super(InsertInputSet, self)._set_input('RefreshToken', value)

    def set_RequestBody(self, value):
        """(required, json) JSON body for the permissions resource; must
        contain keys for role, type, and value. See Google's documentation
        for formatting examples."""
        super(InsertInputSet, self)._set_input('RequestBody', value)

    def set_SendNotificationEmails(self, value):
        """(optional, boolean) Whether to send notification emails.
        Default: true."""
        super(InsertInputSet, self)._set_input('SendNotificationEmails', value)


class InsertResultSet(ResultSet):
    """Results returned by an Insert Choreo execution."""

    def getJSONFromString(self, str):
        return json.loads(str)

    def get_NewAccessToken(self):
        """(string) A new AccessToken, present when RefreshToken was used."""
        return self._output.get('NewAccessToken', None)

    def get_Response(self):
        """(json) The response from Google."""
        return self._output.get('Response', None)


class InsertChoreographyExecution(ChoreographyExecution):

    def _make_result_set(self, response, path):
        return InsertResultSet(response, path)
# -*- coding: utf-8 -*-
"""
/***************************************************************************
 IsogeoDockWidget
                                 A QGIS plugin
 Isogeo search engine within QGIS
                             -------------------
        begin                : 2016-07-22
        git sha              : $Format:%H$
        copyright            : (C) 2016 by Isogeo, Theo Sinatti, GeoJulien
        email                : projets+qgis@isogeo.fr
 ***************************************************************************/

/***************************************************************************
 *                                                                         *
 *   This program is free software; you can redistribute it and/or modify  *
 *   it under the terms of the GNU General Public License as published by  *
 *   the Free Software Foundation; either version 2 of the License, or     *
 *   (at your option) any later version.                                   *
 *                                                                         *
 ***************************************************************************/
"""

import os

from qgis.PyQt import uic
from qgis.PyQt.QtWidgets import QDockWidget
from qgis.PyQt.QtCore import pyqtSignal

# Load the widget's layout from the Qt Designer .ui file next to this module.
FORM_CLASS, _ = uic.loadUiType(
    os.path.join(os.path.dirname(__file__), "isogeo_dockwidget_base.ui")
)


class IsogeoDockWidget(QDockWidget, FORM_CLASS):
    """Main dock widget of the Isogeo search plugin."""

    # Emitted just before the dock widget closes so the plugin can clean up.
    closingPlugin = pyqtSignal()

    def __init__(self, parent=None):
        """Build the widget and instantiate the Designer UI.

        After setupUi(), every Designer object is reachable as
        self.<objectname> and auto-connect slots work as documented at
        http://qt-project.org/doc/qt-4.8/designer-using-a-ui-file.html
        #widgets-and-dialogs-with-auto-connect
        """
        super(IsogeoDockWidget, self).__init__(parent)
        self.setupUi(self)

    def closeEvent(self, event):
        """Notify listeners that the plugin is closing, then accept."""
        self.closingPlugin.emit()
        event.accept()
// WordPress admin dashboard behaviors: welcome panel toggling,
// ajax-populated dashboard widgets, and the QuickPress draft form.
var ajaxWidgets, ajaxPopulateWidgets, quickPressLoad;

jQuery(document).ready( function($) {
	/* Dashboard Welcome Panel */
	var welcomePanel = $('#welcome-panel'),
		welcomePanelHide = $('#wp_welcome_panel-hide'),
		// Persist the panel's visibility server-side via admin-ajax.
		updateWelcomePanel = function( visible ) {
			$.post( ajaxurl, {
				action: 'update-welcome-panel',
				visible: visible,
				welcomepanelnonce: $('#welcomepanelnonce').val()
			});
		};

	// Keep the panel in sync with the screen-options checkbox on load.
	if ( welcomePanel.hasClass('hidden') && welcomePanelHide.prop('checked') )
		welcomePanel.removeClass('hidden');

	$('.welcome-panel-close, .welcome-panel-dismiss a', welcomePanel).click( function(e) {
		e.preventDefault();
		welcomePanel.addClass('hidden');
		updateWelcomePanel( 0 );
		$('#wp_welcome_panel-hide').prop('checked', false);
	});

	welcomePanelHide.click( function() {
		welcomePanel.toggleClass('hidden', ! this.checked );
		updateWelcomePanel( this.checked ? 1 : 0 );
	});

	// These widgets are sometimes populated via ajax
	ajaxWidgets = [
		'dashboard_incoming_links',
		'dashboard_primary',
		'dashboard_secondary',
		'dashboard_plugins'
	];

	// Load widget content over ajax. With no argument, loads every widget
	// in ajaxWidgets (staggered by 500ms each); with a widget id, loads
	// just that one immediately.
	ajaxPopulateWidgets = function(el) {
		// i: stagger index; id: widget element id. Only fetches when the
		// widget is visible and still shows its loading placeholder.
		function show(i, id) {
			var p, e = $('#' + id + ' div.inside:visible').find('.widget-loading');
			if ( e.length ) {
				p = e.parent();
				setTimeout( function(){
					p.load( ajaxurl + '?action=dashboard-widgets&widget=' + id, '', function() {
						p.hide().slideDown('normal', function(){
							$(this).css('display', '');
						});
					});
				}, i * 500 );
			}
		}

		if ( el ) {
			el = el.toString();
			if ( $.inArray(el, ajaxWidgets) != -1 )
				show(0, el);
		} else {
			$.each( ajaxWidgets, show );
		}
	};
	ajaxPopulateWidgets();

	// Re-populate a widget when its postbox is re-opened.
	postboxes.add_postbox_toggles(pagenow, { pbshow: ajaxPopulateWidgets } );

	/* QuickPress */
	// (Re)binds the QuickPress form submit handler; called again after each
	// ajax refresh because the form markup is replaced wholesale.
	quickPressLoad = function() {
		var act = $('#quickpost-action'), t;

		t = $('#quick-press').submit( function() {
			// Show the spinner and lock the buttons while submitting.
			$('#dashboard_quick_press #publishing-action img.waiting').css('visibility', 'visible');
			$('#quick-press .submit input[type="submit"], #quick-press .submit input[type="reset"]').prop('disabled', true);

			if ( 'post' == act.val() ) {
				act.val( 'post-quickpress-publish' );
			}

			// Submit via ajax and replace the form with the server response.
			$('#dashboard_quick_press div.inside').load( t.attr( 'action' ), t.serializeArray(), function() {
				$('#dashboard_quick_press #publishing-action img.waiting').css('visibility', 'hidden');
				$('#quick-press .submit input[type="submit"], #quick-press .submit input[type="reset"]').prop('disabled', false);
				// Move any returned draft entries into the Recent Drafts
				// widget, then drop the temporary list from the response.
				$('#dashboard_quick_press ul').next('p').remove();
				$('#dashboard_quick_press ul').find('li').each( function() {
					$('#dashboard_recent_drafts ul').prepend( this );
				} ).end().remove();
				// Rebind handlers on the freshly inserted form markup.
				quickPressLoad();
			} );
			// Prevent the browser's normal (non-ajax) form submission.
			return false;
		} );

		$('#publish').click( function() { act.val( 'post-quickpress-publish' ); } );
	};
	quickPressLoad();
} );
/*
 * c64dtvmemrom.c -- C64 DTV ROM access.
 *
 * Written by
 *  M.Kiesel <mayne@users.sourceforge.net>
 * Based on code by
 *  Andreas Boose <viceteam@t-online.de>
 *
 * This file is part of VICE, the Versatile Commodore Emulator.
 * See README for copyright notice.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software
 * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
 * 02111-1307  USA.
 *
 */

#include "vice.h"

#include "c64mem.h"
#include "c64memrom.h"
#include "types.h"
#include "c64dtvmem.h"
#include "c64dtvflash.h"

/* These are read directly from flash in the DTV emulation and not used. */
#ifdef USE_EMBEDDED
#include "c64basic.h"
#include "c64kernal.h"
#else
BYTE c64memrom_basic64_rom[C64_BASIC_ROM_SIZE];
BYTE c64memrom_kernal64_rom[C64_KERNAL_ROM_SIZE];
#endif

BYTE c64memrom_kernal64_trap_rom[C64_KERNAL_ROM_SIZE];

/* Read a kernal byte through the DTV memory mapper.  memmapper[0] selects
   the backing store: the low 5 bits pick a 64KiB bank (hence the <<16),
   and bits 6-7 select flash (0) versus RAM (non-zero).
   NOTE(review): bit semantics inferred from the shifts below -- confirm
   against the DTV hardware documentation. */
BYTE c64memrom_kernal64_read(WORD addr)
{
    int mapping = c64dtvmem_memmapper[0];
    int paddr = ((mapping & 0x1f) << 16) + addr;
    if ((mapping >> 6)==0) return c64dtvflash_read(paddr); else return mem_ram[paddr];
}

/* Write a kernal byte via the same mapping as the read above; flash writes
   bypass the flash state machine (store_direct). */
static void c64memrom_kernal64_store(WORD addr, BYTE value)
{
    int mapping = c64dtvmem_memmapper[0];
    int paddr = ((mapping & 0x1f) << 16) + addr;
    if ((mapping >> 6)==0) c64dtvflash_store_direct(paddr, value); else mem_ram[paddr] = value;
}

/* Read a basic byte; identical scheme to the kernal read but driven by
   memmapper[1]. */
BYTE c64memrom_basic64_read(WORD addr)
{
    int mapping = c64dtvmem_memmapper[1];
    int paddr = ((mapping & 0x1f) << 16) + addr;
    if ((mapping >> 6)==0) return c64dtvflash_read(paddr); else return mem_ram[paddr];
}

/*
static void c64memrom_basic64_store(WORD addr, BYTE value)
{
}
*/

/* We don't use trap_rom in the DTV emulation. Traps are installed in */
/* flash/RAM directly and temporarily removed when accessing $d10x.   */

/* Trap read: only the kernal region ($e000-$ffff) is serviced; any other
   address yields 0. */
BYTE c64memrom_trap_read(WORD addr)
{
    switch (addr & 0xf000) {
      case 0xe000:
      case 0xf000:
        return c64memrom_kernal64_read(addr);
        break;
    }

    return 0;
}

/* Trap write: mirrored behavior of trap read -- writes outside the kernal
   region are silently ignored. */
void c64memrom_trap_store(WORD addr, BYTE value)
{
    switch (addr & 0xf000) {
      case 0xe000:
      case 0xf000:
        c64memrom_kernal64_store(addr, value);
        break;
    }
}

/* Raw ROM read straight from flash for the ROM windows (BASIC at
   $a000-$bfff, chargen at $d000, kernal at $e000-$ffff); 0 elsewhere. */
BYTE c64memrom_rom64_read(WORD addr)
{
    switch (addr & 0xf000) {
      case 0xa000:
      case 0xb000:
      case 0xd000:
      case 0xe000:
      case 0xf000:
        return c64dtvflash_read(addr);
    }

    return 0;
}

/* ROM writes are ignored on the DTV -- intentional no-op. */
void c64memrom_rom64_store(WORD addr, BYTE value)
{
    return;
}
from euler_funcs import divisor_list
from itertools import takewhile, chain
from functools import reduce
from operator import mul


def prod(L):
    """Product of the numbers in L (1 for an empty iterable)."""
    # Converted from a lambda assignment to a def (PEP 8 idiom).
    return reduce(mul, L, 1)


def pair_solutions(k):
    """Return the [a, b] pairs with a*b - (a+b) == k - 2, i.e. the
    two-factor decompositions derived from divisors of k-1, sorted by
    product."""
    dvs = chain([1], takewhile(lambda e: e <= (k - 1) // 2,
                               divisor_list(k - 1)))
    pairs = set((1 + d, 1 + (k - 1) // d) for d in dvs)
    pairs = [list(e) for e in pairs]
    pairs.sort(key=lambda e: e[0] * e[1])
    return pairs


def factorizations(n):
    """Yield every multiplicative decomposition of n into factors >= 2,
    in all orderings with a non-decreasing leading factor (duplicates
    possible; see uniq_factorizations)."""
    if n == 1:
        return
    for d in divisor_list(n):
        nd = n // d
        if d > nd:
            break
        yield [d, nd]
        for factorization in factorizations(nd):
            yield [d] + factorization


def uniq_factorizations(n):
    """Return the set of sorted factor tuples of n (each factor >= 2)."""
    uniq = set()
    for fact in factorizations(n):
        fact.sort()
        uniq.add(tuple(fact))
    return uniq


def k_value(n=None, l=None):
    """Number of ones n1 needed so that n1 * prod(l) == n1 + sum(l) + ...,
    i.e. n - sum(l) where n defaults to prod(l).

    Returns -1 for an empty/missing factor list.

    FIX: the default for `l` was a mutable list literal (`l=[]`);
    replaced with None. Behavior is unchanged (l was never mutated) but
    the shared-mutable-default hazard is gone.
    """
    if not l:
        return -1
    if n is None:
        n = prod(l)
    return n - sum(l)


def list_k_values(n):
    """Print every product-sum identity derivable from n's factorizations."""
    uniq = uniq_factorizations(n)
    for fact in uniq:
        kv = k_value(n, fact)
        print("k={}: 1^{} * {} = 1*{} + {}".format(len(fact) + kv, kv,
                                                   "*".join(map(str, fact)),
                                                   kv,
                                                   "+".join(map(str, fact))))


def doit(n):
    """Map each reachable k to the minimal product-sum number v < n."""
    cache = {}
    for v in range(1, n):
        for k in get_ks(v):
            # cache.get(k, v) defaults to v, so the first v seen for a
            # given k is stored and only smaller values replace it.
            if v <= cache.get(k, v):
                cache[k] = v
    return cache


def get_ks(k):
    """Yield every set size (factors plus padding ones) for which k is a
    product-sum number."""
    for fact in uniq_factorizations(k):
        kv = k_value(k, fact)
        yield len(fact) + kv


def convert_to_list(d):
    """Turn a dict into a list of (key, value) pairs sorted by key."""
    # d type: Dict[int, int]
    return list(sorted(((k, v) for k, v in d.items()), key=lambda e: e[0]))


def verify(n, t):
    """Return the first (factorization, k_value, i) for i < n whose total
    set size equals t, or None.

    FIX: previously called k_value(fact), binding the factor list to the
    `n` parameter and leaving `l` empty, so k_value always returned -1.
    The factorization must be passed as the factor-list argument.
    """
    for i in range(4, n):
        for fact in uniq_factorizations(i):
            kv = k_value(i, fact)
            if kv + len(fact) == t:
                return (fact, kv, i)


def main():
    it = doit(13000)
    psn = list(zip(*convert_to_list(it)))[1]
    print(sum(set(psn[:11998])))


if __name__ == '__main__':
    main()
#!/usr/bin/env python3
# *-* coding:utf-8 *-*
"""
:mod:`lab_json` -- JSON to YAML and back again
=========================================

LAB_JSON

Learning Objective: Learn to navigate a JSON file and convert to a python
object. ::

 a. Create a script that expects 3 command line arguments:
        -j or -y, json_filename, yaml_filename
    The first argument is -j or -y based on whether to convert from
    JSON to YAML (-j) or YAML to JSON (-y)
    The second argument is the name of the json file to parse or save to
    The third argument is the name of the yaml file to parse or save to
 b. Based on the -y/-j selection, parse the contents of the input file
    using the appropriate library.
 c. Using the other library, save the parsed object to the output filename
 d. Test your script using the json and yml files in the data directory.
 e. If you have time, create your own JSON and YAML files and translate
    between the formats.
"""
import json
import sys

import yaml

USAGE = 'Usage: {} -j/-y <json_filename> <yaml_filename>'

# Require the option flag plus the two filenames.
if len(sys.argv) < 4:
    print(USAGE.format(sys.argv[0]))
    raise SystemExit(1)

option, json_fname, yaml_fname = sys.argv[1:4]

if option == '-j':
    json_to_yaml = True
elif option == '-y':
    json_to_yaml = False
else:
    print('Invalid option')
    print(USAGE.format(sys.argv[0]))
    raise SystemExit(1)

if json_to_yaml:
    # Translate json to yaml.
    with open(json_fname, 'r') as infile:
        data = json.load(infile)
    with open(yaml_fname, 'w') as outfile:
        yaml.dump(data, outfile, default_flow_style=False)
else:
    # Translate yaml to json.
    with open(yaml_fname, 'r') as infile, open(json_fname, 'w') as outfile:
        json.dump(yaml.safe_load(infile), outfile)
import errno
import json
import hashlib
import psutil

from pandaharvester.commit_timestamp import timestamp as commitTimestamp
from pandaharvester.panda_pkg_info import release_version as releaseVersion


class harvesterPackageInfo(object):
    """Snapshot of the installed harvester package (commit timestamp and
    release version), with change detection against a local info file."""

    # Attribute names persisted to the local info file.
    _attributes = ('commit_info', 'version', 'info_digest')

    def __init__(self, local_info_file):
        # Path of the JSON file recording the last-seen package info.
        self.local_info_file = local_info_file
        self.commit_info = commitTimestamp
        self.version = releaseVersion

    @staticmethod
    def _get_hash(data):
        # md5 hex digest of the string form of `data`.
        digest = hashlib.md5()
        digest.update(str(data).encode('utf-8'))
        return digest.hexdigest()

    @property
    def info_digest(self):
        """Digest identifying the current commit info."""
        return self._get_hash(self.commit_info)

    @property
    def _local_info_dict(self):
        """Parsed content of the local info file; {} when the file is
        missing. Other I/O errors propagate."""
        try:
            with open(self.local_info_file, 'r') as info_file:
                return json.load(info_file)
        except IOError as exc:
            if exc.errno != errno.ENOENT:
                raise
            return {}

    def renew_local_info(self):
        """Persist the current package attributes to the local info file."""
        snapshot = {attr: getattr(self, attr) for attr in self._attributes}
        with open(self.local_info_file, 'w') as info_file:
            json.dump(snapshot, info_file)

    @property
    def package_changed(self):
        """True when the recorded digest differs from the current one."""
        return self.info_digest != self._local_info_dict.get('info_digest')
# Copyright (c) 2017, Arista Networks, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of Arista Networks nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL ARISTA NETWORKS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
# BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
# OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
__metaclass__ = type

import re


def range_search(value, range_string):
    """ Returns true if value is found in the list of ranges specified.

    Args:
        value (int): an integer value to be searched for (numeric strings
            are accepted and converted).
        range_string (str): a string representation of a list of ranges in
            the format "2-5,7,9-11,20-22,44".

    Returns:
        True if the value is included in the range string, or False
        otherwise if it is not.

    Raises:
        ValueError: if range_string is improperly formatted.
    """
    tpl = re.compile(r'^(\d+)\-(\d+)$')
    sgl = re.compile(r'^\d+$')

    ranges = []
    for item in range_string.split(','):
        this_tpl = tpl.match(item)
        if this_tpl:
            # Bug fix: regex groups are strings, so the containment test
            # below used to compare str against int (TypeError on Python 3,
            # always False on Python 2). Compare numerically instead.
            ranges.append((int(this_tpl.group(1)), int(this_tpl.group(2))))
        else:
            this_sgl = sgl.match(item)
            if this_sgl:
                # A single number is a degenerate range n-n.
                bound = int(this_sgl.group(0))
                ranges.append((bound, bound))
            else:
                raise ValueError("filter_plugin/range.py: improperly formatted "
                                 "range string passed in - %s" % range_string)

    value = int(value)
    for (lower, upper) in ranges:
        if lower <= value <= upper:
            return True
    return False


class FilterModule(object):
    """Exposes range_search as an Ansible filter plugin."""

    def filters(self):
        return {
            'range_search': range_search,
        }
import os

import simplejson
from behave import given, when, then


def caption_add(caption_file_and_name, caption_name, caption_language, entry_id, kid, app):
    """POST a caption file to the add_caption endpoint and return its id.

    caption_file_and_name is a (file object, filename) pair as expected by
    the test client's multipart upload. Asserts the service reports success
    and returns a caption_id.
    """
    resp = app.post('/service/add_caption/',
                    data={'entry_id': entry_id,
                          'file': caption_file_and_name,
                          'kaltura_id': kid,
                          'name': caption_name,
                          'language': caption_language})
    resp_json = simplejson.loads(resp.data)
    assert resp_json.get('success'), 'Failed while trying to add caption'
    assert resp_json.get('caption_id'), 'Response must contain caption_id'
    return resp_json['caption_id']


@given(u'a caption file at path {path}')
def given_caption_file(context, path):
    # Open in binary mode; closed later by upload_caption_to_file.
    context.caption_file = open(path, 'rb')
    context.caption_file_name = os.path.basename(path)


@when(u'this caption file is applied to file')
def upload_caption_to_file(context):
    # Upload with fixed test metadata; remember the id for later steps.
    context.caption_id = caption_add(
        (context.caption_file, context.caption_file_name),
        "testcaption", "French",
        context.entry_id, context.kaltura_id, context.app
    )
    context.caption_file.close()


@when(u'caption list is requested')
def request_caption_list(context):
    # The list endpoint is keyed by "<kaltura_id>:<entry_id>".
    r = context.app.get('/service/list_captions/?id=%s:%s'
                        % (context.kaltura_id, context.entry_id))
    context.captions_list = simplejson.loads(r.data)


@then(u'the caption list contains uploaded caption')
def caption_list_includes_new_caption(context):
    # Derive the expected stored format from the uploaded file's extension.
    cap_format = os.path.splitext(context.caption_file_name)[-1].lstrip('.')
    # NOTE(review): anything that is not srt appears to be stored as dfxp --
    # confirm the service's supported caption formats.
    cap_format = 'srt' if cap_format == 'srt' else 'dfxp'
    assert context.captions_list[0]['language'] == "French", \
        "expected language 'French', got %s" % context.captions_list[0]['language']
    assert context.captions_list[0]['name'] == "testcaption", \
        "expected name 'testcaption', got %s" % context.captions_list[0]['name']
    assert context.captions_list[0]['format'] == cap_format, \
        "expected format '%s', got %s" % (cap_format, context.captions_list[0]['format'])


@given(u'the file has captions')
def given_file_has_captions(context):
    # it SHOULD, since it's the same file as last scenario.
    r = context.app.get('/service/list_captions/?id=%s:%s'
                        % (context.kaltura_id, context.entry_id))
    context.captions_list = simplejson.loads(r.data)
    assert context.captions_list
    # Remember the first caption; update/delete steps operate on it.
    context.caption = context.captions_list[0]


@when(u'a caption is updated')
def request_update_caption(context):
    # Rename the caption and switch its language in one request.
    r = context.app.get('/service/update_caption/?kaltura_id=%s&caption_id=%s%s'
                        % (context.kaltura_id, context.caption['id'],
                           '&name=foobar&language=Malay'))
    assert simplejson.loads(r.data)['success'], "Update failed"


@then(u'the caption list contains updated details')
def caption_list_contains_updated_caption(context):
    # Re-fetch the list and verify the new name/language are reflected.
    r = context.app.get('/service/list_captions/?id=%s:%s'
                        % (context.kaltura_id, context.entry_id))
    context.captions_list = simplejson.loads(r.data)
    assert context.captions_list
    context.caption = context.captions_list[0]
    assert context.captions_list[0]['language'] == "Malay", \
        "expected language 'Malay', got %s" % context.captions_list[0]['language']
    assert context.captions_list[0]['name'] == "foobar", \
        "expected name 'foobar', got %s" % context.captions_list[0]['name']


@when(u'a caption is deleted')
def request_caption_delete(context):
    r = context.app.get('/service/remove_caption/?kaltura_id=%s&caption_id=%s'
                        % (context.kaltura_id, context.caption['id']))
    assert simplejson.loads(r.data)['success'], "Delete failed"


@then(u'the caption list no longer contains it')
def step_impl(context):
    # Re-fetch and confirm the deleted caption id is gone.
    r = context.app.get('/service/list_captions/?id=%s:%s'
                        % (context.kaltura_id, context.entry_id))
    captions_list = simplejson.loads(r.data)
    caption_ids = [x['id'] for x in captions_list]
    assert context.caption['id'] not in caption_ids, \
        "caption id still in caption list"
# -*- coding: utf-8 -*-

# Define here the models for your spider middleware
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/spider-middleware.html

from scrapy import signals


class CnnSpiderMiddleware(object):
    """Spider middleware scaffold for the CNN crawler.

    Not all methods need to be defined. If a method is not defined,
    scrapy acts as if the spider middleware does not modify the
    passed objects.
    """

    @classmethod
    def from_crawler(cls, crawler):
        # This method is used by Scrapy to create your spiders.
        s = cls()
        crawler.signals.connect(s.spider_opened, signal=signals.spider_opened)
        return s

    # Bug fix: the four hooks below were missing the ``self`` parameter.
    # Scrapy calls them as instance methods, so the response/result would
    # have been bound as ``self`` and every call would raise TypeError.

    def process_spider_input(self, response, spider):
        # Called for each response that goes through the spider
        # middleware and into the spider.
        # Should return None or raise an exception.
        return None

    def process_spider_output(self, response, result, spider):
        # Called with the results returned from the Spider, after
        # it has processed the response.
        # Must return an iterable of Request, dict or Item objects.
        for i in result:
            yield i

    def process_spider_exception(self, response, exception, spider):
        # Called when a spider or process_spider_input() method
        # (from other spider middleware) raises an exception.
        # Should return either None or an iterable of Response, dict
        # or Item objects.
        pass

    def process_start_requests(self, start_requests, spider):
        # Called with the start requests of the spider, and works
        # similarly to the process_spider_output() method, except
        # that it doesn't have a response associated.
        # Must return only requests (not items).
        for r in start_requests:
            yield r

    def spider_opened(self, spider):
        spider.logger.info('Spider opened: %s' % spider.name)
#!/usr/bin/python3
#
# Copyright (C) 2013  Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from mock import Mock
import unittest
import os


class BaseTestCase(unittest.TestCase):
    """Shared setup: stub modules anaconda imports at load time, then build
    the pykickstart handler matching anaconda's declared version."""

    def setUp(self):
        import sys

        # Stub out modules with heavy/system dependencies before importing
        # pyanaconda.kickstart, which pulls them in transitively.
        sys.modules["anaconda_log"] = Mock()
        sys.modules["block"] = Mock()

        from pyanaconda import kickstart
        import pykickstart.version

        # Handler for the exact kickstart syntax version anaconda targets.
        self.handler = pykickstart.version.makeVersion(kickstart.superclass.version)
        self._commandMap = kickstart.commandMap
        self._dataMap = kickstart.dataMap

# Verify that each kickstart command in anaconda uses the correct version of
# that command as provided by pykickstart.  That is, if there's an FC3 and an
# F10 version of a command, make sure anaconda >= F10 uses the F10 version.
class CommandVersionTestCase(BaseTestCase):
    def commands_test(self):
        """Test that anaconda uses the right versions of kickstart commands"""
        for (commandName, commandObj) in self._commandMap.items():
            pykickstartClass = self.handler.commands[commandName].__class__
            self.assertIsInstance(commandObj(), pykickstartClass)

# Do the same thing as CommandVersionTestCase, but for data objects.
class DataVersionTestCase(BaseTestCase):
    def data_test(self):
        """Test that anaconda uses the right versions of kickstart data"""
        for (dataName, dataObj) in self._dataMap.items():
            # pykickstart does not expose data objects as a mapping the way
            # it does command objects.
            pykickstartClass = eval("self.handler.%s" % dataName)
            self.assertIsInstance(dataObj(), pykickstartClass)

# Copy the commands tests but with the command map from dracut/parse-kickstart
class DracutCommandVersionTestCase(CommandVersionTestCase):
    def setUp(self):
        CommandVersionTestCase.setUp(self)

        # top_srcdir should have been set by nosetests.sh. If it wasn't, the KeyError
        # will fail the test.
        parse_kickstart_path = os.path.join(os.environ['top_srcdir'], 'dracut', 'parse-kickstart')

        import tempfile
        with tempfile.NamedTemporaryFile() as parse_temp:
            # Compile the file manually to a tempfile so that the import doesn't automatically
            # crud up the source directory with parse-kickstartc
            import py_compile
            # NOTE(review): rebinding parse_temp here means the with-block
            # cleans up the FIRST tempfile, while this second one is only
            # deleted at garbage collection -- looks unintentional; verify.
            parse_temp = tempfile.NamedTemporaryFile()
            py_compile.compile(parse_kickstart_path, parse_temp.name)

            with open(parse_temp.name, "rb") as parse_temp_content:
                # Use imp to pretend that hyphens are ok for module names
                import imp
                parse_module = imp.load_module('parse_kickstart', parse_temp_content,
                                               parse_temp.name, ('', 'rb', imp.PY_COMPILED))

        # Reuse the inherited command test against dracut's command map.
        self._commandMap = parse_module.dracutCmds
import {Injectable, EventEmitter} from '@angular/core';
import {Court} from "./court.class";
import {Http, Response, Headers} from "@angular/http";
import 'rxjs/Rx';
import {CourtType} from "./court-type.class";
import appGlobals = require('../app.global'); //<==== config

// CRUD service for courts and court types. Keeps a local cache (courts,
// courtTypes) in sync with the REST server and broadcasts every change
// through the *Updated emitters so components can re-render.
@Injectable()
export class CourtsService {
    courts: Court[] = [];                            // cached court list
    courtTypes: CourtType[] = [];                    // cached court types
    courtsUpdated = new EventEmitter<Court[]>();     // fires after cache changes
    courtTypesUpdated = new EventEmitter<CourtType[]>();
    // Auth token is read once at service construction from localStorage.
    headers = new Headers({
        'Content-Type': 'application/json',
        'Authorization': localStorage.getItem('id_token')
    });

    constructor(private http: Http) {
    }

    // Return the cached list (call fetchCourts() first to populate it).
    getCourts() {
        return this.courts;
    }

    getCourtTypes() {
        return this.courtTypes;
    }

    // Linear search of the cache by id; null when not present.
    getCourtById(id: number) {
        for (var court of this.courts) {
            if (court.id === id) {
                return court;
            }
        }
        return null;
    }

    // DELETE on the server, then remove from the cache on success.
    // The server answers either the literal 'DONE' or a JSON error body;
    // errno 1451 is MySQL's foreign-key-constraint failure.
    deleteCourt(court: Court) {
        const id = court.id;
        this.http.delete(appGlobals.rest_server + 'courts/' + id, {headers: this.headers}).subscribe((data) => {
            var res = JSON.stringify(data);
            var obj = JSON.parse(res);
            if (obj._body != null && obj._body != undefined) {
                if (obj._body == 'DONE') {
                    this.courts.splice(this.courts.indexOf(court), 1);
                    this.courtsUpdated.emit(this.courts);
                } else {
                    var _body = JSON.parse(obj._body);
                    if (_body != null && _body != undefined) {
                        if (_body.errno == '1451') {
                            alert("Невозможно удалить " + court.name + ", ограничение внешнего ключа. Необходимо удалить связанные данные.");
                        } else {
                            alert("Произошла ошибка. Обратитесь к администратору");
                        }
                    }
                }
            }
        });
    }

    // Optimistically replace the cached entry, then PUT to the server.
    // The nocache query param defeats intermediary HTTP caching.
    editCourt(oldCourt: Court, newCourt: Court) {
        this.courts[this.courts.indexOf(oldCourt)] = newCourt;
        const body = JSON.stringify(newCourt);
        return this.http.put(appGlobals.rest_server + 'courts?nocache=' + new Date().getTime(), body, {headers: this.headers}).subscribe((res) => {
            this.courtsUpdated.emit(this.courts);
        });
    }

    // POST the new court; the server responds with the generated id,
    // which is copied back onto the object before caching it.
    addCourt(court: Court) {
        const body = JSON.stringify(court);
        return this.http.post(appGlobals.rest_server + 'courts?nocache=' + new Date().getTime(), body, {headers: this.headers})
            .map((response: Response) => response.json())
            .subscribe((data) => {
                court.id = data[0].id;
                this.courts.push(court);
                this.courtsUpdated.emit(this.courts);
            });
    }

    // Reload the full court list from the server into the cache.
    fetchCourts() {
        return this.http.get(appGlobals.rest_server + 'courts?nocache=' + new Date().getTime(), {headers: this.headers})
            .map((response: Response) => response.json())
            .subscribe(
                (data: Court[]) => {
                    this.courts = data;
                    this.courtsUpdated.emit(this.courts);
                }
            );
    }

    // Reload the court-type list from the server into the cache.
    fetchCourtTypes() {
        return this.http.get(appGlobals.rest_server + 'courtTypes?nocache=' + new Date().getTime(), {headers: this.headers})
            .map((response: Response) => response.json())
            .subscribe(
                (data: CourtType[]) => {
                    this.courtTypes = data;
                    this.courtTypesUpdated.emit(this.courtTypes);
                }
            );
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.jsmpp.session.state;

import java.io.IOException;

import org.jsmpp.bean.Command;
import org.jsmpp.extra.SessionState;
import org.jsmpp.session.ResponseHandler;

/**
 * This class is bound_trx state implementation of {@link SMPPSessionState}.
 * Response both to transmit and receive related transaction. Transmit
 * behavior is inherited from {@link SMPPSessionBoundTX}; receive behavior is
 * delegated to the static helpers of the RX counterpart.
 *
 * @author uudashr
 * @version 1.0
 * @since 2.0
 *
 */
class SMPPSessionBoundTRX extends SMPPSessionBoundTX implements
        SMPPSessionState {

    @Override
    public SessionState getSessionState() {
        return SessionState.BOUND_TRX;
    }

    /**
     * Handle an incoming deliver_sm PDU by delegating to the shared
     * implementation inherited from the bound-TX base class.
     *
     * @param pduHeader the PDU command header.
     * @param pdu the raw PDU bytes.
     * @param responseHandler handler used to send the response.
     * @throws IOException if sending the response fails.
     */
    public void processDeliverSm(Command pduHeader, byte[] pdu,
            ResponseHandler responseHandler) throws IOException {
        processDeliverSm0(pduHeader, pdu, responseHandler);
    }

    /**
     * Handle an incoming alert_notification PDU by delegating to the
     * receiver-state implementation.
     *
     * @param pduHeader the PDU command header.
     * @param pdu the raw PDU bytes.
     * @param responseHandler handler used to notify the session.
     */
    public void processAlertNotification(Command pduHeader, byte[] pdu,
            ResponseHandler responseHandler) {
        SMPPSessionBoundRX.processAlertNotification0(pduHeader, pdu, responseHandler);
    }
}
#ifndef NAVIGATORCAMERA_SHARER_H
#define NAVIGATORCAMERA_SHARER_H

#include <pthread.h>
#include <jderobot/pose3d.h>
#include <cv.h>

#include "quaternion.h"

namespace navigatorCamera {

    /** This class provides a shared memory point.
     *
     * All methods of an instance of this class are thread-safe via a mutex and
     * provide or save a copy of the related data.
     *
     */
    class Sharer
    {
    public:
        /// Default constructor.
        Sharer();
        /// Default destructor.
        virtual ~Sharer();

        /** Set method to control the flag 'guiVisible'.
         *
         * @param status <code>true</code> if the gui is visible,
         *          <code>false</code> otherwise.
         */
        void setGuiVisible(bool status);

        /** Set method to control the flag 'controlActive'.
         *
         * @param status <code>true</code> if the control is active,
         *          <code>false</code> otherwise.
         */
        void setControlActive(bool status);

        /** Set method to put the current Pose3D.
         *
         * @param p current Pose3D Pointer.
         */
        void setPose3D(jderobot::Pose3DDataPtr p);

        /** Set method to put the current RGB Image.
         *
         * @param image current RGB Image.
         */
        void setImage(cv::Mat image);

        /** Set method to put the current translation step, the distance
         * to move when is used the translation controls of GUI.
         *
         * @param step current translation step.
         */
        void setTranslationStep(double step);

        /** Set method to put the current rotation step, the angle
         * to move when is used the rotation controls of GUI.
         *
         * @param step current rotation step.
         */
        void setRotationStep(double step);

        /** Set method to put the current speed along the 'x' axis.
         *
         * @param step current 'x' speed value.
         */
        void setSpeedX(double step);

        /** Set method to put the current speed along the 'y' axis.
         *
         * @param step current 'y' speed value.
         */
        void setSpeedY(double step);

        /** Get method to know the status of flag 'guiVisible'.
         *
         * @return <code>true</code> if the gui is visible,
         *          <code>false</code> otherwise.
         */
        bool getGuiVisible();

        /** Get method to know the status of flag 'controlActive'.
         *
         * @return <code>true</code> if the control is active,
         *          <code>false</code> otherwise.
         */
        bool getControlActive();

        /** Get method to know the current Pose3D.
         *
         * @return current Pose3D Pointer.
         */
        jderobot::Pose3DDataPtr getPose3D();

        /** Get method to know the current RGB Image.
         *
         * @return current RGB image.
         */
        cv::Mat getImage();

        /** Get method to know the current translation step.
         *
         * @return current translation step.
         */
        double getTranslationStep();

        /** Get method to know the current rotation step.
         *
         * @return current rotation step.
         */
        double getRotationStep();

        /** Get method to know the current speed along the 'x' axis.
         *
         * @return current 'x' speed value.
         */
        double getSpeedX();

        /** Get method to know the current speed along the 'y' axis.
         *
         * @return current 'y' speed value.
         */
        double getSpeedY();

        /** Method to change the translation of current Pose3D.
         *
         * @param sX sign to apply at translation step for 'x' component.
         * @param sY sign to apply at translation step for 'y' component.
         * @param sZ sign to apply at translation step for 'z' component.
         */
        void changePose3dTranslation(double sX, double sY, double sZ);

        /** Method to change the translation of current Pose3D using the
         * stored speed values (speed_x, speed_y).
         */
        void changePose3dTranslationSpeed();

        /** Method to change the rotation of current Pose3D.
         *
         * @param sY sign to apply at rotation step for yaw angle.
         * @param sP sign to apply at rotation step for pitch angle.
         * @param sR sign to apply at rotation step for roll angle.
         */
        void changePose3dRotation(double sY, double sP, double sR);

        /** Method to restart the current Pose3D.
         *
         */
        void restartPose3D();

    private:
        pthread_mutex_t synch;        ///< Mutex for thread-safe access to internal data.
        pthread_mutex_t synchFlags;   ///< Mutex for thread-safe access to internal flags.
        pthread_mutex_t synchPose3D;  ///< Mutex for thread-safe access to pose3d related data.
        pthread_mutex_t synchCamera;  ///< Mutex for thread-safe access to camera related data.

        bool guiVisible;              ///< Flag for the visibility of the GUI.
        bool controlActive;           ///< Flag for the status of the control threads.

        jderobot::Pose3DDataPtr pose3d;  ///< Current Pose3D.
        cv::Mat RGBimage;                ///< Current RGB image

        double trlnStp;   ///< Current translation step to move when is used the translation controls of GUI.
        double rtnStp;    ///< Current rotation step to move when is used the rotation controls of GUI.
        double speed_x;   ///< Current speed along the 'x' axis.
        double speed_y;   ///< Current speed along the 'y' axis.
    };  /* class Sharer */

}  /* namespace navigatorCamera */

#endif  /* NAVIGATORCAMERA_SHARER_H */
import BaseRoute from './-base';

// Route that loads the flights recorded on a given date.
export default BaseRoute.extend({
  // Build the API endpoint for the date supplied in the route params.
  getURL(params) {
    return '/api/flights/date/' + params.date;
  },
});
from django.db import models
from django import forms
from django.utils.translation import ugettext as _
from django.utils.safestring import mark_safe
from xadmin.fields import ImageWithThumbField
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ModelFormAdminView, DetailAdminView, ListAdminView
from xadmin.widgets import AdminFileWidget

# Gallery thumbnail markup; substitutions are (full-size url, title,
# download url, thumbnail url).
IMAGE_FORMAT_STR = """<a href="%s" target="_blank" title="%s" data-gallery="gallery" data-download="%s"><img src="%s" class="field_img"/></a>"""


def get_gallery_modal():
    """Return the HTML for the shared image-gallery modal dialog."""
    return """
          <!-- modal-gallery is the modal dialog used for the image gallery -->
          <div id="modal-gallery" class="modal modal-gallery fade" tabindex="-1">
            <div class="modal-dialog">
              <div class="modal-content">
                <div class="modal-header">
                  <button type="button" class="close" data-dismiss="modal" aria-hidden="true">&times;</button>
                  <h4 class="modal-title"></h4>
                </div>
                <div class="modal-body"><div class="modal-image"><h1 class="loader"><i class="fa-spinner fa-spin fa fa-large loader"></i></h1></div></div>
                <div class="modal-footer">
                    <a class="btn btn-info modal-prev"><i class="fa fa-arrow-left"></i> <span>%s</span></a>
                    <a class="btn btn-primary modal-next"><span>%s</span> <i class="fa fa-arrow-right"></i></a>
                    <a class="btn btn-success modal-play modal-slideshow" data-slideshow="5000"><i class="fa fa-play"></i> <span>%s</span></a>
                    <a class="btn btn-default modal-download" target="_blank"><i class="fa fa-download"></i> <span>%s</span></a>
                </div>
              </div><!-- /.modal-content -->
            </div><!-- /.modal-dialog -->
          </div>
    """ % (_('Previous'), _('Next'), _('Slideshow'), _('Download'))


def get_image_urls(image, field):
    """Return (small_url, medium_url) for an image file.

    ImageWithThumbField provides dedicated thumbnail renditions; any other
    ImageField falls back to the original image url for both sizes.
    """
    if isinstance(field, ImageWithThumbField):
        small = field.get_small(image)
        medium = field.get_medium(image)
        small_url = small.url
        medium_url = medium.url
    else:
        small_url = image.url
        medium_url = image.url
    return small_url, medium_url


class AdminImageField(forms.ImageField):
    """ImageField whose widget receives the field label via widget attrs."""

    def widget_attrs(self, widget):
        return {'label': self.label}


class AdminImageWidget(AdminFileWidget):
    """
    A ImageField Widget that shows its current value if it has one.
    """

    def __init__(self, attrs=None):
        # Bug fix: the default was a shared mutable dict (attrs={}), which
        # every instance created without explicit attrs would alias; use a
        # fresh dict per instance instead.
        if attrs is None:
            attrs = {}
        super(AdminImageWidget, self).__init__(attrs)

    def render(self, name, value, attrs=None):
        output = []
        if value and hasattr(value, "url"):
            # Prepend a gallery thumbnail linking to the full-size image.
            label = self.attrs.get('label', name)
            small_url, medium_url = get_image_urls(value, value.field)
            output.append(IMAGE_FORMAT_STR % (medium_url, label, value.url, small_url))
        output.append(super(AdminImageWidget, self).render(name, value, attrs))
        return mark_safe(u''.join(output))


class ModelDetailPlugin(BaseAdminPlugin):
    """Renders ImageField values as gallery thumbnails on detail/form views."""

    def __init__(self, admin_view):
        super(ModelDetailPlugin, self).__init__(admin_view)
        # Set once any image field is seen; gates the gallery assets/markup.
        self.include_image = False

    def get_field_attrs(self, attrs, db_field, **kwargs):
        # Swap in the image-aware form field/widget for image model fields.
        if isinstance(db_field, models.ImageField):
            attrs['widget'] = AdminImageWidget
            attrs['form_class'] = AdminImageField
            self.include_image = True
        return attrs

    def get_field_result(self, result, field_name):
        # On read-only detail views, replace the plain value with thumbnail
        # markup linking into the gallery.
        if isinstance(result.field, models.ImageField):
            if result.value:
                img = getattr(result.obj, field_name)
                small_url, medium_url = get_image_urls(img, result.field)
                result.text = mark_safe(IMAGE_FORMAT_STR % (medium_url, result.label, img.url, small_url))
            self.include_image = True
        return result

    # Media
    def get_media(self, media):
        if self.include_image:
            media = media + self.vendor('image-gallery.js', 'image-gallery.css')
        return media

    def block_before_fieldsets(self, context, node):
        if self.include_image:
            return '<div id="gallery" data-toggle="modal-gallery" data-target="#modal-gallery">'

    def block_after_fieldsets(self, context, node):
        if self.include_image:
            return "</div>"

    def block_extrabody(self, context, node):
        if self.include_image:
            return get_gallery_modal()


class ModelListPlugin(BaseAdminPlugin):
    """Wraps list-view results in a gallery container when enabled via the
    admin's ``list_gallery`` option."""

    list_gallery = False

    def init_request(self, *args, **kwargs):
        # Plugin is active only when the admin sets list_gallery.
        return bool(self.list_gallery)

    # Media
    def get_media(self, media):
        return media + self.vendor('image-gallery.js', 'image-gallery.css')

    def block_results_top(self, context, node):
        return '<div id="gallery" data-toggle="modal-gallery" data-target="#modal-gallery">'

    def block_results_bottom(self, context, node):
        return "</div>"

    def block_extrabody(self, context, node):
        return get_gallery_modal()


site.register_plugin(ModelDetailPlugin, DetailAdminView)
site.register_plugin(ModelDetailPlugin, ModelFormAdminView)
site.register_plugin(ModelListPlugin, ListAdminView)
import { ReactNode } from "react";
import { render, screen } from "@testing-library/react";
import { MemoryRouter } from "react-router-dom";

import ErrorBoundary from "..";

describe("<ErrorBoundary />", () => {
  // Render the boundary inside a router and return its first DOM child
  // (null when the boundary renders nothing).
  const tree = (children?: ReactNode) =>
    render(
      <MemoryRouter>
        <ErrorBoundary>{children}</ErrorBoundary>
      </MemoryRouter>
    ).container.firstChild;

  it("renders nothing if no children", () => {
    expect(tree()).toMatchSnapshot();
  });

  it("renders children if no error", () => {
    expect(
      tree(
        <div>
          <h1>I am Welly</h1>
        </div>
      )
    ).toMatchSnapshot();
  });

  it("renders error view if an error occurs", () => {
    // Silence React's error logging for the intentional render failure.
    console.error = jest.fn();
    // An Error object is not a valid React child, so rendering it throws
    // and the boundary should catch it and show its fallback view.
    tree(<div>{new Error()}</div>);
    expect(screen.getByTestId("error-view")).toBeInTheDocument();
  });
});
#! /usr/bin/env python

## Copyright (c) 2016 Aging Miser
## This file is part of TFS HISCORES UPDATER.
##
## TFS HISCORES UPDATER is free software: you can redistribute it and/or
## modify it under the terms of the GNU General Public License as published
## by the Free Software Foundation, either version 3 of the License, or (at
## your option) any later version.
##
## TFS HISCORES UPDATER is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
## Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with TFS HISCORES UPDATER.  If not, see http://www.gnu.org/licenses/.

import time, inspect, os

# Directory (relative to the working directory) that receives log files.
LOG_DIRECTORY = 'log/'


class Log:
    """
    Generates a timestamped log file in log/ and writes to it.
    """

    def __init__(self):
        """Initialize logger object."""
        # Timestamp recorded once; also used in the log filename.
        self.runtime = get_current_time()
        if not os.path.exists(LOG_DIRECTORY):
            os.mkdir(LOG_DIRECTORY)
        self.log_fname = os.path.join(
            LOG_DIRECTORY,
            'update-log_{timestamp}.log'.format(timestamp=self.runtime)
        )
        # Create (or truncate) the file before the first append.
        open(self.log_fname, 'w').close()
        self.log('Runtime set to %s' % self.runtime)
        self.log('Setting log file to %s' % self.log_fname)

    def log(self, text):
        """Log the caller function and the message to be logged."""
        # inspect.getouterframes() ->
        # Each record contains a frame object, filename, line number, function
        # name, a list of lines of context, and index within the context.
        UP_ONE_LEVEL = 1
        (_, _, _, caller, _, _) = inspect.getouterframes(
            inspect.currentframe(), 2
        )[UP_ONE_LEVEL]
        with open(self.log_fname, 'a') as log_obj:
            # Caller name is right-aligned in a fixed 20-char column.
            log_obj.write(u'%20s: ' % caller[:20])
            # NOTE(review): `unicode` exists only on Python 2; this raises
            # NameError under Python 3 -- confirm the target interpreter.
            log_obj.write(unicode(text))
            log_obj.write(u'\n')


def get_current_time():
    """
    Return string representing the current GMT time, yyyy-mm-dd-hh-mm-ss.
    """
    return '-'.join([str(elem).zfill(2) for elem in time.gmtime()[:6]])


def get_new_logger():
    """Return a new logger object and the time at which it was created."""
    logger = Log()
    return (logger, logger.runtime)
""" Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os from resource_management import * #from resource_management.libraries.functions.dfs_datanode_helper import handle_dfs_data_dir from resource_management.libraries.functions.mounted_dirs_helper import handle_mounted_dirs from utils import service def create_dirs(data_dir): """ :param data_dir: The directory to create :param params: parameters """ import params Directory(data_dir, create_parents=True, cd_access="a", mode=0755, owner=params.hdfs_user, group=params.user_group, ignore_failures=True ) def datanode(action=None): import params if action == "configure": Directory(params.dfs_domain_socket_dir, create_parents=True, mode=0751, owner=params.hdfs_user, group=params.user_group) # handle_mounted_dirs ensures that we don't create dfs data dirs which are temporary unavailable (unmounted), and intended to reside on a different mount. 
data_dir_to_mount_file_content = handle_mounted_dirs(create_dirs, params.dfs_data_dirs, params.data_dir_mount_file, params) # create a history file used by handle_mounted_dirs File(params.data_dir_mount_file, owner=params.hdfs_user, group=params.user_group, mode=0644, content=data_dir_to_mount_file_content ) elif action == "start" or action == "stop": Directory(params.hadoop_pid_dir_prefix, mode=0755, owner=params.hdfs_user, group=params.user_group ) service( action=action, name="datanode", user=params.hdfs_user, create_pid_dir=True, create_log_dir=True ) elif action == "status": import status_params check_process_status(status_params.datanode_pid_file)
from eth_utils import to_canonical_address, to_checksum_address

from raiden.network.proxies.token import Token
from raiden.network.rpc.client import JSONRPCClient
from raiden.utils import privatekey_to_address


def test_token(deploy_client, token_proxy, private_keys, web3, contract_manager):
    """Exercise ERC20 transfer / approve / allowance through the Token proxy
    using two accounts: the deployer and a second key from the fixtures."""
    privkey = private_keys[1]
    address = privatekey_to_address(privkey)
    address = to_canonical_address(address)
    # Second proxy bound to the same token contract but signing with the
    # other account's key.
    other_client = JSONRPCClient(web3, privkey)
    other_token_proxy = Token(
        jsonrpc_client=other_client,
        token_address=to_canonical_address(token_proxy.proxy.contract.address),
        contract_manager=contract_manager,
    )

    # send some funds from deployer to generated address
    transfer_funds = 100
    token_proxy.transfer(address, transfer_funds)
    assert transfer_funds == token_proxy.balance_of(address)

    # Approve and verify the allowance directly via the contract call.
    allow_funds = 100
    token_proxy.approve(address, allow_funds)
    assert allow_funds == token_proxy.proxy.contract.functions.allowance(
        to_checksum_address(deploy_client.address), to_checksum_address(address)
    ).call(block_identifier="latest")

    # Send the funds back from the second account; its balance returns to 0.
    other_token_proxy.transfer(deploy_client.address, transfer_funds)
    assert token_proxy.balance_of(address) == 0