// repo: MrDemonRush/longest-consecutive-sequence
module.exports = function longestConsecutiveLength(array) {
  if (array.length === 0) {
    return 0;
  } else if (array.length === 1) {
    return 1;
  }

  let count = 0;
  array.sort(function (a, b) { return a - b; });
  const arr = [];

  for (let i = 0; i < array.length; i++) {
    if (array[i] - array[i + 1] !== -1 && array[i] !== array[i + 1]) {
      // not consecutive and not a duplicate: reset the current run
      count = 0;
    } else if (array[i] - array[i + 1] === -1) {
      // consecutive step: extend the run and record its length
      count++;
      arr.push(count);
    }
    // duplicates fall through both branches and keep the current run alive
  }

  if (arr.length === 0) {
    return 1;
  }
  const max = Math.max.apply(Math, arr);
  return max + 1;
};
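// A minimal usage sketch for the module above, not part of the original repo;
// the require path is an assumption based on the repository name.
const longestConsecutiveLength = require('./longest-consecutive-sequence');

console.log(longestConsecutiveLength([100, 4, 200, 1, 3, 2])); // 4 (the run 1, 2, 3, 4)
console.log(longestConsecutiveLength([]));                     // 0
console.log(longestConsecutiveLength([7]));                    // 1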
#include <vector>

template <typename Type, typename volMesh>
class YourDerivedClass : public transformFvPatchField<Type>
{
public:
    YourDerivedClass(const fvPatch& p, const DimensionedField<Type, volMesh>& iF)
    :
        transformFvPatchField<Type>(p, iF),
        fixedValue_(p.size(), pTraits<Type>::zero)
    {
        // Additional initialization code if needed
    }

private:
    std::vector<Type> fixedValue_;
};
// file: title/arm9/source/bootstrapsettings.cpp
#include "common/inifile.h"
#include "common/bootstrappaths.h"
#include "bootstrapsettings.h"

#include <string.h>

BootstrapSettings::BootstrapSettings() {
    bstrap_debug = false;
    bstrap_logging = false;
    bstrap_romreadled = BootstrapSettings::ELEDNone;
    bstrap_loadingScreen = BootstrapSettings::ELoadingRegular;
}

void BootstrapSettings::loadSettings() {
    CIniFile bootstrapini(BOOTSTRAP_INI);

    // UI settings.
    bstrap_debug = bootstrapini.GetInt("NDS-BOOTSTRAP", "DEBUG", bstrap_debug);
    bstrap_logging = bootstrapini.GetInt("NDS-BOOTSTRAP", "LOGGING", bstrap_logging);
    bstrap_romreadled = bootstrapini.GetInt("NDS-BOOTSTRAP", "ROMREAD_LED", bstrap_romreadled);
    bstrap_loadingScreen = bootstrapini.GetInt("NDS-BOOTSTRAP", "LOADING_SCREEN", bstrap_loadingScreen);
}

void BootstrapSettings::saveSettings() {
    CIniFile bootstrapini(BOOTSTRAP_INI);

    // UI settings.
    bootstrapini.SetInt("NDS-BOOTSTRAP", "DEBUG", bstrap_debug);
    bootstrapini.SetInt("NDS-BOOTSTRAP", "LOGGING", bstrap_logging);
    bootstrapini.SetInt("NDS-BOOTSTRAP", "ROMREAD_LED", bstrap_romreadled);
    bootstrapini.SetInt("NDS-BOOTSTRAP", "LOADING_SCREEN", bstrap_loadingScreen);

    bootstrapini.SaveIniFile(BOOTSTRAP_INI);
}
#include <iostream>
#include <cstdlib>
#include <cstring>
#include <unistd.h>  // for getopt() and optarg

void disp_help(const char* programName) {
    // Implementation of help display logic
}

int main(int argc, char* argv[]) {
    char rangeChar[10];  // Assuming a maximum length for the range argument
    int opt;

    while ((opt = getopt(argc, argv, "hr:c:")) != -1) {
        switch (opt) {
            case 'h':
                disp_help(argv[0]);
                return 0;
            case 'r':
                if (1 != sscanf(optarg, "%9s", rangeChar)) {
                    std::cout << "error ==> wrong format of " << optarg << std::endl;
                    // Handle incorrect format error
                } else {
                    // Process the 'r' option argument with the correct format
                }
                break;
            case 'c':
                // Implement the logic to handle the 'c' option
                break;
            default:
                // Handle unknown options or errors
                break;
        }
    }

    // Additional logic for processing other command-line arguments
    return 0;
}
public static int factorial(int n) {
    if (n == 0) {
        return 1;
    }
    return n * factorial(n - 1);
}

// testing
public static void main(String[] args) {
    System.out.println(factorial(5)); // Output: 120
}
#!/usr/bin/env python
# repo: adrienkohlbecker/hypervisor
import sys

USAGE = 'USAGE:\n\tsort_ini.py file.ini'


def sort_ini(fname):
    """sort .ini file: sorts sections and in each section sorts keys"""
    with open(fname) as f:
        lines = f.readlines()

    section = ''
    subcat = ''
    sections = {}
    for line in lines:
        line = line.strip()
        if line:
            if line.startswith('[['):
                subcat = line
                continue
            if line.startswith('['):
                section = line
                subcat = ''
                continue
            if section not in sections:
                sections[section] = {}
            if subcat not in sections[section]:
                sections[section][subcat] = []
            sections[section][subcat].append(line)

    # rewrite the file with sections, subcategories and keys sorted
    with open(fname, 'w') as f:
        if sections:
            for k in sorted(sections):
                vals = sections[k]
                if k != '':
                    f.write(k.strip() + '\n')
                for sk in sorted(vals):
                    subvals = sorted(vals[sk])
                    if sk != '':
                        f.write(sk.strip() + '\n')
                    f.write('\n'.join(v.strip() for v in subvals))
                    f.write('\n')


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print(USAGE)
    else:
        sort_ini(sys.argv[1])
package com.bitsys.common.http.entity.content;

import com.bitsys.common.http.header.ContentType;

/**
 * This class defines a string body part.
 */
public class StringBodyPart extends AbstractContentBodyPart<String> {
    /**
     * Constructs a new {@linkplain StringBodyPart} from the given text and
     * content type.
     *
     * @param text the content body.
     * @param contentType the content type.
     */
    public StringBodyPart(final String text, final ContentType contentType) {
        super(text, null, contentType);
    }
}
#!/bin/sh

if [ "$TRAVIS_EVENT_TYPE" != "cron" ]
then
  export DEPLOY_DESTINATION=${DEPLOY_DESTINATION:-/var/www/html/releases}
else
  export DEPLOY_DESTINATION=${DEPLOY_DESTINATION:-/var/www/html/edge/osx}
fi

export DEPLOY_USER="${DEPLOY_USER:-ubuntu}"

REMOTE_HOST="$1"

if [ "$TRAVIS_EVENT_TYPE" != "cron" ]
then
  REMOTE_DIR="${DEPLOY_DESTINATION}/${PX_VERSION}"
else
  REMOTE_DIR="${DEPLOY_DESTINATION}"
fi

# REMOTE_HOST was read from $1; the tarball to deploy is passed as $2
export REMOTE_FILE_COUNT=$(ssh -o StrictHostKeyChecking=no -p 2220 ${DEPLOY_USER}@${REMOTE_HOST} "ls -lrt $DEPLOY_DESTINATION | wc -l")
export REMOTE_FILE_OLD=$(ssh -o StrictHostKeyChecking=no -p 2220 ${DEPLOY_USER}@${REMOTE_HOST} "ls -t $DEPLOY_DESTINATION | tail -1")
export REMOTE_TEMPDIR=$(ssh -o StrictHostKeyChecking=no -p 2220 ${DEPLOY_USER}@${REMOTE_HOST} "mktemp -d")

filename=$2
scp -P 2220 ${filename} ${DEPLOY_USER}@${REMOTE_HOST}:${REMOTE_TEMPDIR}
echo "${filename} ${DEPLOY_USER}@${REMOTE_HOST}:${REMOTE_TEMPDIR}"

SSH="ssh -tt -o StrictHostKeyChecking=no -p 2220 -l ${DEPLOY_USER} ${REMOTE_HOST}"
$SSH "set -e;
  sudo rm -rf $REMOTE_DIR;
  sudo mkdir $REMOTE_DIR;
  cd $REMOTE_TEMPDIR;
  echo \"sudo tar -C $REMOTE_DIR -xvzf ${filename}\";
  sudo tar -C $REMOTE_DIR -xvzf ${filename};
  sudo mv $REMOTE_DIR/release/* $REMOTE_DIR/. ;
  sudo rm -rf $REMOTE_DIR/release;
  sudo rm -rf ${REMOTE_TEMPDIR};
  echo $REMOTE_FILE_COUNT;
  if [ $REMOTE_FILE_COUNT -gt 30 ]; then
    sudo rm -rf $REMOTE_FILE_OLD;
  fi"
#!/usr/bin/env bash

source $DOT_ROOT/constants.sh
source $DOT_ROOT/lib/os.sh

DOT_MODULE="enable-services"

#TODO 4jane
if [ "$DOT_OS" == "linux_arch" ]; then
  if pacman -Q sddm &> /dev/null; then
    log info "enabling sddm"
    sudo systemctl enable sddm
  fi
fi
package main

import (
	"context"
	"crypto/sha256"
	"fmt"
	"log"
	"net/http"
	"os"
	"time"

	"github.com/gorilla/mux"
	"github.com/tarikeshaq/personal-blog-api/models"
	"go.mongodb.org/mongo-driver/mongo"
	"go.mongodb.org/mongo-driver/mongo/options"
)

type key string

const (
	hostKey     = key("hostKey")
	usernameKey = key("usernameKey")
	passwordKey = key("passwordKey")
	databaseKey = key("databaseKey")
)

func hasher(s string) []byte {
	val := sha256.Sum256([]byte(s))
	return val[:]
}

var client *mongo.Client
var database *mongo.Database

type Post = models.Post

func setupContext() (context.Context, context.CancelFunc) {
	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
	ctx = context.WithValue(ctx, hostKey, os.Getenv("MONGO_HOST"))
	ctx = context.WithValue(ctx, usernameKey, os.Getenv("MONGO_USERNAME"))
	ctx = context.WithValue(ctx, passwordKey, os.Getenv("MONGO_PASSWORD"))
	ctx = context.WithValue(ctx, databaseKey, os.Getenv("MONGO_DATABASE"))
	return ctx, cancel
}

func setupDB(ctx context.Context, cancel context.CancelFunc) *mongo.Database {
	defer cancel()
	uri := fmt.Sprintf(`mongodb://%s:%s@%s/%s`,
		ctx.Value(usernameKey).(string),
		ctx.Value(passwordKey).(string),
		ctx.Value(hostKey).(string),
		ctx.Value(databaseKey).(string),
	)
	client, err := mongo.NewClient(options.Client().ApplyURI(uri))
	if err != nil {
		log.Fatalf(err.Error())
	}
	err = client.Connect(ctx)
	if err != nil {
		log.Fatalf(err.Error())
	}
	return client.Database("blogs")
}

func setupRoutes() *mux.Router {
	// The call on the password line was redacted in the source; hashing it the
	// same way as the username line below is the evident intent.
	password := hasher(os.Getenv("PASSWORD"))
	username := hasher(os.Getenv("USERNAME"))
	router := mux.NewRouter()
	router.HandleFunc("/blogs", GetAllBlogsHandler).Methods("GET")
	router.HandleFunc("/blogs/{blogId}", GetOneBlogHandler).Methods("GET")
	router.HandleFunc("/blogs", BasicAuth(AddNewBlogHandler, username, password, "Please input your username and password")).Methods("POST")
	router.HandleFunc("/blogs/{blogId}", BasicAuth(RemoveBlogHandler, username, password, "Please input your username and password")).Methods("DELETE")
	router.HandleFunc("/blogs", BasicAuth(RemoveAllBlogsHandler, username, password, "Please input your username and password")).Methods("DELETE")
	return router
}

func main() {
	ctx, cancel := setupContext()
	database = setupDB(ctx, cancel)
	router := setupRoutes()
	http.ListenAndServe(":"+os.Getenv("PORT"), router)
}
#!/bin/sh
#
# Copyright (c) 2005, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#

#
# Common setup for the script API unit tests. Sets up the following variables:
#
# PS - path sep.
# FS - file sep.
# JAVA - java cmd.
# JAVAC - javac cmd.
# JAR - jar cmd.

OS=`uname -s`
case "$OS" in
  Linux | Darwin | AIX )
    PS=":"
    FS="/"
    ;;
  Windows* )
    PS=";"
    OS="Windows"
    FS="\\"
    ;;
  CYGWIN* )
    PS=";"
    OS="Windows"
    FS="\\"
    isCygwin=true
    ;;
  * )
    echo "Unrecognized system!"
    exit 1;
    ;;
esac

if [ "${TESTJAVA}" = "" ]
then
  echo "TESTJAVA not set. Test cannot execute. Failed."
  exit 1
fi

if [ "${COMPILEJAVA}" = "" ]
then
  COMPILEJAVA="${TESTJAVA}"
fi

if [ "${TESTSRC}" = "" ]
then
  echo "TESTSRC not set. Test cannot execute. Failed."
  exit 1
fi

if [ "${TESTCLASSES}" = "" ]
then
  echo "TESTCLASSES not set. Test cannot execute. Failed."
  exit 1
fi

JAVA="${TESTJAVA}/bin/java"
JAVAC="${COMPILEJAVA}/bin/javac"
JAR="${COMPILEJAVA}/bin/jar"
package com.createchance.imageeditor.drawers;

import com.createchance.imageeditor.shaders.WindowBlindsTransShader;

/**
 * Window blinds transition drawer.
 *
 * @author createchance
 * @date 2019/1/1
 */
public class WindowBlindsTransDrawer extends AbstractTransDrawer {
    @Override
    protected void getTransitionShader() {
        mTransitionShader = new WindowBlindsTransShader();
    }
}
#!/bin/sh

# check to see if the publish directory is a git submodule
verify_publish_submodule() {
  write_out "y" "TEST"
  write_out -1 "Verify if publish directory '${INPUT_HUGO_PUBLISH_DIRECTORY}' contains a git submodule."

  if [ -f ".gitmodules" ]; then
    SUBMODULE_PATH=$(git config -f .gitmodules --get-regexp "${INPUT_HUGO_PUBLISH_DIRECTORY}" | grep -e "\.path" | cut -d" " -f2)
    if [ "${SUBMODULE_PATH}" = "${INPUT_HUGO_PUBLISH_DIRECTORY}" ]; then
      write_out "g" "PASSED - '${INPUT_HUGO_PUBLISH_DIRECTORY}' publish directory confirmed as a git submodule. Release will be published to the submodule.\n"
      PUBLISH_TO_SUBMODULE=true
    else
      write_out "b" "'${INPUT_HUGO_PUBLISH_DIRECTORY}' is not a git submodule. Action will skip submodule steps.\n"
      PUBLISH_TO_SUBMODULE=false
    fi
  else
    write_out "b" "No '.gitmodules' file found. Will not publish release build to a submodule.\n"
    PUBLISH_TO_SUBMODULE=false
  fi
}

# if publish directory is a submodule, make sure it was cloned by actions/checkout step
verify_submodule_cloned() {
  write_out "y" "TEST"
  write_out -1 "Verify git submodule in '${INPUT_HUGO_PUBLISH_DIRECTORY}' was properly cloned."

  if [ "${PUBLISH_TO_SUBMODULE}" = true ]; then
    DIR_CONTENTS=$(ls "${INPUT_HUGO_PUBLISH_DIRECTORY}")
    if [ -z "${DIR_CONTENTS}" ]; then
      write_out "r" "FAILED - '${INPUT_HUGO_PUBLISH_DIRECTORY}' is empty. Did you include 'submodules: recursive' in the 'action/checkout' step?\n"
      SUBMODULE_CLONED=false
    else
      write_out "g" "PASSED - Contents found in '${INPUT_HUGO_PUBLISH_DIRECTORY}'. Submodule was checked out properly.\n"
      SUBMODULE_CLONED=true
    fi
  else
    write_out "b" "Publish directory not marked as submodule. Skipping test.\n"
    SUBMODULE_CLONED=false
  fi
}

verify_submodule_branch_exists() {
  # Make check in submodule repo for submodule deploy branch
  write_out "y" "TEST"
  write_out -1 "[Verify Submodule Branch] -> tests 'submodule_release_branch' input"

  if [ "${SUBMODULE_CLONED}" = true ]; then
    git -C "${INPUT_HUGO_PUBLISH_DIRECTORY}" fetch --quiet --depth=1 origin "refs/heads/${INPUT_SUBMODULE_RELEASE_BRANCH}"
    VERIFY_SUBMODULE_RELEASE_BRANCH=$(git rev-parse --verify "remotes/origin/${INPUT_SUBMODULE_RELEASE_BRANCH}")
    if [ -z "${VERIFY_SUBMODULE_RELEASE_BRANCH}" ]; then
      write_out "y" "WARNING - no branch '${INPUT_SUBMODULE_RELEASE_BRANCH}' found in the publish submodule.\nA new branch will be created when you run the action. Please make sure you want this.\n"
    else
      write_out "g" "PASSED\n"
    fi
  else
    write_out "b" "Submodule not cloned with action/checkout step. Skipping test.\n"
  fi
}
/**
 * Created by Bob on 29-1-2016.
 */
$(function() {
    //var test = document.getElementById('test');
    //test.addEventListener('click', ajaxHandler);

    $('#test').click(function(){
        //$.ajax({
        //    url: 'assessment/'+$(this).data('block'),
        //    type: "get",
        //    success: function(data){
        //        //alert(data);
        //    }
        //});
    });
});
package io.cattle.iaas.healthcheck.process;

import io.cattle.iaas.healthcheck.service.HealthcheckService;
import io.cattle.platform.core.model.HealthcheckInstance;
import io.cattle.platform.core.model.HealthcheckInstanceHostMap;
import io.cattle.platform.core.model.Instance;
import io.cattle.platform.engine.handler.HandlerResult;
import io.cattle.platform.engine.handler.ProcessPreListener;
import io.cattle.platform.engine.process.ProcessInstance;
import io.cattle.platform.engine.process.ProcessState;
import io.cattle.platform.process.common.handler.AbstractObjectProcessLogic;
import io.cattle.platform.util.type.Priority;

import javax.inject.Inject;
import javax.inject.Named;

@Named
public class HealthcheckInstanceHostMapRemovePostHandler extends AbstractObjectProcessLogic implements ProcessPreListener, Priority {

    @Inject
    HealthcheckService hcSvc;

    @Override
    public String[] getProcessNames() {
        return new String[] { "healthcheckinstancehostmap.remove" };
    }

    @Override
    public HandlerResult handle(ProcessState state, ProcessInstance process) {
        HealthcheckInstanceHostMap hostMap = (HealthcheckInstanceHostMap) state.getResource();
        HealthcheckInstance hInstance = objectManager.loadResource(HealthcheckInstance.class,
                hostMap.getHealthcheckInstanceId());
        if (hInstance == null || hInstance.getRemoved() != null) {
            return null;
        }
        Instance instance = objectManager.loadResource(Instance.class, hInstance.getInstanceId());
        if (instance == null || instance.getRemoved() != null) {
            return null;
        }
        hcSvc.registerForHealtcheck(HealthcheckService.HealthcheckInstanceType.INSTANCE, instance.getId());
        return null;
    }

    @Override
    public int getPriority() {
        return Priority.DEFAULT;
    }
}
// repo: sbnair/PolkaJS
import type { CodecHash, Hash } from '../interfaces/runtime';
import type { Codec, Constructor, Registry } from '../types';
import BN from 'bn.js';
declare type SetValues = Record<string, number | BN>;
/**
 * @name Set
 * @description
 * A Set is an array of string values, represented as an encoded type by
 * a bitwise representation of the values.
 */
export declare class CodecSet extends Set<string> implements Codec {
    #private;
    readonly registry: Registry;
    createdAtHash?: Hash;
    constructor(registry: Registry, setValues: SetValues, value?: string[] | Set<string> | Uint8Array | BN | number | string, bitLength?: number);
    static with(values: SetValues, bitLength?: number): Constructor<CodecSet>;
    /**
     * @description The length of the value when encoded as a Uint8Array
     */
    get encodedLength(): number;
    /**
     * @description returns a hash of the contents
     */
    get hash(): CodecHash;
    /**
     * @description true if the Set contains no values
     */
    get isEmpty(): boolean;
    /**
     * @description The actual set values as a string[]
     */
    get strings(): string[];
    /**
     * @description The encoded value for the set members
     */
    get valueEncoded(): BN;
    /**
     * @description adds a value to the Set (extended to allow for validity checking)
     */
    add: (key: string) => this;
    /**
     * @description Compares the value of the input to see if there is a match
     */
    eq(other?: unknown): boolean;
    /**
     * @description Returns a hex string representation of the value
     */
    toHex(): string;
    /**
     * @description Converts the Object to a human-friendly JSON, with additional fields, expansion and formatting of information
     */
    toHuman(): string[];
    /**
     * @description Converts the Object to JSON, typically used for RPC transfers
     */
    toJSON(): string[];
    /**
     * @description The encoded value for the set members
     */
    toNumber(): number;
    /**
     * @description Returns the base runtime type name for this instance
     */
    toRawType(): string;
    /**
     * @description Returns the string representation of the value
     */
    toString(): string;
    /**
     * @description Encodes the value as a Uint8Array as per the SCALE specifications
     * @param isBare true when the value has none of the type-specific prefixes (internal)
     */
    toU8a(isBare?: boolean): Uint8Array;
}
export {};
def WordSplit(data):
    splitList = []
    for item in data:
        splitList += item.split(' ')
    return splitList
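# A quick usage sketch for WordSplit above (added for illustration): each
# element is split on single spaces and the pieces are concatenated in order.
if __name__ == '__main__':
    print(WordSplit(["hello world", "foo bar baz"]))
    # -> ['hello', 'world', 'foo', 'bar', 'baz']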
const path = require('path')
const merge = require('webpack-merge')
const config = require('../config')
const webpack = require('webpack')
const HtmlWebpackPlugin = require('html-webpack-plugin')
const webpackBaseConfig = require('./webpack.base.conf')
const UglifyJSPlugin = require('uglifyjs-webpack-plugin')
const ExtractTextPlugin = require('extract-text-webpack-plugin')
const OptimizeCSSPlugin = require('optimize-css-assets-webpack-plugin')

module.exports = merge(webpackBaseConfig, {
  output: {
    filename: '[name].[chunkhash].js',
    path: path.resolve(__dirname, '../dist')
  },
  plugins: [
    new webpack.DefinePlugin({
      'process.env': config.build.env
    }),
    new UglifyJSPlugin(),
    new ExtractTextPlugin({
      filename: '[name].[contenthash].css'
    }),
    new OptimizeCSSPlugin(),
    ...['index'].map(
      v =>
        new HtmlWebpackPlugin({
          statPrefix: 'online',
          filename: v + '.html',
          template: 'src/index.html',
          chunks: ['common', v],
          inject: true,
          minify: {
            minifyCSS: true,
            minifyJS: true,
            removeComments: true,
            collapseWhitespace: true,
            removeAttributeQuotes: false
          },
          chunksSortMode: 'dependency'
        })
    ),
    new webpack.optimize.CommonsChunkPlugin({
      name: 'common',
      minChunks: 2
    }),
  ]
})
import moduleHome from './home/module_home'
import moduleDetail from './detail/module_detail'

const modules = {
  moduleHome,
  moduleDetail
}

export default modules
export APP_PATH=/me/wendysa/helloservlet;
export CATALINA_HOME=/usr/local/tomcat;
export TARGET_DIR=$CATALINA_HOME/webapps/ROOT/WEB-INF/classes;
export TOMCAT_CONTAINER_NAME=tomcatdev;
export WEB_INF=$CATALINA_HOME/webapps/ROOT/WEB-INF;

# Ensure that `classes` folder in $TOMCAT_CONTAINER_NAME:$TARGET_DIR directory has been created
docker exec -it $TOMCAT_CONTAINER_NAME sh -c "mkdir -p $TARGET_DIR";

# Copy everything inside dist into $TOMCAT_CONTAINER_NAME:$TARGET_DIR directory
if [ -d "./dist" ]; then
  docker exec -it $TOMCAT_CONTAINER_NAME sh -c "rm -rf $TARGET_DIR$APP_PATH";
  docker cp ./dist/. $TOMCAT_CONTAINER_NAME:$TARGET_DIR;
fi

# Copy web.xml file into $TOMCAT_CONTAINER_NAME:$WEB_INF directory
docker cp ./web.xml $TOMCAT_CONTAINER_NAME:$WEB_INF;

# Shutdown the Tomcat server
docker exec -it $TOMCAT_CONTAINER_NAME sh -c "./bin/shutdown.sh";

# Restart the Tomcat server
docker start $TOMCAT_CONTAINER_NAME;

unset APP_PATH;
unset CATALINA_HOME;
unset TARGET_DIR;
unset TOMCAT_CONTAINER_NAME;
unset WEB_INF;
// repo: MOAMaster/AudioPlugSharp-SamplePlugins
//-----------------------------------------------------------------------------
// Project     : VST SDK
//
// Category    : Examples
// Filename    : public.sdk/samples/vst/syncdelay/source/syncdelaycontroller.cpp
// Created by  : Steinberg, 01/2020
// Description :
//
//-----------------------------------------------------------------------------
// LICENSE
// (c) 2020, Steinberg Media Technologies GmbH, All Rights Reserved
//-----------------------------------------------------------------------------
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistributions of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//   * Redistributions in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//   * Neither the name of the Steinberg Media Technologies nor the names of its
//     contributors may be used to endorse or promote products derived from this
//     software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
// OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
// OF THE POSSIBILITY OF SUCH DAMAGE.
//-----------------------------------------------------------------------------

#include "syncdelaycontroller.h"
#include "sync.h"
#include "syncdelayids.h"

#include "base/source/fstreamer.h"
#include "pluginterfaces/base/futils.h"
#include "pluginterfaces/base/ibstream.h"
#include "pluginterfaces/base/ustring.h"

namespace Steinberg {
namespace Vst {

//-----------------------------------------------------------------------------
tresult PLUGIN_API SyncDelayController::initialize (FUnknown* context)
{
	auto result = EditController::initialize (context);
	if (result == kResultTrue)
	{
		auto delayParam = new StringListParameter (STR16 ("Delay"), kDelayId, nullptr);
		for (const auto& entry : Synced)
			delayParam->appendString (entry.title);
		parameters.addParameter (delayParam); // parameters takes ownership of delayParam

		parameters.addParameter (STR16 ("Bypass"), nullptr, 1, 0,
		                         ParameterInfo::kCanAutomate | ParameterInfo::kIsBypass,
		                         kBypassId);
	}
	return kResultTrue;
}

//------------------------------------------------------------------------
tresult PLUGIN_API SyncDelayController::setComponentState (IBStream* state)
{
	if (!state)
		return kResultFalse;

	IBStreamer streamer (state, kLittleEndian);

	uint32 savedDelay = 0;
	if (streamer.readInt32u (savedDelay) == false)
		return kResultFalse;

	int32 savedBypassState = 0;
	if (streamer.readInt32 (savedBypassState) == false)
		return kResultFalse;

	setParamNormalized (kDelayId,
	                    ToNormalized<ParamValue> (savedDelay, static_cast<int32> (Synced.size () - 1)));
	setParamNormalized (kBypassId, savedBypassState ? 1 : 0);

	return kResultOk;
}

//------------------------------------------------------------------------
} // namespace Vst
} // namespace Steinberg
package com.nils.engine.state;

import java.awt.Graphics;

import com.nils.engine.main.GameContainer;

public abstract class State {

    protected StateHandler sh;
    protected GameContainer gc;
    protected String tag;

    public State(StateHandler sh, GameContainer gc, String tag) {
        this.sh = sh;
        this.gc = gc;
        this.tag = tag;
    }

    public abstract void update(double dt);

    public abstract void render(Graphics g);

    public StateHandler getSh() {
        return sh;
    }

    public void setSh(StateHandler sh) {
        this.sh = sh;
    }

    public GameContainer getGc() {
        return gc;
    }

    public void setGc(GameContainer gc) {
        this.gc = gc;
    }

    public String getTag() {
        return tag;
    }

    public void setTag(String tag) {
        this.tag = tag;
    }
}
#!/usr/bin/env bash
set -ev

mkdir build
cd build
cmake -DCMAKE_BUILD_TYPE=$CONF ..
make -j 4
./unit_test_all
cd ../pykeyvi
python setup.py build --mode $CONF
python setup.py install --user
py.test tests
cd ..
# Use embedded signing from template with added document

# Check that we're in a bash shell
if [[ $SHELL != *"bash"* ]]; then
    echo "PROBLEM: Run these scripts from within the bash shell."
fi

# Check for a valid cc email and prompt the user if CC_EMAIL and
# CC_NAME haven't been set in the config file.
source ./examples/eSignature/lib/utils.sh
CheckForValidCCEmail

# Step 1: Obtain your OAuth token
# Note: Substitute these values with your own
ACCESS_TOKEN=$(cat config/ds_access_token.txt)

# Set up variables for full code example
# Note: Substitute these values with your own
account_id=$(cat config/API_ACCOUNT_ID)

base_path="https://demo.docusign.net/restapi"

# Check that we have a template id
if [ ! -f config/TEMPLATE_ID ]; then
    echo ""
    echo "PROBLEM: A template id is needed. Fix: execute script eg008CreateTemplate.sh"
    echo ""
    exit 0
fi
template_id=`cat config/TEMPLATE_ID`

# temp files:
request_data=$(mktemp /tmp/request-eg-013.XXXXXX)
response=$(mktemp /tmp/response-eg-013.XXXXXX)
doc1_base64=$(mktemp /tmp/eg-013-doc1.XXXXXX)

# ***DS.snippet.0.start
# Fetch docs and encode
cat demo_documents/added_document.html | base64 > $doc1_base64

echo ""
echo "Sending the envelope request to DocuSign..."
echo "A template is used, it has one document. A second document will be"
echo "added by using Composite Templates"

# Concatenate the different parts of the request
# document 1 (html) has tag **signature_1**
printf \
'{
    "compositeTemplates": [
        {
            "compositeTemplateId": "1",
            "inlineTemplates": [
                {
                    "recipients": {
                        "carbonCopies": [
                            {
                                "email": "'"${CC_EMAIL}"'",
                                "name": "'"${CC_NAME}"'",
                                "recipientId": "2",
                                "roleName": "cc"
                            }
                        ],
                        "signers": [
                            {
                                "clientUserId": "1000",
                                "email": "'"${SIGNER_EMAIL}"'",
                                "name": "'"${SIGNER_NAME}"'",
                                "recipientId": "1",
                                "roleName": "signer"
                            }
                        ]
                    },
                    "sequence": "2"
                }
            ],
            "serverTemplates": [
                {
                    "sequence": "1",
                    "templateId": "' > $request_data
printf "${template_id}" >> $request_data
printf '"
                }
            ]
        },
        {
            "compositeTemplateId": "2",
            "document": {
                "documentBase64": "' >> $request_data
cat $doc1_base64 >> $request_data
printf '",
                "documentId": "1",
                "fileExtension": "html",
                "name": "Appendix 1--Sales order"
            },
            "inlineTemplates": [
                {
                    "recipients": {
                        "carbonCopies": [
                            {
                                "email": "'${CC_EMAIL}'",
                                "name": "'"${CC_NAME}"'",
                                "recipientId": "2",
                                "roleName": "cc"
                            }
                        ],
                        "signers": [
                            {
                                "email": "'"${SIGNER_EMAIL}"'",
                                "name": "'"${SIGNER_NAME}"'",
                                "recipientId": "1",
                                "roleName": "signer",
                                "tabs": {
                                    "signHereTabs": [
                                        {
                                            "anchorString": "**signature_1**",
                                            "anchorUnits": "pixels",
                                            "anchorXOffset": "20",
                                            "anchorYOffset": "10"
                                        }
                                    ]
                                }
                            }
                        ]
                    },
                    "sequence": "1"
                }
            ]
        }
    ],
    "status": "sent"
}' >> $request_data

curl --header "Authorization: Bearer ${ACCESS_TOKEN}" \
     --header "Content-Type: application/json" \
     --data-binary @${request_data} \
     --request POST ${base_path}/v2.1/accounts/${account_id}/envelopes \
     --output $response

echo ""
echo "Results:"
echo ""
cat $response

# pull out the envelopeId
envelope_id=`cat $response | grep envelopeId | sed 's/.*\"envelopeId\":\"//' | sed 's/\",.*//'`
echo "EnvelopeId: ${envelope_id}"

# Step 2. Create a recipient view (an embedded signing view)
#         that the signer will directly open in their browser to sign.
#
# The returnUrl is normally your own web app. DocuSign will redirect
# the signer to returnUrl when the DocuSign signing completes.
# For this example, we'll use http://httpbin.org/get to show the
# query parameters passed back from DocuSign

echo ""
echo "Requesting the url for the embedded signing..."
curl --header "Authorization: Bearer ${ACCESS_TOKEN}" \ --header "Content-Type: application/json" \ --data-binary ' { "returnUrl": "http://httpbin.org/get", "authenticationMethod": "none", "email": "'"${SIGNER_EMAIL}"'", "userName": "'"${SIGNER_NAME}"'", "clientUserId": 1000, }' \ --request POST ${base_path}/v2.1/accounts/${account_id}/envelopes/${envelope_id}/views/recipient \ --output ${response} echo "" echo "Response:" cat $response echo "" signing_url=`cat $response | grep url | sed 's/.*\"url\":\"//' | sed 's/\".*//'` # ***DS.snippet.0.end echo "" printf "The embedded signing URL is ${signing_url}\n" printf "It is only valid for five minutes. Attempting to automatically open your browser...\n" if which xdg-open &> /dev/null ; then xdg-open "$signing_url" elif which open &> /dev/null ; then open "$signing_url" elif which start &> /dev/null ; then start "$signing_url" fi # cleanup rm "$request_data" rm "$response" rm "$doc1_base64" echo "" echo "" echo "Done." echo ""
#!/bin/bash
#SBATCH --mail-type=END
#SBATCH --mail-user=ma.xu1@northeastern.edu
#SBATCH -N 1
#SBATCH -p ai-jumpstart
#SBATCH --gres=gpu:8
#SBATCH --cpus-per-task=64
#SBATCH --mem=512Gb
#SBATCH --time=1-23:59:00
#SBATCH --output=%j_fcvt_v5_32_TTFF_W_13_13.log

source activate timm
cd /scratch/ma.xu1/ShiftFormer
CUDA_VISIBLE_DEVICES=0,1,2,3,4,5,6,7 ./distributed_train.sh 8 /dev/shm/imagenet/ --model fcvt_v5_32_TTFF_W_13_13 -b 256 --lr 1e-3 --drop-path 0.1 --apex-amp --model-ema
#!/bin/bash

cd `dirname $0`

pkill -f localdriver

mkdir -p ~/voldriver_plugins
rm ~/voldriver_plugins/localdriver.*
mkdir -p ../mountdir

driversPath=$HOME/voldriver_plugins

~/localdriver -listenAddr="127.0.0.1:9876" \
  -transport="tcp-json" \
  -mountDir="../mountdir" \
  -driversPath="${driversPath}" \
  -requireSSL=true \
  -caFile="$PWD/certs/localdriver_ca.crt" \
  -certFile="$PWD/certs/localdriver_server.crt" \
  -keyFile="$PWD/certs/localdriver_server.key" \
  -clientCertFile="$PWD/certs/localdriver_client.crt" \
  -clientKeyFile="$PWD/certs/localdriver_client.key" \
  -insecureSkipVerify=true &
# repo: derikolsson/trendable, file: lib/concerns/trendable.rb
module Trendable
  module Concern
    extend ActiveSupport::Concern

    included do
      scope :order_by_trending, -> { order( trending_power: :desc ) }
      scope :items_to_fade_trending_power, -> { where( "trending_power > 0" ) }

      def self.has_trendable_concern?
        true
      end
    end

    def boost_trending_power!( add_value = 1000 )
      self.update_attributes( trending_power: trending_power + add_value )
    end

    def fade_out_trending_power!( multiplier = 0.9 )
      self.update_attributes( trending_power: trending_power * multiplier )
    end
  end
end
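# A hypothetical usage sketch (not from the original repo): include the concern
# in an ActiveRecord model that has an integer trending_power column.
#
#   class Article < ApplicationRecord
#     include Trendable::Concern
#   end
#
#   Article.first.boost_trending_power!                 # +1000 by default
#   Article.order_by_trending.limit(10)                 # most trending first
#   Article.items_to_fade_trending_power.find_each(&:fade_out_trending_power!)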
#!/bin/bash
# Actifio Copy Data Storage SARGPACK
# Copyright (c) 2018 Actifio Inc. All Rights Reserved
# This script collects health checks
# Version 1.0 Initial Release

# Now check for inputs: file-only mode (f)
while getopts :f opt
do
  case "$opt" in
    f) fileonly=y;;
  esac
done

# we use this for file names, don't touch this:
currentday=$(/bin/date +"%Y-%m-%d")

################################################################################################
################ THE SECTION BELOW NEEDS TO BE CUSTOMIZED. LEAVING AS DEFAULT WILL NOT WORK!
################################################################################################

# user name on each appliance. You define this on each Actifio Appliance and use CLI to access
username=admin

# key location on the RM server.
# It is the private key that matches the public key assigned to every user on every Actifio Appliance
userkey=/home/actifio/.ssh/id_rsa

# report naming and email subject
reportsubject="reporthealth_"
emailsubject="Reporthealth from Actifio Report Generator"

# directory where the report will be placed
workingdirectory="/home/actifio"

# destination for report to be sent to
# You can place multiple email addresses in double quotes with a single comma between each address, example shown:
blindrecipient="anthonyv@acme.com"
recipient="bilbo@acme.com"
emailserver=10.10.10.1
emailuser=ActifioReports@acme.com

# list of clusters, one "name}ip" pair per line
clusterlist="actifio1}10.10.10.10
actifio2}10.10.10.11"

################################################################################################
################ THE SECTION ABOVE NEEDS TO BE CUSTOMIZED. LEAVING AS DEFAULT WILL NOT WORK!
################################################################################################

# count the appliances
cleancount=0
clustercount=$(echo "$clusterlist" | wc -l)

# fetch the current date time
currentdate=$(/bin/date +"%Y-%m-%d %H:%M:%S %Z")
reportname="$workingdirectory/$reportsubject$currentday.txt"

# check the list
if [ -z "$clusterlist" ]; then
  echo "There are no clusters listed to check" > $reportname
  exit
fi

# start the report output
echo "--------------------------------------------------------------------------------" > $reportname

# work the list. Note the use of a closed loop so the cleancount updated inside the while loop is not lost
echo "$clusterlist" | {
  while IFS="}" read -ra cluster; do
    reporthealthout=$(ssh -n -o BatchMode=yes -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i $userkey $username@${cluster[1]} "reporthealth -flncm" 2>/dev/null)
    [[ -z "$reporthealthout" ]] && echo "Failed to SSH to ${cluster[0]} using IP address ${cluster[1]} with username $username" >> $reportname
    [[ $(echo "$reporthealthout" | wc -l) -eq 2 ]] && let cleancount+=1 && cleanlist=$(echo "$reporthealthout" | tail -1 | awk -F"," '{ print $1 }' | awk -F":" '{ print $2 }'; echo "$cleanlist") && continue
    colput=$(echo "$reporthealthout" | head -2 | tail -1; echo >> $reportname; echo "$reporthealthout" | head -1; echo "$reporthealthout" | tail -n +3; echo "-----------------------------,-----------------------------,---------------"; echo "$colput")
  done
  echo "$colput" | column -t -s, >> $reportname

  # Create the final report:
  [[ $cleancount -gt 0 ]] && echo "Clean Appliances were:" >> $reportname
  [[ $cleancount -eq 0 ]] && echo "There were no clean Appliances" >> $reportname
  echo "$cleanlist" | sort >> $reportname
  [[ $clustercount -ne $cleancount ]] && sed -i "1i$clustercount Appliances were checked and $cleancount Clean Appliances were found. Appliances with messages are displayed below:" $reportname
  [[ $clustercount -eq $cleancount ]] && sed -i "1i$clustercount Appliances were checked and $cleancount Clean Appliances were found." $reportname
  sed -i "1iReport from Actifio Report Manager created on ${currentdate}" $reportname
}
# the close bracket above is part of a closed loop, don't remove it

# remove any blank lines left in the file
sed -i '/^$/d' $reportname

# Execute file only, print and exit without mailing
if [ "$fileonly" == "y" ]; then
  cat $reportname
  exit 0
fi

# use sendmail
(echo "To: $recipient"
echo "Bcc: $blindrecipient"
echo "Subject: $emailsubject"
echo 'MIME-Version: 1.0
Content-Type: multipart/alternative; boundary="=_myboundary"

--=_myboundary
Content-Type: text/html; charset=us-ascii
Content-Transfer-Encoding: quoted-printable

<html>
<body>
<pre style="font: monospace">'
cat $reportname
echo '</pre>
</body>
</html>
--=_myboundary--') | /usr/sbin/sendmail -F "" -f $emailuser -t

# clean old report files that are older than one week
find $workingdirectory -name "$reportsubject*.txt" -mtime +8 -exec rm -f {} \;
package com.example.memorandum.activity;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.os.Bundle;
import android.support.v7.app.ActionBar;
import android.support.v7.widget.Toolbar;
import android.view.MenuItem;
import android.view.View;

import com.example.memorandum.R;
import com.example.memorandum.util.AppGlobal;

import solid.ren.skinlibrary.base.SkinBaseActivity;

public class SettingsActivity extends SkinBaseActivity implements View.OnClickListener {

    BroadcastReceiver mybroad = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            // TODO Auto-generated method stub
            //finish();
            recreate();
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        if ("normal".equals(AppGlobal.THEME)) {
            setTheme(R.style.AppTheme);
        } else if ("pink".equals(AppGlobal.THEME)) {
            setTheme(R.style.PinkTheme);
        } else if ("dark".equals(AppGlobal.THEME)) {
            setTheme(R.style.DarkTheme);
        }
        IntentFilter filter = new IntentFilter();
        filter.addAction("change style");
        this.registerReceiver(mybroad, filter);
        setContentView(R.layout.activity_settings);
        setTitle("Settings");
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolkit);
        setSupportActionBar(toolbar);
        ActionBar actionBar = getSupportActionBar();
        if (actionBar != null) {
            actionBar.setDisplayHomeAsUpEnabled(true);
            actionBar.setHomeAsUpIndicator(R.drawable.back);
        }
        View rlResetPassword = findViewById(R.id.rl_resetpassword);
        View rlSwitchTheme = findViewById(R.id.rl_switchtheme);
        rlResetPassword.setOnClickListener(this);
        rlSwitchTheme.setOnClickListener(this);
    }

    @Override
    public void onClick(View v) {
        switch (v.getId()) {
            case R.id.rl_resetpassword:
                startActivity(new Intent(SettingsActivity.this, ResetPasswordActivity.class));
                break;
            case R.id.rl_switchtheme:
                startActivity(new Intent(SettingsActivity.this, ThemeActivity.class));
                break;
            default:
                break;
        }
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                super.onBackPressed();
        }
        return true;
    }

    @Override
    protected void onDestroy() {
        // unregister the broadcast receiver
        super.onDestroy();
        unregisterReceiver(mybroad);
    }
}
import json
import logging

import requests
import responses

import pebbles.drivers.provisioning.openshift_driver as openshift_driver
from pebbles.tests.base import BaseTestCase

logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)


class PBClientMock(object):

    def __init__(self):
        self.instance_data = {}
        config = dict(
            memory_limit='512m',
            environment_vars=''
        )
        self.blueprint_data = {
            'bp-01': dict(
                id='bp-01',
                name='test blueprint 01',
                config=config,
                full_config=dict(
                    image='csc/test_image',
                    port=8888,
                    memory_limit='512M',
                    environment_vars='VAR=value ANOTHER_VAR=value2',
                    autodownload_url='http://example.org/materials.md',
                    autodownload_file='example_materials.md',
                    openshift_cluster_id='TEST',
                )
            ),
            'bp-02': dict(
                id='bp-02',
                name='test blueprint 02',
                config=config,
                full_config=dict(
                    image='csc/test_image',
                    port=8888,
                    memory_limit='512M',
                    openshift_cluster_id='TEST',
                    volume_mount_dir='/scratch',
                )
            ),
            'bp-ostemplate-01': dict(
                id='bp-ostemplate-01',
                name='test os template blueprint 01',
                config=config,
                full_config=dict(
                    os_template='http://example.org/template.yaml',
                    app_label='some_app',
                    memory_limit='512M',
                    environment_vars='VAR=value ANOTHER_VAR=value2',
                    openshift_cluster_id='TEST',
                )
            )
        }

    def add_instance_data(self, instance_id, blueprint_id='bp-01'):
        self.instance_data[instance_id] = dict(
            id='%s' % instance_id,
            name='pb-%s' % instance_id,
            username='user<EMAIL>',
            user_id='1111-1111-1111-1111',
            state='starting',
            blueprint_id=blueprint_id
        )

    def get_instance_description(self, instance_id):
        return self.instance_data[instance_id]

    def get_blueprint_description(self, blueprint_id):
        return self.blueprint_data[blueprint_id]

    def do_instance_patch(self, instance_id, payload):
        data = self.instance_data[instance_id]
        data.update(payload)
        if 'instance_data' in data.keys() and isinstance(data['instance_data'], str):
            data['instance_data'] = json.loads(data['instance_data'])


class OpenShiftClientMock(openshift_driver.OpenShiftClient):

    def _request_token(self, current_ts=None):
        return {
            'access_token': '<PASSWORD>',
            'lifetime': 86400,
            'expires_at': 186400,
        }


# noinspection PyUnusedLocal
class OpenShiftDriverAccessMock(object):

    def __init__(self, m2m_creds):
        self._m2m_creds = m2m_creds
        self.pbc_mock = PBClientMock()

    def get_pb_client(self, token, base_url, ssl_verify):
        return self.pbc_mock

    def get_openshift_client(self, cluster_id):
        key_base = 'OSD_%s_' % cluster_id
        return OpenShiftClientMock(
            base_url=self._m2m_creds.get(key_base + 'BASE_URL'),
            subdomain=self._m2m_creds.get(key_base + 'SUBDOMAIN'),
            user=self._m2m_creds.get(key_base + 'USER'),
            password=self._m2m_creds.get(key_base + 'PASSWORD'),
        )


# noinspection PyProtectedMember
class OpenShiftDriverTestCase(BaseTestCase):

    def setUp(self):
        pass

    @staticmethod
    def create_openshift_driver():
        config = dict(
            INSTANCE_DATA_DIR='/tmp',
            M2M_CREDENTIAL_STORE='',
            INTERNAL_API_BASE_URL='http://bogus/api/v1',
            TEST_MODE=True,
            PUBLIC_IPV4='10.0.0.1',
            EXTERNAL_HTTPS_PORT=443,
        )
        osd = openshift_driver.OpenShiftDriver(logger, config)
        osd._ap = OpenShiftDriverAccessMock(
            m2m_creds={
                'OSD_TEST_BASE_URL': 'https://localhost:8443/',
                'OSD_TEST_SUBDOMAIN': 'oso.example.com',
                'OSD_TEST_USER': 'm2m',
                'OSD_TEST_PASSWORD': '<PASSWORD>',
            }
        )
        return osd

    @responses.activate
    def test_provision_deprovision(self):
        osd = self.create_openshift_driver()
        osdam = osd._get_access_proxy()
        osc = osdam.get_openshift_client('TEST')
        user1_ns = 'user1-at-example-com-1111'

        # spawn a simple instance and destroy it
        osdam.pbc_mock.add_instance_data('1001')
        self.populate_responses(osc, user1_ns, 'pb-1001')
        osd.do_provision(token='foo', instance_id='1001')
        osd.do_deprovision(token='foo', instance_id='1001')

    @responses.activate
    def test_provision_deprovision_volume(self):
        osd = self.create_openshift_driver()
        osdam = osd._get_access_proxy()
        osc = osdam.get_openshift_client('TEST')
        user1_ns = 'user1-at-example-com-1111'

        # spawn an instance with a volume
        osdam.pbc_mock.add_instance_data('1002', blueprint_id='bp-02')
        self.populate_responses(osc, user1_ns, 'pb-1002')
        osd.do_provision(token='foo', instance_id='1002')
        osd.do_deprovision(token='foo', instance_id='1002')

    @responses.activate
    def test_print_response(self):
        url = 'http://example.org/test'
        # first add provisioning responses
        responses.add(
            responses.GET,
            url,
            json=dict(items=[dict(foo='bar', foo2='bar2')])
        )
        resp = requests.get(url, verify=False)
        openshift_driver.OpenShiftClient.print_response(resp)

    @staticmethod
    def populate_responses(osc, user1_ns, instance_name):
        oapi_ns = osc.oapi_base_url + '/namespaces/' + user1_ns
        kubeapi_ns = osc.kube_base_url + '/namespaces/' + user1_ns

        # first add provisioning responses
        responses.add(
            responses.GET,
            osc.oapi_base_url + '/projects/' + user1_ns,
        )
        responses.add(
            responses.POST,
            oapi_ns + '/deploymentconfigs',
        )
        responses.add(
            responses.GET,
            kubeapi_ns + '/pods',
            json=dict(
                items=[dict(
                    status=dict(
                        phase='Running'))]
            )
        )
        responses.add(
            responses.POST,
            kubeapi_ns + '/services',
        )
        responses.add(
            responses.POST,
            oapi_ns + '/routes',
            json={'spec': {'host': '%s.%s' % (instance_name, osc.subdomain)}}
        )

        # then add deprovisioning responses
        responses.add(
            responses.GET,
            kubeapi_ns + '/replicationcontrollers',
            json={'items': [{'spec': {'replicas': 1}, 'metadata': {'name': '%s-1' % instance_name}}]}
        )
        responses.add(
            responses.PUT,
            kubeapi_ns + '/replicationcontrollers/%s-1' % instance_name,
        )
        responses.add(
            responses.GET,
            kubeapi_ns + '/replicationcontrollers/%s-1' % instance_name,
            json={'status': {'replicas': 0}, 'metadata': {'name': '%s-1' % instance_name}}
        )
        responses.add(
            responses.DELETE,
            oapi_ns + '/deploymentconfigs/%s' % instance_name,
        )
        responses.add(
            responses.DELETE,
            kubeapi_ns + '/replicationcontrollers/%s-1' % instance_name,
        )
        responses.add(
            responses.DELETE,
            oapi_ns + '/routes/%s' % instance_name,
        )
        responses.add(
            responses.DELETE,
            kubeapi_ns + '/services/%s' % instance_name,
        )
// repo: isaacmariga/Akan_Name_Site
$('#submit').on('click', () => {
    let year = $('#birthYear').val();
    let fname = $('#firstName').val();
    let month = $('#birthMonth').val();
    let date = $('#birthDate').val();
    let gen = $('#gender').val();

    let date2 = new Date($('#dateDate').val());
    let day2 = date2.getDate();
    let month2 = date2.getMonth() + 1;
    let fullyear2 = date2.getFullYear();
    let year2 = String(fullyear2);

    let MM = parseInt(month2);
    let DD = parseInt(day2);
    let CC = parseInt(year2.substring(0, 2));
    let YY = parseInt(year2.substring(2, 4));

    // day-of-week index computed with a Zeller-style congruence
    let D = Math.floor((((CC / 4) - 2 * CC - 1) + ((5 * YY / 4)) + ((26 * (MM + 1) / 10)) + DD) % 7);

    console.log(DD);
    console.log(MM);
    console.log(CC);
    console.log(typeof (YY));
    console.log(D);

    let maleName = ["Kwasi", "Kwadwo", "Kwabena", "Kwaku", "Yaw", "Kofi", "Kwame"];
    let femaleName = ["Akosua", "Adwoa", "Abenaa", "Akua", "Yaa", "Afua", "Ama"];

    if (gen == "male") {
        alert("Your Akan name is " + fname + " " + maleName[D]);
    } else if (gen == "female") {
        alert("Your Akan name is " + fname + " " + femaleName[D]);
    }
});
var _;

_ = Uint8ClampedArray.length;
_ = Uint8ClampedArray.name;
_ = Uint8ClampedArray.prototype;
_ = Uint8ClampedArray.BYTES_PER_ELEMENT;
import Route from '@ember/routing/route';
import { underscore } from '@ember/string';
import store from 'kursausschreibung/framework/store';

export default Route.extend({
  model(params) {
    let event = store.getEventById(params.event_id);

    // check if event exists in area and category
    let areaKey = underscore(this.paramsFor('list').area_of_education);
    let categoryKey = underscore(this.paramsFor('list.category').category);

    if (
      event === undefined ||
      event.areaKey !== areaKey ||
      event.categoryKey !== categoryKey
    ) {
      this.replaceWith('list.category');
      return;
    }

    return event;
  }
});
#!/usr/bin/env python3
# encoding: utf-8
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from tensorflow.keras import Model as M
from tensorflow.keras import Input as I
from tensorflow.keras import Sequential
from skimage.util.shape import view_as_windows
from tensorflow.keras.layers import (Dense,
                                     Flatten,
                                     LayerNormalization)

from rls.utils.tf2_utils import (clip_nn_log_std,
                                 squash_rsample,
                                 gaussian_entropy,
                                 update_target_net_weights)
from rls.algos.base.off_policy import Off_Policy
from rls.utils.sundry_utils import LinearAnnealing
from rls.nn.networks import get_visual_network_from_type
from rls.nn.initializers import initKernelAndBias
from rls.utils.indexs import VisualNetworkType
from rls.utils.build_networks import (ValueNetwork,
                                      DoubleValueNetwork)
from rls.utils.indexs import OutputNetworkType


class VisualEncoder(M):

    def __init__(self, img_dim, fc_dim):
        super().__init__()
        self.net = Sequential([
            get_visual_network_from_type(VisualNetworkType.NATURE)(),
            Dense(fc_dim, **initKernelAndBias),
            LayerNormalization()
        ])
        self(I(shape=img_dim))

    def call(self, vis):
        return self.net(vis)


def random_crop(imgs, output_size):
    """
    Vectorized way to do random crop using sliding windows
    and picking out random ones

    args:
        imgs, batch images with shape (B,C,H,W)
    """
    # batch size
    n = imgs.shape[0]
    img_size = imgs.shape[-1]
    crop_max = img_size - output_size
    imgs = np.transpose(imgs, (0, 2, 3, 1))
    w1 = np.random.randint(0, crop_max, n)
    h1 = np.random.randint(0, crop_max, n)
    # creates all sliding windows combinations of size (output_size)
    windows = view_as_windows(imgs, (1, output_size, output_size, 1))[..., 0, :, :, 0]
    # selects a random window for each batch element
    cropped_imgs = windows[np.arange(n), w1, h1]
    return cropped_imgs


def center_crop_image(image, output_size):
    h, w = image.shape[1:3]
    new_h, new_w = output_size, output_size

    top = (h - new_h) // 2
    left = (w - new_w) // 2
    image = image[:, top:top + new_h, left:left + new_w]
    return image


class CURL(Off_Policy):
    """
    CURL: Contrastive Unsupervised Representations for Reinforcement Learning, http://arxiv.org/abs/2004.04136
    """

    def __init__(self,
                 envspec,
                 alpha=0.2,
                 annealing=True,
                 last_alpha=0.01,
                 ployak=0.995,
                 discrete_tau=1.0,
                 log_std_bound=[-20, 2],
                 network_settings={
                     'actor_continuous': {
                         'share': [128, 128],
                         'mu': [64],
                         'log_std': [64]
                     },
                     'actor_discrete': [64, 32],
                     'q': [128, 128],
                     'encoder': 128
                 },
                 auto_adaption=True,
                 actor_lr=5.0e-4,
                 critic_lr=1.0e-3,
                 alpha_lr=5.0e-4,
                 curl_lr=5.0e-4,
                 img_size=64,
                 **kwargs):
        super().__init__(envspec=envspec, **kwargs)
        assert self.visual_sources == 1
        self.ployak = ployak
        self.discrete_tau = discrete_tau
        self.log_std_min, self.log_std_max = log_std_bound[:]
        self.auto_adaption = auto_adaption
        self.annealing = annealing
        self.img_size = img_size
        self.img_dim = [img_size, img_size, self.visual_dim[-1]]
        self.vis_feat_size = network_settings['encoder']

        if self.auto_adaption:
            self.log_alpha = tf.Variable(initial_value=0.0, name='log_alpha', dtype=tf.float32, trainable=True)
        else:
            self.log_alpha = tf.Variable(initial_value=tf.math.log(alpha), name='log_alpha', dtype=tf.float32, trainable=False)
            if self.annealing:
                self.alpha_annealing = LinearAnnealing(alpha, last_alpha, 1.0e6)

        def _create_net(name): return DoubleValueNetwork(
            name=name,
            value_net_type=OutputNetworkType.CRITIC_QVALUE_ONE,
            value_net_kwargs=dict(vector_dim=self.s_dim + self.vis_feat_size,
                                  action_dim=self.a_dim,
                                  network_settings=network_settings['q'])
        )

        self.critic_net = _create_net('critic_net')
        self.critic_target_net = _create_net('critic_target_net')

        if self.is_continuous:
            self.actor_net = ValueNetwork(
                name='actor_net',
                value_net_type=OutputNetworkType.ACTOR_CTS,
                value_net_kwargs=dict(vector_dim=self.s_dim + self.vis_feat_size,
                                      output_shape=self.a_dim,
                                      network_settings=network_settings['actor_continuous'])
            )
        else:
            self.actor_net = ValueNetwork(
                name='actor_net',
                value_net_type=OutputNetworkType.ACTOR_DCT,
                value_net_kwargs=dict(vector_dim=self.s_dim + self.vis_feat_size,
                                      output_shape=self.a_dim,
                                      network_settings=network_settings['actor_discrete'])
            )
            self.gumbel_dist = tfp.distributions.Gumbel(0, 1)

        # entropy = -log(1/|A|) = log |A|
        self.target_entropy = 0.98 * (-self.a_dim if self.is_continuous else np.log(self.a_dim))

        self.encoder = VisualEncoder(self.img_dim, self.vis_feat_size)
        self.encoder_target = VisualEncoder(self.img_dim, self.vis_feat_size)

        self.curl_w = tf.Variable(initial_value=tf.random.normal(shape=(self.vis_feat_size, self.vis_feat_size)),
                                  name='curl_w', dtype=tf.float32, trainable=True)

        self.critic_tv = self.critic_net.trainable_variables + self.encoder.trainable_variables

        update_target_net_weights(
            self.critic_target_net.weights + self.encoder_target.trainable_variables,
            self.critic_net.weights + self.encoder.trainable_variables
        )
        self.actor_lr, self.critic_lr, self.alpha_lr, self.curl_lr = map(self.init_lr, [actor_lr, critic_lr, alpha_lr, curl_lr])
        self.optimizer_actor, self.optimizer_critic, self.optimizer_alpha, self.optimizer_curl = map(self.init_optimizer, [self.actor_lr, self.critic_lr, self.alpha_lr, self.curl_lr])

        self._worker_params_dict.update(self.actor_net._policy_models)
        self._worker_params_dict.update(encoder=self.encoder)

        self._all_params_dict.update(self.actor_net._all_models)
        self._all_params_dict.update(self.critic_net._all_models)
        self._all_params_dict.update(curl_w=self.curl_w,
                                     encoder=self.encoder,
                                     optimizer_actor=self.optimizer_actor,
                                     optimizer_critic=self.optimizer_critic,
                                     optimizer_alpha=self.optimizer_alpha,
                                     optimizer_curl=self.optimizer_curl)
        self._model_post_process()

    def choose_action(self, s, visual_s, evaluation=False):
        visual_s = center_crop_image(visual_s[:, 0], self.img_size)
        mu, pi = self._get_action(s, visual_s)
        a = mu.numpy() if evaluation else pi.numpy()
        return a

    @tf.function
    def _get_action(self, s, visual_s):
        with tf.device(self.device):
            feat = tf.concat([self.encoder(visual_s), s], axis=-1)
            if self.is_continuous:
                mu, log_std = self.actor_net.value_net(feat)
                log_std = clip_nn_log_std(log_std, self.log_std_min, self.log_std_max)
                pi, _ = squash_rsample(mu, log_std)
                mu = tf.tanh(mu)    # squash mu
            else:
                logits = self.actor_net.value_net(feat)
                mu = tf.argmax(logits, axis=1)
                cate_dist = tfp.distributions.Categorical(logits=tf.nn.log_softmax(logits))
                pi = cate_dist.sample()
            return mu, pi

    def _process_before_train(self, data):
        data['visual_s'] = np.transpose(data['visual_s'][:, 0].numpy(), (0, 3, 1, 2))
        data['visual_s_'] = np.transpose(data['visual_s_'][:, 0].numpy(), (0, 3, 1, 2))
        data['pos'] = self.data_convert(
            np.transpose(random_crop(data['visual_s'], self.img_size), (0, 2, 3, 1))
        )
        data['visual_s'] = self.data_convert(
            np.transpose(random_crop(data['visual_s'], self.img_size), (0, 2, 3, 1))
        )
        data['visual_s_'] = self.data_convert(
            np.transpose(random_crop(data['visual_s_'], self.img_size), (0, 2, 3, 1))
        )
        return (data,)

    def _target_params_update(self):
        update_target_net_weights(self.critic_target_net.weights + self.encoder_target.trainable_variables,
                                  self.critic_net.weights + self.encoder.trainable_variables,
                                  self.ployak)

    def learn(self, **kwargs):
        self.train_step = kwargs.get('train_step')
        for i in range(self.train_times_per_step):
            self._learn(function_dict={
                'summary_dict': dict([
                    ['LEARNING_RATE/actor_lr', self.actor_lr(self.train_step)],
                    ['LEARNING_RATE/critic_lr', self.critic_lr(self.train_step)],
                    ['LEARNING_RATE/alpha_lr', self.alpha_lr(self.train_step)]
                ]),
                'train_data_list': ['s', 'visual_s', 'a', 'r', 's_', 'visual_s_', 'done', 'pos'],
            })

    @property
    def alpha(self):
        return tf.exp(self.log_alpha)

    def _train(self, memories, isw, cell_state):
        td_error, summaries = self.train(memories, isw, cell_state)
        if self.annealing and not self.auto_adaption:
            self.log_alpha.assign(tf.math.log(tf.cast(self.alpha_annealing(self.global_step.numpy()), tf.float32)))
        return td_error, summaries

    @tf.function(experimental_relax_shapes=True)
    def train(self, memories, isw, cell_state):
        s, visual_s, a, r, s_, visual_s_, done, pos = memories
        with tf.device(self.device):
            with tf.GradientTape(persistent=True) as tape:
                vis_feat = self.encoder(visual_s)
                vis_feat_ = self.encoder(visual_s_)
                target_vis_feat_ = self.encoder_target(visual_s_)

                feat = tf.concat([vis_feat, s], axis=-1)
                feat_ = tf.concat([vis_feat_, s_], axis=-1)
                target_feat_ = tf.concat([target_vis_feat_, s_], axis=-1)

                if self.is_continuous:
                    target_mu, target_log_std = self.actor_net.value_net(feat_)
                    target_log_std = clip_nn_log_std(target_log_std, self.log_std_min, self.log_std_max)
                    target_pi, target_log_pi = squash_rsample(target_mu, target_log_std)
                else:
                    target_logits = self.actor_net.value_net(feat_)
                    target_cate_dist = tfp.distributions.Categorical(logits=tf.nn.log_softmax(target_logits))
                    target_pi = target_cate_dist.sample()
                    target_log_pi = target_cate_dist.log_prob(target_pi)
                    target_pi = tf.one_hot(target_pi, self.a_dim, dtype=tf.float32)

                q1, q2 = self.critic_net.value_net(feat, a)
                q1_target, q2_target = self.critic_target_net.value_net(feat_, target_pi)
                q_target = tf.minimum(q1_target, q2_target)
                dc_r = tf.stop_gradient(r + self.gamma * (1 - done) * (q_target - self.alpha * target_log_pi))

                td_error1 = q1 - dc_r
                td_error2 = q2 - dc_r
                q1_loss = tf.reduce_mean(tf.square(td_error1) * isw)
                q2_loss = tf.reduce_mean(tf.square(td_error2) * isw)
                critic_loss = 0.5 * q1_loss + 0.5 * q2_loss

                z_a = vis_feat  # [B, N]
                z_out = self.encoder_target(pos)
                logits = tf.matmul(z_a, tf.matmul(self.curl_w, tf.transpose(z_out, [1, 0])))
                logits -= tf.reduce_max(logits, axis=-1, keepdims=True)
                curl_loss = tf.reduce_mean(tf.keras.losses.sparse_categorical_crossentropy(tf.range(self.batch_size), logits))

            critic_grads = tape.gradient(critic_loss, self.critic_tv)
            self.optimizer_critic.apply_gradients(
                zip(critic_grads, self.critic_tv)
            )
            curl_grads = tape.gradient(curl_loss, [self.curl_w] + self.encoder.trainable_variables)
            self.optimizer_curl.apply_gradients(
                zip(curl_grads, [self.curl_w] + self.encoder.trainable_variables)
            )

            with tf.GradientTape() as tape:
                if self.is_continuous:
                    mu, log_std = self.actor_net.value_net(feat)
                    log_std = clip_nn_log_std(log_std, self.log_std_min, self.log_std_max)
                    pi, log_pi = squash_rsample(mu, log_std)
                    entropy = gaussian_entropy(log_std)
                else:
                    logits = self.actor_net.value_net(feat)
                    logp_all = tf.nn.log_softmax(logits)
                    gumbel_noise = tf.cast(self.gumbel_dist.sample(a.shape), dtype=tf.float32)
                    _pi = tf.nn.softmax((logp_all + gumbel_noise) / self.discrete_tau)
                    _pi_true_one_hot = tf.one_hot(tf.argmax(_pi, axis=-1), self.a_dim)
                    _pi_diff = tf.stop_gradient(_pi_true_one_hot - _pi)
                    pi = _pi_diff + _pi
                    log_pi = tf.reduce_sum(tf.multiply(logp_all, pi), axis=1, keepdims=True)
                    entropy = -tf.reduce_mean(tf.reduce_sum(tf.exp(logp_all) * logp_all, axis=1, keepdims=True))
                q_s_pi = self.critic_net.get_min(feat, pi)
                actor_loss = -tf.reduce_mean(q_s_pi - self.alpha * log_pi)
            actor_grads = tape.gradient(actor_loss, self.actor_net.trainable_variables)
            self.optimizer_actor.apply_gradients(
                zip(actor_grads, self.actor_net.trainable_variables)
            )

            if self.auto_adaption:
                with tf.GradientTape() as tape:
                    if self.is_continuous:
                        mu, log_std = self.actor_net.value_net(feat)
                        log_std = clip_nn_log_std(log_std, self.log_std_min, self.log_std_max)
                        norm_dist = tfp.distributions.Normal(loc=mu, scale=tf.exp(log_std))
                        log_pi = tf.reduce_sum(norm_dist.log_prob(norm_dist.sample()), axis=-1)
                    else:
                        logits = self.actor_net.value_net(feat)
                        cate_dist = tfp.distributions.Categorical(logits=tf.nn.log_softmax(logits))
                        log_pi = cate_dist.log_prob(cate_dist.sample())
                    alpha_loss = -tf.reduce_mean(self.alpha * tf.stop_gradient(log_pi + self.target_entropy))
                alpha_grad = tape.gradient(alpha_loss, self.log_alpha)
                self.optimizer_alpha.apply_gradients(
                    [(alpha_grad, self.log_alpha)]
                )

            self.global_step.assign_add(1)
            summaries = dict([
                ['LOSS/actor_loss', actor_loss],
                ['LOSS/q1_loss', q1_loss],
                ['LOSS/q2_loss', q2_loss],
                ['LOSS/critic_loss', critic_loss],
                ['LOSS/curl_loss', curl_loss],
                ['Statistics/log_alpha', self.log_alpha],
                ['Statistics/alpha', self.alpha],
                ['Statistics/entropy', entropy],
                ['Statistics/q_min', tf.reduce_min(tf.minimum(q1, q2))],
                ['Statistics/q_mean', tf.reduce_mean(tf.minimum(q1, q2))],
                ['Statistics/q_max', tf.reduce_max(tf.maximum(q1, q2))]
            ])
            if self.auto_adaption:
                summaries.update({
                    'LOSS/alpha_loss': alpha_loss
                })
            return (td_error1 + td_error2) / 2., summaries
<filename>certbot/configuration.py """Certbot user-supplied configuration.""" import copy import os from six.moves.urllib import parse # pylint: disable=import-error import zope.interface from certbot import constants from certbot import errors from certbot import interfaces from certbot import util @zope.interface.implementer(interfaces.IConfig) class NamespaceConfig(object): """Configuration wrapper around :class:`argparse.Namespace`. For more documentation, including available attributes, please see :class:`certbot.interfaces.IConfig`. However, note that the following attributes are dynamically resolved using :attr:`~certbot.interfaces.IConfig.work_dir` and relative paths defined in :py:mod:`certbot.constants`: - `accounts_dir` - `csr_dir` - `in_progress_dir` - `key_dir` - `temp_checkpoint_dir` And the following paths are dynamically resolved using :attr:`~certbot.interfaces.IConfig.config_dir` and relative paths defined in :py:mod:`certbot.constants`: - `default_archive_dir` - `live_dir` - `renewal_configs_dir` :ivar namespace: Namespace typically produced by :meth:`argparse.ArgumentParser.parse_args`. :type namespace: :class:`argparse.Namespace` """ def __init__(self, namespace): object.__setattr__(self, 'namespace', namespace) self.namespace.config_dir = os.path.abspath(self.namespace.config_dir) self.namespace.work_dir = os.path.abspath(self.namespace.work_dir) self.namespace.logs_dir = os.path.abspath(self.namespace.logs_dir) # Check command line parameters sanity, and error out in case of problem. check_config_sanity(self) def __getattr__(self, name): return getattr(self.namespace, name) def __setattr__(self, name, value): setattr(self.namespace, name, value) @property def server_path(self): """File path based on ``server``.""" parsed = parse.urlparse(self.namespace.server) return (parsed.netloc + parsed.path).replace('/', os.path.sep) @property def accounts_dir(self): # pylint: disable=missing-docstring return os.path.join( self.namespace.config_dir, constants.ACCOUNTS_DIR, self.server_path) @property def backup_dir(self): # pylint: disable=missing-docstring return os.path.join(self.namespace.work_dir, constants.BACKUP_DIR) @property def csr_dir(self): # pylint: disable=missing-docstring return os.path.join(self.namespace.config_dir, constants.CSR_DIR) @property def in_progress_dir(self): # pylint: disable=missing-docstring return os.path.join(self.namespace.work_dir, constants.IN_PROGRESS_DIR) @property def key_dir(self): # pylint: disable=missing-docstring return os.path.join(self.namespace.config_dir, constants.KEY_DIR) @property def temp_checkpoint_dir(self): # pylint: disable=missing-docstring return os.path.join( self.namespace.work_dir, constants.TEMP_CHECKPOINT_DIR) def __deepcopy__(self, _memo): # Work around https://bugs.python.org/issue1515 for py26 tests :( :( # https://travis-ci.org/letsencrypt/letsencrypt/jobs/106900743#L3276 new_ns = copy.deepcopy(self.namespace) return type(self)(new_ns) @property def default_archive_dir(self): # pylint: disable=missing-docstring return os.path.join(self.namespace.config_dir, constants.ARCHIVE_DIR) @property def live_dir(self): # pylint: disable=missing-docstring return os.path.join(self.namespace.config_dir, constants.LIVE_DIR) @property def renewal_configs_dir(self): # pylint: disable=missing-docstring return os.path.join( self.namespace.config_dir, constants.RENEWAL_CONFIGS_DIR) @property def renewal_hooks_dir(self): """Path to directory with hooks to run with the renew subcommand.""" return 
os.path.join(
            self.namespace.config_dir, constants.RENEWAL_HOOKS_DIR)

    @property
    def renewal_pre_hooks_dir(self):
        """Path to the pre-hook directory for the renew subcommand."""
        return os.path.join(
            self.renewal_hooks_dir, constants.RENEWAL_PRE_HOOKS_DIR)

    @property
    def renewal_deploy_hooks_dir(self):
        """Path to the deploy-hook directory for the renew subcommand."""
        return os.path.join(
            self.renewal_hooks_dir, constants.RENEWAL_DEPLOY_HOOKS_DIR)

    @property
    def renewal_post_hooks_dir(self):
        """Path to the post-hook directory for the renew subcommand."""
        return os.path.join(
            self.renewal_hooks_dir, constants.RENEWAL_POST_HOOKS_DIR)


def check_config_sanity(config):
    """Validate command line options and display error message if
    requirements are not met.

    :param config: IConfig instance holding user configuration
    :type config: :class:`certbot.interfaces.IConfig`

    """
    # Port check
    if config.http01_port == config.tls_sni_01_port:
        raise errors.ConfigurationError(
            "Trying to run http-01 and tls-sni-01 "
            "on the same port ({0})".format(config.tls_sni_01_port))

    # Domain checks
    if config.namespace.domains is not None:
        for domain in config.namespace.domains:
            # This may be redundant, but let's be paranoid
            util.enforce_domain_sanity(domain)
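A minimal usage sketch for NamespaceConfig, assuming only the attributes touched by __init__ and check_config_sanity above and a working certbot installation; real certbot namespaces carry many more fields.

import argparse

ns = argparse.Namespace(
    config_dir='/etc/letsencrypt',
    work_dir='/var/lib/letsencrypt',
    logs_dir='/var/log/letsencrypt',
    server='https://acme-v02.api.letsencrypt.org/directory',
    http01_port=80,
    tls_sni_01_port=443,
    domains=['example.com'],
)

config = NamespaceConfig(ns)
print(config.accounts_dir)       # <config_dir>/accounts/<server_path>
print(config.renewal_hooks_dir)  # <config_dir>/renewal-hooks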
<filename>gridgo-bean/src/main/java/io/gridgo/bean/impl/AbstractBContainer.java<gh_stars>1-10 package io.gridgo.bean.impl; import io.gridgo.bean.BContainer; import io.gridgo.bean.factory.BFactory; import lombok.Getter; import lombok.Setter; public abstract class AbstractBContainer extends AbstractBElement implements BContainer { @Setter @Getter private BFactory factory; }
SELECT department, AVG(salary) AS avg_salary
FROM employees
GROUP BY department;
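The same aggregate pattern extends naturally with a HAVING filter on the grouped rows; the threshold below is illustrative.

SELECT department, AVG(salary) AS avg_salary
FROM employees
GROUP BY department
HAVING AVG(salary) > 50000
ORDER BY avg_salary DESC;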
// Resolves after the given number of milliseconds; used to pace the lyric updates.
function sleep(ms) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

// Replaces the inner HTML of the element with the given id.
function ReplaceContentInContainer(id, content) {
  const container = document.getElementById(id);
  container.innerHTML = content;
}

// Steps through the global `json` object (expected to map keys to objects
// with a `msg` field) and shows one entry every 4.5 seconds.
async function Lyrics() {
  for (const key in json) {
    await sleep(4500);
    ReplaceContentInContainer('lyricsdiv', json[key].msg);
  }
}
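A hypothetical shape for the global `json` object the loop above iterates over; the real data source is not shown in this file.

const json = {
  0: { msg: "First line of lyrics" },
  1: { msg: "Second line of lyrics" },
};

Lyrics(); // swaps the contents of #lyricsdiv every 4.5 seconds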
gpu=$1 model=$2 bert_dir=$3 output_dir=$4 adapter_dir=$5 adapter_dir_2=$6 add1=$7 add2=$8 add3=$9 ## DST CUDA_VISIBLE_DEVICES=$gpu python main_domain_adapter_fusion.py \ --my_model=BeliefTracker \ --model_type=${model} \ --dataset='["multiwoz"]' \ --task_name="dst" \ --earlystop="joint_acc" \ --output_dir=${output_dir}/DST/MWOZ_stack_full/ \ --do_train \ --task=dst \ --example_type=turn \ --cache_dir="./save/transformers" \ --model_name_or_path=${bert_dir} \ --adapter_name_or_path=${adapter_dir} \ --adapter_name_or_path_2=${adapter_dir_2} \ --save_adapter_path=${output_dir}/DST/MWOZ_stack_full/RS \ --batch_size=6 --eval_batch_size=6 \ --usr_token=[USR] --sys_token=[SYS] \ --eval_by_step=1000 \ $add1 $add2 $add3 ### Response Retrieval #CUDA_VISIBLE_DEVICES=$gpu python main_domain_adapter_fusion.py \ # --my_model=dual_encoder_ranking \ # --do_train \ # --task=nlg \ # --task_name=rs \ # --example_type=turn \ # --model_type=${model} \ # --model_name_or_path=${bert_dir} \ # --output_dir=${output_dir}/RR/MWOZ_stack_full/ \ # --cache_dir="./save/transformers" \ # --adapter_name_or_path=${adapter_dir} \ # --adapter_name_or_path_2=${adapter_dir_2} \ # --save_adapter_path=${output_dir}/RR/MWOZ_stack_full/RS \ # --batch_size=15 --eval_batch_size=100 \ # --usr_token=[USR] --sys_token=[SYS] \ # --fix_rand_seed \ # --eval_by_step=1000 \ # --max_seq_length=256 \ # $add1 $add2 $add3
#!/usr/bin/env bash CURRENT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" source "$CURRENT_DIR/helpers.sh" # script global variables color_charge_primary_tier8='' color_charge_primary_tier7='' color_charge_primary_tier6='' color_charge_primary_tier5='' color_charge_primary_tier4='' color_charge_primary_tier3='' color_charge_primary_tier2='' color_charge_primary_tier1='' color_charge_secondary_tier8='' color_charge_secondary_tier7='' color_charge_secondary_tier6='' color_charge_secondary_tier5='' color_charge_secondary_tier4='' color_charge_secondary_tier3='' color_charge_secondary_tier2='' color_charge_secondary_tier1='' # script default variables color_charge_primary_tier8_default='#00ff00' color_charge_primary_tier7_default='#55ff00' color_charge_primary_tier6_default='#aaff00' color_charge_primary_tier5_default='#ffff00' color_charge_primary_tier4_default='#ffc000' color_charge_primary_tier3_default='#ff8000' color_charge_primary_tier2_default='#ff4000' color_charge_primary_tier1_default='#ff0000' color_charge_secondary_tier8_default='colour0' color_charge_secondary_tier7_default='colour0' color_charge_secondary_tier6_default='colour0' color_charge_secondary_tier5_default='colour0' color_charge_secondary_tier4_default='colour0' color_charge_secondary_tier3_default='colour0' color_charge_secondary_tier2_default='colour0' color_charge_secondary_tier1_default='colour0' # colors are set as script global variables get_color_charge_settings() { color_charge_primary_tier8=$(get_tmux_option "@batt_color_charge_primary_tier8" "$color_charge_primary_tier8_default") color_charge_primary_tier7=$(get_tmux_option "@batt_color_charge_primary_tier7" "$color_charge_primary_tier7_default") color_charge_primary_tier6=$(get_tmux_option "@batt_color_charge_primary_tier6" "$color_charge_primary_tier6_default") color_charge_primary_tier5=$(get_tmux_option "@batt_color_charge_primary_tier5" "$color_charge_primary_tier5_default") color_charge_primary_tier4=$(get_tmux_option "@batt_color_charge_primary_tier4" "$color_charge_primary_tier4_default") color_charge_primary_tier3=$(get_tmux_option "@batt_color_charge_primary_tier3" "$color_charge_primary_tier3_default") color_charge_primary_tier2=$(get_tmux_option "@batt_color_charge_primary_tier2" "$color_charge_primary_tier2_default") color_charge_primary_tier1=$(get_tmux_option "@batt_color_charge_primary_tier1" "$color_charge_primary_tier1_default") color_charge_secondary_tier8=$(get_tmux_option "@batt_color_charge_secondary_tier8" "$color_charge_secondary_tier8_default") color_charge_secondary_tier7=$(get_tmux_option "@batt_color_charge_secondary_tier7" "$color_charge_secondary_tier7_default") color_charge_secondary_tier6=$(get_tmux_option "@batt_color_charge_secondary_tier6" "$color_charge_secondary_tier6_default") color_charge_secondary_tier5=$(get_tmux_option "@batt_color_charge_secondary_tier5" "$color_charge_secondary_tier5_default") color_charge_secondary_tier4=$(get_tmux_option "@batt_color_charge_secondary_tier4" "$color_charge_secondary_tier4_default") color_charge_secondary_tier3=$(get_tmux_option "@batt_color_charge_secondary_tier3" "$color_charge_secondary_tier3_default") color_charge_secondary_tier2=$(get_tmux_option "@batt_color_charge_secondary_tier2" "$color_charge_secondary_tier2_default") color_charge_secondary_tier1=$(get_tmux_option "@batt_color_charge_secondary_tier1" "$color_charge_secondary_tier1_default") } print_color_charge() { local primary_plane="$1" local secondary_plane="" if [ "$primary_plane" == "bg" ]; then 
secondary_plane="fg" else secondary_plane="bg" fi percentage=$($CURRENT_DIR/battery_percentage.sh | sed -e 's/%//') if [ $percentage -ge 95 -o "$percentage" == "" ]; then # if percentage is empty, assume it's a desktop printf "#[$primary_plane=$color_charge_primary_tier8${color_charge_secondary_tier8:+",$secondary_plane=$color_charge_secondary_tier8"}]" elif [ $percentage -ge 80 ]; then printf "#[$primary_plane=$color_charge_primary_tier7${color_charge_secondary_tier7:+",$secondary_plane=$color_charge_secondary_tier7"}]" elif [ $percentage -ge 65 ]; then printf "#[$primary_plane=$color_charge_primary_tier6${color_charge_secondary_tier6:+",$secondary_plane=$color_charge_secondary_tier6"}]" elif [ $percentage -ge 50 ]; then printf "#[$primary_plane=$color_charge_primary_tier5${color_charge_secondary_tier5:+",$secondary_plane=$color_charge_secondary_tier5"}]" elif [ $percentage -ge 35 ]; then printf "#[$primary_plane=$color_charge_primary_tier4${color_charge_secondary_tier4:+",$secondary_plane=$color_charge_secondary_tier4"}]" elif [ $percentage -ge 20 ]; then printf "#[$primary_plane=$color_charge_primary_tier3${color_charge_secondary_tier3:+",$secondary_plane=$color_charge_secondary_tier3"}]" elif [ $percentage -gt 5 ]; then printf "#[$primary_plane=$color_charge_primary_tier2${color_charge_secondary_tier2:+",$secondary_plane=$color_charge_secondary_tier2"}]" else printf "#[$primary_plane=$color_charge_primary_tier1${color_charge_secondary_tier1:+",$secondary_plane=$color_charge_secondary_tier1"}]" fi } main() { local plane="$1" get_color_charge_settings print_color_charge "$plane" } main $@
#!/bin/sh -l

set -e  # if a command fails it stops the execution
set -u  # script fails if trying to access an undefined variable

echo "[+] Action start"
SOURCE_BEFORE_DIRECTORY="${1}"
SOURCE_DIRECTORY="${2}"
DESTINATION_GITHUB_USERNAME="${3}"
DESTINATION_REPOSITORY_NAME="${4}"
GITHUB_SERVER="${5}"
USER_EMAIL="${6}"
USER_NAME="${7}"
DESTINATION_REPOSITORY_USERNAME="${8}"
TARGET_BRANCH="${9}"
COMMIT_MESSAGE="${10}"
TARGET_DIRECTORY="${11}"
TARGET_TAG="${12}"

if [ -z "$DESTINATION_REPOSITORY_USERNAME" ]
then
	DESTINATION_REPOSITORY_USERNAME="$DESTINATION_GITHUB_USERNAME"
fi

if [ -z "$USER_NAME" ]
then
	USER_NAME="$DESTINATION_GITHUB_USERNAME"
fi

CLONE_DIR=$(mktemp -d)

echo "[+] Cloning destination git repository $DESTINATION_REPOSITORY_NAME"

# Setup git
git config --global user.email "$USER_EMAIL"
git config --global user.name "$USER_NAME"

{
	git clone --single-branch --branch "$TARGET_BRANCH" "https://$USER_NAME:$API_TOKEN_GITHUB@$GITHUB_SERVER/$DESTINATION_REPOSITORY_USERNAME/$DESTINATION_REPOSITORY_NAME.git" "$CLONE_DIR"
} || {
	echo "::error::Could not clone the destination repository. Command:"
	echo "::error::git clone --single-branch --branch $TARGET_BRANCH https://$USER_NAME:the_api_token@$GITHUB_SERVER/$DESTINATION_REPOSITORY_USERNAME/$DESTINATION_REPOSITORY_NAME.git $CLONE_DIR"
	echo "::error::(Note that the USER_NAME and API_TOKEN are redacted by GitHub)"
	echo "::error::Please verify that the target repository exists, that it contains the destination branch name, and that it is accessible by the API_TOKEN_GITHUB"
	exit 1
}
ls -la "$CLONE_DIR"

TEMP_DIR=$(mktemp -d)
# This mv is the easiest way to remove files that used to exist in the repository
# but not anymore. Otherwise we would have to remove the files from "$CLONE_DIR",
# including "." and with the exception of ".git/"
mv "$CLONE_DIR/.git" "$TEMP_DIR/.git"

# $TARGET_DIRECTORY is '' by default
ABSOLUTE_TARGET_DIRECTORY="$CLONE_DIR/$TARGET_DIRECTORY/"

echo "[+] Deleting $ABSOLUTE_TARGET_DIRECTORY"
rm -rf "$ABSOLUTE_TARGET_DIRECTORY"
echo "[+] Creating (now empty) $ABSOLUTE_TARGET_DIRECTORY"
mkdir -p "$ABSOLUTE_TARGET_DIRECTORY"

echo "[+] Listing Current Directory Location"
ls -al

echo "[+] Listing root Location"
ls -al /

mv "$TEMP_DIR/.git" "$CLONE_DIR/.git"

echo "[+] List contents of $SOURCE_DIRECTORY"
ls "$SOURCE_DIRECTORY"

echo "[+] Checking if local $SOURCE_DIRECTORY exists"
if [ ! -d "$SOURCE_DIRECTORY" ]
then
	echo "ERROR: $SOURCE_DIRECTORY does not exist"
	echo "This directory needs to exist when push-to-another-repository is executed"
	echo
	echo "In the example it is created by ./build.sh: https://github.com/cpina/push-to-another-repository-example/blob/main/.github/workflows/ci.yml#L19"
	echo
	echo "If you want to copy a directory that exists in the source repository"
	echo "to the target repository: you need to clone the source repository"
	echo "in a previous step in the same build section. For example using"
	echo "actions/checkout@v2. See: https://github.com/cpina/push-to-another-repository-example/blob/main/.github/workflows/ci.yml#L16"
	exit 1
fi

echo "[+] Copying contents of source repository folder $SOURCE_DIRECTORY to folder $TARGET_DIRECTORY in git repo $DESTINATION_REPOSITORY_NAME"
cp -ra "$SOURCE_DIRECTORY"/. "$CLONE_DIR/$TARGET_DIRECTORY"
cd "$CLONE_DIR"

echo "[+] Files that will be pushed"
ls -la

ORIGIN_COMMIT="https://$GITHUB_SERVER/$GITHUB_REPOSITORY/commit/$GITHUB_SHA"
COMMIT_MESSAGE="${COMMIT_MESSAGE/ORIGIN_COMMIT/$ORIGIN_COMMIT}"
COMMIT_MESSAGE="${COMMIT_MESSAGE/\$GITHUB_REF/$GITHUB_REF}"

echo "[+] Adding git commit"
git add .
echo "[+] git status:" git status echo "[+] git diff-index:" # git diff-index : to avoid doing the git commit failing if there are no changes to be commit git diff-index --quiet HEAD || git commit --message "$COMMIT_MESSAGE" if [ ! -z "$TARGET_TAG" ] then echo "[+] Create tag" git tag "$TARGET_TAG" echo "[+] Pushing git commit with tag" # --set-upstream: sets de branch when pushing to a branch that does not exist git push "https://$USER_NAME:$API_TOKEN_GITHUB@$GITHUB_SERVER/$DESTINATION_REPOSITORY_USERNAME/$DESTINATION_REPOSITORY_NAME.git" --set-upstream "$TARGET_BRANCH" "$TARGET_TAG" else echo "[+] Pushing git commit without tag" # --set-upstream: sets de branch when pushing to a branch that does not exist git push "https://$USER_NAME:$API_TOKEN_GITHUB@$GITHUB_SERVER/$DESTINATION_REPOSITORY_USERNAME/$DESTINATION_REPOSITORY_NAME.git" --set-upstream "$TARGET_BRANCH" fi
<reponame>my-msblog/msblog-vite<filename>src/api/model/client/article.ts import { TagVO } from "./home"; export interface CommentItemVO{ id: number; articleId: number; parentId: number; publishTime: Date; children: CommentItemVO[]; context: string; like: number; commenterId: number; isLike: boolean; publisher: string; respondent: string; } export interface ArticleVO{ id: number; title: string; content: string; writer: string; contentMd: string; likes: number; reading: number; wordCount: number; cover: string; tags: Array<TagVO>; type: number; typeName: string; createTime: Date | string; updateTime: Date | string; } export interface CommentSubmitDTO{ commentId: number; context: string; replyTime: Date; } export interface GiveLikesDTO{ userId: number; commentId: number; is: boolean; time: Date; } export interface RecommendVO{ id: number; title: string; cover: string; createTime: string; }
def is_perfect_cube(n):
    i = 0
    while i * i * i < abs(n):
        i = i + 1
    if i * i * i == abs(n):
        return True
    else:
        return False

n = 8
print(is_perfect_cube(n))
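A few illustrative checks; note the function compares against abs(n), so negative perfect cubes also report True.

for value in (8, -8, 27, 20, 0):
    print(value, is_perfect_cube(value))
# 8 True, -8 True, 27 True, 20 False, 0 True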
import React, { useState, useEffect } from 'react';

const MyList = (props) => {
  const [listItems, setListItems] = useState([]);
  const [searchText, setSearchText] = useState('');
  const [sortCriteria, setSortCriteria] = useState('');

  useEffect(() => {
    let items = [...props.data];
    if (searchText) {
      items = items.filter((item) => item.includes(searchText));
    }
    if (sortCriteria) {
      items.sort((a, b) => a[sortCriteria] - b[sortCriteria]);
    }
    setListItems(items);
  }, [searchText, sortCriteria, props.data]); // also re-run when the source data changes

  return (
    <div>
      <input
        type="search"
        placeholder="search"
        onChange={(e) => setSearchText(e.target.value)}
      />
      <select onChange={(e) => setSortCriteria(e.target.value)}>
        <option value="">Sort by</option>
        {props.sortCriterias.map((c) => (
          <option key={c} value={c}>{c}</option>
        ))}
      </select>
      {listItems.map((item) => (
        <div key={item}>{item}</div>
      ))}
    </div>
  );
};

export default MyList;
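A hypothetical mounting example; the `data` and `sortCriterias` props and the import path are illustrative (sorting by 'length' works here because string lengths are numeric).

import MyList from './MyList';

const App = () => (
  <MyList data={['apple', 'banana', 'cherry']} sortCriterias={['length']} />
);

export default App;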
<gh_stars>1-10 package com.wixpress.dst.greyhound.core.zioutils import org.apache.kafka.common.KafkaFuture import zio.blocking.Blocking import zio.{blocking, RIO, ZIO} object KafkaFutures { implicit class KafkaFutureOps[A](val future: KafkaFuture[A]) { def asZio: RIO[Blocking, A] = { RIO.effectAsyncInterrupt[Blocking, A] { cb => future.whenComplete { (a, e) => cb(if (e == null) ZIO.succeed(a) else ZIO.fail(e)) } Left(blocking.effectBlocking(future.cancel(true)).ignore) } } } }
<reponame>lujanan/leetcode
package algorithm_0

import "sort"

// Merge intervals
// https://leetcode-cn.com/problems/merge-intervals/
func merge(intervals [][]int) [][]int {
	if len(intervals) <= 0 {
		return nil
	}
	// find the maximum right endpoint across all intervals
	var max = 0
	for _, v := range intervals {
		if v[1] > max {
			max = v[1]
		}
	}
	var (
		same    []int
		sameMap = make(map[int]bool)
		box     = make([]int, max+1)
	)
	for _, v := range intervals {
		if v[0] == v[1] { // the interval's left endpoint equals its right endpoint
			sameMap[v[0]] = false
			continue
		}
		for i := v[0]; i <= v[1]; i++ {
			box[i]++
			if i == v[1] {
				box[i]--
			}
		}
	}
	for k := range sameMap {
		same = append(same, k)
	}
	if len(same) > 1 {
		sort.Ints(same)
	}
	var (
		result = make([][]int, 0)
		tmp    []int
		index  int
	)
	for i, v := range box {
		if v <= 0 && len(tmp) <= 0 {
			continue
		}
		if len(tmp) <= 0 {
			index = 0
			for _, v := range same {
				if v < i {
					result = append(result, []int{v, v})
					index++
				} else {
					break
				}
			}
			same = same[index:]
			tmp = append(tmp, i)
		} else if v == 0 && len(tmp) > 0 {
			tmp = append(tmp, i)
			index = 0
			for _, v := range same {
				if v >= tmp[0] && v <= tmp[1] {
					index++
				} else {
					break
				}
			}
			same = same[index:]
			result = append(result, []int{tmp[0], tmp[1]})
			tmp = tmp[:0]
		}
	}
	for _, v := range same {
		result = append(result, []int{v, v})
	}
	return result
}
<reponame>hongyuanhua/Devflow
import { config } from "../config.js";

const { backend } = config;
const { host, port } = backend;

export const checkSession = (app) => {
  const url = host + port + "/auth/check-session";
  console.log("in check session");
  fetch(url)
    .then((res) => res.json()) // parse the body; fetch resolves to a Response, not JSON
    .then((json) => {
      console.log("check session success");
      if (json && json.curUserId) {
        app.setState({ curUserId: json.curUserId });
      }
    })
    .catch((error) => {
      console.log(error);
    });
};

export const login = async (userName, password) => {
  // Create our request constructor with all the parameters we need
  console.log("process.env.NODE_ENV: ", process.env.NODE_ENV);
  console.log("host: ", host);
  console.log("port: ", port);
  const url =
    process.env.NODE_ENV === "development"
      ? host + port + "/auth/login"
      : host + "/auth/login";
  const request = new Request(url, {
    method: "post",
    body: JSON.stringify({
      data: {
        userName: userName,
        password: password,
      },
    }),
    headers: {
      Accept: "application/json, text/plain, */*",
      "Content-Type": "application/json",
    },
  });

  console.log("at login");
  // Send the request with fetch()
  const isSuccessLogin = await fetch(request);
  // console.log(await isSuccessLogin.json())
  return isSuccessLogin;
  // .then(res => {
  //     console.log("login fetch");
  //     console.log(res.status)
  //     console.log(res.status === 200)
  //     if (res.status === 200) {
  //         console.log("res.status")
  //         // console.log(res);
  //         return true;
  //     }
  // })
  // .catch((error) => {
  //     console.log("fail loginfetch");
  //     // console.log(res)
  //     console.log(error);
  // });
};

export const register = (data) => {
  const { firstName, lastName, companyName, userName, password } = data;
  // Create our request constructor with all the parameters we need
  const request = new Request(host + port + "/auth/register", {
    method: "put",
    body: JSON.stringify({
      firstName: firstName,
      lastName: lastName,
      companyName: companyName,
      userName: userName,
      password: password,
    }),
    headers: {
      Accept: "application/json, text/plain, */*",
      "Content-Type": "application/json",
    },
  });

  // Send the request with fetch()
  fetch(request)
    .then((res) => {
      console.log("success register fetch");
      console.log(res);
      return res;
    })
    .catch((error) => {
      console.log("fail register fetch");
      // console.log(res)
      console.log(error);
    });
};

export const logout = () => {
  const request = new Request(host + port + "/auth/logout", {
    method: "post",
  });
  fetch(request);
};

// export const checkSession = (app) => {
//   const url = host + port + "/auth/check-session";
//   fetch(url)
//     .then((res) => {
//       console.log("check session fetch");
//       console.log(res);
//       return res.json();
//     })
//     // .then(json => {
//     //   if (json && json.currentUser) {
//     //     app.setState({ currentUser: json.currentUser });
//     //   }
//     // })
//     .catch((error) => {
// console.log("fail check session fetch"); // console.log(error); // }); // }; // export const getCurrMember = () => { // const addr = host + port + "/auth/check-session"; // axios // .get(addr, { // headers: { // 'content-type': 'application/json' // } // }) // .then(memberId => { // return memberId; // }).catch(err => { // console.log(err); // }); // } // module.export = { login: login };
<gh_stars>0 package kbasesearchengine; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.annotation.Generated; import com.fasterxml.jackson.annotation.JsonAnyGetter; import com.fasterxml.jackson.annotation.JsonAnySetter; import com.fasterxml.jackson.annotation.JsonInclude; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonPropertyOrder; /** * <p>Original spec-file type: SearchObjectsInput</p> * <pre> * Input parameters for 'search_objects' method. * object_types - list of the types of objects to search on (optional). The * function will search on all objects if the list is not specified * or is empty. The list size must be less than 50. * match_filter - see MatchFilter. * sorting_rules - see SortingRule (optional). * access_filter - see AccessFilter. * pagination - see Pagination (optional). * post_processing - see PostProcessing (optional). * </pre> * */ @JsonInclude(JsonInclude.Include.NON_NULL) @Generated("com.googlecode.jsonschema2pojo") @JsonPropertyOrder({ "object_types", "match_filter", "sorting_rules", "access_filter", "pagination", "post_processing" }) public class SearchObjectsInput { @JsonProperty("object_types") private List<String> objectTypes; /** * <p>Original spec-file type: MatchFilter</p> * <pre> * Optional rules of defining constrains for object properties * including values of keywords or metadata/system properties (like * object name, creation time range) or full-text search in all * properties. * boolean exclude_subobjects - don't return any subobjects in the search results if true. * Default false. * list<string> source_tags - source tags are arbitrary strings applied to data at the data * source (for example, the workspace service). The source_tags list may optionally be * populated with a set of tags that will determine what data is returned in a search. * By default, the list behaves as a whitelist and only data with at least one of the * tags will be returned. * source_tags_blacklist - if true, the source_tags list behaves as a blacklist and any * data with at least one of the tags will be excluded from the search results. If missing * or false, the default behavior is maintained. * </pre> * */ @JsonProperty("match_filter") private MatchFilter matchFilter; @JsonProperty("sorting_rules") private List<SortingRule> sortingRules; /** * <p>Original spec-file type: AccessFilter</p> * <pre> * Optional rules of access constraints. * - with_private - include data found in workspaces not marked * as public, default value is true for authenticated users. Value set to false for unauthenticated users. * - with_public - include data found in public workspaces, * default value is false for authenticated users. Value set to true for unauthenticated users. * - with_all_history - include all versions (last one and all * old versions) of objects matching constrains, default * value is false. * </pre> * */ @JsonProperty("access_filter") private AccessFilter accessFilter; /** * <p>Original spec-file type: Pagination</p> * <pre> * Pagination rules. Default values are: start = 0, count = 50. * </pre> * */ @JsonProperty("pagination") private Pagination pagination; /** * <p>Original spec-file type: PostProcessing</p> * <pre> * Rules for what to return about found objects. 
* skip_keys - do not include keyword values for object * ('key_props' field in ObjectData structure), * skip_data - do not include raw data for object ('data' and * 'parent_data' fields in ObjectData structure), * include_highlight - include highlights of fields that * matched query, * ids_only - shortcut to mark both skips as true and * include_highlight as false. * add_narrative_info - if true, narrative info gets added to the * search results. Default is false. * add_access_group_info - if true, access groups and objects info get added * to the search results. Default is false. * </pre> * */ @JsonProperty("post_processing") private PostProcessing postProcessing; private Map<java.lang.String, Object> additionalProperties = new HashMap<java.lang.String, Object>(); @JsonProperty("object_types") public List<String> getObjectTypes() { return objectTypes; } @JsonProperty("object_types") public void setObjectTypes(List<String> objectTypes) { this.objectTypes = objectTypes; } public SearchObjectsInput withObjectTypes(List<String> objectTypes) { this.objectTypes = objectTypes; return this; } /** * <p>Original spec-file type: MatchFilter</p> * <pre> * Optional rules of defining constrains for object properties * including values of keywords or metadata/system properties (like * object name, creation time range) or full-text search in all * properties. * boolean exclude_subobjects - don't return any subobjects in the search results if true. * Default false. * list<string> source_tags - source tags are arbitrary strings applied to data at the data * source (for example, the workspace service). The source_tags list may optionally be * populated with a set of tags that will determine what data is returned in a search. * By default, the list behaves as a whitelist and only data with at least one of the * tags will be returned. * source_tags_blacklist - if true, the source_tags list behaves as a blacklist and any * data with at least one of the tags will be excluded from the search results. If missing * or false, the default behavior is maintained. * </pre> * */ @JsonProperty("match_filter") public MatchFilter getMatchFilter() { return matchFilter; } /** * <p>Original spec-file type: MatchFilter</p> * <pre> * Optional rules of defining constrains for object properties * including values of keywords or metadata/system properties (like * object name, creation time range) or full-text search in all * properties. * boolean exclude_subobjects - don't return any subobjects in the search results if true. * Default false. * list<string> source_tags - source tags are arbitrary strings applied to data at the data * source (for example, the workspace service). The source_tags list may optionally be * populated with a set of tags that will determine what data is returned in a search. * By default, the list behaves as a whitelist and only data with at least one of the * tags will be returned. * source_tags_blacklist - if true, the source_tags list behaves as a blacklist and any * data with at least one of the tags will be excluded from the search results. If missing * or false, the default behavior is maintained. 
* </pre> * */ @JsonProperty("match_filter") public void setMatchFilter(MatchFilter matchFilter) { this.matchFilter = matchFilter; } public SearchObjectsInput withMatchFilter(MatchFilter matchFilter) { this.matchFilter = matchFilter; return this; } @JsonProperty("sorting_rules") public List<SortingRule> getSortingRules() { return sortingRules; } @JsonProperty("sorting_rules") public void setSortingRules(List<SortingRule> sortingRules) { this.sortingRules = sortingRules; } public SearchObjectsInput withSortingRules(List<SortingRule> sortingRules) { this.sortingRules = sortingRules; return this; } /** * <p>Original spec-file type: AccessFilter</p> * <pre> * Optional rules of access constraints. * - with_private - include data found in workspaces not marked * as public, default value is true for authenticated users. Value set to false for unauthenticated users. * - with_public - include data found in public workspaces, * default value is false for authenticated users. Value set to true for unauthenticated users. * - with_all_history - include all versions (last one and all * old versions) of objects matching constrains, default * value is false. * </pre> * */ @JsonProperty("access_filter") public AccessFilter getAccessFilter() { return accessFilter; } /** * <p>Original spec-file type: AccessFilter</p> * <pre> * Optional rules of access constraints. * - with_private - include data found in workspaces not marked * as public, default value is true for authenticated users. Value set to false for unauthenticated users. * - with_public - include data found in public workspaces, * default value is false for authenticated users. Value set to true for unauthenticated users. * - with_all_history - include all versions (last one and all * old versions) of objects matching constrains, default * value is false. * </pre> * */ @JsonProperty("access_filter") public void setAccessFilter(AccessFilter accessFilter) { this.accessFilter = accessFilter; } public SearchObjectsInput withAccessFilter(AccessFilter accessFilter) { this.accessFilter = accessFilter; return this; } /** * <p>Original spec-file type: Pagination</p> * <pre> * Pagination rules. Default values are: start = 0, count = 50. * </pre> * */ @JsonProperty("pagination") public Pagination getPagination() { return pagination; } /** * <p>Original spec-file type: Pagination</p> * <pre> * Pagination rules. Default values are: start = 0, count = 50. * </pre> * */ @JsonProperty("pagination") public void setPagination(Pagination pagination) { this.pagination = pagination; } public SearchObjectsInput withPagination(Pagination pagination) { this.pagination = pagination; return this; } /** * <p>Original spec-file type: PostProcessing</p> * <pre> * Rules for what to return about found objects. * skip_keys - do not include keyword values for object * ('key_props' field in ObjectData structure), * skip_data - do not include raw data for object ('data' and * 'parent_data' fields in ObjectData structure), * include_highlight - include highlights of fields that * matched query, * ids_only - shortcut to mark both skips as true and * include_highlight as false. * add_narrative_info - if true, narrative info gets added to the * search results. Default is false. * add_access_group_info - if true, access groups and objects info get added * to the search results. Default is false. 
* </pre> * */ @JsonProperty("post_processing") public PostProcessing getPostProcessing() { return postProcessing; } /** * <p>Original spec-file type: PostProcessing</p> * <pre> * Rules for what to return about found objects. * skip_keys - do not include keyword values for object * ('key_props' field in ObjectData structure), * skip_data - do not include raw data for object ('data' and * 'parent_data' fields in ObjectData structure), * include_highlight - include highlights of fields that * matched query, * ids_only - shortcut to mark both skips as true and * include_highlight as false. * add_narrative_info - if true, narrative info gets added to the * search results. Default is false. * add_access_group_info - if true, access groups and objects info get added * to the search results. Default is false. * </pre> * */ @JsonProperty("post_processing") public void setPostProcessing(PostProcessing postProcessing) { this.postProcessing = postProcessing; } public SearchObjectsInput withPostProcessing(PostProcessing postProcessing) { this.postProcessing = postProcessing; return this; } @JsonAnyGetter public Map<java.lang.String, Object> getAdditionalProperties() { return this.additionalProperties; } @JsonAnySetter public void setAdditionalProperties(java.lang.String name, Object value) { this.additionalProperties.put(name, value); } @Override public java.lang.String toString() { return ((((((((((((((("SearchObjectsInput"+" [objectTypes=")+ objectTypes)+", matchFilter=")+ matchFilter)+", sortingRules=")+ sortingRules)+", accessFilter=")+ accessFilter)+", pagination=")+ pagination)+", postProcessing=")+ postProcessing)+", additionalProperties=")+ additionalProperties)+"]"); } }
require.context('../', true, /\.(html|json|txt|dat)$/i) require.context('../images/', true, /\.(gif|jpg|png|svg|eot|ttf|woff|woff2)$/i) require.context('../stylesheets/', true, /\.(css|scss)$/i) // TODO import 'bootstrap' import React from 'react' import ReactDOM from 'react-dom' import TopNav from './components/TopNav' import 'bootstrap/dist/css/bootstrap.min.css'; import Intro from './components/Intro' import About from './components/About' import ProjectList from './components/ProjectList' import Resume from './components/Resume' import Contact from './components/Contact' ReactDOM.render( <> <TopNav/> <Intro/> <About/> <ProjectList/> <Resume/> <Contact/> </> , document.getElementById('main'))
<gh_stars>100-1000 import { commit } from '@collectable/core'; import { ListStructure, appendValues, createList } from '../internals'; export function of<T> (value: T): ListStructure<T> { var state = createList<T>(true); appendValues(state, [value]); return commit(state); }
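A usage sketch for the single-element constructor above; the import path is an assumption.

import { of } from './of';

const list = of(42); // a committed persistent list holding exactly one value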
#!/bin/bash ls ../source/. > file_list
import tensorflow as tf

# define a layer
def layer(inputs, in_size, out_size):
    weights = tf.Variable(tf.random_normal([in_size, out_size]))
    biases = tf.Variable(tf.zeros([1, out_size]) + 0.1)
    Wx_b = tf.matmul(inputs, weights) + biases
    outputs = tf.nn.relu(Wx_b)
    return outputs

# define inputs and targets
inputs = tf.placeholder(tf.float32, [None, 5])
y = tf.placeholder(tf.float32, [None, 1])  # target placeholder

# define output
pred = layer(inputs, 5, 1)

# define loss function
loss = tf.reduce_mean(tf.square(pred - y))

# define optimizer
train_step = tf.train.AdamOptimizer(0.2).minimize(loss)
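A training-loop sketch in the same TF1-style API, with random data standing in for a real dataset; the step count is illustrative.

import numpy as np

x_data = np.random.rand(100, 5).astype('float32')
y_data = np.random.rand(100, 1).astype('float32')

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(200):
        _, loss_value = sess.run([train_step, loss],
                                 feed_dict={inputs: x_data, y: y_data})
    print('final loss:', loss_value)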
-- *************************************************************************** -- File: 9_31.sql -- -- Developed By TUSC -- -- Disclaimer: Neither Osborne/McGraw-Hill, TUSC, nor the author warrant -- that this source code is error-free. If any errors are -- found in this source code, please report them to TUSC at -- (630)960-2909 ext 1011 or <EMAIL>. -- *************************************************************************** SPOOL 9_31.lis DECLARE CURSOR cur_trace IS SELECT a.username, a.sid, a.serial#, b.physical_reads, b.block_gets, b.consistent_gets FROM v$session a, v$sess_io b WHERE a.sid = b.sid AND NVL(a.username,'XX') NOT IN ('SYS', 'SYSTEM', 'XX') ORDER BY b.physical_reads DESC; lv_count_num PLS_INTEGER := 0; BEGIN FOR cur_trace_rec IN cur_trace LOOP lv_count_num := lv_count_num + 1; IF lv_count_num = 4 THEN EXIT; END IF; DBMS_SYSTEM.SET_SQL_TRACE_IN_SESSION(cur_trace_rec.sid, cur_trace_rec.serial#, TRUE); DBMS_OUTPUT.PUT_LINE('Sessions Traced---Username: ' || RPAD(cur_trace_rec.username,20) || ' Sid: ' || RPAD(cur_trace_rec.sid,8) || ' Serial#: ' || cur_trace_rec.serial#); END LOOP; END; / SPOOL OFF
# Download the helper library from https://www.twilio.com/docs/ruby/install require 'rubygems' require 'twilio-ruby' # Your Account Sid and Auth Token from twilio.com/console # DANGER! This is insecure. See http://twil.io/secure account_sid = 'AC<KEY>' auth_token = '<PASSWORD>' @client = Twilio::REST::Client.new(account_sid, auth_token) sim = @client.supersim.sims('HSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') .update( account_sid: 'ACbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb' ) puts sim.account_sid
#!/bin/bash
# Generates a random password (10 characters by default)

# /dev/urandom supplies random bytes
# tr -dc _A-Z-a-z-0-9 : keep only letters, digits, underscore and dash
# head -c${1:-10}     : take the first N characters (first argument, default 10)
password=$(< /dev/urandom tr -dc _A-Z-a-z-0-9 | head -c${1:-10})
echo "$password"
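Example invocations, assuming the script is saved as genpass.sh:

./genpass.sh      # prints a 10-character password
./genpass.sh 16   # prints a 16-character password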
#!/usr/bin/env bash # This script is executed inside the builder image set -e PASS_ARGS="$@" source ./ci/matrix.sh if [ "$RUN_INTEGRATIONTESTS" != "true" ]; then echo "Skipping integration tests" exit 0 fi export LD_LIBRARY_PATH=$BUILD_DIR/depends/$HOST/lib cd build-ci/zenxcore-$BUILD_TARGET if [ "$SOCKETEVENTS" = "" ]; then # Let's switch socketevents mode to some random mode R=$(($RANDOM%3)) if [ "$R" == "0" ]; then SOCKETEVENTS="select" elif [ "$R" == "1" ]; then SOCKETEVENTS="poll" else SOCKETEVENTS="epoll" fi fi echo "Using socketevents mode: $SOCKETEVENTS" EXTRA_ARGS="--zenxd-arg=-socketevents=$SOCKETEVENTS" set +e ./test/functional/test_runner.py --ci --coverage --failfast --nocleanup --tmpdir=$(pwd)/testdatadirs $PASS_ARGS $EXTRA_ARGS RESULT=$? set -e echo "Collecting logs..." BASEDIR=$(ls testdatadirs) if [ "$BASEDIR" != "" ]; then mkdir testlogs for d in $(ls testdatadirs/$BASEDIR | grep -v '^cache$'); do mkdir testlogs/$d ./test/functional/combine_logs.py -c ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.log ./test/functional/combine_logs.py --html ./testdatadirs/$BASEDIR/$d > ./testlogs/$d/combined.html cd testdatadirs/$BASEDIR/$d LOGFILES="$(find . -name 'debug.log' -or -name "test_framework.log")" cd ../../.. for f in $LOGFILES; do d2="testlogs/$d/$(dirname $f)" mkdir -p $d2 cp testdatadirs/$BASEDIR/$d/$f $d2/ done done fi mv testlogs ../../ exit $RESULT
#!/bin/sh -e set -x version=${TRAVIS_TAG:-} pyver=${TRAVIS_PYTHON_VERSION:-${PYENV_VERSION}} if [ "$BUILD_DIST" = 'true' ]; then appid=$(grep __app_id__ apluslms_roman/__init__.py|head -n1|cut -d"'" -f2) appid="$appid.roman_tki" if [ "$TRAVIS_OS_NAME" = 'osx' ]; then # pyenv PYENV_VERSION=${PYENV_VERSION:-$TRAVIS_PYTHON_VERSION} [ "$PYENV_VERSION" ] || { echo "Missing env PYENV_VERSION"; exit 1; } PYENV_ROOT="$HOME/.pyenv-roman" PATH="$PYENV_ROOT/bin:$PATH" hash -r eval "$(pyenv init -)" hash -r PYENV_VERSION=$(pyenv install --list|tr -d '[ \t]'|grep "^$PYENV_VERSION"|tail -n1) pip install . simple_gui/ pip install -r requirements_build_osx.txt # wheel distribution python setup.py bdist_wheel # pyinstaller app image in dmg and zip (cd simple_gui && ./create_icns_on_osx.sh) pyinstaller --noconsole --onefile --name Roman --icon simple_gui/roman.icns --osx-bundle-identifier="$appid" simple_gui/roman_tki.py ./packaging/osx/create_dmg.sh dist/roman-gui-$version-mac-unsigned.dmg (cd dist && zip -r roman-gui-$version-mac-unsigned.zip Roman.app) else pip install . simple_gui/ pip install -r requirements_build_linux.txt # source distribution python setup.py sdist # pyinstaller binary in zip pyinstaller --noconsole --onefile --name roman --add-data simple_gui/roman.png:. simple_gui/roman_tki.py (cd dist && zip -r roman-gui-$version-linux.zip roman) # pyinstaller dir in appimage release="https://github.com/AppImage/AppImageKit/releases/download/10" dir=dist/Roman.AppDir mkdir -p $dir pyinstaller --noconsole --name Roman --add-data simple_gui/roman.png:. simple_gui/roman_tki.py mv dist/Roman dist/Roman.AppDir/usr/ ln -sT usr/roman.png $dir/roman.png mkdir -p $dir/usr/share/applications sed 's,^Exec=.*$,Exec=./Roman,' simple_gui/roman_tki.desktop > $dir/usr/share/applications/$appid.desktop ln -sT usr/share/applications/$appid.desktop $dir/roman.desktop mkdir -p $dir/usr/share/metainfo # FIXME: following should be in path $dir/usr/share/metainfo/$appid.appdata.xml, but that is not recognized by appimagetool sed -e "s,__app_id__,$appid,g" simple_gui/roman_tki.appdata.xml > $dir/usr/share/metainfo/roman.appdata.xml (cd $dir && wget "$release/AppRun-x86_64" -O AppRun && chmod +x AppRun) (cd dist && \ wget "$release/appimagetool-x86_64.AppImage" && chmod +x appimagetool-x86_64.AppImage && \ ./appimagetool-x86_64.AppImage --appimage-extract && \ ./squashfs-root/AppRun --comp xz Roman.AppDir && \ mv Roman-x86_64.AppImage Roman-gui-$version-linux.AppImage && \ rm -rf appimagetool-x86_64.AppImage squashfs-root Roman.AppDir) fi fi
#!/usr/bin/env bash # Terminate already running bar instances killall -q polybar # If all your bars have ipc enabled, you can also use # polybar-msg cmd quit # Launch bar(s) #echo "---" | tee -a /tmp/polybar1.log /tmp/polybar2.log #polybar example 2>&1 | tee -a /tmp/polybar1.log & disown if type "xrandr"; then for m in $(xrandr --query | grep " connected" | cut -d" " -f1); do MONITOR=$m polybar --reload いちごミルク & done else polybar --reload いちごミルク & fi echo "Bars launched..." #example bar using these fonts #yay -S xorg-fonts-misc ttf-unifont siji-git #https://github.com/polybar/polybar/wiki/Fonts #https://github.com/stark/siji/issues/28
#!/bin/bash
source script/common/version.sh

header "$0"

#
# download the latest raspbian image
# verify the checksum of the image
#
# sha256sum separates the hash and the filename with two spaces
image_chk="$source_image_hash_expected  $package"

if [ ! -f "$package" ]; then
  msg "downloading $package"
  curl -sL "$source_image_url/$source_image_archive" -o "$package" 2> "$package.log"
  msg "downloading $package_checksum"
  curl -sL "$source_image_url/$source_image_hash" -o "$package_checksum" 2> "$package_checksum.log"
else
  msg "found existing $package"
fi

msg "verifying $package..."
sha256sum "$package" > "$package_checksum"
checksum=$(cat "$package_checksum")

if [ "$image_chk" = "$checksum" ]; then
  okmsg "OK"
else
  errr "FAILED"
  msg "calculated: $checksum"
  msg "cached: $image_chk"
fi

exit 0
package io.lindstrom.mpd.data; import com.fasterxml.jackson.annotation.JsonPropertyOrder; import com.fasterxml.jackson.dataformat.xml.annotation.JacksonXmlProperty; import io.lindstrom.mpd.data.descriptor.Descriptor; import io.lindstrom.mpd.support.Utils; import java.util.List; import java.util.Objects; @JsonPropertyOrder({ "framePackings", "audioChannelConfigurations", "contentProtections", "essentialProperties", "supplementalProperties", "inbandEventStreams" }) public abstract class RepresentationBase { @JacksonXmlProperty(localName = "FramePacking", namespace = MPD.NAMESPACE) private final List<Descriptor> framePackings; @JacksonXmlProperty(localName = "AudioChannelConfiguration", namespace = MPD.NAMESPACE) private final List<Descriptor> audioChannelConfigurations; @JacksonXmlProperty(localName = "ContentProtection", namespace = MPD.NAMESPACE) private final List<Descriptor> contentProtections; @JacksonXmlProperty(localName = "EssentialProperty", namespace = MPD.NAMESPACE) private final List<Descriptor> essentialProperties; @JacksonXmlProperty(localName = "SupplementalProperty", namespace = MPD.NAMESPACE) private final List<Descriptor> supplementalProperties; @JacksonXmlProperty(localName = "InbandEventStream", namespace = MPD.NAMESPACE) private final List<EventStream> inbandEventStreams; @JacksonXmlProperty(isAttribute = true) private final String profiles; @JacksonXmlProperty(isAttribute = true) private final Long width; @JacksonXmlProperty(isAttribute = true) private final Long height; @JacksonXmlProperty(isAttribute = true) private final Ratio sar; @JacksonXmlProperty(isAttribute = true) private final FrameRate frameRate; @JacksonXmlProperty(isAttribute = true) private final String audioSamplingRate; @JacksonXmlProperty(isAttribute = true) private final String mimeType; @JacksonXmlProperty(isAttribute = true) private final String segmentProfiles; @JacksonXmlProperty(isAttribute = true) private final String codecs; @JacksonXmlProperty(isAttribute = true) private final Double maximumSAPPeriod; @JacksonXmlProperty(isAttribute = true) private final Long startWithSAP; @JacksonXmlProperty(isAttribute = true) private final Double maxPlayoutRate; @JacksonXmlProperty(isAttribute = true) private final Boolean codingDependency; @JacksonXmlProperty(isAttribute = true) private final VideoScanType scanType; RepresentationBase(List<Descriptor> framePackings, List<Descriptor> audioChannelConfigurations, List<Descriptor> contentProtections, List<Descriptor> essentialProperties, List<Descriptor> supplementalProperties, List<EventStream> inbandEventStreams, String profiles, Long width, Long height, Ratio sar, FrameRate frameRate, String audioSamplingRate, String mimeType, String segmentProfiles, String codecs, Double maximumSAPPeriod, Long startWithSAP, Double maxPlayoutRate, Boolean codingDependency, VideoScanType scanType) { this.framePackings = framePackings; this.audioChannelConfigurations = audioChannelConfigurations; this.contentProtections = contentProtections; this.essentialProperties = essentialProperties; this.supplementalProperties = supplementalProperties; this.inbandEventStreams = inbandEventStreams; this.profiles = profiles; this.width = width; this.height = height; this.sar = sar; this.frameRate = frameRate; this.audioSamplingRate = audioSamplingRate; this.mimeType = mimeType; this.segmentProfiles = segmentProfiles; this.codecs = codecs; this.maximumSAPPeriod = maximumSAPPeriod; this.startWithSAP = startWithSAP; this.maxPlayoutRate = maxPlayoutRate; this.codingDependency = 
codingDependency; this.scanType = scanType; } RepresentationBase() { this.framePackings = null; this.audioChannelConfigurations = null; this.contentProtections = null; this.essentialProperties = null; this.supplementalProperties = null; this.inbandEventStreams = null; this.profiles = null; this.width = null; this.height = null; this.sar = null; this.frameRate = null; this.audioSamplingRate = null; this.mimeType = null; this.segmentProfiles = null; this.codecs = null; this.maximumSAPPeriod = null; this.startWithSAP = null; this.maxPlayoutRate = null; this.codingDependency = null; this.scanType = null; } public List<Descriptor> getFramePackings() { return Utils.unmodifiableList(framePackings); } public List<Descriptor> getAudioChannelConfigurations() { return Utils.unmodifiableList(audioChannelConfigurations); } public List<Descriptor> getContentProtections() { return Utils.unmodifiableList(contentProtections); } public List<Descriptor> getEssentialProperties() { return Utils.unmodifiableList(essentialProperties); } public List<Descriptor> getSupplementalProperties() { return Utils.unmodifiableList(supplementalProperties); } public List<EventStream> getInbandEventStreams() { return Utils.unmodifiableList(inbandEventStreams); } public String getProfiles() { return profiles; } public Long getWidth() { return width; } public Long getHeight() { return height; } public Ratio getSar() { return sar; } public FrameRate getFrameRate() { return frameRate; } public String getAudioSamplingRate() { return audioSamplingRate; } public String getMimeType() { return mimeType; } public String getSegmentProfiles() { return segmentProfiles; } public String getCodecs() { return codecs; } public Double getMaximumSAPPeriod() { return maximumSAPPeriod; } public Long getStartWithSAP() { return startWithSAP; } public Double getMaxPlayoutRate() { return maxPlayoutRate; } public Boolean getCodingDependency() { return codingDependency; } public VideoScanType getScanType() { return scanType; } @Override public boolean equals(Object o) { if (this == o) return true; if (!(o instanceof RepresentationBase)) return false; RepresentationBase that = (RepresentationBase) o; return Objects.equals(framePackings, that.framePackings) && Objects.equals(audioChannelConfigurations, that.audioChannelConfigurations) && Objects.equals(contentProtections, that.contentProtections) && Objects.equals(essentialProperties, that.essentialProperties) && Objects.equals(supplementalProperties, that.supplementalProperties) && Objects.equals(inbandEventStreams, that.inbandEventStreams) && Objects.equals(profiles, that.profiles) && Objects.equals(width, that.width) && Objects.equals(height, that.height) && Objects.equals(sar, that.sar) && Objects.equals(frameRate, that.frameRate) && Objects.equals(audioSamplingRate, that.audioSamplingRate) && Objects.equals(mimeType, that.mimeType) && Objects.equals(segmentProfiles, that.segmentProfiles) && Objects.equals(codecs, that.codecs) && Objects.equals(maximumSAPPeriod, that.maximumSAPPeriod) && Objects.equals(startWithSAP, that.startWithSAP) && Objects.equals(maxPlayoutRate, that.maxPlayoutRate) && Objects.equals(codingDependency, that.codingDependency) && scanType == that.scanType; } @Override public int hashCode() { return Objects.hash(framePackings, audioChannelConfigurations, contentProtections, essentialProperties, supplementalProperties, inbandEventStreams, profiles, width, height, sar, frameRate, audioSamplingRate, mimeType, segmentProfiles, codecs, maximumSAPPeriod, startWithSAP, maxPlayoutRate, 
codingDependency, scanType); } @Override public String toString() { return "RepresentationBase{" + "framePackings=" + framePackings + ", audioChannelConfigurations=" + audioChannelConfigurations + ", contentProtections=" + contentProtections + ", essentialProperties=" + essentialProperties + ", supplementalProperties=" + supplementalProperties + ", inbandEventStreams=" + inbandEventStreams + ", profiles='" + profiles + '\'' + ", width=" + width + ", height=" + height + ", sar='" + sar + '\'' + ", frameRate='" + frameRate + '\'' + ", audioSamplingRate='" + audioSamplingRate + '\'' + ", mimeType='" + mimeType + '\'' + ", segmentProfiles='" + segmentProfiles + '\'' + ", codecs='" + codecs + '\'' + ", maximumSAPPeriod=" + maximumSAPPeriod + ", startWithSAP=" + startWithSAP + ", maxPlayoutRate=" + maxPlayoutRate + ", codingDependency=" + codingDependency + ", scanType=" + scanType + '}'; } <T extends AbstractBuilder<T>> T buildUpon(T builder) { return builder .withFramePackings(framePackings) .withAudioChannelConfigurations(audioChannelConfigurations) .withContentProtections(contentProtections) .withEssentialProperties(essentialProperties) .withSupplementalProperties(supplementalProperties) .withInbandEventStreams(inbandEventStreams) .withProfiles(profiles) .withWidth(width) .withHeight(height) .withSar(sar) .withFrameRate(frameRate) .withAudioSamplingRate(audioSamplingRate) .withMimeType(mimeType) .withSegmentProfiles(segmentProfiles) .withCodecs(codecs) .withMaximumSAPPeriod(maximumSAPPeriod) .withStartWithSAP(startWithSAP) .withMaxPlayoutRate(maxPlayoutRate) .withCodingDependency(codingDependency) .withScanType(scanType); } static abstract class AbstractBuilder<T> { List<Descriptor> framePackings; List<Descriptor> audioChannelConfigurations; List<Descriptor> contentProtections; List<Descriptor> essentialProperties; List<Descriptor> supplementalProperties; List<EventStream> inbandEventStreams; String profiles; Long width; Long height; Ratio sar; FrameRate frameRate; String audioSamplingRate; String mimeType; String segmentProfiles; String codecs; Double maximumSAPPeriod; Long startWithSAP; Double maxPlayoutRate; Boolean codingDependency; VideoScanType scanType; abstract T getThis(); public T withFramePackings(List<Descriptor> framePackings) { this.framePackings = framePackings; return getThis(); } public T withAudioChannelConfigurations(List<Descriptor> audioChannelConfigurations) { this.audioChannelConfigurations = audioChannelConfigurations; return getThis(); } public T withAudioChannelConfigurations(Descriptor audioChannelConfiguration, Descriptor ...moreAudioChannelConfigurations) { this.audioChannelConfigurations = Utils.varargsToList(audioChannelConfiguration, moreAudioChannelConfigurations); return getThis(); } public T withContentProtections(List<Descriptor> contentProtections) { this.contentProtections = contentProtections; return getThis(); } public T withEssentialProperties(List<Descriptor> essentialProperties) { this.essentialProperties = essentialProperties; return getThis(); } public T withSupplementalProperties(List<Descriptor> supplementalProperties) { this.supplementalProperties = supplementalProperties; return getThis(); } public T withInbandEventStreams(List<EventStream> inbandEventStreams) { this.inbandEventStreams = inbandEventStreams; return getThis(); } public T withProfiles(String profiles) { this.profiles = profiles; return getThis(); } public T withWidth(Long width) { this.width = width; return getThis(); } public T withWidth(int width) { this.width = (long) width; 
return getThis(); } public T withHeight(Long height) { this.height = height; return getThis(); } public T withHeight(int height) { this.height = (long) height; return getThis(); } public T withSar(Ratio sar) { this.sar = sar; return getThis(); } public T withFrameRate(FrameRate frameRate) { this.frameRate = frameRate; return getThis(); } public T withAudioSamplingRate(String audioSamplingRate) { this.audioSamplingRate = audioSamplingRate; return getThis(); } public T withMimeType(String mimeType) { this.mimeType = mimeType; return getThis(); } public T withSegmentProfiles(String segmentProfiles) { this.segmentProfiles = segmentProfiles; return getThis(); } public T withCodecs(String codecs) { this.codecs = codecs; return getThis(); } public T withMaximumSAPPeriod(Double maximumSAPPeriod) { this.maximumSAPPeriod = maximumSAPPeriod; return getThis(); } public T withStartWithSAP(Long startWithSAP) { this.startWithSAP = startWithSAP; return getThis(); } public T withMaxPlayoutRate(Double maxPlayoutRate) { this.maxPlayoutRate = maxPlayoutRate; return getThis(); } public T withCodingDependency(Boolean codingDependency) { this.codingDependency = codingDependency; return getThis(); } public T withScanType(VideoScanType scanType) { this.scanType = scanType; return getThis(); } } }
#!/bin/bash set -e if [[ -z "$TRUSTY_URL" ]]; then export TRUSTY_URL=http://10.77.88.101:7880 fi echo "TRUSTY_URL: $TRUSTY_URL" cmd="$*" echo "*** trusty: waiting for server..." until curl -k $TRUSTY_URL/v1/status; do >&2 echo "trusty is unavailable $TRUSTY_URL - sleeping" sleep 6 done >&2 echo "trusty is up - executing command:" >&2 echo $cmd exec $cmd
export LIB_DIR=src export TEST_DIR=test export IMPLEMENTATIONS="(sagittarius@0.9.2)"
<filename>mobile/tests/utils/test_cache_helper.py from unittest.mock import MagicMock from django.core.cache import cache from django.test import override_settings from common.tests.core import SimpleTestCase from mobile.utils.cache_helper import CacheHelper, get_or_set @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-snowflake', } }) class CacheHelperTest(SimpleTestCase): def tearDown(self): cache.clear() def test_get_or_set_when_already_have_value(self): cache.set('cache_key', 'other_value') CacheHelper.get_or_set('cache_key', 'value') cache.get('cache_key').should.be.equal('other_value') def test_get_or_set_when_not_set_yet(self): CacheHelper.get_or_set('cache_key', 'value') cache.get('cache_key').should.be.equal('value') @override_settings(CACHES={ 'default': { 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', 'LOCATION': 'unique-snowflake', } }) class GetOrSetDecoratorTest(SimpleTestCase): def tearDown(self): cache.clear() def test_get_or_set_when_already_have_value(self): cache.set('cache_key', 'other_value') method = MagicMock(return_value='value') decorated_method = get_or_set('cache_key')(method) decorated_method().should.equal('other_value') def test_get_or_set_when_not_set_yet(self): method = MagicMock(return_value='value') decorated_method = get_or_set('cache_key')(method) decorated_method().should.equal('value')
package com.playMidi.player;

import android.util.Log;

import com.playMidi.player.Midi.MidiEvent;

import java.util.ArrayList;

/**
 * Created by ra on 12/5/2016.
 *
 * Object pool for SoundEvents: finished events are moved to a cache and
 * reused on the next allocation instead of being garbage-collected.
 */
public class SoundEventRecycler {
    private ArrayList<SoundEvent> allocated;
    private ArrayList<SoundEvent> cached;

    public SoundEventRecycler() {
        allocated = new ArrayList<SoundEvent>();
        cached = new ArrayList<SoundEvent>();
    }

    public SoundEvent get(int index) {
        return allocated.get(index);
    }

    public void remove(int index) {
        SoundEvent se = allocated.remove(index);
        cached.add(se);
    }

    public void add(int noteNumber, int noteLength, int sampleRate, int track) {
        // Ignore notes that are already sounding.
        for (int i = 0; i < allocated.size(); i++) {
            if (allocated.get(i).getNoteNumber() == noteNumber) {
                return;
            }
        }
        //Log.d("SoundEventRecycler.add", "" + noteLength);
        SoundEvent se;
        if (cached.size() > 0) {
            // Reuse a cached instance rather than allocating a new one.
            se = cached.remove(0);
            se.reset(noteNumber, noteLength, sampleRate, track);
        } else {
            se = new SoundEvent(noteNumber, noteLength, sampleRate, track);
        }
        allocated.add(se);
    }

    public void add(MidiEvent me, int sampleRate) throws Exception {
        SoundEvent se;
        if (cached.size() > 0) {
            se = cached.remove(0);
            se.reset(me, sampleRate);
        } else {
            se = SoundEvent.new_SoundEvent(me, sampleRate);
        }
        allocated.add(se);
    }

    public int size() {
        return allocated.size();
    }

    public void clear() {
        allocated.clear();
        cached.clear();
    }

    public String toString() {
        return allocated.toString();
    }
}
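For context, a hedged sketch of how such a pool is typically driven from a render loop; SoundEvent.isFinished() is an assumed accessor that is not shown above, and the VoiceManager wrapper is hypothetical:

// Hedged usage sketch of the recycler as an object pool.
class VoiceManager {
    private final SoundEventRecycler active = new SoundEventRecycler();

    void noteOn(int note, int lengthSamples, int sampleRate, int track) {
        active.add(note, lengthSamples, sampleRate, track); // duplicate notes are ignored
    }

    void afterRender() {
        // Iterate backwards so remove(i) cannot skip the next element;
        // removed events go back to the internal cache for reuse.
        for (int i = active.size() - 1; i >= 0; i--) {
            if (active.get(i).isFinished()) { // isFinished() assumed
                active.remove(i);
            }
        }
    }
}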
<reponame>flerro/ddb-mapping-plugin
package com.github.flerro.ddbmapping;

import com.intellij.codeInsight.generation.PsiFieldMember;
import com.intellij.ide.util.MemberChooser;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.project.Project;
import com.intellij.ui.NonFocusableCheckBox;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.util.*;
import java.util.function.Predicate;
import java.util.stream.Collectors;

public final class FieldSelectionDialog {

    @Nullable
    public static List<PsiFieldMember> show(final List<PsiFieldMember> members, final Project project) {
        if (members == null || members.isEmpty()) return new ArrayList<>();

        // Generate options
        final PropertiesComponent props = PropertiesComponent.getInstance();
        final List<JCheckBox> optionsCheckBoxes = new ArrayList<>();
        for (CodeGenerationOption option : CodeGenerationOption.values()) {
            optionsCheckBoxes.add(buildOptionCheckBox(props, option));
        }

        // Create Dialog
        final PsiFieldMember[] memberArray = members.toArray(new PsiFieldMember[0]);
        boolean allowEmptySelection = false;
        boolean allowMultiSelection = true;
        final MemberChooser<PsiFieldMember> chooser =
                new MemberChooser<>(memberArray, allowEmptySelection, allowMultiSelection,
                        project, null, optionsCheckBoxes.toArray(new JCheckBox[0]));
        chooser.setTitle("Select Fields:");
        chooser.selectElements(memberArray);

        return chooser.showAndGet() ? chooser.getSelectedElements() : null;
    }

    private static JCheckBox buildOptionCheckBox(final PropertiesComponent propertiesComponent,
                                                 final CodeGenerationOption option) {
        final JCheckBox optionCheckBox = new NonFocusableCheckBox(option.getLabel());
        optionCheckBox.setToolTipText(option.getToolip());
        String currentOption = option.getProperty();
        optionCheckBox.setSelected(propertiesComponent.isTrueValue(currentOption));
        optionCheckBox.addItemListener(event ->
                propertiesComponent.setValue(currentOption, Boolean.toString(optionCheckBox.isSelected())));
        return optionCheckBox;
    }

    public static Set<CodeGenerationOption> selectedOptions() {
        final PropertiesComponent propertiesComponent = PropertiesComponent.getInstance();
        final Predicate<CodeGenerationOption> isSelected =
                o -> propertiesComponent.getBoolean(o.getProperty(), false);
        return Arrays.stream(CodeGenerationOption.values()).filter(isSelected).collect(Collectors.toSet());
    }
}
/** * <a href="http://www.openolat.org"> * OpenOLAT - Online Learning and Training</a><br> * <p> * Licensed under the Apache License, Version 2.0 (the "License"); <br> * you may not use this file except in compliance with the License.<br> * You may obtain a copy of the License at the * <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a> * <p> * Unless required by applicable law or agreed to in writing,<br> * software distributed under the License is distributed on an "AS IS" BASIS, <br> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br> * See the License for the specific language governing permissions and <br> * limitations under the License. * <p> * Initial code contributed and copyrighted by<br> * frentix GmbH, http://www.frentix.com * <p> */ package org.olat.restapi; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.util.List; import java.util.Locale; import java.util.Set; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.UriBuilder; import org.apache.http.HttpEntity; import org.apache.http.HttpResponse; import org.apache.http.client.methods.HttpDelete; import org.apache.http.client.methods.HttpGet; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.Logger; import org.junit.Assert; import org.junit.Test; import org.olat.admin.securitygroup.gui.IdentitiesAddEvent; import org.olat.basesecurity.BaseSecurity; import org.olat.basesecurity.GroupRoles; import org.olat.basesecurity.OrganisationService; import org.olat.core.commons.persistence.DB; import org.olat.core.id.Identity; import org.olat.core.id.Organisation; import org.olat.core.id.Roles; import org.olat.core.logging.Tracing; import org.olat.core.util.mail.MailPackage; import org.olat.fileresource.types.ImsQTI21Resource; import org.olat.modules.taxonomy.Taxonomy; import org.olat.modules.taxonomy.TaxonomyLevel; import org.olat.modules.taxonomy.manager.TaxonomyDAO; import org.olat.modules.taxonomy.manager.TaxonomyLevelDAO; import org.olat.modules.taxonomy.restapi.TaxonomyLevelVO; import org.olat.repository.RepositoryEntry; import org.olat.repository.RepositoryEntryEducationalType; import org.olat.repository.RepositoryEntryRelationType; import org.olat.repository.RepositoryEntryStatusEnum; import org.olat.repository.RepositoryEntryToTaxonomyLevel; import org.olat.repository.RepositoryManager; import org.olat.repository.RepositoryService; import org.olat.repository.handlers.RepositoryHandler; import org.olat.repository.handlers.RepositoryHandlerFactory; import org.olat.repository.manager.RepositoryEntryToTaxonomyLevelDAO; import org.olat.restapi.support.vo.RepositoryEntryAccessVO; import org.olat.restapi.support.vo.RepositoryEntryEducationalTypeVO; import org.olat.restapi.support.vo.RepositoryEntryMetadataVO; import org.olat.test.JunitTestHelper; import org.olat.test.OlatRestTestCase; import org.olat.user.restapi.UserVO; import org.olat.user.restapi.UserVOFactory; import org.springframework.beans.factory.annotation.Autowired; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; /** * * Initial date: 2 mai 2018<br> * @author srosse, <EMAIL>, http://www.frentix.com * */ public class RepositoryEntryWebServiceTest 
extends OlatRestTestCase { private static final Logger log = Tracing.createLoggerFor(RepositoryEntryWebServiceTest.class); @Autowired private DB dbInstance; @Autowired private BaseSecurity securityManager; @Autowired private TaxonomyDAO taxonomyDao; @Autowired private TaxonomyLevelDAO taxonomyLevelDao; @Autowired private RepositoryManager repositoryManager; @Autowired private RepositoryService repositoryService; @Autowired private OrganisationService organisationService; @Autowired private RepositoryEntryToTaxonomyLevelDAO repositoryEntryToTaxonomyLevelDao; @Test public void exportCourse() throws IOException, URISyntaxException { RestConnection conn = new RestConnection(); Assert.assertTrue(conn.login("administrator", "openolat")); Identity author = JunitTestHelper.createAndPersistIdentityAsRndUser("course-owner"); RepositoryEntry course = JunitTestHelper.deployBasicCourse(author); dbInstance.closeSession(); URI request = UriBuilder.fromUri(getContextURI()).path("repo").path("entries") .path(course.getKey().toString()).path("file").build(); HttpGet method = conn.createGet(request, "application/zip", true); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); byte[] exportedFile = EntityUtils.toByteArray(response.getEntity()); Assert.assertTrue(exportedFile.length > 1000); } @Test public void exportQTI21Test() throws IOException, URISyntaxException { //deploy QTI 2.1 test Identity author = JunitTestHelper.createAndPersistIdentityAsRndUser("test-owner"); URL testUrl = JunitTestHelper.class.getResource("file_resources/qti21/simple_QTI_21_hotspot.zip"); File testFile = new File(testUrl.toURI()); RepositoryHandler courseHandler = RepositoryHandlerFactory.getInstance() .getRepositoryHandler(ImsQTI21Resource.TYPE_NAME); Organisation defOrganisation = organisationService.getDefaultOrganisation(); RepositoryEntry testEntry = courseHandler.importResource(author, null, "Test QTI 2.1", "", true, defOrganisation, Locale.ENGLISH, testFile, null); dbInstance.closeSession(); RestConnection conn = new RestConnection(); Assert.assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()).path("repo").path("entries") .path(testEntry.getKey().toString()).path("file").build(); HttpGet method = conn.createGet(request, "application/zip", true); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); byte[] exportedFile = EntityUtils.toByteArray(response.getEntity()); Assert.assertTrue(exportedFile.length > 1000); } @Test public void getOwners() throws IOException, URISyntaxException { Identity owner1 = JunitTestHelper.createAndPersistIdentityAsRndAuthor("author-1"); Identity owner2 = JunitTestHelper.createAndPersistIdentityAsRndAuthor("author-2"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); repositoryManager.addOwners(owner1, new IdentitiesAddEvent(owner1), re, null); repositoryManager.addOwners(owner1, new IdentitiesAddEvent(owner2), re, null); dbInstance.commitAndCloseSession(); //get the owners RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()).path("repo/entries").path(re.getKey().toString()).path("owners").build(); HttpGet method = conn.createGet(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); List<UserVO> users = 
parseUserArray(response.getEntity()); Assert.assertNotNull(users); Assert.assertEquals(2, users.size());//our 2 int found = 0; for(UserVO user:users) { Long userKey = user.getKey(); Assert.assertNotNull(userKey); if(owner1.getKey().equals(userKey) || owner2.getKey().equals(userKey)) { found++; } } Assert.assertEquals(2, found); conn.shutdown(); } @Test public void addOwner() throws IOException, URISyntaxException { Identity owner = JunitTestHelper.createAndPersistIdentityAsRndAuthor("author-3"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); //add an owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("owners").path(owner.getKey().toString()) .build(); HttpPut method = conn.createPut(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> owners = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.owner.name()); Assert.assertNotNull(owners); Assert.assertEquals(1, owners.size()); Assert.assertTrue(owners.contains(owner)); } @Test public void addOwners() throws IOException, URISyntaxException { Identity owner1 = JunitTestHelper.createAndPersistIdentityAsRndUser("author-3b-"); Identity owner2 = JunitTestHelper.createAndPersistIdentityAsRndUser("author-3c-"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); //add an owner UserVO[] newOwners = new UserVO[2]; newOwners[0] = UserVOFactory.get(owner1); newOwners[1] = UserVOFactory.get(owner2); RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo").path("entries").path(re.getKey().toString()).path("owners").build(); HttpPut method = conn.createPut(request, MediaType.APPLICATION_JSON, true); conn.addJsonEntity(method, newOwners); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> owners = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.owner.name()); Assert.assertNotNull(owners); Assert.assertEquals(2, owners.size()); Assert.assertTrue(owners.contains(owner1)); Assert.assertTrue(owners.contains(owner2)); } @Test public void removeOwner() throws IOException, URISyntaxException { Identity owner = JunitTestHelper.createAndPersistIdentityAsRndAuthor("author-4-"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); repositoryManager.addOwners(owner, new IdentitiesAddEvent(owner), re, new MailPackage(false)); dbInstance.commitAndCloseSession(); //remove the owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("owners").path(owner.getKey().toString()).build(); HttpDelete method = conn.createDelete(request, MediaType.APPLICATION_JSON); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> 
owners = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.owner.name()); Assert.assertNotNull(owners); Assert.assertEquals(0, owners.size()); Assert.assertFalse(owners.contains(owner)); } @Test public void getCoaches() throws IOException, URISyntaxException { Identity coach1 = JunitTestHelper.createAndPersistIdentityAsRndAuthor("coach-1"); Identity coach2 = JunitTestHelper.createAndPersistIdentityAsRndAuthor("coach-2"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); repositoryManager.addTutors(coach1, Roles.administratorRoles(), new IdentitiesAddEvent(coach1), re, new MailPackage(false)); repositoryManager.addTutors(coach1, Roles.administratorRoles(), new IdentitiesAddEvent(coach2), re, new MailPackage(false)); dbInstance.commitAndCloseSession(); //get the coaches RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()).path("repo/entries").path(re.getKey().toString()).path("coaches").build(); HttpGet method = conn.createGet(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); List<UserVO> users = parseUserArray(response.getEntity()); Assert.assertNotNull(users); Assert.assertEquals(2, users.size());//our 2 int found = 0; for(UserVO user:users) { Long identityKey = user.getKey(); Assert.assertNotNull(identityKey); if(coach1.getKey().equals(identityKey) || coach2.getKey().equals(identityKey)) { found++; } } Assert.assertEquals(2, found); conn.shutdown(); } @Test public void addCoach() throws IOException, URISyntaxException { Identity coach = JunitTestHelper.createAndPersistIdentityAsRndAuthor("coach-3"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); //add an owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("coaches").path(coach.getKey().toString()) .build(); HttpPut method = conn.createPut(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> coaches = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.coach.name()); Assert.assertNotNull(coaches); Assert.assertEquals(1, coaches.size()); Assert.assertTrue(coaches.contains(coach)); } @Test public void addCoaches() throws IOException, URISyntaxException { Identity coach1 = JunitTestHelper.createAndPersistIdentityAsRndUser("coach-3b-"); Identity coach2 = JunitTestHelper.createAndPersistIdentityAsRndUser("coach-3c-"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); //add an owner RestConnection conn = new RestConnection(); Assert.assertTrue(conn.login("administrator", "openolat")); UserVO[] newCoaches = new UserVO[2]; newCoaches[0] = UserVOFactory.get(coach1); newCoaches[1] = UserVOFactory.get(coach2); URI request = UriBuilder.fromUri(getContextURI()) .path("repo").path("entries").path(re.getKey().toString()).path("coaches").build(); HttpPut method = conn.createPut(request, MediaType.APPLICATION_JSON, true); conn.addJsonEntity(method, newCoaches); HttpResponse response = conn.execute(method); assertEquals(200, 
response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> coaches = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.coach.name()); Assert.assertNotNull(coaches); Assert.assertEquals(2, coaches.size()); Assert.assertTrue(coaches.contains(coach1)); Assert.assertTrue(coaches.contains(coach2)); } @Test public void removeCoach() throws IOException, URISyntaxException { Identity coach = JunitTestHelper.createAndPersistIdentityAsRndAuthor("coach-4"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); repositoryManager.addTutors(coach, Roles.administratorRoles(), new IdentitiesAddEvent(coach), re, new MailPackage(false)); dbInstance.commitAndCloseSession(); //remove the owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("coaches").path(coach.getKey().toString()).build(); HttpDelete method = conn.createDelete(request, MediaType.APPLICATION_JSON); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> coaches = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.coach.name()); Assert.assertNotNull(coaches); Assert.assertTrue(coaches.isEmpty()); Assert.assertFalse(coaches.contains(coach)); } @Test public void getParticipants() throws IOException, URISyntaxException { Identity participant1 = JunitTestHelper.createAndPersistIdentityAsRndAuthor("participant-1"); Identity participant2 = JunitTestHelper.createAndPersistIdentityAsRndAuthor("participant-2"); Roles part1Roles = securityManager.getRoles(participant1); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); repositoryManager.addParticipants(participant1, part1Roles, new IdentitiesAddEvent(participant1), re, null); repositoryManager.addParticipants(participant1, part1Roles, new IdentitiesAddEvent(participant2), re, null); dbInstance.commitAndCloseSession(); //get the coaches RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()).path("repo/entries").path(re.getKey().toString()).path("participants").build(); HttpGet method = conn.createGet(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); List<UserVO> users = parseUserArray(response.getEntity()); Assert.assertNotNull(users); Assert.assertEquals(2, users.size());//our 2 int found = 0; for(UserVO user:users) { Long identityKey = user.getKey(); Assert.assertNotNull(identityKey); if(participant1.getKey().equals(identityKey) || participant2.getKey().equals(identityKey)) { found++; } } Assert.assertEquals(2, found); conn.shutdown(); } @Test public void addParticipant() throws IOException, URISyntaxException { Identity participant = JunitTestHelper.createAndPersistIdentityAsRndAuthor("participant-3"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); //add an owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) 
.path("repo/entries").path(re.getKey().toString()).path("participants").path(participant.getKey().toString()) .build(); HttpPut method = conn.createPut(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> participants = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.participant.name()); Assert.assertNotNull(participants); Assert.assertEquals(1, participants.size()); Assert.assertTrue(participants.contains(participant)); } @Test public void addParticipants() throws IOException, URISyntaxException { Identity participant1 = JunitTestHelper.createAndPersistIdentityAsRndUser("participant-3b-"); Identity participant2 = JunitTestHelper.createAndPersistIdentityAsRndUser("participant-3c-"); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); //add an owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); //add the 2 participants to the course UserVO[] newParticipants = new UserVO[2]; newParticipants[0] = UserVOFactory.get(participant1); newParticipants[1] = UserVOFactory.get(participant2); URI request = UriBuilder.fromUri(getContextURI()).path("repo/entries") .path(re.getKey().toString()).path("participants").build(); HttpPut method = conn.createPut(request, MediaType.APPLICATION_JSON, true); conn.addJsonEntity(method, newParticipants); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> participants = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.participant.name()); Assert.assertNotNull(participants); Assert.assertEquals(2, participants.size()); Assert.assertTrue(participants.contains(participant1)); Assert.assertTrue(participants.contains(participant2)); } @Test public void testRemoveParticipant() throws IOException, URISyntaxException { Identity participant = JunitTestHelper.createAndPersistIdentityAsRndAuthor("participant-4"); Roles partRoles = securityManager.getRoles(participant); RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); repositoryManager.addParticipants(participant, partRoles, new IdentitiesAddEvent(participant), re, null); dbInstance.commitAndCloseSession(); //remove the owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("participants").path(participant.getKey().toString()).build(); HttpDelete method = conn.createDelete(request, MediaType.APPLICATION_JSON); HttpResponse response = conn.execute(method); assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); conn.shutdown(); //check List<Identity> participants = repositoryService.getMembers(re, RepositoryEntryRelationType.defaultGroup, GroupRoles.participant.name()); Assert.assertNotNull(participants); Assert.assertTrue(participants.isEmpty()); Assert.assertFalse(participants.contains(participant)); } @Test public void getMetadata() throws IOException, URISyntaxException { RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); List<RepositoryEntryEducationalType> 
educationalTypes = repositoryManager.getAllEducationalTypes(); RepositoryEntryEducationalType educationalType = educationalTypes.get(0); re = repositoryManager.setDescriptionAndName(re, re.getDisplayname(), "Ext-REF", "Auth", "RE description", "RE objectives", "RE requirements", "RE credits", "DE", "Zurich", "3 days", null, null, null, educationalType); dbInstance.commitAndCloseSession(); //remove the owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("metadata").build(); HttpGet method = conn.createGet(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); RepositoryEntryMetadataVO metadataVo = conn.parse(response, RepositoryEntryMetadataVO.class); conn.shutdown(); //check Assert.assertNotNull(metadataVo); Assert.assertEquals(re.getKey(), metadataVo.getKey()); Assert.assertEquals(re.getDisplayname(), metadataVo.getDisplayname()); Assert.assertEquals("Ext-REF", metadataVo.getExternalRef()); Assert.assertEquals("Auth", metadataVo.getAuthors()); Assert.assertEquals("RE description", metadataVo.getDescription()); Assert.assertEquals("RE objectives", metadataVo.getObjectives()); Assert.assertEquals("RE requirements", metadataVo.getRequirements()); Assert.assertEquals("RE credits", metadataVo.getCredits()); Assert.assertEquals("DE", metadataVo.getMainLanguage()); Assert.assertEquals("Zurich", metadataVo.getLocation()); Assert.assertEquals("3 days", metadataVo.getExpenditureOfWork()); RepositoryEntryEducationalTypeVO educationTypeVo = metadataVo.getEducationalType(); Assert.assertNotNull(educationTypeVo); Assert.assertEquals(educationalType.getKey(), educationTypeVo.getKey()); Assert.assertEquals(educationalType.getIdentifier(), educationTypeVo.getIdentifier()); } @Test public void updateMetadata() throws IOException, URISyntaxException { RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("metadata").build(); HttpGet method = conn.createGet(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); RepositoryEntryMetadataVO metadataVo = conn.parse(response, RepositoryEntryMetadataVO.class); // fill the metadata metadataVo.setAuthors("Authors"); metadataVo.setCredits("The credits"); metadataVo.setDescription("A description"); metadataVo.setExpenditureOfWork("4 weeks"); metadataVo.setExternalRef("Reference"); metadataVo.setLocation("Biel/Bienne"); metadataVo.setMainLanguage("French"); metadataVo.setObjectives("Our objectives"); metadataVo.setRequirements("Their requirements"); List<RepositoryEntryEducationalType> educationalTypes = repositoryManager.getAllEducationalTypes(); RepositoryEntryEducationalType educationalType = educationalTypes.get(0); metadataVo.setEducationalType(RepositoryEntryEducationalTypeVO.valueOf(educationalType)); URI updateRequest = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("metadata").build(); HttpPost updateMethod = conn.createPost(updateRequest, MediaType.APPLICATION_JSON); conn.addJsonEntity(updateMethod, 
metadataVo); HttpResponse updateResponse = conn.execute(updateMethod); Assert.assertEquals(200, updateResponse.getStatusLine().getStatusCode()); RepositoryEntryMetadataVO updatedMetadataVo = conn.parse(updateResponse, RepositoryEntryMetadataVO.class); //check the response Assert.assertNotNull(metadataVo); Assert.assertEquals(re.getKey(), updatedMetadataVo.getKey()); Assert.assertEquals(re.getDisplayname(), updatedMetadataVo.getDisplayname()); Assert.assertEquals("Reference", updatedMetadataVo.getExternalRef()); Assert.assertEquals("Authors", updatedMetadataVo.getAuthors()); Assert.assertEquals("A description", updatedMetadataVo.getDescription()); Assert.assertEquals("Our objectives", updatedMetadataVo.getObjectives()); Assert.assertEquals("Their requirements", updatedMetadataVo.getRequirements()); Assert.assertEquals("The credits", updatedMetadataVo.getCredits()); Assert.assertEquals("French", updatedMetadataVo.getMainLanguage()); Assert.assertEquals("Biel/Bienne", updatedMetadataVo.getLocation()); Assert.assertEquals("4 weeks", updatedMetadataVo.getExpenditureOfWork()); RepositoryEntryEducationalTypeVO educationTypeVo = updatedMetadataVo.getEducationalType(); Assert.assertNotNull(educationTypeVo); Assert.assertEquals(educationalType.getKey(), educationTypeVo.getKey()); Assert.assertEquals(educationalType.getIdentifier(), educationTypeVo.getIdentifier()); RepositoryEntry updatedRe = repositoryService.loadByKey(re.getKey()); Assert.assertEquals(re.getKey(), updatedRe.getKey()); Assert.assertEquals(re.getDisplayname(), updatedRe.getDisplayname()); Assert.assertEquals("Reference", updatedRe.getExternalRef()); Assert.assertEquals("Authors", updatedRe.getAuthors()); Assert.assertEquals("A description", updatedRe.getDescription()); Assert.assertEquals("Our objectives", updatedRe.getObjectives()); Assert.assertEquals("Their requirements", updatedRe.getRequirements()); Assert.assertEquals("The credits", updatedRe.getCredits()); Assert.assertEquals("French", updatedRe.getMainLanguage()); Assert.assertEquals("Biel/Bienne", updatedRe.getLocation()); Assert.assertEquals("4 weeks", updatedRe.getExpenditureOfWork()); Assert.assertEquals(educationalType, updatedRe.getEducationalType()); } @Test public void getAccess() throws IOException, URISyntaxException { RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(); dbInstance.commitAndCloseSession(); //remove the owner RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("access").build(); HttpGet method = conn.createGet(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); RepositoryEntryAccessVO accessVo = conn.parse(response, RepositoryEntryAccessVO.class); conn.shutdown(); //check Assert.assertNotNull(accessVo); Assert.assertEquals(re.getKey(), accessVo.getRepoEntryKey()); Assert.assertEquals(re.getStatus(), accessVo.getStatus()); Assert.assertEquals(re.isAllUsers(), accessVo.isAllUsers()); Assert.assertEquals(re.isGuests(), accessVo.isGuests()); } @Test public void updateAccess() throws IOException, URISyntaxException { RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(false); dbInstance.commitAndCloseSession(); Assert.assertTrue(re.isAllUsers()); Assert.assertFalse(re.isGuests()); //remove the owner RestConnection conn = new RestConnection(); 
assertTrue(conn.login("administrator", "openolat")); RepositoryEntryAccessVO accessVo = new RepositoryEntryAccessVO(); accessVo.setStatus(RepositoryEntryStatusEnum.published.name()); accessVo.setAllUsers(false); accessVo.setGuests(false); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()).path("access").build(); HttpPost method = conn.createPost(request, MediaType.APPLICATION_JSON); conn.addJsonEntity(method, accessVo); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); RepositoryEntryAccessVO updatedAccessVo = conn.parse(response, RepositoryEntryAccessVO.class); conn.shutdown(); // check return value Assert.assertNotNull(updatedAccessVo); Assert.assertEquals(re.getKey(), updatedAccessVo.getRepoEntryKey()); Assert.assertEquals(RepositoryEntryStatusEnum.published.name(), updatedAccessVo.getStatus()); Assert.assertFalse(updatedAccessVo.isAllUsers()); Assert.assertFalse(updatedAccessVo.isGuests()); // check database value RepositoryEntry updatedRe = repositoryService.loadByKey(re.getKey()); Assert.assertEquals(RepositoryEntryStatusEnum.published, updatedRe.getEntryStatus()); Assert.assertFalse(updatedRe.isAllUsers()); Assert.assertFalse(updatedRe.isGuests()); } @Test public void getTaxonomylevels() throws IOException, URISyntaxException { RepositoryEntry re = JunitTestHelper.createAndPersistRepositoryEntry(false); dbInstance.commit(); Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-500", "Leveled taxonomy", null, null); TaxonomyLevel level = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy); repositoryEntryToTaxonomyLevelDao.createRelation(re, level); dbInstance.commitAndCloseSession(); RestConnection conn = new RestConnection(); assertTrue(conn.login("administrator", "openolat")); URI request = UriBuilder.fromUri(getContextURI()) .path("repo/entries").path(re.getKey().toString()) .path("taxonomy").path("levels").build(); HttpGet method = conn.createGet(request, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); List<TaxonomyLevelVO> levels = parseTaxonomyLevelVOArray(response.getEntity()); Assert.assertNotNull(levels); Assert.assertEquals(1, levels.size()); Assert.assertEquals(level.getKey(), levels.get(0).getKey()); } @Test public void addTaxonomyLevels() throws IOException, URISyntaxException { RepositoryEntry entry = JunitTestHelper.createAndPersistRepositoryEntry(false); dbInstance.commit(); Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-501", "Leveled taxonomy", null, null); TaxonomyLevel level = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy); dbInstance.commitAndCloseSession(); RestConnection conn = new RestConnection(); Assert.assertTrue(conn.login("administrator", "openolat")); URI uri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries") .path(entry.getKey().toString()) .path("taxonomy").path("levels").path(level.getKey().toString()).build(); HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); RepositoryEntry reloadedEntry = repositoryService.loadByKey(entry.getKey()); Set<RepositoryEntryToTaxonomyLevel> relationToLevels = 
reloadedEntry.getTaxonomyLevels(); Assert.assertNotNull(relationToLevels); Assert.assertEquals(1, relationToLevels.size()); RepositoryEntryToTaxonomyLevel relationToLevel = relationToLevels.iterator().next(); Assert.assertEquals(level, relationToLevel.getTaxonomyLevel()); } @Test public void addTwiceTaxonomyLevels() throws IOException, URISyntaxException { RepositoryEntry entry = JunitTestHelper.createAndPersistRepositoryEntry(false); dbInstance.commit(); Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-502", "Leveled taxonomy", null, null); TaxonomyLevel level = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy); repositoryEntryToTaxonomyLevelDao.createRelation(entry, level); dbInstance.commitAndCloseSession(); RestConnection conn = new RestConnection(); Assert.assertTrue(conn.login("administrator", "openolat")); URI uri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries") .path(entry.getKey().toString()) .path("taxonomy").path("levels").path(level.getKey().toString()).build(); HttpPut method = conn.createPut(uri, MediaType.APPLICATION_JSON, true); HttpResponse response = conn.execute(method); Assert.assertEquals(304, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); RepositoryEntry reloadedEntry = repositoryService.loadByKey(entry.getKey()); Set<RepositoryEntryToTaxonomyLevel> relationToLevels = reloadedEntry.getTaxonomyLevels(); Assert.assertNotNull(relationToLevels); Assert.assertEquals(1, relationToLevels.size()); RepositoryEntryToTaxonomyLevel relationToLevel = relationToLevels.iterator().next(); Assert.assertEquals(level, relationToLevel.getTaxonomyLevel()); } @Test public void deleteTaxonomyLevel() throws IOException, URISyntaxException { RepositoryEntry entry = JunitTestHelper.createAndPersistRepositoryEntry(false); dbInstance.commit(); Taxonomy taxonomy = taxonomyDao.createTaxonomy("ID-202", "Leveled taxonomy", null, null); TaxonomyLevel level1 = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy); TaxonomyLevel level2 = taxonomyLevelDao.createTaxonomyLevel("ID-Level-0", "My first taxonomy level", "A basic level", null, null, null, null, taxonomy); repositoryEntryToTaxonomyLevelDao.createRelation(entry, level1); repositoryEntryToTaxonomyLevelDao.createRelation(entry, level2); dbInstance.commitAndCloseSession(); // make sure we have something to delete List<TaxonomyLevel> levels = repositoryEntryToTaxonomyLevelDao.getTaxonomyLevels(entry); Assert.assertEquals(2, levels.size()); dbInstance.commitAndCloseSession(); RestConnection conn = new RestConnection(); Assert.assertTrue(conn.login("administrator", "openolat")); URI uri = UriBuilder.fromUri(getContextURI()).path("repo").path("entries") .path(entry.getKey().toString()) .path("taxonomy").path("levels").path(level1.getKey().toString()).build(); HttpDelete method = conn.createDelete(uri, MediaType.APPLICATION_JSON); HttpResponse response = conn.execute(method); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); EntityUtils.consume(response.getEntity()); // check that the right relation was deleted List<TaxonomyLevel> survivingLevels = repositoryEntryToTaxonomyLevelDao.getTaxonomyLevels(entry); Assert.assertEquals(1, survivingLevels.size()); Assert.assertEquals(level2, survivingLevels.get(0)); } private List<TaxonomyLevelVO> parseTaxonomyLevelVOArray(HttpEntity entity) { try(InputStream in=entity.getContent()) { 
ObjectMapper mapper = new ObjectMapper(jsonFactory); return mapper.readValue(in, new TypeReference<List<TaxonomyLevelVO>>(){/* */}); } catch (Exception e) { log.error("", e); return null; } } private List<UserVO> parseUserArray(HttpEntity entity) { try(InputStream in=entity.getContent()) { ObjectMapper mapper = new ObjectMapper(jsonFactory); return mapper.readValue(in, new TypeReference<List<UserVO>>(){/* */}); } catch (Exception e) { log.error("", e); return null; } } }
<filename>src/document/document.service.ts
import { Injectable } from '@nestjs/common';
import { InjectModel } from '@nestjs/mongoose';
import { Model } from 'mongoose';
import * as mongoose from 'mongoose';

import { CreateDocumentDto } from './dto/create-document.dto';
import { UpdateDocumentDto } from './dto/update-document.dto';
import { Documents, DocumentsSchema } from './schemas/document.schema';

@Injectable()
export class DocumentService {
  constructor(
    @InjectModel(Documents.name) private documentsModel: Model<CreateDocumentDto>,
  ) {}

  create(createDocumentDto: CreateDocumentDto) {
    return 'This action adds a new document';
  }

  findAll() {
    return `This action returns all document`;
  }

  findOne(id: number) {
    return `This action returns a #${id} document`;
  }

  // Find the document record matching both the user id and the role.
  checkData(id, role) {
    const checkData = this.documentsModel.findOne({
      $and: [{ user_id: mongoose.Types.ObjectId(id) }, { role: role }],
    });
    return checkData;
  }

  checkAccountStatusFromId(user_id) {
    const checkData = this.documentsModel.find({ user_id: mongoose.Types.ObjectId(user_id) });
    return checkData;
  }

  // Insert a new record, or update the existing one for this user/role pair.
  updateInsert(id, role, data, type) {
    data['user_id'] = id;
    data['role'] = role;
    if (type == 'insert') {
      const uploadUser = new this.documentsModel(data);
      return uploadUser.save();
    } else if (type == 'update') {
      const updateuser = this.documentsModel.findOneAndUpdate(
        { $and: [{ user_id: mongoose.Types.ObjectId(id) }, { role: role }] },
        { $set: data },
      );
      return updateuser;
    }
  }

  remove(id: number) {
    return `This action removes a #${id} document`;
  }
}
#!/bin/bash
node_version='0.12.7'

# Check if node version manager is installed.
if [ ! -f ~/.nvm/nvm.sh ] ; then
    echo "Installing node version manager - to let you easily switch between different versions of node"
    wget -qO- https://raw.githubusercontent.com/creationix/nvm/v0.23.3/install.sh | bash
fi

. $HOME/.nvm/nvm.sh

echo "Installing node version" $node_version
nvm install $node_version > /dev/null
nvm use $node_version > /dev/null
<filename>modules/caas/backend/src/main/java/io/cattle/platform/allocator/port/PortManager.java
package io.cattle.platform.allocator.port;

import io.cattle.platform.core.util.PortSpec;

import java.util.Collection;

public interface PortManager {

    boolean portsFree(long clusterId, long hostId, Collection<PortSpec> ports);

    void assignPorts(long clusterId, long hostId, long instanceId, Collection<PortSpec> ports);

    boolean optionallyAssignPorts(long clusterId, long hostId, long instanceId, Collection<PortSpec> ports);

    void releasePorts(long clusterId, long hostId, long instanceId, Collection<PortSpec> ports);
}
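To illustrate the contract, a minimal in-memory sketch — this is not Cattle's real implementation; it assumes PortSpec exposes a getPublicPort() accessor, reduces a port binding to its public port, and ignores persistence:

// Hedged sketch: tracks (clusterId:hostId:publicPort) -> instanceId in memory.
import io.cattle.platform.core.util.PortSpec;

import java.util.Collection;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

class InMemoryPortManager implements PortManager {
    private final Map<String, Long> assigned = new ConcurrentHashMap<>();

    private String key(long clusterId, long hostId, PortSpec port) {
        return clusterId + ":" + hostId + ":" + port.getPublicPort(); // accessor assumed
    }

    @Override
    public boolean portsFree(long clusterId, long hostId, Collection<PortSpec> ports) {
        return ports.stream().noneMatch(p -> assigned.containsKey(key(clusterId, hostId, p)));
    }

    @Override
    public void assignPorts(long clusterId, long hostId, long instanceId, Collection<PortSpec> ports) {
        ports.forEach(p -> assigned.put(key(clusterId, hostId, p), instanceId));
    }

    @Override
    public boolean optionallyAssignPorts(long clusterId, long hostId, long instanceId, Collection<PortSpec> ports) {
        synchronized (assigned) { // the check-then-act pair must be atomic
            if (!portsFree(clusterId, hostId, ports)) {
                return false;
            }
            assignPorts(clusterId, hostId, instanceId, ports);
            return true;
        }
    }

    @Override
    public void releasePorts(long clusterId, long hostId, long instanceId, Collection<PortSpec> ports) {
        // Two-arg remove() only deletes entries still owned by this instance.
        ports.forEach(p -> assigned.remove(key(clusterId, hostId, p), instanceId));
    }
}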
# ubuntu/libs

init() {
    echo "init"
}

run() {
    apt-get update
    apt-get install -y --no-install-recommends \
        autoconf \
        automake \
        cmake \
        curl \
        dpkg-dev \
        file \
        gfortran \
        libbluetooth-dev \
        libbz2-dev \
        libc6-dev \
        libexpat1-dev \
        libffi-dev \
        libgdbm-dev \
        libgit2-28 \
        libgit2-dev \
        liblzma-dev \
        libncursesw5-dev \
        libnghttp2-dev \
        libreadline-dev \
        libsqlite3-dev \
        libssh2-1 \
        libssh2-1-dev \
        libssl-dev \
        nghttp2 \
        ninja-build \
        pkg-config \
        python3 \
        tk-dev \
        unzip \
        uuid-dev \
        wget \
        xz-utils \
        zlib1g \
        zlib1g-dev
    rm -rf /var/lib/apt/lists/*
    mkdir /var/lib/apt/lists/partial
}

clean() {
    rm -rf pkgs
}

# imagemagick \
# libcurl4-openssl-dev \
# libdb-dev \
# libevent-dev \
# libglib2.0-dev \
# libgmp-dev \
# libjpeg-dev \
# libkrb5-dev \
# libmagickcore-dev \
# libmagickwand-dev \
# libmaxminddb-dev \
# libncurses5-dev \
# libncursesw5-dev \
# libpng-dev \
# libpq-dev \
# libtool \
# libwebp-dev \
# libxml2-dev \
# libxslt-dev \
# libyaml-dev \
<reponame>benoitc/pypy """ The rpython-level part of locale module """ import sys from pypy.rpython.lltypesystem import rffi, lltype from pypy.translator.tool.cbuild import ExternalCompilationInfo from pypy.rpython.tool import rffi_platform as platform from pypy.rpython.extfunc import register_external class LocaleError(Exception): def __init__(self, message): self.message = message HAVE_LANGINFO = sys.platform != 'win32' HAVE_LIBINTL = sys.platform != 'win32' libraries = [] if HAVE_LIBINTL: try: platform.verify_eci(ExternalCompilationInfo(includes=['libintl.h'], libraries=['intl'])) libraries.append('intl') except platform.CompilationError: try: platform.verify_eci(ExternalCompilationInfo(includes=['libintl.h'])) except platform.CompilationError: HAVE_LIBINTL = False class CConfig: includes = ['locale.h', 'limits.h', 'ctype.h'] libraries = libraries if HAVE_LANGINFO: includes += ['langinfo.h'] if HAVE_LIBINTL: includes += ['libintl.h'] if sys.platform == 'win32': includes += ['windows.h'] _compilation_info_ = ExternalCompilationInfo( includes=includes, libraries=libraries ) HAVE_BIND_TEXTDOMAIN_CODESET = platform.Has('bind_textdomain_codeset') lconv = platform.Struct("struct lconv", [ # Numeric (non-monetary) information. ("decimal_point", rffi.CCHARP), # Decimal point character. ("thousands_sep", rffi.CCHARP), # Thousands separator. ## Each element is the number of digits in each group; ## elements with higher indices are farther left. ## An element with value CHAR_MAX means that no further grouping is done. ## An element with value 0 means that the previous element is used ## for all groups farther left. */ ("grouping", rffi.CCHARP), ## Monetary information. ## First three chars are a currency symbol from ISO 4217. ## Fourth char is the separator. Fifth char is '\0'. ("int_curr_symbol", rffi.CCHARP), ("currency_symbol", rffi.CCHARP), # Local currency symbol. ("mon_decimal_point", rffi.CCHARP), # Decimal point character. ("mon_thousands_sep", rffi.CCHARP), # Thousands separator. ("mon_grouping", rffi.CCHARP), # Like `grouping' element (above). ("positive_sign", rffi.CCHARP), # Sign for positive values. ("negative_sign", rffi.CCHARP), # Sign for negative values. ("int_frac_digits", rffi.UCHAR), # Int'l fractional digits. ("frac_digits", rffi.UCHAR), # Local fractional digits. ## 1 if currency_symbol precedes a positive value, 0 if succeeds. ("p_cs_precedes", rffi.UCHAR), ## 1 iff a space separates currency_symbol from a positive value. ("p_sep_by_space", rffi.UCHAR), ## 1 if currency_symbol precedes a negative value, 0 if succeeds. ("n_cs_precedes", rffi.UCHAR), ## 1 iff a space separates currency_symbol from a negative value. ("n_sep_by_space", rffi.UCHAR), ## Positive and negative sign positions: ## 0 Parentheses surround the quantity and currency_symbol. ## 1 The sign string precedes the quantity and currency_symbol. ## 2 The sign string follows the quantity and currency_symbol. ## 3 The sign string immediately precedes the currency_symbol. ## 4 The sign string immediately follows the currency_symbol. 
("p_sign_posn", rffi.UCHAR), ("n_sign_posn", rffi.UCHAR), ]) constants = {} constant_names = ( 'LC_CTYPE', 'LC_NUMERIC', 'LC_TIME', 'LC_COLLATE', 'LC_MONETARY', 'LC_MESSAGES', 'LC_ALL', 'LC_PAPER', 'LC_NAME', 'LC_ADDRESS', 'LC_TELEPHONE', 'LC_MEASUREMENT', 'LC_IDENTIFICATION', 'LC_MIN', 'LC_MAX', # from limits.h 'CHAR_MAX', ) for name in constant_names: setattr(CConfig, name, platform.DefinedConstantInteger(name)) langinfo_names = [] if HAVE_LANGINFO: # some of these consts have an additional #ifdef directives # should we support them? langinfo_names.extend('RADIXCHAR THOUSEP CRNCYSTR D_T_FMT D_FMT T_FMT ' 'AM_STR PM_STR CODESET T_FMT_AMPM ERA ERA_D_FMT ' 'ERA_D_T_FMT ERA_T_FMT ALT_DIGITS YESEXPR NOEXPR ' '_DATE_FMT'.split()) for i in range(1, 8): langinfo_names.append("DAY_%d" % i) langinfo_names.append("ABDAY_%d" % i) for i in range(1, 13): langinfo_names.append("MON_%d" % i) langinfo_names.append("ABMON_%d" % i) if sys.platform == 'win32': langinfo_names.extend('LOCALE_USER_DEFAULT LOCALE_SISO639LANGNAME ' 'LOCALE_SISO3166CTRYNAME LOCALE_IDEFAULTLANGUAGE ' ''.split()) for name in langinfo_names: setattr(CConfig, name, platform.DefinedConstantInteger(name)) class cConfig(object): pass for k, v in platform.configure(CConfig).items(): setattr(cConfig, k, v) # needed to export the constants inside and outside. see __init__.py for name in constant_names: value = getattr(cConfig, name) if value is not None: constants[name] = value for name in langinfo_names: value = getattr(cConfig, name) if value is not None and sys.platform != 'win32': constants[name] = value locals().update(constants) HAVE_BIND_TEXTDOMAIN_CODESET = cConfig.HAVE_BIND_TEXTDOMAIN_CODESET def external(name, args, result, calling_conv='c', **kwds): return rffi.llexternal(name, args, result, compilation_info=CConfig._compilation_info_, calling_conv=calling_conv, sandboxsafe=True, **kwds) _lconv = lltype.Ptr(cConfig.lconv) localeconv = external('localeconv', [], _lconv) def numeric_formatting(): """Specialized function to get formatting for numbers""" return numeric_formatting_impl() def numeric_formatting_impl(): conv = localeconv() decimal_point = rffi.charp2str(conv.c_decimal_point) thousands_sep = rffi.charp2str(conv.c_thousands_sep) grouping = rffi.charp2str(conv.c_grouping) return decimal_point, thousands_sep, grouping def oo_numeric_formatting(): return '.', '', '' register_external(numeric_formatting, [], (str, str, str), llimpl=numeric_formatting_impl, ooimpl=oo_numeric_formatting, sandboxsafe=True) _setlocale = external('setlocale', [rffi.INT, rffi.CCHARP], rffi.CCHARP) def setlocale(category, locale): if cConfig.LC_MAX is not None: if not cConfig.LC_MIN <= category <= cConfig.LC_MAX: raise LocaleError("invalid locale category") ll_result = _setlocale(rffi.cast(rffi.INT, category), locale) if not ll_result: raise LocaleError("unsupported locale setting") return rffi.charp2str(ll_result) isalpha = external('isalpha', [rffi.INT], rffi.INT, oo_primitive='locale_isalpha') isupper = external('isupper', [rffi.INT], rffi.INT, oo_primitive='locale_isupper') islower = external('islower', [rffi.INT], rffi.INT, oo_primitive='locale_islower') tolower = external('tolower', [rffi.INT], rffi.INT, oo_primitive='locale_tolower') isalnum = external('isalnum', [rffi.INT], rffi.INT, oo_primitive='locale_isalnum') if HAVE_LANGINFO: _nl_langinfo = external('nl_langinfo', [rffi.INT], rffi.CCHARP) def nl_langinfo(key): if key in constants.values(): return rffi.charp2str(_nl_langinfo(rffi.cast(rffi.INT, key))) raise ValueError 
#___________________________________________________________________ # getdefaultlocale() implementation for Windows if sys.platform == 'win32': from pypy.rlib import rwin32 LCID = LCTYPE = rwin32.DWORD GetACP = external('GetACP', [], rffi.INT, calling_conv='win') GetLocaleInfo = external('GetLocaleInfoA', [LCID, LCTYPE, rwin32.LPSTR, rffi.INT], rffi.INT, calling_conv='win') def getdefaultlocale(): encoding = "cp%d" % GetACP() BUFSIZE = 50 buf_lang = lltype.malloc(rffi.CCHARP.TO, BUFSIZE, flavor='raw') buf_country = lltype.malloc(rffi.CCHARP.TO, BUFSIZE, flavor='raw') try: if (GetLocaleInfo(cConfig.LOCALE_USER_DEFAULT, cConfig.LOCALE_SISO639LANGNAME, buf_lang, BUFSIZE) and GetLocaleInfo(cConfig.LOCALE_USER_DEFAULT, cConfig.LOCALE_SISO3166CTRYNAME, buf_country, BUFSIZE)): lang = rffi.charp2str(buf_lang) country = rffi.charp2str(buf_country) language = "%s_%s" % (lang, country) # If we end up here, this windows version didn't know about # ISO639/ISO3166 names (it's probably Windows 95). Return the # Windows language identifier instead (a hexadecimal number) elif GetLocaleInfo(cConfig.LOCALE_USER_DEFAULT, cConfig.LOCALE_IDEFAULTLANGUAGE, buf_lang, BUFSIZE): lang = rffi.charp2str(buf_lang) language = "0x%s" % (lang,) else: language = None finally: lltype.free(buf_lang, flavor='raw') lltype.free(buf_country, flavor='raw') return language, encoding
<html>
<head>
    <title>Sign Up</title>
    <script>
        function validateForm() {
            var email = document.forms["signup"]["email"].value;
            var username = document.forms["signup"]["username"].value;
            var password = document.forms["signup"]["password"].value;
            if (email == "") {
                alert("Email address is required");
                return false;
            }
            if (username == "") {
                alert("Username is required");
                return false;
            }
            if (password == "") {
                alert("Password is required");
                return false;
            }
            alert("Sign up successful");
            return true; // explicitly allow the submit once all fields are filled
        }
    </script>
</head>
<body>
    <form name="signup" action="" onsubmit="return validateForm()">
        Email: <input type="text" name="email"><br>
        Username: <input type="text" name="username"><br>
        Password: <input type="password" name="password"><br>
        <input type="submit" value="Submit">
    </form>
</body>
</html>
pattern = r"^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$"
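This pattern validates dotted-quad IPv4 addresses: each octet alternative caps the value at 255, and the `[1-9]?[0-9]` branch rejects leading zeros such as "01". A hedged Java sketch exercising it — the class name is arbitrary, and the backslashes are doubled because Java has no raw string literals:

// Hedged demo of the IPv4 pattern above.
import java.util.regex.Pattern;

public class Ipv4Check {
    // Same regex as above, escaped for a Java string literal.
    private static final Pattern IPV4 = Pattern.compile(
            "^(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\\.){3}" +
            "(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])$");

    public static void main(String[] args) {
        String[] samples = {"192.168.0.1", "255.255.255.255", "256.1.1.1", "192.168.01.1", "1.2.3"};
        for (String s : samples) {
            // Expected output: true, true, false, false, false
            System.out.println(s + " -> " + IPV4.matcher(s).matches());
        }
    }
}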
var boots = [ { name:"<NAME>", type:"Boots", weight:0, hc:false, season:false, craft:{ rp:25,ad:12,vc:45,fs:2,db:8 }, smartLoot:[ "<NAME>", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ RANDOM:4 }, secondary:{ RANDOM:2 }, set:'Asheara\'s Vestments', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_014_x1_demonhunter_male.png', flavor:'The Iron Wolves have traveled far and done much in the last twenty years. From the docks of Kurast to the palaces of Caldeum, they upheld their duty until they were driven from the emperor\'s side by the Imperial Guard.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "<NAME>", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ MAIN:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Blackthorne\'s Battlegear', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_019_x1_demonhunter_male.png', flavor:'The leather and scale joints provide improved flexibility for better foot positioning in dueling and stability in mounted combat.' }, { name:"<NAME>", type:"Boots", weight:0, hc:false, season:false, craft:{ rp:25,ad:12,vc:45,fs:2 }, smartLoot:[ "Demon Hunter", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ MoveSpeed:null, RANDOM:3 }, secondary:{ RANDOM:2 }, image:'//media.blizzard.com/d3/icons/items/large/unique_boots_005_x1_demonhunter_male.png', flavor:'Well worn, but still sturdy, these boots look as if they have traveled all the lands of Sanctuary many times over.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Demon Hunter", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ MAIN:null, SKILLDAMAGE:null, RANDOM:2 }, secondary:{ RANDOM:2 }, image:'//media.blizzard.com/d3/icons/items/large/unique_boots_001_x1_demonhunter_male.png', flavor:'Do not overlook the importance of a good boot.' }, { name:"<NAME>", type:"Boots", weight: 0, hc: false, season: false, craft:{ rp:25,ad:12,vc:45,fs:2,db:8 }, smartLoot:[ "<NAME>", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ RANDOM:4 }, secondary:{ RANDOM:2 }, set:'Cain\'s Destiny', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_015_x1_demonhunter_male.png', flavor:'<NAME> spent the last twenty years of his life traveling the world in search of information needed to defeat the Lords of Hell.' }, { name:"<NAME>", type:"Boots", weight:0, hc:false, season:false, craft:{ rp:25,ad:12,vc:45,fs:2,db:8 }, smartLoot:[ "Demon Hunter", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ RANDOM:4 }, secondary:{ RANDOM:2 }, set:'Captain Crimson\'s Trimmings', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_017_x1_demonhunter_male.png', flavor:'Captain Crimson, scourge of the Twin Seas, claimed his numerous victories were the result of sensible—yet fashionable—footwear.' }, { name: "Eight-Demon Boots", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Monk" ], primary:{ Dexterity:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Raiment of a Thousand Storms', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_08_x1_demonhunter_male.png', flavor:'"Infused with the powers of wind, lightning, all that kind of thing! Then again... a couple of extra demons never hurt." 
—<NAME>' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Demon Hunter", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ MAIN:null, MoveSpeed:null, RANDOM:2 }, secondary:{ FireWalker:{ min:300, max:400 }, RANDOM:1 }, image:'//media.blizzard.com/d3/icons/items/large/unique_boots_007_p2_demonhunter_male.png', flavor:'Tradition has it that these boots were forged in the heart of the great Challsop Volcano.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Wizard" ], primary:{ Intelligence:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Firebird\'s Finery', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_06_x1_demonhunter_male.png', flavor:'"The metal plates of these greaves hang on swivels, permitting them to swing. The plates exaggerate the soma of a casting wizard. From a design standpoint, truly remarkable feats!" —Landes the Keeth' }, { name:"<NAME>", type:"Boots", weight:100, hc:false, season:false, smartLoot: [ "Demon Hunter" ], primary:{ Dexterity:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Unhallowed Essence', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_03_p2_demonhunter_male.png', flavor:'"Hell is a state of mind."-Kovan the Merciless' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Witch Doctor" ], primary:{ Intelligence:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Helltooth Harness', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_16_x1_demonhunter_male.png', flavor:'"As I slay more and more demons, I have come to know their purpose. They exist to make us stronger." —Jeram of the Whispering Valley' }, { name: "<NAME>", type: "Boots", weight: 25, hc: false, season: false, smartLoot: [ "Demon Hunter", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ MAIN:null, AllResist:null, ColdReduc:{ min:7, max:10 }, RANDOM:1 }, secondary:{ FreezeImmune:null, RANDOM:1 }, image:'//media.blizzard.com/d3/icons/items/large/unique_boots_008_x1_demonhunter_male.png', flavor:'Worn by the famous explorer Theradonn, known for his sunny disposition.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Barbarian" ], primary:{ Strength:null, AllResist:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Immortal King\'s Call', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_012_x1_demonhunter_male.png', flavor:'The king of the barbarians strode the world like a god.' }, { name:"<NAME>", type:"Boots", weight:100, hc:false, season:false, smartLoot:[ ], primary:{ Dexterity:null, AllResist:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Inna\'s Mantra', image:'//media.blizzard.com/d3/icons/items/large/p2_unique_boots_02_demonhunter_male.png', flavor:'Inna, the goddess of the sky, treads upon clouds in the winter and the summer, in the night and the day.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Demon Hunter", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ MAIN:null, Vitality:null, RANDOM:2 }, secondary:{ Irontoe:{ min:25, max:30 }, RANDOM:1 }, image:'//media.blizzard.com/d3/icons/items/large/unique_boots_104_x1_demonhunter_male.png', flavor:'"Good, heavy steel. Nice flex around the ankle, good weight in the toes. A solid pair of boots. What, did you need them to sparkle?" 
—Kormac the Templar' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Witch Doctor" ], primary:{ Intelligence:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Raiment of the Jade Harvester', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_09_x1_demonhunter_male.png', flavor:'Tukam moved through the jungles with such grace and speed that some witch doctors claimed he was not umbaru at all, but a spirit sent to their world from Mbwiru Eikura.' }, { name: "<NAME>", type: "Boots", weight: 50, hc: false, season: false, smartLoot: [ "Barbarian" ], primary:{ Strength:null, RANDOM:3 }, secondary:{ LutSocks:null, RANDOM:1 }, image:'//media.blizzard.com/d3/icons/items/large/unique_boots_009_x1_demonhunter_male.png', flavor:'Southal of Lut Bahadur crafted these boots in order to reach high, open windows to make his thieving easier. He was last seen disappearing into the night in the claws of a large carrion bat.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Demon Hunter" ], primary:{ Dexterity:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Embodiment of the Marauder', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_07_x1_demonhunter_male.png', flavor:'"You will learn to walk as we do. You will stalk these monsters, and after you have made your kill and faded away, the demons will find naught but their own treads upon the earth."—<NAME>' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Demon Hunter" ], primary:{ Dexterity:null, AllResist:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Natalya\'s Vengeance', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_011_x1_demonhunter_male.png', flavor:'Treading through the blood of demons brought her some small semblance of peace.' }, { name:"<NAME>", type:"Boots", weight:100, hc:false, season:true, smartLoot: [ "Wizard" ], primary:{ Intelligence:null, AllResist:null, RANDOM:2 }, secondary:{ Nilfur:{ min:150, max:200 }, RANDOM:1 }, image:'//media.blizzard.com/d3/icons/items/large/p2_unique_boots_01_demonhunter_male.png', flavor:'The Horad<NAME> was so accurate in her spellcasting that she could call meteors to land at her feet.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Barbarian" ], primary:{ Strength:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'The Legacy of Raekor', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_05_x1_demonhunter_male.png', flavor:'Loren of the Samaurenian famously said he would crush the barbarians beneath his boots. Raekor took great delight in ripping them from his cold, dead feet. Forever after, they were a symbol of Raekor\'s greatest triumph.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Crusader" ], primary:{ Strength:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Roland\'s Legacy', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_01_p1_demonhunter_male.png', flavor:'"I walk forever in my master’s boots so that I never forget how my insolence caused his death. To honor his memory, I live my life in a manner that embodies everything it means to be a crusader." 
—Meditations on My Redemption' }, { name:"<NAME>", type:"Boots", weight:100, hc:false, season:false, smartLoot:["Barbarian"], primary:{ Strength:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Wrath of the Wastes', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_01_p2_demonhunter_male.png', flavor:'The Tribe of Thunder was moving through the wastes when a madman with one arm rushed from the snowy twilight, babbling of horrendous acts, both suffered and witnessed. He was clad only in rags and these sabatons.' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Crusader" ], primary:{ Strength:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Armor of Akkhan', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_10_x1_demonhunter_male.png', flavor:'"Let the crusaders sheathe their feet in sabatons of unquestionable strength, for the crusade will take them all over the world." -Akkhan' }, { name:"Sage's Passage", type:"Boots", weight:0, hc:false, season:false, craft:{ rp:25,ad:12,vc:45,fs:2,db:8 }, smartLoot:[ "Demon Hunter", "Monk", "Barbarian", "Crusader", "Wizard", "Witch Doctor" ], primary:{ RANDOM:4 }, secondary:{ RANDOM:2 }, set:'Sage\'s Journey', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_018_x1_demonhunter_male.png', flavor:'The ancient sage Imris crafted the original version of these boots to protect him while he traveled the world, seeking a way to retune the Worldstone to restore the power of the nephalem.' }, { name: "The Crudest Boots", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Monk" ], primary:{ Dexterity:null, MoveSpeed:null, RANDOM:2 }, secondary:{ Crudest:null, RANDOM:1 }, image:'//media.blizzard.com/d3/icons/items/large/p1_unique_boots_010_demonhunter_male.png', flavor:'You\'ve got to mind those feets.' }, { name: "The Shadow's Heels", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "<NAME>" ], primary:{ Dexterity:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'The Shadow\'s Mantle', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_14_x1_demonhunter_male.png', flavor:'"I walk amongst the shadows and call them my own. With every silent stride, I claim my domain." —<NAME>' }, { name:"Striders of Destiny", type:"Boots", weight:100, hc:false, season:false, smartLoot:["Wizard"], primary:{ Intelligence:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Delsere\'s Magnum Opus', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_02_p2_demonhunter_male.png', flavor:'"Like many of his fellow students, Delsere was fascinated by Li-Ming. His master work was an homage to her, and he swore one day she would wear it."- High Councilor Valthek' }, { name: "Vyr's Swaggering Stance", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Wizard" ], primary:{ Intelligence:null, SKILLDAMAGE:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Vyr\'s Amazing Arcana', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_set_13_x1_demonhunter_male.png', flavor:'"The last piece of Vyr\'s Archon armor to be found, these ornate boots were long believed to have been destroyed along with the Black Obelisk that bore the wizard’s name." 
—<NAME>' }, { name: "<NAME>", type: "Boots", weight: 100, hc: false, season: false, smartLoot: [ "Witch Doctor" ], primary:{ Intelligence:null, Vitality:null, RANDOM:2 }, secondary:{ RANDOM:2 }, set:'Zunimassa\'s Haunt', image:'//media.blizzard.com/d3/icons/items/large/unique_boots_013_x1_demonhunter_male.png', flavor:'Zunimassa spread her wisdom further than any other witch doctor of the Teganze.' } ]; module.exports = boots;
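// Usage sketch (not part of the original module): loading the exported array
// in Node.js and picking out the items relevant to one class. The require
// path and the "Monk" filter below are illustrative assumptions.
const bootsData = require("./boots");

// Set pieces that can smart-drop for a Monk and can actually drop (weight > 0).
const monkSetBoots = bootsData.filter(
    (b) => b.weight > 0 && b.set && b.smartLoot.includes("Monk")
);
console.log(monkSetBoots.map((b) => b.set));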
//go:build darwin
// +build darwin

package fsevents

import (
	"github.com/noncgo/x/darwin/corefoundation"
	"github.com/noncgo/x/darwin/internal/cabi"
	"github.com/noncgo/x/darwin/internal/types"
)

// Stream is an opaque reference to a FSEventStream type.
//
// References
// • https://developer.apple.com/documentation/coreservices/fseventstreamref
type Stream types.FSEventStreamRef

// ConstStream is an opaque reference to a constant FSEventStream type.
//
// References
// • https://developer.apple.com/documentation/coreservices/constfseventstreamref
type ConstStream types.ConstFSEventStreamRef

// StreamContext is a structure containing stream’s user info.
//
// References
// • https://developer.apple.com/documentation/coreservices/fseventstreamcontext
type StreamContext struct {
	// Info is an arbitrary client-defined value to be associated with the
	// stream and passed to the callback when it is invoked.
	Info any

	// TODO: also add allocator callbacks.
}

// ShowStream prints a description of the supplied stream to stderr for
// debugging purposes.
//
// References
// • https://developer.apple.com/documentation/coreservices/1444302-fseventstreamshow
func ShowStream(s ConstStream) {
	cabi.Call(
		extern_FSEventStreamShow_trampolineABI0,
		cabi.Void(),
		cabi.Uintptr(s.Pointer()),
	)
}

// RetainStream increments the stream’s reference counter.
//
// References
// • https://developer.apple.com/documentation/coreservices/1444986-fseventstreamretain?language=objc
func RetainStream(s Stream) {
	cabi.Call(
		extern_FSEventStreamRetain_trampolineABI0,
		cabi.Void(),
		cabi.Uintptr(s.Pointer()),
	)
}

// ReleaseStream decrements the stream’s reference counter. The counter is
// initially one and is incremented via RetainStream. If the counter reaches
// zero then the stream is deallocated.
//
// References
// • https://developer.apple.com/documentation/coreservices/1445989-fseventstreamrelease
func ReleaseStream(s Stream) {
	cabi.Call(
		extern_FSEventStreamRelease_trampolineABI0,
		cabi.Void(),
		cabi.Uintptr(s.Pointer()),
	)
}

// StartStream attempts to register with the FS Events service to receive events
// per the parameters in the stream.
//
// References
// • https://developer.apple.com/documentation/coreservices/1448000-fseventstreamstart
func StartStream(s Stream) bool {
	var out bool
	cabi.Call(
		extern_FSEventStreamStart_trampolineABI0,
		cabi.OutBool(&out),
		cabi.Uintptr(s.Pointer()),
	)
	return out
}

// StopStream unregisters the stream from the FS Events service.
//
// Once stopped, the stream can be restarted via StartStream, at which point it
// will resume receiving events from where it left off (“sinceWhen”).
//
// References
// • https://developer.apple.com/documentation/coreservices/1447673-fseventstreamstop
func StopStream(s Stream) {
	cabi.Call(
		extern_FSEventStreamStop_trampolineABI0,
		cabi.Void(),
		cabi.Uintptr(s.Pointer()),
	)
}

// ScheduleStreamWithRunLoop schedules the stream on the specified run loop.
//
// References
// • https://developer.apple.com/documentation/coreservices/1447824-fseventstreamschedulewithrunloop
func ScheduleStreamWithRunLoop(s Stream, runLoop corefoundation.RunLoop, mode corefoundation.RunLoopMode) {
	cabi.Call(
		extern_FSEventStreamScheduleWithRunLoop_trampolineABI0,
		cabi.Void(),
		cabi.Uintptr(s.Pointer()),
		cabi.Uintptr(runLoop.Pointer()),
		cabi.Uintptr(mode.Pointer()),
	)
}

// UnscheduleStreamFromRunLoop unschedules the stream from the specified run
// loop.
//
// References
// • https://developer.apple.com/documentation/coreservices/1441982-fseventstreamunschedulefromrunlo
func UnscheduleStreamFromRunLoop(s Stream, runLoop corefoundation.RunLoop, mode corefoundation.RunLoopMode) {
	cabi.Call(
		extern_FSEventStreamUnscheduleFromRunLoop_trampolineABI0,
		cabi.Void(),
		cabi.Uintptr(s.Pointer()),
		cabi.Uintptr(runLoop.Pointer()),
		cabi.Uintptr(mode.Pointer()),
	)
}

// InvalidateStream unschedules the stream from any run loops or dispatch queues
// upon which it had been scheduled.
//
// References
// • https://developer.apple.com/documentation/coreservices/1446990-fseventstreaminvalidate
func InvalidateStream(s Stream) {
	cabi.Call(
		extern_FSEventStreamInvalidate_trampolineABI0,
		cabi.Void(),
		cabi.Uintptr(s.Pointer()),
	)
}
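// exampleStreamLifecycle is a usage sketch (not part of the original file)
// showing the expected lifecycle of the wrappers above. Creating the stream
// (FSEventStreamCreate) and obtaining a run loop and mode are outside this
// file, so the parameters below are assumed to be supplied by the caller.
func exampleStreamLifecycle(s Stream, runLoop corefoundation.RunLoop, mode corefoundation.RunLoopMode) {
	RetainStream(s)        // take an extra reference for the duration of this function
	defer ReleaseStream(s) // drop it when done

	ScheduleStreamWithRunLoop(s, runLoop, mode) // must be scheduled before starting
	if !StartStream(s) {
		// Registration with the FS Events service failed; detach and bail out.
		InvalidateStream(s)
		return
	}

	// ... events are delivered via the stream's callback while it runs ...

	StopStream(s)       // can be restarted later from where it left off ("sinceWhen")
	InvalidateStream(s) // unschedule from the run loop before the final release
}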
#!/bin/bash
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

BuildStep() {
  DefaultPythonModuleBuildStep
}

InstallStep() {
  DefaultPythonModuleInstallStep
}

ConfigureStep() {
  return
}
<reponame>nft-login/nft-marketplace<gh_stars>1-10
import { Token } from "./token";

export interface Blockchain {
    init(): Promise<void>;
    chainId(): Promise<string>;
    contractAddress(): Promise<string>;
    loadContract(contractAddress: string): Promise<void>;
    account(): Promise<string>;
    balance(): Promise<string>;
    nftName(): Promise<string>;
    nftSymbol(): Promise<string>;
    balanceOf(account: string): Promise<number>;
    baseURI(): Promise<string>;
    tokenCount(): Promise<number>;
    getToken(tokenId: number): Promise<Token>;
    buyToken(tokenId: number, price: string): Promise<void>;
    toggleForSale(tokenId: number): Promise<void>;
    changeTokenPrice(tokenId: number, newPrice: string): Promise<void>;
    deployNew(name: string, symbol: string, tokenURI: string, price: string): Promise<string>;
    mint(tokenCount: number): Promise<void>;
}
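// Usage sketch (not part of the original file): how a consumer might drive an
// implementation of the interface above. The token id and price value are
// illustrative assumptions; price units depend on the implementation.
async function listAndBuy(chain: Blockchain): Promise<void> {
    await chain.init();
    console.log(`connected to chain ${await chain.chainId()} as ${await chain.account()}`);

    const total = await chain.tokenCount();
    for (let id = 0; id < total; id++) {
        const token: Token = await chain.getToken(id);
        console.log(id, token);
    }

    // Buy the first token at an illustrative price.
    await chain.buyToken(0, "1000000000000000000");
}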
// Define the generatePassword function separately
function generatePassword() {
    /* ________ Local Variables ________ */

    // Variable initialized to the value of the user's numeric input
    let userNumInput = prompt('Select Desired Password Length, Min: 8, Max: 128', '48');

    // const variables for the charsets
    const alphabet = '<KEY>';
    const numbers = '0123456789';
    const special = "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~";

    // Confirmation strings to be combined and displayed after password generation.
    let yesGenerated = 'Password generated: ';
    const yesLower = 'added lowercase, ';
    const yesUpper = 'added UPPER, ';
    const yesSpecial = 'add3d sUm $P3C!@l, ';
    const yesNumber = 'added some nums, ';

    // Starting char set initialized to the value of an empty string.
    let startingChars = '';
    // Scrambled chars initialized to the value of an empty string.
    let scrambledChars = '';

    /* _________________ Conditions & Processes ____________________ */

    // Main Conditional
    // If the user input does not meet the length requirement, we don't need to move forward.
    if (isNaN(userNumInput) || userNumInput < 8 || userNumInput > 128) {
        alert('Stop trying to break me!');
        return ''; // early return; otherwise an empty charset would be scrambled below
    }

    // User input meets requirements; variables are initialized to the user-selected booleans.
    let userSelectLower = confirm('Include lowercase values?');
    let userSelectUpper = confirm('How bout Uppercase?');
    let userSelectSpecial = confirm('Special Chars?');
    const userSelectNum = confirm('Numerical Vals as Well?');

    // If the user selects a charset, add it to startingChars & add the confirmation string to the final message.
    if (userSelectLower) {
        startingChars += alphabet;
        yesGenerated += yesLower;
    }
    if (userSelectUpper) {
        startingChars += alphabet.toUpperCase();
        yesGenerated += yesUpper;
    }
    if (userSelectSpecial) {
        startingChars += special;
        yesGenerated += yesSpecial;
    }
    if (userSelectNum) {
        startingChars += numbers;
        yesGenerated += yesNumber;
    }

    // Display the yesGenerated confirmation message.
    confirm(yesGenerated);

    // Main Functionality
    // 1. Fisher-Yates shuffle
    // Spread each character in the string of combined characters into an array.
    let arrayCharacters = [...startingChars];
    // i = length of the array of chars
    let i = arrayCharacters.length;
    // tempElement holds the value of a randomly selected element during the swap
    let tempElement;
    while (i-- > 0) {
        // On each iteration, select an index at random between 0 and i (inclusive)
        let generateIndex = Math.floor(Math.random() * (i + 1));
        // Swap the randomly selected element with the element at index i
        tempElement = arrayCharacters[generateIndex];
        arrayCharacters[generateIndex] = arrayCharacters[i];
        arrayCharacters[i] = tempElement;
    }

    // Outside of the loop, convert the scrambled array into a string value.
    scrambledChars = arrayCharacters.join('');
    // Slice the scrambledChars string beginning with index 0, and ending at userNumInput
    return scrambledChars.slice(0, userNumInput);
} // End generatePassword

// Assignment Code
var generateBtn = document.querySelector("#generate");

// Write password to the #password input
function writePassword() {
    // Generate a password and assign it to the value of the #password input.
    var password = generatePassword();
    var passwordText = document.querySelector("#password");
    passwordText.value = password;
}

// Add event listener to generate button
generateBtn.addEventListener("click", writePassword);
#!/bin/bash # jprobeit.sh # Wrapper script to help setup the jprobe(s) on a given file and function. # # Kaiwan N Billimoria # License: MIT # name=$(basename $0) source ./common.sh || { echo "$name: could not source common.sh , aborting..." exit 1 } ########### Functions follow ####################### # Function to validate passed as first parameter older-check_function() { FUNC=$1 if [ -z $FUNC ]; then echo echo "*** $name: function name null, aborting now.." exit 1 fi ShowTitle "[ Validate the to-be-Jprobed function ${FUNC} ]" # Attempt to find out if it's valid in the kernel. # In any case, if the function is invalid, it will be caught on the # register_jprobe(), which will then fail.. # What about embedded system which don't have either SYMLOC ?? if [ ! -f /proc/kallsyms ]; then if [ ! -f /boot/System.map-$(uname -r) ]; then echo echo "$name: WARNING! Both /proc/kallsyms and /boot/System.map-$(uname -r) not present!? [Possibly an embedded system]. So, we'll Not attempt to check validity of ${FUNC} right now; if invalid, it will be subsequently caught in the register_jprobe(). " return fi fi if [ -f /proc/kallsyms ]; then SYMLOC=/proc/kallsyms elif [ -f /boot/System.map-$(uname -r) ]; then SYMLOC=/boot/System.map-$(uname -r) fi grep "[tT] ${FUNC}" ${SYMLOC} || { echo echo "*** $name: FATAL: Symbol '${FUNC}' not found! [Either it's invalid -or- Could it be static or inline?]. Aborting..." exit 1 } } show_other_possibilities() { num=$(grep "[tT] .*${1}" ${SYMLOC} |wc -l) [ ${num} -ge 1 ] && { echo " Did you mean one of these functions?" grep "[tT] .*${1}" ${SYMLOC} } } # Function to validate passed as first parameter check_function() { FUNC=$1 if [ -z $FUNC ]; then echo echo "*** $name: function name null, aborting now.." exit 1 fi ShowTitle "[ Validate the to-be-Kprobed function ${FUNC} ]" # Attempt to find out if it's valid in the kernel. # Ultimately, if the function is invalid, it will be caught on the # register_kprobe(), which will then fail.. grep -w -q "${FUNC}" /sys/kernel/debug/kprobes/blacklist && { echo echo "*** $name: FATAL: Function '${FUNC}' cannot be probed, it's blacklisted. Aborting..." exit 1 } # Check for existance # What about embedded system which don't have either SYMLOC ?? if [ ! -f /proc/kallsyms ]; then if [ ! -f /boot/System.map-$(uname -r) ]; then echo echo "$name: WARNING! Both /proc/kallsyms and /boot/System.map-$(uname -r) not present!? [Possibly an embedded system]. So, we'll Not attempt to check validity of ${FUNC} right now; if invalid, it will be subsequently caught in the register_kprobe(). " return fi fi if [ -f /proc/kallsyms ]; then SYMLOC=/proc/kallsyms elif [ -f /boot/System.map-$(uname -r) ]; then SYMLOC=/boot/System.map-$(uname -r) fi grep -w "[tT] *${FUNC}" ${SYMLOC} || { echo echo "*** $name: FATAL: Symbol '${FUNC}' not found! [Either it's invalid -or- Could it be static or inline?]." show_other_possibilities ${FUNC} echo "Aborting..." exit 1 } num=$(grep -w "[tT] *${FUNC}" ${SYMLOC} |wc -l) [ ${num} -gt 1 ] && { echo echo "*** $name: FATAL: Symbol '${FUNC}' - multiple instances found! [Sorry, we currently do not handle this case...]" show_other_possibilities ${FUNC} echo "Aborting..." exit 1 } } # end check_function() setup_workspace() { ShowTitle "[ Setting up the Work Space ]" DESTFILE=jp_${FUNCTION}_$(date +%d%b%y).c BASEFOLDER=${TOPDIR}/work_jps JPDEST=jp_${FUNCTION}_$(date +%d%b%y) [ -d ${BASEFOLDER}/${JPDEST} ] && { echo echo "$name: !WARNING! Everything under ${BASEFOLDER}/${JPDEST} will now be OVERWRITTEN !!! 
Contents:" ls -l ${BASEFOLDER}/${JPDEST}/ echo "Press [Enter] to continue, ^C to abort..." read # Safety first! make a quick backup :-) mkdir -p ${BKP}/${JPDEST} cd ${BASEFOLDER}/${JPDEST}/ cp -af *.[chS] *.sh Makefile ${BKP}/${JPDEST}/ #cp ../../convenient.h . || exit 1 rm -f *.mod.c cd - } rm -rf ${BASEFOLDER}/${JPDEST} mkdir -p ${BASEFOLDER}/${JPDEST}/ cp -f ${SRC} ${BASEFOLDER}/${JPDEST}/${DESTFILE} export JPFILE=${BASEFOLDER}/${JPDEST}/${DESTFILE} [ ! -f ${JPFILE} ] && { echo "*** $name: Fatal: Final dest file not existing? Aborting..." exit 1 } #echo "+++++++++++++ check ++++++++++++++" #pwd #echo "chown ${ORIG_USER}:${ORIG_USER} ${BASEFOLDER}/${JPDEST}/*" #chown ${ORIG_USER}:${ORIG_USER} ${BASEFOLDER}/${JPDEST}/* cd #echo "JPFILE = ${JPFILE}" ls -l ${JPFILE} } buildit() { ## Jprobes use the mirror principle: the jprobe function handler MUST # have the identical signature of the func (being jprobed). ShowTitle " [ make -> LKM $JPMOD.ko ] " make || { echo "$name: failed to 'make'. Aborting..." cd .. exit 1 } echo " Build done!" ls -l $JPMOD.ko } # Function to jprobe passed as first parameter src_jprobe() { FUNC=$1 ShowTitle "[ Dynamically Updating source of ${DESTFILE} ]" #echo "JPFILE = $JPFILE" cd ${BASEFOLDER}/${JPDEST} #--- Source file update with sed ! # Insertions sed --in-place "2 a\ * ${DESTFILE} " ${JPFILE} #sed --in-place "3 c\ # * ${DESTFILE} " ${JPFILE} # get rid of the '.c' extension JPMOD=$(echo dummy |awk -v str=${DESTFILE} '{print substr(str, 1, length(str)-2)}') sed --in-place -e "s/\#define MYNAME xxx/\#define MYNAME \"${JPMOD}\"/" ${JPFILE} # Replace all occurences of 'xxx' in template src copy with $FUNC sed --in-place -e "s/xxx/${FUNC}/g" ${JPFILE} || { echo "$name: Fatal: sed failed to replace stuff." exit 1 } #--- Generate the Makefile cat > Makefile << @MYMARKER@ # Makefile for Jprobe kernel module # Dynamically generated by ${name} ! # For ${JPMOD}.c ifneq (\$(KERNELRELEASE),) EXTRA_CFLAGS += -DDEBUG -DUSE_FTRACE_PRINT obj-m := $JPMOD.o else ######################################### # To support cross-compiling for the ARM: # For ARM, invoke make as: # make ARCH=arm CROSS_COMPILE=arm-none-linux-gnueabi- ifeq (\$(ARCH),arm) # Update 'KDIR' below to point to the ARM Linux kernel source tree KDIR ?= ~/3.14.34 else KDIR ?= /lib/modules/\$(shell uname -r)/build endif ######################################### PWD := \$(shell pwd) default: \$(info Dynamic Makefile:) \$(info Building with KERNELRELEASE = \${KERNELRELEASE}) \$(MAKE) -C \$(KDIR) M=\$(PWD) modules endif clean: \$(MAKE) -C \$(KDIR) SUBDIRS=\$(PWD) clean @MYMARKER@ # adjust for your workspace #ln -s ../../../../../convenient.h ln -s ../../convenient.h ln -s ../../showlog.sh ShowTitle " [ Source update, Makefile generation done. ]" echo " $name: Currently, attempting a direct make (compile) fails. The reason is straight-forward: we must remember that the kernel module source file is a _template_ and not entirely working source. Jprobes use the mirror principle: the jprobe function handler MUST have the identical signature of the func (being jprobed). So, for now at least, this is as much as the script can do for you! Now: a) cd ${BASEFOLDER}/${JPDEST} b) sudo /bin/bash <-- need to work as root c) edit the source (${JPMOD}.c) d) make e) and try it out! 
:-) " #buildit cd ${TOPDIR} } ### "main" here ### check_root_AIA TOPDIR=$(pwd) SRC=${TOPDIR}/jp_helper_template.c BKP=${TOPDIR}/backup if [ $# -lt 1 ]; then echo "Usage: $name function_to_jprobe 1st param: {function_to_jprobe} : [REQUIRED] This is the name of the function to jprobe. " # echo "Usage: $name {new_jp_lkm_pathname.c} function_to_jprobe # # 1st param: {new_jp_lkm_pathname.c} : [REQUIRED] # This is the pathname of the copy of the jprobe 'template' source file # (the C source you will subsequently edit, writing your jprobe handler(s)). # exit 1 fi #DESTFILE=$1 FUNCTION=$1 check_function ${FUNCTION} setup_workspace src_jprobe ${FUNCTION} exit 0
#!/bin/bash
jekyll serve -D -H lianli > serve.log 2>&1
#!/usr/bin/env node import * as fs from "fs"; import * as util from "util"; import * as path from "path"; import * as FileChanges from "./FileChanges"; const fsExists = util.promisify(fs.exists); const fsWriteFile = util.promisify(fs.writeFile); const fsMakeDir = util.promisify(fs.mkdir); const fsReadFile = util.promisify(fs.readFile); const readdir = util.promisify(fs.readdir); const worker = require("./../Morphir.Elm.CLI").Elm.Morphir.Elm.CLI.init(); interface MorphirJson { name: string; sourceDirectory: string; exposedModules: string[]; } async function make( projectDir: string, options: any ): Promise<string | undefined> { // Morphir specific files expected to be in the project directory const morphirJsonPath: string = path.join(projectDir, "morphir.json"); const hashFilePath: string = path.join(projectDir, "morphir-hashes.json"); const morphirIrPath: string = path.join(projectDir, "morphir-ir.json"); // Load the `morphir.json` file that describes the project const morphirJson: MorphirJson = JSON.parse( (await fsReadFile(morphirJsonPath)).toString() ); // Check if there is an existing IR if ((await fsExists(morphirIrPath)) && (await fsExists(hashFilePath))) { const oldContentHashes = await readContentHashes(hashFilePath); const fileChanges = await FileChanges.detectChanges( oldContentHashes, path.join(projectDir, morphirJson.sourceDirectory) ); if (reportFileChangeStats(fileChanges)) { console.log( "There were file changes and there is an existing IR. Building incrementally." ); const previousIR: string = (await fsReadFile(morphirIrPath)).toString(); const updatedIR: string = await buildIncrementally( morphirJson, fileChanges, options, previousIR ); await writeContentHashes( hashFilePath, FileChanges.toContentHashes(fileChanges) ); return updatedIR; } else { console.log( "There were no file changes and there is an existing IR. No actions needed." ); } } else { console.log("There is no existing IR Or Hash file. 
Building from scratch."); // We invoke file change detection but pass in no hashes which will generate inserts only const fileChanges = await FileChanges.detectChanges( new Map(), path.join(projectDir, morphirJson.sourceDirectory) ); const fileSnapshot = FileChanges.toFileSnapshotJson(fileChanges); const newIR: string = await buildFromScratch( morphirJson, fileSnapshot, options ); await writeContentHashes( hashFilePath, FileChanges.toContentHashes(fileChanges) ); return newIR; } } async function buildFromScratch( morphirJson: any, fileSnapshot: { [index: string]: string }, options: any ): Promise<string> { return new Promise((resolve, reject) => { worker.ports.decodeFailed.subscribe((err: any) => { reject(err); }); worker.ports.buildFailed.subscribe((err: any) => { reject(err); }); worker.ports.reportProgress.subscribe((message: any) => { console.log(message); }); worker.ports.buildCompleted.subscribe(([err, ok]: any) => { if (err) { reject(err); } else { resolve(JSON.stringify(ok, null, 4)); } }); const opts = { typesOnly: options.typesOnly, }; worker.ports.buildFromScratch.send({ options: opts, packageInfo: morphirJson, fileSnapshot: fileSnapshot, }); }); } async function buildIncrementally( morphirJson: any, fileChanges: FileChanges.FileChanges, options: any, previousIR: string ): Promise<string> { return new Promise((resolve, reject) => { worker.ports.decodeFailed.subscribe((err: any) => { reject(err); }); worker.ports.buildFailed.subscribe((err: any) => { reject(err); }); worker.ports.reportProgress.subscribe((message: any) => { console.log(message); }); worker.ports.buildCompleted.subscribe(([err, ok]: any) => { if (err) { reject(err); } else { resolve(JSON.stringify(ok, null, 4)); } }); const opts = { typesOnly: options.typesOnly, }; let maybeDistribution = null; if (previousIR) { maybeDistribution = JSON.parse(previousIR); } worker.ports.buildIncrementally.send({ options: opts, packageInfo: morphirJson, fileChanges: FileChanges.toFileChangesJson(fileChanges), distribution: maybeDistribution, }); }); } /** * Read content hashes from a file. * * @param filePath file path to read hashes from * @returns map of hashes */ async function readContentHashes( filePath: string ): Promise<Map<FileChanges.Path, FileChanges.Hash>> { // Check if the file exists if (await fsExists(filePath)) { const contentHashesJson = JSON.parse( (await fsReadFile(filePath)).toString() ); const contentHashesMap: Map<FileChanges.Path, FileChanges.Hash> = new Map< FileChanges.Path, FileChanges.Hash >(); for (let path in contentHashesJson) { contentHashesMap.set(path, contentHashesJson[path]); } return contentHashesMap; } else { return new Map<FileChanges.Path, FileChanges.Hash>(); } } /** * Write content hashes into a file. 
* * @param filePath file path to read hashes from * @returns map of hashes */ async function writeContentHashes( filePath: string, hashes: Map<FileChanges.Path, FileChanges.Hash> ): Promise<void> { const jsonObject: { [index: string]: string } = {}; for (let [path, hash] of hashes) { jsonObject[path] = hash; } await writeFile(filePath, JSON.stringify(jsonObject, null, 4)); } function reportFileChangeStats(fileChanges: FileChanges.FileChanges): boolean { const stats: FileChanges.Stats = FileChanges.toStats(fileChanges); if (FileChanges.hasChanges(stats)) { const message = [ `- inserted: ${stats.inserted}`, `- updated: ${stats.updated}`, `- deleted: ${stats.deleted}`, `- unchanged: ${stats.unchanged}`, ].join("\n "); console.log(`The following file changes were detected:\n ${message}`); return true; } else { console.log(`No file changes were detected.`); return false; } } interface CommandOptions { modulesToInclude: string; targetVersion: string; } interface WorkerOptions { limitToModules?: string[]; } function mapCommandToWorkerOptions(options: CommandOptions): WorkerOptions { return { limitToModules: options.modulesToInclude ? options.modulesToInclude.split(",") : undefined, }; } const gen = async ( input: string, outputPath: string, options: CommandOptions ) => { await fsMakeDir(outputPath, { recursive: true, }); const morphirIrJson: Buffer = await fsReadFile(path.resolve(input)); const workerOptions: WorkerOptions = mapCommandToWorkerOptions(options); // opts.limitToModules = options.modulesToInclude ? options.modulesToInclude.split(',') : undefined const generatedFiles: string[] = await generate( workerOptions, JSON.parse(morphirIrJson.toString()) ); const writePromises = generatedFiles.map( async ([[dirPath, fileName], content]: any) => { const fileDir: string = dirPath.reduce( (accum: string, next: string) => path.join(accum, next), outputPath ); const filePath: string = path.join(fileDir, fileName); if (await fileExist(filePath)) { await fsWriteFile(filePath, content); console.log(`UPDATE - ${filePath}`); } else { await fsMakeDir(fileDir, { recursive: true, }); await fsWriteFile(filePath, content); console.log(`INSERT - ${filePath}`); } } ); const filesToDelete = await findFilesToDelete(outputPath, generatedFiles); const deletePromises = filesToDelete.map(async (fileToDelete: string) => { console.log(`DELETE - ${fileToDelete}`); return fs.unlinkSync(fileToDelete); }); copyRedistributables(options, outputPath); return Promise.all(writePromises.concat(deletePromises)); }; const generate = async ( options: WorkerOptions, ir: string ): Promise<string[]> => { return new Promise((resolve, reject) => { worker.ports.jsonDecodeError.subscribe((err: any) => { reject(err); }); worker.ports.generateResult.subscribe(([err, ok]: any) => { if (err) { reject(err); } else { resolve(ok); } }); worker.ports.generate.send([options, ir]); }); }; const fileExist = async (filePath: string) => { return new Promise((resolve, reject) => { fs.access(filePath, fs.constants.F_OK, (err) => { if (err) { resolve(false); } else { resolve(true); } }); }); }; const findFilesToDelete = async (outputPath: string, fileMap: string[]) => { const readDir = async function ( currentDir: string, generatedFiles: string[] ) { const entries: fs.Dirent[] = await readdir(currentDir, { withFileTypes: true, }); const filesToDelete = entries .filter((entry) => { const entryPath: string = path.join(currentDir, entry.name); return entry.isFile() && !generatedFiles.includes(entryPath); }) .map((entry) => path.join(currentDir, 
entry.name)); const subDirFilesToDelete: Promise<string[]> = entries .filter((entry) => entry.isDirectory()) .map((entry) => readDir(path.join(currentDir, entry.name), generatedFiles) ) .reduce(async (soFarPromise, nextPromise) => { const soFar = await soFarPromise; const next = await nextPromise; return soFar.concat(next); }, Promise.resolve([])); return filesToDelete.concat(await subDirFilesToDelete); }; const files = fileMap.map(([[dirPath, fileName], content]: any) => { const fileDir = dirPath.reduce( (accum: string, next: string) => path.join(accum, next), outputPath ); return path.resolve(fileDir, fileName); }); return Promise.all(await readDir(outputPath, files)); }; function copyRedistributables(options: CommandOptions, outputPath: string) { const copyFiles = (src: string, dest: string) => { const sourceDirectory: string = path.join( path.dirname(__dirname), "redistributable", src ); copyRecursiveSync(sourceDirectory, outputPath); }; copyFiles("Scala/sdk/src", outputPath); copyFiles(`Scala/sdk/src-${options.targetVersion}`, outputPath); } function copyRecursiveSync(src: string, dest: string) { const exists = fs.existsSync(src); if (exists) { const stats = exists && fs.statSync(src); const isDirectory = exists && stats.isDirectory(); if (isDirectory) { if (!fs.existsSync(dest)) fs.mkdirSync(dest); fs.readdirSync(src).forEach(function (childItemName) { copyRecursiveSync( path.join(src, childItemName), path.join(dest, childItemName) ); }); } else { fs.copyFileSync(src, dest); console.log(`COPY - ${dest}`); } } } async function writeFile(filePath: string, content: string) { await fsMakeDir(path.dirname(filePath), { recursive: true, }); return await fsWriteFile(filePath, content); } export = { make, writeFile, gen };
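// Usage sketch (not part of the original module): driving the exported
// functions from another script. The module path, project directory, output
// path and option values are illustrative assumptions; note the module uses
// `export =`, so the `import ... = require(...)` syntax applies without
// esModuleInterop.
import cli = require("./cli");

async function buildAndGenerate(): Promise<void> {
  // Build (or incrementally rebuild) the IR for a project directory that
  // contains a morphir.json.
  const ir = await cli.make("./my-project", { typesOnly: false });
  if (ir) {
    await cli.writeFile("./my-project/morphir-ir.json", ir);
  }

  // Generate Scala sources from the stored IR.
  await cli.gen("./my-project/morphir-ir.json", "./my-project/dist", {
    modulesToInclude: "",
    targetVersion: "2.13",
  });
}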
<gh_stars>0 $(document).ready(function(){ $("[name='approved']").bootstrapSwitch(); $('.button-approved').click(function(){ var id = $(this).find(':first-child').attr('alt'); $(this).removeClass('default'); $(this).addClass('green-jungle'); $(this).next().removeClass('red-thunderbird'); $(this).next().addClass('default'); $.ajax({ method: "GET", url: "/admin/edit-review/"+id+"/approved", success:function(data){ } }) }) $('.button-unapproved').click(function(){ var id = $(this).find(':first-child').attr('alt'); $(this).removeClass('default'); $(this).addClass('red-thunderbird'); $(this).prev().removeClass('green-jungle'); $(this).prev().addClass('default'); $.ajax({ method: "GET", url: "/admin/edit-review/"+id+"/unapproved", success:function(data){ } }) }) });
#!/usr/bin/env bash # The glyph to replace GLYPH="0u007e" # Diminished or not DIM="" # help function ricty_discord_pather_help() { echo "Usage: ricty_discord_patcher [options]" echo "" echo "Options:" echo " -h Display this information" echo " -d Patch to RictyDiminishedDiscord" echo " -g glyph The glyph to replace (default: 0u007e = ~)" exit 0 } while getopts hdg: OPT do case "${OPT}" in "h" ) ricty_discord_pather_help ;; "d" ) DIM="Diminished" ;; "g" ) GLYPH="${OPTARG}" ;; * ) exit 1 ;; esac done shift `expr $OPTIND - 1` # Path to fontforge command fontforge_command="/usr/bin/fontforge" # Check fontforge existance if [ ! which $fontforge_command > /dev/null 2>&1 ]; then echo "Error: ${fontforge_command} command not found" >&2 exit 1 fi # Check fonts existance weights="Regular Italic Bold BoldItalic" for weight in ${weights}; do if [ ! -f "IBMPlexMono-${weight}.ttf" ]; then echo "Error: IBMPlexMono-${weight}.ttf font not found" >&2 exit 2 fi done weights="Regular Oblique Bold BoldOblique" for weight in ${weights}; do if [ ! -f "RictyDiscord-${weight}.ttf" ]; then echo "Error: RictyDiscord-${weight}.ttf font not found" >&2 exit 3 fi done # Make temporary directory and trap signals tmpdir=`mktemp -d /tmp/ricty_discord_pather_tmpdir.XXXXXX` || exit 2 echo "tmpdir = ${tmpdir}" trap "if [ -d \"${tmpdir}\" ]; then echo 'Remove temporary files'; rm -rf ${tmpdir}; echo 'Abnormally terminated'; fi; exit 4" HUP INT QUIT trap "if [ -d \"${tmpdir}\" ]; then echo 'Remove temporary files'; rm -rf ${tmpdir}; echo 'Abnormally terminated'; fi" EXIT # Filenames extract_script="extract_glyph.pe" eliminate_script="eliminate_glyph.pe" merge_script="merge_rd_ipm.pe" # Generate script of extracting GLYPH from IBMPlexMono cat > ${tmpdir}/${extract_script} << _EOT_ #!${fontforge_command} -script Print("Extract GLYPH (${GLYPH}) from IBMPlexMono") inputs = ["IBMPlexMono-Regular.ttf", "IBMPlexMono-Italic.ttf", "IBMPlexMono-Bold.ttf", "IBMPlexMono-BoldItalic.ttf"] outputs = ["IPM-Regular.sfd", "IPM-Italic.sfd", "IPM-Bold.sfd", "IPM-BoldItalic.sfd"] i = 0 while (i < SizeOf(inputs)) Print("Open " + inputs[i]) Open(inputs[i]) SelectWorthOutputting() UnlinkReference() ScaleToEm(860, 140) Select(${GLYPH}); SelectInvert(); Clear() Select(${GLYPH}); Scale(90, 100); SetWidth(500) RoundToInt(); RemoveOverlap(); RoundToInt() SelectWorthOutputting() ClearInstrs() Print("Save " + outputs[i]) Save("${tmpdir}/" + outputs[i]) Close() i += 1 endloop Quit() _EOT_ # Generate script of eliminating GLYPH from RictyDiscord cat > ${tmpdir}/${eliminate_script} << _EOT_ #!${fontforge_command} -script Print("Eliminate GLYPH (${GLYPH}) from Ricty${DIM}Discord") inputs = ["Ricty${DIM}Discord-Regular.ttf", "Ricty${DIM}Discord-Oblique.ttf", "Ricty${DIM}Discord-Bold.ttf", "Ricty${DIM}Discord-BoldOblique.ttf"] outputs = ["R${DIM}D-Regular.sfd", "R${DIM}D-Oblique.sfd", "R${DIM}D-Bold.sfd", "R${DIM}D-BoldOblique.sfd"] i = 0 while (i < SizeOf(inputs)) Print("Open " + inputs[i]) Open(inputs[i]) SelectWorthOutputting() UnlinkReference() Select(${GLYPH}); Clear() SelectWorthOutputting() ClearInstrs() Print("Save " + outputs[i]) Save("${tmpdir}/" + outputs[i]) Close() i += 1 endloop Quit() _EOT_ # Generate script of merging cat > ${tmpdir}/${merge_script} << _EOT_ #!${fontforge_command} -script Print("Patch GLYPH from IBMPlexMono to Ricty${DIM}Discord") rds = ["${tmpdir}/R${DIM}D-Regular.sfd", "${tmpdir}/R${DIM}D-Oblique.sfd", "${tmpdir}/R${DIM}D-Bold.sfd", "${tmpdir}/R${DIM}D-BoldOblique.sfd"] ipms = ["${tmpdir}/IPM-Regular.sfd", 
"${tmpdir}/IPM-Italic.sfd", "${tmpdir}/IPM-Bold.sfd", "${tmpdir}/IPM-BoldItalic.sfd"] outputs = ["Ricty${DIM}Discord-Regular.ttf", "Ricty${DIM}Discord-Oblique.ttf", "Ricty${DIM}Discord-Bold.ttf", "Ricty${DIM}Discord-BoldOblique.ttf"] i = 0 while (i < SizeOf(rds)) Print("Merge " + ipms[i]:t + " to " + rds[i]:t) Open(rds[i]) MergeFonts(ipms[i]) Print("Generate " + outputs[i]) Generate(outputs[i], "", 0x84) Close() i += 1 endloop Quit() _EOT_ ${fontforge_command} -script ${tmpdir}/${extract_script} 2> /dev/null || exit 5 ${fontforge_command} -script ${tmpdir}/${eliminate_script} 2> /dev/null || exit 5 weights="Regular Oblique Bold BoldOblique" for weight in ${weights}; do if [ -f "RictyDiscord-${weight}.ttf" ]; then rm -f RictyDiscord-${weight}.ttf fi done ${fontforge_command} -script ${tmpdir}/${merge_script} 2> /dev/null || exit 5 rm -rf ${tmpdir} exit 0
package org.glowroot.instrumentation.mongodb;

import org.glowroot.instrumentation.api.Descriptor;
import org.glowroot.instrumentation.api.Descriptor.PropertyType;

@Descriptor(
        id = "mongodb",
        name = "MongoDB",
        properties = {
                @Descriptor.Property(
                        name = "stackTraceThresholdMillis",
                        type = PropertyType.DOUBLE,
                        defaultValue = {
                                @Descriptor.DefaultValue(doubleValue = 1000.0)
                        },
                        label = "Stack trace threshold (millis)",
                        description = "Any query that exceeds this threshold will have a stack trace captured and attached to it. An empty value will not collect any stack traces, a zero value will collect a stack trace for every query.")
        },
        classes = {
                MongoDbInstrumentation.class
        },
        collocate = true)
public class InstrumentationDescriptor {}
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.reduxInspector = f()}})(function(){var define,module,exports;return (function(){function r(e,n,t){function o(i,f){if(!n[i]){if(!e[i]){var c="function"==typeof require&&require;if(!f&&c)return c(i,!0);if(u)return u(i,!0);var a=new Error("Cannot find module '"+i+"'");throw a.code="MODULE_NOT_FOUND",a}var p=n[i]={exports:{}};e[i][0].call(p.exports,function(r){var n=e[i][1][r];return o(n||r)},p,p.exports,r,e,n,t)}return n[i].exports}for(var u="function"==typeof require&&require,i=0;i<t.length;i++)o(t[i]);return o}return r})()({1:[function(require,module,exports){ const WildEmitter = require("./lib/wildemitter"); const ReduxInspector = function (props) { if (!(this instanceof ReduxInspector)) return new ReduxInspector(arguments); if (!props) props = {}; this.delimiter = props.delimiter || "/"; Object.keys(WildEmitter.prototype).forEach((key) => { this[key] = WildEmitter.prototype[key]; }); }; ReduxInspector.prototype.watch = function (store) { if (!(this instanceof ReduxInspector)) return; if (store && store.subscribe) { this.store = store; if (this.store.getState) this.currentState = this.store.getState(); this.unwatch = store.subscribe(() => this.listen(this.store.getState())); } }; ReduxInspector.prototype.listen = function (newState) { if (!(this instanceof ReduxInspector)) return; const prevState = this.currentState; this.currentState = newState; checkKey({ key: this.delimiter, prevState, newState, self: this, }); }; const checkKey = function ({ key, prevState, newState, self }) { if ( newState instanceof Object && prevState instanceof Object && !(newState instanceof Array) && !(prevState instanceof Array) ) { Object.keys(newState).forEach((k) => { const event = `${key === self.delimiter ? 
"" : key}${self.delimiter}${k}`; checkKey({ key: event, prevState: prevState[k], newState: newState[k], self: self, }); }); } else if ( !(newState instanceof Object) || !(prevState instanceof Object) || newState instanceof Array || prevState instanceof Array ) { if ( (newState instanceof Array && prevState instanceof Array && newState.length === prevState.length) || prevState === newState ) { return; } // console.log(key, prevState, newState); self.emit(key, prevState, newState); } }; ReduxInspector.prototype.spyOn = function ( attributePath, reducerName, callback ) { if (!(this instanceof ReduxInspector)) return; let path, callBackFn; if (arguments.length == 2) { path = `${this.delimiter}${arguments[0]}`; callBackFn = arguments[1]; } else if (arguments.length === 3) { path = `${this.delimiter}${arguments[1]}${this.delimiter}${arguments[0]}`; callBackFn = arguments[2]; } else { return; } if (!path || !callBackFn) return; this.on(path, callBackFn); return this; }; ReduxInspector.prototype.spyOff = function (attributePath, reducerName) { if (!(this instanceof ReduxInspector)) return; if (arguments.length == 2) { const path = `${this.delimiter}${arguments[1]}${this.delimiter}${arguments[0]}`; if (!path) return; this.off(path); } else { return; } return this; }; ReduxInspector.prototype.author = "sanjairocky"; ReduxInspector.prototype.isReduxInspector = true; ReduxInspector.prototype.organization = "Sanazu"; module.exports = ReduxInspector; },{"./lib/wildemitter":2}],2:[function(require,module,exports){ function WildEmitter() {} WildEmitter.mixin = function (constructor) { var prototype = constructor.prototype || constructor; // Listen on the given `event` with `fn`. Store a group name if present. prototype.on = function (event, groupName, fn) { this.callbacks = this.callbacks || {}; var hasGroup = arguments.length === 3, group = hasGroup ? arguments[1] : undefined, func = hasGroup ? arguments[2] : arguments[1]; func._groupName = group; (this.callbacks[event] = this.callbacks[event] || []).push(func); return this; }; // Adds an `event` listener that will be invoked a single // time then automatically removed. prototype.once = function (event, groupName, fn) { var self = this, hasGroup = arguments.length === 3, group = hasGroup ? arguments[1] : undefined, func = hasGroup ? arguments[2] : arguments[1]; function on() { self.off(event, on); func.apply(this, arguments); } this.on(event, group, on); return this; }; // Unbinds an entire group prototype.releaseGroup = function (groupName) { this.callbacks = this.callbacks || {}; var item, i, len, handlers; for (item in this.callbacks) { handlers = this.callbacks[item]; for (i = 0, len = handlers.length; i < len; i++) { if (handlers[i]._groupName === groupName) { //console.log('removing'); // remove it and shorten the array we're looping through handlers.splice(i, 1); i--; len--; } } } return this; }; // Remove the given callback for `event` or all // registered callbacks. prototype.off = function (event, fn) { this.callbacks = this.callbacks || {}; var callbacks = this.callbacks[event], i; if (!callbacks) return this; // remove all handlers if (arguments.length === 1) { delete this.callbacks[event]; return this; } // remove specific handler i = callbacks.indexOf(fn); if (i !== -1) { callbacks.splice(i, 1); if (callbacks.length === 0) { delete this.callbacks[event]; } } return this; }; /// Emit `event` with the given args. 
// also calls any `*` handlers prototype.emit = function (event) { this.callbacks = this.callbacks || {}; var args = [].slice.call(arguments, 1), callbacks = this.callbacks[event], specialCallbacks = this.getWildcardCallbacks(event), i, len, item, listeners; if (callbacks) { listeners = callbacks.slice(); for (i = 0, len = listeners.length; i < len; ++i) { if (!listeners[i]) { break; } listeners[i].apply(this, args); } } if (specialCallbacks) { len = specialCallbacks.length; listeners = specialCallbacks.slice(); for (i = 0, len = listeners.length; i < len; ++i) { if (!listeners[i]) { break; } listeners[i].apply(this, [event].concat(args)); } } return this; }; // Helper for for finding special wildcard event handlers that match the event prototype.getWildcardCallbacks = function (eventName) { this.callbacks = this.callbacks || {}; var item, split, result = []; for (item in this.callbacks) { split = item.split("*"); if ( item === "*" || (split.length === 2 && eventName.slice(0, split[0].length) === split[0]) ) { result = result.concat(this.callbacks[item]); } } return result; }; }; WildEmitter.mixin(WildEmitter); module.exports = WildEmitter; },{}]},{},[1])(1) });
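// Usage sketch (not part of the original bundle): watching a redux store and
// spying on one slice of state. The package name, store shape, and the
// reducer/attribute names are illustrative assumptions.
const { createStore } = require("redux"); // assumes redux is installed
const reduxInspector = require("redux-inspector"); // hypothetical package name

const store = createStore((state = { user: { name: "" } }, action) =>
  action.type === "RENAME" ? { user: { name: action.name } } : state
);

const inspector = reduxInspector();
inspector.watch(store);

// Fires whenever state.user.name changes ("name" attribute of the "user" reducer).
inspector.spyOn("name", "user", (prev, next) => {
  console.log(`name changed: ${prev} -> ${next}`);
});

store.dispatch({ type: "RENAME", name: "Rocky" });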
/*
	Copyright 2017 IBM Corp.
	Licensed under the Apache License, Version 2.0 (the "License");
	you may not use this file except in compliance with the License.
	You may obtain a copy of the License at
	http://www.apache.org/licenses/LICENSE-2.0
	Unless required by applicable law or agreed to in writing, software
	distributed under the License is distributed on an "AS IS" BASIS,
	WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
	See the License for the specific language governing permissions and
	limitations under the License.
*/

package com.ibm.cloud.appid.android.internal;

import android.content.Context;

import com.ibm.cloud.appid.android.api.AppID;
import com.ibm.cloud.appid.android.internal.authorizationmanager.AuthorizationManager;
import com.ibm.cloud.appid.android.internal.preferences.PreferenceManager;
import com.ibm.cloud.appid.android.internal.registrationmanager.RegistrationManager;
import com.ibm.cloud.appid.android.internal.tokenmanager.TokenManager;

import java.util.Locale;

public class OAuthManager {

	private AppID appId;
	private PreferenceManager preferenceManager;
	private RegistrationManager registrationManager;
	private AuthorizationManager authorizationManager;
	private TokenManager tokenManager;

	public OAuthManager(Context ctx, AppID appId){
		this.appId = appId;
		this.preferenceManager = PreferenceManager.getDefaultPreferenceManager(ctx);
		this.registrationManager = new RegistrationManager(this);
		this.authorizationManager = new AuthorizationManager(this, ctx);
		this.tokenManager = new TokenManager(this);
	}

	public AppID getAppId () {
		return appId;
	}

	public PreferenceManager getPreferenceManager () {
		return preferenceManager;
	}

	public RegistrationManager getRegistrationManager () {
		return registrationManager;
	}

	public AuthorizationManager getAuthorizationManager () {
		return authorizationManager;
	}

	public TokenManager getTokenManager () {
		return tokenManager;
	}

	public void setPreferredLocale(Locale locale) {
		authorizationManager.setPreferredLocale(locale);
	}
}
// Copyright © 2019 <NAME> <<EMAIL>> // This file is part of GoatCounter and published under the terms of the EUPL // v1.2, which can be found in the LICENSE file or at http://eupl12.zgo.at package main import ( "bytes" "compress/gzip" "context" "fmt" "io" "io/ioutil" "net/http" "os" "strconv" "strings" "time" "zgo.at/errors" "zgo.at/goatcounter" "zgo.at/goatcounter/cfg" "zgo.at/goatcounter/handlers" "zgo.at/json" "zgo.at/zdb" "zgo.at/zli" "zgo.at/zlog" "zgo.at/zstd/zstring" ) const usageImport = ` Import pageviews from an export You must give one filename to import; use - to read from stdin: $ goatcounter import export.csv.gz This requires a running GoatCounter instance; it's a front-end for the API rather than a tool to modify the database directly. If you're running this on the same machine the data will be fetched from the DB and a temporary API key will be created. Or use an URL in -site if you want to send data to another instance: $ export GOATCOUNTER_API_KEY=[..] $ goatcounter import -site https://stats.example.com Flags: -db Database connection: "sqlite://<file>" or "postgres://<connect>" See "goatcounter help db" for detailed documentation. Default: sqlite://db/goatcounter.sqlite3?_busy_timeout=200&_journal_mode=wal&cache=shared Only needed if -site is not an URL. -debug Modules to debug, comma-separated or 'all' for all modules. -silent Don't show progress information. -site Site to import to, not needed if there is only one site, as an ID ("1"), code ("example"), or an URL ("https://stats.example.com"). You must set GOATCOUNTER_API_KEY if you use an URL. -format File format; currently accepted values: csv GoatCounter CSV export (default) Environment: GOATCOUNTER_API_KEY API key to use if you're connecting to a remote API; must have "count" permission. ` func importCmd() (int, error) { // So it uses https URLs in site.URL() // TODO: should fix it to always use https even on dev and get rid of the // exceptions. 
cfg.Prod = true dbConnect := flagDB() debug := flagDebug() var format, siteFlag string var silent bool CommandLine.StringVar(&siteFlag, "site", "", "") CommandLine.StringVar(&format, "format", "csv", "") CommandLine.BoolVar(&silent, "silent", false, "") err := CommandLine.Parse(os.Args[2:]) if err != nil { return 1, err } files := CommandLine.Args() if len(files) == 0 { return 1, fmt.Errorf("need a filename") } if len(files) > 1 { return 1, fmt.Errorf("can only specify one filename") } var fp io.ReadCloser if files[0] == "-" { fp = ioutil.NopCloser(os.Stdin) } else { file, err := os.Open(files[0]) if err != nil { return 1, err } defer file.Close() if strings.HasSuffix(files[0], ".gz") { fp, err = gzip.NewReader(file) if err != nil { return 1, errors.Errorf("could not read as gzip: %w", err) } } else { fp = file } defer fp.Close() } zlog.Config.SetDebug(*debug) url, key, clean, err := findSite(siteFlag, *dbConnect) if err != nil { return 1, err } if clean != nil { defer clean() } err = checkSite(url, key) if err != nil { return 1, err } url += "/api/v0/count" var n int switch format { default: return 1, fmt.Errorf("unknown -format value: %q", format) case "csv": n = 0 ctx := goatcounter.WithSite(context.Background(), &goatcounter.Site{}) hits := make([]handlers.APICountRequestHit, 0, 500) _, err = goatcounter.Import(ctx, fp, false, false, func(hit goatcounter.Hit, final bool) { if !final { hits = append(hits, handlers.APICountRequestHit{ Path: hit.Path, Title: hit.Title, Event: hit.Event, Ref: hit.Ref, Size: hit.Size, Bot: hit.Bot, UserAgent: hit.UserAgentHeader, Location: hit.Location, CreatedAt: hit.CreatedAt, Session: hit.Session.String(), }) } if len(hits) >= 500 || final { err := importSend(url, key, hits) if err != nil { fmt.Println() zli.Errorf(err) } n += len(hits) if !silent { zli.ReplaceLinef("Imported %d rows", n) } hits = make([]handlers.APICountRequestHit, 0, 500) } }) } if err != nil { var gErr *errors.Group if errors.As(err, &gErr) { return 1, fmt.Errorf("%d errors", gErr.Len()) } return 1, err } return 0, nil } var ( importClient = http.Client{Timeout: 5 * time.Second} nSent int ) func newRequest(method, url, key string, body io.Reader) (*http.Request, error) { r, err := http.NewRequest(method, url, body) if err != nil { return nil, err } r.Header.Set("Content-Type", "application/json") r.Header.Set("Authorization", "Bearer "+key) return r, nil } func importSend(url, key string, hits []handlers.APICountRequestHit) error { body, err := json.Marshal(handlers.APICountRequest{Hits: hits}) if err != nil { return err } r, err := newRequest("POST", url, key, bytes.NewReader(body)) if err != nil { return err } r.Header.Set("X-Goatcounter-Import", "yes") resp, err := importClient.Do(r) if err != nil { return err } defer resp.Body.Close() switch resp.StatusCode { case 202: // All okay! case 429: // Rate limit s, err := strconv.Atoi(resp.Header.Get("X-Rate-Limit-Reset")) if err != nil { return err } time.Sleep(time.Duration(s) * time.Second) // Other error default: b, _ := ioutil.ReadAll(resp.Body) return fmt.Errorf("%s: %s: %s", url, resp.Status, zstring.ElideLeft(string(b), 200)) } nSent += len(hits) // Give the server's memstore a second to do its job. 
if nSent > 5000 { time.Sleep(1 * time.Second) nSent = 0 } return nil } func findSite(siteFlag, dbConnect string) (string, string, func(), error) { var ( url, key string clean func() ) switch { case strings.HasPrefix(siteFlag, "http://") || strings.HasPrefix(siteFlag, "https://"): url = strings.TrimRight(siteFlag, "/") url = strings.TrimSuffix(url, "/api/v0/count") if !strings.HasPrefix(url, "http") { url = "https://" + url } key = os.Getenv("GOATCOUNTER_API_KEY") if key == "" { return "", "", nil, errors.New("GOATCOUNTER_API_KEY must be set") } default: db, err := connectDB(dbConnect, nil, false) if err != nil { return "", "", nil, err } defer db.Close() ctx := zdb.WithDB(context.Background(), db) var site goatcounter.Site siteID, intErr := strconv.ParseInt(siteFlag, 10, 64) switch { default: err = site.ByCode(ctx, siteFlag) case intErr != nil && siteID > 0: err = site.ByID(ctx, siteID) case siteFlag == "": var sites goatcounter.Sites err := sites.UnscopedList(ctx) if err != nil { return "", "", nil, err } switch len(sites) { case 0: return "", "", nil, fmt.Errorf("there are no sites in the database") case 1: site = sites[0] default: return "", "", nil, fmt.Errorf("more than one site: use -site to specify which site to import") } } if err != nil { return "", "", nil, err } ctx = goatcounter.WithSite(ctx, &site) var user goatcounter.User err = user.BySite(ctx, site.ID) if err != nil { return "", "", nil, err } ctx = goatcounter.WithUser(ctx, &user) token := goatcounter.APIToken{ SiteID: site.ID, Name: "goatcounter import", Permissions: goatcounter.APITokenPermissions{Count: true}, } err = token.Insert(ctx) if err != nil { return "", "", nil, err } url = site.URL() key = token.Token clean = func() { token.Delete(ctx) } } return url, key, clean, nil } // Verify that the site is live and that we've got the correct permissions. func checkSite(url, key string) error { r, err := newRequest("GET", url+"/api/v0/me", key, nil) if err != nil { return err } resp, err := importClient.Do(r) if err != nil { return err } defer resp.Body.Close() b, _ := ioutil.ReadAll(resp.Body) if resp.StatusCode != 200 { return fmt.Errorf("%s: %s: %s", url+"/api/v0/me", resp.Status, zstring.ElideLeft(string(b), 200)) } var perm struct { Token goatcounter.APIToken `json:"token"` } err = json.Unmarshal(b, &perm) if err != nil { return err } if !perm.Token.Permissions.Count { return fmt.Errorf("the API token %q is missing the 'count' permission", perm.Token.Name) } return nil }
#!/bin/sh

SCRIPT="$0"

while [ -h "$SCRIPT" ] ; do
  ls=`ls -ld "$SCRIPT"`
  link=`expr "$ls" : '.*-> \(.*\)$'`
  if expr "$link" : '/.*' > /dev/null; then
    SCRIPT="$link"
  else
    SCRIPT=`dirname "$SCRIPT"`/"$link"
  fi
done

if [ ! -d "${APP_DIR}" ]; then
  APP_DIR=`dirname "$SCRIPT"`/..
  APP_DIR=`cd "${APP_DIR}"; pwd`
fi

executable="./modules/swagger-codegen-cli/target/swagger-codegen-cli.jar"

if [ ! -f "$executable" ]
then
  mvn clean package
fi

# if you've executed sbt assembly previously it will use that instead.
export JAVA_OPTS="${JAVA_OPTS} -XX:MaxPermSize=256M -Xmx1024M -DloggerPath=conf/log4j.properties"
ags="$@ generate -i modules/swagger-codegen/src/test/resources/2_0/petstore.yaml -l csharp-dotnet2 -o samples/client/petstore/csharp-dotnet2/SwaggerClientTest/Lib/SwaggerClient --additional-properties hideGenerationTimestamp=true"

java $JAVA_OPTS -jar $executable $ags
from CCSAmongUs import routes

def handle_player_action(player_id, action, action_details):
    if action == "move":
        target_location = action_details.get("target_location")
        routes.move_player(player_id, target_location)
    elif action == "interact":
        target_player_id = action_details.get("target_player_id")
        routes.interact_with_player(player_id, target_player_id)
    elif action == "report":
        routes.report_incident(player_id)
    else:
        raise ValueError("Invalid action type")
    updated_game_state = routes.get_game_state()
    return updated_game_state
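# Usage sketch (not part of the original module): dispatching one action.
# The player id and coordinates below are illustrative assumptions.
if __name__ == "__main__":
    state = handle_player_action(
        player_id=7,
        action="move",
        action_details={"target_location": (12, 4)},
    )
    print(state)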
#include "Core.h"

//This constructor runs the Core AI.
Core::Core(int count)
{
	//Variables
	KeyGenerator kg;
	AIs.push_back(new ArtificialIntelligence());
	key = kg.generateKey(binary(count));
	updatedKey = key;

	//Processes
	while (updatedKey == key)
	{
		vector<bool> maxNetwork;
		for (int i = 0; i < AIs.size(); i++)
		{
			maxNetwork.push_back(false);
		}

		/* --- Check the number of neural networks each AI has; an AI is
		   full once it holds 16 neural networks. --- */
		vector<int> numNetworks;
		for (int i = 0; i < AIs.size(); i++)
		{
			numNetworks.push_back(AIs[i]->networkDetails.size());
			if (numNetworks[i] == 16)
			{
				maxNetwork[i] = true;
			}
		}

		// All AIs are full only if every entry in maxNetwork is true.
		bool allMaxNetwork = true;
		for (int i = 0; i < maxNetwork.size(); i++)
		{
			if (!maxNetwork[i])
			{
				allMaxNetwork = false;
				break;
			}
		}

		/* --- If every AI is full and there is room, add a new AI to this
		   core; otherwise create a subcore to hold further AIs. --- */
		if (AIs.size() < 16 && allMaxNetwork == true)
		{
			AIs.push_back(new ArtificialIntelligence());
		}
		else if (AIs.size() + 1 == 16 && subcore.size() < 16)
		{
			// Assumption: subcores continue the id sequence with count + 1;
			// the original called Core() without the required int argument.
			subcore.push_back(new Core(count + 1));
		}
	}
}

//This method creates a binary string to generate an id for the AIs and cores.
string Core::binary(int n)
{
	if (n == 0)
	{
		return "0";
	}

	string binaryVal;
	while (n != 0)
	{
		// Prepend so the most significant bit comes first.
		binaryVal = (n % 2 == 0 ? "0" : "1") + binaryVal;
		n /= 2;
	}
	return binaryVal;
}
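// A standalone sketch of the corrected binary conversion above, runnable
// without Core.h (a hypothetical harness, not part of the project). Prepending
// each bit keeps the most significant bit first, e.g. 6 -> "110".
#include <iostream>
#include <string>

static std::string toBinary(int n) {
    if (n == 0) return "0";
    std::string out;
    while (n != 0) {
        out = (n % 2 == 0 ? "0" : "1") + out;
        n /= 2;
    }
    return out;
}

int main() {
    std::cout << toBinary(6) << "\n";  // prints "110"
    std::cout << toBinary(0) << "\n";  // prints "0"
    return 0;
}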
package org.hisp.dhis.sms.config;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import javax.xml.bind.Unmarshaller;

import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

/*
 * Copyright (c) 2011, University of Oslo
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * * Redistributions of source code must retain the above copyright notice, this
 * list of conditions and the following disclaimer.
 *
 * * Redistributions in binary form must reproduce the above copyright notice,
 * this list of conditions and the following disclaimer in the documentation
 * and/or other materials provided with the distribution.
 *
 * * Neither the name of the HISP project nor the names of its contributors may
 * be used to endorse or promote products derived from this software without
 * specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
 * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

public class GenericHttpGatewayConfigTest
{
    private String urlTemplate = "http://bulksms.vsms.net:5567/eapi/submission/send_sms/2/2.0?username={username}&password={password}&source_id={sender}&message={message}&msisdn={recipient}";

    private String bulk = "<bulksms><name>bulk</name><username>username</username><password>password</password></bulksms>";

    private String click = "<clickatell><name>click</name><username>storset</username><password><PASSWORD></password><apiId>3304014</apiId></clickatell>";

    private JAXBContext context;

    private String urlString = "<urlTemplate>http://bulksms.vsms.net:5567/eapi/submission/send_sms/2/2.0?username={username}&amp;password={password}&amp;source_id={sender}&amp;message={message}&amp;msisdn={recipient}</urlTemplate>";

    private String http = "<http><name>http</name>" + urlString + "<parameters>"
        + "<parameter key=\"username\" value=\"storset\" /><parameter key=\"password\" value=\"<PASSWORD>\" />"
        + "</parameters>" + "</http>";

    @Before
    public void setup()
        throws JAXBException
    {
        context = JAXBContext.newInstance( SmsConfiguration.class );
    }

    @Test
    @Ignore
    public void testMarshalling()
        throws IOException, JAXBException
    {
        Writer writer = new StringWriter();

        Map<String, String> parameters = new HashMap<String, String>();
        parameters.put( "username", "u1" );
        parameters.put( "password", "p1" );
        parameters.put( "sender", "s1" );

        SmsGatewayConfig config = new GenericHttpGatewayConfig( urlTemplate, parameters );

        SmsConfiguration smsConfiguration = new SmsConfiguration();
        smsConfiguration.setGateways( Collections.singletonList( config ) );

        Marshaller marshaller = context.createMarshaller();
        marshaller.setProperty( Marshaller.JAXB_FORMATTED_OUTPUT, true );
        marshaller.marshal( smsConfiguration, writer );
        writer.flush();

        assertTrue( writer.toString().contains( "<parameter key=\"username\" value=\"u1\"" ) );
    }

    @Test
    public void testUnmarshalling()
        throws JAXBException
    {
        String xml = "<smsConfiguration xmlns=\"http://dhis2.org/schema/dxf/2.0\"><enabled>true</enabled><longNumber>DHIS2</longNumber>";
        xml += "<gateways>" + bulk + click + http + "</gateways></smsConfiguration>";

        Unmarshaller unmarshaller = context.createUnmarshaller();
        SmsConfiguration config = (SmsConfiguration) unmarshaller.unmarshal( new StringReader( xml ) );

        assertNotNull( config );

        List<SmsGatewayConfig> gateways = config.getGateways();
        assertNotNull( gateways );
        assertEquals( 3, gateways.size() );

        assertTrue( ((GenericHttpGatewayConfig) gateways.get( 2 )).getUrlTemplate()
            .contains( "http://bulksms.vsms.net:5567/eapi/submission/send_sms/2/2.0" ) );
    }
}
#!/bin/bash -e

echo "Copy failed. Kill container."
# pkill matches and signals in one step; `|| true` keeps `-e` from aborting
# the script when no cron process is running (the original `kill -9 $(pgrep
# -f cron)` errors out when pgrep finds nothing).
pkill -9 -f cron || true
python train/train.py \ test-stl-nw-noprofit-R \ --experiment-name=test-stl-nw-noprofit-R \ --num-env-steps=1600000000 \ --algo=ppo \ --use-gae \ --lr=2.5e-4 \ --clip-param=0.2 \ --value-loss-coef=0.5 \ --num-envs=800 \ --num-actors=8 \ --num-splits=2 \ --eval-num-processes=50 \ --num-steps=500 \ --num-mini-batch=4 \ --log-interval=1 \ --save-interval=32 \ --eval-interval=1000 \ --use-linear-lr-decay \ --popart-reward \ --entropy-coef=0.01 \ --gamma=0.999 \ --queue-size=5 \ --reuse=4
#!/bin/bash

if [[ $# -ne 1 ]]; then
    echo "Usage: $0 <restart|no-restart>"
    exit 1
fi

echo "Starting developer docker container"

SCRIPTPATH="$( cd "$(dirname "$0")" ; pwd -P )"

IMG=codabuilder:latest

MYUID=$(id -u)
MYGID=$(id -g)

DOCKERNAME="codabuilder-$MYUID"

if [[ $1 == "restart" ]]; then
  # Test the pipeline directly; the original `if $(docker ps | grep -q ...)`
  # would try to execute grep's (empty) output instead of its exit status.
  if docker ps | grep -q "$IMG"; then
    echo "Stopping previous dev container"
    docker ps -q --filter "name=$DOCKERNAME" | grep -q . && docker stop "$DOCKERNAME" > /dev/null
  fi

  # Delete prior container if it's been stopped, but not deleted
  docker rm -fv "$DOCKERNAME" > /dev/null 2>&1 || true

  echo "Starting new dev container - $DOCKERNAME"
  NAME=$(docker run \
    --volume "$SCRIPTPATH/..":/home/opam/app \
    --user "$MYUID:$MYGID" \
    --name "$DOCKERNAME" \
    --detach \
    --tty \
    --interactive \
    "$IMG" \
    sleep infinity)
else
  NAME=$(docker ps -q --filter "name=$DOCKERNAME")
  echo "Container ${NAME} already running."
fi
/* NaLanGen: Natural Language Generation tool:
 * It contains tools to generate texts in many languages
 * --------------------------------------------------------------------
 * Copyright (C) 2015 <NAME> (<EMAIL>)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package kariminf.langpi.wordnet;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import edu.mit.jwi.Dictionary;
import edu.mit.jwi.IDictionary;
import edu.mit.jwi.item.IIndexWord;
import edu.mit.jwi.item.ISynset;
import edu.mit.jwi.item.IWord;
import edu.mit.jwi.item.IWordID;
import edu.mit.jwi.item.POS;
import edu.mit.jwi.item.WordID;

public class JWIRequestor implements WNRequestor {

	private IDictionary dict;

	protected JWIRequestor(IDictionary dict) {
		this.dict = dict;
	}

	public static JWIRequestor create(String wordnetPath) {
		File file = new File(wordnetPath);
		// construct the dictionary object and open it
		IDictionary dict = new Dictionary(file);
		return new JWIRequestor(dict);
	}

	@Override
	public String getWord(int synset, String pos) {
		try {
			dict.open();
		} catch (IOException e) {
			return "";
		}
		IWordID wordID1 = new WordID(synset, POS.valueOf(pos), 1);
		IWord word1 = dict.getWord(wordID1);
		String lemma = word1.getLemma();
		dict.close();
		return lemma;
	}

	@Override
	public List<String> getWords(int synset, String pos) {
		ArrayList<String> words = new ArrayList<String>();
		try {
			dict.open();
		} catch (IOException e) {
			return words;
		}
		IWordID wordID1 = new WordID(synset, POS.valueOf(pos), 1);
		IWord word1 = dict.getWord(wordID1);
		ISynset isynset = word1.getSynset();
		for (IWord word : isynset.getWords())
			words.add(word.getLemma());
		// Close the dictionary before returning (the original leaked it here).
		dict.close();
		return words;
	}

	@Override
	public int getSynset(String word, String pos, boolean caseSensitive) {
		try {
			dict.open();
		} catch (IOException e) {
			return -1;
		}
		IIndexWord idxWord = dict.getIndexWord(word, POS.valueOf(pos));
		IWordID wordID = idxWord.getWordIDs().get(0);
		int offset = wordID.getSynsetID().getOffset();
		// Close the dictionary before returning (the original leaked it here).
		dict.close();
		return offset;
	}

	public int getLexFileNumber(int synset, String pos) {
		try {
			dict.open();
		} catch (IOException e) {
			return -1;
		}
		IWordID wordID1 = new WordID(synset, POS.valueOf(pos), 1);
		IWord word1 = dict.getWord(wordID1);
		int number = word1.getSynset().getLexicalFile().getNumber();
		dict.close();
		return number;
	}
}
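// A hedged usage sketch for JWIRequestor above (hypothetical demo class, not
// part of the project). The WordNet path is an assumption and must point at a
// local WordNet "dict" directory; "NOUN" matches JWI's POS enum constant names.
public class JWIRequestorDemo {
	public static void main(String[] args) {
		JWIRequestor requestor = JWIRequestor.create("/usr/share/wordnet/dict");
		int synset = requestor.getSynset("dog", "NOUN", false);
		System.out.println(requestor.getWords(synset, "NOUN"));
	}
}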
#!/usr/bin/env bash # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. docker-compose ps -q | xargs -n1 -I CONTAINER docker exec CONTAINER ps xa
#!/usr/bin/env python3

def hash_function(text):
    # Sum the code points of all characters, then bucket into [0, 100).
    out = 0
    for c in text:
        out += ord(c)
    return out % 100

print(hash_function("hello"))  # -> 32
print(hash_function("world"))  # -> 52
print(hash_function("!"))      # -> 33
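# A quick property check (sketch): summing code points ignores character
# order, so any two anagrams collide. This is a toy hash for illustration,
# not something to use for real hash tables.
print(hash_function("listen") == hash_function("silent"))  # -> True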
import { Injectable } from '@nestjs/common'; import { CreateTweetDto } from './dto/create-tweet.dto'; @Injectable() export class TweetsService { create(createTweetDto: CreateTweetDto) { console.log(createTweetDto.message); return 'This action adds a new tweet'; } }
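// A hedged sketch of the controller that would call TweetsService above; the
// route path, DTO shape, and module wiring are assumptions, not taken from
// the project.
import { Body, Controller, Post } from '@nestjs/common';
import { TweetsService } from './tweets.service';
import { CreateTweetDto } from './dto/create-tweet.dto';

@Controller('tweets')
export class TweetsController {
  constructor(private readonly tweetsService: TweetsService) {}

  @Post()
  create(@Body() createTweetDto: CreateTweetDto) {
    return this.tweetsService.create(createTweetDto);
  }
}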
#!/bin/bash

# Regenerate mocks: iterate over every *mock.go file in the sibling packages.
for LQN in $(ls ../*/*mock.go); do
    # Extract the package directory name from the file path
    DIR=$(echo ${LQN} | awk -F/ '{print $2}')

    # Remove the '_mock' suffix from the file name to get the source file path
    SRC=$(echo ${LQN} | sed 's/\_mock//')

    # Use mockgen to regenerate the mock for the Go interface in SRC
    mockgen -source ${SRC} -package=${DIR} -destination=${LQN}
done
SELECT * FROM products WHERE category = 'clothing';
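-- A hedged follow-up: if this filter runs often, the standard fix is an index
-- on the filtered column. Table and column names come from the query above;
-- the index name is an assumption.
CREATE INDEX idx_products_category ON products (category);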