Dataset schema (column: type, value range):

  blob_id             stringlengths   40 .. 40
  language            stringclasses   1 value
  repo_name           stringlengths   4 .. 115
  path                stringlengths   2 .. 970
  src_encoding        stringclasses   28 values
  length_bytes        int64           31 .. 5.38M
  score               float64         2.52 .. 5.28
  int_score           int64           3 .. 5
  detected_licenses   listlengths     0 .. 161
  license_type        stringclasses   2 values
  text                stringlengths   31 .. 5.39M
  download_success    bool            1 class
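The column list above is the full record layout for the rows that follow (one Shell source file per row, plus its repository, path, license and quality-score metadata). As a minimal sketch of how a dump with this schema could be consumed, assuming the Hugging Face `datasets` library and a placeholder dataset identifier (the real dataset name is not stated anywhere in this dump):

from datasets import load_dataset

# Placeholder dataset id -- the actual dataset name is not given in this dump.
ds = load_dataset("some-org/shell-scripts-scored", split="train", streaming=True)

# Keep rows that match the schema above: Shell sources that downloaded
# successfully and reached an int_score of at least 3.
rows = (r for r in ds
        if r["language"] == "Shell"
        and r["download_success"]
        and r["int_score"] >= 3)

for r in rows:
    print(r["repo_name"], r["path"], r["length_bytes"], r["score"])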
a6c62fb8a3797c32e684fda2f08e7edf10979198
Shell
ashortland/toolchain-example5
/booking-application-server/tests/cleanup-1-test.sh
UTF-8
415
2.859375
3
[]
no_license
#!/usr/bin/env roundup
#
# This file contains the test plan for the cleanup command.
# Execute the plan by invoking:
#
#     rerun stubbs:test -m booking-application-server -p cleanup
#
# Helpers
#
[[ -f ./functions.sh ]] && . ./functions.sh

# The Plan
# --------
describe "cleanup"

it_works_without_arguments() {
    rerun booking-application-server:remove
    rerun booking-application-server:cleanup
}
true
d065294c5ac8859f76132e04be8c85506c198ca9
Shell
MezaDios/Practica-2-Bash
/3.- Usuarios.sh
UTF-8
513
3.640625
4
[]
no_license
#!/bin/sh
usuario='null'

# Loop that keeps asking for a user name until 'fin' is entered
while [ "$usuario" != 'fin' ]; do
    echo "Introduzca un usuario (introduzca fin si quiere finalizar el programa):"
    # Read a user name
    read usuario
    # Look the name up in /etc/passwd using grep
    if grep $usuario /etc/passwd; then
        echo "El usuario $usuario si existe"
    else
        echo "El usuario $usuario no existe"
    fi
done
true
dc5caeb7b3e5fc63b98c788ddb105af43705bd95
Shell
openconnectivity/IOTivity-setup
/install_DeviceBuilder.sh
UTF-8
4,892
3.265625
3
[ "Apache-2.0" ]
permissive
#!/bin/bash
set -x #echo on

#############################
#
# copyright 2018 Open Connectivity Foundation, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#############################

CURPWD=`pwd`

# path of the code
code_path=OCFDeviceBuilder

# linux pi
# default
ARCH=`uname -m`
echo "using architecture: $ARCH"

cd ..

# clone the repo
git clone https://github.com/openconnectivityfoundation/DeviceBuilder.git
# get the initial example
cp DeviceBuilder/DeviceBuilderInputFormat-file-examples/input-lightdevice.json example.json
# clone the iotivity cbor conversion tool
git clone https://github.com/alshafi/iotivity-tool.git
# install the python libraries that are needed for iotivity-tool
cd iotivity-tool
pip3 install -U -r requirements.txt

# create the initial security file and place it in the code directory.
cd $CURPWD
sh svr2cbor.sh tocbor
cd ..

# create the generation script
echo "#!/bin/bash" > gen.sh
echo "cd DeviceBuilder" >> gen.sh
echo "sh ./DeviceBuilder_C++IotivityServer.sh ../example.json ../device_output \"oic.d.light\"" >> gen.sh
echo "cd .." >> gen.sh
echo "# copying source code to compile location" >> gen.sh
echo "cp ./device_output/code/server.cpp ./iotivity/examples/${code_path}/server.cpp " >> gen.sh
echo "# making executable folder" >> gen.sh
echo "mkdir -p ./iotivity/out/linux/${ARCH}/release/examples/${code_path} >/dev/null 2>&1" >> gen.sh
echo "# copying the introspection file to the executable folder" >> gen.sh
echo "cp ./device_output/code/server_introspection.dat ./iotivity/out/linux/${ARCH}/release/examples/${code_path}/server_introspection.dat" >> gen.sh
echo "# quick fix: using the iotivity supplied oic_svr_db_server_justworks.dat file" >> gen.sh
# working copy line of clarke
# copying the file so that reset.sh works
#cp ~/iot/iotivity/resource/csdk/security/provisioning/sample/oic_svr_db_server_justworks.dat ~/iot/device_output/code/server_security.dat
echo "cp ./iotivity/resource/csdk/security/provisioning/sample/oic_svr_db_server_justworks.dat ./device_output/code/server_security.dat"
# working copy line from clarke :
# cp ~/IOT/iotivity/resource/csdk/security/provisioning/sample/oic_svr_db_server_justworks.dat ~/IOT/iotivity/out/linux/armv7l/release/examples/OCFDeviceBuilder/server_security.dat
echo "cp ./iotivity/resource/csdk/security/provisioning/sample/oic_svr_db_server_justworks.dat ./iotivity/out/linux/${ARCH}/release/examples/${code_path}/server_security.dat" >> gen.sh
#echo "cp ./device_output/code/server_security.dat ./iotivity/out/linux/${ARCH}/release/examples/${code_path}/server_security.dat" >> gen.sh

# create the build script
echo "#!/bin/bash" > build.sh
echo "cd iotivity" >> build.sh
echo "#uncomment next line for building without security" >> build.sh
echo "#scons examples/${code_path} SECURED=0" >> build.sh
echo "scons examples/${code_path}" >> build.sh
echo "cd .." >> build.sh

# create the edit code script
echo "#!/bin/bash" > edit_code.sh
echo "nano ./iotivity/examples/${code_path}/server.cpp" >> edit_code.sh

# create the edit input script
echo "#!/bin/bash" > edit_input.sh
echo "nano ./example.json" >> edit_input.sh

# create the run script
echo "#!/bin/bash"> run.sh
echo 'CURPWD=`pwd`'>> run.sh
#echo 'CURPWD=$(pwd -P)'>> run.sh
echo "env LD_LIBRARY_PATH=${CURPWD}/mraa/build/src" >> run.sh
echo "sudo ldconfig" >> run.sh
echo "cd ./iotivity/out/linux/${ARCH}/release/examples/${code_path}" >> run.sh
echo "pwd" >> run.sh
echo "ls" >> run.sh
echo "./server" >> run.sh
echo 'cd $CURPWD' >> run.sh

# create the reset script
echo "#!/bin/bash"> reset.sh
echo "mkdir -p ./iotivity/out/linux/${ARCH}/release/examples/${code_path} >/dev/null 2>&1" >> reset.sh
echo "rm -f ./iotivity/out/linux/${ARCH}/release/examples/${code_path}/server_security.dat" >> reset.sh
echo "#cp ./device_output/code/server_security.dat ./iotivity/out/linux/${ARCH}/release/examples/${code_path}/server_security.dat" >> reset.sh
echo "cp ./iotivity/resource/csdk/security/provisioning/sample/oic_svr_db_server_justworks.dat ./iotivity/out/linux/${ARCH}/release/examples/${code_path}/server_security.dat" >> reset.sh

cd $CURPWD

echo "making the example directory"
mkdir -p ../iotivity/examples/${code_path}

# add the build file
cp ./SConscript ../iotivity/examples/${code_path}/SConscript
# add the build dir
cp ./SConstruct ../iotivity/.

chmod a+x ../*.sh
true
23c741d5b30cd9916e7d95a3fb7bb599f265f04f
Shell
Akshithaaashi/Shell-Programming
/Array/randomDigi.sh
UTF-8
510
3.5
4
[]
no_license
#!/bin/bash -x
for ((var=0; var<=10; var++))
do
    randomNumbers[var]=$(($((RANDOM%900))+100))
done
echo ${randomNumbers[@]}

max=0
secondmax=0
min=1000
secondmin=1000
for i in ${randomNumbers[@]}
do
    if [ $i -gt $max ]
    then
        secondmax=$max
        max=$i
    fi
    if [[ $max -gt $i && $i -gt $secondmax ]]
    then
        secondmax=$i
    fi
    if [ $i -lt $min ]
    then
        secondmin=$min
        min=$i
    fi
    if [[ $i -lt $secondmin && $i -ne $min ]]
    then
        secondmin=$i
    fi
done
echo second maximum element is $secondmax
echo second minimum element is $secondmin
true
a610701eddd122a27aa5743f0bcbbd1ea2059cdf
Shell
mozyg/build
/optware/openvpn/control/postinst
UTF-8
399
2.90625
3
[]
no_license
#!/bin/sh

APPID=mobi.optware.openvpn

# Symlink files into /opt
cd $IPKG_OFFLINE_ROOT/usr/palm/applications/$APPID/opt
find sbin -type d -exec mkdir -p /opt/{} \;
find sbin -type f -exec ln -sf $IPKG_OFFLINE_ROOT/usr/palm/applications/$APPID/opt/{} /opt/{} \;

rm -f /etc/event.d/$APPID
mkdir -p /etc/event.d/
cp $IPKG_OFFLINE_ROOT/etc/event.d/$APPID /etc/event.d/$APPID

/sbin/start $APPID

exit 0
true
2240db880994cecf864bf44472e75bca8215e47b
Shell
solenick/smv
/Build/dem2fds/data/test1.sh
UTF-8
273
2.796875
3
[ "NIST-Software" ]
permissive
#!/bin/bash
option=$1 $2
if [ "$option" == "" ]; then
    option=-obst
fi
#dem2fds=dem2fds
dem2fds=../intel_linux_64/dem2fds_linux_64
terraindir=~/terrain
$dem2fds $option -geom -dir $terraindir/gatlinburg test1.in
$dem2fds $option -dir $terraindir/gatlinburg test2.in
true
e33e7de14ff61a4b7269381798b304b6ff59382f
Shell
At252/mergify-engine
/entrypoint.sh
UTF-8
454
3.578125
4
[ "Apache-2.0" ]
permissive
#!/bin/bash

cd /app

get_command() {
    sed -n -e "s/^$1://p" Procfile
}

# Default to "aio" when no mode argument is given.
MODE=${1:-aio}

if [ "$MERGIFYENGINE_INTEGRATION_ID" ]; then
    case ${MODE} in
        web|worker) exec $(get_command $1);;
        aio) exec honcho start;;
        *) echo "usage: $0 (web|worker|aio)";;
    esac
elif [ "$MERGIFYENGINE_INSTALLER" ]; then
    exec honcho -f installer/Procfile start
else
    echo "MERGIFYENGINE_INTEGRATION_ID or MERGIFYENGINE_INSTALLER must be set"
fi

exit 1
true
dbb07710a2807a48dbd5c2c12a22f4c311b2ef60
Shell
hugojosefson/ubuntu-install-scripts
/.bash_aliases
UTF-8
1,875
3.171875
3
[]
no_license
# vi:syntax=bash
# .bash_aliases

alias ba='vim ~/.bash_aliases'
alias .ba='. ~/.bash_aliases'

# node
alias nn='nodeversion=$(cat package.json | jq -r .engines.node); nvm install $nodeversion'

# tmuxinator
alias mux=tmuxinator
alias m=mux
alias mm='mux mux'
_tmuxinator() {
  COMPREPLY=()
  local word
  word="${COMP_WORDS[COMP_CWORD]}"

  if [ "$COMP_CWORD" -eq 1 ]; then
    local commands="$(compgen -W "$(tmuxinator commands)" -- "$word")"
    local projects="$(compgen -W "$(tmuxinator completions start)" -- "$word")"
    COMPREPLY=( $commands $projects )
  elif [ "$COMP_CWORD" -eq 2 ]; then
    local words
    words=("${COMP_WORDS[@]}")
    unset words[0]
    unset words[$COMP_CWORD]
    local completions
    completions=$(tmuxinator completions "${words[@]}")
    COMPREPLY=( $(compgen -W "$completions" -- "$word") )
  fi
}
complete -F _tmuxinator m

# Make
alias mp='make package'

# Git
. /usr/share/bash-completion/completions/git
alias gk='gitk --all &>/dev/null &'
__git_complete g __git_main
function g() {
  local cmd=${1-status}
  shift
  git $cmd "$@"
}
__git_complete gf _git_fetch
function gf() {
  git fetch --all --prune "$@"
}
__git_complete gc _git_commit
function gc() {
  git commit "$@"
}
function gg() {
  git commit -m "$*"
}
__git_complete gd _git_diff
function gd() {
  git diff "$@"
}
__git_complete ga _git_add
function ga() {
  git add --all "$@"
}
__git_complete gp _git_push
function gp() {
  git push "$@"
}
__git_complete gpl _git_pull
function gpl() {
  git pull "$@"
}

# Docker
function d() {
  local cmd=${1-ps}
  shift
  docker $cmd "$@"
}

# npm
which npm > /dev/null && . <(npm completion)

# pbcopy / pbpaste
alias pbcopy='xsel --clipboard --input'
alias pbpaste='xsel --clipboard --output'

# temp directory
alias t='cd $(mktemp -d)'
true
17100af55f3f553861f20623cda32eb7b0bdbd22
Shell
vikasTmz/TshirtWARP
/tshirt.sh
UTF-8
17,492
4.0625
4
[]
no_license
#!/bin/bash
#Originally Developed by Fred Weinhaus

coords=""          # coordinates of corners clockwise from top left
region=""          # WxH+X+Y area on background to place overlay image
fit="none"         # crop or distort the image to fit vertical aspect ratio of region
gravity="center"   # gravity for cropping
vshift=0           # vertical shift of crop region
offset=""          # additional x,y offsets of region or coordinates
rotate=0           # additional rotation for use only with region
lighting=20        # contrast increase for highlights; integer
blur=1             # blurring/smoothing of displacement image to reduce texture; float
displace=10        # amount of displacement for distortions; integer
sharpen=1          # sharpening of warped overlay image; float
antialias=2        # antialias amount to apply to alpha channel of tshirt image; float
export="no"        # export lighting image, displacement map and other arguments

# set directory for temporary files
tmpdir="/tmp"

# set up functions to report Usage and Usage with Description
PROGNAME=`type $0 | awk '{print $3}'`   # search for executable on path
PROGDIR=`dirname $PROGNAME`             # extract directory of program
PROGNAME=`basename $PROGNAME`           # base name of program

usage1()
{
	echo >&2 ""
	echo >&2 "$PROGNAME:" "$@"
	sed >&2 -e '1,/^####/d; /^###/g; /^#/!q; s/^#//; s/^ //; 4,$p' "$PROGDIR/$PROGNAME"
}

usage2()
{
	echo >&2 ""
	echo >&2 "$PROGNAME:" "$@"
	sed >&2 -e '1,/^####/d; /^######/g; /^#/!q; s/^#*//; s/^ //; 4,$p' "$PROGDIR/$PROGNAME"
}

# function to report error messages
errMsg()
{
	echo ""
	echo $1
	echo ""
	usage1
	exit 1
}

# function to test for minus at start of value of second part of option 1 or 2
checkMinus()
{
	test=`echo "$1" | grep -c '^-.*$'`   # returns 1 if match; 0 otherwise
	[ $test -eq 1 ] && errMsg "$errorMsg"
}

# test for correct number of arguments and get values
if [ $# -eq 0 ]
	then
	# help information
	echo ""
	usage2
	exit 0
elif [ $# -gt 27 ]
	then
	errMsg "--- TOO MANY ARGUMENTS WERE PROVIDED ---"
else
	while [ $# -gt 0 ]
		do
		# get parameter values
		case "$1" in
			-help|-h)    # help information
				echo ""
				usage2
				exit 0
				;;
			-r)    # get region
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID REGION SPECIFICATION ---"
				checkMinus "$1"
				region=`expr "$1" : '\([0-9]*[x][0-9]*[+][0-9]*[+][0-9]*\)'`
				[ "$blur" = "" ] && errMsg "--- REGION=$region MUST BE NON-NEGATIVE INTEGERS OF THE FORM WxH+X+Y ---"
				;;
			-c)    # get coords
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID COORDS SPECIFICATION ---"
				checkMinus "$1"
				coords=`expr "$1" : '\([ ,0-9]*\)'`
				[ "$coords" = "" ] && errMsg "--- COORDS=$coords MUST BE 4 SPACE SEPARATED INTEGER X,Y PAIRS ---"
				;;
			-f)    # fit
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID FIT SPECIFICATION ---"
				checkMinus "$1"
				# test gravity values
				fit="$1"
				fit=`echo "$1" | tr '[A-Z]' '[a-z]'`
				case "$fit" in
					none|n) fit="none" ;;
					crop|c) fit="crop" ;;
					distort|d) fit="distort" ;;
					*) errMsg "--- FIT=$fit IS AN INVALID VALUE ---"
				esac
				;;
			-g)    # gravity
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID GRAVITY SPECIFICATION ---"
				checkMinus "$1"
				# test gravity values
				gravity="$1"
				gravity=`echo "$1" | tr '[A-Z]' '[a-z]'`
				case "$gravity" in
					center|c) gravity=center ;;
					north|n) gravity=north ;;
					south|s) gravity=south ;;
					*) errMsg "--- GRAVITY=$gravity IS AN INVALID VALUE ---"
				esac
				;;
			-v)    # get vshift
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID VSHIFT SPECIFICATION ---"
				# checkMinus "$1"
				vshift=`expr "$1" : '\([-]*[0-9]*\)'`
				[ "$vshift" = "" ] && errMsg "--- VSHIFT=$vshift MUST BE AN INTEGER ---"
				;;
			-o)    # get offset
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID OFFSET SPECIFICATION ---"
				# checkMinus "$1"
				offset=`expr "$1" : '\([-]*[0-9]*,[-]*[0-9]*\)'`
				[ "$offset" = "" ] && errMsg "--- OFFSET=$offset MUST BE ONE INTEGER X,Y PAIR ---"
				;;
			-R)    # get rotate
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID ROTATE SPECIFICATION ---"
				# checkMinus "$1"
				rotate=`expr "$1" : '\([-]*[.0-9]*\)'`
				[ "$rotate" = "" ] && errMsg "--- ROTATE=$rotate MUST BE A NON-NEGATIVE FLOAT ---"
				test1=`echo "$rotate < -360" | bc`
				test2=`echo "$rotate > 360" | bc`
				[ $test1 -eq 1 -o $test2 -eq 1 ] && errMsg "--- ROTATE=$rotate MUST BE AN INTEGER BETWEEN -360 AND 360 ---"
				;;
			-b)    # get blur
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID BLUR SPECIFICATION ---"
				checkMinus "$1"
				blur=`expr "$1" : '\([.0-9]*\)'`
				[ "$blur" = "" ] && errMsg "--- BLUR=$blur MUST BE A NON-NEGATIVE FLOAT ---"
				;;
			-s)    # get sharpen
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID SHARPEN SPECIFICATION ---"
				checkMinus "$1"
				sharpen=`expr "$1" : '\([.0-9]*\)'`
				[ "$sharpen" = "" ] && errMsg "--- SHARPEN=$sharpen MUST BE A NON-NEGATIVE FLOAT ---"
				;;
			-b)    # get blur
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID BLUR SPECIFICATION ---"
				checkMinus "$1"
				blur=`expr "$1" : '\([.0-9]*\)'`
				[ "$blur" = "" ] && errMsg "--- BLUR=$blur MUST BE A NON-NEGATIVE FLOAT ---"
				;;
			-a)    # get antialias
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID ANTIALIAS SPECIFICATION ---"
				checkMinus "$1"
				antialias=`expr "$1" : '\([.0-9]*\)'`
				[ "$antialias" = "" ] && errMsg "--- ANTIALIAS=$antialias MUST BE A NON-NEGATIVE FLOAT ---"
				;;
			-l)    # get lighting
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID LIGHTING SPECIFICATION ---"
				checkMinus "$1"
				lighting=`expr "$1" : '\([0-9]*\)'`
				[ "$lighting" = "" ] && errMsg "--- LIGHTING=$lighting MUST BE A NON-NEGATIVE INTEGER ---"
				test1=`echo "$lighting < 0" | bc`
				test2=`echo "$lighting > 30" | bc`
				[ $test1 -eq 1 -o $test2 -eq 1 ] && errMsg "--- LIGHTING=$lighting MUST BE AN INTEGER BETWEEN 0 AND 30 ---"
				;;
			-d)    # get displace
				shift  # to get the next parameter
				# test if parameter starts with minus sign
				errorMsg="--- INVALID DISPLACE SPECIFICATION ---"
				checkMinus "$1"
				displace=`expr "$1" : '\([0-9]*\)'`
				[ "$displace" = "" ] && errMsg "--- DISPLACE=$displace MUST BE A NON-NEGATIVE INTEGER ---"
				;;
			-E)    # get export
				export="yes"
				;;
			-)     # STDIN and end of arguments
				break
				;;
			-*)    # any other - argument
				errMsg "--- UNKNOWN OPTION ---"
				;;
			*)     # end of arguments
				break
				;;
		esac
		shift   # next option
	done
	#
	# get infile and outfile
	infile="$1"
	bgfile="$2"
	outfile="$3"
fi

# test that infile provided
[ "$infile" = "" ] && errMsg "NO OVERLAY FILE SPECIFIED"

# test that bgfile provided
[ "$bgfile" = "" ] && errMsg "NO BACKGROUND (TSHIRT) FILE SPECIFIED"

# test that outfile provided
[ "$outfile" = "" ] && errMsg "NO OUTPUT FILE SPECIFIED"

dir="$tmpdir/TSHIRT.$$"
mkdir "$dir" || {
	echo >&2 "UNABLE TO CREATE WORKING DIR \"$dir\" -- ABORTING"
	exit 10
}
trap "rm -rf $dir;" 0
trap "rm -rf $dir; exit 1" 1 2 3 10 15
trap "rm -rf $dir; exit 1" ERR

# read overlay image
if ! convert -quiet "$infile" +repage $dir/tmpI.mpc; then
	errMsg "--- FILE $infile DOES NOT EXIST OR IS NOT AN ORDINARY FILE, NOT READABLE OR HAS ZERO SIZE ---"
fi

# read tshirt image
if ! convert -quiet "$bgfile" +repage $dir/tmpT.mpc; then
	errMsg "--- FILE $infile DOES NOT EXIST OR IS NOT AN ORDINARY FILE, NOT READABLE OR HAS ZERO SIZE ---"
fi

# extract coordinates of subsection of tshirt and bounding box
if [ "$coords" = "" -a "$region" = "" ]; then
	errMsg "--- EITHER COORDINATES OR REGION IS REQUIRED ---"
elif [ "$coords" != "" ]; then
	clist=`echo $coords | sed 's/[, ][, ]*/ /g';`
	test=`echo $clist | wc -w | tr -d " "`
	if [ $test -eq 8 ]; then
		x1=`echo $clist | cut -d\  -f1`
		y1=`echo $clist | cut -d\  -f2`
		x2=`echo $clist | cut -d\  -f3`
		y2=`echo $clist | cut -d\  -f4`
		x3=`echo $clist | cut -d\  -f5`
		y3=`echo $clist | cut -d\  -f6`
		x4=`echo $clist | cut -d\  -f7`
		y4=`echo $clist | cut -d\  -f8`
		if [ "$offset" != "" ]; then
			xx=`echo "$offset" | cut -d, -f1`
			yy=`echo "$offset" | cut -d, -f2`
			x1=$((x1+xx))
			y1=$((y1+yy))
			x2=$((x2+xx))
			y2=$((y2+yy))
			x3=$((x3+xx))
			y3=$((y3+yy))
			x4=$((x4+xx))
			y4=$((y4+yy))
		fi
		#echo "$x1,$y1; $x2,$y2; $x3,$y3; $x4,$y4;"

		# get bounding box
		minx=`convert xc: -format "%[fx:min(min(min($x1,$x2),$x3),$x4)]" info:`
		miny=`convert xc: -format "%[fx:min(min(min($y1,$y2),$y3),$y4)]" info:`
		maxx=`convert xc: -format "%[fx:max(max(max($x1,$x2),$x3),$x4)]" info:`
		maxy=`convert xc: -format "%[fx:max(max(max($y1,$y2),$y3),$y4)]" info:`
		wd=`convert xc: -format "%[fx:$maxx-$minx+1]" info:`
		ht=`convert xc: -format "%[fx:$maxy-$miny+1]" info:`
		#echo "minx=$minx; miny=$miny; maxx=$maxx; maxy=$maxy; wd=$wd, ht=$ht;"

		# compute offsets, topwidth and correction rotation angle
		xoffset=$x1
		yoffset=$y1
		topwidth=`convert xc: -format "%[fx:hypot(($x2-$x1),($y2-$y1))+1]" info:`
		angle=`convert xc: -format "%[fx:-atan2(($y2-$y1),($x2-$x1))]" info:`
		#echo "xoffset=$xoffset; yoffset=$yoffset; topwidth=$topwidth; angle=$angle;"
	else
		errMsg "--- INCONSISTENT NUMBER OF COORDINATES ---"
	fi
elif [ "$region" != "" ]; then
	region=`echo "$region" | tr -d " " | tr -cs "0-9\n" " "`
	wd=`echo "$region" | cut -d\  -f1`
	ht=`echo "$region" | cut -d\  -f2`
	minx=`echo "$region" | cut -d\  -f3`
	miny=`echo "$region" | cut -d\  -f4`
	#echo "minx=$minx; miny=$miny; wd=$wd, ht=$ht;"
	if [ "$offset" != "" ]; then
		xx=`echo "$offset" | cut -d, -f1`
		yy=`echo "$offset" | cut -d, -f2`
		minx=$((minx+xx))
		miny=$((miny+yy))
	fi
	xoffset=$minx
	yoffset=$miny
	topwidth=$wd
	angle=0
	#echo "xoffset=$xoffset; yoffset=$yoffset; topwidth=$topwidth; angle=$angle;"
	x1=$minx
	y1=$miny
	x2=$((minx+$wd-1))
	y2=$miny
	x3=$((minx+$wd-1))
	y3=$((miny+$ht-1))
	x4=$minx
	y4=$((miny+$ht-1))
	#echo "$x1,$y1; $x2,$y2; $x3,$y3; $x4,$y4;"
	#echo "xoffset=$xoffset; yoffset=$yoffset; topwidth=$topwidth; angle=$angle;"
fi

# get width of overlay image and compute xscale
ww=`convert -ping $dir/tmpI.mpc -format "%w" info:`
hh=`convert -ping $dir/tmpI.mpc -format "%h" info:`
scale=`convert xc: -format "%[fx:($ww-1)/($topwidth-1)]" info:`
#echo "scale=$scale;"

# compute corresponding coordinates in overlay image
if [ "$coords" != "" ]; then
	# subtract offset and unrotate
	xo1=`convert xc: -format "%[fx:round(($x1-$xoffset)*cos($angle)+($y1-$yoffset)*sin($angle))]" info:`
	yo1=`convert xc: -format "%[fx:round(($x1-$xoffset)*sin($angle)+($y1-$yoffset)*cos($angle))]" info:`
	xo2=`convert xc: -format "%[fx:round(($x2-$xoffset)*cos($angle)+($y2-$yoffset)*sin($angle))]" info:`
	yo2=`convert xc: -format "%[fx:round(($x2-$xoffset)*sin($angle)+($y2-$yoffset)*cos($angle))]" info:`
	xo3=`convert xc: -format "%[fx:round(($x3-$xoffset)*cos($angle)+($y3-$yoffset)*sin($angle))]" info:`
	yo3=`convert xc: -format "%[fx:round(($x3-$xoffset)*sin($angle)+($y3-$yoffset)*cos($angle))]" info:`
	xo4=`convert xc: -format "%[fx:round(($x4-$xoffset)*cos($angle)+($y4-$yoffset)*sin($angle))]" info:`
	yo4=`convert xc: -format "%[fx:round(($x4-$xoffset)*sin($angle)+($y4-$yoffset)*cos($angle))]" info:`
	# compute max height
	ho=`convert xc: -format "%[fx:max($yo4-$yo1,$yo3-$yo2)+1]" info:`
	#echo "ho=$ho;"
	xo1=0
	yo1=0
	xo2=$((ww-1))
	yo2=0
	xo3=$((ww-1))
	if [ "$fit" = "distort" ]; then
		yo3=$((hh-1))
	else
		yo3=`convert xc: -format "%[fx:(round($scale*($ho-1)))]" info:`
	fi
	xo4=0
	yo4=$yo3
elif [ "$region" != "" ]; then
	# use input width and scaled height of region for overlay coordinates
	xo1=0
	yo1=0
	xo2=$((ww-1))
	yo2=0
	xo3=$((ww-1))
	if [ "$fit" = "distort" ]; then
		yo3=$((hh-1))
	else
		yo3=`convert xc: -format "%[fx:(round($scale*($ht-1)))]" info:`
	fi
	xo4=0
	yo4=$yo3
fi
#echo "$xo1,$yo1; $xo2,$yo2; $xo3,$yo3; $xo4,$yo4;"

# apply rotation about center of scaled down image translated to correct upper left corner
if [ "$rotate" != "0" ]; then
	rotate=`convert xc: -format "%[fx:(pi/180)*$rotate]" info:`
	xcent=`convert xc: -format "%[fx:round(0.5*$topwidth)+$x1]" info:`
	ycent=`convert xc: -format "%[fx:round(0.5*($hh/$scale)+$y1)]" info:`
	# echo "rotate=$rotate; xcent=$xcent; ycent=$ycent"
	x1=`convert xc: -format "%[fx:round($xcent+($x1-$xcent)*cos($rotate)-($y1-$ycent)*sin($rotate))]" info:`
	y1=`convert xc: -format "%[fx:round($ycent+($x1-$xcent)*sin($rotate)+($y1-$ycent)*cos($rotate))]" info:`
	x2=`convert xc: -format "%[fx:round($xcent+($x2-$xcent)*cos($rotate)-($y2-$ycent)*sin($rotate))]" info:`
	y2=`convert xc: -format "%[fx:round($ycent+($x2-$xcent)*sin($rotate)+($y2-$ycent)*cos($rotate))]" info:`
	x3=`convert xc: -format "%[fx:round($xcent+($x3-$xcent)*cos($rotate)-($y3-$ycent)*sin($rotate))]" info:`
	y3=`convert xc: -format "%[fx:round($ycent+($x3-$xcent)*sin($rotate)+($y3-$ycent)*cos($rotate))]" info:`
	x4=`convert xc: -format "%[fx:round($xcent+($x4-$xcent)*cos($rotate)-($y4-$ycent)*sin($rotate))]" info:`
	y4=`convert xc: -format "%[fx:round($ycent+($x4-$xcent)*sin($rotate)+($y4-$ycent)*cos($rotate))]" info:`
	# echo "$x1,$y1; $x2,$y2; $x3,$y3; $x4,$y4;"
fi

# test if tshirt/bgfile has alpha. If so remove and save for later.
is_alpha=`identify -verbose $dir/tmpT.mpc | grep "Alpha" | head -n 1`
[ "$is_alpha" != "" ] && convert $dir/tmpT.mpc -alpha extract -blur 0x$antialias -level 50x100% $dir/tmpA.mpc

# convert tshirt/bgfile to grayscale and compute amount to add/subtract to change mean to 50%
# use bounding box before any additional rotation as good approximation of region
diff=`convert $dir/tmpT.mpc -alpha off -colorspace gray -write $dir/tmpTG.mpc \
	-crop ${wd}x${ht}+${minx}+${miny} +repage -format "%[fx:100*mean-50]" info:`
#echo "diff=$diff"

# set up lighting
if [ "$lighting" != "0" ]; then
	cont=`convert xc: -format "%[fx:$lighting/3]" info:`
	lproc="-sigmoidal-contrast $cont,50%"
else
	lproc=""
fi

# set up blurring of the displacement image to soften texture
if [ "$blur" != "0" ]; then
	bproc="-blur 0x$blur"
else
	bproc=""
fi

# convert grayscale tshirt to 50% mean, then to lighting image and displacement image
convert \( $dir/tmpTG.mpc -evaluate subtract $diff% \) \
	\( -clone 0 $lproc -write $dir/tmpL.mpc \) +delete \
	$bproc $dir/tmpD.mpc

if [ "$export" = "yes" ]; then
	# save distortion map
	convert $dir/tmpL.mpc lighting.png
	convert $dir/tmpD.mpc -alpha set displace.png
fi

# set up sharpening of overlay image in perspective
if [ "$sharpen" != "0" ]; then
	sproc="-unsharp 0x$sharpen -clamp"
else
	sproc=""
fi

# set up cropping
cropping=""
if [ "$fit" = "crop" ]; then
	hc=$((yo3+1))
	test=`convert xc: -format "%[fx:($hh>$hc)?1:0]" info:`
	if [ $test -eq 1 ]; then
		cropping="-gravity $gravity -crop ${ww}x${hc}+0+0 +repage"
	fi
fi
#echo "hh=$hh; hc=$hc; cropping=$cropping"

# line2 1-4: process overlay image to perspective transform with transparent
# background the size of the tshirt image and sharpen
# lines 5-7: apply lighting image and make tshirt background from lighting image
# transparent using alpha of previous steps
# lines 8-10: apply displacement image
convert -respect-parenthesis \( $dir/tmpTG.mpc -alpha transparent \) \
	\( $dir/tmpI.mpc $cropping -virtual-pixel none +distort perspective \
	"$xo1,$yo1 $x1,$y1 $xo2,$yo2 $x2,$y2 $xo3,$yo3 $x3,$y3 $xo4,$yo4 $x4,$y4" $sproc \) \
	-background none -layers merge +repage \
	\
	\( -clone 0 -alpha extract \) \
	\( -clone 0 $dir/tmpL.mpc -compose hardlight -composite \) \
	-delete 0 +swap -compose over -alpha off -compose copy_opacity -composite \
	\
	$dir/tmpD.mpc \
	-define compose:args=-$displace,-$displace -compose displace -composite \
	$dir/tmpITD.mpc

# composite distorted overlay onto tshirt
if [ "$is_alpha" != "" ]; then
	convert $dir/tmpT.mpc $dir/tmpITD.mpc -compose over -composite \
		$dir/tmpA.mpc -alpha off -compose copy_opacity -composite "$outfile"
else
	convert $dir/tmpT.mpc $dir/tmpITD.mpc -compose over -composite "$outfile"
fi

if [ "$export" = "yes" ]; then
	# show arguments
	echo "coordinates=\"$xo1,$yo1 $x1,$y1 $xo2,$yo2 $x2,$y2 $xo3,$yo3 $x3,$y3 $xo4,$yo4 $x4,$y4\""
	echo "sharpen=\"$sharpen\""
	echo "displace=\"$displace\""
fi

exit 0
true
d2cf72f55172d40094abeabad39d7ff63ec11dac
Shell
jcfr/ninja
/bootstrap.sh
UTF-8
995
2.828125
3
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
#!/bin/bash
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

set -e

cat >config.ninja <<EOT
# This file is generated by bootstrap.sh.
conf_cflags = -O2
conf_ldflags = -s
# When developing:
# conf_cflags = -g -Wall
# conf_ldlags =
EOT

echo "Building ninja manually..."
srcs=$(ls src/*.cc | grep -v test)
g++ -Wno-deprecated -o ninja.bootstrap $srcs

echo "Building ninja using itself..."
./ninja.bootstrap ninja
rm ninja.bootstrap

echo "Done!"
true
9d09c3813f82a1fd543b9768d7b35db92ca0947c
Shell
muesli/dotfiles
/setup_arch.sh
UTF-8
595
3.21875
3
[]
no_license
#!/bin/bash
#
# Copyright (c) 2017-2018, Christian Muehlhaeuser <muesli@gmail.com>
#
#   For license see LICENSE
#

set -e

if [[ "$FLAVOR" == "router" ]]; then
    FLAVOR="_${FLAVOR}"
elif [[ "$FLAVOR" == "minimal" ]]; then
    FLAVOR="_${FLAVOR}"
elif [[ "$FLAVOR" != "" ]]; then
    # Unknown
    echo "Sorry, but flavor '$FLAVOR' is unknown!"
    exit 1
fi

# Install packages
./packages/arch${FLAVOR}/packages.sh

# Install drivers
#./packages/arch${FLAVOR}/drivers/bluetooth.sh
#./packages/arch${FLAVOR}/drivers/smartcard.sh
#./packages/arch${FLAVOR}/drivers/intel.sh
#./packages/arch${FLAVOR}/drivers/razer.sh
true
f7550f93cfdc0b93e60dbb69d6f16c614819839a
Shell
petronny/aur3-mirror
/python-osmgpsmap-git/PKGBUILD
UTF-8
1,016
3.0625
3
[]
no_license
# Maintainer: Dominik Heidler <dheidler@gmail.com>
pkgname=python-osmgpsmap-git
pkgver=20121231
pkgrel=2
pkgdesc="Python bindings for osm-gps-map"
arch=('i686' 'x86_64')
url="http://nzjrs.github.com/osm-gps-map/"
license=('GPL')
depends=('python2' 'pygobject' 'pygtk' 'osm-gps-map-git')
provides=('python-osmgpsmap')
source=()
md5sums=()

_gitroot="git://github.com/nzjrs/osm-gps-map.git"
_gitname="osm-gps-map"

build() {
  cd "$srcdir"
  msg "Connecting to GIT server...."

  if [ -d $_gitname ] ; then
    cd $_gitname && git pull origin
    msg "The local files are updated."
  else
    git clone $_gitroot $_gitname
  fi

  msg "GIT checkout done or server timeout"

  cd "$srcdir/$_gitname/python"

  # fix for py2k
  find -name "*.py" | xargs sed -i "s|#!/usr/bin/env python$|#!/usr/bin/env python2|"

  msg "Starting make..."
  autoreconf --force --install
  # I hate automake
  sed -i "s|python python2|python2|" configure
  ./configure --prefix=/usr
  make
}

package() {
  cd "$srcdir/$_gitname/python"
  make DESTDIR=$pkgdir install
}
true
8e7efe92818eb9843b77324a2ec0da7917469c92
Shell
krzysztofkramarz/bash
/javadoc.sh
UTF-8
1,081
3.4375
3
[]
no_license
#!/bin/bash
# The program checks how many *.java files there are in the given directory, how many of them have public methods,
# and how many of them are covered by javadoc. The statistics are displayed.

# cd /media/krzysztof/JavaAcademy/Akademia_Javy/Kod/javadoc_test
cd $1

let WITHOUT=0
let TOTAL=0

for d in $( find -readable -executable -type d )
do
    for file in $d/*.java
    do
        if [ -f $file ] && [ -r $file ] && [ -s $file ]
        then
            let TOTAL++
            javadoc=$( grep '\/\*\*' $file )
            public=$( grep 'public[[:space:]]*[class|interface]' $file )
            if [ -n "$public" ] && [ -z "$javadoc" ]
            then
                echo "Klasa: $file nie ma javadoca!"
                let WITHOUT++;
            fi
        fi
    done
done

echo "Total $TOTAL"
echo "WITHOUT $WITHOUT"
let " WYNIK = 100 * ( $TOTAL - $WITHOUT) / $TOTAL "
echo "Pokrycie: $WYNIK %"

# \/\*\*(\n.*)*\*\/\n*\s*public\s*(class|interface)
# grep -re '\/\*\*([[:cntrl:]].*)*\*\/[[:cntrl:]]*[[:space:]]*public[[:space:]]*(class|interface)' ./*.ja
true
9769d3967ade663b21d61274ba0d7a2ceb8651c0
Shell
rpicluster/rpicluster-stretch
/stage2S/02-net-tweaks/files/enable-base.sh
UTF-8
2,725
3.40625
3
[ "BSD-3-Clause" ]
permissive
#!/bin/bash
echo " Enabling Base networking scheme . . . "
count=1
total=4
start=`date +%s`
while [ $count -le $total ]; do
  if [ $count -eq 1 ]
  then
    task="Updating dhcpcd.conf"
    sudo mv /etc/dhcpcd.conf /etc/dhcpcd.conf.orig
    sudo echo "interface wlan0
static ip_address=192.168.1.254/24" >> /etc/dhcpcd.conf
  elif [ $count -eq 2 ]
  then
    task="Generating new hostapd.conf"
    cd /rpicluster/config
    output=`python -c 'from functions import *; print " ".join(read_stamp("0100010001000001010101110100000101000101", "/boot/stamp"))'`
    counter=0
    network_name=""
    password=""
    for i in ${output[@]}
    do
      if [ $counter -eq 0 ]
      then
        network_name=$i
      else
        password=$i
      fi
      counter=$((counter+1))
    done
    sudo echo "#INTERFACE
interface=wlan0
#DRIVER SETTINGS
driver=nl80211
#WLAN SETTINGS
country_code=US
ssid=$network_name
channel=11
wmm_enabled=1
hw_mode=g
#N-WLAN SETTINGS
ieee80211n=1
obss_interval=0
require_ht=0
ht_capab=[HT40][SHORT-GI-20][DSSS_CCK-40]
#WPA SETTINGS
wpa=2
wpa_passphrase=$password
wpa_key_mgmt=WPA-PSK
wpa_pairwise=TKIP
rsn_pairwise=CCMP
auth_algs=3
macaddr_acl=0
# Logging
logger_syslog=-1
logger_syslog_level=3
logger_stdout=-1
logger_stdout_level=2" > /etc/hostapd/hostapd.conf
  elif [ $count -eq 3 ]
  then
    task="Linking new hostapd.conf"
    sudo sed -i '10s/.*/DAEMON_CONF="\/etc\/hostapd\/hostapd.conf"/' /etc/default/hostapd
    sudo sed -i '19s/.*/DAEMON_CONF=\/etc\/hostapd\/hostapd.conf/' /etc/init.d/hostapd
  elif [ $count -eq 4 ]
  then
    task="Generating new dnsmasq.conf"
    sudo mv /etc/dnsmasq.conf /etc/dnsmasq.conf.orig
    sudo echo "no-resolv
interface=wlan0
listen-address=192.168.1.254
server=8.8.8.8
server=8.8.4.4
cache-size=10000
domain-needed #blocks incomplete requests from leaving your network, such as google instead of google.com
bogus-priv #prevents non-routable private addresses from being forwarded out of your network
dhcp-range=192.168.1.100,192.168.1.150,12h # IP range and lease time
dhcp-authoritative #only use dnsmasq and dhcp server
#LOGGING
log-queries #log each DNS query as it passes through
log-dhcp" > /etc/dnsmasq.conf
  else
    task="Finished"
  fi
  cur=`date +%s`
  runtime=$(( $cur-$start ))
  estremain=$(( ($runtime * $total / $count)-$runtime ))
  printf "\r%d.%d%% complete ($count of $total tasks) - est %d:%0.2d remaining - $task\e[K" $(( $count*100/$total )) $(( ($count*1000/$total)%10)) $(( $estremain/60 )) $(( $estremain%60 ))
  if [ $count -lt 6 ]
  then
    count=$(( $count + 1 ))
  fi
done
printf "\r%d.%d%% complete (4 of 4 tasks) - est %d:%0.2d remaining - Finished\e[K" $(( 4*100/$total )) $(( (4*1000/$total)%10)) $(( $estremain/60 )) $(( $estremain%60 ))
true
1908f2121a007162f2b46e5d2e1e1bdd9ee36a03
Shell
surajsunrays/Daily-Work-Submission-Zemoso
/DAY8-30112018/Shell-Scripting/execution-check.sh
UTF-8
196
3.328125
3
[]
no_license
#This script will check the execution of previously executed command
#!/bin/sh
if [ "$?" -ne "0" ]; then
    echo "Sorry, Something goes wrong earlier"
else
    echo "Fine, Earlier command OK "
fi
true
338418a05027ee5a35761473aa717e9d8bc7276b
Shell
cwmcbrewster/Jamf_Scripts
/Install_GoogleChrome.sh
UTF-8
2,357
4.0625
4
[]
no_license
#!/bin/zsh

# Automatically download and install the latest Google Chrome
# https://support.google.com/chrome/a/answer/9915669?hl=en

# Variables
appName="Google Chrome.app"
appPath="/Applications/${appName}"
appProcessName="Google Chrome"
downloadUrl="https://dl.google.com/chrome/mac/stable"
#downloadUrl="https://dl.google.com/chrome/mac/universal/stable/gcem"
pkgName="GoogleChrome.pkg"
acceptTerms="accept_tos%3Dhttps%253A%252F%252Fwww.google.com%252Fintl%252Fen_ph%252Fchrome%252Fterms%252F%26_and_accept_tos%3Dhttps%253A%252F%252Fpolicies.google.com%252Fterms"

cleanup () {
  if [[ -f "${tmpDir}/${pkgName}" ]]; then
    if rm -f "${tmpDir}/${pkgName}"; then
      echo "Removed file ${tmpDir}/${pkgName}"
    fi
  fi
  if [[ -d "${tmpDir}" ]]; then
    if rm -R "${tmpDir}"; then
      echo "Removed directory ${tmpDir}"
    fi
  fi
}

createTmpDir () {
  if [ -z ${tmpDir+x} ]; then
    tmpDir=$(mktemp -d)
    echo "Temp dir set to ${tmpDir}"
  fi
}

processCheck () {
  if pgrep -x "${appProcessName}" > /dev/null; then
    echo "${appProcessName} is currently running"
    echo "Aborting install"
    exit 0
  else
    echo "${appProcessName} not currently running"
  fi
}

tryDownload () {
  if curl -LSs "${downloadUrl}/${acceptTerms}/${pkgName}" -o "${tmpDir}/${pkgName}"; then
    echo "Download successful"
    tryDownloadState=1
  else
    echo "Download unsuccessful"
    tryDownloadCounter=$((tryDownloadCounter+1))
  fi
}

versionCheck () {
  if [[ -d "${appPath}" ]]; then
    echo "${appName} version is $(defaults read "${appPath}/Contents/Info.plist" CFBundleShortVersionString)"
    versionCheckStatus=1
  else
    echo "${appName} not installed"
    versionCheckStatus=0
  fi
}

# Start

# List version
versionCheck

# Download pkg file into tmpDir (60 second timeout)
tryDownloadState=0
tryDownloadCounter=0
while [[ ${tryDownloadState} -eq 0 && ${tryDownloadCounter} -le 60 ]]; do
  processCheck
  createTmpDir
  tryDownload
  sleep 1
done

# Check for successful download
if [[ ! -f "${tmpDir}/${pkgName}" ]]; then
  echo "Download failed"
  cleanup
  exit 1
fi

# Install package
echo "Starting install"
installer -pkg "${tmpDir}/${pkgName}" -target /

# Remove tmp dir and downloaded pkg file
cleanup

# List version and exit with error code if not found
versionCheck
if [[ ${versionCheckStatus} -eq 0 ]]; then
  exit 1
fi
true
41091b6a19b8a8e3c01cccd486f2a321173b9cf6
Shell
CrisFeo/dotfiles
/stow/utilities/.bin/music
UTF-8
840
3.453125
3
[]
no_license
#!/bin/bash
set -e

find_opts=( \( -type l -or -type f \) -and \( -name '*.opus' -or -name '*.webm' -or -name '*.m4a' -or -name '*.ogg' \) )

fzf_opts=( --multi --cycle --reverse --bind 'alt-a:select-all,alt-d:deselect-all' )

mpv_statusline='\e[0;33m${time-pos}\e[0m'
mpv_statusline+='/'
mpv_statusline+='\e[0;33m${duration}\e[0m'
mpv_statusline+=' \e[0;32m${percent-pos}%\e[0m'
mpv_statusline+='\n${filename}'

mpv_opts=( --msg-level 'all=no,statusline=status' --term-status-msg="$mpv_statusline" --input-ipc-server='/tmp/mpvsocket' )

path=${1:-$HOME/audio}

tracks=$(find "$path" "${find_opts[@]}" | sort | sed "s#$path/##")

while :; do
  chosen=$(fzf ${fzf_opts[@]} <<< "$tracks")
  clear
  echo "$chosen"
  IFS=$'\n' mpv ${mpv_opts[@]} $(sed -E "s#^#$path/#" <<< "$chosen")
  echo
done
true
c7fec951868a3bfdfbc296971bbc19bfbdd859b8
Shell
TmanHef/SCPPP-Algs
/createResultDirs.sh
UTF-8
360
2.609375
3
[ "MIT" ]
permissive
#!/usr/bin/bash

mkdir "result_graphs"

domains=( blocks logistics depot rovers )

for domain in "${domains[@]}"
do
    mkdir result_graphs/$domain
    mkdir result_graphs/$domain/evaluations
    mkdir result_graphs/$domain/expansions
    mkdir result_graphs/$domain/goal_reach_time
    mkdir result_graphs/$domain/messages_count
    mkdir result_graphs/$domain/overall
done
true
2e93d023c078e535bd69f6d4f3d8110e5ef837b9
Shell
LUSpace/osdi21ae
/script/check_dax_fs.sh
UTF-8
185
2.953125
3
[]
no_license
#!/bin/bash

if [ -c "/dev/dax0.0" ]; then
    bash ./setup_eval.sh
fi

fs=`mount`
for i in 0 1 2 3
do
    str=`echo $fs | grep pm$i`
    if [[ -z $str ]]; then
        bash ./setup_eval.sh
    fi
done
true
2d058f20d1b522b2505c88a1948f5989667843c3
Shell
djpohly/arch-makepkg-action
/entrypoint.sh
UTF-8
411
3.234375
3
[]
no_license
#!/bin/bash
set -eou pipefail

if [[ ! -z "$1" ]]; then
    cd "$1"
fi

echo "Running makepkg from $PWD"

pkg_deps=$(source ./PKGBUILD && echo ${makedepends[@]} ${depends[@]})
echo "Installing dependencies: $pkg_deps"
pacman -Syu --noconfirm $pkg_deps

chown -R archbuilder "$PWD"

echo "Running makepkg"
# TODO: support extra flags
su archbuilder -c makepkg -f

echo "Running namcap"
namcap -i *.pkg.tar.xz
true
0d79cb1dbf92a9d8d972e38e98b304caec1269ae
Shell
robin-vip/shell_scripts
/shell_scripts/crontab_task/git_sync/gitrepo_sync.sh
UTF-8
1,544
3.984375
4
[]
no_license
#!/bin/sh

ConfigFile=gitrepo_config.ini
LogFile="git_sync_$(date '+%Y%m').log"
ReposNum=
OLD_DIR=

PrintLog()
{
	echo "[$(date '+%Y-%m-%d %H:%M:%S')]: $1" >> $LogFile
}

InitTask()
{
	OLD_DIR=`pwd`;
	cd `dirname $0`
	source ../../config_parase/ini_prase.sh
	LogFile="$(GetIniKey "$ConfigFile" "Header:ConfigPath")/$LogFile"
	ReposNum=$(GetIniKey "$ConfigFile" "Header:ReposNum")
	ConfigFile="`dirname $0`/$ConfigFile"
	cd $OLD_DIR
}

InitTask

Index=1
while [ $Index -le $ReposNum ]; do
	repos=$(GetIniKey "$ConfigFile" "Repos$Index:Name")
	path=$(GetIniKey "$ConfigFile" "Repos$Index:Path")
	if [ -d "$path" ]; then
		PrintLog "Start sync repos:\"$repos\"..."
		cd $path
		remote_num=$(git remote -v | awk '{print $1}' | uniq | wc -l)
		remote_idx=1
		while [ $remote_idx -le $remote_num ]; do
			remote_name=$(git remote -v | awk '{print $1}' | uniq | sed -n "`echo $remote_idx`p")
			remote_url=$(git remote -v | awk '{print $2}' | uniq | sed -n "`echo $remote_idx`p")
			PrintLog "push repos:\"$repos\" to $remote_url"
			# pull from "gogo_local:master"
			if [ "gogs_local" == "$remote_name" ]; then
				git pull $remote_name master:master
				local_branch="development"
				remote_branch="development"
			else
				local_branch="master"
				remote_branch="master"
			fi
			# push to remote repository.
			git push $remote_name $local_branch:$remote_branch >> $LogFile 2>&1
			echo "" >> $LogFile
			let remote_idx+=1
		done
		cd $OLD_DIR
		echo "" >> $LogFile
	else
		PrintLog "$path is not exist."
	fi
	let Index+=1
done
true
6692cf300ba26c14419de07d884d101380dd048b
Shell
nikitakit/tenshi-old
/build.sh
UTF-8
1,041
3.3125
3
[]
no_license
#!/bin/bash -xe

export PROJECT_ROOT_DIR=`pwd`

# Set up or download tools
./tools/extract-tools.sh

# TODO(rqou): Less hacky
export PATH=$PATH:$PROJECT_ROOT_DIR/tools/arm-toolchain/bin:$PROJECT_ROOT_DIR/tools/emscripten-bin:$PROJECT_ROOT_DIR/tools/emscripten-bin/llvm/bin

if [[ -z "LLVM" ]] ; then
    export LLVM_DIR="$PROJECT_ROOT_DIR/emscripten-bin/llvm"
    export LLVM="$LLVM_DIR/bin"
    export PATH="$PATH:$PROJECT_ROOT_DIR/emscripten-bin:$LLVM"
fi

mkdir -p build/artifacts

# Run linters
linter_status=0
for tool in cpplint pep8 jshint csslint
do
    ./tools/run-style-tool.py $tool 2>&1 | tee build/${tool}.txt
    linter_status=$[${linter_status} || ${PIPESTATUS[0]}]
done
if [ $linter_status != 0 ] ; then
    exit $linter_status
fi

./waf configure build

# TODO(kzentner): Fix this hack?
cp build/vm/release_emscripten/vm/angelic/src/ngl_vm.js angel-player/src/chrome/content/vendor-js/
cp build/lua/release_emscripten/vm/lua/lua.js angel-player/src/chrome/content/vendor-js/

# Main build
for dir in angel-player
do
    ./$dir/build.sh
done
true
7d78f5b4e2729e8a474e4e46c52c2035c7af3005
Shell
matteoneri/i3
/scripts/i3lock-painteffect.sh
UTF-8
3,371
2.75
3
[]
no_license
#!/bin/bash

image=/tmp/i3lock.png
screenshot="scrot $image"
$screenshot

#~/.config/i3/scripts/im-painteffect -p 1 -e 0 -t yes $image $image
#~/.config/i3/scripts/im-turbolence -s 300 -d 200 -c together $image $image
#~/.config/i3/scripts/im-spots -s 10x10 -t circle -B 20 -C 20 $image $image
#convert $image -blur "8x4" $image
~/.config/i3/scripts/im-spots -s 17x17 -t square -B 20 -C 10 $image $image

i3lock_cmd=(i3lock --beep -i $image -t -p win -e)
text="Введите пароль"
font=$(convert -list font | awk "{ a[NR] = \$2 } /family: $(fc-match sans -f "%{family}\n")/ { print a[NR-1]; exit }")
hue=(-level "0%,100%,0.6")

# value="60" #brightness value to compare to
# color=$(convert "$image" -gravity center -crop 100x100+0+0 +repage -colorspace hsb \
#   -resize 1x1 txt:- | awk -F '[%$]' 'NR==2{gsub(",",""); printf "%.0f\n", $(NF-1)}');
# if [[ $color -gt $value ]]; then #white background image and black text
#   bw="black"
#   icon="/home/matteo/.config/i3/scripts/icons/lockdark.png"
#   param=("--insidecolor=0000001c" "--ringcolor=0000003e" \
#     "--linecolor=00000000" "--keyhlcolor=ffffff80" "--ringvercolor=ffffff00" \
#     "--separatorcolor=22222260" "--insidevercolor=ffffff1c" \
#     "--ringwrongcolor=ffffff55" "--insidewrongcolor=ffffff1c" \
#     "--verifcolor=ffffff00" "--wrongcolor=ff000000" "--timecolor=ffffff00" \
#     "--datecolor=ffffff00" "--layoutcolor=ffffff00")
# else #black
#   bw="white"
#   icon="/home/matteo/.config/i3/scripts/icons/lock.png"
#   param=("--insidecolor=ffffff1c" "--ringcolor=ffffff3e" \
#     "--linecolor=ffffff00" "--keyhlcolor=00000080" "--ringvercolor=00000000" \
#     "--separatorcolor=22222260" "--insidevercolor=0000001c" \
#     "--ringwrongcolor=00000055" "--insidewrongcolor=0000001c" \
#     "--verifcolor=00000000" "--wrongcolor=ff000000" "--timecolor=00000000" \
#     "--datecolor=00000000" "--layoutcolor=00000000")
# fi

# convert "$image" "${hue[@]}" -font "$font" -pointsize 46 -fill "$bw" -gravity center \
#   -annotate +0+160 "$text" "$icon" -gravity center -composite "$image"

param=("--force-clock" "--insidevercolor=FFFFFF17" \
    "--insidewrongcolor=FFFFFF55" "--ringwrongcolor=FFBA63FF" \
    "--insidecolor=00000077" "--ringcolor=FFFFFFFF" \
    "--linecolor=ffffff00" "--bshlcolor=00000000" \
    "--separatorcolor=000000FF" \
    "--verifcolor=00000000" "--wrongcolor=ff000000" \
    "--datecolor=00000000" "--layoutcolor=00000000" '--indpos="x 100:y 100"' "--timesize=30" "--timecolor=FFFFFFFF")
#    '--vertext="Drinking verification can..."' '--wrongtext="Nope!"')
#    "--textsize=20" "--modsize=10")

# try to use i3lock with prepared parameters
if ! LC_TIME=ru_RU.UTF-8 ${i3lock_cmd[@]} \
    -k --force-clock --indicator \
    --insidecolor=00000077 --ringcolor=96F7C800 \
    --timecolor=ffffffff --datecolor=ffffffff \
    --datestr="%A %d %B" \
    --keyhlcolor=FFFFFF99 \
    --insidewrongcolor=00000055 --ringwrongcolor=FFBA63FF \
    --insidevercolor=00000055 --ringvercolor=96F7C8FF \
    --radius=110 \
    --veriftext="" --wrongtext="" \
    --noinputtext="" --greetertext="" \
    --locktext="" --lockfailedtext="" \
    >/dev/null 2>&1; then
    # We have failed, lets get back to stock one
    echo "failed"
    #"${i3lock_cmd[@]}"
fi

#i3lock -i $image -t -f -p win -e
rm $image
true
22249074017142359c3eb805af9448899a40a744
Shell
akaranjkar-qu/bootstrap-functions
/tests/ranger-client.bats
UTF-8
2,286
3.015625
3
[ "MIT" ]
permissive
load helpers/file_helper

RANGER_HOST=localhost
RANGER_PORT=6080
RANGER_REPO=hivedev
RANGER_URL=http://${RANGER_HOST}:${RANGER_PORT}
HIVE_LIB=/usr/lib/hive1.2
RANGER_HIVE_PLUGIN_PATH=/media/ephemeral0/hive_plugin/ranger-1.1.0-hive-plugin

function setup() {
  if [[ ! -e /tmp/RANGER_INSTALLED ]]; then
    source /usr/lib/qubole/bootstrap-functions/hive/ranger-client.sh
    install_ranger -h ${RANGER_HOST} -p ${RANGER_PORT} -r ${RANGER_REPO}
    touch /tmp/RANGER_INSTALLED
  fi
}

@test "Ranger directory exists" {
  assert_dir_exists ${RANGER_HIVE_PLUGIN_PATH}
}

@test "install.properties contains repo name" {
  assert_file_contains "REPOSITORY_NAME=${RANGER_REPO}" ${RANGER_HIVE_PLUGIN_PATH}/install.properties
}

@test "install.properties contains policy mgr url" {
  assert_file_contains "POLICY_MGR_URL=${RANGER_URL}" ${RANGER_HIVE_PLUGIN_PATH}/install.properties
}

@test "install.properties contains component install dir name" {
  assert_file_contains "COMPONENT_INSTALL_DIR_NAME=${HIVE_LIB}" ${RANGER_HIVE_PLUGIN_PATH}/install.properties
}

@test "enable-hive-plugin.sh contains hcomponent install dir" {
  assert_file_contains "HCOMPONENT_INSTALL_DIR=${HIVE_LIB}" ${RANGER_HIVE_PLUGIN_PATH}/enable-hive-plugin.sh
}

@test "verify ranger jars were copied to hive lib" {
  location=${HIVE_LIB}/lib
  ranger_jars=(
    eclipselink-2.5.2.jar
    httpclient-4.5.3.jar
    httpcore-4.4.6.jar
    httpmime-4.5.3.jar
    javax.persistence-2.1.0.jar
    noggit-0.6.jar
    ranger-hive-plugin-1.1.0.jar
    ranger-hive-plugin-impl
    ranger-hive-plugin-shim-1.1.0.jar
    ranger-plugin-classloader-1.1.0.jar
    ranger-plugins-audit-1.1.0.jar
    ranger-plugins-common-1.1.0.jar
    ranger-plugins-cred-1.1.0.jar
    solr-solrj-5.5.4.jar
  )
  assert_multiple_files_exist $location "${ranger_jars[@]}"
}

@test "Ranger configs in hiveserver2-site.xml" {
  assert_file_contains "org.apache.ranger.authorization.hive.authorizer.RangerHiveAuthorizerFactory" /usr/lib/hive1.2/conf/hiveserver2-site.xml
}

@test "JAVA uses version 8" {
  run bash -c 'alternatives --display java | grep "currently points to" | grep -qs "1.8.0"'
  [[ ${status} -eq 0 ]]
}

@test "JAVA_HOME updated in hadoop-env.sh" {
  assert_file_contains "export JAVA_HOME=/usr/lib/jvm/jre" /usr/lib/hadoop2/etc/hadoop/hadoop-env.sh
}
true
c09595540c2d159c81bb17978e93076d669e422e
Shell
janost/pingstat
/stuff/munin/pingstat.sh
UTF-8
6,071
3.171875
3
[ "WTFPL" ]
permissive
#!/bin/bash

PARAMS=${0#*pingstat_}
GRAPHTYPE=$(echo "$PARAMS" | cut -d _ -f 1)
TARGET=$(echo "$PARAMS" | cut -d _ -f 2)

# TODO: move these to configuration
PING_LAST_SECONDS=300
PINGSTAT_BASEURL="http://localhost:8000"
#
PING_MIN_WARNING=25
PING_MIN_CRITICAL=50
PING_AVG_WARNING=25
PING_AVG_CRITICAL=50
PING_MAX_WARNING=100
PING_MAX_CRITICAL=200

case $GRAPHTYPE in
  ping)
    if [ "$1" == "config" ]; then
      echo "graph_title Ping time - ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "graph_vlabel ms"
      echo "graph_info Ping time - ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "graph_category ping"
      echo "graph_args -l 0"
      echo "graph_scale no"
      echo "pavg.label Average ping"
      echo "pavg.warning ${PING_AVG_WARNING}"
      echo "pavg.critical ${PING_AVG_CRITICAL}"
      echo "pavg.info Average ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "pmin.label Minimum ping"
      echo "pmin.warning ${PING_MIN_WARNING}"
      echo "pmin.critical ${PING_MIN_CRITICAL}"
      echo "pmin.info Minimum ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "pmax.label Maximum ping"
      echo "pmax.warning ${PING_MAX_WARNING}"
      echo "pmax.critical ${PING_MAX_CRITICAL}"
      echo "pmax.info Maximum ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "p99.label 99th percentile"
      echo "p99.warning ${PING_MAX_WARNING}"
      echo "p99.critical ${PING_MAX_CRITICAL}"
      echo "p99.info 99th percentile ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "p98.label 98th percentile"
      echo "p98.warning ${PING_MAX_WARNING}"
      echo "p98.critical ${PING_MAX_CRITICAL}"
      echo "p98.info 98th percentile ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "p97.label 97th percentile"
      echo "p97.warning ${PING_MAX_WARNING}"
      echo "p97.critical ${PING_MAX_CRITICAL}"
      echo "p97.info 97th percentile ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "p96.label 96th percentile"
      echo "p96.warning ${PING_MAX_WARNING}"
      echo "p96.critical ${PING_MAX_CRITICAL}"
      echo "p96.info 96th percentile ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "p95.label 95th percentile"
      echo "p95.warning ${PING_AVG_WARNING}"
      echo "p95.critical ${PING_AVG_CRITICAL}"
      echo "p95.info 95th percentile ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "p90.label 90th percentile"
      echo "p90.warning ${PING_AVG_WARNING}"
      echo "p90.critical ${PING_AVG_CRITICAL}"
      echo "p90.info 90th percentile ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "p50.label 50th percentile"
      echo "p50.warning ${PING_MIN_WARNING}"
      echo "p50.critical ${PING_MIN_CRITICAL}"
      echo "p50.info 50th percentile ping to ${TARGET}, ${PING_LAST_SECONDS} sec"
      exit 0
    fi
    JDATA=$(curl -H "Content-type: application/json" -H "Accept: application/json" ${PINGSTAT_BASEURL}/last/${PING_LAST_SECONDS})
    printf "pavg.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"avg_ms\"]" | bc -l)
    echo
    printf "pmin.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"min_ms\"]" | bc -l)
    echo
    printf "pmax.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"max_ms\"]" | bc -l)
    echo
    printf "p50.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"perc_50th\"]" | bc -l)
    echo
    printf "p90.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"perc_90th\"]" | bc -l)
    echo
    printf "p95.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"perc_95th\"]" | bc -l)
    echo
    printf "p96.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"perc_96th\"]" | bc -l)
    echo
    printf "p97.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"perc_97th\"]" | bc -l)
    echo
    printf "p98.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"perc_98th\"]" | bc -l)
    echo
    printf "p99.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"perc_99th\"]" | bc -l)
    echo
    ;;
  count)
    if [ "$1" == "config" ]; then
      echo "graph_title Ping count - ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "graph_vlabel Number of pings"
      echo "graph_info Ping count - ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "graph_category ping"
      echo "graph_args -l 0"
      echo "graph_scale no"
      echo "psucc.label Successful pings"
      echo "psucc.info Successful pings to ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "pfail.label Failed pings"
      echo "pfail.info Failed pings to ${TARGET}, ${PING_LAST_SECONDS} sec"
      exit 0
    fi
    JDATA=$(curl -H "Content-type: application/json" -H "Accept: application/json" ${PINGSTAT_BASEURL}/last/${PING_LAST_SECONDS})
    printf "psucc.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"success_count\"]" | bc -l)
    echo
    printf "pfail.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"failed_count\"]" | bc -l)
    echo
    ;;
  loss)
    if [ "$1" == "config" ]; then
      echo "graph_title Packet loss - ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "graph_vlabel percent"
      echo "graph_info Packet loss - ${TARGET}, ${PING_LAST_SECONDS} sec"
      echo "graph_category ping"
      echo "graph_args -l 0"
      echo "graph_scale no"
      echo "ploss.label Packet loss"
      echo "ploss.warning 1"
      echo "ploss.critical 5"
      echo "ploss.info Packet loss to ${TARGET}, ${PING_LAST_SECONDS} sec"
      exit 0
    fi
    JDATA=$(curl -H "Content-type: application/json" -H "Accept: application/json" ${PINGSTAT_BASEURL}/last/${PING_LAST_SECONDS})
    printf "ploss.value "
    printf "%.2f" $(echo ${JDATA} | jq ".[] | select(.target==\"${TARGET}\")[\"failed_percent\"]" | bc -l)
    echo
    ;;
esac
true
ee558fb08928adf5cdb3ca14d2db83d4f1b20b7f
Shell
qubixes/streetview-greenery
/scripts/compute_lisa.sh
UTF-8
1,177
3.234375
3
[]
no_license
#!/bin/bash

N_JOBS=64
EXTRA_ARGS="-l 6"

if [ $# -ge 1 ]; then
    CFG_FILE=$1
    NEW_JOBS=(`grep 'N_JOBS_COMPUTE' $CFG_FILE`)
    if [ "${NEW_JOBS[*]}" != "" ]; then
        N_JOBS=${NEW_JOBS[1]}
    fi
    NEW_EXTRA_ARGS=(`grep 'EXTRA_ARGS' $CFG_FILE`)
    if [ "${NEW_EXTRA_ARGS[*]}" != "" ]; then
        EXTRA_ARGS=${NEW_EXTRA_ARGS[@]:1}
    fi
fi

COMMAND_FILE="temp_commands_compute.txt"
PRE_FILE="temp_pre_compute.txt"
CONFIG_FILE="compute_lisa.ini"
JOB_NAME=`grep "job_name = " "compute_lisa.ini" | cut -f3 -d" "`

cat > $PRE_FILE << EOF_CAT
#SBATCH -p gpu_shared
#SBATCH -n 3

cd `pwd`
source hpc/module_load_gpu.sh
EOF_CAT

rm -f $COMMAND_FILE
let "NJOB_MAX=N_JOBS-1"
for JOB in `seq 0 $NJOB_MAX`; do
    echo "\${python} ./streetgreen.py --bbox amsterdam_almere --njobs $N_JOBS --jobid $JOB --model deeplab-xception_71 --skip-overlay $EXTRA_ARGS" >> $COMMAND_FILE
done

batchgen -f $COMMAND_FILE $CONFIG_FILE -pre $PRE_FILE

if [ `uname -s` == "Darwin" ]; then
    sed -i '' -e '/#SBATCH --tasks-per-node=12/d' batch.slurm_lisa/$JOB_NAME/batch*
else
    sed -i '/#SBATCH --tasks-per-node=12/d' batch.slurm_lisa/$JOB_NAME/batch*
fi

rm -f $COMMAND_FILE $PRE_FILE
true
d13ea9844cc8c05be6b63332268648ebafb489c3
Shell
greg00m/greg00m
/killsleep.sh
UTF-8
133
2.640625
3
[]
no_license
#!/bin/bash
sleep 30 &
SLEEPPID=$!
echo "PID " $SLEEPPID "is sleeping."
sleep 5
echo "PID " $SLEEPPID "is sleeping."
kill $SLEEPPID
true
dba7cca192bb22cffe0aa452d0a27a0e13a5227c
Shell
guenther-brunthaler/usr-local-bin-xworld-jv3gwuidf2ezyr5vbqavqtxyh
/mkdir-from-template
UTF-8
3,842
4.28125
4
[]
no_license
#! /bin/sh
show_version() {
	wrL <<-.
	$SCRIPT version 10.124
	Copyright (c) 2008-2010 Guenther Brunthaler. All rights reserved.
	Distribution is permitted under the terms of the GPLv3.
.
}

show_help() {
	{
	wr <<-.
	$SCRIPT - create directories by cloning from a template directory

	$SCRIPT takes a template directory and a number of directories to be
	created as arguments. It then creates the requested directories,
	duplicating the ownership and attributes of the template directory.
	Without further options, the contents of the template directory are
	not cloned, though.

	Usage: $SCRIPT [options] <template_dir> [ <new_dir> ... ]

	<new_dir> ... are the list of directories to be created. Missing
	intermediate directories will not be created automatically by default,
	so normally the parent directories must already exist.

	After each new directory has been created, chmod and chown will be
	used on it to receive the same owner, group and permissions as the
	<template_dir>. The directory timestamps are not copied.

	Caveats: The current version does not know about and thus ignores
	ACLs.

	Command line options accepted by $SCRIPT:

	--: Stop parsing for command line options. Any arguments following
	this option will be interpreted as normal arguments, even if they
	start with "-" or "--".

	--help, -h, --usage: Display this help.

	--version, -V: Show version information.

	"Switch clustering" for short-options (single-dash options) is
	supported. For instance, when specifying two separate options "-x" and
	"-y", the combined option argument "-xy" means exactly the same thing.

	Return status codes: $SCRIPT will return a status code of 0 for
	successful operation. It will return a status code of 1 for all severe
	failures.
.
	show_version
	} | less -F
}

die() {
	printf "ERROR: %s\n" "$*" >& 2
	false; exit
}

wr() {
	if test $# -gt 0; then
		printf '%s\n' "$*" | wr
	else
		fmt -w `tput cols` || cat
	fi
}

wrL() {
	local LINE
	while IFS= read -r LINE; do
		printf "%s\n" "$LINE" | wr
	done
}

require_arg() {
	local ARGC; ARGC=$1; shift
	test $ARGC -eq 0 && die "Missing argument: $*!"
}

SCRIPT=${0##*/}
COPTS=
while true; do
	if [ -z "$COPTS" ]; then
		case "$1" in
			-?*) COPTS="$1"; shift;;
			*) break;;
		esac
	fi
	if [ "${COPTS#--}" = "$COPTS" ]; then
		TAIL="${COPTS#-?}"; # Switch clustering.
		COPT="${COPTS%$TAIL}"; COPTS="${TAIL:+-}$TAIL"
	else
		COPT="$COPTS"; COPTS=
	fi
	# Any arguments are at $1 and onwards; must be shifted off.
	case "$COPT" in
		--) break;; # Must be first!
		--help | -h | --usage) show_help; exit;;
		--version | -V) show_version; exit;;
		*) die "Unknown option '$COPT'!";; # Must be last!
	esac
done

require_arg $# "Path to the template directory"; TEMPLATE=$1; shift
test -d "$TEMPLATE" && test -x "$TEMPLATE" \
	|| die "Template directory '$TEMPLATE' is inaccessible!"
PERMS=`stat -L -c '%a' "$TEMPLATE"` \
	&& OWNERS=`stat -L -c '%u:%g' "$TEMPLATE"` \
	|| die "Cannot read attributes of template directory!"
while test $# -gt 0; do
	NEWDIR=$1; shift
	test x"$NEWDIR" = x"/" && die "Cannot create the root directory!"
	test -z "$NEWDIR" && die "Cannot create empty directory name!"
	while test x"${NEWDIR%/}" != x"$NEWDIR"; do
		NEWDIR=${NEWDIR%?}
	done
	if test x"${NEWDIR#/}" = x"$NEWDIR"; then
		# Make relative path absolute.
		NEWDIR=`pwd`/$NEWDIR
	fi
	BASENAME=${NEWDIR##*/}
	PARENT=${NEWDIR%/$BASENAME}
	test -z "$PARENT" && PARENT="/"
	test -d "$PARENT" && test -x "$PARENT" \
		|| die "Inacessible parent directory '$PARENT'!"
	test -e "$NEWDIR" && \
		die "File system object with name '$NEWDIR' already exists!"
	mkdir -m "$PERMS" "$NEWDIR" \
		|| die "Could not create directory '$NEWDIR'" \
			"with mode $PERMS!"
	chown "$OWNERS" "$NEWDIR" \
		|| die "Could not change owner:group for directory" \
			"'$NEWDIR' into $OWNERS!"
done
true
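A minimal usage sketch for the script above (the installed name and the paths are assumptions for illustration; only ownership and permissions are cloned from the template, never its contents):

# Create two project directories that inherit mode and owner:group
# from an existing template directory.
mkdir-from-template /srv/projects/_template /srv/projects/alpha /srv/projects/beta

# Spot-check that the attributes really match the template.
stat -L -c '%a %u:%g %n' /srv/projects/_template /srv/projects/alpha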
eaba2ee5fb013a85ac2f99c5a535bd78388e6794
Shell
linostar/vagrirc
/environment/shell/3.ruby.1.init.sh
UTF-8
1,051
3.109375
3
[ "CC0-1.0", "MIT" ]
permissive
#/usr/bin/env sh # Ruby Setup # Mostly taken from the wonderful PuPHPet project, used under the MIT license CACHE_FOLDER=/environment/cache # CentOS release we're using OS=centos RELEASE=6 # Start of PuPHPet Ruby Setup <<< rm -rf /usr/bin/ruby /usr/bin/gem /usr/bin/rvm /usr/local/rvm echo 'Installing RVM and Ruby 1.9.3' if [ "${OS}" == 'debian' ] || [ "${OS}" == 'ubuntu' ]; then gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys D39DC0E3 elif [[ "${OS}" == 'centos' ]]; then gpg2 --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys D39DC0E3 fi curl -sSL https://get.rvm.io | bash -s stable --quiet-curl --ruby=ruby-1.9.3-p551 source /usr/local/rvm/scripts/rvm if [[ -f '/root/.bashrc' ]] && ! grep -q 'source /usr/local/rvm/scripts/rvm' /root/.bashrc; then echo 'source /usr/local/rvm/scripts/rvm' >> /root/.bashrc fi if [[ -f '/etc/profile' ]] && ! grep -q 'source /usr/local/rvm/scripts/rvm' /etc/profile; then echo 'source /usr/local/rvm/scripts/rvm' >> /etc/profile fi # Unfinished end of PuPHPet Ruby Setup
true
255ff2b629f844cdc5194ab79d1bd0a11a1faac2
Shell
dtbinh/Pioneer-Project-2009
/pnav_ex/stage
UTF-8
1,183
3.53125
4
[]
no_license
#!/bin/sh # # Sebastian Rockel, 2010-01-15 # DefCSWait=6 # Seconds to wait default for c-space generation TamsCSWait=10 # Seconds to wait for tams c-space generation DefStageWait=2 # Seconds to wait for Stage world to come up StagePort=6666 NavLocPort=6665 LogPort=6667 world=${1} # Get Stage world as parameter # Check wether logging shall be done #if [ ! -z ${2} ] ; then logEnable=${2} #else #logEnable="NOPARAM" #fi if [ $world = "tams" ] ; then CSWait=$TamsCSWait else CSWait=$DefCSWait fi echo "=Playernav start delayed $CSWait seconds to wait for C-Space generation." player -p $StagePort ${world}_stage.cfg & # Start Stage world sleep $DefStageWait # Wait for Stage world to come up player -p $NavLocPort -q ${world}_navloc.cfg & # Localization and Path planning if [ $logEnable = "TRUE" ] ; then echo "=Logging enabled!" player -p $LogPort -q writelog.cfg & # Start writelog driver sleep 1 playervcr -p $LogPort & # Start logging controller fi sleep $CSWait # Wait for Wavefront driver's C-Space creation playernav # Run playernav on a remote machine echo "=Now kill remaining player background instances.." killall player # Kill player processes in background
true
77f558cbd01702240539275e1405200d4630c77e
Shell
alejandro1395/Impute_Master_Thesis_PanelCHR21
/BEAGLE4.0_CHIMP/IMPUTATION/beagle_imp_script
UTF-8
1,707
2.734375
3
[]
no_license
#!/bin/bash module load purge module load PYTHON/3.6.3 DATA=/scratch/devel/avalenzu/Impute_Master_Project/ANALYSIS_jan2019_panel300/BEAGLE_4.0_analysis_chimp/SAMPLES_vcf/ MAP=/scratch/devel/avalenzu/Impute_Master_Project/ANALYSIS_jan2019_panel300/BEAGLE_4.1_analysis_chimp/MAP/ BIN=/scratch/devel/avalenzu/Impute_Master_Project/ANALYSIS_jan2019_panel300/bin/ REF=/scratch/devel/avalenzu/Impute_Master_Project/ANALYSIS_jan2019_panel300/BEAGLE_4.0_analysis_chimp/PANEL/SPLITTED_VCF/ OUTDIR=/scratch/devel/avalenzu/Impute_Master_Project/ANALYSIS_jan2019_panel300/BEAGLE_4.0_analysis_chimp/IMPUTATION/ mkdir -p ${OUTDIR}/qu mkdir -p ${OUTDIR}/out count=0 for filepath in $(ls ${REF}filtered*.vcf.gz); do count=$(($count+1)) echo $count echo "#!/bin/bash module purge module load gcc/4.9.3-gold module load xz/5.2.2 module load SAMTOOLS/1.3 module load java module load GATK/4.0.8.1 module load TABIX/0.2.6 module load VCFTOOLS/0.1.7 java -Xmx70g -Djava.io.tmpdir=${OUTDIR}tmp/ \ -XX:-UseGCOverheadLimit \ -jar ${BIN}beagle.r1399.jar \ gl=${DATA}VCF_Boe1_merged_nonmiss.vcf.recode.vcf \ ref=${REF}filtered_chimp_chr21_Panel_splitted.${count}.vcf.gz \ nthreads=8 \ out=${OUTDIR}VCF_Boe1_merged_nonmiss_${count} \ map=${MAP}final_chr21.map \ impute=true \ gprobs=true" > ${OUTDIR}qu/VCF_Boe1_merged_gl_imp_${count}.sh jobname=$(echo ${OUTDIR}qu/VCF_Boe1_merged_gl_imp_${count}.sh) chmod 755 $jobname #SUBMISSION TO CLUSTER /scratch/devel/avalenzu/CNAG_interface/submit.py -c ${jobname} -o ${OUTDIR}out/VCF_Boe1_merged_gl_imp_${count}.out \ -e ${OUTDIR}out/VCF_Boe1_merged_gl_imp_${count}.sh -n gl_imp_${count} -u 8 -t 1 -w 20:00:00 #gl=../../SAMPLES_LOW/VCF_Boe1_merged/VCF_Boe1_merged_nonmiss.vcf.gz done
true
eeb7c946686ed9fea92f96657b111852db54e917
Shell
awmyhr/dotfiles-public
/roles/shell/files/.shell.d/lib/exitcodes
UTF-8
6,324
2.921875
3
[ "Apache-2.0" ]
permissive
#!/bin/sh # [SublimeLinter shellcheck-exclude:"SC2039" ] #=============================================================================== # # FILE: .shell.d/lib/exitcodes # # USAGE: (should be sourced by .shell.d/shellinit) # # DESCRIPTION: Standardize my exit codes. # # OPTIONS: --- # REQUIREMENTS: Bourne-compatible shell # OPTIONALS: --- # BUGS: --- # NOTES: --- # AUTHOR: awmyhr, awmyhr@gmail.com # VERSION: 1.0.0 # CREATED: 2016-10-06 # REVISION: 2016-10-06 #=============================================================================== #---------------------------------------------------------------------- #-- Notes/known bugs/other issues #---------------------------------------------------------------------- #---------------------------------------------------------------------- #-- Some of these were taken from http://tldp.org/LDP/abs/html/exitcodes.html #-- May as well use these. #-- Note: do NOT use 128 (Invalid arg to exit) #-- Note: do NOT use values <0 or >254 (out of range) #---------------------------------------------------------------------- export ec_OK=0 # successful termination export ec_NOTOK=1 # catchall export ec_BUILTIN=2 # misuse of shell builtins export ec_WARN=5 # general warning (From AmigaOS) export ec_ERROR=10 # general error (From AmigaOS) export ec_FAILURE=20 # general failure (From AmigaOS) export ec_NOEXECUTE=126 # command not executeable (see also: ec_NOPERM) export ec_NOTFOUND=127 # command not found #---------------------------------------------------------------------- #-- Note: The FSEs (fatal error signals) go from 128 to 165 #-- I've defined some common ones for whatever use I may find for them #---------------------------------------------------------------------- export ec_SIGHUP=129 # hangup aka FES "1" export ec_SIGINT=130 # Terminated by Ctrl-C aka FES "2" export ec_SIGQUIT=131 # core dump aka FES "3" export ec_SIGTRAP=133 # trace/break point aka FES "5" export ec_SIGABRT=134 # process abort aka FES "6" export ec_SIGKILL=137 # kill aka FES "9" export ec_SIGTERM=143 # terminate aka FES "15" #---------------------------------------------------------------------- #-- For Reference (see also https://en.wikipedia.org/wiki/Unix_signal) : #---------------------------------------------------------------------- # SIGHUP 1 Hangup # SIGINT 2 Terminal interrupt signal # SIGQUIT 3 Terminal quit signal. # SIGILL 4 Illegal instruction # SIGTRAP 5 Trace/breakpoint trap # SIGABRT 6 Process abort signal # SIGIOT 6 # SIGBUS 7 Access to an undefined portion of a memory object. # SIGFPE 8 Erroneous/Illegal arithmetic operation # SIGKILL 9 Kill (cannot be caught or ignored). 
# SIGUSR1 10 User-defined signal 1 # SIGSEGV 11 Invalid memory reference # SIGUSR2 12 User-defined signal 2 # SIGPIPE 13 Write on a pipe with no one to read it # SIGALRM 14 Alarm clock # SIGTERM 15 Termination signal # SIGSTKFLT 16 # SIGCHLD 17 Child process terminated, stopped, or continued # SIGCONT 18 Continue executing, if stopped # SIGSTOP 19 Stop executing (cannot be caught or ignored) # SIGTSTP 20 Terminal stop signal # SIGTTIN 21 Background process attempting read # SIGTTOU 22 Background process attempting write # SIGURG 23 High bandwidth data is available at a socket # SIGXCPU 24 CPU time limit exceeded # SIGXFSZ 25 File size limit exceeded # SIGVTALRM 26 Virtual timer expired # SIGPROF 27 Profiling timer expired # SIGWINCH 28 * controlling terminal changed its size # SIGIO 29 # SIGPOLL SIGIO Pollable event # SIGLOST 29 * file lock lost # SIGPWR 30 * system experienced a power failure # SIGSYS 31 Bad system call # SIGUNUSED 31 # SIGRTMIN 32 real-time signals (User-defined) # SIGRTMAX 32 real-time signals (User-defined) # SIGINFO ?? * status request received from controlling terminal # SIGEMT ?? * emulator trap has occured # All are POSIX except those marked with *. However, only the numbers # 1, 2, 3, 6, 9, 14, and 15 are POSIX defined, the rest are implementation # specific. These numbers are from /usr/include/asm/signal.h on Fedora 24 #---------------------------------------------------------------------- #---------------------------------------------------------------------- #-- These were taken from /usr/include/sysexits.h on Fedora 24 #-- May as well use these too #---------------------------------------------------------------------- export ec_LINUX_BASE=64 # base value for error messages export ec_USAGE=64 # command line usage error (bad syntax, etc.) export ec_DATAERR=65 # data format error (Only for user data) export ec_NOINPUT=66 # cannot open input (Not a system file) export ec_NOUSER=67 # user does not exist/addressee unknown export ec_NOHOST=68 # host name unknown export ec_UNAVAILABLE=69 # service/program/file unavailable or uknown error export ec_SOFTWARE=70 # internal software error (non-OS problems) export ec_OSERR=71 # system error (e.g., can't fork) export ec_OSFILE=72 # critical OS file missing export ec_CANTCREAT=73 # can't create (user specified) output file export ec_IOERR=74 # input/output error export ec_TEMPFAIL=75 # temp failure; user is invited to retry export ec_PROTOCOL=76 # remote error in protocol export ec_NOPERM=77 # permission denied (not file system-related) export ec_CONFIG=78 # configuration error export ec_LINUX_MAX=78 # maximum listed value #=============================================================================== #---------------------------------------------------------------------- export ISSET_EXITCODES=1 #---------------------------------------------------------------------- #===============================================================================
true
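Because the file above is meant to be sourced from shellinit rather than executed, a hedged example of a consumer script may help (the library path is an assumption based on the header comment):

#!/bin/sh
# Pull in the standardized exit codes.
. "$HOME/.shell.d/lib/exitcodes"

if [ $# -lt 1 ]; then
    echo "usage: $0 <input-file>" >&2
    exit "$ec_USAGE"                    # 64: command line usage error
fi
[ -r "$1" ] || exit "$ec_NOINPUT"       # 66: cannot open input
exit "$ec_OK"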
0e89d5cdcc535f205e1dcd0f85f8f4350d5aefb6
Shell
davejachimiak/fm
/vm/tests/leq
UTF-8
405
2.796875
3
[]
no_license
#!/usr/bin/env bash file=$(mktemp) loadc_op=$(echo -e '\x01') leq_op=$(echo -e '\x08') echo -e \ "$loadc_op"\ "\x09\x00\x00\x00"\ "\x00\x00\x00\x00"\ "$loadc_op"\ "\x0A\x00\x00\x00"\ "\x00\x00\x00\x00"\ "$leq_op"\ "\x00\x00\x00\x00"\ "\x00\x00\x00\x00"\ "\x00" > "$file" ./build/fm "$file" result="$?" if [[ "$result" -ne 1 ]]; then echo "tests/leq failure: expected $result to equal 1" exit 1 fi
true
5e9cb87dd8c5925970da8fc370dfa812d1933d37
Shell
menkan/configs
/.bash-shell/ranger.sh
UTF-8
236
2.921875
3
[]
no_license
#!/bin/bash
# @Author xutongze
# @Time 2021-07-02
# @Description Avoid spawning a new ranger shell from inside a shell that was itself started by ranger
rg() {
	if [ -z "$RANGER_LEVEL" ]
	then
		ranger
	else
		exit
	fi
}
# start
rg
true
42441ce88580a1bdfb7bad5ae12bc8416f6472eb
Shell
pkalbers/ScriptCollection
/DetectSimulatedIBD/phase_all.sh
UTF-8
666
3.375
3
[]
no_license
#!/bin/bash PREFIX=$1 CORES=$2 NJOBS=$3 SUFFIX="RData" ROOT=`pwd` for FILE in `find -iname "${PREFIX}*${SUFFIX}"`; do cd `dirname $FILE` BASE=`basename $FILE` BASE=${BASE%.RData} echo "###" echo $BASE echo "###" GENFILE=`ls *.gen` SAMFILE=`ls *.sample` PHSFILE=`basename $GENFILE` PHSFILE=${GENFILE%.gen} PHSFILE="phased.${PHSFILE}" echo "GEN file: ${GENFILE}" echo "Sample file: ${SAMFILE}" echo "Output prefix: ${PHSFILE}" ../shapeit -G $GENFILE $SAMFILE -O $PHSFILE -T $CORES & JOBLIST=($(jobs -p)) while (( ${#JOBLIST[*]} >= $NJOBS )) do sleep 30 JOBLIST=($(jobs -p)) done echo "" cd $ROOT done wait
true
6279d7e4a56869466d9bc2303edf2f6550370de4
Shell
p2pderivatives/cfd-js
/.github/workflows/docker/alpine_build_entrypoint.sh
UTF-8
1,386
2.515625
3
[ "MIT" ]
permissive
#!/bin/sh -l apk add --update --no-cache musl gcc g++ make git cmake zip cd /github/workspace ls mkdir dist mkdir build cmake --version cmake -S . -B build -DCMAKE_BUILD_TYPE=Release -DENABLE_SHARED=on -DENABLE_CAPI=off -DENABLE_TESTS=off -DTARGET_RPATH="@executable_path;/usr/local/lib;/usr/local/lib64;./node_modules/cfd-js/build/Release;../node_modules/cfd-js/build/Release;../../node_modules/cfd-js/build/Release;../../../node_modules/cfd-js/build/Release;../../../../node_modules/cfd-js/build/Release;../../../../../node_modules/cfd-js/build/Release;../../../../../../node_modules/cfd-js/build/Release;./build/Release;./build/Release;./wrap_js/dl_lib/lib" cmake --build build --parallel 4 --config Release cd build make install DESTDIR=../dist cd .. rm -rf dist/usr/local/cmake/wallycore-* dist/usr/local/lib/pkgconfig/wallycore.pc rm -rf dist/usr/local/lib/wallycore.* dist/usr/local/lib/libwallycore.* cd dist zip -r cfd.zip usr echo "---- dump zip file ----" ls -l cd .. echo "ls dist" ls /github/workspace/dist echo "ls dist/usr" ls /github/workspace/dist/usr echo "ls dist/usr/local" ls /github/workspace/dist/usr/local echo "ls dist/usr/local/lib" ls /github/workspace/dist/usr/local/lib* # cleanup echo "cleanup" rm -rf build external/cfd external/cfd-core external/libwally-core echo "ls" ls /github/workspace/ echo "ls external" ls /github/workspace/external/
true
ca95fbb5d8834e512fb1a616d9109053abb6858b
Shell
mysociety/varnish-apikey
/commandline/apikeys.sh
UTF-8
4,399
3.359375
3
[ "BSD-2-Clause" ]
permissive
#!/bin/bash # # Authors: Wojciech Mlynarczyk, Sami Kerola # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS # OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) # HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY # OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF # SUCH DAMAGE. # Default settings, do not touch. SCRIPT_INVOCATION_SHORT_NAME=$(basename ${0}) set -e # exit on errors trap 'echo "${SCRIPT_INVOCATION_SHORT_NAME}: exit on error"; exit 1' ERR set -u # disallow usage of unset variables RETVAL=0 msg() { echo "${SCRIPT_INVOCATION_SHORT_NAME}: ${@}" } usage() { echo "Usage:" echo " ${SCRIPT_INVOCATION_SHORT_NAME} functionality [arguments]" echo "" echo "The functionalities are:" echo " restrict-api api-name" echo " unrestrict-api api-name" echo " throttle-api api-name counter-time blocked-time" echo " unthrottle-api api-name" echo " set-api-limit api-name limit" echo " remove-api-limit api-name" echo " add-api apikey api-name" echo " delete-api apikey api-name" echo " block-apikey apikey api-name time" echo " unblock-apikey apikey api-name" echo " set-apikey-limit apikey api-name limit" echo " remove-apikey-limit apikey api-name" echo " clear-database" exit ${1} } number_of_args() { if [ "x${1}" != "x${2}" ]; then msg "incorrect number of arguments" msg "try \"${SCRIPT_INVOCATION_SHORT_NAME} help\" for information." exit 1 fi } restrict-api() { redis-cli set api:${1}:restricted 1 } unrestrict-api() { redis-cli del api:${1}:restricted } throttle-api() { redis-cli set api:${1}:throttled 1 redis-cli set api:${1}:counter:time ${2} redis-cli set api:${1}:blocked:time ${3} } unthrottle-api() { redis-cli del api:${1}:throttled redis-cli del api:${1}:counter:time redis-cli del api:${1}:blocked:time } set-apikey-limit() { redis-cli set api:${1}:default_max ${2} } remove-apikey-limit() { redis-cli del api:${1}:default_max } add-api() { redis-cli set key:${1}:api:${2} 1 } delete-api() { redis-cli del key:${1}:api:${2} } block-apikey() { redis-cli set key:${1}:api:${2}:blocked ${3} 1 } unblock-apikey() { redis-cli del key:${1}:api:${2}:blocked } set-apikey-limit() { redis-cli set key:${1}:ratelimit:${2}:max ${3} } remove-apikey-limit() { redis-cli del key:${1}:ratelimit:${2}:max } clear-database() { redis-cli flushdb } # There must be at least one argument. 
if [ ${#} -eq 0 ]; then
	usage 1
fi

case "${1}" in
	restrict-api)
		number_of_args ${#} 2
		restrict-api ${2}
		;;
	unrestrict-api)
		number_of_args ${#} 2
		unrestrict-api ${2}
		;;
	throttle-api)
		number_of_args ${#} 4
		throttle-api ${2} ${3} ${4}
		;;
	unthrottle-api)
		number_of_args ${#} 2
		unthrottle-api ${2}
		;;
	add-api)
		number_of_args ${#} 3
		add-api ${2} ${3}
		;;
	delete-api)
		number_of_args ${#} 3
		delete-api ${2} ${3}
		;;
	block-apikey)
		number_of_args ${#} 4
		block-apikey ${2} ${3} ${4}
		;;
	unblock-apikey)
		number_of_args ${#} 3
		unblock-apikey ${2} ${3}
		;;
	set-apikey-limit)
		number_of_args ${#} 4
		set-apikey-limit ${2} ${3} ${4}
		;;
	remove-apikey-limit)
		number_of_args ${#} 3
		remove-apikey-limit ${2} ${3}
		;;
	clear-database)
		number_of_args ${#} 1
		clear-database
		;;
	help)
		usage 0
		;;
	*)
		usage 1
		;;
esac

msg "success"
exit ${RETVAL}

# EOF
true
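A possible session with the tool above (the API name, key and limits are invented for illustration; a local redis-server must be reachable by redis-cli):

./apikeys.sh restrict-api payments            # only registered keys may call "payments"
./apikeys.sh add-api 3f2a9cdeadbeef payments
./apikeys.sh set-apikey-limit 3f2a9cdeadbeef payments 1000
./apikeys.sh throttle-api payments 60 300     # 60s counting window, 300s block time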
9ec223f364177c3cecb2ce69c9d66a3af4c24ed6
Shell
daedalus/misc
/ceph_create_pool_image.sh
UTF-8
1,319
3.078125
3
[]
no_license
#!/bin/bash # Author Dario Clavijo 2018 # GPLv3 set -x # set variables POOL=$1 IMG=$2 SIZE=$3 PAGES=$4 USER=$5 LDEV=$6 # example rbd0 FS=$7 # example "mkfs.ext4 -j" MONITOR="172.16.1.1 172.16.2.2" ceph auth del client.$USER ceph osd pool delete $POOL $IMG --yes-i-really-really-mean-it # define pool,image and rbd mapping ceph osd pool create $POOL $PAGES rbd pool init $POOL rbd create --size $SIZE $POOL/$IMG # define user permissions ceph auth get-or-create client.$USER mon "allow r" osd "allow class-read object_prefix rbd_children, allow rwx pool=$POOL" KEY=$(ceph auth get-or-create client.$USER | tail -n 1 | awk '{ print $3 }') # disable incompatible features rbd feature disable $POOL/$IMG fast-diff rbd feature disable $POOL/$IMG deep-flatten rbd feature disable $POOL/$IMG object-map # create the deploy.sh for the client. echo "#!/bin/bash" > deploy_$USER.sh echo "echo deploying ceph..." >> deploy_$USER.sh echo "sudo apt-get install ceph-common -y" >> deploy_$USER.sh echo "sudo modprobe rbd" >> deploy_$USER.sh echo "sudo echo '$MONITOR name=$USER,secret=$KEY $POOL $IMG' | sudo tee /sys/bus/rbd/add" >> deploy_$USER.sh echo "sudo $FS /dev/$LDEV" >> deploy_$USER.sh echo "sudo mkdir /media/$LDEV" >> deploy_$USER.sh echo "sudo mount /dev/$LDEV /media/$LDEV" >> deploy_$USER.sh chmod u+x deploy_$USER.sh
true
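For reference, this is roughly what the generated deploy_$USER.sh would contain for a hypothetical call `./ceph_create_pool_image.sh mypool myimg 10240 128 alice rbd0 "mkfs.ext4 -j"`; the secret is a placeholder for whatever `ceph auth get-or-create` returns:

#!/bin/bash
echo deploying ceph...
sudo apt-get install ceph-common -y
sudo modprobe rbd
sudo echo '172.16.1.1 172.16.2.2 name=alice,secret=<KEY> mypool myimg' | sudo tee /sys/bus/rbd/add
sudo mkfs.ext4 -j /dev/rbd0
sudo mkdir /media/rbd0
sudo mount /dev/rbd0 /media/rbd0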
427b6443cc5cd03cfc1a7ecdf2a05e2adaa60fda
Shell
domcleal/sclo-ror42
/repoclosure/repoclosure.sh
UTF-8
649
4
4
[ "LicenseRef-scancode-warranty-disclaimer", "MIT" ]
permissive
#!/bin/bash set -e if [ $# -lt 2 ]; then echo "Usage: $0 <yum.conf> <url>" exit 1 fi yumorig=$1 url=$2 shift; shift TEMPDIR=$(mktemp -d) trap "rm -rf $TEMPDIR" EXIT # repo names must be unique, or yum will get confused between different OSes and URLs reponame=undertest-$(basename $yumorig .conf)-$(echo $url | cksum | sed 's/ /-/g') yumconf=$TEMPDIR/yum.conf cat $yumorig > $yumconf cat >> $yumconf << EOF [$reponame] name=$reponame gpgcheck=0 baseurl=$url EOF repoclosure -c $yumconf -t -r $reponame $* 2>&1 | tee $TEMPDIR/repoclosure.log if tail -n1 $TEMPDIR/repoclosure.log | grep -q "Num Packages"; then exit 0 else exit 1 fi
true
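An illustrative invocation of the wrapper above (the config file name and repository URL are examples; any extra arguments after the URL are passed straight through to repoclosure):

./repoclosure.sh centos7-base.conf https://repo.example.org/el7/x86_64/ \
    && echo "no unresolved dependencies"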
299eee3386c024fa6fbc65ed2bd7bdeb2297c28b
Shell
parrotmac/heylex.us-agent
/force-update.sh
UTF-8
1,200
3.53125
4
[ "MIT" ]
permissive
#!/usr/bin/env bash SCRIPT_PATH=$(readlink -f "$0") cd /opt/heylex.us-agent/ # Store info about updater and supervisord config INITIAL_UPDATER_HASH=$(shasum force-update.sh) INITIAL_SUPERVISOR_HASH=$(shasum heylex-agent.conf) # If we're up to date, then don't do anything git fetch --all REMOTE_REPO_REVISION=$(git rev-parse origin/master) LOCAL_REPO_REVISION=$(git rev-parse HEAD) if [ "$REMOTE_REPO_REVISION" == "$LOCAL_REPO_REVISION" ]; then exit fi # There are upstream changes # Stomp over any local changes git reset --hard origin/master # If this script was updated, we'll switch to the new version UPDATED_UPDATER_HASH=$(shasum force-update.sh) if [ "$INITIAL_UPDATER_HASH" != "$UPDATED_UPDATER_HASH" ]; then exec "$SCRIPT_PATH" fi # Reload config if Supervisor service definition was updated UPDATED_SUPERVISOR_HASH=$(shasum heylex-agent.conf) if [ "$INITIAL_SUPERVISOR_HASH" != "$UPDATED_SUPERVISOR_HASH" ]; then sudo cp heylex-agent.conf /etc/supervisor/conf.d/heylex-agent.conf sudo supervisorctl reload heylex-agent fi # Ensure Node deps are taken care of npm install # Restart our service now that repo has been updated sudo supervisorctl restart heylex-agent
true
268ae9f9464196c2c523e5fcdb123592769c6322
Shell
chengfengliu/nwu-knowleadge
/restart.sh
UTF-8
180
2.59375
3
[]
no_license
#!/bin/bash PID=`lsof -t -i:80` PID2=`lsof -t -i:443` git pull kill -9 $PID kill -9 $PID2 echo "killed http:80 $PID and killed https:443 $PID2" nohup npm run build & echo "started"
true
4dfb2260bb6f47aa4af02da5fb5b29ae51a63bc9
Shell
udit7395/scripts
/getDataWithoutRoot.sh
UTF-8
1,061
3.59375
4
[ "MIT" ]
permissive
#!/bin/bash ##For More Information:http://blog.shvetsov.com/2013/02/access-android-app-data-without-root.html ##Script is Written By udit7395 ##HOW TO USE: ./getDataWithoutRoot.sh <packagename> #NOTE: This method doesn't work if application developer has explicitly disabled ability #to backup his app by setting android:allowBackup="false" in the application manifest. if [ -z "$1" ] then echo "No packagename supplied" echo "Input as ----> ./getDataWithoutRoot.sh <packagename>" else path=$(pwd)/$1_$(date +%d_%m_%Y_%H_%M_%S) mkdir "$path" mkdir "$path/databases" mkdir "$path/sharedprefrences" mkdir backup cd backup || exit echo "Do not enter any Password" adb backup -f data.ab -noapk "$1" if [ -f data.ab ]; then echo "File found!" fi dd if=data.ab bs=1 skip=24 | python -c "import zlib,sys;sys.stdout.write(zlib.decompress(sys.stdin.read()))" | tar -xvf - cd .. echo "$path" rsync --progress backup/apps/"$1"/db/*.db "$path"/databases rsync --progress backup/apps/"$1"/sp/*.xml "$path"/sharedprefrences rm -rf backup fi
true
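A hedged usage sketch (the package name is an example; the device needs USB debugging enabled and the app must not set android:allowBackup="false", as the note above warns):

./getDataWithoutRoot.sh com.example.notes
# Confirm the backup prompt on the device without entering a password.
# Afterwards the data sits in a timestamped folder next to the script:
ls com.example.notes_*/databases com.example.notes_*/sharedprefrences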
9a70e1ccd2f3d48ce959f01ee6edc8ac356bd3cb
Shell
ShalokShalom/plan.sh
/msgpack-c/plan.sh
UTF-8
798
2.984375
3
[ "Apache-2.0" ]
permissive
pkg_origin= pkg_name=msgpack-c _pkg_name=msgpack pkg_version=2.1.3 pkg_description="Efficient binary serialization format, which lets you exchange data among multiple languages like JSON" pkg_upstream_url="https://github.com/msgpack/msgpack-c" pkg_license=('GPL3') pkg_deps=('gnutls' 'readline' 'python3-cython') pkg_build_deps=('cmake') pkg_source=("https://github.com/msgpack/msgpack-c/releases/download/cpp-${pkg_version}/${_pkg_name}-${pkg_version}.tar.gz") pkg_shasum=('e604cee971f20f8b4f885163deca2062') do_build() { mkdir -p build cd build cmake ../${_pkg_name}-${pkg_version} \ -DCMAKE_BUILD_TYPE=Release \ -DCMAKE_INSTALL_PREFIX=/usr \ -DMSGPACK_CXX11=ON \ -DMSGPACK_BUILD_EXAMPLES=OFF make } do_package() { cd build make DESTDIR=${pkg_prefix} install }
true
c0e3d89780be4b8f6044778ee4dfc1eeb1951d61
Shell
subnut/dotfiles-artix-s6
/misc_files/battime.sh
UTF-8
285
2.984375
3
[]
no_license
#!/usr/bin/zsh () { local minutes (( \ minutes = $(cat /sys/class/power_supply/BAT0/energy_now) \ * 60 / $(cat /sys/class/power_supply/BAT0/power_now) \ )) printf '%02d%% (%02d:%02d)\n' \ $(cat /sys/class/power_supply/BAT0/capacity) \ $(( minutes / 60 )) \ $(( minutes % 60 )) }
true
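As a worked example of the arithmetic above (the numbers are hypothetical; on Linux, energy_now is typically in microwatt-hours and power_now in microwatts): with energy_now = 24,000,000, power_now = 8,000,000 and capacity = 75, minutes = 24000000 * 60 / 8000000 = 180, so the script would print:

75% (03:00)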
5469628356cca021ff578637da1a2bdc3a520967
Shell
arnoldmontiel/pelicanoc
/protected/commands/shell/networkEditor.sh
UTF-8
1,842
3.390625
3
[]
no_license
#!/bin/bash if [ $# -ne 7 ] then echo "Invalid argumets" echo "Count: "$# exit 1 fi echo "Params were" echo address =$1 echo method =$2 echo netmask =$3 echo network =$4 echo broadcast=$5 echo gateway =$6 echo nameservers =$7 ADDRESS=$1 METHOD=$2 NETMASK=$3 NETWORK=$4 BROADCAST=$5 GATEWAY=$6 NAMESERVERS=$7 INTERFACE=eth0 ETH0_IFACE=`augtool match /files/etc/network/*/iface/ eth0` if [ -n "$ETH0_IFACE" ]; then echo "Updating existing item" else echo "looking for em1" ETH0_IFACE=`augtool match /files/etc/network/*/iface/ em1` INTERFACE=em1 fi if [ -n "$ETH0_IFACE" ]; then if [ -n "$ADDRESS" ]; then augtool <<-EOF set $ETH0_IFACE/address ${ADDRESS} save quit EOF else augtool <<-EOF rm $ETH0_IFACE/address save quit EOF fi if [ -n "$METHOD" ]; then augtool <<-EOF set $ETH0_IFACE/method ${METHOD} save quit EOF else augtool <<-EOF rm $ETH0_IFACE/method save quit EOF fi if [ -n "$NETMASK" ]; then augtool <<-EOF set $ETH0_IFACE/netmask ${NETMASK} save quit EOF else augtool <<-EOF rm $ETH0_IFACE/netmask save quit EOF fi if [ -n "$NETWORK" ]; then augtool <<-EOF set $ETH0_IFACE/network ${NETWORK} save quit EOF else augtool <<-EOF rm $ETH0_IFACE/network save quit EOF fi if [ -n "$BROADCAST" ]; then augtool <<-EOF set $ETH0_IFACE/broadcast ${BROADCAST} save quit EOF else augtool <<-EOF rm $ETH0_IFACE/broadcast save quit EOF fi if [ -n "$GATEWAY" ]; then augtool <<-EOF set $ETH0_IFACE/gateway ${GATEWAY} save quit EOF else augtool <<-EOF rm $ETH0_IFACE/gateway save quit EOF fi if [ -n "$NAMESERVERS" ]; then augtool <<-EOF set $ETH0_IFACE/dns-nameservers "${NAMESERVERS}" save quit EOF else augtool <<-EOF rm $ETH0_IFACE/dns-nameservers save quit EOF fi sudo ifdown ${INTERFACE} && sudo ifup ${INTERFACE} else echo "ERROR ETH0 or EM1 not found" fi exit 0
true
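An illustrative call with all seven required positional arguments (the addresses are examples only; passing an empty string for a field takes the rm branch and removes that setting):

sudo ./networkEditor.sh 192.168.1.50 static 255.255.255.0 192.168.1.0 \
    192.168.1.255 192.168.1.1 "8.8.8.8 8.8.4.4"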
7c32ae550351f87824f9b9ea1dab54f023188ed2
Shell
dingyu1/business-tests
/auto-test/nic/onboard/nic_basic_function/nic_basic_Identification/onboard_NIC_BASIC_Identification_001.sh
UTF-8
6,095
3.359375
3
[]
no_license
#!/bin/bash
#*****************************************************************************************
# *Test case name: NIC_BASIC_Identification_001
# *Test case function: NIC identification test
# *Author: cwx620666
# *Completion date: 2019-5-8
# *Preconditions:
#   1. One server with an operating system installed
#   2. One NIC under test
# *Test steps:
#   1. Use lspci to check the NIC device
#   2. Use ip a to check the NIC MAC and IP information
#   3. Assign an IP to the network port and ping the network (the port already has an IP, so no extra setup is needed)
# *Expected results:
#   1. After power-on the server detects the NIC and its network ports normally
#   2. Ping succeeds with no packet loss
#   3. All PCIe slots of the server are traversed without any abnormality
#*****************************************************************************************
# Load common functions
. ../../../../../utils/error_code.inc
. ../../../../../utils/test_case_common.inc
. ../../../../../utils/sys_info.sh
. ../../../../../utils/sh-test-lib
. ../../../../../utils/env_parameter.inc

# Use the script name as the test case name
test_name=$(basename $0 | sed -e 's/\.sh//')
# Create the log directory
TMPDIR=./logs/temp
mkdir -p ${TMPDIR}
# Stores intermediate states/values of the script
TMPFILE=${TMPDIR}/${test_name}.tmp
# Stores the result of each test step
RESULT_FILE=${TMPDIR}/${test_name}.result

test_result="pass"

# Preconditions
function init_env()
{
    # Check whether the result file exists; create it if needed:
    fn_checkResultFile ${RESULT_FILE}

    # Must run as root
    if [ `whoami` != 'root' ]
    then
        PRINT_LOG "INFO" "You must be root user "
        fn_writeResultFile "${RESULT_FILE}" "Run as root" "fail"
        return 1
    fi
}

function check_ip()
{
    IP=$1
    VALID_CHECK=$(echo $IP|awk -F. '$1<=255&&$2<=255&&$3<=255&&$4<=255{print "yes"}')
    if echo $IP|grep -E "^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}$">/dev/null;then
        if [ "${VALID_CHECK:-no}" == "yes" ];then
            echo "IP $IP available"
            PRINT_LOG "INFO" "$IP is available"
            fn_writeResultFile "${RESULT_FILE}" "IP_info" "pass"
        else
            echo "IP $IP not available"
            PRINT_LOG "INFO" "$IP is not available"
            fn_writeResultFile "${RESULT_FILE}" "IP_info" "fail"
        fi
    else
        echo "IP $IP format error"
        PRINT_LOG "INFO" "IP format error"
        fn_writeResultFile "${RESULT_FILE}" "IP_info_format" "fail"
    fi
}

#************************************************************#
# Name        : verify_connect                               #
# Description : verify the network connection                #
# Parameters  : none                                         #
# return value: none                                         #
#************************************************************#
function verify_connect(){
    network=$1
    IP_table=`cat ../../../../../utils/env_parameter.inc`
    debug=false
    if [ $debug = true ];then
         cat << EOF > IP_table.txt
client_ip_10=192.168.1.3
client_ip_20=192.168.10.3
client_ip_30=192.168.20.3
client_ip_40=192.168.30.3
#client_ip_50=192.168.50.11
server_ip_10=192.168.1.6
server_ip_20=192.168.10.6
server_ip_30=192.168.20.6
server_ip_40=192.168.30.6
#server_ip_50=192.168.50.12
EOF
        IP_table=`cat IP_table.txt`
    fi
    local_ip=`ip address show $network | grep -w inet | awk -F'[ /]+' '{print $3}'`
    remote_ip=`echo $IP_table | sed 's/ /\n/g' | grep -w ${local_ip%.*} | grep -v $local_ip | awk -F = '{print $2}'`
    echo $remote_ip
    ping $remote_ip -c 5
    if [ $? -eq 0 ]
    then
        PRINT_LOG "INFO" "$remote_ip connect properly."
        fn_writeResultFile "${RESULT_FILE}" "${remote_ip}_connect" "pass"
    else
        PRINT_LOG "FATAL" "$remote_ip connect is not normal."
        fn_writeResultFile "${RESULT_FILE}" "${remote_ip}_connect" "fail"
        ip a
        echo "ping test"
        ping $remote_ip -c 5
        return 1
    fi
    sleep 5
}

# Test execution
function test_case()
{
    fn_get_physical_network_card network_interface_list
    echo "$network_interface_list"
    for t in $network_interface_list
    do
        echo "$t"
        bus_info=`ethtool -i $t|grep "bus-info"|awk -F":" '{print $3":"$4}'`
        if [ ${#bus_info} -eq 7 ];then
            echo "$t bus-info $bus_info length is 7"
            PRINT_LOG "INFO" "$t bus-info $bus_info length is 7"
            fn_writeResultFile "${RESULT_FILE}" "${t}_bus_info" "pass"
        else
            echo "$t bus-info $bus_info length is not 7"
            PRINT_LOG "INFO" "$t bus-info $bus_info length is not 7"
            fn_writeResultFile "${RESULT_FILE}" "${t}_bus_info" "fail"
        fi
        lspci -v -s $bus_info
        echo "$t interface lspci info"
        PRINT_LOG "INFO" "$t interface lspci info"
        mac_info=`ip a show $t |grep "link/ether"|awk '{print $2}'`
        if [ ${#mac_info} -eq 17 ];then
            echo "$t mac is $mac_info length is 17"
            PRINT_LOG "INFO" "$t mac is $mac_info length is 17"
            fn_writeResultFile "${RESULT_FILE}" "mac_info" "pass"
        else
            echo "$t mac is $mac_info length is not 17"
            PRINT_LOG "FATAL" "$t mac is $mac_info length is not 17"
            fn_writeResultFile "${RESULT_FILE}" "mac_info" "fail"
        fi
        ipv4_info=`ip a show $t |grep -w "inet"|awk '{print $2}'|awk -F"/" '{print $1}'`
        echo $ipv4_info
        check_ip $ipv4_info
        # Verify the network connection status
        verify_connect $t
    done
    # Check the result file; if any test item failed, set test_result to fail
    check_result ${RESULT_FILE}
}

# Restore the environment
function clean_env()
{
    # Remove temporary files
    FUNC_CLEAN_TMP_FILE
}

function main()
{
    init_env || test_result="fail"
    if [ ${test_result} = "pass" ]
    then
        test_case || test_result="fail"
    fi
    clean_env || test_result="fail"
    [ "${test_result}" = "pass" ] || return 1
}

main $@
ret=$?
# Result-reporting interface of the LAVA platform, do not modify
lava-test-case "$test_name" --result ${test_result}
exit ${ret}
true
4293ac5d912903cac674c527a4ae0e23c51b515c
Shell
whatwewant/whatwewant.github.io
/scripts/config_fish.sh
UTF-8
1,049
3.328125
3
[]
no_license
#!/bin/bash # ########################################### # # Office: http://fishshell.com/ # ########################################### set -e downloadTool="sudo apt-get install -y" PACKAGE_DIR=/tmp/src PROGRAM_NAME=tmux SRC_DIR=$PACKAGE_DIR SRC_DIR_FINAL=${SRC_DIR}/$PROGRAM_NAME URL="https://github.com/fish-shell/fish-shell.git" BINARY_DIR=/usr/local/sbin # Config Options #CONFIG_OPTIONS=" # --user=$USER # --prefix=$PREFIX # --with-http_ssl_module # --with-http_realip_module # --with-ipv6 #" sudo apt-get install -y build-essential \ ncurses-dev libncurses5-dev gettext if [ ! -d $PACKAGE_DIR ]; then mkdir -p $SRC_DIR else test -d $SRC_DIR_FINAL && rm -rf $SRC_DIR_FINAL fi # src echo "Cloning $PROGRAM_NAME to $SRC_DIR_FINAL ..." git clone $URL $SRC_DIR_FINAL # configure cd $SRC_DIR_FINAL autoconf ./configure # make make # install sudo make install # change shell chsh -s /usr/local/bin/fish # config fish shell # fish_config # To scan your man pages for completions, run 'fish_update_completions'
true
ffdc915aa95c2c856621c3d51843c48369a590a3
Shell
iimuz/dotfiles
/setup.sh
UTF-8
2,494
4.15625
4
[]
no_license
# Setup script.
set -eu

# Create symlink if link does not exist.
function create_symlink() {
  local readonly src=$1
  local readonly dst=$2

  if [ -e $2 ]; then
    echo "already exist $2"
    return 0
  fi

  echo "symlink $1 to $2"
  mkdir -p $(dirname "$dst")
  ln -s $src $dst
}

# Add loading file in .bashrc or .zshrc.
function set_bashrc() {
  local readonly filename="$1"
  if [[ "$SHELL" == *zsh* ]]; then
    # zsh is in use, so the rc file is different
    local readonly rcfile="$HOME/.zshrc"
  else
    # assume bash
    local readonly rcfile="$HOME/.bashrc"
  fi

  # if the setting already exists in the rc file, do nothing.
  if grep $filename -l $rcfile > /dev/null 2>&1; then
    echo "already setting in $rcfile: $filename"
    return 0
  fi

  # Add file path.
  echo "set load setting in $rcfile: $filename"
  echo -e "if [ -f \"${filename}\" ]; then . \"${filename}\"; fi\n" >> $rcfile
}

# === Common path settings
readonly SCRIPT_DIR=$(cd $(dirname ${BASH_SOURCE:-0}); pwd)
readonly CONFIG_PATH=$SCRIPT_DIR/.config

# === Install [homebrew](https://brew.sh/index_ja)
if ! type brew > /dev/null 2>&1; then
  /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
fi

# === Install software
# Append the settings needed to use homebrew and reload the rc file
# set_bashrc $CONFIG_PATH/homebrew/homebrew-bundle.sh
# if [[ "$SHELL" == *zsh* ]]; then
#   # zsh is in use, so the rc file is different
#   echo "Use zsh"
#   source ~/.zshrc
# else
#   echo "Use bash"
#   # assume bash
#   source ~/.bashrc
# fi
# # Install the various packages via homebrew
# brew bundle

# Place the various config files, or set them up to be sourced
set_bashrc $CONFIG_PATH/rc-settings.sh

# === bash
if type bash > /dev/null 2>&1; then
  create_symlink $SCRIPT_DIR/.inputrc $HOME/.inputrc
fi

# === git
if type git > /dev/null 2>&1; then
  create_symlink $SCRIPT_DIR/.gitconfig $HOME/.gitconfig
  create_symlink $SCRIPT_DIR/.config/git/ignore $HOME/.config/git/ignore
fi

# === neovim
if type nvim > /dev/null 2>&1; then
  create_symlink $SCRIPT_DIR/.config/nvim $HOME/.config/nvim
fi

# === tmux
if type tmux > /dev/null 2>&1; then
  create_symlink $SCRIPT_DIR/.tmux.conf $HOME/.tmux.conf
fi

# === vim
if type vim > /dev/null 2>&1; then
  create_symlink $SCRIPT_DIR/.config/nvim/init.vim $HOME/.vimrc
  create_symlink $SCRIPT_DIR/.config/nvim $HOME/.config/vim
fi
true
716ff3ecf51578af21fb17e99ae85e7c6f6584d7
Shell
scanner-research/scanner-benchmarks
/scale/cinematography_shutdown_nodes_gpu.sh
UTF-8
672
3.21875
3
[]
no_license
if [ -z "$1" ] then echo "Usage: cinematography_spawn_nodes.sh <start_node> <num_nodes>" exit fi trap "trap - SIGTERM && kill -- -$$" SIGINT SIGTERM EXIT start_node=$1 num_nodes=$2 num_simul=100 max=$(($num_nodes + $num_simul - 1)) for j in `seq $start_node $num_simul $max`; do end=$(($j + $num_simul)) m=$(( $num_nodes < $end ? $num_nodes : $end )) echo "Destroying nodes $j through $m..." for i in `seq $j 1 $m`; do yes | gcloud compute --project "visualdb-1046" \ instances delete "scanner-apoms-$i" \ --zone "us-east1-d" \ --delete-disks=all & done wait done
true
9daaaa5d0155ae79c5fc958d05891c2a49da617b
Shell
mirounga/gatk
/scripts/spark_eval/copy_small_to_hdfs.sh
UTF-8
1,480
3.46875
3
[ "Apache-2.0", "LicenseRef-scancode-warranty-disclaimer" ]
permissive
#!/usr/bin/env bash # Download all required data for small BAM and store in HDFS. Use this for non-GCS clusters. TARGET_DIR=${1:-small_spark_eval} hadoop fs -stat $TARGET_DIR > /dev/null 2>&1 if [ $? -eq 0 ]; then echo "$TARGET_DIR already exists. Delete it and try again." exit 1 fi set -e set -x # Create data directory in HDFS hadoop fs -mkdir -p $TARGET_DIR # Download exome BAM gsutil cp gs://broad-spark-eval-test-data/small/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.bam - | hadoop fs -put - $TARGET_DIR/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.bam gsutil cp gs://broad-spark-eval-test-data/small/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.bam.bai - | hadoop fs -put - $TARGET_DIR/CEUTrio.HiSeq.WGS.b37.NA12878.20.21.bam.bai # Download reference gsutil cp gs://broad-spark-eval-test-data/small/human_g1k_v37.20.21.2bit - | hadoop fs -put - $TARGET_DIR/human_g1k_v37.20.21.2bit gsutil cp gs://broad-spark-eval-test-data/small/human_g1k_v37.20.21.dict - | hadoop fs -put - $TARGET_DIR/human_g1k_v37.20.21.dict gsutil cp gs://broad-spark-eval-test-data/small/human_g1k_v37.20.21.fasta.fai - | hadoop fs -put - $TARGET_DIR/human_g1k_v37.20.21.fasta.fai gsutil cp gs://broad-spark-eval-test-data/small/human_g1k_v37.20.21.fasta - | hadoop fs -put - $TARGET_DIR/human_g1k_v37.20.21.fasta # Download known sites VCF gsutil cp gs://broad-spark-eval-test-data/small/dbsnp_138.b37.20.21.vcf - | hadoop fs -put - $TARGET_DIR/dbsnp_138.b37.20.21.vcf # List data hadoop fs -ls -h $TARGET_DIR
true
147fd1290a233aae8f7ca5c2c82424e2fbc32942
Shell
rmathew/ICFPC
/2012/src/test.sh
UTF-8
2,054
2.9375
3
[]
no_license
#!/bin/bash function chk_prog() { make -s $1 if [ "$?" != "0" ] then echo ERROR: Could not create program \"$1\". exit 1 fi } chk_prog checker chk_prog pqtest RET_STATUS=0 TEST_NUM=0 function chk_pqueue() { TEST_NUM=`expr $TEST_NUM + 1` ./pqtest if [ "$?" == "0" ] then echo Test $TEST_NUM \(pqueue\): PASSED else echo Test $TEST_NUM \(pqueue\): FAILED RET_STATUS=1 fi } function chk_map() { TEST_NUM=`expr $TEST_NUM + 1` ./checker ../task/maps/$1.map $2 $3 $4 if [ "$?" == "0" ] then echo Test $TEST_NUM \($1.map\): PASSED else echo Test $TEST_NUM \($1.map\): FAILED RET_STATUS=1 fi } chk_pqueue chk_map sample DDDLLLLLLURRRRRRRRRRLLLLLLDDDDDDDDLLLRRRRRRRRDDRRRD WON 399 chk_map rocks_fall1 L LOST -1 chk_map rocks_fall2 L ABORTED -1 chk_map rocks_fall3 W ABORTED -1 chk_map rocks_fall4 D LOST -1 chk_map contest1 LDRDDUULLLDDL WON 212 chk_map contest1 DD LOST -2 chk_map contest1 DLLLDD ABORTED 94 chk_map contest2 RRUDRRULURULLLLDDDL WON 281 chk_map contest2 RRRRUD LOST 19 chk_map contest3 LDDDRRRRDDLLLLLDURRRUURRR WON 275 chk_map contest4 URDDLRDRDRDRRLUUURUULURDR WON 575 chk_map contest5 LLUURUUUUURRRRRDLDRRRDDDDDLLRRUUUUULLLLLLDDDRRDDD WON 1301 chk_map contest6 RUULRRRRRRRRRRUUURRDDDDDLLLLLLLLLRRRRRRRRRRUUUUULUURUUULULLULLLLLLLLLDRDLDDDDRUURRRRRUDLLLLLLDDDDRRRRRRRR WON 1170 chk_map contest7 RDRRRDDLLLDDLLRRURURRRDDRRRLLLULL WON 867 chk_map contest8 UUUUURLLWRUUUUUUULLLLLUUULLLLRRDDDDDDLDURLLDDUURRDDDDLLRRRUUUUUUUUUURRDDDDDDDDDDRRRDDDDLLLLLRRRRRUURRRRRRRRRLUURUUUULLULLLLLDDDDDDDDDDRRRRRLLLLLD WON 1955 chk_map contest9 LURURRRDULLLDDRRRRRRRRRRRRRRRULURRRRRRRRDDDLLLULDUULLLLLRRLLULLLLURRRLLUUUULLLLLLURRUULLLDDLLRUURRRRRUURRRRURRDDDRRRDDRRRURRDDUULLDLLLUULLLUUULLLLLLU WON 3076 chk_map contest10 UUUUULLLLLLLLLLDDLLLLUUULLLLDDLLLLLDRRRRRUUULLLLLLLLUURRRRUUUUULLUULLUURRRLUUURDDRURRRRRUURRRRRRRRRRRRRRRRRDDDDDLLUUULLLLULLLDDDRRUDLLLDDDDDLLDDURRDLDURUURRDDRRRRRUURRDRLULLDDDDLLLDDDDURRDLDDRRRRRDDD WON 3626 chk_map contest10 UUUUUUUUUUULRDD LOST 110 exit "$RET_STATUS"
true
5a00c156343a13f205098823c52e908385cbf5bf
Shell
akkaze/dgemm
/small_test.sh
UTF-8
290
2.96875
3
[]
no_license
#!/bin/bash maxsize=16 tester=./unit_test i=1 while [ $i -le $maxsize ]; do j=1 while [ $j -le $maxsize ]; do k=1 while [ $k -le $maxsize ]; do echo "$tester -m $i -n $j -k $k" $tester -m $i -n $j -k $k k=`expr $k + 1` done j=`expr $j + 1` done i=`expr $i + 1` done
true
5879f7da5ee14af996b87b46371e276828d73875
Shell
cv1002/poseidon
/ci/build.sh
UTF-8
314
2.578125
3
[ "BSD-3-Clause" ]
permissive
#!/bin/bash -xe # setup export CXX=${CXX:-"g++"} export CXXFLAGS='-O2 -g0 -std=gnu++14 -fno-gnu-keywords' # build ${CXX} --version mkdir -p m4 autoreconf -ifv ./configure --disable-silent-rules --enable-debug-checks --disable-static make -j$(nproc) # test make -j$(nproc) check || (cat ./test-suite.log; false)
true
41c89713e593bf3048cd9997bcd4b4a9e27cb2f1
Shell
iocanel/dotfiles
/scripts/scripts/work/collect-cve.sh
UTF-8
915
3.765625
4
[]
no_license
#!/bin/bash TARGET=$1 if [ ! -f "$TARGET" ]; then echo "|id|cve|component|description|bugzilla link|jira link|comments|" > $TARGET echo "|-" >> $TARGET fi mu find from:bugzilla and subject:CVE | awk -F '[ :\\]\\[]' '{print $13}' | sort | uniq | while read id; do # Skip ids that already exist EXISTS=`cat $TARGET | grep "$id"` if [ -n "$EXISTS" ]; then continue fi # Get the line and remove: `New:` and `EMBARGOED` occurances. STR=`mu find from:bugzilla and subject:CVE | grep $id | sed -e 's/New: //g' | sed -e 's/EMBARGOED //g' | head -n 1` CVE=`echo $STR | awk -F '[ :\\\]\\\[]' '{print $15}'` COMPONENT=`echo $STR | awk -F '[ :\\\]\\\[]' '{print $16}'` DESCRIPTION=`echo $STR | awk -F '[ :\\\]\\\[]' '{for(i=17;i<=NF;i++){printf "%s ", $i}; printf "\n"}'` BUGZILLA_LINK="https://bugzilla.redhat.com/show_bug.cgi?id=$id" echo "|$id|$CVE|$COMPONENT|$DESCRIPTION|$BUGZILLA_LINK|||" >> $TARGET done
true
bea4af540e6e443e66b4fb3dfb8e48deb39868c0
Shell
4moms/dotfiles
/.bash_profile
UTF-8
5,447
3.609375
4
[]
no_license
# Settings for all shells #Environment variables export EDITOR=vim export VISUAL=$EDITOR export PAGER=less export CVS_RSH=ssh export LESS="-RM" export NODE_PATH=/usr/local/lib/node_modules export PATH=$HOME/bin:$HOME/bin/bin:/usr/local/share/npm/bin:/usr/local/share/python:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin # Find chruby share directory for dir in {/usr/local,}/opt/chruby/share/chruby ; do if [[ -d "$dir" ]] ; then chruby_dir="$dir" break fi done # If chruby dir was found, set up chruby if [ -d "$chruby_dir" ] ; then . $chruby_dir/chruby.sh # Set up chruby version from one of three places: # 1. (preferred): file ~/.ruby-version # 2. Last version installed in $HOME/.rubies # 3. Last resort: hard-coded version if [ -f ~/.ruby-version ] ; then use_chruby_version=$(<~/.ruby-version) unset chruby_defaulted elif [ -d ~/.rubies ] ; then use_chruby_version=$(ls -dt1 $HOME/.rubies/* | awk -F/ '{print $(NF)}' | sed 1q) chruby_defaulted='latest version found' fi if [ -z "$use_chruby_version" ] ; then use_chruby_version=1.9.3-p392 chruby_defaulted='one that seemed good' fi chruby "$use_chruby_version" fi # Settings for interactive shells # return if not interactive [[ $- != *i* ]] && return # set a default terminal type for deficient systems or weird terminals tput longname >/dev/null 2>&1 || export TERM=xterm warn() { tput setaf 1 >&2 printf '%s\n' "$*" tput sgr0 >&2 } ## Set up $dotfiles directory # returns true if the program is installed installed() { hash "$1" >/dev/null 2>&1 } # OSX `readlink` doesn't support the `-f` option (-f = follow components to make full path) # If `greadlink` is installed, use it # Otherwise, use the dir and basename provided to construct a sufficient stand-in relative_readlink() { local dir="$1" base="$2" if installed greadlink ; then dirname "$(greadlink -f "$dir/$base")" elif pushd "$dir" >/dev/null 2>&1 ; then local link="$(readlink "$base")" case "$link" in /*) dirname "$link" ;; *) pushd "$(dirname "$link")" >/dev/null 2>&1 ; pwd -P ; popd >/dev/null ;; esac popd >/dev/null fi } if [[ -L "$HOME/.bash_profile" ]] ; then dotfiles="$(relative_readlink "$HOME" .bash_profile)" fi if [[ -z "$dotfiles" ]] || [[ ! -d "$dotfiles" ]] ; then #warn "~/.bash_profile should be a link to .bash_profile in the dotfiles repo" dotfiles=$HOME/Code/dotfiles fi # Finish if we couldn't find our root directory if [[ -z "$dotfiles" ]] || [[ ! -d "$dotfiles" ]] ; then warn "Couldn't find root of dotfiles directory. Exiting .bash_profile early." return fi export DOTFILES="$dotfiles" if [ -n "$chruby_defaulted" ] ; then warn "chruby version defaulted to $chruby_defaulted: $use_chruby_version" fi . $dotfiles/app-navigation.bash #running_modern_bash && shopt -s autocd # History settings # ignoreboth=ignoredups:ignorespace # ignoredups = ignore duplicate commands in history # ignorespace = ignore commands that start with space HISTCONTROL=ignoreboth # Save (effectively) all commands ever HISTSIZE=10000000 HISTFILESIZE=10000000 # only append the history at the end (shouldn't actually be needed - histappend) shopt -s histappend [ -d "$chruby_dir" ] && . 
$chruby_dir/auto.sh # Bash case "$(uname)" in *Darwin*) ls_options=-lahG ;; *) ls_options=-lah ;; esac function onport() { (( $# )) || set -- 3000 lsof -Pni :$* } ## only binds the given termcap entr(y|ies) to a widget if the terminal supports it termcap_bind() { local widget=$1 key termcap shift for termcap ; do key="$(tput $termcap)" [ -n "$key" ] && bind "\"$key\": $widget" done } # Search history termcap_bind history-search-backward cuu1 kcuu1 termcap_bind history-search-forward cud1 kcud1 # Simulate Zsh's preexec hook (see: http://superuser.com/a/175802/73015 ) # (This performs the histappend at a better time) simulate_preexec() { [ -n "$COMP_LINE" ] || # skip if doing completion [ "$BASH_COMMAND" = "$PROMPT_COMMAND" ] || # skip if generating prompt history -a } trap simulate_preexec DEBUG #command prompt customization prompt() { local last_status=$? local WHITE="\[\033[1;37m\]" local GREEN="\[\033[0;32m\]" local CYAN="\[\033[0;36m\]" local GRAY="\[\033[0;37m\]" local BLUE="\[\033[0;34m\]" local LIGHT_BLUE="\[\033[1;34m\]" local YELLOW="\[\033[1;33m\]" local RED="\[\033[1;31m\]" local no_color='\[\033[0m\]' local time="${YELLOW}\d \@$no_color" local whoami="${GREEN}\u@\h$no_color" local dir="${CYAN}\w$no_color" local branch if git rev-parse --git-dir >/dev/null 2>/dev/null ; then branch=$(git branch | awk '/^\*/ { print $2 }') branch="${branch:+$LIGHT_BLUE$branch }" else unset branch fi local driver if test -n "$M_DRIVER" ; then driver="$LIGHT_BLUE($M_DRIVER)" else driver="${RED}NO DRIVER" fi local last_fail if test $last_status -ne 0 ; then last_fail="=> ${YELLOW}Err: $last_status${no_color}\n" else unset last_fail fi PS1="\n$time $whoami $branch$dir\n$last_fail$no_color\$ " } PROMPT_COMMAND=prompt # retain $PROMPT_DIRTRIM directory components when the prompt is too long PROMPT_DIRTRIM=3 wedidit() { open ~/dora-we-did-it.webm } # Load completion files from $dotfiles/completion/{function}.bash for script in "$dotfiles/completion/"*.bash ; do . "$script" > /dev/null 2>&1 done source "$dotfiles/.aliases"
true
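The simulate_preexec hook above works because bash runs a DEBUG trap before every command; a stripped-down, stand-alone sketch of the same pattern (independent of the rest of this dotfile) would be:

#!/bin/bash
shopt -s histappend
preexec() {
    [ -n "$COMP_LINE" ] && return                        # skip during completion
    [ "$BASH_COMMAND" = "$PROMPT_COMMAND" ] && return    # skip prompt generation
    history -a                                           # flush history before the command runs
}
trap preexec DEBUG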
a10b203575895ee9e459a0004bab6fb4d8df84b3
Shell
skial/haxe.io
/test.components.sh
UTF-8
712
2.640625
3
[]
no_license
INPUT="$@" #echo $INPUT STR="${INPUT:3}" BIN=./bin$STR BIN_DIR=${BIN%/*} BIN_BASE="${BIN/$BIN_DIR/}" BIN_BASE="${BIN_BASE/.md/}" MIN=./min$STR MIN_DIR=${MIN%/*} MIN_BASE="${MIN/$MIN_DIR/}" MIN_BASE="${MIN_BASE/.md/}" OPT=./opt$STR OPT_DIR=${OPT%/*} OPT_BASE="${OPT/$OPT_DIR/}" OPT_BASE="${OPT_BASE/.html/}" OPT_CRIT=$OPT_DIR$OPT_BASE.crt.html OPT_TWIN=$OPT_DIR$OPT_BASE.opt.html mkdir -p $BIN_DIR mkdir -p $MIN_DIR mkdir -p $OPT_DIR electron --enable-logging ./render/index.js \ -md markdown-it-abbr markdown-it-attrs markdown-it-emoji markdown-it-footnote \ markdown-it-headinganchor \ -s ./render/build.js -r src -i $INPUT -o $BIN_DIR$BIN_BASE/index.html -b src \ -j ./src/templates/tests/test.json --show
true
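To make the parameter expansions above concrete, a hypothetical input of src/posts/example.md would resolve roughly as follows (traced by hand, not output of the script):

# INPUT   = src/posts/example.md
# STR     = /posts/example.md        (${INPUT:3} drops the leading "src")
# BIN     = ./bin/posts/example.md   BIN_DIR = ./bin/posts   BIN_BASE = /example
# MIN_DIR = ./min/posts              OPT_DIR = ./opt/posts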
7dfbe9aa8e79d842ded73ab82bc045e36b5d6c36
Shell
studioemma/vagrant-manifests
/beanstalkd/install.sh
UTF-8
668
3.375
3
[ "MIT" ]
permissive
#!/bin/bash beanstalkd_basedir=$(dirname $(readlink -f $0)) beanstalkd_calldir=$(pwd) cd "$beanstalkd_basedir" set -e apt-get install -y beanstalkd if which php > /dev/null 2>&1; then # install phpbeanstalkdadmin ( cd /var/www; git clone https://github.com/mnapoli/phpBeanstalkdAdmin.git phpbeanstalkdadmin ) chown vagrant:vagrant -R /var/www/phpbeanstalkdadmin if which nginx > /dev/null 2>&1; then install -Dm644 files/phpbeanstalkdadmin.nginx.conf \ /etc/nginx/sites-enabled/phpbeanstalkdadmin.conf systemctl restart nginx fi fi systemctl restart beanstalkd systemctl enable beanstalkd cd "$beanstalkd_calldir"
true
ee4275e78b40c66fab97aae243f83b9592c833ab
Shell
frncmx/my-arch-linux
/first-boot/post-chroot
UTF-8
1,364
3.53125
4
[ "MIT" ]
permissive
#!/usr/bin/env bash set -x set -euo pipefail : "${NEW_HOSTNAME?"Please, provide your desired hostname."}" # Sanity: Check network connection. ping -c3 archlinux.org ### Localization settings ### ln -f -s /usr/share/zoneinfo/Europe/Budapest /etc/localtime hwclock --systohc --utc eng_locale='en_US' sed -i "s/^#${eng_locale}/${eng_locale}/" /etc/locale.gen hun_locale='hu_HU' sed -i "s/^#${hun_locale}/${hun_locale}/" /etc/locale.gen locale-gen echo "LANG=en_US.UTF-8" > /etc/locale.conf ### Hostname setup ### echo "${NEW_HOSTNAME}" > /etc/hostname cat > /etc/hosts <<EOF # # /etc/hosts: static lookup table for host names # #<ip-address> <hostname.domain.org> <hostname> 127.0.0.1 localhost.localdomain localhost ${NEW_HOSTNAME} ::1 localhost.localdomain localhost ${NEW_HOSTNAME} # End of file EOF ### Systemd boot mkinitcpio -p linux bootctl --path=/boot install cat > /boot/loader/loader.conf <<EOF default arch editor 0 EOF # Get PARTUUID for root device into env eval $(blkid -t PARTLABEL='root' -s PARTUUID -o export) cat > /boot/loader/entries/arch.conf <<EOF title Arch Linux linux /vmlinuz-linux initrd /initramfs-linux.img options root=PARTUUID=${PARTUUID} rw EOF pacman -Sy --noconfirm \ ansible \ bash-completion \ git \ networkmanager systemctl enable NetworkManager echo "DO NOT forget to set the root password!!!"
true
77cf2f4b8e5b09b302ab44a4675e174f8903400d
Shell
ruanbekker/scripts
/tcp_is_it_up.sh
UTF-8
200
2.78125
3
[]
no_license
#!/usr/bin/env bash TCP_EXIT_CODE=1 while [ "$TCP_EXIT_CODE" != 0 ] ; do nc -vz -w 1 localhost 1001 &> /dev/null && TCP_EXIT_CODE=${?} || TCP_EXIT_CODE=${?} ; echo "failing"; sleep 1; done; echo "ok"
true
714854f61a8d4d504ebb5ad2bb42440c21a79fcf
Shell
chinadongnet/hiveos-linux
/hive/miners/noncerpro-cuda/h-stats.sh
UTF-8
1,189
3.171875
3
[]
no_license
#!/usr/bin/env bash get_miner_uptime(){ local a=0 let a=`date +%s`-`stat --format='%Y' /run/hive/miners/noncerpro-cuda/miner.conf` echo $a } stats_raw=`curl --connect-timeout 2 --max-time $API_TIMEOUT --silent --noproxy '*' http://127.0.0.1:${MINER_API_PORT}/api` if [[ $? -ne 0 || -z $stats_raw ]]; then echo -e "${YELLOW}Failed to read $miner from ${WEB_HOST}:{$WEB_PORT}${NOCOLOR}" else khs=`echo $stats_raw | jq -r '.totalHashrate' | awk '{s+=$1} END {print s/1000}'` #" local ac=$(jq -r '.totalShares - .invalidShares' <<< "$stats_raw") local inv=$(jq -r '.invalidShares' <<< "$stats_raw") local uptime=`get_miner_uptime` local algo="argon2d-nim" local temp=$(jq "[.temp$nvidia_indexes_array]" <<< $gpu_stats) local fan=$(jq "[.fan$nvidia_indexes_array]" <<< $gpu_stats) stats=$(jq --arg ac "$ac" --arg inv "$inv" \ --arg algo "$algo" \ --arg ver `miner_ver` --arg uptime "$uptime" \ --argjson fan "$fan" --argjson temp "$temp" \ '{hs: [.devices[].hashrate], hs_units: "hs", $algo, $temp, $fan, $uptime, ar: [$ac, 0, $inv], $ver}' <<< "$stats_raw") fi [[ -z $khs ]] && khs=0 [[ -z $stats ]] && stats="null"
true
5d8595987006054ea249bd892d71dba7bc94bf26
Shell
ifthikhan/dotfiles
/zsh.zsh
UTF-8
1,845
3.234375
3
[ "MIT" ]
permissive
# Load colors autoload -U colors && colors #autoload -U compinstall && compinstall # Key bindings # To see the key combo you want to use just do: # cat > /dev/null # And press it bindkey "^U" kill-whole-line # ctrl-k bindkey "^R" history-incremental-search-backward # ctrl-r bindkey "^A" beginning-of-line # ctrl-a bindkey "^E" end-of-line # ctrl-e bindkey "[B" history-search-forward # down arrow bindkey "[A" history-search-backward # up arrow bindkey "^D" delete-char # ctrl-d bindkey "^F" forward-char # ctrl-f bindkey "^B" backward-char # ctrl-b # Add a version control indicator function prompt_char { git branch >/dev/null 2>/dev/null && echo '±' && return echo '○' } ZSH_THEME_GIT_PROMPT_PREFIX="[git:" ZSH_THEME_GIT_PROMPT_SUFFIX="]$reset_color" ZSH_THEME_GIT_PROMPT_DIRTY="$fg[red]" ZSH_THEME_GIT_PROMPT_CLEAN="$fg[green]" function parse_git_dirty() { gitst="$(git status 2> /dev/null)" if [[ ${gitst} =~ 'nothing to commit' ]]; then echo $ZSH_THEME_GIT_PROMPT_CLEAN else echo $ZSH_THEME_GIT_PROMPT_DIRTY fi } function current_branch() { g br | grep -E '^\*.*' | sed 's/\* //g' } function git_prompt_info() { ref=$(git symbolic-ref HEAD 2> /dev/null) || return echo "$(parse_git_dirty)$ZSH_THEME_GIT_PROMPT_PREFIX$(current_branch)$ZSH_THEME_GIT_PROMPT_SUFFIX" } function collapse_pwd { echo $(pwd | sed -e "s,^$HOME,⌂,") } PROMPT=' %{$fg[magenta]%}%n%{$reset_color%} in %{$fg[yellow]%}%m%{$reset_color%} at %{$fg[red]%}$(collapse_pwd)%{$reset_color%} on %D{%a, %b} %@ (%h) $(git_prompt_info) %{$fg[green]%}$(prompt_char)%{$reset_color%} ' setopt promptsubst
true
13f1540516b8f38e7591ebbebe5f6340ee99893e
Shell
hite/SameSchemeTest
/replaceScheme.sh
UTF-8
1,292
3.453125
3
[]
no_license
#!/bin/bash
# author: hite
# Modify the bundleId and the scheme inside the URL types.
#workflow
plistBuddy=/usr/libexec/PlistBuddy
projectName=SameSchemeTest
info_plist="./${projectName}/Info.plist"
schemeurl=weixin
dist_dir=`pwd`/build

function changeBundleId {
	# Start looping
	bundleIds=('A' 'f' 'com.xx')
	for bundleId in ${bundleIds[@]}; do
		# Restore the file first, otherwise the sed replacement below would be wrong
		git checkout $info_plist
		sed -i '' 's#<string>$scheme</string>#<string>'${schemeurl}'</string>#g' $info_plist
		# sed -i '' 's#<string>$schemeIdentifier</string>#<string>'$bundleId'</string>#g' $info_plist
		$plistBuddy -c "Set :CFBundleIdentifier $bundleId" $info_plist
		$plistBuddy -c "Set :CFBundleName $bundleId" $info_plist

		# Install to the simulator
		xcodebuild -project "$projectName".xcodeproj -scheme "$projectName" -destination generic/platform=iOS -configuration Debug clean build CONFIGURATION_BUILD_DIR=$dist_dir/
		xcrun simctl install booted "$dist_dir/${projectName}.app"

		echo 'Install '${bundleId}' done. continue ?(Y or N)'
		read cnt
		if [[ $cnt == 'Y' || $cnt == 'y' ]] ; then
			echo '....'
		else
			break
		fi
	done
}
changeBundleId
true
f6ee50ee8f48a312792caa9eb338fa7354a482f5
Shell
tatonka21/bacontools
/git/git-clone-github-user/git-clone-github-user
UTF-8
380
3.34375
3
[ "LicenseRef-scancode-unknown-license-reference", "MIT" ]
permissive
#!/bin/sh

# Usage: git-clone-github-user USER [GIT_CLONE_OPTS...]
# Clone all repositories of a Github user to the working directory.

PER_PAGE="${PER_PAGE-200}"
USER="$1"
shift 1

curl -s "https://api.github.com/users/$USER/repos?per_page=$PER_PAGE" \
     "https://api.github.com/orgs/$USER/repos?per_page=$PER_PAGE" | \
  jq -r '.[].html_url?' | sort | uniq | parallel git clone "$@"
true
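Example usage of the script above (the user name and options are illustrative; curl, jq and GNU parallel must be installed, and the GitHub API itself caps per_page at 100):

# Shallow-clone every public repository of a user or organisation:
PER_PAGE=100 ./git-clone-github-user torvalds --depth 1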
1220adb88fce1c0183db106f4654054253d45055
Shell
cityofaustin/lioness
/data/scripts/resetData.sh
UTF-8
475
3.15625
3
[]
no_license
#!/usr/bin/env bash

set -e

echo "Stopping docker containers & deleting them with all volumes..."
containers=$(docker ps -f name=local_ -a -q)
if [ ! -z "$containers" ]; then
  docker stop $containers
  docker rm $containers
  docker volume rm $(docker volume ls -q -f dangling=true -f name=local_)
fi
echo done.

echo "Removing all generated schemas..."
rm -fv downloaded-schema.json generated-schema.ts
echo done.

echo "Removing .graphcoolrc..."
rm -fv .graphcoolrc
echo done.
true
d279a8ae3a5e7e013d881d508e505302a02e51b5
Shell
mattvenn/capture
/setup.sh
UTF-8
1,379
3.265625
3
[ "Apache-2.0" ]
permissive
#!/bin/bash # Copyright 2015 Google Inc. All Rights Reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. See the License for the specific language governing # permissions and limitations under the License. if [[ $EUID -ne 0 ]]; then echo "Must be root. Try again with sudo." exit 1 fi # Set the shared memory buffer length to 4MB. modprobe uio_pruss extram_pool_sz=0x400000 DEVICE_OVERLAY=/lib/firmware/capture-00A0.dtbo if [[ ! -f $DEVICE_OVERLAY ]] ; then echo -n "$DEVICE_OVERLAY not found, so we can't enable it." echo -n " (Did you run 'make install' to generate the .dtbo" echo -n " file from the .dts and copy it to /lib/firmware?)" echo echo exit 2 fi if echo capture > /sys/devices/bone_capemgr.9/slots ; then echo 'Device overlay successfully loaded.' else echo echo -n "Couldn't enable the capture-00A0.dtbo device overlay." echo -n " (If the error was 'File exists', then the overlay was already" echo -n " enabled.)" echo exit 3 fi
true
27bd1a952c7dcffe225a8a19dc5744f241b636e4
Shell
marekbeckmann/Bookstack-Debian-11-Installation-Script
/install-bookstack-debian.sh
UTF-8
6,346
3.375
3
[ "MIT" ]
permissive
#!/bin/bash function logToScreen() { clear printf '%s\n' "$(tput setaf 2)$1 $(tput sgr 0)" sleep 1 } function installPackages() { logToScreen "Installing required pacakges..." apt -y update apt -y install wget pwgen unzip git curl apache2 libapache2-mod-php php mariadb-server mariadb-client mariadb-common php-{fpm,curl,mbstring,ldap,tidy,xml,zip,gd,mysql,cli} } function setupDB() { logToScreen "Setting up Database..." bookstackpwd="$(pwgen -N 1 -s 96)" mysql -u root -e "DROP USER ''@'localhost'" mysql -u root -e "DROP USER ''@'$(hostname)'" mysql -u root -e "DROP DATABASE test" mysql -u root -e "DELETE FROM mysql.user WHERE User='root' AND Host NOT IN ('localhost', '127.0.0.1', '::1')" mysql -u root -e "DELETE FROM mysql.db WHERE Db='test' OR Db='test\_%'" mysql -u root -e "FLUSH PRIVILEGES" mysql -u root -e "CREATE DATABASE bookstack" mysql -u root -e "CREATE USER 'bookstack'@'localhost' IDENTIFIED BY '$bookstackpwd'" mysql -u root -e "GRANT ALL ON bookstack.* TO 'bookstack'@'localhost'" mysql -u root -e "FLUSH PRIVILEGES" } function setupBookstack(){ logToScreen "Downloading latest Bookstack release..." mkdir -p /var/www/bookstack cd /var/www/bookstack || exit 1 git clone https://github.com/BookStackApp/BookStack.git --branch release --single-branch /var/www/bookstack chown -R www-data: /var/www/bookstack logToScreen "Installing Composer" curl -s https://getcomposer.org/installer > composer-setup.php php composer-setup.php --quiet rm -f composer-setup.php sudo -u www-data php composer.phar install --no-dev --no-plugins logToScreen "Configuring Bookstack Settings..." mv .env.example .env chown -R root: /var/www/bookstack && sudo chown -R www-data: /var/www/bookstack/{storage,bootstrap/cache,public/uploads} chmod -R 0755 /var/www/bookstack sed -i "s/https:\/\/example.com/https\:\/\/$fqdn/g" .env sed -i 's/database_database/bookstack/g' .env sed -i 's/database_username/bookstack/g' .env sed -i "s/database_user_password/\"$bookstackpwd\"/g" .env php artisan key:generate --no-interaction --force php artisan migrate --no-interaction --force } function configureApache(){ logToScreen "Setting up Apache2 VHOST" echo "Listen 127.0.0.1:8080" | tee /etc/apache2/ports.conf tee /etc/apache2/sites-available/bookstack.conf >/dev/null <<EOT <VirtualHost 127.0.0.1:8080> ServerName ${fqdn} ServerAdmin webmaster@localhost DocumentRoot /var/www/bookstack/public/ <Directory /var/www/bookstack/public/> Options Indexes FollowSymLinks AllowOverride None Require all granted <IfModule mod_rewrite.c> <IfModule mod_negotiation.c> Options -MultiViews -Indexes </IfModule> RewriteEngine On RewriteCond %{HTTP:Authorization} . RewriteRule .* - [E=HTTP_AUTHORIZATION:%{HTTP:Authorization}] RewriteCond %{REQUEST_FILENAME} !-d RewriteCond %{REQUEST_URI} (.+)/$ RewriteRule ^ %1 [L,R=301] RewriteCond %{REQUEST_FILENAME} !-d RewriteCond %{REQUEST_FILENAME} !-f RewriteRule ^ index.php [L] </IfModule> </Directory> </VirtualHost> EOT a2enmod rewrite a2enmod proxy_fcgi setenvif a2enconf php7.4-fpm a2dissite 000-default.conf a2ensite bookstack.conf systemctl restart apache2 } function deploySSCert(){ logToScreen "Using Self Signed Certificate (Certbot failed)..." 
openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=NA/ST=None/L=None/O=None/CN=${fqdn}" -keyout /etc/ssl/private/bookstack-selfsigned.key -out /etc/ssl/certs/bookstack-selfsigned.crt sed -i "s/\/etc\/letsencrypt\/live\/${fqdn}\/fullchain.pem/\/etc\/ssl\/certs\/bookstack-selfsigned.crt/g" /etc/nginx/sites-available/"${fqdn}" sed -i "s/\/etc\/letsencrypt\/live\/${fqdn}\/privkey.pem/\/etc\/ssl\/private\/bookstack-selfsigned.key/g" /etc/nginx/sites-available/"${fqdn}" } function configureNginx(){ logToScreen "Installing and setting up NGINX" apt -y install nginx certbot python3-certbot-nginx rm /etc/nginx/sites-enabled/default tee /etc/nginx/sites-available/"${fqdn}" >/dev/null <<EOT upstream bookstack { server 127.0.0.1:8080; } server { server_name ${fqdn}; listen [::]:443 ssl ipv6only=on; listen 443 ssl; ssl_certificate /etc/letsencrypt/live/${fqdn}/fullchain.pem; ssl_certificate_key /etc/letsencrypt/live/${fqdn}/privkey.pem; location / { proxy_pass http://bookstack; proxy_http_version 1.1; proxy_set_header Upgrade \$http_upgrade; proxy_set_header Connection "upgrade"; proxy_set_header Host \$host; proxy_set_header X-Real-IP \$remote_addr; proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto https; proxy_set_header X-Nginx-Proxy true; proxy_redirect off; } } server { listen 80 ; listen [::]:80 ; server_name ${fqdn}; return 301 https://\$server_name\$request_uri; } EOT ln -s /etc/nginx/sites-available/"${fqdn}" /etc/nginx/sites-enabled/ certbot --nginx --non-interactive --agree-tos --domains "${fqdn}" --email "${mail}" || deploySSCert } function scriptSummary(){ systemctl restart nginx logToScreen "Installation complete! If Certbot failed, a self signed certificate was created for you. How to login: Server-Address: https://$fqdn Email: admin@admin.com Password: password" } function script_init() { read -rp "Enter server FQDN [docs.example.com]: " fqdn read -rp "Enter Mail for Certbot: " mail if [ "$fqdn" = "" ] || [ "$(whoami)" != "root" ] || [ "$mail" = "" ]; then clear echo "Script aborted!" else installPackages setupDB setupBookstack configureApache configureNginx scriptSummary fi } script_init
true
bf531f43c02f2ef93733954d654288b2de545b37
Shell
Zaraden/docker-images-1
/splunk/oracle-weblogic-admin-server+splunk/build.sh
UTF-8
1,033
3.578125
4
[ "BSD-2-Clause" ]
permissive
#!/bin/sh # ****************************************************************************** # Oracle WebLogic 12.2.1.4.0 Docker image build file. # # Since : September, 2021 # Author: Arnold Somogyi <arnold.somogyi@gmail.com> # # Copyright (c) 2020-2021 Remal Software and Arnold Somogyi All rights reserved # BSD (2-clause) licensed # # Usage: # $ ./build.sh build the image locally # $ ./build.sh true build and push the image to the image registry # ****************************************************************************** DOCKER_REGISTRY=docker DOCKER_REGISTRY_NAMESPACE=remal IMAGE_NAME=oracle-weblogic-admin-with-splunk IMAGE_VERSION=1.0.0 PUSH_IMAGE=${1:-false} docker build --no-cache -t $DOCKER_REGISTRY/$DOCKER_REGISTRY_NAMESPACE/$IMAGE_NAME:$IMAGE_VERSION . docker rmi $(docker image ls -qf dangling=true) # upload image to GitLab image registry if [ "$PUSH_IMAGE" = true ] ; then echo "pushing the image to the registry..." docker push $DOCKER_REGISTRY/$DOCKER_REGISTRY_NAMESPACE/$IMAGE_NAME:$IMAGE_VERSION fi
true
a300af0c0bad5fd110f2f0bcd857a97ebfa495bd
Shell
carlosmorenoin/livesound
/stop.sh
UTF-8
593
3.203125
3
[]
no_license
#!/usr/bin/env bash set -e cd `dirname $0` r=`pwd` echo $r function stop_service { echo "Stopping Service " $1 cd $r/$1 if [ -z $(cat pid.file) ] then echo "PDI not found for service " $1 else kill $(cat pid.file) && rm pid.file || echo "Could not kill process" $(cat pid.file) fi } stop_service livesound-gateway stop_service livesound-venue-query stop_service livesound-venue-command stop_service livesound-profiles stop_service livesound-users stop_service livesound-authserver stop_service livesound-eureka stop_service livesound-config
true
1a1617851fbaf9c8d16805f2f10c18abcf6e713c
Shell
mgijax/omim_hpoload
/Install
UTF-8
1,086
4.15625
4
[]
no_license
#!/bin/sh # # Installation script # # 1) cp omim_hpoload.config.default omim_hpoload.config # 2) cp annotload.config.default annotload.config # 3) Install # Usage=Install # # Function called when the install fails. # installFailed () { echo "Installation Failed: `date`" exit 1 } # # Verify the arguments to the script, there shouldn't be any # if [ $# -ne 0 ] then echo "Usage: ${Usage}" installFailed fi # # Make sure config files exist # cd `dirname $0` # establish the config files MAIN_CONFIG=omim_hpoload.config # Make sure config file exists and source it if [ -r ${MAIN_CONFIG} ] then echo "Source ${MAIN_CONFIG}" . ${MAIN_CONFIG} else echo "Cannot source configuration file: ${MAIN_CONFIG}" installFailed fi # Check to see if this is a development installation # DEV="" if [ "${INSTALL_TYPE}" = "dev" ] then DEV="-d" fi # # run DLAInstall for each configuration file # echo 'running DLAINSTALL' ${DLAINSTALL} ${DEV} # # Create the input directory if it doesn't exist. # if [ ! -d ${INPUTDIR} ] then mkdir -p ${INPUTDIR} fi
true
71779d861f5ac184205f6c1fc3b3b52de7a7c388
Shell
Findarato/dotFiles
/scripts/bin/newSSH_key.sh
UTF-8
105
3.046875
3
[]
no_license
#!/bin/bash
if [ -n "${1}" ]; then
 ssh-keygen -t ed25519 -f "${1}"
else
 echo "KEY NAME MISSING"
fi
true
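A minimal usage sketch for the key-generation script above; the output path is an arbitrary example, not something the repository prescribes:

  # create a new ed25519 key pair at a caller-chosen path (example path only)
  ./newSSH_key.sh "$HOME/.ssh/id_ed25519_example"
  # ssh-keygen then prompts for an optional passphrase and writes the matching .pub file alongside the private key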
4ff4030011d4477d98352d470248b4077a80b04b
Shell
jakartaredhat/weld-cdi-tck
/setup.sh
UTF-8
663
2.703125
3
[ "Apache-2.0" ]
permissive
#!/usr/bin/env bash # Download and unzip the CDI TCK dist wget http://download.eclipse.org/ee4j/cdi/cdi-tck-2.0.6-dist.zip shasum -a 256 cdi-tck-2.0.6-dist.zip unzip cdi-tck-2.0.6-dist.zip # Install the TCK artifacts into local repo pushd . cd cdi-tck-2.0.6/artifacts bash artifact-install.sh popd # Download Wildfly/Weld runtime wget https://download.jboss.org/wildfly/17.0.1.Final/wildfly-17.0.1.Final.zip unzip wildfly-17.0.1.Final.zip export JBOSS_HOME=`pwd`/wildfly-17.0.1.Final # Get the weld/core repo for TCK runner git clone https://github.com/weld/core.git weld-core cd weld-core git checkout 3.1.1.Final mvn clean package -Dtck -f jboss-as/pom.xml
true
d4352aff7a8d818abd85fbcbd520c302f7897426
Shell
kmorel/config
/shell-startup/path-setup.sh
UTF-8
1,577
3.890625
4
[]
no_license
#!/bin/sh # Gets ready to add custom things to the path and adds the paths that I # consistently use. This script can be sourced by sh and any variant. # before_system_path= # system_path=${PATH} # after_system_path= # Used internally detect_system_path() { system_path=`echo ${PATH} | sed -e "s|${before_system_path:-/not/a/path}||g" -e "s|${after_system_path}:\.||g"` } # Used internally update_path() { export PATH=${before_system_path}${system_path}${after_system_path}:. } # Add one or more directories to PATH before the system paths. add_before_system_path() { detect_system_path for dir ; do if [ -d $dir ] ; then if echo ${before_system_path} | fgrep -v ${dir}: > /dev/null then before_system_path=${before_system_path}${dir}: fi fi done update_path } # Add one or more directories to PATH after the system paths. add_after_system_path() { detect_system_path for dir ; do if [ -d $dir ] ; then if echo ${after_system_path} | fgrep -v :${dir} > /dev/null then after_system_path=${after_system_path}:${dir} fi fi done update_path } # Add custom command in home directory first add_before_system_path \ $HOME/local/bin # /usr/local/bin should be in our path, but add it just in case it is not. # Add it after the system path so that it does not overwrite custom path. add_after_system_path \ /usr/local/bin # Make sure X programs are in path. if quiet_which xterm ; then : else add_after_system_path \ /usr/local/X11/bin \ /usr/local/X11R6/bin \ /usr/X11R6/bin fi
true
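A brief sketch of how the helpers above would typically be consumed from a login shell's startup file; the source path and the two extra directories are assumptions chosen for illustration:

  # in ~/.profile or similar (illustrative), after the helper file is sourced:
  . "$HOME/config/shell-startup/path-setup.sh"
  add_before_system_path "$HOME/opt/tools/bin"   # personal tools override system binaries
  add_after_system_path "/opt/vendor/bin"        # vendor tools only fill in gaps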
366e6bc055cec306bd1ecfebb5efecc644220558
Shell
dominopetter/workspaces-minimal
/rstudio/install
UTF-8
765
2.859375
3
[]
no_license
#!/bin/bash set -o nounset -o errexit -o pipefail && \ mkdir -p /tmp/rstudio-temp && \ cd /tmp/rstudio-temp && \ apt-get update -y && apt-get install -y gdebi libapparmor1 && \ wget -q https://download2.rstudio.org/server/bionic/amd64/rstudio-server-1.3.1056-amd64.deb && \ gdebi -n rstudio-server-1.3.1056-amd64.deb && \ ln -sf /usr/lib/rstudio-server /usr/local/lib/rstudio-server && \ chown -R root:root /usr/lib/rstudio-server && \ cd ~ && \ rm -rf /tmp/rstudio-temp && \ echo "www-frame-origin=any" >> /etc/rstudio/rserver.conf && \ chown ubuntu:ubuntu /etc/rstudio/rserver.conf && \ ln -sf /usr/lib/rstudio-server/bin/pandoc/pandoc /usr/local/bin && \ ln -sf /usr/lib/rstudio-server/bin/pandoc/pandoc-citeproc /usr/local/bin && \ rm -rf /var/lib/apt/lists/*
true
84e6f0f9b54afb65749fd238bc722a37172994f8
Shell
bcowgill/bsac-linux-cfg
/bin/cfg/log-githook/post-commit
UTF-8
755
3.03125
3
[]
no_license
#!/bin/sh # https://git-scm.com/docs/githooks PRE="`date` post-commit-msg:" LOG=~/githook.log echo === $PRE $0 entered ========================= >> $LOG echo $PRE LOGNAME: $LOGNAME USER: $USER HOME: $HOME >> $LOG echo $PRE PWD: $PWD >> $LOG echo $PRE EDITOR: $EDITOR >> $LOG echo $PRE PATH: $PATH >> $LOG echo $PRE GIT_DIR: $GIT_DIR >> $LOG echo $PRE GIT_EDITOR: $GIT_EDITOR >> $LOG echo $PRE core.hooksPath: `git config core.hooksPath` >> $LOG if echo $SSH_ASKPASS | grep git-gui > /dev/null; then echo $PRE git gui: yes >> $LOG else echo $PRE git gui: no >> $LOG fi echo --------------------------------------------- >> $LOG git status >> $LOG echo --------------------------------------------- >> $LOG echo $PRE git hook logged #HOOK #/HOOK exit 0
true
e7cfa1c382858e5f625ca279803628b27e981b23
Shell
cccarey/ubu-wd40
/scripts/quicken
UTF-8
1,238
4
4
[]
no_license
#!/usr/bin/env bash die() { # $1 - the exit code # $2 $... - the message string retcode=$1 shift printf >&2 "%s\n" "$@" exit $retcode } wine_install() { echo -n "wine not installed... script will attempt to install. N to cancel" read resp [[ "$resp" == "N" ]] && die 0 "cancel requested" sudo apt-get install wine winetricks } quicken_install() { [[ -s /tmp/Quicken_Deluxe_2011.exe ]] || die 1 "Quicken installer does not exist" winetricks fontfix winetricks corefonts vcrun6 winetricks gdiplus winetricks dotnet20 winetricks ie6 winetricks ie8 winecfg wine /tmp/Quicken_Deluxe_2011.exe die 0 "Quicken Installed" } tricks() { winetricks || die 1 "winetricks not installed" die 0 } cfg() { winecfg || die 1 "wincfg not installed" die 0 } run() { exec wine "c:\program files\quicken\qw.exe" &> /dev/null & } type wine >/dev/null 2>&1 || wine_install export WINEPREFIX=$HOME/.wine-quicken/ export WINEARCH=win32 OPTION=$1 if [ ! -z "$OPTION" ] then [[ "$OPTION" == "install" ]] && quicken_install [[ "$OPTION" == "tricks" ]] && tricks [[ "$OPTION" == "cfg" ]] && cfg die 1 "$OPTION is not valid" else run fi
true
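Usage sketch inferred from the option handling in the script above (the installer is expected at /tmp/Quicken_Deluxe_2011.exe for the install step):

  ./quicken install   # one-time setup: winetricks components plus the Quicken installer
  ./quicken cfg       # open winecfg for the dedicated WINEPREFIX
  ./quicken tricks    # open winetricks for the same prefix
  ./quicken           # no argument: launch qw.exe through wine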
3fd52aaeeeac8aacfa75548843626243ae20d50e
Shell
vicrem/mssql
/init_script/initialization.sh
UTF-8
5,221
2.75
3
[]
no_license
#!/bin/bash set -e # Mandatory input [ -z "${KERBEROS_REALM}" ] && echo "KERBEROS_REALM must be defined" && exit 1 # Optional input [ -z "${LDAP_ENUMERATE}" ] && LDAP_ENUMERATE="False" [ -z "${LDAP_USER_MEMBEROF}" ] && LDAP_USER_MEMBEROF="memberOf" [ -z "${LDAP_IGNORE_GROUP_MEMBERS}" ] && LDAP_IGNORE_GROUP_MEMBERS="False" [ -z "${LDAP_USER_PRINCIPAL}" ] && LDAP_USER_PRINCIPAL="userPrincipalName" [ -z "${KERBEROS_DNS_DISCOVERY_DOMAIN}" ] && KERBEROS_DNS_DISCOVERY_DOMAIN=${KERBEROS_REALM} # Put config files in place cat >/etc/krb5.conf <<EOL [libdefaults] rdns = False forwardable = True renew_lifetime = 7d ticket_lifetime = 24h udp_preference_limit = 0 dns_lookup_realm = True dns_canonicalize_hostname = True default_realm = ${KERBEROS_REALM} default_keytab_name = FILE:${KRB5_KTNAME} default_client_keytab_name = FILE:${KRB5_CLIENT_KTNAME} [realms] ${KERBEROS_REALM} = { } [domain_realm] $(echo ${KERBEROS_REALM%%.*} | tr '[:upper:]' '[:lower:]') = ${KERBEROS_REALM} .$(echo ${KERBEROS_REALM%%.*} | tr '[:upper:]' '[:lower:]') = ${KERBEROS_REALM} $(echo ${KERBEROS_REALM} | tr '[:upper:]' '[:lower:]') = ${KERBEROS_REALM} .$(echo ${KERBEROS_REALM} | tr '[:upper:]' '[:lower:]') = ${KERBEROS_REALM} [capaths] ${KERBEROS_REALM%%.*} = { ${KERBEROS_REALM} = ${KERBEROS_REALM#*.} } ${KERBEROS_REALM} = { ${KERBEROS_REALM#*.} = ${KERBEROS_REALM#*.} } [plugins] localauth = { module = sssd:/usr/lib/x86_64-linux-gnu/sssd/modules/sssd_krb5_localauth_plugin.so } EOL cat >/etc/sssd/sssd.conf <<EOL [sssd] services = nss config_file_version = 2 domains = ${KERBEROS_REALM} [domain/${KERBEROS_REALM}] ad_domain = ${KERBEROS_REALM} id_provider = ad access_provider = ad auth_provider = ad chpass_provider = ad subdomains_provider = ad sudo_provider = none autofs_provider = none selinux_provider = none krb5_realm = ${KERBEROS_REALM} krb5_keytab = ${KRB5_KTNAME} dyndns_update = False cache_credentials = True use_fully_qualified_names = True dns_discovery_domain = ${KERBEROS_REALM} ignore_group_members = ${LDAP_IGNORE_GROUP_MEMBERS} ldap_id_mapping = True ldap_group_nesting_level = 0 ldap_account_expire_policy = ad ldap_force_upper_case_realm = True [nss] filter_groups = root filter_users = root EOL cat >/etc/nsswitch.conf <<EOL passwd: compat sss shadow: compat sss group: compat sss hosts: files dns myhostname bootparams: nisplus [NOTFOUND=return] files ethers: files netmasks: files networks: files protocols: files rpc: files services: files sss netgroup: nisplus sss publickey: nisplus automount: files nisplus sss aliases: files nisplus EOL cat >/etc/ssl/openssl.cnf <<EOF [req] distinguished_name = req_distinguished_name x509_extensions = v3_req prompt = no [req_distinguished_name] C = SE ST = Skane L = Malmo O = Vicrem CN = ${HOSTNAME} [v3_req] keyUsage = critical, digitalSignature, keyAgreement extendedKeyUsage = serverAuth subjectAltName = @alt_names [alt_names] DNS.1 = ${HOSTNAME} DNS.2 = ${HOSTNAME%%.*} EOF cat >/var/opt/mssql/mssql.conf <<EOF [network] forceencryption = 1 tlscert = /etc/ssl/private/cert.pem tlskey = /etc/ssl/private/cert.key tlsciphers = ECDHE-ECDSA-AES128-GCM-SHA256:ECDHE-ECDSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-GCM-SHA384:ECDHE-ECDSA-AES128-SHA256:ECDHE-ECDSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-ECDSA-AES256-SHA:ECDHE-ECDSA-AES128-SHA:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA tlsprotocols = 1.2 enablekdcfromkrb5conf = true disablesssd = false EOF # Create 
dirs if not exists CHECK_DIR=(/var/opt/mssql/backup /var/opt/mssql/secrets) for dirs in "${CHECK_DIR[@]}"; do if [ ! -d $dirs ]; then mkdir $dirs && \ chown -R mssql $dirs fi done # Create keytab if [ "${CREATE_KEYTAB}" = "True" ]; then python3 /tmp/keytab/run.py fi # Set permission and run sssd chmod 600 /etc/sssd/sssd.conf exec /usr/sbin/sssd -i -d 6 & # Create TLS cert and set permission openssl req -x509 -nodes -days 3650 -newkey rsa:2048 -keyout /etc/ssl/private/cert.key -out /etc/ssl/private/cert.pem -config /etc/ssl/openssl.cnf -sha256 && \ chown -R mssql /etc/ssl/private/ # Wait to be sure that SQL Server is up & running sleep 60s # Run sql script for files in $(ls -d /docker-entrypoint-initdb.d/*); do if [[ $files =~ \.sql$ ]]; then /opt/mssql-tools/bin/sqlcmd -S localhost,${MSSQL_TCP_PORT} -U sa -P ${MSSQL_SA_PASSWORD} -i $files fi done function create_admin_group() { cat >/tmp/create_admin_group.sql <<-EOF USE [master] GO CREATE LOGIN [${KERBEROS_REALM%%.*}\\${groups}] FROM WINDOWS WITH DEFAULT_DATABASE=[master] GO ALTER SERVER ROLE [sysadmin] ADD MEMBER [${KERBEROS_REALM%%.*}\\${groups}] GO EOF } # Create login for Admin group if [ ! -z "${MSSQL_ADMIN_GROUP}" ]; then for groups in $( echo "${MSSQL_ADMIN_GROUP}" | sed 's/,/ /g'); do echo "Creating MSSQL_ADMIN_GROUP: $groups" create_admin_group $groups /opt/mssql-tools/bin/sqlcmd -S localhost,${MSSQL_TCP_PORT} -U sa -P ${MSSQL_SA_PASSWORD} -i /tmp/create_admin_group.sql done fi
true
4086d9d9e4470f12f1bab0a52b0d6c88fe989621
Shell
NathanGiesbrecht/FireOS
/fire_overlay/bundles/xlib/01_get.sh
UTF-8
480
3.453125
3
[ "MIT" ]
permissive
#!/bin/sh set -e . ../../common.sh # Read the 'xlib' download URL from '.config'. DOWNLOAD_URL=`read_property XLIB_SOURCE_URL` # Grab everything after the last '/' character. ARCHIVE_FILE=${DOWNLOAD_URL##*/} # Download 'xlib' source archive in the 'source/overlay' directory. download_source $DOWNLOAD_URL $OVERLAY_SOURCE_DIR/$ARCHIVE_FILE # Extract all 'xlib' sources in the 'work/overlay/xlib' directory. extract_source $OVERLAY_SOURCE_DIR/$ARCHIVE_FILE xlib cd $SRC_DIR
true
e62ce6672a88297054c0cf3a3d1ac9e8d16834cf
Shell
Chrysostomus/packages-community
/packnroll/PKGBUILD
UTF-8
971
2.71875
3
[]
no_license
# Maintainer: Philip Müller <philm[at]manjaro[dot]org> # Developer: Filipe Marques <eagle[dot]software3[at]gmail[dot]com> pkgname=packnroll pkgver=1.0.0 pkgrel=1 pkgdesc="It's so easy doing the packaging process !" arch=('i686' 'x86_64') url="https://github.com/filipe-marques/packnroll" license=('GPL') depends=('qt5-base' 'devtools') makedepends=('qt5-tools' 'devtools') source=("$url/archive/$pkgname-$pkgver.tar.gz") md5sums=('a71f7f73e8d2789b3526b504d08c5baf') build() { cd $srcdir/packnroll-$pkgname-$pkgver qmake-qt5 packnroll.pro make } package() { cd $srcdir/packnroll-$pkgname-$pkgver #make install INSTALL_ROOT=$pkgdir install -Dm755 "PacknRoll" "${pkgdir}/usr/bin/packnroll" install -Dm644 "packnroll.desktop" "${pkgdir}/usr/share/applications/packnroll.desktop" install -Dm644 "resources/icons/packnroll128.png" "${pkgdir}/usr/share/icons/hicolor/128x128/apps/packnroll.png" install -Dm644 "LICENSE" "${pkgdir}/usr/share/licenses/packnroll/LICENSE" }
true
603091a3b33e367db353b4f83d491a048ef1d897
Shell
anomen-s/anomen-kernel-config
/banan/home/sshfs/mivvy/mount.sh
UTF-8
115
2.65625
3
[]
no_license
#!/bin/sh H=mivvy if [ -n "$1" ] then H=$1 fi sshfs -o nonempty -o transform_symlinks $H:/ $HOME/sshfs/mivvy
true
a16233d8044485d0f0ddae1d6cefa7c281566a0d
Shell
cedadev/slstr_calibration_ftp_retriever
/make_mpc_level0_config.sh
UTF-8
1,328
2.96875
3
[]
no_license
#!/bin/bash export model=$1 export config_file=$GWS_PATH/software/slstr_calibration_ftp_retriever/config/slstr_cpa_level0_$model'.cfg' export lockfile=$GWS_PATH/s3_slstr_raw_data/$model/flight/level0_data/level0_lock.txt export output_dir=$GWS_PATH/s3_slstr_raw_data/$model/flight/level0_data/ export log_dir=$GWS_PATH/s3_slstr_raw_data/$model/flight/level0_data/logs/ echo [default] >$config_file echo #general connection details etc >>$config_file echo ftp_host: ftp.acri-cwa.fr >>$config_file echo ftp_user: ftp_s3mpc-SL0-${model:(-1)} >>$config_file if [ $model == S3A ] then echo ftp_pw: 'usfTG654$*' >>$config_file fi if [ $model == S3B ] then echo ftp_pw: 'zzfTD54!%' >>$config_file fi echo check_days: False >>$config_file echo check_days_num: 50 >>$config_file echo log_dir: $log_dir >>$config_file echo email_alerts: >>$config_file echo lockfile: $lockfile >>$config_file echo >>$config_file for days_ago in {1..0} do export month=`date -d $days_ago' days ago' +%m` export year=`date -d $days_ago' days ago' +%Y` export day=`date -d $days_ago' days ago' +%d` echo [L0_MPC0$days_ago] >>$config_file echo ftp_server_path: /$year$month$day >>$config_file echo product_base: $model'_SL_0_.*LN2_O_NT.*' >>$config_file echo local_path: $output_dir >>$config_file echo >>$config_file done
true
0ec0dc5f591d27ac68e9a828afa8d43f666d0d89
Shell
valueerrorx/life-servicemenus
/resize.sh
UTF-8
3,420
3.328125
3
[]
no_license
#!/bin/bash
DIR="$1";

kdialog --title "Resize Images" --yesnocancel "Do you want to replace existing files ?"

case $? in
0) # Replace existing files !
choice=`kdialog --title "Resize Images" --radiolist "target size:" 1 "1024px" on 2 "800px" off 3 "640px" off 4 "480px" off`;
FILE=""
let "nbfiles = $#"
dbusRef=`kdialog --progressbar "Initialising ..." $nbfiles`
qdbus $dbusRef showCancelButton true
compteur=0
for i in "$@";do
if [ -f "$i" ];then
#test if cancel button has been pushed
if [[ "$(qdbus $dbusRef wasCancelled)" == "true" ]] ; then
qdbus $dbusRef close
exit 1
fi
FILE="$i"
let "compteur +=1"
case "$choice" in
1) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 1024x1024 "$FILE" "$FILE";;
2) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 800x800 "$FILE" "$FILE";;
3) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 640x640 "$FILE" "$FILE";;
4) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 480x480 "$FILE" "$FILE";;
*) qdbus $dbusRef close;
rm -rf "$TMPDIR"
exit 0;;
esac
fi;
done
qdbus $dbusRef close;;

1) choice=`kdialog --title "Resize Images" --radiolist "target size:" 1 "1024px" on 2 "800px" off 3 "640px" off 4 "480px" off`;
FILE=""
let "nbfiles = $#"
dbusRef=`kdialog --progressbar "Initialising ..." $nbfiles`
qdbus $dbusRef showCancelButton true
compteur=0
for i in "$@";do
if [ -f "$i" ];then
#test if cancel button has been pushed
if [[ "$(qdbus $dbusRef wasCancelled)" == "true" ]] ; then
qdbus $dbusRef close
exit 1
fi
FILE="$i"
let "compteur +=1"
case "$choice" in
1) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 1024x1024 "$FILE" $DIR/1024_"`basename "$FILE"`";;
2) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 800x800 "$FILE" $DIR/800_"`basename "$FILE"`";;
3) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 640x640 "$FILE" $DIR/640_"`basename "$FILE"`";;
4) qdbus $dbusRef setLabelText "Scaling image `basename "$FILE"`"
qdbus $dbusRef org.freedesktop.DBus.Properties.Set org.kde.kdialog.ProgressDialog value $compteur
convert -resize 480x480 "$FILE" $DIR/480_"`basename "$FILE"`";;
*) qdbus $dbusRef close;
rm -rf "$TMPDIR"
exit 0;;
esac
fi;
done
qdbus $dbusRef close;;

2) exit 0;;
esac;
true
46dc4de26fcbb2680f6e5fc3e3f5419f1bb2487c
Shell
qqqqqqq645/qwr
/asdf/sh_pro1_while.sh
UTF-8
107
3.34375
3
[]
no_license
echo "Input your number" read digit while [ $digit -gt 0 ] do echo $digit digit=`expr $digit -1` done
true
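For reference, the same countdown written with bash arithmetic expansion instead of the external expr call; behaviour is identical but no subprocess is spawned per iteration:

  echo "Input your number"
  read digit
  while [ "$digit" -gt 0 ]
  do
    echo "$digit"
    digit=$((digit - 1))
  done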
65dcf586e3ac77424c8bcf17cfc040029c725877
Shell
leekaka/Deeplab_v2_test
/deeplab_start.sh
UTF-8
607
2.6875
3
[]
no_license
#!/bin/bash ### # author :leekaka ### echo "拉取源码中..." git clone https://bitbucket.org/aquariusjay/deeplab-public-ver2.git echo "源码拉取成功,开始创建文件夹" mkdir -p ./exper/voc12/config/deeplab_largeFOV mkdir -p ./exper/voc12/features/labels mkdir -p ./exper/voc12/features2/labels mkdir -p ./exper/voc12/list mkdir -p ./exper/voc12/model/deeplab_largeFOV mkdir -p ./exper/voc12/log mkdir -p ./exper/voc12/res echo "创建文件夹成功,用来存放txt,log,model等内容" echo "开始下载model和prototxt,放进新建文件夹" sh ./dowload_model.sh echo "完成.."
true
862b543bde03d635c68d1eb0752abd2c203adbb3
Shell
hmasmoudi/SyphaxOS
/Default/0003-SyphaxOSGnome3/001_BuildPackagesScripts/0173-notification-daemon/PKGBUILD
UTF-8
673
2.53125
3
[]
no_license
# Maintainer: Hatem Masmoudi <hatem.masmoudi@gmail.com> pkgname=notification-daemon pkgver=3.20.0 pkgrel=6 pkgdesc="The Notification Daemon package contains a daemon that displays passive pop-up notifications." arch=('x86_64') url="http://ftp.gnome.org/pub/gnome/sources/notification-daemon/3.20" license=('GPL') groups=('desktop') source=("$url/${pkgname}-${pkgver}.tar.xz") md5sums=('2de7f4075352831f1d98d8851b642124') depends=('rootfs') build() { cd "$srcdir/${pkgname}-${pkgver}" ./configure --prefix=/usr \ --sysconfdir=/etc \ --disable-static make } package() { cd "$srcdir/${pkgname}-${pkgver}" make DESTDIR="${pkgdir}" install }
true
8c34ed91817b3685d2f6d6490866a402cca4cb29
Shell
pandorafms/pandorafms
/extras/docker/centos8/build_image_el8.sh
UTF-8
3,122
3.890625
4
[]
no_license
#!/bin/bash VERSION=$1 ENT="$HOME/code/pandora_enterprise" OPEN="$HOME/code/pandorafms" OS="Centos" ARCH="x86_64" EL="el7" EXT="demo" TARGET_URL="http://atlantis.artica.es" DOCKER_PATH="$OPEN/extras/docker/centos8/" OSTACK_IMAGE="pandorafms/pandorafms-open-stack-el8" OBASE_IMAGE="pandorafms/pandorafms-open-base-el8" PERCONA_IMAGE="pandorafms/pandorafms-percona-base" function help { echo "To excute the builder you must declare 4 parameters: the version image, upload (push) tokens, build base (rebuild centos base image), build percona (rebuild percona base image)" echo "" echo "$0 <version> [ <push 0|1> ] [<build base 0|1>] [<build percona 0|1>]" echo "Ex creates a local image from 749 packages : $0 749 0 1 1" } if [ "$1" == "" ] || [ "$1" == "-h" ] ; then help exit fi if [ "$2" == "1" ]; then UPDATE="1" fi if [ "$3" == "1" ]; then BASEBUILD="1" fi if [ "$4" == "1" ]; then DBBUILD="1" fi #Defining packages urls oconsoleurl=$TARGET_URL/Releases/7.0NG.$VERSION/$OS/noarch/pandorafms_console-7.0NG.$VERSION.noarch.rpm oserverurl=$TARGET_URL/Releases/7.0NG.$VERSION/$OS/noarch/pandorafms_server-7.0NG.$VERSION.noarch.rpm url=$(curl -I -s $TARGET_URL/Releases/7.0NG.$VERSION/ 2> /dev/null | grep "200 OK" | wc -l) # log in into docker acount to acces private repo. # docker login -u $DOCKERUSER -p$DOCKERPASS Check athlantis is reachable if [ "$url" -lt 1 ] ; then echo "$url Athlantis unreachable ..." exit fi echo "Start" # Removing old packages cd $DOCKER_PATH/pandora-stack/sources rm -rf ./pandorafms_* # Downloading new packages wget $oconsoleurl wget $oserverurl if [ "$BASEBUILD" == 1 ] ; then docker pull centos:8 # Open Base image echo "building Base el8 image" cd $DOCKER_PATH/base docker build -t $OBASE_IMAGE:$VERSION -f $DOCKER_PATH/base/Dockerfile $DOCKER_PATH/base echo "Taging Open stack el8 latest image before upload" docker tag $OBASE_IMAGE:$VERSION $OBASE_IMAGE:latest echo -e ">>>> \n" else docker pull pandorafms/pandorafms-open-base-el8 fi if [ "$DBBUILD" == 1 ] ; then docker pull percona:5.7 # Percona image echo "building Percona image" cd $OPEN/extras/docker/percona docker build -t $PERCONA_IMAGE:latest -f $OPEN/extras/docker/percona/Dockerfile $OPEN/extras/docker/percona echo -e ">>>> \n" fi #Open Stack image echo "building Open el8 image" cd $DOCKER_PATH/pandora-stack docker build -t $OSTACK_IMAGE:$VERSION -f $DOCKER_PATH/pandora-stack/Dockerfile $DOCKER_PATH/pandora-stack echo "Taging Open base latest image before upload" docker tag $OSTACK_IMAGE:$VERSION $OSTACK_IMAGE:latest echo -e ">>>> \n" # Upload images if [ "$UPDATE" == 1 ] ; then if [ "$BASEBUILD" == 1 ] ; then #Open base Images echo "Uploading Open $OBASE_IMAGE:$VERSION . . ." docker push $OBASE_IMAGE:$VERSION docker push $OBASE_IMAGE:latest fi if [ "$DBBUILD" == 1 ] ; then #Open base Images echo "Uploading percona $PERCONA_IMAGE:latest . . ." docker push $PERCONA_IMAGE:latest fi #Open Stack Images echo "Uploading Open $OSTACK_IMAGE:$VERSION . . ." docker push $OSTACK_IMAGE:$VERSION docker push $OSTACK_IMAGE:latest fi
true
e7281e306f109ed94becd2bc9dec5161469189ce
Shell
idleuncle/bdpaas
/easy_deploy/easy_deploy
UTF-8
1,730
3.953125
4
[ "MIT" ]
permissive
#!/bin/bash # 本脚本是部署工作的启动脚本。 # 工作起点是已登录控制机,安装包已拷贝至控制机。已取得集群所有节点的主机IP及root密码。 # 控制机工作目录,缺省是本启动脚本所有目录。 readonly MASTER_ROOTPATH=$(cd `dirname $0`; pwd) echo "Master Root Path: " $MASTER_ROOTPATH SCRIPT_NAME=`basename $0` cd $MASTER_ROOTPATH . ./scripts/utils.sh . ./config.sh . ./scripts/cmd_base.sh . ./scripts/cmd_ssh.sh . ./scripts/cmd_docker.sh . ./scripts/cmd_registry.sh # -------- cmd_usage() -------- function cmd_usage() { echo echo "Usage: $SCRIPT_NAME <command>" echo echo " commands: list, base, ssh, docker" echo echo "Use $SCRIPT_NAME <command> --help to find how to use it." echo } # -------- cmd_list() -------- function cmd_list() { for node in $CLUSTER_NODES; do echo $node done #notify $@ } while true; do CMD=$1 case $CMD in list) FUNCTION=cmd_list shift; break;; base) FUNCTION=cmd_base shift; break;; ssh) FUNCTION=cmd_ssh shift; break;; docker) FUNCTION=cmd_docker shift; break;; registry) FUNCTION=cmd_registry shift; break;; *) NODES=$1 if [ "$NODES" == "*" ]; then NODES=$CLUSTER_NODES else NODES=`echo $NODES | tr -d ","` fi echo "NODES:" $NODES shift; continue; FUNCTION=cmd_usage break;; esac done ${FUNCTION} $@
true
566505a88ea9e56e12ea137df544076586268b09
Shell
whchi/env
/image.sh
UTF-8
281
3.53125
4
[]
no_license
#!/bin/sh SUFFIX=$2 TAG="latest" IMAGE_NAME="app_${SUFFIX}:${TAG}" case "$1" in build) docker build -f Dockerfile-${2} -t $IMAGE_NAME . ;; push) docker push $IMAGE_NAME ;; *) echo $"Usage: $0 {build|push}" exit 1 esac
true
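Invocation sketch for the build helper above; the suffix is an arbitrary example and simply selects which Dockerfile-<suffix> next to the script is used:

  ./image.sh build web   # builds app_web:latest from Dockerfile-web (suffix chosen for illustration)
  ./image.sh push web    # pushes app_web:latest to the registry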
8591418468dfc14cf6c408cab34be4f324a29dfa
Shell
luyGithub/hello-world-1
/icepack_luy/cesm_meltpond_pe/run_real/test_if.sh
UTF-8
445
2.84375
3
[]
no_license
#!/bin/sh -f iflag=`grep "IFLAG" icepack.runlog.191215-114747 | cut -f 2 -d"=" | sed 's/ //g'` echo "iflag= $iflag" if [[ $iflag -eq 0 ]] then echo 'iflag=0' echo 1 >> iflag.txt sed -n "` grep -n "FINAL POINT X=" icepack.runlog.191215-114747 | awk -F ":" '{print $1}' `,+1p" icepack.runlog.191215-114747 | tail -n 1 >> X.txt else echo 'iflag=-1' echo -1 >> iflag.txt echo NAN >> X.txt fi
true
bd77c6e1e727e94b0d91fd56134355a113b55cce
Shell
ecmwf/eccodes
/tests/grib_ecc-1271.sh
UTF-8
817
2.59375
3
[ "Apache-2.0" ]
permissive
#!/bin/sh # (C) Copyright 2005- ECMWF. # # This software is licensed under the terms of the Apache Licence Version 2.0 # which can be obtained at http://www.apache.org/licenses/LICENSE-2.0. # # In applying this licence, ECMWF does not waive the privileges and immunities granted to it by # virtue of its status as an intergovernmental organisation nor does it submit to any jurisdiction. # . ./include.ctest.sh label="grib_ecc-1271_test" temp=temp.$label sample_grib2=$ECCODES_SAMPLES_PATH/GRIB2.tmpl ${tools_dir}/grib_set -s \ centre=kwbc,gridDefinitionTemplateNumber=32769,localTablesVersion=1 \ $sample_grib2 $temp grib_check_key_equals $temp Ni,Nj "16 31" grib_check_key_equals $temp centreLatitudeInDegrees,centreLongitudeInDegrees "0 30" grib_check_key_equals $temp minimum,maximum "1 1" rm -f $temp
true
0c262b1ca285a5c09b19b550ade53295a98bf2fa
Shell
trcook/.dotfiles
/bash_profile
UTF-8
1,238
3.171875
3
[]
no_license
#! /bin/bash
INVOCATION_ORDER+=('bash_profile')
export INVOCATION_ORDER

# This prints the directory in the tab window
if [ $ITERM_SESSION_ID ]; then
  export PROMPT_COMMAND='echo -e "\033];${PWD##*/}\007"; ':"$PROMPT_COMMAND";
fi

# This function checks through an array of vars. This lets us work out an ordering for startup files that fits our demands
function contains(){ local n=$#;local value=${!n}; for ((i=1;i < $#;i++)) do if [ "${!i}" == "${value}" ]; then echo "y"; return 0;fi; done;echo "n"; return 1;}

# Run the function over profile and bashrc
if [ $(contains "${INVOCATION_ORDER[@]}" "profile") == "n" ]; then source ~/.profile; fi;
if [ $(contains "${INVOCATION_ORDER[@]}" "bashrc") == "n" ]; then source ~/.bashrc; fi;

# Things that must run after the path gets set go here. Note that this does not mean it will be the last thing run during startup; that is determined by the byzantine start-ordering for BASH. Lines after this should be tolerant of running multiple times (or of having PATH re-set without being changed).
export WORKON_HOME=$HOME/.virtualenvs
export PROJECT_HOME=$HOME/Devel
export VIRTUALENVWRAPPER_SCRIPT=/usr/local/bin/virtualenvwrapper.sh
source /usr/local/bin/virtualenvwrapper_lazy.sh
true
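A standalone illustration of the contains() helper defined above (run in bash after the helper is defined), with a made-up array, showing the y/n convention the sourcing guards rely on:

  order=('profile' 'bashrc')
  contains "${order[@]}" "bashrc"        # prints y
  contains "${order[@]}" "bash_profile"  # prints n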
df3f0fef51f57865833ba1dbd81b13e1d3c58c55
Shell
sangam-belose/learning-spring
/src/main/resources/postgress/start_postgres.sh
UTF-8
471
2.859375
3
[]
no_license
#!/usr/bin/env bash set -euo pipefail #which psql > /dev/null || (echoerr "Please ensure that postgres client is in your PATH" && exit 1) #mkdir -p $HOME/docker/volumes/postgres #rm -rf $HOME/docker/volumes/postgres/data docker run --name pg-docker -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=dev -d -p 5432:5432 postgres:latest sleep 3 export PGPASSWORD=postgres #psql -U postgres -d dev -h localhost -f schema.sql #psql -U postgres -d dev -h localhost -f data.sql
true
1c4cd5d9e7ec834fa463cc016a062be609c04dfa
Shell
d1namo/Arch
/.scripts/color-wal
UTF-8
604
3.203125
3
[]
no_license
#!/usr/bin/env bash

# ~/.Scrips/wal -i ~/Imágenes/Wallpapers/ -o wal-set -a 95

source "${HOME}/.cache/wal/colors.sh"

reload_dunst() {
    pkill dunst && \
    dunst \
    -lb "$color15" \
    -nb "$color15" \
    -cb "$color15" \
    -lf "$color0" \
    -bf "$color0" \
    -cf "$color9" \
    -fn "${DUNST_FONT:-Artwiz Lemon 7}" \
    -geometry "${DUNST_SIZE:-300x30-40+40}" &
}

set_bar() {
    pkill bar
    bar "$color15" "$color0" 2>/dev/null &
}

main() {
    reload_dunst &
    set_bar &
    set_scss &
}

main >/dev/null 2>&1
true
fa224fa88fde7a714d452830aa85031cf28f30d1
Shell
fgpolito/AlexaPi
/alexapi/gpio
UTF-8
1,148
3.640625
4
[ "FSFAP", "MIT" ]
permissive
#!/bin/sh BASE_PATH="/sys/class/gpio" EXPORT_FILE="export" UNEXPORT_FILE="unexport" BUTTON_1="18" LED_1="23" LED_2="24" LED_3="25" #unexport the required GPIOs (reset) echo ${BUTTON_1} > ${BASE_PATH}/${UNEXPORT_FILE} echo ${LED_1} > ${BASE_PATH}/${UNEXPORT_FILE} echo ${LED_2} > ${BASE_PATH}/${UNEXPORT_FILE} echo ${LED_3} > ${BASE_PATH}/${UNEXPORT_FILE} #export the required GPIOs echo "Enabling GPIOs..." echo ${BUTTON_1} > ${BASE_PATH}/${EXPORT_FILE} echo ${LED_1} > ${BASE_PATH}/${EXPORT_FILE} echo ${LED_2} > ${BASE_PATH}/${EXPORT_FILE} echo ${LED_3} > ${BASE_PATH}/${EXPORT_FILE} #set directions echo "Configuring GPIOs..." echo in > ${BASE_PATH}/gpio${BUTTON_1}/direction echo out > ${BASE_PATH}/gpio${LED_1}/direction echo out > ${BASE_PATH}/gpio${LED_2}/direction echo out > ${BASE_PATH}/gpio${LED_3}/direction echo "GPIO configuration completed." echo "Press the button on the breadboard..." while true; do BTN_IN=$(cat ${BASE_PATH}/gpio${BUTTON_1}/value) echo ${BTN_IN} > ${BASE_PATH}/gpio${LED_1}/value echo ${BTN_IN} > ${BASE_PATH}/gpio${LED_2}/value echo ${BTN_IN} > ${BASE_PATH}/gpio${LED_3}/value sleep 0.1 done
true
a35d2ee8d267b3bdcf7ad6a271a6668d9744f072
Shell
Ritvik19/Shell-Scripts
/n-most-common.sh
UTF-8
280
3.234375
3
[]
no_license
while getopts 'c:f:n:' opt do case $opt in c) COL=$OPTARG ;; f) FILE_PATH=$OPTARG ;; n) N_ROWS=$OPTARG ;; esac done cut -d ',' -f $COL $FILE_PATH | head -n 1 cut -d ',' -f $COL $FILE_PATH | tail -n +2 | sort | uniq -c | sort -nr | head -n $N_ROWS
true
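Invocation sketch for the frequency script above; the file name, column index and row count are placeholders:

  # print the header of column 3 of data.csv, then its 5 most frequent values with counts
  ./n-most-common.sh -f data.csv -c 3 -n 5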
2f681aa2af32816cc412253d7fce06a8d11d6fa1
Shell
clairemcwhite/genetic_code
/get_count.sh
UTF-8
209
2.984375
3
[ "LicenseRef-scancode-mit-old-style", "LicenseRef-scancode-public-domain" ]
permissive
BLA=$1 #grep -Ff six_or_more.txt ../proteome/human_proteome.fasta while read x do count=`grep -o $x proteome/human_proteinstring.txt | wc -l` if [ ! $count -eq 0 ] then echo $x $count fi done < $BLA
true
0677d2be1fbbcbf1aca652ecca546b73954c566e
Shell
garsir01/ds
/hashing/make-random-uint64.sh
UTF-8
1,354
3.84375
4
[]
no_license
#!/bin/bash
#
# Get a bunch of random numbers from RANDOM.ORG. These are then parsed and put
# into a particular header file.

# Returns a list of 66 lines, each of which has four (random) numbers
URL="http://www.random.org/integers/?num=264&min=0&max=65535&col=4&base=10&format=plain&rnd=new"
NUMBERS="$(curl $URL)"
#NUMBERS=$(cat random.org)
N=$(echo "$NUMBERS" | wc -l)
#echo "$NUMBERS"

RAND=random-numbers-uint64.hpp
TAB="$(printf '\t')"

# Generate random number header file:
cat <<EOF > $RAND
/**
 * This file is auto-generated by make-random.sh and should not be edited by
 * hand. It contains a list of 66 random uint64_t values, generated by
 * contacting random.org using the following URL:
 *
 * $TAB$URL
 */
#ifndef __RANDOM_NUMBERS
#define __RANDOM_NUMBERS

#define N_RANDOM_NUMBERS $N

namespace dshash_wrapped {

uint64_t random_numbers[N_RANDOM_NUMBERS] = {
EOF

#static struct number a = { 0, ((uint32_t) 248421 << 16) | ((uint32_t) 17703), ((uint32_t) 367827 << 16) | ((uint32_t) 516523) };

while read -r line; do
    arr=(${line//\t/ })
    #echo "1: echo ${arr[0]}, 2: ${arr[1]}, 3: ${arr[2]}, 4: ${arr[3]}"
    cat <<EOF >> $RAND
$TAB$TAB(uint64_t)${arr[0]} << 48$TAB| (uint64_t)${arr[1]} << 32$TAB| (uint64_t)${arr[2]} << 16$TAB| (uint64_t)${arr[3]},
EOF
done <<< "$NUMBERS"

cat <<EOF >> $RAND
};

} // End of namespace

#endif
EOF
true
d79a0cd091a3cce7e5698e45033353c3f62db62d
Shell
kaffeed/dots
/bin/select_wm.sh
UTF-8
592
3.03125
3
[]
no_license
#!/bin/sh #=============================================================================== # # FILE: select_wm.sh # # USAGE: ./select_wm.sh # # DESCRIPTION: # # OPTIONS: --- # REQUIREMENTS: --- # BUGS: --- # NOTES: --- # AUTHOR: YOUR NAME (), # ORGANIZATION: # CREATED: 06/20/2016 17:03 # REVISION: --- #=============================================================================== if [ $# -eq 1 ] ; then echo "Loading $1" && sh "$HOME/bin/init_$1.sh" else echo "Loading default i3" && sh "$HOME/bin/init_i3.sh" fi
true
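Usage sketch: the script maps its single argument onto an init_<name>.sh helper in ~/bin, so that helper (here a made-up name) has to exist:

  ./select_wm.sh bspwm   # runs ~/bin/init_bspwm.sh
  ./select_wm.sh         # no argument: falls back to ~/bin/init_i3.sh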
a496ccc019719d6cd320f7f7aaba046baea32e05
Shell
canpok1/sample-pi
/buzzer.sh
UTF-8
490
3.125
3
[]
no_license
#!/bin/bash

# Sample that sounds a buzzer on a Raspberry Pi 3
# Prerequisite: connect a buzzer to GPIO12

BUZZER_GPIO=12

# PWM frequency = 19.2 MHz / CLOCK value / RANGE value
# Roughly a do-re-mi-fa-sol-la-ti-do scale
PWM_CLOCK=100
PWM_RANGES=(734 654 583 550 490 437 389 367)

gpio -g mode ${BUZZER_GPIO} pwm
gpio pwm-ms
gpio pwmc ${PWM_CLOCK}
gpio -g pwm ${BUZZER_GPIO} 50

for range in "${PWM_RANGES[@]}"; do
    gpio pwmr ${range}
    sleep 1
done

gpio -g pwm ${BUZZER_GPIO} 0
true
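Worked example of the frequency formula noted in the script above: with PWM_CLOCK=100, the first RANGE value 734 gives 19.2 MHz / 100 / 734 ≈ 261.6 Hz (roughly middle C), and the last value 367 gives ≈ 523.2 Hz, one octave higher; halving the range value doubles the pitch.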
8f0f5188851414fcf5cd0a314a6452475f836da4
Shell
jordandrako/dev-env
/dotfiles/.functions.zsh
UTF-8
790
4.25
4
[]
no_license
#!/usr/bin/zsh # Check if command is installed isInstalled() { [[ -x `command -v $1` ]] } # Green background echo green() { echo -e "\n\x1B[1;32m$1\x1B[0m\n" } # Yellow background echo info() { echo -e "\n\x1B[1;33m$1\x1B[0m\n" } # Error echo error() { echo -e "\n\x1B[1;5;31m$1\x1B[0m\n" 1>&2 [[ $2 -gt 0 ]] && exit $2 || echo -ne '\007' } # Perform task successfully or print failed try() { ( $* && success=true ) || ( success=false && error "FAILED: $*" ) } # Ask with blue background. # Pass question and options: `ask "question" "yes/no" readVarName` ask() { if [[ ! $1 ]]; then error "You must pass a question to ask!" else question=$1 input=${2:="y/n"} echo -e "\n\x1B[1;34m$question\x1B[0m" && echo "[$input] > " [[ $3 ]] && read $3 fi }
true
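A short usage sketch for the zsh helpers above; the question text, the reply variable name and the apt command are arbitrary examples, not part of the original file:

  if isInstalled git; then
    green "git is already installed"
  else
    ask "Install git now?" "y/n" reply
    [[ "$reply" == "y" ]] && try sudo apt install -y git
  fi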
42aea3d61519685917e7e60bfd1d024b5506236f
Shell
ushahidi/Ushahidi_Deploy
/debian6.sh
UTF-8
1,661
2.890625
3
[]
no_license
# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. #!/bin/bash # Stop whiptail from prompting for user input during unattended deployment. export DEBIAN_FRONTEND=noninteractive # Perform Debian updates. apt-get update apt-get upgrade -q -y # Install necessary Debian packages. apt-get install -q -y apache2 mysql-server php5 php5-curl php5-mysql php5-mcrypt php5-imap php-pear git-core postfix # Enable Apache mod_rewrite. a2enmod rewrite # Enable Apache .htaccess files. sed -i 's/AllowOverride None/AllowOverride All/g' /etc/apache2/sites-enabled/000-default # Create MySQL user account for Ushahidi. echo 'create database ushahidi default charset utf8; grant all on ushahidi.* to ushahidi@localhost identified by "ushahidi";' | mysql -u root # Restart Apache to ensure new configuration is loaded. /etc/init.d/apache2 restart # Clone the Ushahidi repository into /var/www. cd /var/www rm index.html git clone https://github.com/ushahidi/Ushahidi_Web.git . # Transfer ownership of the application to the same user as the Apache process. chown -R www-data:www-data .
true