blob_id (stringlengths 40) | language (stringclasses 1) | repo_name (stringlengths 4-115) | path (stringlengths 2-970) | src_encoding (stringclasses 28) | length_bytes (int64 31-5.38M) | score (float64 2.52-5.28) | int_score (int64 3-5) | detected_licenses (listlengths 0-161) | license_type (stringclasses 2) | text (stringlengths 31-5.39M) | download_success (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---
fb120e6ce7560dd2cf5f60fb8d772b8654aa81f6
|
Shell
|
srikanthkris/lockdown-learning
|
/week3/test_loops.sh
|
UTF-8
| 112 | 2.671875 | 3 |
[] |
no_license
|
#!/bin/bash
# This is an example loop
for i in a b c d
do
echo ${i}
done
echo 'hello world'
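# Equivalent illustrative form (an alternative sketch, not in the original
# lesson): brace expansion generates the same list, one letter per line.
for letter in {a..d}
do
echo ${letter}
done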
| true |
8bf780f3a943c0da2495f8950ed2159f4508ca87
|
Shell
|
KerTakanov/ProjetInfo424
|
/demos/demo_filtres_hsv.sh
|
ISO-8859-1
| 801 | 2.640625 | 3 |
[] |
no_license
|
function wait {
read -p "Press any key to continue... " -n1 -s
clear
}
echo "==================================================="
echo "============ HSV Filters Demonstration ============"
echo "==================================================="
echo "Applying the Hue filter"
wait
./bl_demo.sh ../assets/input_color.ppm -o ../assets/ouput.ppm -hue 120
echo "Applying the Saturation filter"
wait
./bl_demo.sh ../assets/input_color.ppm -o ../assets/ouput.ppm -sat 1.5
echo "Applying the Brightness filter"
wait
./bl_demo.sh ../assets/input_color.ppm -o ../assets/ouput.ppm -lum 85
echo "Applying the blend-image-by-hue filter"
wait
./bl_demo.sh ../assets/input_color.ppm -o ../assets/ouput.ppm -melhue ../assets/pbmlib.ppm
| true |
c112cdcfbddb29e09c2f28f93fc1d5a0d6d19a43
|
Shell
|
JaredBoone/dotfiles.old
|
/macos/set-defaults.sh
|
UTF-8
| 4,735 | 3.03125 | 3 |
[] |
no_license
|
#!/bin/bash
# Ask for the administrator password upfront
sudo -v
# Keep-alive: update existing `sudo` timestamp until finished
while true; do sudo -n true; sleep 60; kill -0 "$$" || exit; done 2>/dev/null &
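# (`kill -0 "$$"` merely tests that this script is still running; once the
# script exits, the keep-alive loop stops refreshing the sudo timestamp.)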
# Only use UTF-8 in Terminal.app
defaults write com.apple.terminal StringEncodings -array 4
# Save screenshots to ~/Documents/Screenshots
defaults write com.apple.screencapture location -string "${HOME}/Documents/Screenshots"
# Enable sub-pixel rendering on non-Apple LCDs.
#defaults write NSGlobalDomain AppleFontSmoothing -int 2
# Disable and kill Dashboard
# Can be reverted with:
# defaults write com.apple.dashboard mcx-disabled -boolean NO; killall Dock
defaults write com.apple.dashboard mcx-disabled -boolean YES; killall Dock
# Open text files with sublimetext3 by default
defaults write com.apple.LaunchServices LSHandlers -array-add '{LSHandlerContentType=public.plain-text;LSHandlerRoleAll=com.sublimetext.3;}'
# Set notification banner display time
defaults write com.apple.notificationcenterui bannerTime 3
###############################################################################
# Finder
###############################################################################
# Show the ~/Library folder.
chflags nohidden ~/Library
# Always open everything in Finder's column view.
#defaults write com.apple.Finder FXPreferredViewStyle Nlsv
# Show hidden files and file extensions by default
#defaults write com.apple.finder AppleShowAllFiles -bool true
#defaults write NSGlobalDomain AppleShowAllExtensions -bool true
# Disable the warning when changing file extensions
#defaults write com.apple.finder FXEnableExtensionChangeWarning -bool false
# Allow text-selection in Quick Look
#defaults write com.apple.finder QLEnableTextSelection -bool true
# Disable the warning before emptying the Trash
#defaults write com.apple.finder WarnOnEmptyTrash -bool false
# Enable auto-correct
#defaults write NSGlobalDomain NSAutomaticSpellingCorrectionEnabled -bool true
# Disable the “Are you sure you want to open this application?” dialog
defaults write com.apple.LaunchServices LSQuarantine -bool false
# Expand print panel by default
defaults write NSGlobalDomain PMPrintingExpandedStateForPrint -bool true
# Expand save panel by default
defaults write NSGlobalDomain NSNavPanelExpandedStateForSaveMode -bool true
# Disable Resume system-wide
#defaults write com.apple.systempreferences NSQuitAlwaysKeepsWindows -bool false
# Disable the crash reporter
defaults write com.apple.CrashReporter DialogType -string "none"
# Set new Finder windows to open the home folder
defaults write com.apple.finder NewWindowTargetPath "file://$HOME/"
###############################################################################
# Dock
###############################################################################
# Show indicator lights for open applications in the Dock
#defaults write com.apple.dock show-process-indicators -bool true
# Add several spacers
#defaults write com.apple.dock persistent-apps -array-add '{tile-data={}; tile-type="spacer-tile";}'
#defaults write com.apple.dock persistent-apps -array-add '{tile-data={}; tile-type="spacer-tile";}'
#defaults write com.apple.dock persistent-apps -array-add '{tile-data={}; tile-type="spacer-tile";}'
#defaults write com.apple.dock persistent-apps -array-add '{tile-data={}; tile-type="spacer-tile";}'
# Automatically hide and show the Dock
# defaults write com.apple.dock autohide -bool true
# Do not show recents
defaults write com.apple.dock show-recents -bool false
################
# Calendar
################
defaults write com.apple.iCal "Default duration in minutes for new event" -int 15
###############################################################################
# Do some clean up work.
###############################################################################
for app in "Activity Monitor" "Address Book" "Calendar" "Contacts" "cfprefsd" \
"Dock" "Finder" "Mail" "Messages" "Safari" "SystemUIServer" \
"Terminal" "Twitter" "iCal"; do
killall "${app}" > /dev/null 2>&1
done
# Wait a bit before moving on...
sleep 1
# ...and then.
echo "Success! Defaults are set."
echo "Some changes will not take effect until you reboot your machine."
# See if the user wants to reboot.
function reboot() {
read -p "Do you want to reboot your computer now? (y/N)" choice
case "$choice" in
y | Y | yes | Yes ) echo "Yes"; exit;; # If y | yes, reboot
n | N | no | No ) echo "No"; exit;; # If n | no, exit
* ) echo "Invalid answer. Enter \"y/yes\" or \"n/no\"" && return;;
esac
}
# Call on the function
if [[ "Yes" == $(reboot) ]]
then
echo "Rebooting."
sudo reboot
exit 0
else
exit 1
fi
| true |
03cbe91d367b6f71db2fd051ae63939bbaee9940
|
Shell
|
martinboller/update-leap
|
/update-leap.sh
|
UTF-8
| 5,078 | 3.78125 | 4 |
[] |
no_license
|
#!/bin/bash
#####################################################################
# #
# Author: Martin Boller #
# #
# Email: martin@bollers.dk #
# Last Update: 2019-06-17 #
# Version: 1.10 #
# #
# Changes: initial update-leap service creator (1.00) #
# Added ntp/timesyncd/dhcp functions (1.10) #
# #
# Usage: Installs systemd timer and service #
# To update leap-seconds file at regular #
# intervals #
# #
#####################################################################
install_ntp() {
echo -e "\e[32minstall_ntp()\e[0m";
export DEBIAN_FRONTEND=noninteractive;
sudo apt-get update;
sudo apt-get -y install ntp;
sudo systemctl daemon-reload;
sudo systemctl enable ntp.service;
sudo systemctl start ntp.service;
/usr/bin/logger 'install_ntp()' -t 'NTP Server';
}
configure_update_leap() {
echo -e "\e[32mconfigure_update-leap()\e[0m";
echo -e "\e[36m-Creating service unit file\e[0m";
sudo sh -c "cat << EOF > /lib/systemd/system/update-leap.service
# service file running update-leap
# triggered by update-leap.timer
[Unit]
Description=service file running update-leap
Documentation=man:update-leap
[Service]
User=ntp
Group=ntp
ExecStart=-/usr/bin/update-leap -F -f /etc/ntp.conf -s http://www.ietf.org/timezones/data/leap-seconds.list /var/lib/ntp/leap-seconds.list
WorkingDirectory=/var/lib/ntp/
[Install]
WantedBy=multi-user.target
EOF";
echo -e "\e[36m-creating timer unit file\e[0m";
sudo sh -c "cat << EOF > /lib/systemd/system/update-leap.timer
# runs update-leap Weekly.
[Unit]
Description=Weekly job to check for updated leap-seconds.list file
Documentation=man:update-leap
[Timer]
# Don't run for the first 15 minutes after boot
OnBootSec=15min
# Run Weekly
OnCalendar=Weekly
# Specify service
Unit=update-leap.service
[Install]
WantedBy=multi-user.target
EOF";
sync;
echo -e "\e[36m-Get initial leap file making sure timer and service can run\e[0m";
sudo wget -O /var/lib/ntp/leap-seconds.list http://www.ietf.org/timezones/data/leap-seconds.list;
# Tell NTP where the leap-seconds file is
echo "leapfile /var/lib/ntp/leap-seconds.list" | sudo tee -a /etc/ntp.conf;
sudo systemctl daemon-reload;
sudo systemctl enable update-leap.timer;
sudo systemctl enable update-leap.service;
sudo systemctl daemon-reload;
sudo systemctl start update-leap.timer;
sudo systemctl start update-leap.service;
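# Optional sanity check (assumed commands, not part of the original flow):
# systemctl list-timers update-leap.timer
# systemctl status update-leap.service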
/usr/bin/logger 'configure_update-leap()' -t 'update-leap';
}
disable_timesyncd() {
echo -e "\e[32mDisable_timesyncd()\e[0m";
sudo systemctl stop systemd-timesyncd
sudo systemctl daemon-reload
sudo systemctl disable systemd-timesyncd
/usr/bin/logger 'disable_timesyncd()' -t 'NTP Server';
}
configure_dhcp_ntp() {
echo -e "\e[32mconfigure_dhcp()\e[0m";
## Remove ntp and timesyncd exit hooks to cater for server using DHCP
echo -e "\e[36m-Remove scripts utilizing DHCP\e[0m";
sudo rm /etc/dhcp/dhclient-exit-hooks.d/ntp
sudo rm /etc/dhcp/dhclient-exit-hooks.d/timesyncd
## Remove ntp.conf.dhcp if it exist
echo -e "\e[36m-Removing ntp.conf.dhcp\e[0m";
sudo rm /run/ntp.conf.dhcp
## Disable NTP option for dhcp
echo -e "\e[36m-Disable ntp_servers option from dhclient\e[0m";
sudo sed -i -e "s/option ntp_servers/#option ntp_servers/" /etc/dhcpcd.conf;
## restart NTPD yet again after cleaning up DHCP
sudo systemctl restart ntp
/usr/bin/logger 'configure_dhcp()' -t 'NTP Server';
}
#################################################################################################################
## Main Routine #
#################################################################################################################
main() {
# Install NTP
install_ntp;
# Install NTP tools (note: install_ntp_tools is not defined in this script,
# so it must be provided by a sourced library for this call to succeed)
install_ntp_tools;
# Disable timesyncd to let ntp take care of time
disable_timesyncd;
# Ensure that DHCP does not affect ntp - do make sure that valid ntp servers are configured in ntp.conf
configure_dhcp_ntp;
# Create and configure systemd unit files to update leapseconds file
configure_update_leap;
# Add other stuff to install here as required
## Finish with encouraging message
echo -e "\e[32mInstallation and configuration of NTP and update-leap complete.\e[0m";
echo -e;
}
main;
exit 0
| true |
34d55cf21c38681abf687592536f1eb80dd8b9fb
|
Shell
|
akjadhav/susi_linux
|
/system-integration/scripts/susi-linux
|
UTF-8
| 1,019 | 3.90625 | 4 |
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
# Start/Stop the SUSI.AI main program
# this wrapper is installed as follows:
# user mode
# .../SUSI.AI/bin/
# .../SUSI.AI/pythonmods/
# system mode
# prefix/bin/
# prefix/lib/SUSI.AI/pythonmods
do_start() {
DIR="$(dirname "$(readlink -f "$0")")"
PMA="$(readlink -m "$DIR/../pythonmods")"
PMB="$(readlink -m "$DIR/../lib/SUSI.AI/pythonmods")"
if [ -d "$PMA" ] && [ -r "$PMA/susi_linux" ] ; then
PYTHONPATH="$PMA":$PYTHONPATH
export PYTHONPATH
elif [ -d "$PMB" ] && [ -r "$PMB/susi_linux" ] ; then
PYTHONPATH="$PMB":$PYTHONPATH
export PYTHONPATH
else
echo "Cannot find SUSI.AI pythonmods, trying without it" >&2
fi
exec python3 -m susi_linux -v --short-log
}
do_stop() {
pkill -f susi_linux
}
case "$1" in
start)
do_start ;;
stop)
do_stop ;;
restart)
do_stop ; sleep 1 ; do_start ;;
*)
echo "Usage: susi-linux {start|stop|restart}" >&2
exit 1
;;
esac
| true |
bb6d838530e878ccccfe2e5efbc98ac0ecf07097
|
Shell
|
AbdelOuery/dotfiles
|
/install.sh
|
UTF-8
| 705 | 3.578125 | 4 |
[] |
no_license
|
#!/bin/bash
SCRIPT_PATH="$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )"
# Populating git user inside git_config
echo "Setting up git user file!"
read -p "Enter your username [ (b) bypass this step ]: " git_user
# Scripts and rcs
ln -s "$SCRIPT_PATH/git_config" "/home/$USER/.gitconfig"
ln -s "$SCRIPT_PATH/vim_config" "/home/$USER/.vimrc"
ln -s "$SCRIPT_PATH/init_script.zsh" "/home/$USER/.oh-my-zsh/custom/init_script.zsh"
if [ "$git_user" != "b" ]; then
read -p "Enter your git email: " git_email
if [ ! -z "$git_email" ]; then
# Append new settings to git_config file
echo "[user]
name = $git_user
email = $git_email" >> /home/$USER/.gitconfig
fi
fi
| true |
e059f5da22d24ce6bc46e19e1fe1bc87d2bc4474
|
Shell
|
tbielawa/Class-Code
|
/CS450/tdf-os/tags/module-r4/utils/version
|
UTF-8
| 285 | 2.671875 | 3 |
[] |
no_license
|
#!/bin/bash
# 6 modules = 1/6 minor numbers each release towards 1.0.0
# R1 = 0.16.66
# R2 = 0.33.33
# R3 = 0.50.00
# R4 = 0.66.66
# R5 = 0.83.33
# R6 = 1.00.00
NUM_VERSION="0.3.33"
VERSTR="tdf-os version: ${NUM_VERSION} $(date +%F) ($(svn info | grep 'Revision' -))"
echo $VERSTR
| true |
05e63f1a0bf707286276a6c6ac3b92f3e40b9a5a
|
Shell
|
ReanyAlex/unix_and_bash
|
/bash-scripting/iftest.sh
|
UTF-8
| 185 | 3.3125 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/sh
let "a=$1*$2"
if [ $a -gt 674 ]
then
echo "The numbers multiplied to $a and are larger than 674"
else
echo "The numbers multiplied to $a and are not larger than 674"
fi
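# Example run (illustrative): ./iftest.sh 25 30
# -> "The numbers multiplied to 750 and are larger than 674"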
| true |
117bedf30ee71a5eafd9db1953f6a9030fee2f47
|
Shell
|
omskvelo/race-numbers
|
/#old/make-start-numbers-17/add_bg.sh
|
UTF-8
| 151 | 2.734375 | 3 |
[] |
no_license
|
#!/bin/bash -ex
cd "$(dirname "${BASH_SOURCE[0]}")"
mkdir -p out
cd out_num
for f in *.pdf; do
pdftk "$f" background ../bg.pdf output "../out/$f"
done
| true |
386761bd55d54830d07384f297947623c10c3a2c
|
Shell
|
TiloGit/Workflow_Chef
|
/multinodes/apply_ifix/baw_multinodes_apply_ifix.sh
|
UTF-8
| 7,543 | 3.25 | 3 |
[] |
no_license
|
#!/bin/bash
# set -e
# Any subsequent(*) commands which fail will cause the shell script to exit immediately
#
# Operating Systems Supported
# Ubuntu 16.04 LTS; Ubuntu 18.04 LTS
#
# IBM Business Automation Workflow Cookbook Project, https://github.com/IBM-CAMHub-Open/cookbook_ibm_workflow_multios
#
# This script work with IBM Business Automation Workflow Cookbook project to apply interim fix packs to IBM Business Automation Workflow Enterprise on two hosts.
# Topology
# Host 1, Workflow01 or WF01: IBM Business Automation Workflow Deployment Manager (Dmgr), Custom Node, one cluster member
# Host 2, Workflow02 or WF02: IBM Business Automation Workflow Custom Node, one cluster member
######## Upload all roles to the chef server ########
Upload_Roles () {
knife role from file $BAW_CHEF_TEMP_DIR/$WF01_ROLE_APPLYIFIX_FILE || return 1
knife role from file $BAW_CHEF_TEMP_DIR/$WF01_ROLE_POSTDEV_FILE || return 1
knife role from file $BAW_CHEF_TEMP_DIR/$WF02_ROLE_APPLYIFIX_FILE || return 1
knife role from file $BAW_CHEF_TEMP_DIR/$WF02_ROLE_POSTDEV_FILE
}
######## Define BAW multiple node installation dependency logic units #######
######## NODE Workflow01, WF01, step 1, 2 ########
WF01_step1 () {
# sequential
knife node run_list set $WF01_ON_CHEF_SERVER "role[$WF01_ROLE_APPLYIFIX_NAME]" &&
knife vault update $BAW_CHEF_VAULT_NAME $BAW_CHEF_VAULT_ITEM -S "role:$WF01_ROLE_APPLYIFIX_NAME" -C "$WF01_ON_CHEF_SERVER" -M client || { echo "Error when updating chef vault"; return 1; }
knife ssh "name:$WF01_ON_CHEF_SERVER" -a ipaddress "sudo chef-client -l info -L $LOCAL_CHEF_CLIENT_LOG" -x $WF01_ROOT_USERNAME -P "$WF01_ROOT_PW" | Purification_Logs >> $WF01_LOG &
local TASK_WF01_APPLYIFIX=$!
readonly TASK_WF01_APPLYIFIX
Monitor 0 "$TASK_WF01_APPLYIFIX" "$LOG_WF01_NAME Applyifix(1 task left)" || return 1
knife node run_list add $WF01_ON_CHEF_SERVER "role[$WF01_ROLE_POSTDEV_NAME]" &&
knife ssh "name:$WF01_ON_CHEF_SERVER" -a ipaddress "sudo chef-client -l info -L $LOCAL_CHEF_CLIENT_LOG" -x $WF01_ROOT_USERNAME -P "$WF01_ROOT_PW" | Purification_Logs >> $WF01_LOG &
local TASK_WF01_POSTDEV=$!
readonly TASK_WF01_POSTDEV
Monitor 0 "$TASK_WF01_POSTDEV" "$LOG_WF01_NAME Post Action(0 tasks left)"
}
WF01_step2 () {
# sequential
# knife node run_list add $WF01_ON_CHEF_SERVER "role[$WF01_ROLE_WEBSERVER]" &&
# knife ssh "name:$WF01_ON_CHEF_SERVER" -a ipaddress "sudo chef-client -l info -L $LOCAL_CHEF_CLIENT_LOG" -x $WF01_ROOT_USERNAME -P "$WF01_ROOT_PW" | Purification_Logs >> $WF01_LOG &
# local TASK_WF01_WEBSERVER=$!
# readonly TASK_WF01_WEBSERVER
# Monitor 0 "$TASK_WF01_WEBSERVER" "$LOG_WF01_NAME Configure Web Server" || return 1
# knife node run_list add $WF01_ON_CHEF_SERVER "role[$WF01_ROLE_POSTDEV_NAME]" &&
# knife ssh "name:$WF01_ON_CHEF_SERVER" -a ipaddress "sudo chef-client -l info -L $LOCAL_CHEF_CLIENT_LOG" -P "$WF01_ROOT_PW" | Purification_Logs >> $WF01_LOG &
# local TASK_WF01_POSTDEV=$!
# readonly TASK_WF01_POSTDEV
# Monitor 0 "$TASK_WF01_POSTDEV" "$LOG_WF01_NAME Post Action(0 tasks left)"
:
}
######## NODE Workflow02 WF02, step 1, 2 ########
WF02_step1 () {
# sequential
knife node run_list set $WF02_ON_CHEF_SERVER "role[$WF02_ROLE_APPLYIFIX_NAME]" &&
knife vault update $BAW_CHEF_VAULT_NAME $BAW_CHEF_VAULT_ITEM -S "role:$WF02_ROLE_APPLYIFIX_NAME" -C "$WF02_ON_CHEF_SERVER" -M client || { echo "Error when updating chef vault"; return 1; }
knife ssh "name:$WF02_ON_CHEF_SERVER" -a ipaddress "sudo chef-client -l info -L $LOCAL_CHEF_CLIENT_LOG" -x $WF02_ROOT_USERNAME -P "$WF02_ROOT_PW" | Purification_Logs >> $WF02_LOG &
local TASK_WF02_APPLYIFIX=$!
readonly TASK_WF02_APPLYIFIX
Monitor 0 "$TASK_WF02_APPLYIFIX" "$LOG_WF02_NAME Applyifix(1 task left)"
}
WF02_step2 () {
# sequential
knife node run_list add $WF02_ON_CHEF_SERVER "role[$WF02_ROLE_POSTDEV_NAME]" &&
knife ssh "name:$WF02_ON_CHEF_SERVER" -a ipaddress "sudo chef-client -l info -L $LOCAL_CHEF_CLIENT_LOG" -x $WF02_ROOT_USERNAME -P "$WF02_ROOT_PW" | Purification_Logs >> $WF02_LOG &
local TASK_WF02_POSTDEV=$!
readonly TASK_WF02_POSTDEV
Monitor 0 "$TASK_WF02_POSTDEV" "$LOG_WF02_NAME Post Action(0 tasks left)"
}
######## BAW Installation, WF01, WF02 ########
BAW_Multiple_Nodes_Installation_Start () {
# parallel
local tasks_baw_multinodes_install=()
WF01_step1 &
tasks_baw_multinodes_install+=("$!")
echo
# echo "$(date -Iseconds), MTASK: $LOG_WF01_NAME Step 1 of 2 starts, TASKS List (Installation, Upgrade, Applyifix, Configuration)"
echo "$(date -Iseconds), MTASK: $LOG_WF01_NAME, there are 2 tasks to do: Applyifix, Post Action"
WF02_step1 &
tasks_baw_multinodes_install+=("$!")
# echo "$(date -Iseconds), MTASK: $LOG_WF02_NAME Step 1 of 2 starts, TASKS List (Installation, Upgrade, Applyifix)"
echo "$(date -Iseconds), MTASK: $LOG_WF02_NAME, there are 2 tasks to do: Applyifix, Post Action"
echo
# Monitor 1 "${tasks_baw_multinodes_install[*]}" "$LOG_WF01_NAME Step 1 of 2" "$LOG_WF02_NAME Step 1 of 2"
Monitor 1 "${tasks_baw_multinodes_install[*]}"
}
######## Start the program ########
BAW_Multiple_Nodes_Chef_Start () {
# sequential
Upload_Roles || return 1
BAW_Chef_Vaults "m" || return 1
BAW_Multiple_Nodes_Installation_Start
}
Main_Start () {
Print_Start_Flag
echo "Start to apply interim fix packs to IBM Business Automation Workflow Enterprise on two hosts."
echo
Generate_Roles "apply_ifix" || return 1
######## Prepare logs for nodes #######
# $WF01_IP_ADDR depends on . "$MY_DIR/../libs/dynamic_roles_singlenode_script"
# The name for WF01 in log printing
LOG_WF01_NAME="Host_${var_Workflow01_name}($WF01_IP_ADDR), Workflow01"
readonly LOG_WF01_NAME
# The name for WF02 in log printing
LOG_WF02_NAME="Host_${var_Workflow02_name}($WF02_IP_ADDR), Workflow02"
readonly LOG_WF02_NAME
WF01_LOG="${LOG_DIR}/wf01_${var_Workflow01_name}_${WF01_IP_ADDR}_chef.log"
readonly WF01_LOG
WF02_LOG="${LOG_DIR}/wf02_${var_Workflow02_name}_${WF02_IP_ADDR}_chef.log"
readonly WF02_LOG
Print_Start_Flag >> $WF01_LOG
Print_Start_Flag >> $WF02_LOG
Print_TopologyLogs_Multinodes
BAW_Multiple_Nodes_Chef_Start
Print_Main_Exist_Status "$?" || return 1
Print_End_Flag_Multinodes >> $WF01_LOG
Print_End_Flag_Multinodes >> $WF02_LOG
Print_TopologyLogs_Multinodes
Print_End_Flag_Multinodes
}
######## Programs below ########
######## Include libs ########
MY_DIR=${0%/*}
if [[ ! -d "$MY_DIR" ]]; then MY_DIR="$PWD"; readonly MY_DIR; fi
#echo current Dir is $MY_DIR
. "$MY_DIR/../../libs/utilities_script" &&
. "$MY_DIR/../../libs/dynamic_roles_script" &&
. "$MY_DIR/../../libs/dynamic_roles_multinodes_script" &&
# The properties file path
readonly BAW_CHEF_PROPERTIES_DIR="$MY_DIR"
# ./baw_singlenode.properties
readonly BAW_CHEF_PROPERTIES_FILE="$BAW_CHEF_PROPERTIES_DIR/baw_multinodes_apply_ifix.properties"
# Test if $BAW_CHEF_PROPERTIES_FILE exists
getValueFromPropFile $BAW_CHEF_PROPERTIES_FILE || exit 1
Load_Host_Name_Multinodes || exit 1
# Reference to templates dir
readonly BAW_CHEF_TMPL_DIR=$MY_DIR/../../templates
######## Prepare logs #######
# define where to log
readonly REQUESTED_LOG_DIR="/var/log/baw_chef_shell_log/multinodes_noihs/hosts_${var_Workflow01_name}_${var_Workflow02_name}/apply_ifix"
readonly LOG_DIR="$( Create_Dir $REQUESTED_LOG_DIR )"
# echo "BAW LOG Dir created $LOG_DIR"
readonly BAW_CHEF_LOG="${LOG_DIR}/monitor_${var_Workflow01_name}_${var_Workflow02_name}.log"
Main_Start 2>&1 | tee -a $BAW_CHEF_LOG
| true |
d7e20ce6cc4afa8933bc434e289561f8f714f64d
|
Shell
|
convirt/Ubuntu1204
|
/install/cms/common/functions
|
UTF-8
| 7,504 | 3.125 | 3 |
[] |
no_license
|
#!/bin/bash
#
# ConVirt - Copyright (c) 2008 Convirture Corp.
# ======
#
# ConVirt is a Virtualization management tool with a graphical user
# interface that allows for performing the standard set of VM operations
# (start, stop, pause, kill, shutdown, reboot, snapshot, etc...). It
# also attempts to simplify various aspects of VM lifecycle management.
#
#
# This software is subject to the GNU General Public License, Version 2 (GPLv2)
# and for details, please consult it at:
#
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
#
#
# author : Jd <jd_jedi@users.sourceforge.net>
#
SUDO=sudo
install_cms_prereq_packages()
{
echo "Probably installation not supported for your platform."
echo "Visit Convirture at http://www.convirture.com for more details."
exit 1
}
install_ez_setup()
{
$SUDO wget http://peak.telecommunity.com/dist/ez_setup.py
$SUDO python ez_setup.py
}
install_virtualenv()
{
$SUDO easy_install virtualenv
}
install_perl_modules()
{
T_DIR=`mktemp -d`
tar -xzf $base/HTML-Tagset-3.20.tar.gz -C $T_DIR
(cd $T_DIR/HTML-Tagset-3.20; perl Makefile.PL ; make; $SUDO make install)
ERR_CODE=$?
if [ "$ERR_CODE" != 0 ]; then
echo "Error installing HTML-Tagset perl module."
rm -rf $T_DIR
return $ERR_CODE
fi
tar -xzf $base/HTML-Parser-3.64.tar.gz -C $T_DIR
(cd $T_DIR/HTML-Parser-3.64; perl Makefile.PL ; make; $SUDO make install)
ERR_CODE=$?
if [ "$ERR_CODE" != 0 ]; then
echo "Error installing HTML-Parser perl module."
rm -rf $T_DIR
return $ERR_CODE
fi
rm -rf $T_DIR
}
#additional setup
additional_setup_mysql()
{
echo "No additional setup required for mysql."
}
# Make the innodb default engine
set_default_to_innodb()
{
echo "Calling base set_defalt_to_innodb which is empty."
}
restart_mysql()
{
$SUDO /etc/init.d/mysql restart
}
start_mysql()
{
$SUDO /etc/init.d/mysql start
}
status_mysql()
{
$SUDO /etc/init.d/mysql status
}
# create TG2 environment
create_tg2env()
{
virtualenv $VIRTUAL_ENV_DIR
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: creating virtualenv at $VIRTUAL_ENV_DIR"
return $ret
fi
source $VIRTUAL_ENV_DIR/bin/activate
easy_install extremes
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: installing extremes."
deactivate
return $ret
fi
#easy_install pysqlite
easy_install mysql-python
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: installing mysql-python."
deactivate
return $ret
fi
easy_install -i http://www.turbogears.org/2.0/downloads/2.0.3/index tg.devtools
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: installing TG2 (2.0.3)."
deactivate
return $ret
fi
fix_xen_in_tg2env
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: fixing xen in tg2env."
deactivate
return $ret
fi
# Kludge for Beaker future import problem
easy_install "Beaker >= 1.4"
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: installing Beaker >= 1.4"
deactivate
return $ret
fi
f_names=`ls $VIRTUAL_ENV_DIR/lib/python2.4/site-packages/Beaker-*py2.4.egg/beaker/ext/google.py`
for f_name in $f_names
do
if [ -e $f_name ]; then
echo "Fixing $f_name for __future__ import problem."
sed -i.bak -e 's/^from __future__/#from __future__/g' $f_name
else
echo "No need to fix beaker google.py."
fi
done
deactivate
}
fix_xen_in_tg2env()
{
echo "No need to fix xen in tg2env."
}
fix_encoding_to_utf8()
{
if [ "$VIRTUAL_ENV_DIR" != "" ]; then
find $VIRTUAL_ENV_DIR -name "site.py" -exec sed -i.bak -e 's/encoding = "ascii"/encoding = "utf-8"/' {} \;
else
echo "VIRTUAL_ENV_DIR not set. Can not find site.py"
return 1
fi
return 0
}
generate_cms_id()
{
if [ "`id -u`" == "0" ]; then
mkdir -p /var/lib/convirt/identity
ID_FILE=/var/lib/convirt/identity/cms_id_rsa
else
ID_FILE=~/.ssh/cms_id_rsa
fi
if [ -e "${ID_FILE}" ]; then
echo "${ID_FILE} exists. "
else
if [ "`which expect 2>/dev/null`" == "" ]; then
# Expect is not installed, prompt the user.
ssh-keygen -t rsa -f ${ID_FILE}
else
# expect seems to be installed use it.
echo "Generting ssh identity for CMS. ${ID_FILE}"
expect -c '
set timeout -1
spawn ssh-keygen -q -t rsa -f '${ID_FILE}'
match_max 100000
expect {
"Enter passphrase (empty for no passphrase):" {
send -- "\r"
expect {
"Enter same passphrase again:" {
send -- "\r"
expect eof
}
}
}
"Overwrite" {
send -- "n\r"
expect eof
}
}
'
fi
chmod 0600 ${ID_FILE}*
fi
}
generate_cms_id_old()
{
if [ -e ~/.ssh/cms_id_rsa ]; then
echo " "
else
echo "Generting ssh identity for CMS."
ssh-keygen -t rsa -f ~/.ssh/cms_id_rsa
chmod 0600 ~/.ssh/cms_id_rsa*
fi
}
download_convirt()
{
cd $CONVIRT_BASE
wget --no-cache $CONVIRT_DOWNLOAD_URL -O $CONVIRT_TARBALL
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: fetching convirt tarball from $CONVIRT_DOWNLOAD_URL"
return $ret
fi
}
unpack_convirt()
{
cd $CONVIRT_BASE
tar -xzf $CONVIRT_TARBALL
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: Opening tar package $CONVIRT_TARBALL"
deactivate
return $ret
fi
}
success_msg()
{
# get the actual port from development.ini
PORT=8081
cat <<EOF
ConVirt Management Server (CMS) setup complete.
To start CMS, give the following commands.
cd $CONVIRT_DIR
./convirt-ctl start
By default the CMS listens on port $PORT, so make sure that your firewall
allows inbound TCP connections on port $PORT.
Typically this can be achieved by using the following command as root user
iptables -I INPUT -p tcp --dport $PORT -j ACCEPT
Once done, you should be able to access ConVirt application using
http://$HOSTNAME:$PORT/
EOF
}
echo "install common functions sourced."
deactivate_env()
{
if [ "$VIRTUAL_ENV" != "" ]; then
deactivate
fi
}
# run as non-root
setup_convirt()
{
if [ -e $VIRTUAL_ENV_DIR/bin/activate ]; then
source $VIRTUAL_ENV_DIR/bin/activate
else
echo "Virtual environment not found at $VIRTUAL_ENV_DIR."
echo "Will try to use TG2 installed in the system."
fi
cd $CONVIRT_DIR/src/convirt/web/convirt
if [ $DIST == "Ubuntu" ] && [ $VER == "12.04" ]; then
echo "Updating DecoratorTools and turbojson for Ubuntu 12.04"
easy_install -U DecoratorTools
easy_install -U turbojson
fi
python setup.py develop
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: Running python setup.py develop."
deactivate_env
return $ret
fi
cd $CONVIRT_DIR
./convirt-ctl setup
ret=$?
if [ $ret != 0 ]; then
echo "ERROR: running convirt-ctl setup."
deactivate_env
return $ret
fi
generate_cms_id
ret=$?
if [ $ret != 0 ]; then
echo "WARNING: Problem generating CMS id. Key based authentication may not work."
deactivate_env
return 0
fi
fix_encoding_to_utf8
ret=$?
if [ $ret != 0 ]; then
echo "WARNING: Problem changing default python charset to utf-8."
deactivate_env
return 0
fi
deactivate_env
success_msg
}
| true |
044d7c3cb6b405bb7c1f2615827ac6abdd3fd268
|
Shell
|
c2theg/srvBuilds
|
/docker/docker_volume.sh
|
UTF-8
| 435 | 2.984375 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/sh
if [ ! -d "/media/data" ]; then
mkdir /media/data
fi
if [ ! -d "/media/data/containers" ]; then
mkdir /media/data/containers/
fi
#-----------------------------------------------------
echo "\r\n Create volume 'ContainerVolumes' \r\n "
docker volume create ContainerVolumes
echo "\r\n Inspect the volume... \r\n \r\n"
docker volume inspect ContainerVolumes
echo "\r\n All Volumes... \r\n \r\n"
docker volume ls
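# Illustrative use of the volume (assumes an image such as alpine is present):
# docker run --rm -v ContainerVolumes:/data alpine ls /data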
| true |
1fa0007dde130a53117af3729c18ed934bb8a71b
|
Shell
|
mcvsubbu/incubator-pinot
|
/contrib/pinot-druid-benchmark/run_benchmark.sh
|
UTF-8
| 7,124 | 2.859375 | 3 |
[
"Apache-2.0",
"BSD-3-Clause",
"LicenseRef-scancode-free-unknown",
"LGPL-2.0-or-later",
"LicenseRef-scancode-unicode",
"LicenseRef-scancode-other-copyleft",
"LicenseRef-scancode-unknown-license-reference",
"NAIST-2003",
"bzip2-1.0.6",
"OpenSSL",
"CC-BY-2.5",
"CC-BY-SA-3.0",
"CDDL-1.0",
"MIT",
"CPL-1.0",
"LicenseRef-scancode-public-domain",
"CDDL-1.1",
"EPL-1.0",
"CC-BY-4.0",
"WTFPL",
"EPL-2.0",
"ISC",
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
echo "Compiling the benchmark driver..."
mvn clean install > /dev/null
rm -rf temp results
mkdir temp results
echo "Untaring Pinot Segments Without Extra Index..."
tar -zxf pinot_non_startree.tar.gz -C temp
echo "Untaring Pinot Segments With Inverted Index..."
tar -zxf pinot_non_startree_inverted_index.tar.gz -C temp
#echo "Untaring Pinot Segments With Default Startree Index..."
#tar -zxf pinot_default_startree.tar.gz -C temp
#echo "Untaring Pinot Segments With Optimal Startree Index..."
#tar -zxf pinot_optimal_startree.tar.gz -C temp
echo "Untaring Druid Segments..."
tar -zxf druid_segment_cache.tar.gz -C temp
cd temp
echo "Downloading Druid..."
curl -O http://static.druid.io/artifacts/releases/druid-0.9.2-bin.tar.gz
tar -zxf druid-0.9.2-bin.tar.gz
rm druid-0.9.2-bin.tar.gz
echo "Downloading ZooKeeper..."
curl -O http://apache.claz.org/zookeeper/zookeeper-3.4.6/zookeeper-3.4.6.tar.gz
tar -zxf zookeeper-3.4.6.tar.gz
rm zookeeper-3.4.6.tar.gz
cd ..
echo "Benchmarking Pinot without Extra Index..."
java -jar pinot-tool-launcher-jar-with-dependencies.jar PerfBenchmarkRunner -mode startAll -dataDir temp/non_startree_small_yearly -tableNames tpch_lineitem_OFFLINE > /dev/null 2>&1 &
PINOT_PROCESS_ID=$!
echo ${PINOT_PROCESS_ID}
echo "Wait 30 seconds so that cluster is ready for processing queries..."
sleep 30
echo "Starting response time benchmark..."
./target/appassembler/bin/pinot-response-time.sh src/main/resources/pinot_queries http://localhost:8099/query 20 20 results/pinot_non_startree | tee results/pinot_non_startree_response_time.txt
echo "Starting throughput benchmark..."
./target/appassembler/bin/pinot-throughput.sh src/main/resources/pinot_queries http://localhost:8099/query 5 60 | tee results/pinot_non_startree_throughput.txt
kill -9 ${PINOT_PROCESS_ID}
echo "Benchmarking Pinot with Inverted Index..."
java -jar pinot-tool-launcher-jar-with-dependencies.jar PerfBenchmarkRunner -mode startAll -dataDir temp/non_startree_small_yearly_inverted_index -tableNames tpch_lineitem_OFFLINE -invertedIndexColumns l_receiptdate,l_shipmode > /dev/null 2>&1 &
PINOT_PROCESS_ID=$!
echo "Wait 30 seconds so that cluster is ready for processing queries..."
sleep 30
echo "Starting response time benchmark..."
./target/appassembler/bin/pinot-response-time.sh src/main/resources/pinot_queries http://localhost:8099/query 20 20 results/pinot_non_startree_inverted_index | tee results/pinot_non_startree_inverted_index_response_time.txt
echo "Starting throughput benchmark..."
./target/appassembler/bin/pinot-throughput.sh src/main/resources/pinot_queries http://localhost:8099/query 5 60 | tee results/pinot_non_startree_inverted_index_throughput.txt
kill -9 ${PINOT_PROCESS_ID}
#echo "Benchmarking Pinot with Default Startree Index..."
#java -jar pinot-tool-launcher-jar-with-dependencies.jar PerfBenchmarkRunner -mode startAll -dataDir temp/default_startree_small_yearly -tableNames tpch_lineitem_OFFLINE > /dev/null 2>&1 &
#PINOT_PROCESS_ID=$!
#echo "Wait 30 seconds so that cluster is ready for processing queries..."
#sleep 30
#echo "Starting response time benchmark..."
#./target/appassembler/bin/pinot-response-time.sh src/main/resources/pinot_queries http://localhost:8099/query 20 20 results/pinot_default_startree | tee results/pinot_default_startree_response_time.txt
#echo "Starting throughput benchmark..."
#./target/appassembler/bin/pinot-throughput.sh src/main/resources/pinot_queries http://localhost:8099/query 5 60 | tee results/pinot_default_startree_throughput.txt
#kill -9 ${PINOT_PROCESS_ID}
#
#echo "Benchmarking Pinot with Optimal Startree Index..."
#java -jar pinot-tool-launcher-jar-with-dependencies.jar PerfBenchmarkRunner -mode startAll -dataDir temp/optimal_startree_small_yearly -tableNames tpch_lineitem_OFFLINE > /dev/null 2>&1 &
#PINOT_PROCESS_ID=$!
#echo "Wait 30 seconds so that cluster is ready for processing queries..."
#sleep 30
#echo "Starting response time benchmark..."
#./target/appassembler/bin/pinot-response-time.sh src/main/resources/pinot_queries http://localhost:8099/query 20 20 results/pinot_optimal_startree | tee results/pinot_optimal_startree_response_time.txt
#echo "Starting throughput benchmark..."
#./target/appassembler/bin/pinot-throughput.sh src/main/resources/pinot_queries http://localhost:8099/query 5 60 | tee results/pinot_optimal_startree_throughput.txt
#kill -9 ${PINOT_PROCESS_ID}
echo "Benchmarking Druid with Inverted Index (Default Setting)..."
cd temp/druid-0.9.2
./bin/init
rm -rf var/druid/segment-cache
mv ../segment-cache var/druid/segment-cache
#Start ZooKeeper
../zookeeper-3.4.6/bin/zkServer.sh start ../zookeeper-3.4.6/conf/zoo_sample.cfg > /dev/null 2>&1
#Replace Druid JVM config and broker runtime properties
cp ../../src/main/resources/config/druid_jvm.config conf/druid/broker/jvm.config
cp ../../src/main/resources/config/druid_jvm.config conf/druid/historical/jvm.config
cp ../../src/main/resources/config/druid_broker_runtime.properties conf/druid/broker/runtime.properties
#Start Druid cluster
java `cat conf/druid/broker/jvm.config | xargs` -cp conf-quickstart/druid/_common:conf/druid/broker:lib/* io.druid.cli.Main server broker > /dev/null 2>&1 &
DRUID_BROKER_PROCESS_ID=$!
java `cat conf/druid/historical/jvm.config | xargs` -cp conf-quickstart/druid/_common:conf/druid/historical:lib/* io.druid.cli.Main server historical > /dev/null 2>&1 &
DRUID_SERVER_PROCESS_ID=$!
#Run benchmark
cd ../..
echo "Wait 30 seconds so that cluster is ready for processing queries..."
sleep 30
echo "Starting response time benchmark..."
./target/appassembler/bin/druid-response-time.sh src/main/resources/druid_queries http://localhost:8082/druid/v2/?pretty 20 20 results/druid | tee results/druid_response_time.txt
echo "Starting throughput benchmark..."
./target/appassembler/bin/druid-throughput.sh src/main/resources/druid_queries http://localhost:8082/druid/v2/?pretty 5 60 | tee results/druid_throughput.txt
kill -9 ${DRUID_BROKER_PROCESS_ID}
kill -9 ${DRUID_SERVER_PROCESS_ID}
temp/zookeeper-3.4.6/bin/zkServer.sh stop temp/zookeeper-3.4.6/conf/zoo_sample.cfg > /dev/null 2>&1
echo "********************************************************************"
echo "* Benchmark finished. Results can be found in 'results' directory. *"
echo "********************************************************************"
exit 0
| true |
fb560df0b4daae8d425396b26dbd9ac7f61be807
|
Shell
|
denisidoro/dotfiles
|
/scripts/script/yaml
|
UTF-8
| 828 | 3.265625 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
##? YAML parser
##?
##? Usage:
##? yaml <file> [prefix]
##?
##? Examples:
##? eval "$(yaml my.yaml yaml_)"
source "${DOTFILES}/scripts/core/main.sh"
doc::maybe_help "$@"
main() {
# echoerr "reading yaml!"
local -r prefix=$2
local -r s='[[:space:]]*'
local -r w='[a-zA-Z0-9_]*'
local -r fs=$(echo @|tr @ '\034')
sed "h;s/^[^:]*//;x;s/:.*$//;y/-/_/;G;s/\n//" "$1" |
sed -ne "s|^\($s\)\($w\)$s:$s\"\(.*\)\"$s\$|\1$fs\2$fs\3|p" \
-e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" |
awk -F "$fs" '{
indent = length($1)/2;
vname[indent] = $2;
for (i in vname) {if (i > indent) {delete vname[i]}}
if (length($3) > 0) {
vn=""; for (i=0; i<indent; i++) {vn=(vn)(vname[i])("_")}
printf("export %s%s%s=\"%s\"\n", "'"$prefix"'",vn, $2, $3);
}
}'
}
main "$@"
| true |
2910e492b61464900066c5c42197f85950b8d871
|
Shell
|
urmi-21/RNAseq
|
/map_fastq2.sh
|
UTF-8
| 716 | 3.109375 | 3 |
[] |
no_license
|
#!/bin/bash
# This script starts from FASTQ files and maps the reads to the transcriptome
#[1] file_directory_with_SRA
#[2] outdir
#UrMi 17/11/2017
#load all required modules
#module load salmon
#required for bridges
#module load sra-toolkit
file_dir=$1
thisnode=$(/bin/hostname)
thisdir=$(pwd)
#copy index to local
#echo "copying index..."
INDEXDIR="/pylon5/bi5611p/usingh/human_index_final"
#echo "done copying."
#make list of all SRR files in input directory
file_list=($file_dir/*_pass_1.fastq)
for f in "${file_list[@]}"; do
echo $f
this_fname=$(echo "$f" | rev | cut -d"/" -f1 | rev | cut -d"." -f1)
this_name=$(echo "$this_fname" | awk '{split($0,a,"_"); print a[1]}')
echo $this_name
echo $this_fname
done
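# Illustrative values (hypothetical file SRR123_pass_1.fastq):
#   this_fname -> SRR123_pass_1
#   this_name  -> SRR123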
| true |
b574f115acfffe5a558e772993297e7763af3cf6
|
Shell
|
JoelQFernandez/ultimate-squid-privoxy-tor
|
/ultimate-squid-privoxy-tor
|
UTF-8
| 21,904 | 3.375 | 3 |
[] |
no_license
|
#! /bin/bash
main()
{
ifs;
counter;
paths;
csv;
countryCode;
nodes
}
## Function: ifs()
## Internal Field Separator Command For Helping With Creating Arrays
##
ifs()
{
SAVEIFS=$IFS
IFS=$(echo -en "\n\b")
}
paths()
{
torConfigurationDirectory="/usr/local/etc/tor"
torDataDirectory="/var/lib"
torProgramDirectory="/usr/local/bin"
privoxyConfigurationDirectory="/usr/local/etc/privoxy"
privoxyLogDirectory="/usr/local/var/log"
privoxyPidDirectory="/var/run"
privoxyProgramDirectory="/usr/local/sbin"
cd /usr/local/etc;
if [ ! -d squid ]; then
mkdir squid;
fi
squidEtcDirectory="/usr/local/etc/squid"
squidCacheDirectory="/usr/local/var/cache/squid"
squidRunDirectory="/usr/local/var/run/squid"
squidLogDirectory="/usr/local/var/logs/squid"
squidSbinDirectory="/usr/local/sbin"
}
counter()
{
counter=1;
socksPort=9050;
controlPort=9051;
privoxyListenPort=8118;
squidPort=3118;
}
##
## Function: csv()
## Downloads Lists Of Available Tor Servers And Sorts The List By Country From
## Fastest To Slowest.
##
csv()
{
csvTorStatus()
{
useragent="Mozilla/5.0 (Macintosh; Intel Mac OS X 10.7; \
rv:5.0.1) Gecko/20100101 Firefox/5.0.1"
blutmagieURL="http://torstatus.blutmagie.de/query_export.php/Tor_query_EXPORT.csv";
torstatusInfoURL="torstatus.info/query_export.php/Tor_query_EXPORT.csv"
torStatus=$(curl -k --user-agent "$useragent" "$torstatusInfoURL");
}
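## Assumed CSV layout (inferred from the queries below): field 1 = router
## name, field 2 = country code, field 3 = bandwidth in KB/s.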
csvQueries()
{
######################################
## Sort CSV From Fastest To Slowest ##
######################################
Fast2Slow(){ sort -r -n -k3,3 -t ,; }
################################
## Insert Comma In torcc File ##
################################
Comma(){ sed 's/ /,/g'; }
######################
## Rows To Columns ##
######################
#################
## Upper Case ##
#################
CAPS(){ tr '[:lower:]' '[:upper:]'; }
######################
## Columns To Rows ##
######################
ColumnToRow() { tr "\n" " " ; }
##############################
## Remove "Unnamed" Servers ##
##############################
Unnamed(){ sed -e 's/Unnamed//g' -e 's/unnamed//g'; }
########################
## Delete Empty Lines ##
########################
Empty(){ sed '/^$/d'; }
#######################
## Print Top Results ##
#######################
ONE(){ sed 1q; }
FIVE(){ sed 5q; }
TEN(){ sed 10q; }
TWENTY(){ sed 20q; }
THIRTY(){ sed 30q; }
FORTY(){ sed 40q; }
FIFTY(){ sed 50q; }
HUNDRED(){ sed 100q; }
########################
## Delete Top Results ##
########################
fifty(){ sed '1,50d'; }
hundred(){ sed '1,100d'; }
}
csvTorStatus;
csvQueries;
}
countryCode()
{
countryCodeDetails()
{
countryCodeDetails=$(cat << EOF
A1,"Anonymous Proxy"
A2,"Satellite Provider"
O1,"Other Country"
AD,"Andorra"
AE,"United Arab Emirates"
AF,"Afghanistan"
AG,"Antigua and Barbuda"
AI,"Anguilla"
AL,"Albania"
AM,"Armenia"
AO,"Angola"
AP,"Asia/Pacific Region"
AQ,"Antarctica"
AR,"Argentina"
AS,"American Samoa"
AT,"Austria"
AU,"Australia"
AW,"Aruba"
AX,"Aland Islands"
AZ,"Azerbaijan"
BA,"Bosnia and Herzegovina"
BB,"Barbados"
BD,"Bangladesh"
BE,"Belgium"
BF,"Burkina Faso"
BG,"Bulgaria"
BH,"Bahrain"
BI,"Burundi"
BJ,"Benin"
BL,"Saint Bartelemey"
BM,"Bermuda"
BN,"Brunei Darussalam"
BO,"Bolivia"
BQ,"Bonaire, Saint Eustatius and Saba"
BR,"Brazil"
BS,"Bahamas"
BT,"Bhutan"
BV,"Bouvet Island"
BW,"Botswana"
BY,"Belarus"
BZ,"Belize"
CA,"Canada"
CC,"Cocos (Keeling) Islands"
CD,"Congo, The Democratic Republic of the"
CF,"Central African Republic"
CG,"Congo"
CH,"Switzerland"
CI,"Cote d'Ivoire"
CK,"Cook Islands"
CL,"Chile"
CM,"Cameroon"
CN,"China"
CO,"Colombia"
CR,"Costa Rica"
CU,"Cuba"
CV,"Cape Verde"
CW,"Curacao"
CX,"Christmas Island"
CY,"Cyprus"
CZ,"Czech Republic"
DE,"Germany"
DJ,"Djibouti"
DK,"Denmark"
DM,"Dominica"
DO,"Dominican Republic"
DZ,"Algeria"
EC,"Ecuador"
EE,"Estonia"
EG,"Egypt"
EH,"Western Sahara"
ER,"Eritrea"
ES,"Spain"
ET,"Ethiopia"
EU,"Europe"
FI,"Finland"
FJ,"Fiji"
FK,"Falkland Islands (Malvinas)"
FM,"Micronesia, Federated States of"
FO,"Faroe Islands"
FR,"France"
GA,"Gabon"
GB,"United Kingdom"
GD,"Grenada"
GE,"Georgia"
GF,"French Guiana"
GG,"Guernsey"
GH,"Ghana"
GI,"Gibraltar"
GL,"Greenland"
GM,"Gambia"
GN,"Guinea"
GP,"Guadeloupe"
GQ,"Equatorial Guinea"
GR,"Greece"
GS,"South Georgia and the South Sandwich Islands"
GT,"Guatemala"
GU,"Guam"
GW,"Guinea-Bissau"
GY,"Guyana"
HK,"Hong Kong"
HM,"Heard Island and McDonald Islands"
HN,"Honduras"
HR,"Croatia"
HT,"Haiti"
HU,"Hungary"
ID,"Indonesia"
IE,"Ireland"
IL,"Israel"
IM,"Isle of Man"
IN,"India"
IO,"British Indian Ocean Territory"
IQ,"Iraq"
IR,"Iran, Islamic Republic of"
IS,"Iceland"
IT,"Italy"
JE,"Jersey"
JM,"Jamaica"
JO,"Jordan"
JP,"Japan"
KE,"Kenya"
KG,"Kyrgyzstan"
KH,"Cambodia"
KI,"Kiribati"
KM,"Comoros"
KN,"Saint Kitts and Nevis"
KP,"Korea, Democratic People's Republic of"
KR,"Korea, Republic of"
KW,"Kuwait"
KY,"Cayman Islands"
KZ,"Kazakhstan"
LA,"Lao People's Democratic Republic"
LB,"Lebanon"
LC,"Saint Lucia"
LI,"Liechtenstein"
LK,"Sri Lanka"
LR,"Liberia"
LS,"Lesotho"
LT,"Lithuania"
LU,"Luxembourg"
LV,"Latvia"
LY,"Libyan Arab Jamahiriya"
MA,"Morocco"
MC,"Monaco"
MD,"Moldova, Republic of"
ME,"Montenegro"
MF,"Saint Martin"
MG,"Madagascar"
MH,"Marshall Islands"
MK,"Macedonia"
ML,"Mali"
MM,"Myanmar"
MN,"Mongolia"
MO,"Macao"
MP,"Northern Mariana Islands"
MQ,"Martinique"
MR,"Mauritania"
MS,"Montserrat"
MT,"Malta"
MU,"Mauritius"
MV,"Maldives"
MW,"Malawi"
MX,"Mexico"
MY,"Malaysia"
MZ,"Mozambique"
NA,"Namibia"
NC,"New Caledonia"
NE,"Niger"
NF,"Norfolk Island"
NG,"Nigeria"
NI,"Nicaragua"
NL,"Netherlands"
NO,"Norway"
NP,"Nepal"
NR,"Nauru"
NU,"Niue"
NZ,"New Zealand"
OM,"Oman"
PA,"Panama"
PE,"Peru"
PF,"French Polynesia"
PG,"Papua New Guinea"
PH,"Philippines"
PK,"Pakistan"
PL,"Poland"
PM,"Saint Pierre and Miquelon"
PN,"Pitcairn"
PR,"Puerto Rico"
PS,"Palestinian Territory"
PT,"Portugal"
PW,"Palau"
PY,"Paraguay"
QA,"Qatar"
RE,"Reunion"
RO,"Romania"
RS,"Serbia"
RU,"Russian Federation"
RW,"Rwanda"
SA,"Saudi Arabia"
SB,"Solomon Islands"
SC,"Seychelles"
SD,"Sudan"
SE,"Sweden"
SG,"Singapore"
SH,"Saint Helena"
SI,"Slovenia"
SJ,"Svalbard and Jan Mayen"
SK,"Slovakia"
SL,"Sierra Leone"
SM,"San Marino"
SN,"Senegal"
SO,"Somalia"
SR,"Suriname"
SS,"South Sudan"
ST,"Sao Tome and Principe"
SV,"El Salvador"
SX,"Sint Maarten"
SY,"Syrian Arab Republic"
SZ,"Swaziland"
TC,"Turks and Caicos Islands"
TD,"Chad"
TF,"French Southern Territories"
TG,"Togo"
TH,"Thailand"
TJ,"Tajikistan"
TK,"Tokelau"
TL,"Timor-Leste"
TM,"Turkmenistan"
TN,"Tunisia"
TO,"Tonga"
TR,"Turkey"
TT,"Trinidad and Tobago"
TV,"Tuvalu"
TW,"Taiwan"
TZ,"Tanzania, United Republic of"
UA,"Ukraine"
UG,"Uganda"
UM,"United States Minor Outlying Islands"
US,"United States"
UY,"Uruguay"
UZ,"Uzbekistan"
VA,"Holy See (Vatican City State)"
VC,"Saint Vincent and the Grenadines"
VE,"Venezuela"
VG,"Virgin Islands, British"
VI,"Virgin Islands, U.S."
VN,"Vietnam"
VU,"Vanuatu"
WF,"Wallis and Futuna"
WS,"Samoa"
YE,"Yemen"
YT,"Mayotte"
ZA,"South Africa"
ZM,"Zambia"
ZW,"Zimbabwe"
EOF
)
}
countryCodeParameters()
{
countryCode=( $(
echo "$torStatus" \
| awk -F"," '{print $2}' \
| sort -k3 \
| sort -u \
| sed -e 's/Country Code//g' \
-e 's_N/A__g' \
| Empty ) )
countryCodeName=( $(
for ((n=0;n<${#countryCode[@]};n++)); do
echo "$countryCodeDetails" \
| grep ${countryCode[n]} \
| cut -f2 -d'"'
done ) )
}
countryCodeMenu()
{
for ((n=0;n<${#countryCode[@]};n++)); do
printf "%s%s\n" "${countryCode[n]}) ""${countryCodeName[n]}"
done
}
countryCodeSelection()
{
read -p "PLEASE SELECT THE COUNTRY CODE (CA,GB,US): " REPLY
}
countryCodeErrorMessage()
{
while [ $invalidINPUT ]; do
clear;
echo;
echo;
countryCodeMenu | column;
echo;
echo;
echo Invalid Country Code:
echo $invalidINPUT;
echo;
echo;
countryCodeSelection;
countryCodeVerification
done
}
countryCodeVerification()
{
IFS=","
invalidINPUT=( $(
for n in ${REPLY1[@]}; do
valid=$(
echo "$torStatus" \
| awk -F"," '{print $2}' \
| sort -k3 \
| sort -u \
| sed -e 's/Country Code//g' \
-e 's_N/A__g' \
| Empty \
| CAPS \
| grep $n )
if [ ! $valid ]; then
echo $n
fi
done ) )
countryCodeErrorMessage
}
countryCodeDetails;
countryCodeParameters;
clear;
echo;
echo;
countryCodeMenu | column 2> /dev/null;
echo;
echo;
countryCodeSelection;
countryCodeVerification #ca,GB,UK,elephant
}
nodes()
{
nodesInput()
{
name=$(
echo "$countryCodeDetails" \
| grep $n \
| cut -f2 -d'"')
read -p "Please enter the number of exit nodes for $name (1-10): " REPLY2
}
nodesErrorMessage()
{
while [[ $REPLY2 != [0-9]* ]]; do
clear;
echo;
echo;
nodesInput;
done
}
nodesParseREPLY()
{
REPLY1=( $(
IFS=",";
echo "$REPLY" \
| tr " " "\n" ) )
}
nodesParseREPLY
for n in ${REPLY1[@]}; do
clear;
echo;
echo;
nodesInput;
nodesErrorMessage;
bandWidth;
torParameters;
squidConfigurationFile > $squidEtcDirectory/$n.conf
for ((i=1;i<=$REPLY2;i++)); do
torrc;
torStart;
priVoxy
squidConfigurationRoundRobin >> $squidEtcDirectory/$n.conf;
export counter=$(( $counter + 1 ));
export socksPort=$(( $socksPort + 100 ));
export controlPort=$(( $controlPort + 100 ));
export privoxyListenPort=$(( $privoxyListenPort + 100 ));
done
export squidPort=$(( $squidPort + 100 ));
squidConfigurationCache >> $squidEtcDirectory/$n.conf
squidCreateCacheDirectory;
#squidBlackList;
sleep 10
squidStart;
done
}
bandWidth()
{
bandwidthParameters()
{
bandwidth=( '56' '128' '256' '512' '768' '1024' '1280' '1536' '1792' '2048' );
}
bandwidthMenu()
{
for ((n=0;n<${#bandwidth[@]};n++)); do
printf "%6s%s\n" "${bandwidth[n]}) " "${bandwidth[n]} kbps"
done
}
bandwidthSelection()
{
read -p "Please select the minimum bandwidth speed for $n exit nodes: " REPLY3
}
bandwidthErrorMessage()
{
while [ $invalidINPUT ]; do
clear;
echo;
echo;
bandwidthMenu | column;
echo;
echo;
echo Invalid Bandwidth Speed:
echo $invalidINPUT;
echo;
echo;
bandwidthSelection;
bandwidthVerification
break
done
}
bandwidthVerification()
{
invalidINPUT=$(
valid=$(
for n in ${bandwidth[@]}; do
echo "$n"
done | grep "$REPLY3" )
if [ ! $valid ]; then
echo $REPLY3
fi)
bandwidthErrorMessage;
}
clear;
echo;
echo;
bandwidthParameters;
bandwidthMenu | column;
echo;
echo;
bandwidthSelection;
bandwidthVerification;
}
torParameters()
{
#####################################
## Bandwidth Greater Than ??? KB/s ##
#####################################
BANDWIDTH(){ awk -F"," -v Bandwidth=$REPLY3 '$3>=Bandwidth'; }
##################################
## Bandwidth Less Than ??? KB/s ##
##################################
bandwidth(){ awk -F"," -v Bandwidth=$REPLY3 '$3<=Bandwidth'; }
## List The Entry Nodes That Meet The Bandwidth Requirements
csvEntryNodes=$(
echo "$torStatus" \
| Fast2Slow \
| HUNDRED \
| BANDWIDTH \
| Unnamed \
| awk -F"," '{print $1}' \
| Empty \
| ColumnToRow \
| Comma \
| sed 's/,$//g' )
## List The Exit Nodes That Meet The Bandwidth Requirements
csvExitNodes=$(
echo "$torStatus" \
| awk -F"," -v CC=$n '$2==CC' \
| Fast2Slow \
| TWENTY \
| Unnamed \
| awk -F"," '{print $1}' \
| Empty \
| ColumnToRow \
| Comma \
| sed 's/,$//g')
## Blacklist Of All Entry And Exit Nodes That Do Not Meet
## The Bandwidth Requirements
csvExcludeNodes=$(
echo "$torStatus" \
| awk -F"," -v CC=$n '$2!=CC' \
| bandwidth \
| Unnamed \
| awk -F"," '{print $1}' \
| Empty \
| ColumnToRow \
| Comma \
| sed 's/,$//g')
}
##
## Function: torrc()
## Creates A Highly Optimized torrc Configuration File Based On The
## Exit Nodes And The Bandwidth Requirements
##
torrc()
{
torrcEntryNodes()
{
echo EntryNodes "$csvEntryNodes"
}
torrcExitNodes()
{
echo ExitNodes "$csvExitNodes"
}
torrcExcludeNodes()
{
echo ExcludeNodes "$csvExcludeNodes"
}
torrcConfig()
{
echo SocksPort $socksPort # what port to open for local application connections
echo SocksBindAddress 127.0.0.1 # accept connections only from localhost
echo StrictNodes 1
echo AllowUnverifiedNodes middle,rendezvous
echo Log notice syslog
echo RunAsDaemon 1
#echo User tor
#echo Group tor
echo DataDirectory "$torDataDirectory/tor$counter"
#echo ControlPort $controlPort
}
torrcEntryNodes > "$torConfigurationDirectory/torrc$counter"
torrcExitNodes >> "$torConfigurationDirectory/torrc$counter"
torrcExcludeNodes >> "$torConfigurationDirectory/torrc$counter"
torrcConfig >> "$torConfigurationDirectory/torrc$counter"
}
torStart()
{
cd $torDataDirectory;
if [ ! -d tor$counter ]; then
mkdir tor$counter;
fi
$torProgramDirectory/tor -f "$torConfigurationDirectory/torrc$counter" &
}
priVoxy()
{
privoxyConfigurationFile()
{
echo trust-info-url http://www.example.com/why_we_block.html
echo trust-info-url http://www.example.com/what_we_allow.html
echo confdir $privoxyConfigurationDirectory
echo logdir $privoxyLogDirectory/privoxy$counter
echo actionsfile match-all.action # Internal purpose, recommended
echo actionsfile default.action # Main actions file
echo actionsfile user.action # User customizations
echo filterfile default.filter
echo logfile logfile
echo listen-address 127.0.0.1:$privoxyListenPort
echo toggle 1
echo enable-remote-toggle 1
echo enable-remote-http-toggle 1
echo enable-edit-actions 1
echo enforce-blocks 0
echo buffer-limit 4096
echo forward-socks5 / 127.0.0.1:$socksPort .
echo forwarded-connect-retries 0
echo accept-intercepted-requests 0
echo allow-cgi-request-crunching 0
echo split-large-forms 0
}
privoxyCreateConfigurationDirectory()
{
cd $privoxyConfigurationDirectory;
if [ ! -d configuration ]; then
sudo mkdir configuration;
fi
chown privoxy:privoxy $privoxyConfigurationDirectory/configuration
cd configuration;
if [ ! -d config$counter ]; then
mkdir config$counter;
fi
chown privoxy:privoxy $privoxyConfigurationDirectory/configuration/config$counter;
for l in match-all.action default.action user.action default.filter user.filter; do
sudo cp $privoxyConfigurationDirectory/$l $privoxyConfigurationDirectory/configuration/config$counter/$l;
chown privoxy:privoxy $privoxyConfigurationDirectory/configuration/config$counter/$l;
done
sudo cp -a $privoxyConfigurationDirectory/templates $privoxyConfigurationDirectory/configuration/config$counter/templates;
}
privoxyCreateLogDirectory()
{
chown privoxy:privoxy $privoxyLogDirectory
cd $privoxyLogDirectory;
## Create Log Directory ##
if [ ! -d privoxy$counter ]; then
mkdir privoxy$counter;
fi
chown privoxy:privoxy $privoxyLogDirectory/privoxy$counter;
## Create Missing Logfiles ##
for l in logfile jarfile; do
touch $privoxyLogDirectory/privoxy$counter/$l;
chgrp privoxy $privoxyLogDirectory/privoxy$counter/$l;
chmod 660 $privoxyLogDirectory/privoxy$counter/$l;
done;
}
privoxyStart()
{
#sudo $privoxyProgramDirectory/privoxy --pidfile $privoxyPidDirectory/privoxy$counter.pid --user privoxy $privoxyConfigurationDirectory/configuration/config$counter/config
sudo $privoxyProgramDirectory/privoxy --pidfile $privoxyPidDirectory/privoxy$counter.pid $privoxyConfigurationDirectory/configuration/config$counter/config
}
privoxyCreateConfigurationDirectory;
privoxyConfigurationFile > $privoxyConfigurationDirectory/configuration/config$counter/config;
privoxyCreateLogDirectory;
privoxyStart;
}
squidConfigurationFile()
{
#echo acl all src all
#echo acl manager proto cache_object
#echo acl localhost src 127.0.0.1/32
#echo acl to_localhost dst 127.0.0.0/8
#echo
echo acl SSL_ports port 443
echo acl Safe_ports port 80" # http"
echo acl Safe_ports port 21" # ftp"
echo acl Safe_ports port 443" # https"
echo acl Safe_ports port 70" # gopher"
echo acl Safe_ports port 210" # wais"
echo acl Safe_ports port 1025-65535" # unregistered ports"
echo acl Safe_ports port 280" # http-mgmt"
echo acl Safe_ports port 488" # gss-http"
echo acl Safe_ports port 591" # filemaker"
echo acl Safe_ports port 777" # multiling http"
echo acl Safe_ports port 901" # SWAT"
echo acl Safe_ports port 1935" # Macromedia Flash x-fcs"
echo acl purge method PURGE
echo acl CONNECT method CONNECT
echo
echo http_access allow manager localhost
echo http_access deny manager
echo http_access allow purge localhost
echo http_access deny purge
echo http_access deny" !Safe_ports"
echo http_access deny CONNECT" !SSL_ports"
echo
echo acl malware_domains url_regex $squidEtcDirectory/Malware-domains.txt
echo http_access deny malware_domains
echo
echo acl ad_block url_regex $squidEtcDirectory/ad_block.txt
echo http_access deny ad_block
echo
echo hosts_file /etc/hosts
#echo http_access allow localhost
#echo http_access deny all
#echo icp_access deny all
echo http_port $squidPort
echo icp_port 0
#echo hierarchy_stoplist cgi-bin ?
echo
echo refresh_pattern" ^ftp: 1440 20% 10080"
echo refresh_pattern" ^gopher: 1440 0% 1440"
echo refresh_pattern" -i (/cgi-bin/|\?) 0 0% 0"
echo refresh_pattern" -i \.(gif|png|jpg|jpeg|ico)$ 10080 90% 43200 override-expire ignore-no-cache ignore-no-store ignore-private ignore-must-revalidate"
echo refresh_pattern" -i \.(iso|avi|wav|mp3|mp4|mpeg|swf|flv|x-flv)$ 43200 90% 432000 override-expire ignore-no-cache ignore-no-store ignore-private ignore-must-revalidate"
echo refresh_pattern" -i \.(deb|rpm|exe|zip|tar|tgz|ram|rar|bin|ppt|doc|tiff)$ 10080 90% 43200 override-expire ignore-no-cache ignore-no-store ignore-private ignore-must-revalidate"
echo refresh_pattern" -i \.index.(html|htm)$ 0 40% 43200 override-expire ignore-no-cache ignore-no-store ignore-private ignore-must-revalidate"
echo refresh_pattern" -i \.(html|htm|css|js)$ 1440 40% 43200 override-expire ignore-no-cache ignore-no-store ignore-private ignore-must-revalidate"
echo refresh_pattern" . 0 40% 40320"
echo refresh_pattern" . 0 20% 4320"
echo
echo pipeline_prefetch on
echo cache_mem 6144 MB
echo
echo maximum_object_size 144 MB
echo dns_nameservers 8.8.8.8 8.8.4.4
}
squidConfigurationRoundRobin()
{
echo cache_peer localhost$counter parent $privoxyListenPort 0 round-robin no-query
}
squidConfigurationCache()
{
echo never_direct allow all
echo always_direct deny all
echo acl apache rep_header Server ^Apache
echo forwarded_for off
#echo cache_effective_user root
echo coredump_dir /usr/local/var/cache/squid/$n
echo cache_dir aufs /usr/local/var/cache/squid/$n 2000 16 256
echo pid_filename /usr/local/var/run/squid/$n.pid
echo access_log $squidLogDirectory/access.$n.log
echo cache_store_log $squidLogDirectory/store.$n.log
echo cache_log $squidLogDirectory/cache.$n.log
echo
echo visible_hostname localhost
echo pipeline_prefetch on
echo
echo via off
echo forwarded_for off
echo
echo request_header_access Allow allow all
echo request_header_access Authorization allow all
echo request_header_access WWW-Authenticate allow all
echo request_header_access Proxy-Authorization allow all
echo request_header_access Proxy-Authenticate allow all
echo request_header_access Cache-Control allow all
echo request_header_access Content-Encoding allow all
echo request_header_access Content-Length allow all
echo request_header_access Content-Type allow all
echo request_header_access Date allow all
echo request_header_access Expires allow all
echo request_header_access Host allow all
echo request_header_access If-Modified-Since allow all
echo request_header_access Last-Modified allow all
echo request_header_access Location allow all
echo request_header_access Pragma allow all
echo request_header_access Accept allow all
echo request_header_access Accept-Charset allow all
echo request_header_access Accept-Encoding allow all
echo request_header_access Accept-Language allow all
echo request_header_access Content-Language allow all
echo request_header_access Mime-Version allow all
echo request_header_access Retry-After allow all
echo request_header_access Title allow all
echo request_header_access Connection allow all
echo request_header_access Proxy-Connection allow all
echo request_header_access User-Agent allow all
echo request_header_access Cookie allow all
echo request_header_access All deny all
}
squidCreateCacheDirectory()
{
cd /usr/local/var/logs
if [ ! -d squid ]; then
sudo mkdir squid;
fi
sudo chown -R root:nobody $squidLogDirectory/squid
chmod g+w $squidLogDirectory
## Change Ownership ##
cd $squidCacheDirectory;
if [ ! -d $n ]; then
sudo mkdir $n;
fi
sudo chown -R nobody:nobody $squidCacheDirectory
sudo chown -R nobody:nobody $squidEtcDirectory/$n
#sudo chmod -r 777 $squidCacheDirectory/$n
## Create Cache Directories ##
sudo $squidSbinDirectory/squid -f $squidEtcDirectory/$n.conf -z;
## Create Swap Files ##
sudo $squidSbinDirectory/squid -N -d 1 -z -f $squidEtcDirectory/$n.conf;
}
squidBlackList()
{
cd $squidEtcDirectory
curl "http://malware.hiperlinks.com.br/cgi/submit?action=list_squid" > Malware-domains.txt;
curl "http://pgl.yoyo.org/as/serverlist.php?hostformat=squid-dstdom-regex;showintro=0" | tidy -asxml | xpath '//pre/text()' > ad_block.txt;
}
squidStart()
{
sudo $squidSbinDirectory/squid -f $squidEtcDirectory/$n.conf -N -d 1 &
}
main $@
| true |
fb038ef68b2046602824cd973cd6d378be9fc345
|
Shell
|
FPlatz95/master_thesis
|
/WWTP_sequences/TaxonomicClassification/graftM/graftM_SILVA.sh
|
UTF-8
| 871 | 2.515625 | 3 |
[] |
no_license
|
#!/bin/bash
module purge
module load parallel
module load GraftM/0.13.1-foss-2018a-Python-3.6.4
samples_path=/srv/MA/Projects/microflora_danica/analysis/projects/MFD_seges/results/WWTP_sequences/graftM
data_path=/srv/MA/Projects/microflora_danica/analysis/projects/MFD_seges/data/WWTP_sequences
graftm_package=/shared-nfs/MGP1000/databases/graftm/7.71.silva_v132_alpha1.gpkg
output_dir=/srv/MA/Projects/microflora_danica/analysis/projects/MFD_seges/results/WWTP_sequences/graftM
threads=10
# make the batch file of the samples you want to run (names listed without the .fastq.gz extension)
ls $data_path | grep '_R1\.fastq' | sed 's/\.fastq\.gz$//' > $samples_path/wwtp_sequences.txt
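# wwtp_sequences.txt is then expected to hold one stripped sample name per line,
# e.g. (hypothetical): LIB-WWTP-A_R1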
# run parallel (re-append the stripped .fastq.gz extension when passing the forward reads,
# assuming gzipped reads as the original sed expression implied)
cat $samples_path/wwtp_sequences.txt | parallel -j5 graftM graft --forward $data_path/{}.fastq.gz --graftm_package $graftm_package --output_directory $output_dir/{}_SILVA --threads $threads '&>' $output_dir/log/{}_graftm_SILVA.log
| true |
9ec019990c1ad3c11039926f341173ed7f6e61f5
|
Shell
|
foomango/GeckoHA
|
/src/ha/bin/hustha
|
UTF-8
| 1,647 | 3.671875 | 4 |
[] |
no_license
|
#!/bin/bash
HAPATH="/root/src/ha"
HA863CFG="/root/domu/ha863/remus/ha863.conf"
PROG="hustha"
DOMAIN="ha863"
DEST="hp2"
function state {
state=$(xm list $DOMAIN 2>/dev/null | awk 'NR==2{print $5}')
case $state in
"r-----" | "-b----")
echo "run"
;;
"--p---")
echo "pause"
;;
"---s--" | "----c-" | "-----d")
echo "wait"
;;
*)
echo "stop"
;;
esac
}
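# state prints exactly one of: run | pause | wait | stop (mapped from xm's status flags)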
function startvm {
	status=$(state)
	if [ $status != "stop" ]; then
		# refuse to create over an existing domain; report once and bail out
		# (the original fell through and could print "fail" twice or "fail" then "success")
		echo fail
		return
	fi
	xm create $HA863CFG >/dev/null 2>&1
	status=$(state)
	if [ $status != "run" ]; then
		echo fail
	else
		echo success
	fi
}
function destroyvm {
xm destroy $DOMAIN >/dev/null 2>&1
status=$(state)
if [ $status != "stop" ]; then
echo fail
else
echo success
fi
}
function stopha {
destroyvm
}
function startha {
status=$(state)
if [ $status != "run" ]; then
echo fail
return
fi
remus -i 50 $DOMAIN $DEST >/dev/null 2>&1 &
sleep 1
ps $! >/dev/null 2>&1
if [ "$?" != "0" ]; then
echo fail
else
echo success
fi
}
function usage {
echo "Usage: $PROG <subcommand> [args]"
}
if [ "$#" -lt "1" ]; then
usage
exit 1
fi
case $1 in
"startvm")
startvm
;;
"destroyvm")
destroyvm
;;
"startha")
startha
;;
"stopha")
stopha
;;
"state")
state
;;
*)
usage
;;
esac
| true |
6ecb74ab786467750eb155887dd3f3bcc7040fe4
|
Shell
|
johnae/dotfiles
|
/home/.profile.d/008_utils.sh
|
UTF-8
| 1,009 | 3.203125 | 3 |
[] |
no_license
|
## lwp
for method in GET HEAD POST PUT DELETE TRACE OPTIONS; do
alias "$method"="lwp-request -m '$method'"
done
#### Some aliases
alias latest="ls -tr | tail -1"
## size of current folder and contents
alias fsize="du -h -c ./ | tail -1"
alias cd..="cd .."
alias ..="cd .."
alias brewup='for od in `brew outdated`; do brew install $od; done'
alias listening='lsof -i -P | grep LISTEN | grep "TCP.*:" | sort'
alias conns='lsof -i -P | grep ESTABLISHED | sort'
remove_fp()
{
rm -rf $HOME/Library/Preferences/Macromedia/Flash\ Player/#SharedObjects/*
mkdir -p $HOME/Library/Preferences/Macromedia/Flash\ Player/Logs
echo "" > $HOME/Library/Preferences/Macromedia/Flash\ Player/Logs/flashlog.txt
rm -rf $HOME/Library/Preferences/Macromedia/Flash\ Player/macromedia.com/support/flashplayer/sys/*
}
# Sha1 from string
sha1fromString()
{
echo -n "$1" | openssl sha1
}
kill_plug_screen()
{
for PID in `ps ax | grep SCREEN | grep usbserial | grep -v grep | awk '{print $1}'`; do sudo kill -9 $PID; done
}
| true |
5f5a8bd0c5c308ea3e676427fb63d8cb3bc10e60
|
Shell
|
heryssantos/ezops-test-hery
|
/devops/build.sh
|
UTF-8
| 927 | 3.234375 | 3 |
[] |
no_license
|
#!/bin/sh
echo "Stoping container"
docker container stop node-container
docker container stop nginx-container
echo "Removing container"
docker container rm node-container
docker container rm nginx-container
echo "Removing images"
docker image rm ezops-test-hery_node
docker image rm ezops-test-hery_nginx
echo "Check if mongo-container is started"
if [ ! "$(docker ps -q -f name=mongo-container)" ]; then
if [ "$(docker ps -aq -f status=exited -f name=mongo-container)" ]; then
# remove container if it's exited (stopped)
docker rm mongo-container
fi
# run mongo container
echo "Running mongo-contaier"
#docker container run --name mongo-container --volume "$(pwd)/mongo/data/db:/data/db" --publish 27017:27017 --network=ezops-test-hery_backend --detach mongo
docker-compose up -d mongo
fi
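# mongo is only (re)started when absent, so the ./mongo/data/db volume survives app rebuilds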
echo "Running node and nginx contaiers"
docker-compose up -d node
docker-compose up -d nginx
| true |
43f59fa25251cd4629a4721ece49f346af7a41c2
|
Shell
|
M1ckmason/teth
|
/examples/bin/private_blockchain.sh
|
UTF-8
| 256 | 2.546875 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/bash
geth=${GETH:-geth}
echo "***** Using geth at: $geth"
echo "Start geth server..."
$geth --datadir data --networkid 31415926 --rpc --rpccorsdomain "*" --nodiscover --unlock 3ae88fe370c39384fc16da2c9e768cf5d2495b48 --password <(echo -n 123456)
| true |
18d6a2762c019482bc58483a45cbe1c7707af887
|
Shell
|
0x4A6F/travis-debug
|
/test/run_vagrant_kitchen.sh
|
UTF-8
| 330 | 2.640625 | 3 |
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -e
echo "**** Box setup ***"
echo "* mkdir /kitchen"
mkdir -p /kitchen
#echo "* cp -ar /mnt/shared /kitchen"
#cp -r /mnt/shared/. /kitchen
echo "* ln -sf /mnt/shared /kitchen"
ln -sf /mnt/shared/* /kitchen/
echo "* cd /kitchen"
cd /kitchen/*
echo "* python test/travis_run.py"
python test/travis_run.py
| true |
eb7f9bfdf027f1155948fb3366a14d76813675d4
|
Shell
|
WeiyiGeek/Study-Promgram
|
/Linux运维进阶/Linux/shell/Shell-syntax/IF.sh
|
UTF-8
| 573 | 3.671875 | 4 |
[] |
no_license
|
#!/bin/bash
# if branch statements
read -p "Please INput A:" a
read -p "Please INput B:" b
read -p "Please INput C:" c
# single-branch statement (1): plain if/fi
if [ $a == $b ];then
echo "a is equal to b!!"
fi
if [ $a != $b ];then
echo "a is not equal to b!!"
fi
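# portability note: `==` inside [ ] is a bashism; POSIX sh uses a single `=`, e.g.
#   if [ "$a" = "$b" ]; then echo "equal"; fi
# (quoting the variables also guards against empty input from read)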
# two-branch statement (2): if/else
if [ $c == $b ];then
echo "c is equal to b!!"
else
echo -e "c is not equal to b!!\n"
fi
# multi-branch statement: if/elif/else
a=10
b=20
if [ $a == $b ]
then
echo "a is equal to b"
elif [ $a -gt $b ]
then
echo "a is greater than b"
elif [ $a -lt $b ]
then
echo "a is less than b"
else
echo "None of the condition met"
fi
| true |
3517556b6b92fe6589357324f13395aec09d0ace
|
Shell
|
Ykisialiou/concourse-playground
|
/certtest/task.sh
|
UTF-8
| 87 | 2.578125 | 3 |
[] |
no_license
|
#!/bin/bash
if [[ $DEBUG == true ]]; then
set -ex
else
set -e
fi
echo $TEST_KEY
| true |
6d9712b748d5f5c939600043d07e6be163d8d1eb
|
Shell
|
pfista/neovim
|
/.ci/coverity.sh
|
UTF-8
| 520 | 2.828125 | 3 |
[
"LicenseRef-scancode-generic-cla",
"Vim",
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
] |
permissive
|
. "$CI_SCRIPTS/common.sh"
# temporarily disable error checking, the coverity script exits with
# status code 1 whenever it (1) fails OR (2) is not on the correct
# branch.
set +e
curl -s https://scan.coverity.com/scripts/travisci_build_coverity_scan.sh |
COVERITY_SCAN_PROJECT_NAME="neovim/neovim" \
COVERITY_SCAN_NOTIFICATION_EMAIL="coverity@aktau.be" \
COVERITY_SCAN_BRANCH_PATTERN="coverity-scan" \
COVERITY_SCAN_BUILD_COMMAND_PREPEND="$MAKE_CMD deps" \
COVERITY_SCAN_BUILD_COMMAND="$MAKE_CMD nvim" \
bash
set -e
exit 0
| true |
870a5fa63e34610bd8304493eef68463fd109088
|
Shell
|
step66/opencvonx64
|
/opencv-on-x64.sh
|
UTF-8
| 1,702 | 2.96875 | 3 |
[
"MIT"
] |
permissive
|
# This script installs opencv 3.1.0 onto an x64 device
# Taken from http://www.pyimagesearch.com/2016/10/24/ubuntu-16-04-how-to-install-opencv/
# Change permissions first with: chmod 0755 opencv-on-x64.sh
# Then run with: ./opencv-on-x64.sh
sudo apt-get update
sudo apt-get upgrade
sudo apt-get install build-essential cmake pkg-config libjpeg8-dev libtiff5-dev libjasper-dev libpng12-dev libavcodec-dev libavformat-dev libswscale-dev libv4l-dev libxvidcore-dev libx264-dev libgtk-3-dev libatlas-base-dev gfortran python2.7-dev python3.5-dev
cd ~
wget -O opencv.zip https://github.com/Itseez/opencv/archive/3.1.0.zip
unzip opencv.zip
wget -O opencv_contrib.zip https://github.com/Itseez/opencv_contrib/archive/3.1.0.zip
unzip opencv_contrib.zip
cd ~
wget https://bootstrap.pypa.io/get-pip.py
sudo python get-pip.py
sudo pip install virtualenv virtualenvwrapper
sudo rm -rf ~/get-pip.py ~/.cache/pip
echo -e "\n# virtualenv and virtualenvwrapper" >> ~/.bashrc
echo "export WORKON_HOME=$HOME/.virtualenvs" >> ~/.bashrc
echo "source /usr/local/bin/virtualenvwrapper.sh" >> ~/.bashrc
source ~/.bashrc
mkvirtualenv cv -p python2
workon cv
pip install numpy
cd ~/opencv-3.1.0/
mkdir build
cd build
cmake -D CMAKE_BUILD_TYPE=RELEASE \
-D CMAKE_INSTALL_PREFIX=/usr/local \
-D INSTALL_PYTHON_EXAMPLES=ON \
-D INSTALL_C_EXAMPLES=OFF \
-D OPENCV_EXTRA_MODULES_PATH=~/opencv_contrib-3.1.0/modules \
-D PYTHON_EXECUTABLE=~/.virtualenvs/cv/bin/python \
-D BUILD_EXAMPLES=ON ..
make -j4
sudo make install
sudo ldconfig
ls -l /usr/local/lib/python2.7/site-packages/
cd ~/.virtualenvs/cv/lib/python2.7/site-packages/
ln -s /usr/local/lib/python2.7/site-packages/cv2.so cv2.so
cd ~
workon cv
python
| true |
492008b10b89c6c9c7d2806975b0ed37e86ce920
|
Shell
|
leotoneo/opa-fm
|
/Esm/rpm_runmake
|
UTF-8
| 6,277 | 3.375 | 3 |
[] |
no_license
|
#!/bin/bash
# BEGIN_ICS_COPYRIGHT8 ****************************************
#
# Copyright (c) 2015, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# END_ICS_COPYRIGHT8 ****************************************
##
## runmake
## -----------
## Perform a full build
##
## Usage:
## rpm_runmake [-V 'module_version'] [-B 'build_config'] [-n]
## or
## rpm_runmake [-V 'module_version'] -r [-B 'build_config'] [-C] [-n]
##
## Arguments:
## -B build_config release type to build, (debug/release). default is release
## -r rerun the build in place without doing a new checkout/tag
## -C don't clobber prior to the rerun
## -n No build. Useful for checking build machine
## -V module_version The MODULEVERSION define.
##
## In addition, the following environment variables are expected to be set:
## RELEASE_TAG, BRANCH_TAG, BUILD_TARGET, PRODUCT, RELEASE_HOME, BUILD_CONFIG
## The target command is expected to have already been run to set up the
## environment for the specified target
##
## re-run can be used for 2 situations:
## 1. re-run a failed build to correct incorrect weeklybuild arguments
## or other non-source code related problems (out of disk space)
## 2. re-run a build for a different BUILD_TARGET_OS_VERSION while using the same
## source code image/tag
## If the source code needs to be corrected, a full weeklybuild should be run
## after the correction such that the code is properly tagged.
##
## Since weeklybuild and target commands always set BUILD_CONFIG, this
## script ignores BUILD_CONFIG. As such the BuildOptions file
## or the -B argument completely control BUILD_CONFIG for this product.
export BUILD_PLATFORM="LINUX"
if [ -d IbAccess ]
then
. MakeTools/funcs-ext.sh
else
. ../MakeTools/funcs-ext.sh
fi
settarget x86_64
settl
# A given release will be targeted for a specific version of ofed
# however we allow build.config to override if needed
export OFED_STACK_PREFIX=${OFED_STACK_PREFIX:-/usr}
# typically exported by build.config
export BUILD_WITH_STACK=${BUILD_WITH_STACK:-OPENIB}
Usage()
{
# include "ERROR" in message so weeklybuild catches it in error log
echo "ERROR: rpm_runmake failed" >&2
echo "Usage: rpm_runmake [-V 'module_version'] [-B 'build_config'] [-n]" >&2
echo " OR" >&2
echo " rpm_runmake [-V 'module_version'] -r [-B 'build_config'] [-C] [-n]" >&2
exit 2
}
showbuild()
{
# output summary of what we are building to stdout
echo "Building for $SUBPRODUCT $BUILD_TARGET $BUILD_CONFIG $(date)..."
}
fix_version()
{
# translate underscores and dashes to dots
echo "$1"|tr '_-' '..'
}
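# e.g. fix_version "10_0-1" prints "10.0.1" (tr maps every '_' and '-' to '.')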
build_config=
rerun=n
Cflag=n
module_version=
while getopts V:B:rCnS param
do
case $param in
r)
rerun=y;;
B)
build_config="$OPTARG";;
C)
Cflag=y;;
n)
nflag=y;;
V)
module_version="$OPTARG";;
?)
Usage
esac
done
shift $(($OPTIND -1))
if [ $# != 0 ]
then
Usage
fi
# for HSM the kernel rev is not important. We simply use the kernel rev
# of the running kernel. While BUILD_TARGET_OS_VERSION is needed by Makerules
# it will have no impact on what is actually built for HSM
export BUILD_TARGET_OS_VERSION=${BUILD_TARGET_OS_VERSION:-`uname -r`}
setver $BUILD_TARGET_OS_VENDOR $BUILD_TARGET_OS_VERSION
if [ -z "$build_config" ]
then
# default to release build
setrel
elif [ "$build_config" != debug -a "$build_config" != release ]
then
Usage
else
export BUILD_CONFIG="$build_config"
fi
# This is used to build a HSM from an ALL_EMB checkout
THISDIR=`basename $PWD`
if [ $THISDIR = Esm ]; then
export PROJ_FILE_DIR=Esm
else
export PROJ_FILE_DIR=.
fi
if [ "$BUILD_WITH_STACK" = "OPENIB" ]
then
if [ ! -d $OFED_STACK_PREFIX ]
then
echo "$0: ERROR: OFED stack not found: $OFED_STACK_PREFIX" >&2
exit 1
fi
fi
if [ "$nflag" = "y" ]
then
if [ "$BUILD_WITH_STACK" = "OPENIB" ]
then
echo "Stack to build for: $BUILD_WITH_STACK ($OFED_STACK_PREFIX)"
else
echo "Stack to build for: $BUILD_WITH_STACK"
fi
exit 0
fi
# The following does not make much sense since we start the build
# from scratch every time. However, in the future, if we decide
# to support incremental build, just uncomment it.
# clean up from prior build when rebuilding
#if [ "$rerun" = y -a "$Cflag" != y ]
if false # always skipped; restore the commented test above to re-enable incremental cleanup
then
# force full rebuild
export REMOVE_DEPENDS=yes
clobber_arg="clobber clobber_stage clobber_release"
else
clobber_arg=
fi
rm -rf packaged_files dist_files
# export DATE for use by prep so all files get same date/time stamp
DATE=${DATE:-"`date +'%m/%d/%y %H:%M'`"}
# Do the actual MAKE
showbuild
env
make $clobber_arg stage
echo "Done $BUILD_TARGET_OS $BUILD_TARGET HSM $SUBPRODUCT $(date)"
echo "=============================================================="
echo "Preping Files for $BUILD_TARGET_OS $BUILD_TARGET $(date)..."
# run prep and package once at end
make prepfiles package
| true |
842759e329b4127dcf4151235d389a8fa5366661
|
Shell
|
KellyLSB/mashbash
|
/source/text.bash
|
UTF-8
| 610 | 3.40625 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
# Text Manipulation
function capitalize() { sed 's/^./\U&/'; }  # GNU sed: \U uppercases the first character
function upper() { awk '{print toupper($0)}'; }
function lower() { awk '{print tolower($0)}'; }
# Color Effects
function color() { IFS= read -r text <&0; echo -e $2 "\e[$1m$text\e[0m"; }
function black() { color "0;30" $1; }
function red() { color "0;31" $1; }
function green() { color "0;32" $1; }
function yellow() { color "0;33" $1; }
function blue() { color "0;34" $1; }
function purple() { color "0;35" $1; }
function cyan() { color "0;36" $1; }
function white() { color "0;37" $1; }
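# Usage sketch (assumed, not part of the original file):
#   echo "hello world" | capitalize   # -> "Hello world"
#   echo "build ok" | green           # prints "build ok" in green
#   echo "no newline" | red -n        # the optional 2nd arg is forwarded to echo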
| true |
1f09badaf8aefef079c0f9fc0696f6dc1cb0566e
|
Shell
|
KijinKims/Novel-Virus-Discovery-Pipeline
|
/pipeline_v1.0.sh
|
UTF-8
| 12,041 | 3.390625 | 3 |
[] |
no_license
|
#!/bin/sh
###############################################################
# #
# Novel Virus Discovery Pipeline #
# #
# This pipeline is for identifying chanses of #
# existing viruses in biological sample using #
# NGS data. #
# #
# Any theoretical questions can be solved with #
# My presentation file. (17/12/11 presented) #
# #
# And instruction for this is provided in enclosed #
# Manual file. Please refer to it.(Written in Korean) #
# #
# If you have any question, please email me. #
# e-mail: skkujin@gmail.com #
# #
# Ki-jin Kim 17/12/29 #
# #
###############################################################
#Paths of Software/DB
HOSTDBPATH=/home/jwsong/Desktop/Pipeline/HOSTDB
TRIMMOMATICPATH=/home/jwsong/miniconda3/envs/pipeline/share/trimmomatic/trimmomatic.jar
CHECKPOINTPATH=/home/jwsong/Desktop/Pipeline
BLASTDBPATH=/home/jwsong/Desktop/Pipeline/BLASTDB
BACTERIADBPATH=/home/jwsong/Desktop/Pipeline/Bacteria_DB
ADAPTORPATH=/home/jwsong/miniconda3/envs/pipeline/share/trimmomatic/adapters
FAQCSPATH=/home/jwsong/miniconda3/envs/pipeline/bin/FaQCs
BOWTIEPATH=/home/jwsong/miniconda3/envs/pipeline/bin/bowtie2
SAMTOOLSPATH=/home/jwsong/miniconda3/envs/pipeline/bin/samtools
BEDTOOLSPATH=/home/jwsong/miniconda3/envs/pipeline/bin/bedtools
SPADESPATH=/home/jwsong/miniconda3/envs/pipeline/bin/spades.py
BLASTNPATH=/home/jwsong/miniconda3/envs/pipeline/bin/blastn
TBLASTXPATH=/home/jwsong/miniconda3/envs/pipeline/bin/tblastx
TIMESTAMP=$(date +%Y-%m-%d_%Hh%Mm%Ss)
#User Input
echo "=====Please type name====="
read name
echo "=====Please type DB name.(Human/Mouse_Rat/Bat/Tick/Cro/Sorex/Penguin)====="
read DB
#Print start time
printf "\t*****Starting time: ${TIMESTAMP}*****\n"
#Clipping adaptor
printf "\n\n=====Trimming adaptor with trimmomatic=====\n\n"
Step=$(grep "CUTADAPT" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "CUTADAPT" ]
then
cd Adaptor_Clipping
java -jar ${TRIMMOMATICPATH} PE -phred33 ../$1 ../$2 ${name}_1.fq unpaired_1.fq ${name}_2.fq unpaired_2.fq ILLUMINACLIP:${ADAPTORPATH}/TruSeq3-PE.fa:2:30:10 LEADING:3 TRAILING:3 SLIDINGWINDOW:4:15 MINLEN:36
if [ $? -ne 0 ]
then
exit 1
fi
cd ..
fi
#Remove PhiX sequence
printf "\n\n=====PhiX removal with Bowtie2=====\n\n"
Step=$(grep "BOWTIE2" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BOWTIE2" ]
then
cd Host_removal
${BOWTIEPATH} -x ${HOSTDBPATH}/PhiX -1 ../Adaptor_Clipping/${name}_1.fq -2 ../Adaptor_Clipping/${name}_2.fq -S ${name}_mapped_and_unmapped.sam
if [ $? -ne 0 ]
then
exit 1
fi
fi
printf "\n\n=====Convert sam files into bam files=====\n\n"
Step=$(grep "SAMTOBAM" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "SAMTOBAM" ]
then
${SAMTOOLSPATH} view -bS ${name}_mapped_and_unmapped.sam > ${name}_mapped_and_unmapped.bam
if [ $? -ne 0 ]
then
exit 1
fi
fi
printf "\n\n=====Filter required unmapped reads=====\n\n"
Step=$(grep "SAMFLAGS" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "SAMFLAGS" ]
then
${SAMTOOLSPATH} view -b -f 12 -F 256 ${name}_mapped_and_unmapped.bam > ${name}_bothEndsUnmapped.bam
if [ $? -ne 0 ]
then
exit 1
fi
fi
printf "\n\n=====Sort of the bam files=====\n\n"
Step=$(grep "BAMSORT" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BAMSORT" ]
then
${SAMTOOLSPATH} sort -n ${name}_bothEndsUnmapped.bam -o ${name}_bothEndsUnmapped_sorted.bam
if [ $? -ne 0 ]
then
exit 1
fi
fi
printf "\n\n=====Convert bam files into fastq files=====\n\n"
Step=$(grep "BAMTOFASTQ" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BAMTOFASTQ" ]
then
${BEDTOOLSPATH} bamtofastq -i ${name}_bothEndsUnmapped_sorted.bam -fq ${name}_phiX_removed_1.fastq -fq2 ${name}_phiX_removed_2.fastq
if [ $? -ne 0 ]
then
exit 1
fi
rm ${name}_mapped_and_unmapped.sam
rm ${name}_mapped_and_unmapped.bam
rm ${name}_bothEndsUnmapped.bam
rm ${name}_bothEndsUnmapped_sorted.bam
cd ..
fi
#Remove host sequence
printf "\n\n=====Host removal with Bowtie2=====\n\n"
Step=$(grep "BOWTIE2" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BOWTIE2" ]
then
cd Host_removal
${BOWTIEPATH} -x ${HOSTDBPATH}/${DB} -1 ${name}_phiX_removed_1.fastq -2 ${name}_phiX_removed_2.fastq -S ${name}_mapped_and_unmapped.sam
if [ $? -ne 0 ]
then
exit 1
fi
rm ${name}_phiX_removed_1.fastq
rm ${name}_phiX_removed_2.fastq
fi
printf "\n\n=====Convert sam files into bam files=====\n\n"
Step=$(grep "SAMTOBAM" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "SAMTOBAM" ]
then
${SAMTOOLSPATH} view -bS ${name}_mapped_and_unmapped.sam > ${name}_mapped_and_unmapped.bam
if [ $? -ne 0 ]
then
exit 1
fi
fi
printf "\n\n=====Filter required unmapped reads=====\n\n"
Step=$(grep "SAMFLAGS" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "SAMFLAGS" ]
then
${SAMTOOLSPATH} view -b -f 12 -F 256 ${name}_mapped_and_unmapped.bam > ${name}_bothEndsUnmapped.bam
if [ $? -ne 0 ]
then
exit 1
fi
fi
printf "\n\n=====Sort of the bam files=====\n\n"
Step=$(grep "BAMSORT" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BAMSORT" ]
then
${SAMTOOLSPATH} sort -n ${name}_bothEndsUnmapped.bam -o ${name}_bothEndsUnmapped_sorted.bam
if [ $? -ne 0 ]
then
exit 1
fi
fi
printf "\n\n=====Convert bam files into fastq files=====\n\n"
Step=$(grep "BAMTOFASTQ" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BAMTOFASTQ" ]
then
${BEDTOOLSPATH} bamtofastq -i ${name}_bothEndsUnmapped_sorted.bam -fq ${name}_host_removed_1.fastq -fq2 ${name}_host_removed_2.fastq
if [ $? -ne 0 ]
then
exit 1
fi
rm ${name}_mapped_and_unmapped.sam
rm ${name}_mapped_and_unmapped.bam
rm ${name}_bothEndsUnmapped.bam
rm ${name}_bothEndsUnmapped_sorted.bam
cd ..
fi
#Filter reads of poor quality
printf "\n\n=====Filtering with FaQCs=====\n\n"
Step=$(grep "FILTER" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "FILTER" ]
then
cd Quality_filter
${FAQCSPATH} -p ../Host_removal/${name}_host_removed_1.fastq ../Host_removal/${name}_host_removed_2.fastq -d ${name}
if [ $? -ne 0 ]
then
exit 1
fi
cd ..
fi
printf "\n\n=====De novo assembly with SPAdes=====\n\n"
Step=$(grep "DENOVO" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "DENOVO" ]
then
cd De_novo_Assembly
${SPADESPATH} --pe1-1 ../Quality_filter/${name}/QC.1.trimmed.fastq --pe1-2 ../Quality_filter/${name}/QC.2.trimmed.fastq -o ${name}
if [ $? -ne 0 ]
then
exit 1
fi
cd ${name}
cp contigs.fasta ../${name}.fas
cd ../..
fi
#BLAST search
#Blastn(somewhat similar)
printf "\n\n=====Blast Search with Blastn=====\n\n"
Step=$(grep "BLASTN" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BLASTN" ]
then
cd Screening
printf " Contig No., Contig Len., Subject Accession, E-Value, Bitscore, Align Len., Subject Title, Status, Identity, Start, End\n" | tee -a "${name}_blastn.csv"
${BLASTNPATH} -query ../De_novo_Assembly/${name}.fas -task blastn -db ${BLASTDBPATH}/Virus -evalue 1.0e-5 -outfmt "10 qseqid qlen sacc evalue bitscore length stitle pident sstart send" -max_target_seqs 10 | tee -a "${name}_blastn.csv"
if [ $? -ne 0 ]
then
exit 1
fi
cd ..
fi
#Megablast(high similiar)
printf "\n\n=====Blast Search with Megablast (high similarity)=====\n\n"
Step=$(grep "MEGABLAST" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "MEGABLAST" ]
then
cd Screening
printf " Contig No., Contig Len., Subject Accession, E-Value, Bitscore, Align Len., Subject Title, Status, Identity, Start, End\n" | tee -a "${name}_megablast.csv"
${BLASTNPATH} -query ../De_novo_Assembly/${name}.fas -task megablast -db ${BLASTDBPATH}/Virus -evalue 1.0e-5 -outfmt "10 qseqid qlen sacc evalue bitscore length stitle pident sstart send" -max_target_seqs 10 | tee -a "${name}_megablast.csv"
if [ $? -ne 0 ]
then
exit 1
fi
cd ..
fi
#Bacteria screening with megablast
printf "\n\n=====Bacteria Blast Search with Megablast (high similarity)=====\n\n"
Step=$(grep "BACMEGABLAST" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BACMEGABLAST" ]
then
cd Screening
printf " Contig No., Contig Len., Subject Accession, E-Value, Bitscore, Align Len., Subject Title, Status, Identity, Start, End\n" | tee -a "${name}_bacteria_megablast.csv"
${BLASTNPATH} -query ../De_novo_Assembly/${name}.fas -task megablast -db ${BACTERIADBPATH}/Bacteria -evalue 1.0e-5 -outfmt "10 qseqid qlen sacc evalue bitscore length stitle pident sstart send" -max_target_seqs 10 | tee -a "${name}_bacteria_megablast.csv"
if [ $? -ne 0 ]
then
exit 1
fi
cd ..
fi
printf "\n\n=====Blast Search with DCmegablast (considering discontinuity)=====\n\n"
#DCmegablast(ignore 3rd of triplet code)
Step=$(grep "DCMEGABLAST" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "DCMEGABLAST" ]
then
cd Screening
printf " Contig No., Contig Len., Subject Accession, E-Value, Bitscore, Align Len., Subject Title, Status, Identity, Start, End\n" | tee -a "${name}_dcmegablast.csv"
${BLASTNPATH} -query ../De_novo_Assembly/${name}.fas -task dc-megablast -db ${BLASTDBPATH}/Virus -evalue 1.0e-5 -outfmt "10 qseqid qlen sacc evalue bitscore length stitle pident sstart send" -max_target_seqs 10 | tee -a "${name}_dcmegablast.csv"
if [ $? -ne 0 ]
then
exit 1
fi
cd ..
fi
#tblastx (translated query vs translated database)
printf "\n\n=====Blast Search with tblastx (translated sequences)=====\n\n"
Step=$(grep "BLASTX" ${CHECKPOINTPATH}/checkpoint.txt)
if [ "${Step}" != "BLASTX" ]
then
cd Screening
printf " Contig No., Contig Len., Subject Accession, E-Value, Bitscore, Align Len., Subject Title, Status, Identity, Start, End\n" | tee -a "${name}_blastx.csv"
${TBLASTXPATH} -query ../De_novo_Assembly/${name}.fas -db ${BLASTDBPATH}/Virus -evalue 1.0e-5 -outfmt "10 qseqid qlen sacc evalue bitscore length stitle pident sstart send" -max_target_seqs 10 | tee -a "${name}_blastx.csv"
if [ $? -ne 0 ]
then
exit 1
fi
cd ..
fi
printf "\t*****End time: ${TIMESTAMP}*****\n"
| true |
2d44c0cffadb1ee8030fd1caac62f18ea62a0952
|
Shell
|
zarlo/gameservers
|
/scripts/old/build.sh.old
|
UTF-8
| 2,099 | 3.53125 | 4 |
[
"GPL-3.0-only",
"MIT"
] |
permissive
|
#!/bin/bash
shopt -s globstar
SPCOMP_PATH=$(realpath "tf/addons/sourcemod/scripting/spcomp64")
COMPILED_DIR=$(realpath 'tf/addons/sourcemod/plugins/')
SCRIPTS_DIR=$(realpath 'tf/addons/sourcemod/scripting/')
chmod 744 "$SPCOMP_PATH"
git diff --name-only HEAD "$1" | grep "\.sp$" > ./00
# ==========================
# Compile all scripts that don't have any smxes
# ==========================
echo "Seeking for .sp in $SCRIPTS_DIR/**/*"
for p in "$SCRIPTS_DIR"/**/*
do
if [ "${p##*.}" == 'sp' ]; then
if [[ $p =~ "stac/" ]] || [[ $p =~ "include/" ]] || [[ $p =~ "disabled/" ]] || [[ $p =~ "external/" ]] || [[ $p =~ "economy/" ]]; then
continue
fi
PLUGIN_NAME=$(realpath --relative-to "$SCRIPTS_DIR" "$p")
PLUGIN_NAME=${PLUGIN_NAME%.*}
PLUGIN_SCRIPT_PATH="$SCRIPTS_DIR/$PLUGIN_NAME.sp"
PLUGIN_COMPILED_PATH="$COMPILED_DIR/$(basename "$PLUGIN_NAME").smx"
if [[ ! -f "$PLUGIN_COMPILED_PATH" ]]; then
echo "$PLUGIN_SCRIPT_PATH" >> ./00
fi
fi
done
echo "[INFO] Full compile list:"
echo "========================="
cat ./00
echo "========================="
echo "[INFO] Starting processing of plugin files."
while read -r p; do
PLUGIN_NAME=$(realpath --relative-to "$SCRIPTS_DIR" "$p")
PLUGIN_NAME=${PLUGIN_NAME%.*}
PLUGIN_SCRIPT_PATH="$SCRIPTS_DIR/$PLUGIN_NAME.sp"
PLUGIN_COMPILED_PATH="$COMPILED_DIR/$(basename "$PLUGIN_NAME").smx"
if [[ ! -f "$PLUGIN_SCRIPT_PATH" ]]; then
if [[ -f "$PLUGIN_COMPILED_PATH" ]]; then
rm "$PLUGIN_COMPILED_PATH";
fi
fi
if [[ $p =~ "stac/" ]] || [[ $p =~ "include/" ]] || [[ $p =~ "disabled/" ]] || [[ $p =~ "external/" ]] || [[ $p =~ "economy/" ]] || [[ ! -f "$PLUGIN_SCRIPT_PATH" ]]; then
continue
fi
echo "$PLUGIN_SCRIPT_PATH";
if [[ -f "$PLUGIN_SCRIPT_PATH" ]]; then
$SPCOMP_PATH -D"$SCRIPTS_DIR" "$(realpath --relative-to "$SCRIPTS_DIR" "$PLUGIN_SCRIPT_PATH")" -o"$PLUGIN_COMPILED_PATH" -v0
fi
done < ./00
rm ./00
echo "[INFO] All plugin files are recompiled."
exit;
| true |
b47c767945e54bd9385316129c3ad262634e07ef
|
Shell
|
ignatka89/ci-mvp
|
/add_job.sh
|
UTF-8
| 589 | 2.609375 | 3 |
[] |
no_license
|
#!/bin/bash
curl -s -XPOST 'http://localhost:8080/createItem?name=seed_jobs_test' -u admin:admin --data-binary @config.xml -H "Content-Type:text/xml" > /dev/null
if [ $? -eq 0 ] ;
 then echo 'Job added to Jenkins; creating the pipeline. Please wait 60s while the environment starts'
sleep 60 && curl -X POST http://admin:admin@localhost:8080/job/seed_jobs_test/build
if [ $? -eq 0 ]
    then echo 'the seed job will run after 4s'
else
curl -X POST http://admin:admin@localhost:8080/job/seed_jobs_test/build
fi
else
echo 'Job was not added to Jenkins; start the Jenkins container first'
fi
| true |
e16b435a1f96c20ae609c152a986665a17511614
|
Shell
|
PaulTrampert/ansible-docker-image
|
/entrypoint.sh
|
UTF-8
| 395 | 3.578125 | 4 |
[] |
no_license
|
#!/bin/sh
set -e
# Ensure user/group exist for ssh
export USER_ID=$(id -u)
export GROUP_ID=$(id -g)
envsubst < /passwd.template > /tmp/passwd
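# /passwd.template is assumed to contain ${USER_ID}/${GROUP_ID} placeholders; envsubst
# fills them so the container's (possibly arbitrary) uid maps to a passwd entry for ssh.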
if [ "$1" = 'ansible-playbook' ]; then
if [ ! -z "$GIT_REPO" ]; then
rm -rf playbook
git clone $GIT_REPO playbook
fi
if [ -f playbook/requirements.yml ]; then
ansible-galaxy install -r playbook/requirements.yml -f
fi
fi
exec "$@"
| true |
c86f951b2661c1955aef4ba0b21dcab69b24aae4
|
Shell
|
alpha-li/netplugin
|
/scripts/netContain/ReleaseContainer.sh
|
UTF-8
| 2,109 | 4.1875 | 4 |
[
"Apache-2.0"
] |
permissive
|
contiv_version=""
docker_user="contiv"
docker_password=""
image_name="contiv/netplugin"
image_tag=""
function usage {
echo "Usage:"
echo "./ReleaseContainer.sh -v <contiv version> -u <docker user> -p <docker password> -i <image name> -t <image tag>"
echo "Example: ./ReleaseContainer.sh -v v0.1-11-30-2016.20-08-20.UTC -u contiv -i contiv/netplugin"
echo "Released versions are available from https://github.com/contiv/netplugin/releases"
echo "Default values are:"
echo "User:contiv, image contiv/netplugin and tag contiv version"
echo "Omit -p to provide password interactively"
exit 1
}
function error_ret {
echo ""
echo $1
exit 1
}
while getopts ":v:u:p:i:t:" opt; do
case $opt in
v)
contiv_version=$OPTARG
;;
u)
docker_user=$OPTARG
;;
p)
docker_password=$OPTARG
;;
i)
image_name=$OPTARG
;;
t)
image_tag=$OPTARG
;;
:)
echo "An argument required for $OPTARG was not passed"
usage
;;
?)
usage
;;
esac
done
if [ "$contiv_version" = "" ]; then
usage
fi
if [ "$image_tag" = "" ]; then
image_tag=$contiv_version
fi
echo "Login to docker hub as $docker_user user"
if [ "$docker_password" = "" ]; then
docker login -u $docker_user
else
docker login -u $docker_user -p $docker_password
fi
wget https://github.com/contiv/netplugin/releases/download/$contiv_version/netplugin-$contiv_version.tar.bz2
tar xvfj netplugin-$contiv_version.tar.bz2
if [ "$?" != "0" ]; then
error_ret "FAILED: Error getting contiv version $contiv_version"
fi
docker build . -t $image_name:$image_tag
if [ "$?" != "0" ]; then
error_ret "FAILED: Error building image for contiv version $contiv_version to $image_name:$image_tag"
fi
docker push $image_name:$image_tag
if [ "$?" = "0" ]; then
echo ""
echo "SUCCESS: Pushed contiv version $contiv_version to $image_name:$image_tag"
else
error_ret "FAILED: Error pushing contiv version $contiv_version to $image_name:$image_tag"
fi
| true |
e72ba8b1970d1167eb8bd6a285c84686f3d3488a
|
Shell
|
khobatha/bahmni-reports
|
/scripts/hostnameConfig.sh
|
UTF-8
| 226 | 2.875 | 3 |
[] |
no_license
|
#!/bin/bash
while read -r line
do
echo "$line" | grep 'uts' &> /dev/null
if [ $? == 0 ]; then
uts_id=`echo "$line" | awk -F" " '{print $4}'`
nsenter --target $uts_id --uts hostname $1
fi
done < <(lsns)
| true |
f9b613000882f1f8b0936d38707fb85c0405bfcd
|
Shell
|
Shourai/xidlehook
|
/publish.sh
|
UTF-8
| 997 | 3.46875 | 3 |
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -e
git reset
do_package() {
echo "Running tests..."
cargo check --manifest-path "$1/Cargo.toml"
cargo test --manifest-path "$1/Cargo.toml"
cargo check --all-features --manifest-path "$1/Cargo.toml"
cargo test --all-features --manifest-path "$1/Cargo.toml"
# If the lock file is changed, update that
git add "$1/Cargo.lock"
git commit --amend --no-edit
echo "Making sure packaging works..."
cargo publish --dry-run --manifest-path "$1"/Cargo.toml
git status
echo "Publishing $1!!! Press enter to continue."
read -r
cargo publish --manifest-path "$1"/Cargo.toml
}
mv Cargo.toml Cargo.toml.bak
cleanup() {
mv Cargo.toml.bak Cargo.toml
}
trap cleanup SIGINT
do_package xidlehook-core
echo "Waiting for crates.io to update"
sleep 5
do_package xidlehook-daemon
echo "Now updating root lock file"
rm Cargo.lock
cargo check
git add Cargo.lock
git commit --amend --no-edit
echo "Now make a tag! Yay!"
cleanup
| true |
c7d12d157e20bdc94e9e6f63c3312f472c8f0e45
|
Shell
|
weejulius/.donotrepeatyourself
|
/install.sh
|
UTF-8
| 291 | 2.984375 | 3 |
[] |
no_license
|
#!/bin/bash
# http://www.gnu.org/software/bash/manual/bash.html
# 1. make the command scripts executable and source them (the ln-to-$HOME/bin step is not implemented here)
ln_commands(){
for f in $(find commands/ali-work -name "*.sh" -type f); do
chmod u+x $f
echo $f
source $f
done
source init.sh
}
#ln_commands
source init.sh
| true |
07c8cd6beb98e36a6db3b08e3a05bdc6d7b6cdc1
|
Shell
|
chaitanyasrinivasan/motif_discovery
|
/scripts/find_motif.sh
|
UTF-8
| 8,640 | 4.03125 | 4 |
[
"MIT"
] |
permissive
|
#!/bin/bash
##### Chaitanya Srinivasan #####
##### Parallel de novo regulatory motif discovery tool #####
helpFunction()
{
echo -e "Usage: $0 -i [/path/to/data] -w [motif size] -t [BED/FASTA/GENES]\n"
echo -e "Required arguments:"
echo -e "\t-i, --input\tFile path to the sequence, genomic coordinates, or genes list data."
echo -e "Optional arguments:"
echo -e "\t-w, --width\tPositive integer of motif width"
echo -e "\t-s, --sequential\tRun sequentially"
echo -e "\t-p, --parallel\tRun in parallel"
echo -e "\t-h, --help\n"
echo "Example run calls below:"
echo ""
echo "$0 -i myfasta.fa -w 10 -p"
echo "$0 -i mybed.bed -s"
echo "$0 -i mygenes.txt"
exit 1 # Exit script after printing help
}
# check for args
if [ $# -eq 0 ]; then
helpFunction
exit 1
fi
# default glob vars
PARALLEL=0
SEQUENTIAL=0
# read in command line arguments
while [ "$1" != "" ]; do
case $1 in
-i | --input ) shift
INPUT=$1
;;
-w | --width ) shift
WIDTH=$1
;;
-s | --sequential ) shift
SEQUENTIAL=1
;;
-p | --parallel ) shift
PARALLEL=1
;;
-h | --help ) helpFunction
exit 1
;;
*)
helpFunction
exit 1
esac
shift
done
############## INPUT COMPATIBILITY CHECKS ################
# check if input file exists
if [ $(ls ${INPUT}| wc -l) -eq 0 ]
then
echo "Error: input ${INPUT} does not exist"
exit 1
fi
# check the file type is compatible
if [[ ${INPUT: -4} != ".bed" && ${INPUT: -3} != ".fa" && ${INPUT: -4} != ".txt" ]]
then
echo "Error: The file must have extension .fa, .bed, or .txt"
helpFunction
exit 1
fi
scanSeqs() {
# check if more than 1 sequence
if [ $(wc -l <${INPUT}) -lt 2 ]
then
echo "Error: input fasta ${INPUT} needs at least 2 sequences"
exit 1
fi
# check input width
if (($WIDTH < 1))
then
echo "Error: width must be an integer greater than 0"
exit 1
fi
# check width is not greater than a sequence length
while IFS= read -r line;
do
if ((${#line} < $WIDTH))
then
echo "Error: width ${WIDTH} is greater than the length of a sequence"
exit 1
fi;
done < ${INPUT}
}
#Remove sequences from FASTA that are not compatible
preProcessing() {
echo "Preprocessing fasta..."
awk 'NR % 2 == 0 {print}' $INPUT > "${INPUT::-3}_seqs.txt"
grep -vE "(X)" "${INPUT::-3}_seqs.txt" | grep -vE "(N)" | grep -vE "(n)" | tr '[:upper:]' '[:lower:]' > "${INPUT::-3}_filtered.txt"
rm "${INPUT::-3}_seqs.txt"
mv "${INPUT::-3}_filtered.txt" "${INPUT::-3}_seqs.txt"
INPUT="${INPUT::-3}_seqs.txt"
}
############## WIDTH INFERENCE ######################
alignSeqs() {
# run width inference if width not provided
if [ -z "${WIDTH}" ];
then
echo "Inferring width from sequence alignment..."
WIDTH=`python run_align.py -i ${INPUT}`
echo "Setting motif width as ${WIDTH}..."
fi
}
############## PARALLEL ALGORITHM ################
parallelRun()
{
mkdir -p "${INPUT::-4}_splits"
#SHUFFLE
echo "Splitting data..."
#Randomize sequence order in case nearby sequences are similar
shuf ${INPUT} > "${INPUT::-4}_shuffled.txt"
#PARTITION
lines_per_file=5
split -d --lines=${lines_per_file} --additional-suffix=.txt "${INPUT::-4}_shuffled.txt" "${INPUT::-4}_splits"/split
#If there is a split file with length 1, add it to the first file
last_file=$(ls -1 "${INPUT::-4}_splits"/split* | tail -n 1)
first_file=$(ls -1 "${INPUT::-4}_splits"/split* | head -n 1)
if [ $(wc -l <$last_file) -eq 1 ]
then
cat $last_file >> $first_file
rm $last_file
fi;
#CREATE JOB ARRAY OF PARTITIONS
echo "Creating job array..."
ls -d "${INPUT::-4}_splits"/*.txt >> jobs.txt
#SUBMIT JOBS
echo "Submitting jobs..."
NUM_JOBS=$(wc -l <jobs.txt)
sed "s/REPLACE/${NUM_JOBS}/g" template.sb > jobs.sb
# clear old error logs
if [ $(grep err logs/* | wc -l) -gt 0 ]
then
rm logs/*err*
fi
sbatch --wait jobs.sb ${WIDTH}
# check that all jobs completed
for file in logs/*err*;
do
if [ $(wc -l <$file) -gt 0 ]
then
echo "Error : Job ${file::-8} did not complete. Error logs written to ${file}"
exit 1
fi
done
rm logs/*err*
echo "All jobs ended, logs written to logs/"
#MERGE OUTPUTS AND RUN SEQUENTIAL GIBBS
echo "Running merge..."
total_lines=$(wc -l <${INPUT})
python merge.py -j jobs.txt -f ${INPUT} -k ${total_lines} -w ${WIDTH}
# program complete, clean-up
rm jobs.txt jobs.sb "${INPUT::-4}_shuffled.txt"
rm -r "${INPUT::-4}_splits"
}
############### DETERMINE MODE ##########################
startAnalysis() {
GRAN=20 #empirically determined, see motif_discovery/performance
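# rationale: below ~$GRAN sequences the Slurm scheduling overhead is assumed to
# outweigh any parallel speedup, so a single sequential Gibbs run is used instead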
# automatically infer if parallel or sequential is faster
if [ $SEQUENTIAL -eq $PARALLEL ]
then
if [ $(wc -l <${INPUT}) -lt $GRAN ]
then
python run_gibbs.py -i $INPUT -w $WIDTH -m conquer
exit 0
else
# run parallel if slurm can be used
if [ -x "$(command -v sbatch)" ]
then
parallelRun
exit 0
else
python run_gibbs.py -i $INPUT -w $WIDTH -m conquer
exit 0
fi
fi
fi
# sequential run
if (( SEQUENTIAL ))
then
python run_gibbs.py -i $INPUT -w $WIDTH -m conquer
exit 0
fi
# parallel run
if (( PARALLEL ))
then
if ! [ -x "$(command -v sbatch)" ]
then
echo "Error : parallel job submission requires Slurm, use -s instead of -p"
exit 1
else
parallelRun
exit 0
fi
fi
}
############# BED TO FASTA ####################################
bedToFasta() {
if [ -x "$(command -v bedtools)" ]
then
#Check BED is correctly formatted using bedtools quick command
echo "Merging bed coordinates..."
sort -k 1,1 -k 2,2n ${INPUT} | bedtools merge -i stdin > merged.bed
#Download hg38 fasta
if [ ! -f hg38.fa ]
then
wget -nc ftp://hgdownload.cse.ucsc.edu/goldenPath/hg38/bigZips/hg38.fa.gz
gunzip hg38.fa.gz
fi
bedtools getfasta -fi hg38.fa -bed merged.bed > "${INPUT::-4}.fa"
#Clean up
rm merged.bed
gzip hg38.fa
INPUT="${INPUT::-4}.fa"
else
echo "Error: bedtools is not installed or is not executable from your path."
exit 0
fi
}
################### GENES TO BED #############################
genesToBed() {
if [ -x "$(command -v bedtools)" ]
then
#Download GENCODE v33 and hg38 chrom sizes
if [ ! -f gencode.v33.annotation.gff3 ]
then
wget -nc ftp://ftp.ebi.ac.uk/pub/databases/gencode/Gencode_human/release_33/gencode.v33.annotation.gff3.gz
gunzip gencode.v33.annotation.gff3.gz
fi
wget -nc http://hgdownload.soe.ucsc.edu/goldenPath/hg38/bigZips/hg38.chrom.sizes
#GET GENE AND EXON COORDINATES
echo "Mapping gene names to gene and exon coordinates..."
# returns gene and exon coordinates (2 files) for each gene in the list
python ../scripts/get_gene_and_exon_coordinates.py $INPUT
GENEBED="${INPUT::-4}_genes.bed"
EXONBED="${INPUT::-4}_exons.bed"
#MERGE, SUBTRACT EXONS FROM GENE, AND FLANK GENE 20KB TO CAPTURE REGULATORY ACTIVITY
echo "Merging exons..."
sort -k1,1 -k2,2n $EXONBED | bedtools merge -i stdin > "${EXONBED::-4}_merged.bed"
EXONMERGEDBED="${EXONBED::-4}_merged.bed"
echo "Subtracting merged exons from gene and getting gene flanks..."
sort -k 1,1 -k 2,2n $GENEBED > "${GENEBED::-4}_sorted.bed"
GENESORTEDBED="${GENEBED::-4}_sorted.bed"
bedtools subtract -a $GENESORTEDBED -b $EXONMERGEDBED > "${INPUT::-4}_introns.bed"
INTRONBED="${INPUT::-4}_introns.bed"
bedtools flank -i $GENESORTEDBED -g hg38.chrom.sizes -b 20000 | bedtools subtract -a stdin -b $GENESORTEDBED > "${GENEBED::-4}_20KBflank.bed"
GENEFLANKBED="${GENEBED::-4}_20KBflank.bed"
#ADD GENE FLANKS TO INTRONS
cat $GENEFLANKBED $INTRONBED | sort -k 1,1 -k 2,2n | bedtools merge -i stdin > "${INTRONBED::-4}_and_intergenics.bed"
mv "${INPUT::-4}_introns_and_intergenics.bed" "${INPUT::-4}.bed"
# set glob var to new format
INPUT="${INPUT::-4}.bed"
#CLEANUP AND COMPLETE
gzip gencode.v33.annotation.gff3
rm ${GENEFLANKBED} ${INTRONBED} ${GENESORTEDBED} ${EXONMERGEDBED} ${EXONBED} ${GENEBED}
echo "Done mapping genes to BED."
else
echo "Error: bedtools is not installed or is not executable from your path."
exit 1
fi
}
################### MAIN ####################################
#FASTA
if [[ ${INPUT: -3} = ".fa" ]]
then
# preprocess sequences
preProcessing
# infer width if not provided
alignSeqs
# quality check
scanSeqs
# run motif discovery
startAnalysis
fi
#BED
if [ ${INPUT: -4} = ".bed" ]
then
# convert BED to FASTA
bedToFasta
# same as FASTA from here
preProcessing
alignSeqs
scanSeqs
startAnalysis
fi
#GENES
if [ ${INPUT: -4} = ".txt" ]
then
#Map genes to regulatory coordinates in hg38
genesToBed
# same as for BED from here
bedToFasta
preProcessing
alignSeqs
scanSeqs
startAnalysis
fi
| true |
8ee14cc685180eca930e5f6c8fdfdc8c04d0bb4c
|
Shell
|
OEHU/data-marketplace
|
/scripts/install.sh
|
UTF-8
| 189 | 2.984375 | 3 |
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env sh
set -e
components="server client"
for component in $components
do
printf "\n\nInstalling dependencies: $component\n"
cd $component
npm install
cd ..
done
| true |
4e9a4a03bb062a4ad9975d3195e84d6510e878dd
|
Shell
|
joaofnds/dotfiles
|
/dot_scripts/executable_switch-theme
|
UTF-8
| 747 | 3.28125 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
set -euo pipefail
export PATH="/usr/local/bin:$PATH"
export PATH="$HOME/.bin:$PATH"
declare theme
declare other_theme
is_dark=$(osascript -l JavaScript -e "Application('System Events').appearancePreferences.darkMode.get()")
if [[ "$is_dark" == "true" ]]; then
theme="dark"
other_theme="light"
else
theme="light"
other_theme="dark"
fi;
# update alacritty
sed -i "" "s/colors: \*${other_theme}/colors: *${theme}/" ~/.config/alacritty/alacritty.yml &
# update tmux
sed -i "" "s/${other_theme}.conf/${theme}.conf/" ~/.config/tmux/tmux.conf &&
tmux source-file ~/.config/tmux/tmux.conf &
# update vim
pgrep vim | xargs -n1 kill -SIGUSR1 &
# update emacs
pgrep Emacs-x86_64-10_14 | xargs -n1 kill -SIGUSR1 &
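# note: this assumes vim/Emacs are configured to reload their colorscheme on SIGUSR1
# (e.g. an autocmd or signal hook); the signal by itself changes nothing.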
| true |
72c6094679c02aeafee3f81c7103703c720302cc
|
Shell
|
Shubham-Kadam-17/MyRepository
|
/EmpWage.sh
|
UTF-8
| 855 | 3.34375 | 3 |
[] |
no_license
|
#!/bin/bash -x
isFullTime=2
isPartTime=1
absent=0
empRatePerHr=20;
workingday=0
workinghr=0
max_working_day=20
max_working_hr=50
day_counter=1
function cal_work_hr () {
case $1 in
$isFullTime )
empHrs=8
;;
$isPartTime )
empHrs=4
;;
*)
empHrs=0
;;
esac
echo $empHrs
}
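# e.g. cal_work_hr 2 -> 8 (full time), cal_work_hr 1 -> 4 (part time), anything else -> 0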
while [ $workingday -le $max_working_day ] && [ $workinghr -le $max_working_hr ]
do
random=$((RANDOM%3))
empHrs="$( cal_work_hr $random )"
workinghr=$((workinghr + empHrs))
salary=$((salary + empHrs*empRatePerHr))
	daily_wage[day_counter++]=$salary
if [ $random -ne 0 ]
then
((workingday++))
fi
done
echo $salary
echo ${daily_wage[@]}
echo ${!daily_wage[@]}
| true |
03efade6f86158f6847e7065b2307091c1d190bb
|
Shell
|
delkyd/alfheim_linux-PKGBUILDS
|
/labjack-exodriver-git/labjack-exodriver.install
|
UTF-8
| 396 | 3.046875 | 3 |
[
"MIT"
] |
permissive
|
post_install() {
getent group labjack >/dev/null || /usr/sbin/groupadd -g 237 labjack
udevadm control --reload-rules 2>/dev/null
cat << EOF
===> Add users that require access to labjack to the "labjack" group
EOF
}
post_upgrade() {
post_install $1
}
post_remove() {
groupdel labjack >/dev/null 2>&1 || true
udevadm control --reload-rules 2>/dev/null
}
# vim:set ts=2 sw=2 ft=sh et:
| true |
028f5335147ee816fab531e83626df104cd838f2
|
Shell
|
gch1p/captive-netns-helper
|
/captive-portal.sh
|
UTF-8
| 755 | 3.625 | 4 |
[
"MIT"
] |
permissive
|
#!/bin/bash
if [ $EUID -eq 0 ]; then
echo "error: this script should not be launched as root"
exit 1
fi
if [ $# -eq 0 ]; then
echo "error: no command specified"
exit 1
fi
IFACE=wlp3s0
# dhcpcd -U prints the interface's DHCP lease variables in shell-assignable form;
# IFACE must be set before this point (the original defined it after use)
export $(dhcpcd -U $IFACE)
if [ -z "$domain_name_servers" ]; then
	echo "error: \$domain_name_servers variable not found"
	exit 1
fi
ENV=
for var in DISPLAY HOME PWD EDITOR USER XAUTHORITY LANG DBUS_SESSION_BUS_ADDRESS; do
value="${!var}"
if [ ! -z "$value" ]; then
ENV="$ENV --env $var=$value"
fi
done
_doas="doas"
if ! command -v doas &>/dev/null; then
_doas="sudo"
fi
$_doas captive-netns-helper \
--nameserver $domain_name_servers \
--ns-file /run/netns/captive \
--uid $(id -u) --gid $(id -g) $ENV "$@"
| true |
6ec8265ed6671792d59fa8e8e5fa0ba356df84a5
|
Shell
|
fairwood136/traefik
|
/.semaphoreci/setup.sh
|
UTF-8
| 1,322 | 2.734375 | 3 |
[
"CC-BY-3.0",
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -e
for s in apache2 cassandra elasticsearch memcached mysql mongod postgresql sphinxsearch rethinkdb rabbitmq-server redis-server; do sudo service $s stop; done
sudo swapoff -a
sudo dd if=/dev/zero of=/swapfile bs=1M count=3072
sudo mkswap /swapfile
sudo swapon /swapfile
sudo rm -rf /home/runner/.rbenv
#export DOCKER_VERSION=18.06.3
source .semaphoreci/vars
if [ -z "${PULL_REQUEST_NUMBER}" ]; then SHOULD_TEST="-*-"; else TEMP_STORAGE=$(curl --silent https://patch-diff.githubusercontent.com/raw/containous/traefik/pull/${PULL_REQUEST_NUMBER}.diff | patch --dry-run -p1 -R || true); fi
echo ${SHOULD_TEST}
if [ -n "$TEMP_STORAGE" ]; then SHOULD_TEST=$(echo "$TEMP_STORAGE" | grep -Ev '(.md|.yaml|.yml)' || :); fi
echo ${TEMP_STORAGE}
echo ${SHOULD_TEST}
#if [ -n "$SHOULD_TEST" ]; then sudo -E apt-get -yq update; fi
#if [ -n "$SHOULD_TEST" ]; then sudo -E apt-get -yq --no-install-suggests --no-install-recommends --force-yes install docker-ce=${DOCKER_VERSION}*; fi
if [ -n "$SHOULD_TEST" ]; then docker version; fi
if [ -f "./.semaphoreci/golang.sh" ]; then ./.semaphoreci/golang.sh; fi
if [ -f "./.semaphoreci/golang.sh" ]; then export GOROOT="/usr/local/golang/1.12/go"; fi
if [ -f "./.semaphoreci/golang.sh" ]; then export GOTOOLDIR="/usr/local/golang/1.12/go/pkg/tool/linux_amd64"; fi
| true |
7345ac14b4506595fcdc28f341b20aaf38c76c01
|
Shell
|
valentineus/auth-http
|
/build.sh
|
UTF-8
| 511 | 2.875 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/sh
# Author: Valentin Popov
# Email: info@valentineus.link
# Date: 2017-08-14
# Usage: /bin/sh build.sh
# Description: Build the final package for installation in Moodle.
# Updating the Environment
PATH="/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
export PATH="$PATH:/usr/local/scripts"
# Build the package
cd ..
zip -9 -r auth-http.zip auth-http \
-x "auth-http/.git*" \
-x "auth-http/.travis.yml" \
-x "auth-http/build.sh"
# End of work
exit 0
| true |
c0c227f06c30ff9bbb7aee02a6c5f12557d654a8
|
Shell
|
yeoman-projects/generator-jhipster-liquibase
|
/test-integration/scripts/12.2-run-incremental.sh
|
UTF-8
| 948 | 3.140625 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/bash
set -e
source $(dirname $0)/00-init-env.sh
cd "$JHI_FOLDER_APP"
npm uninstall generator-jhipster generator-jhipster-liquibase
if [[ "$JHI_LIQUIBASE" == "jdl" ]]; then
#-------------------------------------------------------------------------------
# Generate with JDL
#-------------------------------------------------------------------------------
cp -f "$JHI_SAMPLES"/"$JHI_APP"-update/*.jdl "$JHI_FOLDER_APP"/
jhipster import-jdl *.jdl --no-insight --force --skip-install --blueprints liquibase
else
cp -f "$JHI_SAMPLES"/"$JHI_APP"/liquibase.json "$JHI_FOLDER_APP"/
jhipster liquibase --apply liquibase.json --no-insight --force --skip-install
fi
#-------------------------------------------------------------------------------
# Check folder where the app is generated
#-------------------------------------------------------------------------------
ls -al "$JHI_FOLDER_APP"
git add -N .
git diff -p
| true |
7c54bbb51186b00b663a40e01ff150c91a015676
|
Shell
|
cha63506/dots
|
/dunwall/bin/bar.bak/status.sh
|
UTF-8
| 5,936 | 3.734375 | 4 |
[] |
no_license
|
#!/bin/sh
source $(dirname $0)/config.sh
OUT=$default_output
function fg() {
case $1 in
bar) echo "\f$2" ;;
tmux) echo "#[fg=${colors[$2]}]" ;;
none) echo "" ;;
?) echo "\f$2" ;;
esac
}
function bg() {
case $1 in
bar) echo "\b$2" ;;
tmux) echo "#[bg=$2]" ;;
none) echo "" ;;
?) echo "\b$2" ;;
esac
}
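# e.g. (assuming the colors[] array is defined in config.sh):
#   fg bar ff0000 -> "\fff0000" (bar's inline foreground escape)
#   fg tmux red   -> "#[fg=<hex from colors[red]>]"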
# print formatted output. need 2 params: display <value> <icon>
function display () {
if [ -n "$1" ]; then
echo -n "$(fg $OUT ${hl})"
echo -en "$2 "
echo "$(fg $OUT ${fg})$1"
fi
}
function workspaces () {
dskp_num=$(xprop -root _NET_NUMBER_OF_DESKTOPS | cut -d ' ' -f3)
dskp_cur=$(xprop -root _NET_CURRENT_DESKTOP | cut -d ' ' -f3)
buffer=""
for w in $(seq 1 $(($dskp_num-1))); do
if [ "$w" -eq "$dskp_cur" ]; then
buffer="$buffer$(echo -e '\ue190')"
#buffer="$buffer\u${fg} ${dskp_tag[$w]} \u${bg}"
else
buffer="$buffer$(fg $OUT ${hl})$(echo -e '\ue190')$(fg $OUT ${fg})"
#buffer="$buffer ${dskp_tag[$w]} "
fi
done
echo -n "${buffer}"
}
function ratgrp() {
dskp_tag=('' 'MEH' 'WEB' 'DEV')
dskp_cur=$(ratpoison -c 'groups' | cut -sd'*' -f1)
dskp_num=$(ratpoison -c 'groups'| wc -l)
val=""
for w in $(seq 1 $dskp_num); do
if [ "$w" -eq "$dskp_cur" ]; then
val="$val\u${fg} ${dskp_tag[$w]} \u${bg}"
else
val="$val ${dskp_tag[$w]} "
fi
done
echo -n "${val}"
}
function groups() {
if [ "$(xprop -root _NET_WM_NAME|cut -d\" -f2)" = "ratpoison" ]; then
echo "$(ratgrp)"
else
echo "$(workspaces)"
fi
}
function mails () {
new=$(~/bin/mcount ~/var/mail/INBOX/new/)
#cur=$(~/bin/mcount ~/var/mail/INBOX/cur/)
#val="$new/$cur"
val="$new"
ico=${i_mail}
display "$val" "$ico"
}
function mpd_now_playing () {
val=$(mpc current --format "$mpc_format" 2>/dev/null)
ico=${i_musk}
if [[ -z $val ]]; then
val=''
ico=''
fi
display "$val" "$ico"
}
function volume () {
val=$(amixer sget $alsa_channel | sed -n 's/.*\[\([0-9/]*%\)\].*/\1/p' | uniq)
ico=${i_alsa}
display "$val" "$ico"
}
function battery () {
val=$(acpi -b | sed 's/^.* \([0-9]*%\).*$/\1/')
ico=${i_batt}
display "$val" "$ico"
}
function packages () {
val=$"$(pacman -Q| wc -l) pkg"
ico=${i_pkgs}
display "$val" "$ico"
}
function memory () {
mem_tot=$(free -m| sed -n 2p| awk '{print $2}')
mem_use=$(free -m| sed -n 3p| awk '{print $3}')
val="$(echo "$mem_use*100/$mem_tot" | bc)%"
ico=${i_memy}
display "$val" "$ico"
}
function gputemp () {
val="$(nvidia-smi -q -d TEMPERATURE | grep Gpu | sed 's/.*: //')"
ico=${i_grap}
display "$val" "$ico"
}
function gpufanspeed () {
val="$(nvidia-smi -q | grep "Fan" | sed 's/.*: \([0-9]*\).*$/\1/')%"
ico=${i_fans}
display "$val" "$ico"
}
function processes () {
val="$(iostat -c | sed -n "4p" | awk -F " " '{print $1}')%"
ico=${i_load}
display "$val" "$ico"
}
function network () {
interface_up=$(ip link | grep 'state UP' | wc -l)
if [ ${interface_up} -gt 1 ]; then
val="multi connection"
ico=${i_netw}
else
net_interface=$(ip link| grep 'state UP'|
sed 's/[0-9]: \([^:]*\):.*$/\1/')
if [ "$net_interface" = "$net_wire" ]; then
val=$(ip addr show $net_interface| grep 'inet '|
sed 's#.* \(.*\)/.*$#\1#')
ico=${i_netw}
elif [ "$net_interface" = "$net_wifi" ]; then
val=$(ip addr show $net_interface| grep 'inet '|
sed 's#.* \(.*\)/.*$#\1#')
ico=${i_wifi}
else
val=""
ico=${i_netw}
fi
fi
[[ -z "$val" ]] && val="disconnected"
display "$val" "$ico"
}
function clock () {
val=$(date +${date_format})
ico=${i_time}
display "$val" "$ico"
}
function fillbar () {
while getopts "B:F:H:LCRO:s:bcflmnprtvw" opt; do
case $opt in
# Specific options for bar-aint-recursive
B) bg=$OPTARG ;; # background color
F) fg=$OPTARG ;; # foreground color
H) hl=$OPTARG ;; # highlights color
L) buffer="${buffer}\l " ;; # left justify
C) buffer="${buffer}\c " ;; # center text
R) buffer="${buffer}\r " ;; # right justify
# Which program is the output intended for ? (bar|tmux|none)
O) OUT=$OPTARG ;;
# Content of the output
b) [[ -n "$(battery)" ]] && buffer="${buffer}$(battery) ${sp}" ;;
c) [[ -n "$(clock)" ]] && buffer="${buffer}$(clock) ${sp}" ;;
f) [[ -n "$(gpufanspeed)" ]] && buffer="${buffer}$(gpufanspeed) ${sp}" ;;
l) [[ -n "$(mpd_now_playing)" ]] && buffer="${buffer}$(mpd_now_playing) ${sp}" ;;
m) [[ -n "$(mails)" ]] && buffer="${buffer}$(mails) ${sp}" ;;
n) [[ -n "$(network)" ]] && buffer="${buffer}$(network) ${sp}" ;;
p) [[ -n "$(processes)" ]] && buffer="${buffer}$(processes) ${sp}" ;;
r) [[ -n "$(memory)" ]] && buffer="${buffer}$(memory) ${sp}" ;;
t) [[ -n "$(gputemp)" ]] && buffer="${buffer}$(gputemp) ${sp}" ;;
v) [[ -n "$(volume)" ]] && buffer="${buffer}$(volume) ${sp}" ;;
w) [[ -n "$(groups)" ]] && buffer="${buffer}$(groups) ${sp}" ;;
esac
done
# Set the default fg/bg and remove trailing separator (if any)
echo "$(bg $OUT ${bg})$(fg $OUT ${fg}) $buffer " | sed "s/${sp}$//"
}
fillbar $@
| true |
2962879ed4364fd23a7459694f9e62aff0e1527a
|
Shell
|
sspross/postgres-hstore-postgis
|
/create_extensions.sh
|
UTF-8
| 473 | 2.890625 | 3 |
[] |
no_license
|
# Because both template1 and the user postgres database have already been created,
# we need to create the extensions in template1 and then recreate the postgres database.
#
# Running CREATE EXTENSION in both template1 and postgres can lead to
# the extensions having different eid's.
gosu postgres postgres --single template1 -E <<-EOSQL
CREATE EXTENSION hstore;
CREATE EXTENSION postgis;
DROP DATABASE postgres;
CREATE DATABASE postgres TEMPLATE template1;
EOSQL
| true |
3eb7d947a893ff14c555f0b3e6e27de7e513db36
|
Shell
|
SiChiTong/pose_tracker_stack
|
/pose_instance_builder/scripts/rosdep.sh
|
UTF-8
| 280 | 2.5625 | 3 |
[] |
no_license
|
#!/bin/sh
# External, ROS and system package dependencies
PACKAGES="python-pip
python-tables"
PIP_PACKAGES="numpy
pandas
more-itertools
toolz
mock"
sudo apt-get install $PACKAGES
sudo pip install $PIP_PACKAGES
rm -rf build
| true |
a05571567d22de1a33111aa6c276fc6be0d43c48
|
Shell
|
petronny/aur3-mirror
|
/elmerpost/PKGBUILD
|
UTF-8
| 1,253 | 2.546875 | 3 |
[] |
no_license
|
# Contributor: Christoph Siegenthaler < csi@gmx.ch >
pkgname=elmerpost
pkgver=5.2.0
pkgrel=4
pkgdesc="Elmer is a simulation tool for CFD, FEM, electromagnetics, heat transfer and others featuring a PDE solver"
depends=('elmer_fem')
makedepends=('gcc-fortran')
url="http://www.csc.fi/elmer/"
source=(ftp://ftp.funet.fi/pub/sci/physics/elmer/src/${pkgname}-${pkgver}.tar.gz elmerpost \
copy.patch sico2elmer queryglxext)
md5sums=('b9478e412e62a28196760624b97bfed3' '1467e8b4b5e3ab494dac16e4b400f6d4' \
'f65422b491c009e2dd0b988082288dd0' '290912461c09fa297b58de39ddb0c246' \
'fdae7f5b7890b45424ee5963607882e5')
build(){
mkdir -p $startdir/pkg/opt/elmer/bin
mkdir -p $startdir/pkg/opt/elmer/share/elmerpost/{tcl,modules,lib,help}
mkdir -p $startdir/pkg/usr/bin
cd $startdir/src/${pkgname}-${pkgver}
patch -p1 -i ../copy.patch
export FC=gfortran
export F77=gfortran
sed -i "s#STARTDIR#$startdir#" $startdir/src/${pkgname}-${pkgver}/src/Makefile.in
./configure --prefix=/opt/elmer || return 1
make || return 1
make DESTDIR=$startdir/pkg install || return 1
install -c -m 755 $startdir/src/elmerpost $startdir/pkg/usr/bin
install -c -m 755 $startdir/src/sico2elmer $startdir/pkg/usr/bin
install -c -m 755 $startdir/src/queryglxext $startdir/pkg/usr/bin
}
| true |
ece1da58872dec331b5198c89b69dc1d2d8dfc2b
|
Shell
|
code4aichi/Bproposer
|
/docs/scripts/gendbsets.sh
|
UTF-8
| 733 | 3.0625 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/bash
DBMAN_ID=kenken9
DBMAN_KEY=8b58S3jGp3Tp
DB_NAME=aichifoods
TAB_NAME=foods
echo MySQL initialization for any DBs...
mysqladmin -u root password 'kenken3922' &&
echo done.
echo DB user initialization for db "$DB_NAME"...
# NOTE: -p takes the password with no space; "-p <word>" would read <word> as a database name
mysql -u root -p'kenken3922' -e "CREATE USER ${DBMAN_ID}@localhost IDENTIFIED BY '${DBMAN_KEY}';" &&
mysql -u root -p'kenken3922' -e "GRANT ALL ON ${DB_NAME}.* TO ${DBMAN_ID}@localhost;" &&
echo done.
echo Table "$TAB_NAME" creation...
mysql -u ${DBMAN_ID} -p"${DBMAN_KEY}" -e "CREATE DATABASE ${DB_NAME} CHARSET utf8mb4;" &&
mysql -u ${DBMAN_ID} -p"${DBMAN_KEY}" ${DB_NAME} -e "CREATE TABLE ${TAB_NAME} ( id INT auto_increment, regionname VARCHAR(30), foodname VARCHAR(100), index(id));" &&
echo done.
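# Quick verification sketch (illustrative, not part of the setup itself):
# mysql -u ${DBMAN_ID} -p"${DBMAN_KEY}" ${DB_NAME} -e "SHOW TABLES;"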
| true |
9e657eca723a534bbe73193ae4d8e362367a3072
|
Shell
|
djmulcahy/docker_dss
|
/testall.sh
|
UTF-8
| 2,444 | 4.5 | 4 |
[
"BSD-2-Clause"
] |
permissive
|
#!/bin/bash
# This program builds and tests all of the variations of the
# beroset/opendss software container and then tests them all.
# The output should look something like this:
#
# Build time for alpine: 225.998
# Build time for arch: 174.521
# Build time for centos: 359.120
# Build time for debian: 12.693
# Build time for fedora: 289.985
# Build time for opensuse: 232.167
# Build time for ubuntu: 8.336
# [OK] alpine
# [OK] arch
# [OK] centos
# [OK] debian
# [OK] fedora
# [OK] opensuse
# [OK] ubuntu
#
CONTAINER_ENGINE="podman"
DISTROLIST="alpine arch centos debian fedora opensuse ubuntu"
OK="\e[0;32mOK\e[m"
BAD="\e[0;31mFailed!\e[m"
build_one() (
local distro="${1}"
TIMEFORMAT='%R'
printf $"Build time for %s: " "${distro}"
# time's %R report goes to stderr, so it prints right after the printf label
time { ${CONTAINER_ENGINE} build -f=work/Dockerfile.$distro -t beroset/opendss/$distro work >/dev/null 2>&1; }
)
test_one() (
local distro="${1}"
local SHARED_DIR="$(pwd)/shared/"
if [ ! -d "${SHARED_DIR}" ] ; then
mkdir "${SHARED_DIR}"
else
rm -f "${SHARED_DIR}"*
fi
cp StevensonPflow-3ph.dss "${SHARED_DIR}"
${CONTAINER_ENGINE} run --rm -v "${SHARED_DIR}":/mnt/host:z "beroset/opendss/${distro}" "/mnt/host/StevensonPflow-3ph.dss" 1>/dev/null 2>&1
if sha512sum -c checksums --status ; then
echo -e "[${OK}] ${distro}"
else
echo -e "[${BAD}] ${distro}"
fi
)
while test $# -gt 0; do
case "$1" in
--docker)
echo 'Testing using Docker as the container engine'
CONTAINER_ENGINE="docker"
shift
;;
--podman)
echo 'Testing using Podman as the container engine'
CONTAINER_ENGINE="podman"
shift
;;
--h | --he | --hel | --help)
echo $"Usage: testall.sh [OPTION]
--help print this help and exit
--podman use Podman as the container engine
--docker use Docker as the container engine
"
exit 0
;;
--) # stop option processing
shift; break
;;
-*)
echo >&2 'testall.sh' $"unrecognized option" "\`$1'"
echo >&2 $"Try \`testall.sh --help' for more information."
exit 1
;;
*)
break
;;
esac
done
for distro in ${DISTROLIST}; do
build_one "${distro}"
done
for distro in ${DISTROLIST}; do
test_one "${distro}"
done
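# Example invocations (sketches; the chosen engine must already be installed):
#   ./testall.sh             # uses the default engine (podman)
#   ./testall.sh --docker    # builds and tests every distro with docker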
| true |
6898b6daeed953089b5bfa82d5cac93c6bb4a44b
|
Shell
|
agileek/docker-tips
|
/cmd/docker-run-all.sh
|
UTF-8
| 588 | 2.71875 | 3 |
[] |
no_license
|
#!/bin/bash
sudo rm -rf $PWD/dataFolder-bad-cmd
sudo rm -rf $PWD/dataFolder-good-cmd
docker rm docker-stop-test-bad-container
docker rm docker-stop-test-good-container
docker run --name docker-stop-test-bad-container -v $PWD/dataFolder-bad-cmd:/dataFolder -d docker-stop-test-bad
docker run --name docker-stop-test-good-container -v $PWD/dataFolder-good-cmd:/dataFolder -d docker-stop-test-good
sleep 5
docker stop docker-stop-test-bad-container
docker stop docker-stop-test-good-container
echo VERIFY in $PWD/dataFolder-bad-cmd and $PWD/dataFolder-good-cmd if data has been written
| true |
5b8717b9c6f7f96fa1214181e186b6f8d391377b
|
Shell
|
danielkza/zfs-scripts
|
/post_debootstrap_cleanup.sh
|
UTF-8
| 319 | 3.5 | 4 |
[] |
no_license
|
#!/bin/bash
if [ $# -lt 1 ]; then
echo "Usage: $0 target_path"
exit 1
fi
target="$1"
if [ -z "$target" ] || ! [ -d "$target" ]; then
echo "Invalid target dir '$target'"
exit 1
fi
rm "${target}/usr/sbin/policy-rc.d"
for path in boot/efi boot sys proc dev/pts dev; do
umount "${target}/${path}"
done
| true |
46e1d4b42f5c357f17519f6e2e8b41285d6a5443
|
Shell
|
infowolfe/infowolfe.github.io
|
/arch_chroot.sh
|
UTF-8
| 1,487 | 3.359375 | 3 |
[] |
no_license
|
#!/bin/bash
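# Usage sketch (an assumption: each argument is a GitHub username whose public
# SSH keys are fetched below), e.g.
#   ./arch_chroot.sh someuser anotheruser   # usernames are hypothetical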
# setup automatic ssh auth for root
mkdir -p /root/.ssh
for i in $@ ; do
curl -s https://github.com/${i}.keys
done > /root/.ssh/authorized_keys
chmod 0700 /root/.ssh ; chmod 0600 /root/.ssh/authorized_keys
# setup networking and hostname
for i in $(ip link | grep '^[0-9]' | awk -F: '{print $2}' | grep -v 'lo'); do
cat << EOF > /etc/systemd/network/${i}.network
[Match]
Name=${i}
[Network]
DHCP=ipv4
EOF
done
# set temporary hostname
hostname=archvm
hostnamectl set-hostname ${hostname}
echo ${hostname} > /etc/hostname
echo -e "127.0.0.1\t${hostname}" >> /etc/hosts
# set localtime to EST5EDT
ln -sf /usr/share/zoneinfo/EST5EDT /etc/localtime
# start necessary services
for i in sshd systemd-networkd ; do
systemctl enable ${i}
done
# write out hostname script for first login
curl -s -o /etc/profile.d/firstrun.sh https://infowolfe.github.io/arch_firstrun.sh
# setup locale
sed -i -e 's~^#en_US~en_US~' /etc/locale.gen
locale-gen
# setup automatic ssh auth for orchard
useradd -m -k /etc/skel orchard
cp -a /root/.ssh ~orchard/.ssh
chown -R orchard:orchard ~orchard/
# add orchard to sudoers
echo "orchard ALL=(ALL) NOPASSWD: ALL" > /etc/sudoers.d/orchard
# tell user what our current ip is
sed -i -e 's~^Arch.*~& \\4~' /etc/issue
# install meteor as orchard
su - orchard -c "curl https://install.meteor.com/ | sh"
# setup boot
mkinitcpio -p linux
grub-install /dev/sda
grub-mkconfig -o /boot/grub/grub.cfg
# clean up after ourselves
rm /root/arch_chroot.sh
| true |
c64a6d64ebf22bd2d42192ae35e6a86d4e36128e
|
Shell
|
Ivan-zhang01/mysqldumper
|
/lib/func.sh
|
UTF-8
| 3,527 | 4.15625 | 4 |
[] |
no_license
|
# Pretty print to STDOUT
message(){
# Message
MESG=$1
case $2 in
'info')
COLOR="\e[0;33m";;
'alert')
COLOR="\e[0;31m";;
'mesg')
COLOR="\e[0;32m";;
*)
COLOR="\e[0;37m";;
esac
printf "$COLOR%b \e[0m\n" "$MESG"
}
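# Usage example (illustrative): prints the message in the matching color.
#   message "Starting backup" "info"   # yellow
#   message "Backup failed" "alert"    # red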
# Log file
log() {
# Log Message
logMessage=$1
logdate=`date +%b\ %d\ %T`
hostname=`hostname`
mydirname=`dirname $0`
myscript=`basename $0`
echo $logdate $hostname $myscript : $logMessage >> $LOGFILE
}
# MySQL Status
mySQLStatus(){
# Check PID file
case $SYSTEM in
'osx' )
# Hostname
HOST_NAME=$(hostname)
# MySQL PID file
MYSQLPID="/usr/local/var/mysql/${HOST_NAME}.pid"
;;
'linux')
# PID File
MYSQLPID="/var/run/mysqld/mysqld.pid"
;;
*)
message "System is not set, See [ $CONFDIR/config.sh ]" "alert"
exit
;;
esac
# Check: MySQL Server must be running
if [[ ! -f $MYSQLPID ]]; then
message "MySQL Server is not running" "alert"
log "MySQL Server is not running"
exit
fi
}
# Check Root password is set
mySQLRootPassword(){
# Check if root password is not set or it set to password01
#if [[ -z $MYSQLROOTPASSWD || $MYSQLROOTPASSWD =~ ^password.*$ ]]; then
if [[ -z $MYSQLROOTPASSWD ]]; then
message "MySQL Root Password Is Not Set, See [ ${CONFDIR}/config.sh ]" "alert"
log "MySQL Root Password Is Not Set, See [ ${CONFDIR}/config.sh ] "
exit
fi
}
# Clean up Databases
dbCleanUp() {
# Days in SEC
# ( 60 * 60 ) * ( 24 * $DAYS )
# (sec * min) * ( 1day * Days )
DAYSINSEC=$(( ( 60 * 60 ) * ( 24 * $DAYS ) ))
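# Worked example: with DAYS=7 this is 3600 * 168 = 604800 seconds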
# Give user Feedback
message "Cleaning Up Databases Older Than: ${DAYS} Days" "info"
log "Cleaning Up Databases Older Than: ${DAYS} Days"
# Find ALL dumped db
FINDDB=$(find ${DUMP} -type f -iname '*sql' | tr "\n" "|")
# Load FINDDB into array
IFS="|" read -a finddbs <<< "$FINDDB"
# iterate through finddbs array
for i in "${!finddbs[@]}"; do
# Get File name only
DB=$(basename "${finddbs[$i]}")
# Explode DB name to get unix timestamp
IFS="." read -a db <<< "$DB"
# DUMP TIME
DUMPTIME="${db[1]}"
# If the database is older than $DAYS days, remove it
if [[ $(( $UNIXTIME - $DUMPTIME )) -gt $DAYSINSEC ]]; then
message "Removing: ${finddbs[$i]}" "alert"
log "Removing: ${finddbs[$i]}"
# Remove databases older than $DAYS days
rm -rf "${finddbs[$i]}"
fi
done
}
# Dump Databases
dbDumper() {
# SQL: Get All Databases
SQL="SHOW DATABASES;"
# Get All Databases
DATABASES=$(echo $SQL | mysql -u $MYSQLROOTUSER -p"$MYSQLROOTPASSWD" 2> /dev/null | tr "\n" "|")
# Load databases into an array
IFS="|" read -a dbs <<< "$DATABASES"
# Give user Feedback
echo ""
message "Dumping Databses." "info"
# Irrtiate thought the dbs array
for i in "${!dbs[@]}"; do
case "${dbs[$i]}" in
"Database" ) ;;
"information_schema" ) ;;
"performance_schema" ) ;;
"mysql" ) ;;
"test" ) ;;
*)
# DB Name
DB="${dbs[$i]}"
# Dump Filename
DUMPFILENAME="${PREFIX}.${UNIXTIME}.${DB}.sql"
# DB DIR
DBDIR="${DUMP}/${DB}"
# Separate each DB into its own dir
mkdir -p $DBDIR
# Only Dump user dbs
message "Dumping Database: [ ${DB} ] [ $DUMPFILENAME ]" "mesg"
log "Dumping Database: [ ${DB} ] [ $DUMPFILENAME ]"
# Dump DB
mysqldump -u $MYSQLROOTUSER -p"$MYSQLROOTPASSWD" $DB > "${DBDIR}/${DUMPFILENAME}" 2> /dev/null
;;
esac
done
echo ""
}
| true |
d0f665a881134aff56b2dc669e87ee4c9066884d
|
Shell
|
GGCCoder/Linux
|
/Bash/test/ex1_5.sh
|
UTF-8
| 121 | 2.875 | 3 |
[] |
no_license
|
# test `and` `or` `not`
if test -e "./notFile" -o -d "/etc"
then
echo "At least exist one"
else
echo "Not exist!"
fi
| true |
b0de7f4b021aa91882b0dc037b97c2dbf47f6dd7
|
Shell
|
crouchr/learnage
|
/environments/cicd/br2020/setup.sh
|
UTF-8
| 696 | 3.25 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
# This script is running on the VM itself
# Files on the Host can be accessed via the /vagrant share
# This script is only used for debugging, i.e. when you want to bypass chef-solo
# e.g. checking that shares are mounted OK etc
set -e # bomb out if any problem
echo
echo 'Started setup.sh for provisioning this node'
echo 'Contents of project root /vagrant'
cd /vagrant
ls -laF
cat dna.json
echo 'Chef recipes (development)'
cd /vagrant/learnage/environments/dev/br2020/cookbooks/blackrain/recipes
ls -laF
echo 'Contents of learnage folder'
cd /vagrant/learnage/chef-repo/.chef/
ls -laF
cat solo.rb
cd /vagrant/chef
echo 'Finished setup.sh for provisioning this node'
| true |
765cae8e5b8e747d527ebb976f08e68e0e1ae32c
|
Shell
|
neerfri/asdf-terraform
|
/bin/install
|
UTF-8
| 1,353 | 4.34375 | 4 |
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
set -e
set -o pipefail
ASDF_INSTALL_TYPE=${ASDF_INSTALL_TYPE:-version}
TMPDIR=${TMPDIR:-/tmp}
[ -n "$ASDF_INSTALL_VERSION" ] || (>&2 echo 'Missing ASDF_INSTALL_VERSION' && exit 1)
[ -n "$ASDF_INSTALL_PATH" ] || (>&2 echo 'Missing ASDF_INSTALL_PATH' && exit 1)
install_terraform() {
local install_type=$1
local version=$2
local install_path=$3
local bin_install_path="$install_path/bin"
local download_url="$(get_download_url $version)"
local tmp_dir="$(mktemp -d "${TMPDIR%/}"/asdf-terraform.XXXX)"
local download_path="$tmp_dir/$(get_filename $version)"
echo "Downloading terraform from ${download_url}"
curl "$download_url" -o "$download_path"
echo "Creating bin directory"
mkdir -p "${bin_install_path}"
echo "Cleaning previous binaries"
rm -Rf "${bin_install_path}/terraform"* 2>/dev/null || true
echo "Extract archive"
unzip -d "${bin_install_path}" "${download_path}"
}
get_arch() {
uname | tr '[:upper:]' '[:lower:]'
}
get_filename() {
local version="$1"
local platform="$(get_arch)"
echo "terraform_${version}_${platform}_amd64.zip"
}
get_download_url() {
local version="$1"
local filename="$(get_filename $version)"
echo "https://releases.hashicorp.com/terraform/${version}/${filename}"
}
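# Illustrative result (the version number is hypothetical): on Linux,
#   get_download_url 0.12.0
# echoes https://releases.hashicorp.com/terraform/0.12.0/terraform_0.12.0_linux_amd64.zip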
install_terraform $ASDF_INSTALL_TYPE $ASDF_INSTALL_VERSION $ASDF_INSTALL_PATH
| true |
7fc4a1647f78be3f5dc5328b1f6de5d00760af43
|
Shell
|
alexey-larionov/wecare_ampliseq_nfe
|
/s01_hpc_scripts_and_logs/analysis4/s06_clean_bams/s02_check_bams_validation.sh
|
UTF-8
| 2,716 | 3.578125 | 4 |
[] |
no_license
|
#!/bin/bash
# s02_check_bams_validation.sh
# Started: Alexey Larionov, 29Jun2018
# Last updated: Alexey Larionov, 14Feb2019
# Use:
# ./s02_check_bams_validation.sh &> s02_check_bams_validation.log
# or
# sbatch s03_check_bams_validation.sh
# ------------------------------------ #
# sbatch instructions #
# ------------------------------------ #
#SBATCH -J s02_check_bams_validation
#SBATCH -A TISCHKOWITZ-SL2-CPU
#SBATCH -p skylake
#SBATCH --mail-type=ALL
#SBATCH --no-requeue
#SBATCH --nodes=1
#SBATCH --time=01:00:00
#SBATCH --output=s02_check_bams_validation.log
#SBATCH --qos=INTR
#SBATCH --ntasks=2
## Modules section (required, do not remove)
. /etc/profile.d/modules.sh
module purge
module load rhel7/default-peta4
## Set initial working folder
cd "${SLURM_SUBMIT_DIR}"
## Report settings and run the job
echo "Job id: ${SLURM_JOB_ID}"
echo "Job name: ${SLURM_JOB_NAME}"
echo "Allocated node: $(hostname)"
echo "Time: $(date)"
echo ""
echo "Initial working folder:"
echo "${SLURM_SUBMIT_DIR}"
echo ""
echo "------------------ Output ------------------"
echo ""
# ---------------------------------------- #
# job #
# ---------------------------------------- #
# Stop at runtime errors
set -e
# Start message
echo "Check results of BAMs validation"
date
echo ""
# Folders
base_folder="/rds/project/erf33/rds-erf33-medgen"
data_folder="${base_folder}/users/alexey/wecare/wecare_ampliseq/analysis4/ampliseq_nfe/data_and_results"
clean_bam_folder="${data_folder}/s06_clean_bam/bam"
passed_samples="${data_folder}/s06_clean_bam/passed_samples.txt"
failed_samples="${data_folder}/s06_clean_bam/failed_samples.txt"
# Progress report
echo "clean_bam_folder: ${clean_bam_folder}"
echo ""
# Make list of source bam files
cd "${clean_bam_folder}"
clean_bam_files=$(ls *_fixmate_sort_rg.bam)
# Make list of samples
samples=$(sed -e 's/_fixmate_sort_rg.bam//g' <<< "${clean_bam_files}")
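# e.g. a hypothetical file 103_S1_fixmate_sort_rg.bam yields sample name 103_S1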
echo "Detected $(wc -w <<< ${samples}) cleaned bam files in the folder"
# Initialise samples counters
chk=0
pass=0
fail=0
# For each sample
for sample in ${samples}
do
# Get log file name
cleanup_log="${clean_bam_folder}/${sample}_cleanup.log"
# Check validation and increment pass or fail counter
if grep -q "No errors found" "${cleanup_log}"
then
pass=$(( ${pass} + 1 ))
echo "${sample}" >> "${passed_samples}"
else
fail=$(( ${fail} + 1 ))
echo "${sample}" >> "${failed_samples}"
fi
chk=$(( ${chk} + 1 ))
#echo -ne "Checked: ${chk}"\\r
done # next sample
# Print result
echo "Checked samples: ${chk}"
echo "Passed samples: ${pass}"
echo "Failed samples: ${fail}"
# Completion message
echo ""
echo "Done all tasks"
date
echo ""
| true |
2e4c12fdbf1299b8432f36265f55a311b9951301
|
Shell
|
stimko68/settings
|
/bin/artifactory-upload
|
UTF-8
| 1,960 | 4.4375 | 4 |
[] |
no_license
|
#!/usr/bin/env bash
# Uploads a given package to Artifactory
set -euo pipefail
ART_URL=http://artifactory.int.datarobot.com/artifactory/
function usage() {
echo "Usage: artifactory-upload <filename> <repo_path>"
echo ""
echo "The first argument is the filename to upload, and it can be a relative path."
echo "The second argument is the remote path in Artifactory to upload the file to,"
echo "not including the filename. The remote path should include the name of the"
echo "repository."
echo ""
echo "This script expects that your Artifactory API key is set to the "
echo "ARTIFACTORY_API_KEY environment variable. You cannot upload files without using "
echo "your API key. To get or create your API key log into Artifactory, click on your "
echo "username, then select Profile. You will be able to generate an API key or see"
echo "your existing key there."
}
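# Example (file name and repo path are hypothetical):
#   artifactory-upload ./myapp-1.0.tar.gz libs-release-local/com/example/myapp/1.0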
if [[ "$1" == "" ]] || [[ "$2" == "" ]]; then
usage
exit 1
fi
if [[ "${ARTIFACTORY_API_KEY}" == "" ]]; then
echo "ERROR: No Artifactory API key found! Get it from the profile page in the Artifactory UI"
echo "and then assign it to the ARTIFACTORY_API_KEY environment variable."
exit 1
fi
FILE=$1
REPO_PATH=$2/$(basename "${FILE}")
echo "Uploading ${FILE} to Artifactory remote path ${ART_URL}${REPO_PATH}"
echo "Generating Checksums..."
if [[ $(uname) == 'Darwin' ]]; then
MD5=$(md5 "${FILE}" | awk '{print $4}')
SHA1=$(shasum "${FILE}" | awk '{print $1}')
SHA256=$(shasum -a 256 "${FILE}" | awk '{print $1}')
elif [[ $(uname) == 'Linux' ]]; then
MD5=$(md5sum "${FILE}" | awk '{print $1}')
SHA1=$(sha1sum "${FILE}" | awk '{print $1}')
SHA256=$(sha256sum "${FILE}" | awk '{print $1}')
fi
echo "Uploading file..."
curl -X PUT -H "X-JFrog-Art-Api:${ARTIFACTORY_API_KEY}" -H "X-Checksum-Md5:${MD5}" -H "X-Checksum-Sha1:${SHA1}" -H "X-Checksum-Sha256:${SHA256}" "${ART_URL}${REPO_PATH}" -T "${FILE}"
| true |
4c83fb7e56b95b695f0d0ba217f59af5f943976e
|
Shell
|
BR-ERP/freedom
|
/freedom/lib/exec/lin/freedompcp.sh
|
UTF-8
| 174 | 2.609375 | 3 |
[] |
no_license
|
EXEDIR=${0%/*}
CMDENV="${EXEDIR}/freedomenv.sh"
FREEDOMMD="freedompcp"
FREEDOMCL="org.freedom.modulos.pcp.FreedomPCP"
CMDFREEDOM=$($CMDENV $FREEDOMMD $FREEDOMCL)
$CMDFREEDOM
| true |
249c2025daad3b1df22489a53190db979919f58a
|
Shell
|
bersling/divi
|
/missing-files/run.sh
|
UTF-8
| 115 | 2.796875 | 3 |
[] |
no_license
|
#!/bin/bash
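# Usage sketch (assumptions: servers.txt holds one IP per line and $1 is a
# local script to run on each host), e.g.
#   ./run.sh update.sh   # script name is hypothetical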
while IFS= read -r ip
do
server=root@${ip}
ssh ${server} ip=${ip} 'bash -s' < $1
done < "servers.txt"
| true |
67674f808e8d24bf191fb42ad5624adffa7d64c0
|
Shell
|
epj009/rc
|
/bin/cron.daily/andr-build
|
UTF-8
| 339 | 3.078125 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
su -c '
build_path="/home/andrew/andr-build"
[ -d "$build_path" ] || exit 1
pushd "$build_path"
for i in 1/1/{7,9}/{1,3}; do
[ -d "$i" ] || continue
pushd "$i"
for j in *; do
[ -d "$j" ] || continue
pushd "$j"
p=(`echo "$i" | sed "s/\// /g"`)
andr-build "${p[@]}"
popd
done
popd
done
popd
' - andrew
| true |
60c21282dd0ea8328bda5f4a986b6cc6372b7abb
|
Shell
|
ketankotian/UserRegistration
|
/userRegisteration.sh
|
UTF-8
| 1,466 | 3.421875 | 3 |
[] |
no_license
|
#!/bin/bash -x
echo " welcome "
#User Needs To Enter Valid First Name
read -p "Enter your first name - " firstName
pattern="^[A-Z]{1}[a-zA-Z]{2,}$";
if [[ $firstName =~ $pattern ]]
then
echo "valid"
else
echo "invalid"
fi
#User Needs To Enter Valid Last Name
read -p "Enter your last name - " lastName
pattern1="^[A-Z]{1}[a-zA-Z]{2,}$";
if [[ $lastName =~ $pattern1 ]]
then
echo "valid"
else
echo "invalid"
fi
#As A User Enter A Valid Email
read -p "Enter a valid email id - " email
email_pattern="^[a-zA-Z]{3}[0-9a-zA-Z\.]*@[a-z]*\.(com|co|in)$";
if [[ $email =~ $email_pattern ]]
then
echo "valid"
else
echo "invalid"
fi
#To Follow Pre-Defined Mobile Number
read -p "Enter your mobile number - " mobileNumber
mobile_number_pattern="^(91\ )[6-9]{1}[0-9]{9}$";
if [[ $mobileNumber =~ $mobile_number_pattern ]]
then
echo "valid"
else
echo "invalid"
fi
#To Accept Exactly One Special Character
#Bash's =~ uses POSIX ERE, which has no lookaheads, so each rule is checked separately
read -p "Enter your password - " password
specials=$(tr -dc '$?#@%' <<< "$password")
if [[ ${#password} -ge 8 && $password =~ [a-z] && $password =~ [A-Z] && $password =~ [0-9] && ${#specials} -eq 1 ]]
then
echo "valid"
else
echo "invalid"
fi
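#e.g. "Passw0rd@" is valid: length 9, upper, lower, digit, and exactly one special character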
#To Check All Email Samples Provided
read -p "Enter a email address to check if its valid or not - " sampleEmails
regex_pattern="^[a-zA-Z]{3}[0-9a-zA-Z\.\_\-\+]*\@[a-z0-9]*\.(com\.au|com|net|in)$"
if [[ $sampleEmails =~ $regex_pattern ]]
then
echo "valid"
else
echo "invalid"
fi
| true |
98c667e2cbb1e083ff3ea51f985c726f041cd3d3
|
Shell
|
cbohara/99-Problems-But-A-Glitch-Aint-One
|
/shellScripts/send_job_hist_output_to_dd.sh
|
UTF-8
| 1,599 | 3.875 | 4 |
[] |
no_license
|
#!/bin/bash
# Downloads the json jobhist logs from S3_INPUT_DIR
# and sends all counters to datadog
### GLOBAL VARS HERE (To be parameterized)
S3_INPUT_DIR=s3://verve-home/leeblackwell/vervathon/jhistlogs
TMP_DIR=tmp_$(date +%s)
AWS_PROFILE=verve-ops-data
PYPATH=../python
mkdir ${TMP_DIR}
# Pull down the logs
aws s3 --profile ${AWS_PROFILE} cp --recursive ${S3_INPUT_DIR}/ ${TMP_DIR}
# Loop through and send counters to Datadog
for file in ${TMP_DIR}/*.json; do
fn=$(echo $file | awk -F'/' '{print $NF}')
emr_name=$(echo $fn | cut -d'_' -f 1)
cluster_id=$(echo $fn | cut -d'_' -f 2)
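# e.g. a hypothetical file myjob_j-1ABC2DEF_jobhist.json gives
# emr_name=myjob and cluster_id=j-1ABC2DEF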
jq '.event | to_entries[]' $file > ${TMP_DIR}/entries
jobid=$(jq '.value.jobid' ${TMP_DIR}/entries)
finishtime=$(jq '.value.finishTime' ${TMP_DIR}/entries)
finishedmaps=$(jq '.value.finishedMaps' ${TMP_DIR}/entries)
finishedreduces=$(jq '.value.finishedReduces' ${TMP_DIR}/entries)
failedmaps=$(jq '.value.failedMaps' ${TMP_DIR}/entries)
failedreduces=$(jq '.value.failedReduces' ${TMP_DIR}/entries)
jq '.value.totalCounters.groups[].counts[]' ${TMP_DIR}/entries > ${TMP_DIR}/allcounters
jq -r "[.name , .value] | @csv" ${TMP_DIR}/allcounters > ${TMP_DIR}/${emr_name}_${cluster_id}_counters.csv
# read line-by-line so counter names containing spaces stay intact
while IFS= read -r c; do
key=$(echo $c | cut -d',' -f 1)
skey=$(echo $key | tr -d \")
val=$(echo $c | cut -d',' -f 2)
echo "$skey ... $val"
${PYPATH}/DDPush.py --metric-name di.vervathon.test.$skey --metric-value $val
done < ${TMP_DIR}/${emr_name}_${cluster_id}_counters.csv
# this is to simulate time series a bit
sleep 2
done
# Clean up this mess
#rm -r ${TMP_DIR}
| true |
c314478582af1a3e9b562f759968d031f617e440
|
Shell
|
McUsr/Index-2.0
|
/original/part1.sh
|
UTF-8
| 54,067 | 3.1875 | 3 |
[
"LicenseRef-scancode-public-domain"
] |
permissive
|
#! /bin/sh
# This is a shell archive. Remove anything before this line, then unpack
# it by saving it into a file and typing "sh file". To overwrite existing
# files, type "sh file -c". You can also feed this as standard input via
# unshar, or by typing "sh <file", e.g.. If this archive is complete, you
# will see the following message at the end:
# "End of archive 1 (of 2)."
# Contents: MANIFEST Makefile README createdb.c dbfunc.c dbio.c defs.h
# index.1 main.c printdb.c samples samples/books.fmt
# samples/books.idx samples/bphone.idx samples/cdlist.fmt
# samples/cdlist.idx samples/cdwantlist.fmt samples/cdwantlist.idx
# samples/pphone.fmt samples/pphone.idx searchdb.c selectdb.c util.c
# Wrapped by rsalz@papaya.bbn.com on Tue Oct 24 12:09:01 1989
PATH=/bin:/usr/bin:/usr/ucb ; export PATH
if test -f 'MANIFEST' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'MANIFEST'\"
else
echo shar: Extracting \"'MANIFEST'\" \(840 characters\)
sed "s/^X//" >'MANIFEST' <<'END_OF_FILE'
X File Name Archive # Description
X-----------------------------------------------------------
X MANIFEST 1
X Makefile 1
X README 1
X createdb.c 1
X dbfunc.c 1
X dbio.c 1
X defs.h 1
X index.1 1
X main.c 1
X printdb.c 1
X samples 1
X samples/books.fmt 1
X samples/books.idx 1
X samples/bphone.idx 1
X samples/cdlist.fmt 1
X samples/cdlist.idx 1
X samples/cdwantlist.fmt 1
X samples/cdwantlist.idx 1
X samples/pphone.fmt 1
X samples/pphone.idx 1
X screen.c 2
X searchdb.c 1
X selectdb.c 1
X util.c 1
END_OF_FILE
if test 840 -ne `wc -c <'MANIFEST'`; then
echo shar: \"'MANIFEST'\" unpacked with wrong size!
fi
# end of 'MANIFEST'
fi
if test -f 'Makefile' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'Makefile'\"
else
echo shar: Extracting \"'Makefile'\" \(928 characters\)
sed "s/^X//" >'Makefile' <<'END_OF_FILE'
X#
X# $Header: /u5/davy/progs/index/RCS/Makefile,v 1.1 89/08/09 11:06:00 davy Exp $
X#
X# Makefile for the "index" program.
X#
X# David A. Curry
X# Research Institute for Advanced Computer Science
X# Mail Stop 230-5
X# NASA Ames Research Center
X# Moffett Field, CA 94035
X# davy@riacs.edu
X#
X# $Log: Makefile,v $
X# Revision 1.1 89/08/09 11:06:00 davy
X# Initial revision
X#
X#
XCFLAGS= -O
XLIBS= -lcurses -ltermcap
X
XSRCS= createdb.c dbfunc.c dbio.c main.c printdb.c screen.c \
X searchdb.c selectdb.c util.c
XOBJS= createdb.o dbfunc.o dbio.o main.o printdb.o screen.o \
X searchdb.o selectdb.o util.o
X
Xindex: $(OBJS)
X $(CC) $(CFLAGS) -o index $(OBJS) $(LIBS)
X
Xclean:
X rm -f a.out core index $(OBJS) \#*
X
Xcreatedb.o: createdb.c defs.h
Xdbfunc.o: dbfunc.c defs.h
Xdbio.o: dbio.c defs.h
Xmain.o: main.c defs.h
Xprintdb.o: printdb.c defs.h
Xscreen.o: screen.c defs.h
Xsearchdb.o: searchdb.c defs.h
Xselectdb.o: selectdb.c defs.h
Xutil.o: util.c defs.h
END_OF_FILE
if test 928 -ne `wc -c <'Makefile'`; then
echo shar: \"'Makefile'\" unpacked with wrong size!
fi
# end of 'Makefile'
fi
if test -f 'README' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'README'\"
else
echo shar: Extracting \"'README'\" \(3220 characters\)
sed "s/^X//" >'README' <<'END_OF_FILE'
X August 9, 1989
X
XThis is "index", Version 1.0.
X
XOver the years, I've accumulated a number of files in my directory which
Xhold lists of various things. I have a list of business addresses, no
Xless than three personal address and phone lists, a list of all the books
XI own, a list of my compact discs, and so on. Each of these files has a
Xdifferent format, has to be maintained manually with a text editor, can
Xonly be searched with "grep", and is difficult to keep nicely sorted.
X
XWell, I got sick and tired of this. So, I sat down and started hacking,
Xand came up with "index". Index allows you to maintain multiple
Xdatabases of textual information, each with a different format. With
Xeach database, index allows you to:
X
X - add entries
X - delete entries
X - edit existing entries
X - search for entries using full regular expressions
X - control what parts of an entry are searched
X - print out entries matching a pattern
X - run all or part of the database through an arbitrary
X formatting program
X
XThe databases index maintains are stored as simple lines of text. Each
Xfield of an entry is a line of text, and each entry in the database is
Xmade up of a fixed number of lines. For each database, you tell index
Xwhat each field's (line's) name is. You can have multiple-line fields by
Xleaving that field's name blank. There's no fancy storage algorithm,
Xthings are just stored sequentially. But for the biggest database I've
Xgot, about 500 5-line entries, performance is just fine.
X
XIndex uses the Berkeley curses library. It has been tested on a Sun-3
Xunder SunOS 4.0.1 (4.3BSD curses) and on a Sequent Balance under Dynix
X3.0.14 (4.2BSD curses). It should be fairly easy to port to System V -
Xyou'll need to modify screen.c to resolve any curses incompatibilities,
Xand you'll need to change the calls to the regular expression library
Xroutines in searchdb.c to use the System V style routines.
X
XTo compile index, just say "make". Then make a directory in your home
Xdirectory called ".index" (or set the environment variable INDEXDIR to
Xpoint somewhere else). Now, just say "index", and you can create your
Xfirst database description file. The directory "samples" contains some
Xsample database description files and formatting scripts:
X
X books.idx - for keeping a list of books sorted by author.
X books.fmt - formats the database into a tbl input file
X which prints in two columns in landscape
X mode.
X
X bphone.idx - for a business phone/address database.
X
X cdlist.idx - for keeping a list of compact discs.
X cdlist.fmt - formats the database into a wide landscape-mode
X tbl input file.
X
X pphone.idx - for keeping a personal phone/address database
X with home and work addresses and phone numbers
X for your friends.
X pphone.fmt - formats the database into a troff file which
X lists each person along with their home and
X work addresses. Useful for keeping by the
X phone or sending Xmas cards..
X
XSince moving all my lists into the index program, I've found things a lot
Xeasier to keep track of. I hope you'll find it as useful as I do.
X
XDavid A. Curry
XResearch Institute for Advanced Computer Science
XMail Stop 230-5
XNASA Ames Research Center
XMoffett Field, CA 94035
X
END_OF_FILE
if test 3220 -ne `wc -c <'README'`; then
echo shar: \"'README'\" unpacked with wrong size!
fi
# end of 'README'
fi
if test -f 'createdb.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'createdb.c'\"
else
echo shar: Extracting \"'createdb.c'\" \(3325 characters\)
sed "s/^X//" >'createdb.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/createdb.c,v 1.1 89/08/09 11:06:21 davy Exp $";
X#endif
X/*
X * createdb.c - handle creating a new database.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: createdb.c,v $
X * Revision 1.1 89/08/09 11:06:21 davy
X * Initial revision
X *
X */
X#include <sys/param.h>
X#include <curses.h>
X#include <stdio.h>
X#include "defs.h"
X
X/*
X * The message we'll print to explain what's happening.
X */
Xstatic struct message {
X char *m_line;
X char *m_arg;
X} message[] = {
X { "You will now be placed into an editor so that you can create the\n",
X 0 },
X { "database description file. This file will be used by the program\n",
X 0 },
X { "to prompt you for new items to be inserted into the database.\n",
X 0 },
X { "\n",
X 0 },
X { "Each line in this file is the name of a field. It may be as long as\n",
X 0 },
X { "you want, and may contain spaces. The order of the lines in the file\n",
X 0 },
X { "is the order you will be prompted for new information when inserting\n",
X 0 },
X { "into the database. You may leave blank lines in the file; this allows\n",
X 0 },
X { "multiple-line entries for items such as addesses. You may have a\n",
X 0 },
X { "total of %d lines in the file.\n",
X (char *) MAXDBLINES },
X { "\n",
X 0 },
X { "By default, all lines in an entry will be examined when searching for\n",
X 0 },
X { "a pattern. To make the program ignore a line, start that line with an\n",
X 0 },
X { "exclamation point (!).\n",
X 0 },
X { "\n",
X 0 },
X { "The database is always sorted into ASCII collating sequence based on\n",
X 0 },
X { "the contents of the first field.\n",
X 0 },
X { "\n",
X 0 },
X { "When you are finished, save the file and exit the editor. You will\n",
X 0 },
X { "then be placed in the main menu, where you can select other operations\n",
X 0 },
X { "on the database.\n",
X 0 },
X { NULL, 0 }
X};
X
X/*
X * create_db - execute an editor to allow the person to create the
X * index definition file.
X */
Xcreate_db(dbname)
Xchar *dbname;
X{
X int pid;
X char *editor;
X char *getenv();
X register int row;
X char buf[BUFSIZ], fname[MAXPATHLEN];
X
X /*
X * Clear the screen and move to the top.
X */
X clear();
X move(0, 0);
X
X /*
X * Print out the explanatory message.
X */
X for (row=0; message[row].m_line != NULL; row++)
X printw(message[row].m_line, message[row].m_arg);
X
X /*
X * Give the user a chance to read it. Wait till they
X * type a carriage return before proceeding.
X */
X prompt_char(++row, 0, "Type RETURN to continue: ", "\n");
X
X /*
X * Use the editor the user prefers, or EDITOR if
X * he doesn't have anything set.
X */
X if ((editor = getenv("EDITOR")) == NULL)
X editor = EDITOR;
X
X /*
X * Construct the file name.
X */
X sprintf(fname, "%s/%s%s", dbasedir, dbname, IDXFILE_SUFFIX);
X
X /*
X * Go back to normal tty modes.
X */
X reset_modes();
X
X /*
X * Spawn a child process.
X */
X if ((pid = fork()) < 0) {
X error("%s: cannot fork.\n", pname, 0, 0);
X exit(1);
X }
X
X /*
X * Execute the editor.
X */
X if (pid == 0) {
X execl(editor, editor, fname, 0);
X perror(editor);
X exit(1);
X }
X
X /*
X * Wait for the editor to finish.
X */
X while (wait((int *) 0) != pid)
X ;
X
X /*
X * Set the tty modes up again.
X */
X set_modes();
X}
END_OF_FILE
if test 3325 -ne `wc -c <'createdb.c'`; then
echo shar: \"'createdb.c'\" unpacked with wrong size!
fi
# end of 'createdb.c'
fi
if test -f 'dbfunc.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'dbfunc.c'\"
else
echo shar: Extracting \"'dbfunc.c'\" \(3726 characters\)
sed "s/^X//" >'dbfunc.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/dbfunc.c,v 1.1 89/08/09 11:06:31 davy Exp $";
X#endif
X/*
X * dbfunc.c - database functions selected from the main menu.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: dbfunc.c,v $
X * Revision 1.1 89/08/09 11:06:31 davy
X * Initial revision
X *
X */
X#include <curses.h>
X#include <stdio.h>
X#include "defs.h"
X
X/*
X * add_entry - add an entry to the database.
X */
Xadd_entry()
X{
X register int i;
X struct dbfile *realloc();
X
X /*
X * Search for an empty entry in the array.
X */
X for (i=0; i < dbsize; i++) {
X /*
X * Found one; use it.
X */
X if ((db[i].db_flag & DB_VALID) == 0) {
X /*
X * Clear out any old junk.
X */
X bzero(&db[i], sizeof(struct dbfile));
X
X /*
X * Let the user edit the entry.
X */
X if (edit_entry(&db[i], "new")) {
X /*
X * Mark it valid, mark the database
X * as modified, and increase the
X * number of entries.
X */
X db[i].db_flag |= DB_VALID;
X dbmodified = 1;
X dbentries++;
X
X /*
X * Sort the array, to get this
X * entry into its proper place.
X */
X qsort(db, dbentries, sizeof(struct dbfile),
X dbsort);
X }
X
X return;
X }
X }
X
X /*
X * Didn't find an empty slot, so we have to allocate
X * some more.
X */
X dbsize *= 2;
X
X if ((db = realloc(db, dbsize * sizeof(struct dbfile))) == NULL) {
X error("%s: out of memory.\n", pname, 0, 0);
X exit(1);
X }
X
X bzero(&db[dbentries], sizeof(struct dbfile));
X
X /*
X * Let the user edit the new entry.
X */
X if (edit_entry(&db[dbentries], "new")) {
X /*
X * Mark the entry as valid, mark the
X * database as modified, and increase
X * the number of entries.
X */
X db[dbentries].db_flag |= DB_VALID;
X dbmodified = 1;
X dbentries++;
X
X qsort(db, dbentries, sizeof(struct dbfile), dbsort);
X }
X}
X
X/*
X * del_entry - delete an entry from the database.
X */
Xdel_entry(entry)
Xstruct dbfile *entry;
X{
X char c;
X int x, y;
X
X /*
X * Prompt the user for confirmation.
X */
X getyx(curscr, y, x);
X c = prompt_char(y, 0, "Really delete this entry? ", "YyNn");
X
X /*
X * Return the status of the confirmation.
X */
X switch (c) {
X case 'Y':
X case 'y':
X return(1);
X case 'N':
X case 'n':
X return(0);
X }
X}
X
X/*
X * find_entry - search for entries using a regular expression.
X */
Xfind_entry()
X{
X register int i;
X char pattern[BUFSIZ];
X
X /*
X * Clear the screen and prompt for the pattern to
X * search for.
X */
X clear();
X prompt_str(LINES/2, 0, "Pattern to search for: ", pattern);
X
X /*
X * Search. search_db will set DB_PRINT in the entries
X * which match, and return non-zero if anything matched.
X */
X if (search_db(pattern)) {
X /*
X * Display the entries that matched.
X */
X disp_entries();
X
X /*
X * Clear the DB_PRINT flags.
X */
X for (i=0; i < dbentries; i++)
X db[i].db_flag &= ~DB_PRINT;
X }
X else {
X /*
X * Nothing matched. Tell the user.
X */
X prompt_char(LINES/2, 0,
X "No entries match pattern, type RETURN to continue: ",
X "\n");
X }
X}
X
X/*
X * read_db - run through the database entry by entry.
X */
Xread_db()
X{
X register int i;
X
X /*
X * Sort the database, so we're sure it's in order.
X */
X qsort(db, dbentries, sizeof(struct dbfile), dbsort);
X
X /*
X * Set DB_PRINT in all entries.
X */
X for (i=0; i < dbentries; i++) {
X if (db[i].db_flag & DB_VALID)
X db[i].db_flag |= DB_PRINT;
X }
X
X /*
X * Display the entries.
X */
X disp_entries();
X
X /*
X * Clear DB_PRINT.
X */
X for (i=0; i < dbentries; i++)
X db[i].db_flag &= ~DB_PRINT;
X}
X
X/*
X * save_bye - save the database and exit.
X */
Xsave_bye(dbname)
Xchar *dbname;
X{
X /*
X * Save the database.
X */
X save_db(dbname);
X
X /*
X * Exit.
X */
X byebye();
X}
END_OF_FILE
if test 3726 -ne `wc -c <'dbfunc.c'`; then
echo shar: \"'dbfunc.c'\" unpacked with wrong size!
fi
# end of 'dbfunc.c'
fi
if test -f 'dbio.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'dbio.c'\"
else
echo shar: Extracting \"'dbio.c'\" \(5120 characters\)
sed "s/^X//" >'dbio.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/dbio.c,v 1.1 89/08/09 11:06:36 davy Exp $";
X#endif
X/*
X * dbio.c - database input/output routines.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: dbio.c,v $
X * Revision 1.1 89/08/09 11:06:36 davy
X * Initial revision
X *
X */
X#include <sys/param.h>
X#include <sys/stat.h>
X#include <curses.h>
X#include <stdio.h>
X#include "defs.h"
X
Xstruct dbfile *db; /* array of database entries */
Xstruct idxfile idx; /* description of the database file */
X
Xint dbmodified = 0; /* non-zero if database needs saving */
Xint dbentries, dbsize; /* number of entries, size of db array */
X
X/*
X * read_idxfile - read the database description file.
X */
Xread_idxfile(dbname)
Xchar *dbname;
X{
X FILE *fp;
X register int len;
X char buf[BUFSIZ], idxfile[MAXPATHLEN];
X
X /*
X * Construct the file name.
X */
X sprintf(idxfile, "%s/%s%s", dbasedir, dbname, IDXFILE_SUFFIX);
X
X /*
X * Open the file.
X */
X if ((fp = fopen(idxfile, "r")) == NULL) {
X error("%s: cannot open \"%s\".\n", pname, idxfile, 0);
X exit(1);
X }
X
X /*
X * Zero out the structure.
X */
X bzero(&idx, sizeof(struct idxfile));
X
X /*
X * Read lines from the file.
X */
X while (idx.idx_nlines < MAXDBLINES) {
X /*
X * End of file.
X */
X if (fgets(buf, sizeof(buf), fp) == NULL)
X break;
X
X /*
X * Strip the newline.
X */
X len = strlen(buf) - 1;
X buf[len] = '\0';
X
X /*
X * If the first character is '!', then this line
X * should not participate in searches. Save the
X * stuff after the '!'. Otherwise this line does
X * participate in searches, save the whole line.
X */
X if (*buf == '!') {
X idx.idx_lines[idx.idx_nlines] = savestr(buf+1);
X idx.idx_search[idx.idx_nlines] = 0;
X len--;
X }
X else {
X idx.idx_lines[idx.idx_nlines] = savestr(buf);
X idx.idx_search[idx.idx_nlines] = 1;
X }
X
X /*
X * Increment the number of lines.
X */
X idx.idx_nlines++;
X
X /*
X * Save the length of the longest field name.
X */
X if (len > idx.idx_maxlen)
X idx.idx_maxlen = len;
X }
X
X /*
X * Close the file.
X */
X fclose(fp);
X}
X
X/*
X * read_dbfile - read the database file itself.
X */
Xread_dbfile(dbname)
Xchar *dbname;
X{
X FILE *fp;
X register int i;
X struct dbfile *malloc(), *realloc();
X char buf[BUFSIZ], dbfile[MAXPATHLEN];
X
X /*
X * Allocate some entries in the array. 16 is just an
X * arbitrary number.
X */
X dbsize = 16;
X dbentries = 0;
X
X if ((db = malloc(dbsize * sizeof(struct dbfile))) == NULL) {
X error("%s: out of memory.\n", pname, 0, 0);
X exit(1);
X }
X
X /*
X * Construct the name of the file.
X */
X sprintf(dbfile, "%s/%s%s", dbasedir, dbname, DBFILE_SUFFIX);
X
X /*
X * Open the file.
X */
X if ((fp = fopen(dbfile, "r")) == NULL)
X return;
X
X /*
X * Until we hit end of file...
X */
X while (!feof(fp)) {
X /*
X * If we need to, allocate some more entries.
X */
X if (dbentries >= dbsize) {
X dbsize *= 2;
X db = realloc(db, dbsize * sizeof(struct dbfile));
X
X if (db == NULL) {
X error("%s: out of memory.\n", pname, 0, 0);
X exit(1);
X }
X }
X
X /*
X * Read in one entry at a time.
X */
X for (i = 0; i < idx.idx_nlines; i++) {
X /*
X * If we hit end of file before getting a
X * complete entry, toss this one.
X */
X if (fgets(buf, sizeof(buf), fp) == NULL)
X goto out;
X
X /*
X * Save the length of the line, strip the
X * newline, and save the line.
X */
X db[dbentries].db_lens[i] = strlen(buf) - 1;
X buf[db[dbentries].db_lens[i]] = '\0';
X
X db[dbentries].db_lines[i] = savestr(buf);
X }
X
X /*
X * Mark this entry as valid and increase the
X * number of entries.
X */
X db[dbentries].db_flag = DB_VALID;
X dbentries++;
X }
X
Xout:
X /*
X * Make sure what we've got is sorted.
X */
X qsort(db, dbentries, sizeof(struct dbfile), dbsort);
X
X fclose(fp);
X}
X
X/*
X * save_db - save the database to disk.
X */
Xsave_db(dbname)
Xchar *dbname;
X{
X FILE *fp;
X struct stat st;
X register int i, j;
X char realfile[MAXPATHLEN], bakfile[MAXPATHLEN];
X
X /*
X * If it doesn't need saving, never mind.
X */
X if (!dbmodified)
X return;
X
X /*
X * Create the name of the file and a backup file.
X */
X sprintf(realfile, "%s/%s%s", dbasedir, dbname, DBFILE_SUFFIX);
X sprintf(bakfile, "%s/#%s%s", dbasedir, dbname, DBFILE_SUFFIX);
X
X /*
X * Default creation mode.
X */
X st.st_mode = 0400;
X
X /*
X * If the file already exists, rename it to the
X * backup file name.
X */
X if (stat(realfile, &st) == 0)
X rename(realfile, bakfile);
X
X /*
X * Open the new file.
X */
X if ((fp = fopen(realfile, "w")) == NULL) {
X error("%s: cannot create \"%s\".\n", pname, realfile);
X exit(1);
X }
X
X /*
X * Make sure the database is sorted.
X */
X qsort(db, dbentries, sizeof(struct dbfile), dbsort);
X
X /*
X * Write out the entries.
X */
X for (i=0; i < dbentries; i++) {
X if ((db[i].db_flag & DB_VALID) == 0)
X continue;
X
X for (j=0; j < idx.idx_nlines; j++)
X fprintf(fp, "%s\n", db[i].db_lines[j]);
X }
X
X /*
X * Set the file mode to the mode of the original
X * file. Mark the database as unmodified.
X */
X fchmod(fileno(fp), st.st_mode & 0777);
X dbmodified = 0;
X
X fclose(fp);
X}
END_OF_FILE
if test 5120 -ne `wc -c <'dbio.c'`; then
echo shar: \"'dbio.c'\" unpacked with wrong size!
fi
# end of 'dbio.c'
fi
if test -f 'defs.h' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'defs.h'\"
else
echo shar: Extracting \"'defs.h'\" \(2232 characters\)
sed "s/^X//" >'defs.h' <<'END_OF_FILE'
X/*
X * $Header: /u5/davy/progs/index/RCS/defs.h,v 1.1 89/08/09 11:06:09 davy Exp $
X *
X * defs.h - definitions for the index program.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: defs.h,v $
X * Revision 1.1 89/08/09 11:06:09 davy
X * Initial revision
X *
X */
X#define PATCHLEVEL 0 /* level of patches applied */
X
X#define MAXDBFILES 64 /* max. no. of database files */
X#define MAXDBLINES 16 /* max. no. of fields in dbase */
X
X#define EDITOR "/usr/ucb/vi" /* editor to use when creating */
X
X#define INDEXDIR ".index" /* directory where stuff is */
X#define DBFILE_SUFFIX ".db" /* database file suffix */
X#define FMTFILE_SUFFIX ".fmt" /* format program suffix */
X#define IDXFILE_SUFFIX ".idx" /* index definition suffix */
X
X/*
X * Values for db_flag.
X */
X#define DB_VALID 0x01
X#define DB_PRINT 0x02
X
X/*
X * For 4.2 curses.
X */
X#ifndef cbreak
X#define cbreak() crmode()
X#endif
X#ifndef nocbreak
X#define nocbreak() nocrmode()
X#endif
X
X/*
X * Usually defined in ttychars.h.
X */
X#ifndef CTRL
X#define CTRL(c) ('c' & 037)
X#endif
X
X/*
X * Structure to hold the contents of the index definition.
X */
Xstruct idxfile {
X int idx_maxlen; /* longest field length */
X int idx_nlines; /* number of lines per entry */
X char idx_search[MAXDBLINES]; /* non-zero if field searchable */
X char *idx_lines[MAXDBLINES]; /* strings naming the fields */
X};
X
X/*
X * Structure to hold a database entry.
X */
Xstruct dbfile {
X int db_flag; /* flag, see above */
X int db_lens[MAXDBLINES]; /* lengths of line contents */
X char *db_lines[MAXDBLINES]; /* lines in the entry */
X};
X
Xextern char *pname; /* program name */
X
Xextern int dbsize; /* size of db array */
Xextern int igncase; /* non-zero if -i switch given */
Xextern int verbose; /* non-zero if -v switch given */
Xextern int dbentries; /* number of entries in db */
Xextern int dbmodified; /* non-zero if db needs saving */
X
Xextern char dbasedir[]; /* path to the INDEXDIR */
X
Xextern struct dbfile *db; /* database entries array */
Xextern struct idxfile idx; /* index definition structure */
X
Xchar *savestr();
Xchar *select_db();
X
Xint byebye();
Xint dbsort();
END_OF_FILE
if test 2232 -ne `wc -c <'defs.h'`; then
echo shar: \"'defs.h'\" unpacked with wrong size!
fi
# end of 'defs.h'
fi
if test -f 'index.1' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'index.1'\"
else
echo shar: Extracting \"'index.1'\" \(7844 characters\)
sed "s/^X//" >'index.1' <<'END_OF_FILE'
X.\"
X.\" $Header: /u5/davy/progs/index/RCS/index.1,v 1.1 89/08/09 11:09:42 davy Exp $
X.\"
X.\" David A. Curry
X.\" Research Institute for Advanced Computer Science
X.\" Mail Stop 230-5
X.\" NASA Ames Research Center
X.\" Moffett Field, CA 94035
X.\"
X.\" $Log: index.1,v $
XRevision 1.1 89/08/09 11:09:42 davy
XInitial revision
X
X.\"
X.TH INDEX 1 "27 July 1989" LOCAL
X.SH NAME
Xindex \- maintain simple databases
X.SH SYNOPSIS
Xindex
X[
X.B \-f
X.I filter
X] [
X.B \-i
X] [
X.B \-v
X] [
X.I database
X] [
X.I pattern
X]
X.SH DESCRIPTION
X.PP
X.I Index
Xis used to maintain simple databases such as address lists,
Xlists of books or compact discs,
Xand so on.
XAll databases are stored as simple text files in a single directory.
XBy default,
Xthis directory resides in your home directory,
Xand is called
X.IR \&.index .
XYou can specify a different path name for the directory by setting
Xthe environment variable
X.B \s-1INDEXDIR\s0
Xto the directory's full path name.
X.PP
XWhen invoked with no arguments,
X.I index
Xwill present you with a list of the databases you have,
Xand ask you to select the one you want to work with.
XTo create a new database,
Xsimply type the name of a non-existent database to the prompt.
XThe name of an existing database may also be specified on the command line,
Xbypassing this step and going directly to the main menu.
X.SH CREATING A NEW DATABASE
X.PP
XWhen you specify the name of a non-existent database to the database
Xselection prompt,
Xyou will be placed in a text editor to create the database description
Xfile.
XThis file is simply a list of the field names for the database,
Xone per line.
XYou may have up to 16 fields in each database.
XBlank lines may be used for continuation lines in multiple-line fields,
Xsuch as addresses.
X.PP
XThe database is always sorted by the first field.
XWhen searching the database,
Xthe default is to search all fields for the pattern.
XTo specify that a field should be ignored in searching,
Xyou should place an exclamation point (!) in the first position on
Xthat line.
X.PP
XWhen you have created the database description file,
Xsave the file and exit the editor.
XYou will then be placed in the main menu,
Xwhere you can manipulate the database.
X.SH THE MAIN MENU
X.PP
XThe main menu is the point from which the database can be manipulated.
XThis menu provides you with several commands:
X.IP \fBa\fP
XAdd a new entry to the database.
XYou will be presented with a list of the fields in a database entry,
Xand allowed to fill them in.
XAs you type,
Xcharacters are inserted at the current cursor location.
XThe editing commands available are a subset of those provided by the
X\s-1EMACS\s0 text editor:
X.RS
X.IP \fB^A\fP
XMove the cursor to the beginning of the line.
X.IP \fB^B\fP
XMove the cursor backward one character.
X.IP \fB^D\fP
XDelete the character under the cursor.
X.IP \fB^E\fP
XMove the cursor to the end of the line.
X.IP \fB^F\fP
XMove the cursor forward one character.
X.IP \fB^H\fP
XBackspace,
Xdeleting the character in front of the cursor.
XThe \s-1DEL\s0 key also performs this function.
X.IP \fB^K\fP
XDelete from the cursor position to the end of the line.
X.IP \fB^L\fP
XRedraw the screen.
X.IP \fB<\s-1RET\s0>
XPressing \s-1RETURN\s0 moves to the next line,
Xcolumn one.
XIf you're on the last line,
Xthis wraps around to the first line.
X.IP \fB^N\fP
XMove to the next line,
Xwithout moving to column one.
XIf you're on the last line,
Xthis wraps around to the first line.
X.IP \fB^P\fP
XMove to the previous line.
XIf you're on the first line,
Xthis wraps around to the last line.
X.IP \fB<\s-1ESC\s0>\fP
XPressing the \s-1ESCAPE\s0 key tells
X.I index
Xthat you're done editing the entry.
XYou will be asked whether you want to save the entry in the database.
XIf you say yes,
Xit will be saved.
XIf you say no,
Xthe data you just entered will be discarded.
XIf you press \s-1RETURN\s0,
Xyou will be returned to the editing mode.
X.RE
X.IP \fBf\fP
XFind an entry in the database.
XYou will be prompted for a pattern to search for,
Xand then all entries which match the pattern will be displayed,
Xone at a time.
XThe pattern may be any regular expression,
Xas described in
X.IR ed (1).
XCase is distinguished unless the
X.B \-i
Xoption was given on the command line.
XSee the description of the ``\fBr\fP'' command for the options available
Xto you with each entry displayed.
X.IP \fBr\fP
XRead the database entry by entry.
XEach entry in the database is printed on the screen,
Xalong with two numbers indicating the number of entries in the database,
Xand the sequential index number of this entry (e.g., ``123/500'').
XAs each entry is printed,
Xyou will be allowed to execute the following commands:
X.RS
X.IP \fB<\s-1RET\s0>\fP
XPressing the \s-1RETURN\s0 key will move to the next database entry.
X.IP \fB\-\fP
XReturn to the previous database entry.
X.IP \fBd\fP
XDelete this entry from the database.
XYou will be prompted to confirm this operation.
X.IP \fBe\fP
XEdit this database entry.
XSee the description of the main menu ``\fBa\fP'' command for a list
Xof the editing commands available.
XAfter you press \s-1ESCAPE\s0 and indicate whether you wish to save what
Xyou have edited,
Xyou will be returned to this mode again.
X.IP \fBq\fP
XReturn to the main menu without looking at the rest of the entries.
X.IP \fB^L\fP
XRedraw the screen.
X.RE
X.IP \fBs\fP
XSave any modifications to the database.
X.IP \fBq\fP
XSave any modifications to the database,
Xand exit
X.IR index .
X.IP \fBx\fP
XExit
X.I index
Xwithout saving the database.
XIf the database has been modified,
Xyou will be asked to confirm this operation.
X.SH SEARCHING FROM THE COMMAND LINE
X.PP
XIf a database name and pattern are both specified on the command line,
Xthe pattern will be searched for in the database,
Xand any matching entries will be printed on the standard output.
XEach entry will be printed one field per line,
Xpreceded by its field name.
X.PP
XThe pattern may be any valid regular expression,
Xas defined by
X.IR ed (1).
XCase is significant,
Xunless the
X.B \-i
Xoption is given.
XTo match all entries in the database,
Xuse the regular expression ``.'' (matches any character).
X.PP
XBy default,
X.I index
Xwill not print any blank lines in the entry,
Xin order to make the output more readable.
XBy specifying the
X.B \-v
Xoption,
Xyou can tell
X.I index
Xto print all lines in the entry,
Xeven if they have nothing on them.
X.SH FILTERS
X.PP
XIf the
X.B \-f
Xoption is specified with the name of a filter,
Xthen when a database name and pattern are also given,
Xall matching entries will be sent through the filter program instead
Xof to the standard output.
X.PP
XThe first line of output will contain the field names for the database,
Xseparated by tabs.
XEach following line will contain one database entry,
Xwith fields separated by tabs.
XThis format is easily dealt with using programs such as
X.IR awk (1).
X.PP
XAs a convenience,
Xfiltering programs may be stored in the database directory with a
X``.fmt'' extension in their file name.
XThe program is first searched for here (by adding the name extension),
Xand if it is not found,
Xit is then searched for (without the extension) in the standard search
Xpath.
X.SH EXAMPLE
X.PP
XThe following database description file implements a business phone number
Xlist.
XIt allows three lines for the company address,
Xand two lines for electronic mail addresses.
XThe extra fields ``Product'' and ``Keywords'' can be used to provide
Xadditional patterns to search for (e.g.,
Xyou might want to search for all disk vendors).
XThe ``!'' character inhibits searching the title,
Xaddress,
Xand telephone number for patterns.
X.sp
X.nf
XName
X!Title
XCompany
X!Address
X!
X!
X!Phone
XElectronic Mail
X
XProduct
XKeywords
X.fi
X.SH LIMITATIONS
X.PP
XEach database may have no more than 16 fields.
X.PP
XYou may not have more than 64 separate databases.
X.PP
XA database may contain as many entries as you can allocate memory for.
X.SH SEE ALSO
X.IR awk (1),
X.IR ed (1)
X.SH AUTHOR
XDavid A. Curry, Research Institute for Advanced Computer Science
END_OF_FILE
if test 7844 -ne `wc -c <'index.1'`; then
echo shar: \"'index.1'\" unpacked with wrong size!
fi
# end of 'index.1'
fi
if test -f 'main.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'main.c'\"
else
echo shar: Extracting \"'main.c'\" \(1996 characters\)
sed "s/^X//" >'main.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/main.c,v 1.1 89/08/09 11:06:42 davy Exp $";
X#endif
X/*
X * main.c - main routine for index program.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: main.c,v $
X * Revision 1.1 89/08/09 11:06:42 davy
X * Initial revision
X *
X */
X#include <sys/param.h>
X#include <curses.h>
X#include <stdio.h>
X#include "defs.h"
X
Xint igncase = 0; /* non-zero if -i flag given */
Xint verbose = 0; /* non-zero if -v flag given */
X
Xchar *pname; /* program name */
Xchar dbasedir[MAXPATHLEN]; /* path to database directory */
X
Xmain(argc, argv)
Xchar **argv;
Xint argc;
X{
X char *database, *filter, *pattern;
X
X pname = *argv;
X database = filter = pattern = NULL;
X
X /*
X * Process arguments.
X */
X while (--argc) {
X if (**++argv == '-') {
X switch (*++*argv) {
X case 'f': /* filter */
X if (--argc <= 0)
X usage();
X
X filter = *++argv;
X continue;
X case 'i': /* ignore case */
X igncase++;
X continue;
X case 'v': /* verbose */
X verbose++;
X continue;
X }
X }
X
X /*
X * database argument is first.
X */
X if (database == NULL) {
X database = *argv;
X continue;
X }
X
X /*
X * pattern argument is next.
X */
X if (pattern == NULL) {
X pattern = *argv;
X continue;
X }
X
X usage();
X }
X
X /*
X * Get the path of the database directory.
X */
X set_dbase_dir();
X
X /*
X * If they didn't specify a database, put them in
X * the selection routine.
X */
X if (database == NULL)
X database = select_db();
X
X /*
X * Open the database and read it in.
X */
X read_idxfile(database);
X read_dbfile(database);
X
X /*
X * If they didn't specify a pattern, go to the
X * main menu. Otherwise, search the database
X * for the pattern, and print the results.
X */
X if (pattern == NULL) {
X main_menu(database);
X reset_modes();
X }
X else {
X search_db(pattern);
X print_db(database, filter);
X }
X
X exit(0);
X}
END_OF_FILE
if test 1996 -ne `wc -c <'main.c'`; then
echo shar: \"'main.c'\" unpacked with wrong size!
fi
# end of 'main.c'
fi
if test -f 'printdb.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'printdb.c'\"
else
echo shar: Extracting \"'printdb.c'\" \(2557 characters\)
sed "s/^X//" >'printdb.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/printdb.c,v 1.1 89/08/09 11:06:47 davy Exp $";
X#endif
X/*
X * printdb.c - print entries from the database.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: printdb.c,v $
X * Revision 1.1 89/08/09 11:06:47 davy
X * Initial revision
X *
X */
X#include <sys/file.h>
X#include <stdio.h>
X#include "defs.h"
X
X/*
X * print_db - print out entries marked DB_PRINT in the database.
X */
Xprint_db(dbname, filter)
Xchar *dbname, *filter;
X{
X FILE *pp;
X FILE *popen();
X char buf[BUFSIZ];
X register int i, j;
X register char *tab;
X
X /*
X * If no filter was specified, we just spit the entries out,
X * with their field names, to standard output.
X */
X if (filter == NULL) {
X for (i=0; i < dbentries; i++) {
X if ((db[i].db_flag & DB_VALID) == 0)
X continue;
X if ((db[i].db_flag & DB_PRINT) == 0)
X continue;
X
X for (j=0; j < idx.idx_nlines; j++) {
X if (!verbose) {
X if (db[i].db_lines[j][0] == '\0')
X continue;
X }
X
X sprintf(buf, "%s%s", idx.idx_lines[j],
X idx.idx_lines[j][0] ? ":" : "");
X printf("%-*s%s\n", idx.idx_maxlen + 2,
X buf, db[i].db_lines[j]);
X }
X
X putchar('\n');
X }
X
X return;
X }
X
X /*
X * Otherwise, we set up a pipe to the filter, and print
X * first the field names, and then the fields. We do
X * this one entry per line, with fields separated by
X * tabs.
X */
X
X /*
X * Create the path to a formatting program in the database
X * directory.
X */
X sprintf(buf, "%s/%s%s", dbasedir, filter, FMTFILE_SUFFIX);
X
X /*
X * If that's not there, then assume they gave us some
X * program name (like "more" or something), and just
X * stick it in there.
X */
X if (access(buf, X_OK) < 0)
X strcpy(buf, filter);
X
X /*
X * Open the pipe.
X */
X if ((pp = popen(buf, "w")) == NULL) {
X error("%s: cannot execute \"%s\".\n", pname, filter, 0);
X exit(1);
X }
X
X /*
X * Print the field names, separated by tabs.
X */
X tab = "";
X for (i=0; i < idx.idx_nlines; i++) {
X fprintf(pp, "%s%s", tab, idx.idx_lines[i]);
X tab = "\t";
X }
X
X putc('\n', pp);
X
X /*
X * Print the entries, with fields separated
X * by tabs.
X */
X for (i=0; i < dbentries; i++) {
X if ((db[i].db_flag & DB_VALID) == 0)
X continue;
X if ((db[i].db_flag & DB_PRINT) == 0)
X continue;
X
X tab = "";
X for (j=0; j < idx.idx_nlines; j++) {
X fprintf(pp, "%s%s", tab, db[i].db_lines[j]);
X tab = "\t";
X }
X
X putc('\n', pp);
X }
X
X /*
X * Close the pipe.
X */
X pclose(pp);
X}
END_OF_FILE
if test 2557 -ne `wc -c <'printdb.c'`; then
echo shar: \"'printdb.c'\" unpacked with wrong size!
fi
# end of 'printdb.c'
fi
if test ! -d 'samples' ; then
echo shar: Creating directory \"'samples'\"
mkdir 'samples'
fi
if test -f 'samples/books.fmt' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/books.fmt'\"
else
echo shar: Extracting \"'samples/books.fmt'\" \(616 characters\)
sed "s/^X//" >'samples/books.fmt' <<'END_OF_FILE'
X#!/bin/sh
X#
X# books.fmt - format the books database into a nice troff-able list
X#
X
X#
X# Put out troff header (set point size, etc.)
X#
Xcat << EOF
X.\"
X.\" Run me off with "tbl | troff -ms"
X.\" Then use "psdit -p/land.pro"
X.\"
X.nr LL 10.25i
X.nr PO .25i
X.nr HM .5i
X.nr FM .5i
X.nr PS 6
X.nr VS 8
X.ll 10.25i
X.po .25i
X.pl 8.5i
X.ps 6
X.vs 8
X.OH "''%'\*(DY'"
X.EH "''%'\*(DY'"
X.ND
X.P1
X.2C
X.TS H
Xc c c c c
Xl l l l c.
XEOF
X
X#
X# Let awk handle reformatting.
X#
Xawk 'BEGIN { FS = "\t" }
X { printf "%s\t%s\t%s\t%s\t%s\n", $1, $2, $3, $4, $5
X if (NR == 1)
X printf "=\n.TH\n"
X }'
X
X#
X# Put out troff footer.
X#
Xcat << EOF
X.TE
XEOF
END_OF_FILE
if test 616 -ne `wc -c <'samples/books.fmt'`; then
echo shar: \"'samples/books.fmt'\" unpacked with wrong size!
fi
chmod +x 'samples/books.fmt'
# end of 'samples/books.fmt'
fi
if test -f 'samples/books.idx' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/books.idx'\"
else
echo shar: Extracting \"'samples/books.idx'\" \(46 characters\)
sed "s/^X//" >'samples/books.idx' <<'END_OF_FILE'
XAuthor
XTitle
X!Addt'l Author(s)
XSeries
X!Status
END_OF_FILE
if test 46 -ne `wc -c <'samples/books.idx'`; then
echo shar: \"'samples/books.idx'\" unpacked with wrong size!
fi
# end of 'samples/books.idx'
fi
if test -f 'samples/bphone.idx' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/bphone.idx'\"
else
echo shar: Extracting \"'samples/bphone.idx'\" \(74 characters\)
sed "s/^X//" >'samples/bphone.idx' <<'END_OF_FILE'
XName
X!Title
XCompany
X!Address
X!
X!
X!Phone
XElectronic Mail
X
XProduct
XKeywords
END_OF_FILE
if test 74 -ne `wc -c <'samples/bphone.idx'`; then
echo shar: \"'samples/bphone.idx'\" unpacked with wrong size!
fi
# end of 'samples/bphone.idx'
fi
if test -f 'samples/cdlist.fmt' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/cdlist.fmt'\"
else
echo shar: Extracting \"'samples/cdlist.fmt'\" \(635 characters\)
sed "s/^X//" >'samples/cdlist.fmt' <<'END_OF_FILE'
X#!/bin/sh
X#
X# cdlist.fmt - format the cdlist database into a nice troff-able list
X#
X#
X
X#
X# Put out troff header (set point size, etc.)
X#
Xcat << EOF
X.\"
X.\" Run me off with "tbl | troff -ms"
X.\" Then use "psdit -p/land.pro"
X.\"
X.nr LL 10i
X.nr PO .5i
X.nr HM .5i
X.nr FM .5i
X.nr PS 9
X.nr VS 11
X.ll 10i
X.po 0.5i
X.pl 8.5i
X.ps 9
X.vs 11
X.TS H
Xcenter, expand;
Xc s s s s s
Xc c c c c c
Xl l l l l l.
XCompact Disc List - \*(DY
XEOF
X
X#
X# Let awk handle reformatting.
X#
Xawk 'BEGIN { FS = "\t" }
X { printf "%s\t%s\t%s\t%s\t%s\t%s\n", $1, $2, $3, $4, $5, $6
X if (NR == 1)
X printf "=\n.TH\n"
X }'
X
X#
X# Put out troff footer.
X#
Xcat << EOF
X.TE
XEOF
END_OF_FILE
if test 635 -ne `wc -c <'samples/cdlist.fmt'`; then
echo shar: \"'samples/cdlist.fmt'\" unpacked with wrong size!
fi
chmod +x 'samples/cdlist.fmt'
# end of 'samples/cdlist.fmt'
fi
if test -f 'samples/cdlist.idx' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/cdlist.idx'\"
else
echo shar: Extracting \"'samples/cdlist.idx'\" \(54 characters\)
sed "s/^X//" >'samples/cdlist.idx' <<'END_OF_FILE'
XArtist
XTitle
X!Orchestra
XClassification
X!Label
X!Number
END_OF_FILE
if test 54 -ne `wc -c <'samples/cdlist.idx'`; then
echo shar: \"'samples/cdlist.idx'\" unpacked with wrong size!
fi
# end of 'samples/cdlist.idx'
fi
if test -f 'samples/cdwantlist.fmt' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/cdwantlist.fmt'\"
else
echo shar: Extracting \"'samples/cdwantlist.fmt'\" \(650 characters\)
sed "s/^X//" >'samples/cdwantlist.fmt' <<'END_OF_FILE'
X#!/bin/sh
X#
X# cdwantlist.fmt - format the cdwantlist database into a nice troff-able list
X#
X#
X
X#
X# Put out troff header (set point size, etc.)
X#
Xcat << EOF
X.\"
X.\" Run me off with "tbl | troff -ms"
X.\" Then use "psdit -p/land.pro"
X.\"
X.nr LL 10i
X.nr PO .5i
X.nr HM .5i
X.nr FM .5i
X.nr PS 9
X.nr VS 11
X.ll 10.0i
X.po 0.5i
X.pl 8.5i
X.ps 9
X.vs 11
X.TS H
Xcenter, expand;
Xc s s s s s
Xc c c c c c
Xl l l l l l.
XCompact Disc Want List - \*(DY
XEOF
X
X#
X# Let awk handle reformatting.
X#
Xawk 'BEGIN { FS = "\t" }
X { printf "%s\t%s\t%s\t%s\t%s\t%s\n", $1, $2, $3, $4, $5, $6
X if (NR == 1)
X printf "=\n.TH\n"
X }'
X
X#
X# Put out troff footer.
X#
Xcat << EOF
X.TE
XEOF
END_OF_FILE
if test 650 -ne `wc -c <'samples/cdwantlist.fmt'`; then
echo shar: \"'samples/cdwantlist.fmt'\" unpacked with wrong size!
fi
chmod +x 'samples/cdwantlist.fmt'
# end of 'samples/cdwantlist.fmt'
fi
if test -f 'samples/cdwantlist.idx' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/cdwantlist.idx'\"
else
echo shar: Extracting \"'samples/cdwantlist.idx'\" \(54 characters\)
sed "s/^X//" >'samples/cdwantlist.idx' <<'END_OF_FILE'
XArtist
XTitle
X!Orchestra
XClassification
X!Label
X!Number
END_OF_FILE
if test 54 -ne `wc -c <'samples/cdwantlist.idx'`; then
echo shar: \"'samples/cdwantlist.idx'\" unpacked with wrong size!
fi
# end of 'samples/cdwantlist.idx'
fi
if test -f 'samples/pphone.fmt' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/pphone.fmt'\"
else
echo shar: Extracting \"'samples/pphone.fmt'\" \(1453 characters\)
sed "s/^X//" >'samples/pphone.fmt' <<'END_OF_FILE'
X#!/bin/sh
X#
X# pphone.fmt - format the pphone database into a nice troff-able
X# phone list of the format
X#
X# Name in bold face
X# Home Address Work Address
X# Home Phone Work Phone
X# Electronic Mail Address
X#
X
X#
X# Put out troff header (set point size, etc.)
X#
Xcat << EOF
X.\"
X.\" Run me off with "troff -ms"
X.\"
X.nr LL 6.5i
X.nr PO 1i
X.nr PS 12
X.nr VS 14
X.ll 6.5i
X.po 1i
X.ps 12
X.vs 14
X.nf
XEOF
X
X#
X# Let awk handle reformatting. Basically, we want to print out, for
X# each entry:
X#
X# .ne 6v <-- makes sure whole entry fits on page
X# .B "Name"
X# .mk
X# .in .5i
X# Home Address
X# Home Phone
X# .rt
X# .in 3i
X# Work Address
X# Work Phone
X# Electronic Address
X# .sp
X#
X# We have special stuff to handle blank lines in the home and/or work
X# address, and then at the end we have to put in some blank lines to
X# make sure the work address is at least as long as the home address,
X# since we're using marks/returns.
X#
Xawk 'BEGIN { FS = "\t" }
X { if (NR > 1) {
X home = ""
X homen = 0
X for (i=2; i <= 4; i++) {
X if (length($i) > 0) {
X home = home $i "\n"
X homen++
X }
X }
X
X work = ""
X workn = 0
X for (i=5; i <= 9; i++) {
X if (length($i) > 0) {
X work = work $i "\n"
X workn++
X }
X }
X
X printf ".ne 6v\n.B \"%s\"\n", $1
X printf ".in .5i\n.mk\n"
X printf "%s", home
X printf ".rt\n.in 3i\n"
X printf "%s", work
X
X while (homen > workn) {
X printf "\n"
X homen--
X }
X
X printf ".sp\n.in 0\n"
X }
X }'
END_OF_FILE
if test 1453 -ne `wc -c <'samples/pphone.fmt'`; then
echo shar: \"'samples/pphone.fmt'\" unpacked with wrong size!
fi
chmod +x 'samples/pphone.fmt'
# end of 'samples/pphone.fmt'
fi
if test -f 'samples/pphone.idx' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'samples/pphone.idx'\"
else
echo shar: Extracting \"'samples/pphone.idx'\" \(89 characters\)
sed "s/^X//" >'samples/pphone.idx' <<'END_OF_FILE'
XName
X!Home Address
X!
X!Home Phone
X!Work Address
X!
X!
X!Work Phone
XElectronic Mail
X!Birthday
END_OF_FILE
if test 89 -ne `wc -c <'samples/pphone.idx'`; then
echo shar: \"'samples/pphone.idx'\" unpacked with wrong size!
fi
# end of 'samples/pphone.idx'
fi
if test -f 'searchdb.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'searchdb.c'\"
else
echo shar: Extracting \"'searchdb.c'\" \(1943 characters\)
sed "s/^X//" >'searchdb.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/searchdb.c,v 1.1 89/08/09 11:06:59 davy Exp $";
X#endif
X/*
X * searchdb.c - database search routine.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: searchdb.c,v $
X * Revision 1.1 89/08/09 11:06:59 davy
X * Initial revision
X *
X */
X#include <curses.h>
X#include <ctype.h>
X#include <stdio.h>
X#include "defs.h"
X
X/*
X * search_db - search the database for the pattern.
X */
Xsearch_db(pattern)
Xchar *pattern;
X{
X int code = 0;
X char *re_comp();
X char buf[BUFSIZ];
X register int i, j;
X register char *p, *q;
X
X /*
X * If we're ignoring case, convert the pattern
X * to all lower case.
X */
X if (igncase) {
X for (p = pattern; *p != NULL; p++) {
X if (isupper(*p))
X *p = tolower(*p);
X }
X }
X
X /*
X * Compile the regular expression.
X */
X if (re_comp(pattern) != NULL)
X return(0);
X
X /*
X * For all entries...
X */
X for (i=0; i < dbentries; i++) {
X /*
X * For each line in the entry...
X */
X for (j=0; j < idx.idx_nlines; j++) {
X /*
X * If this line is not to be searched,
X * skip it.
X */
X if (idx.idx_search[j] == 0)
X continue;
X
X /*
X * If ignoring case, copy the line an
X * convert it to lower case. Otherwise,
X * use it as is.
X */
X if (igncase) {
X p = db[i].db_lines[j];
X q = buf;
X
X while (*p != NULL) {
X *q++ = isupper(*p) ? tolower(*p) : *p;
X p++;
X }
X
X *q = '\0';
X
X /*
X * If we get a match, mark this entry as
X * printable.
X */
X if (re_exec(buf)) {
X db[i].db_flag |= DB_PRINT;
X code = 1;
X }
X }
X else {
X /*
X * If we get a match, mark this entry
X * as printable.
X */
X if (re_exec(db[i].db_lines[j])) {
X db[i].db_flag |= DB_PRINT;
X code = 1;
X }
X }
X }
X }
X
X /*
X * Return whether or not we found anything.
X */
X return(code);
X}
X
END_OF_FILE
if test 1943 -ne `wc -c <'searchdb.c'`; then
echo shar: \"'searchdb.c'\" unpacked with wrong size!
fi
# end of 'searchdb.c'
fi
if test -f 'selectdb.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'selectdb.c'\"
else
echo shar: Extracting \"'selectdb.c'\" \(2630 characters\)
sed "s/^X//" >'selectdb.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/selectdb.c,v 1.1 89/08/09 11:07:06 davy Exp $";
X#endif
X/*
X * selectdb.c - database selection routines.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: selectdb.c,v $
X * Revision 1.1 89/08/09 11:07:06 davy
X * Initial revision
X *
X */
X#include <sys/param.h>
X#include <sys/dir.h>
X#include <curses.h>
X#include <stdio.h>
X#include "defs.h"
X
X/*
X * select_db - allow the user to select a database from the list of databases
X * he has, or to create a new database.
X */
Xchar *
Xselect_db()
X{
X char dbname[MAXPATHLEN];
X char *dblist[MAXDBFILES];
X register int ndbs, i, row, col, spread;
X
X /*
X * Load the list of databases the user has.
X */
X ndbs = load_dblist(dblist);
X spread = (ndbs + 3) / 4;
X
X /*
X * Set tty modes, clear screen.
X */
X set_modes();
X clear();
X
X /*
X * Print the list of databases in four columns.
X */
X for (row = 0; row < spread; row++) {
X for (col = 0; col < 4; col++) {
X i = col * spread + row;
X
X if (dblist[i])
X mvaddstr(row, col * COLS/4, dblist[i]);
X }
X }
X
X *dbname = '\0';
X
X /*
X * Prompt for the name of a database.
X */
X while (*dbname == '\0')
X prompt_str(spread+2, 0, "Select a database: ", dbname);
X
X /*
X * If the database exists, return its name.
X */
X for (i = 0; i < ndbs; i++) {
X if (!strcmp(dbname, dblist[i]))
X return(savestr(dbname));
X }
X
X /*
X * Doesn't exist - create it.
X */
X create_db(dbname);
X return(savestr(dbname));
X}
X
X/*
X * load_dblist - load up a list of the databases the user has.
X */
Xload_dblist(dblist)
Xchar **dblist;
X{
X DIR *dp;
X int ndbs;
X char *rindex();
X register char *s;
X extern int compare();
X register struct direct *d;
X
X ndbs = 0;
X
X /*
X * Open the database directory.
X */
X if ((dp = opendir(dbasedir)) == NULL) {
X fprintf(stderr, "%s: cannot open \"%s\".\n", pname, dbasedir);
X exit(1);
X }
X
X /*
X * Read entries from the directory...
X */
X while ((d = readdir(dp)) != NULL) {
X /*
X * Search for a "." in the name, which marks
X * the suffix.
X */
X if ((s = rindex(d->d_name, '.')) == NULL)
X continue;
X
X /*
X * If this is an index definition file, save its
X * name.
X */
X if (!strcmp(s, IDXFILE_SUFFIX)) {
X if (ndbs < MAXDBFILES) {
X *s = '\0';
X dblist[ndbs++] = savestr(d->d_name);
X }
X }
X }
X
X /*
X * Sort the list.
X */
X qsort(dblist, ndbs, sizeof(char *), compare);
X closedir(dp);
X
X return(ndbs);
X}
X
X/*
X * compare - comparis routine for qsort of dblist.
X */
Xstatic int
Xcompare(a, b)
Xchar **a, **b;
X{
X return(strcmp(*a, *b));
X}
END_OF_FILE
if test 2630 -ne `wc -c <'selectdb.c'`; then
echo shar: \"'selectdb.c'\" unpacked with wrong size!
fi
# end of 'selectdb.c'
fi
if test -f 'util.c' -a "${1}" != "-c" ; then
echo shar: Will not clobber existing file \"'util.c'\"
else
echo shar: Extracting \"'util.c'\" \(2490 characters\)
sed "s/^X//" >'util.c' <<'END_OF_FILE'
X#ifndef lint
Xstatic char *RCSid = "$Header: /u5/davy/progs/index/RCS/util.c,v 1.1 89/08/09 11:07:12 davy Exp $";
X#endif
X/*
X * util.c - utility routines for index program.
X *
X * David A. Curry
X * Research Institute for Advanced Computer Science
X * Mail Stop 230-5
X * NASA Ames Research Center
X * Moffett Field, CA 94035
X * davy@riacs.edu
X *
X * $Log: util.c,v $
X * Revision 1.1 89/08/09 11:07:12 davy
X * Initial revision
X *
X */
X#include <curses.h>
X#include <stdio.h>
X#include "defs.h"
X
X/*
X * set_dbase_dir - set the path to the database directory.
X */
Xset_dbase_dir()
X{
X char *s;
X char *getenv();
X
X /*
X * Look for it in the environment.
X */
X if ((s = getenv("INDEXDIR")) != NULL) {
X strcpy(dbasedir, s);
X return;
X }
X
X /*
X * Otherwise, it's in the home directory.
X */
X if ((s = getenv("HOME")) == NULL) {
X fprintf(stderr, "%s: cannot get home directory.\n", pname);
X exit(1);
X }
X
X /*
X * Make the name.
X */
X sprintf(dbasedir, "%s/%s", s, INDEXDIR);
X}
X
X/*
X * dbsort - comparison routine for qsort of database entries.
X */
Xdbsort(a, b)
Xstruct dbfile *a, *b;
X{
X register int i, n;
X
X /*
X * Sort invalid entries to the end.
X */
X if ((a->db_flag & DB_VALID) == 0) {
X if ((b->db_flag & DB_VALID) == 0)
X return(0);
X
X return(1);
X }
X
X if ((b->db_flag & DB_VALID) == 0)
X return(-1);
X
X /*
X * Sort on first field, then try secondary fields.
X */
X n = 0;
X for (i=0; (i < idx.idx_nlines) && (n == 0); i++)
X n = strcmp(a->db_lines[i], b->db_lines[i]);
X
X return(n);
X}
X
X/*
X * error - reset tty modes and print an error message.
X */
Xerror(fmt, arg1, arg2, arg3)
Xchar *fmt, *arg1, *arg2, *arg3;
X{
X reset_modes();
X
X fprintf(stderr, fmt, arg1, arg2, arg3);
X}
X
X/*
X * savestr - save a string in dynamically allocated memory.
X */
Xchar *
Xsavestr(str)
Xchar *str;
X{
X char *s;
X char *malloc();
X
X if ((s = malloc(strlen(str) + 1)) == NULL) {
X reset_modes();
X
X fprintf(stderr, "%s: out of memory.\n", pname);
X exit(1);
X }
X
X strcpy(s, str);
X return(s);
X}
X
X/*
X * byebye - exit.
X */
Xbyebye()
X{
X register char c;
X register int x, y;
X
X /*
X * If the database is modified, see if they really
X * mean to exit without saving.
X */
X if (dbmodified) {
X getyx(curscr, y, x);
X c = prompt_char(y, 0,
X "Really exit without saving? ",
X "YyNn");
X
X if ((c == 'n') || (c == 'N'))
X return;
X }
X
X /*
X * Reset tty modes and exit.
X */
X reset_modes();
X exit(0);
X}
X
X/*
X * usage - print a usage message.
X */
Xusage()
X{
X fprintf(stderr, "Usage: %s [-f filter] [-i] [database] [pattern]\n",
X pname);
X exit(1);
X}
END_OF_FILE
if test 2490 -ne `wc -c <'util.c'`; then
echo shar: \"'util.c'\" unpacked with wrong size!
fi
# end of 'util.c'
fi
echo shar: End of archive 1 \(of 2\).
cp /dev/null ark1isdone
MISSING=""
for I in 1 2 ; do
if test ! -f ark${I}isdone ; then
MISSING="${MISSING} ${I}"
fi
done
if test "${MISSING}" = "" ; then
echo You have unpacked both archives.
rm -f ark[1-9]isdone
else
echo You still need to unpack the following archives:
echo " " ${MISSING}
fi
## End of shell archive.
exit 0
| true |
1495ddf60c2a6e5b84bb190a481ed942c1008d4e
|
Shell
|
yaotemp/linux-tutorial
|
/codes/linux/soft/tomcat8-install.sh
|
UTF-8
| 931 | 4.03125 | 4 |
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
cat << EOF
###################################################################################
# 安装 Tomcat 脚本
# @system: 适用于所有 linux 发行版本。
# @author: Zhang Peng
###################################################################################
EOF
if [[ $# -lt 2 ]]; then
echo "Usage: sh tomcat8-install.sh [version] [path]"
echo -e "Example: sh tomcat8-install.sh 8.5.28 /opt/tomcat8\n"
fi
version=8.5.28
if [[ -n $1 ]]; then
version=$1
fi
root=/opt/tomcat
if [[ -n $2 ]]; then
root=$2
fi
echo "Current execution: install tomcat8 ${version} to ${root}"
echo -e "\n>>>>>>>>> download tomcat"
mkdir -p ${root}
wget -O ${root}/apache-tomcat-${version}.tar.gz https://archive.apache.org/dist/tomcat/tomcat-8/v${version}/bin/apache-tomcat-${version}.tar.gz
echo -e "\n>>>>>>>>> install tomcat"
tar zxf ${root}/apache-tomcat-${version}.tar.gz -C ${root}
| true |
4c4c1af00a8114bfb2db6b803eaa6c7c3081bb15
|
Shell
|
Elive/elive-tools
|
/usr/bin/e17-restart-and-remove-conf-file-WARNING-dont-complain
|
UTF-8
| 1,872 | 3.546875 | 4 |
[] |
no_license
|
#!/bin/bash
SOURCE="$0"
source /usr/lib/elive-tools/functions
EL_REPORTS="1"
el_make_environment
. gettext.sh
TEXTDOMAIN="elive-tools"
export TEXTDOMAIN
# Lock system (good one) {{{
lockfile="/tmp/.$(basename $0)-${USER}.lock"
exit_ok(){
rm -f "$lockfile"
}
exit_error(){
rm -f "$lockfile"
}
if [[ -r "$lockfile" ]] ; then
PROCCESS="$(cat $lockfile)"
else
PROCCESS=" "
fi
if (ps up $PROCCESS) 1>/dev/null 2>&1 ; then
echo -e "E: $(basename "$0" ) already running"
exit
else
echo $$ > "$lockfile"
fi
# Traps need to be set after the lock verification, so that they do not remove the lock file while another instance is running
trap "exit_ok" EXIT
trap "exit_error" 1 3 5 6 14 15 ERR TERM
# SET the lock file
echo "$$" > "$lockfile"
# end lock system }}}
#
# XXX Note: there's a .desktop file that references this command by name ($0); do not break things
main(){
for arg in "$@" ; do
case "$arg" in
"--ask")
zenity --info --text="$( eval_gettext "Make sure to close all your running applications correctly before activating new desktop settings." )"
if ! zenity --question --text="$( eval_gettext "Do you want to reset your desktop configurations? It will restart your desktop settings to a new predefined one. Useful option to restore your desktop if you messed up something and you don't know how to recover it." )" ; then
exit
fi
;;
esac
done
sync
if [[ -n "$EROOT" ]] ; then
tmux-attach-jobs background erestart "e17-restart-and-remove-conf-file-WARNING-dont-complain-tmuxed e16"
else
if [[ -n "$E_START" ]] ; then
tmux-attach-jobs background erestart "e17-restart-and-remove-conf-file-WARNING-dont-complain-tmuxed e17"
fi
fi
}
#
# MAIN
#
main "$@"
# vim: set foldmethod=marker :
| true |
40f777d09d16e6496a8cd525adf487935b9ac187
|
Shell
|
sagarpalao/operating_system
|
/special_variables_usage.sh
|
UTF-8
| 172 | 3.296875 | 3 |
[] |
no_license
|
#!/bin/bash
# For each pair of file arguments, copy the contents of the first file
# into the second. Requires an even number of arguments; prints "no" otherwise.
flag=1
if [ `expr $# % 2` = 0 ]
then
	for i in "$@"
	do
		if [ $flag = 1 ]
		then
			# First file of the pair: remember its contents.
			s1=`cat "$i"`
			flag=0
		else
			# Second file of the pair: overwrite it with the saved contents.
			echo "$s1" > "$i"
			flag=1
		fi
	done
else
	echo "no"
fi
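
# Example usage (hypothetical files):
#   printf 'hello\n' > a.txt; : > b.txt
#   ./special_variables_usage.sh a.txt b.txt    # b.txt now contains "hello"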
| true |
df030f167cab30dbd79594665e77c77d2272b0ce
|
Shell
|
potatokuka/ft_services
|
/srcs/nginx/srcs/setup.sh
|
UTF-8
| 777 | 2.671875 | 3 |
[] |
no_license
|
#!/bin/sh
# adduser --disabled-password ${SSH_USERNAME}
# echo "${SSH_USERNAME}:${SSH_PASSWORD}" | chpasswd
# Dynamically resolve the other services' external addresses
. /tmp/get_external-ip-address.sh WORDPRESS_SVC wordpress-svc
. /tmp/get_external-ip-address.sh PHPMYADMIN_SVC phpmyadmin-svc
. /tmp/get_external-ip-address.sh GRAFANA_SVC grafana-svc
. /tmp/get_external-ip-address.sh NGINX_SVC nginx-svc
echo "${WORDPRESS_SVC}:${PHPMYADMIN_SVC}"
envsubst '${WORDPRESS_SVC} ${PHPMYADMIN_SVC} ${NGINX_SVC} ${GRAFANA_SVC}' < /tmp/index.html > /www/index.html
rm /tmp/index.html
envsubst '${WORDPRESS_SVC} ${PHPMYADMIN_SVC}' < /tmp/default.conf > /etc/nginx/conf.d/default.conf
rm /tmp/default.conf
# ssh user
adduser --disabled-password ${SSH_USERNAME}
echo "${SSH_USERNAME}:${SSH_PASSWORD}" | chpasswd
| true |
f6ca0203f925adcd22d4f90b628f42050c9c6eac
|
Shell
|
hubertsgithub/block_annotation
|
/block_inpainting/research/deeplab/weight_transfer_cityscapes.sh
|
UTF-8
| 2,743 | 3 | 3 |
[] |
no_license
|
#!/bin/bash
# Copyright 2018 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# This script transfers weights from a pretrained checkpoint (adapted from
# the local Cityscapes training script).
# Usage:
#   # From the tensorflow/models/research/deeplab directory.
#   sh ./weight_transfer_cityscapes.sh
#
#
## Use loop_train_script.py to call this script. ##
echo "#####################################"
echo "THE PURPOSE OF THIS SCRIPT IS TO TRANSFER WEIGHTS FROM A CHECKPOINT."
echo "#####################################"
# Exit immediately if a command exits with a non-zero status.
set -e
export CUDA_VISIBLE_DEVICES="0"
# Move one-level up to tensorflow/models/research directory.
cd ..
# Update PYTHONPATH.
export PYTHONPATH=$PYTHONPATH:`pwd`:`pwd`/slim
# Set up the working environment.
CURRENT_DIR=$(pwd)
WORK_DIR="${CURRENT_DIR}/deeplab"
# Set up the working directories.
MODEL_FOLDER="${WORK_DIR}/pretrained_models"
##################################################################################
MODEL_VARIANT="xception_65"
SOURCE_CHECKPOINT_NAME="model.ckpt"
SOURCE_CHECKPOINT_DIR="${MODEL_FOLDER}/deeplabv3_pascal_train_aug"
OUTPUT_CHECKPOINT_DIR="${MODEL_FOLDER}/deeplabv3_pascal_train_aug_22chgaussinit"
# Set dataset to dataset of source checkpoint.
NUM_CLASSES=21 # Num classes in pretrained checkpoint... required to initialize model graph.
INPUT_CHANNELS="22" # Max(3, INPUT_CHANNELS)
INPUT_KERNEL_FILLER="gaussian" # zeros, gaussian
##################################################################################
cd "${CURRENT_DIR}"
python "${WORK_DIR}"/weight_transfer_deeplab.py \
--logtostderr \
--model_variant="${MODEL_VARIANT}" \
--train_crop_size=128 \
--train_crop_size=192 \
--source_checkpoint_name="${SOURCE_CHECKPOINT_NAME}" \
--source_checkpoint_dir="${SOURCE_CHECKPOINT_DIR}" \
--output_checkpoint_dir="${OUTPUT_CHECKPOINT_DIR}" \
--num_classes="${NUM_CLASSES}"\
--input_channels="${INPUT_CHANNELS}" \
--input_kernel_filler="$INPUT_KERNEL_FILLER" \
--atrous_rates=6 \
--atrous_rates=12 \
--atrous_rates=18 \
--output_stride=16 \
--resize_factor=16 \
--decoder_output_stride=4 \
| true |
ed3f11567daf4ecc20782d564ee31287ddabc320
|
Shell
|
xmzhuo/gvcf_CNV
|
/build_pon_mlcnv.sh
|
UTF-8
| 1,591 | 3.15625 | 3 |
[] |
no_license
|
#!/bin/bash
#use output info.vcf.gz from prepare_vcf_mlcnv.sh to build a pon
Inputfileloc="/gatk/data/in"
outfolder="/gatk/data/out"
echo $cmdstr
cd $Inputfileloc
pri_loc=$(dirname "$(readlink -f "$0")")
#gvcf_file=$(ls *.vcf.gz | grep -v ALL.wgs | grep -v dbsnp)
#sv_file=$(ls ALL.wgs.*.vcf.gz)
#dbsnp_file=$(ls dbsnp*.vcf.gz)
#reference=$(ls *.fa* | grep -v ".fai")
#csv2jpg=$(ls *.py)
#bed=$(ls *.bed)
n_proc=$(grep -c ^processor /proc/cpuinfo)
#n_proc=$(awk -v var=$n_proc 'BEGIN{print var/2}')
#chk_pon = $(ls *.vcf.gz | grep pon.vcf.gz | wc -l)
#if [ $chk_pon -ge 1 ]; then pon_vcf=$(ls *.pon.vcf.gz); fi
ls *.info.vcf.gz > infovcf.list
echo "$thread_n cores"
#reg2cov_py=$(ls reg2cov*.py)
echo "n_proc:$n_proc; reference: $reference"
cat infovcf.list
for i in $(cat infovcf.list); do
echo $i
#to avoid merge issue with FORMAT:AD, change Number=R to .
# bcftools view $i | sed 's/ID=AD,Number=R/ID=AD,Number=\./' | bcftools view - -Oz
bcftools index -t $i
done
echo "build pon with merge, sum the DP relDP and sGQ"
#build pon with merge, sum the DP and GQ
bcftools merge -m all -i DP:sum,relDP:sum,sGQ:sum -l infovcf.list --force-samples | bcftools sort - -Oz > temp.pon.info.vcf.gz
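# (Note, per the bcftools docs: `-i/--info-rules TAG:METHOD` tells merge how to
#  combine INFO fields across files; `sum` here accumulates DP, relDP and sGQ
#  over all samples. relDP and sGQ appear to be custom tags produced upstream
#  by prepare_vcf_mlcnv.sh, per the header comment.)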
sample_num=$(bcftools query -l temp.pon.info.vcf.gz |wc -l)
cp temp.pon.info.vcf.gz mlcnv_${sample_num}.pon.info.vcf.gz
bcftools index -t mlcnv_${sample_num}.pon.info.vcf.gz
cp mlcnv_${sample_num}.pon.info.vcf.gz $outfolder/mlcnv_${sample_num}.pon.info.vcf.gz
cp mlcnv_${sample_num}.pon.info.vcf.gz.tbi $outfolder/mlcnv_${sample_num}.pon.info.vcf.gz.tbi
ls -LR $outfolder
| true |
0cb00fab6bb35fd5d06602f268e3db9a34836801
|
Shell
|
ra2003/bob-build
|
/bootstrap_soong.bash
|
UTF-8
| 3,384 | 3.5 | 4 |
[
"Apache-2.0"
] |
permissive
|
#!/usr/bin/env bash
# Copyright 2019 Arm Limited.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eu
trap 'echo "*** Unexpected error in $0 ***"' ERR
BOB_DIR=$(dirname "${BASH_SOURCE[0]}")
function die {
echo "${BASH_SOURCE[0]}: ${*}"
exit 1
}
# ${VAR:-} will substitute an empty string if the variable is unset, which
# stops `set -u` complaining before `die` is invoked.
[[ -z ${SRCDIR:-} ]] && die "\$SRCDIR not set"
[[ -z ${PROJ_NAME:-} ]] && die "\$PROJ_NAME not set"
[[ -z ${OUT:-} ]] && die "\$OUT not set - did you run envsetup.sh and lunch?"
[[ -e ${SRCDIR}/Android.mk ]] && die "${SRCDIR}/Android.mk conflicts with Android.bp. Please remove!"
[[ -f "build/make/core/envsetup.mk" ]] || die "Working dir must be the root of an Android build tree"
# The following variables are optional - give them empty default values.
BOB_CONFIG_OPTS="${BOB_CONFIG_OPTS-}"
BOB_CONFIG_PLUGINS="${BOB_CONFIG_PLUGINS-}"
source "${BOB_DIR}/pathtools.bash"
source "${BOB_DIR}/bootstrap/utils.bash"
# TODO: Generate the config file based on the command-line arguments
BUILDDIR="${OUT}/gen/STATIC_LIBRARIES/${PROJ_NAME}-config"
mkdir -p "${BUILDDIR}"
CONFIG_FILE="${BUILDDIR}/${CONFIGNAME}"
CONFIG_JSON="${BUILDDIR}/config.json"
WORKDIR="$(pwd)" write_bootstrap
# Create symlinks to the config system wrapper scripts
create_config_symlinks "$(relative_path "${BUILDDIR}" "${BOB_DIR}")" "${BUILDDIR}"
# Create a Go file containing the path to the config file, which will be
# compiled into the Soong plugin. This is required because the module factories
# do not have access to the Soong context when they are called, even though the
# config file must be loaded before then.
SOONG_CONFIG_GO="${BUILDDIR}/soong_config.go"
TMP_GO_CONFIG=$(mktemp)
sed -e "s#@@BOB_CONFIG_OPTS@@#${BOB_CONFIG_OPTS}#" \
-e "s#@@BOB_DIR@@#${BOB_DIR}#" \
-e "s#@@BUILDDIR@@#${BUILDDIR}#" \
-e "s#@@CONFIG_FILE@@#${CONFIG_FILE}#" \
-e "s#@@CONFIG_JSON@@#${CONFIG_JSON}#" \
-e "s#@@SRCDIR@@#${SRCDIR}#" \
"${BOB_DIR}/core/soong_config.go.in" > "${TMP_GO_CONFIG}"
rsync --checksum "${TMP_GO_CONFIG}" "${SOONG_CONFIG_GO}"
rm -f "${TMP_GO_CONFIG}"
SOONG_CONFIG_GO_FROM_BOB=$(relative_path "${BOB_DIR}" "${SOONG_CONFIG_GO}")
# Set up Bob's Android.bp
pushd "${BOB_DIR}" >&/dev/null
TMP_ANDROID_BP=$(mktemp)
sed -e "s#@@PROJ_NAME@@#${PROJ_NAME}#" \
-e "s#@@SOONG_CONFIG_GO@@#${SOONG_CONFIG_GO_FROM_BOB}#" \
Android.bp.in > "${TMP_ANDROID_BP}"
rsync --checksum "${TMP_ANDROID_BP}" Android.bp
rm -f "${TMP_ANDROID_BP}"
popd >&/dev/null
# Create an `Android.bp` symlink for every `build.bp` present in the source
# dir, and remove dead links.
pushd "${SRCDIR}" >&/dev/null
find -name build.bp -execdir ln -fs build.bp Android.bp \;
find -name Android.bp -xtype l -delete
popd >&/dev/null
| true |
8131e18beb3a942f377838f719b17f7381c7f039
|
Shell
|
ParallelMeaningBank/easyccg
|
/training/train.sh
|
UTF-8
| 859 | 3.359375 | 3 |
[
"MIT"
] |
permissive
|
# embeddings hiddenLayerSize contextWindowBackward contextWindowForward trainingData devData name
FOLDER=$1/train.$7
TRAINING_DATA=$5
DEV_DATA=$6
mkdir $FOLDER
TRAINING_FILE=$TRAINING_DATA/gold.stagged
if [[ -f $TRAINING_DATA/categories ]]; then
# Use supplied list of categories
cp $TRAINING_DATA/categories $FOLDER/categories
else
    # Finds categories occurring at least 10 times
cat $TRAINING_FILE | grep -v "^#.*" | grep -v "^$" | tr -s " " "\n" | cut -d "|" -f 3 | sort | uniq -c | grep "[1-9][0-9]" | grep -o "[^ ]*$" > $FOLDER/categories
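    # (Assumed input format: each token in gold.stagged looks like "word|POS|CCGcat",
    #  e.g. "the|DT|NP[nb]/N", so `cut -d "|" -f 3` extracts the CCG category, and
    #  the final grep keeps lines whose `uniq -c` count has at least two digits,
    #  i.e. categories seen 10 or more times.)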
fi
# Find all 2-character suffixes
cat $TRAINING_FILE | grep -v "^#.*" | grep -v "^$" | tr -s " " "\n" | cut -d "|" -f 1 | awk '{print "_"tolower($1)}' | grep -o ..$ | sort | uniq > $FOLDER/suffixes
# Train the model
$TORCH do_training.lua $TRAINING_DATA/gold.stagged $DEV_DATA/gold.stagged $1 $2 $3 $4 $7 $8
| true |
5dacc463021b52359bff25d73f2b33ab1c4c4ef4
|
Shell
|
jpincas/dotfiles
|
/.zshrc
|
UTF-8
| 3,062 | 3.03125 | 3 |
[] |
no_license
|
# If you come from bash you might have to change your $PATH.
export PATH=$PATH:$HOME/bin:/usr/local/bin:/home/jon/.local/bin
export PATH=$PATH:/usr/local/bin/dart-sass
export PATH=$PATH:/usr/local/nvim-macos/bin
export PATH=$PATH:/usr/local/nvim-linux64/bin
# Path to your oh-my-zsh installation.
ZSH_DISABLE_COMPFIX="true"
export ZSH="$HOME/.oh-my-zsh"
# Pakk
export PAKK_ENV="local"
export PAKK_SERVER_NAME="pakk.dev"
export PAKK_NODE="jon-dev"
export SR_NODE="jon-dev"
export SR_ENV="dev"
# Go
export GOPATH=$HOME
export PATH=$PATH:/usr/local/go/bin
# NPM
export PATH="$PATH:$NPM_PACKAGES/bin"
# Set name of the theme to load --- if set to "random", it will
# load a random theme each time oh-my-zsh is loaded, in which case,
# to know which specific one was loaded, run: echo $RANDOM_THEME
# See https://github.com/robbyrussell/oh-my-zsh/wiki/Themes
ZSH_THEME="robbyrussell"
plugins=(git zsh-autosuggestions zsh-syntax-highlighting)
source $ZSH/oh-my-zsh.sh
# User configuration
ulimit -n 10240
# export MANPATH="/usr/local/man:$MANPATH"
# You may need to manually set your language environment
export LANG=en_US.UTF-8
export LC_ALL=en_US.UTF-8
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
case `uname` in
Darwin)
alias vim="nvim"
alias v="nvim"
alias vimcfg="nvim ~/.config/nvim/init.vim"
alias zshcfg="nvim ~/.zshrc"
alias tmuxcfg="nvim ~/.tmux.conf"
export VISUAL=nvim
export EDITOR="nvim"
;;
Linux)
alias vim="nvim"
alias v="nvim"
alias vimcfg="nvim ~/.config/nvim/init.vim"
alias zshcfg="nvim ~/.zshrc"
alias tmuxcfg="nvim ~/.tmux.conf"
export VISUAL=nvim
export EDITOR="nvim"
;;
esac
alias mykill="pkill -u $USER"
alias pakk="cd ~/src/github.com/dogpakk/pakk"
alias gotestfunc="go test -run"
# Preserve MANPATH if you already defined it somewhere in your config.
# Otherwise, fall back to `manpath` so we can inherit from `/etc/manpath`.
export MANPATH="${MANPATH-$(manpath)}:$NPM_PACKAGES/share/man"
# npm
export PATH=~/.npm-global/bin:$PATH
export NVM_DIR="$HOME/.nvm"
[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm
[ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" # This loads nvm bash_completion
#
# Terminal Settings
export COLORTERM=truecolor
# Seens that the TERM needs to be xterm rather than tmux else it starts repeating commands with a %
export TERM=xterm-256color
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
# The next line updates PATH for the Google Cloud SDK.
if [ -f '/home/jon/google-cloud-sdk/path.zsh.inc' ]; then . '/home/jon/google-cloud-sdk/path.zsh.inc'; fi
# The next line enables shell command completion for gcloud.
if [ -f '/home/jon/google-cloud-sdk/completion.zsh.inc' ]; then . '/home/jon/google-cloud-sdk/completion.zsh.inc'; fi
| true |
013406895f4f431ae4f143cf791c1274296aba66
|
Shell
|
hhvm/packaging
|
/aws/ondemand/main.sh
|
UTF-8
| 1,847 | 3.265625 | 3 |
[
"MIT"
] |
permissive
|
#!/bin/bash
# Copyright (c) 2017-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
set -ex
DIR=/home/ubuntu/.ondemand
source $DIR/common.inc.sh
# do this first so even if everything else fails, at least the user can log in and investigate
echo -e "$SSH_KEYS" >> /home/ubuntu/.ssh/authorized_keys
echo "Instance started." > $STATUS_FILE
chmod 666 $STATUS_FILE
log "Starting status monitoring server..."
sudo -i -u ubuntu $DIR/status-server.sh
ok
log "Initial system configuration..."
chmod a-x /etc/update-motd.d/*
mkdir -p /home/ubuntu/.cache
chown ubuntu:ubuntu /home/ubuntu/.cache
touch /home/ubuntu/.cache/motd.legal-displayed
touch /home/ubuntu/.sudo_as_admin_successful
echo "11 11 * * * $DIR/cleanup.sh >>/root/cleanup.log 2>&1" | crontab -
cat $DIR/bashrc.inc.sh >> /home/ubuntu/.bashrc
cat >> /home/ubuntu/.bashrc <<ANALBUMCOVER
cd $REPO 2>/dev/null
$DIR/motd.sh
ANALBUMCOVER
update-alternatives --set editor /usr/bin/vim.basic
# required for remote IDE support
echo "fs.inotify.max_user_watches=524288" >> /etc/sysctl.conf
sysctl -p
ok
if $CLONE_TO_HOME; then
log "Cloning Git repository..."
sudo -i -u ubuntu git clone https://github.com/$TEAM/$REPO.git
ok
fi
log "Installing required .deb packages..."
ALL_PACKAGES="awscli lolcat gdb"
maybe_source $DIR/$TEAM/packages.inc.sh
ALL_PACKAGES="$ALL_PACKAGES $PACKAGES"
maybe_source $DIR/$TEAM/$REPO/packages.inc.sh
ALL_PACKAGES="$ALL_PACKAGES $PACKAGES"
apt-get update
apt-get install -y $ALL_PACKAGES
ok
# team and repo-specific init code
maybe_source $DIR/$TEAM/root.inc.sh
maybe_source $DIR/$TEAM/$REPO/root.inc.sh
# the rest of the bootstrap code should not run as root
sudo -i -u ubuntu $DIR/user.sh
echo "[ALL DONE]" >> $STATUS_FILE
| true |
f3f72c353cc0574b2297fa721a56e65e08118d1c
|
Shell
|
stephencarr/.dotfiles
|
/ssh/install-ssh-config.sh
|
UTF-8
| 734 | 3.953125 | 4 |
[] |
no_license
|
#!/bin/bash
set -e
script_dir_path=$( cd $(dirname $0) ; pwd -P )
repo_path=$script_dir_path/../
config_path=$script_dir_path/config
backup_path=~/.ssh/config.`date +%Y-%m-%d`.bak
# Check if config is already installed
if [ -L ~/.ssh/config ]; then
echo "Config already setup."
else
# Backup existing config
if [ -f ~/.ssh/config ]; then
mv ~/.ssh/config $backup_path
echo "Backed up existing config to '$backup_path'."
fi
ln -s $config_path ~/.ssh/config
echo "Linked custom SSH config."
fi
# Update to latest configuration
echo "Updating..."
( cd $repo_path; git pull --ff-only )
# Verify the config file is secure
echo "Verifying secure file permissions..."
chmod og-wx $config_path
echo "All done!"
| true |
9d872e02649620c2f398b4ac814b0e78e733ac1f
|
Shell
|
SUSE/scf
|
/container-host-files/etc/scf/config/scripts/configure-az.sh
|
UTF-8
| 1,143 | 3.859375 | 4 |
[
"Apache-2.0"
] |
permissive
|
#!/bin/sh
if test -z "${AZ_LABEL_NAME}"
then
echo "AZ: No label configured"
echo "AZ: Skipping"
else
# AZ override processing is in effect.
# Locate the kubectl binary.
kubectl="/var/vcap/packages/kubectl/bin/kubectl"
# Determine the name of the kube worker node this container is
# executing on.
node="$("${kubectl}" get pod "$(hostname)" -o jsonpath='{.spec.nodeName}')"
echo "AZ: Configured ${AZ_LABEL_NAME}"
echo "AZ: Node...... ${node}"
# Determine the AZ of the kube worker node and make this
# information available to the container, scripts, and binaries of
# the diego-cell instance group.
# Note that $AZ_LABEL_NAME may contain dots, which is why we use go-template instead of jsonpath here:
NODE_AZ=$("${kubectl}" get node "${node}" -o "go-template={{index .metadata.labels \"${AZ_LABEL_NAME}\"}}")
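    # Example (hypothetical label): with AZ_LABEL_NAME="failure-domain.beta.kubernetes.io/zone",
    # a jsonpath query {.metadata.labels.failure-domain.beta.kubernetes.io/zone} would
    # treat every dot as a path separator, while the go-template `index` lookup above
    # uses the whole label name as a single map key.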
if test -z "${NODE_AZ}"
then
echo "AZ: No information found"
echo "AZ: Skipping"
else
# Propagate the found AZ information into cloudfoundry
echo "AZ: Found..... ${NODE_AZ}"
export KUBE_AZ="${NODE_AZ}"
fi
fi
| true |
b8d152c54e0d9388b64e5e7ea033277afc4697a2
|
Shell
|
peppiii/apm-golang
|
/build.sh
|
UTF-8
| 398 | 2.875 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
set -e
set -o pipefail
if [ $# -eq 0 ]
then
echo "Usage: build.sh [version]"
exit 1
fi
## enable Go modules
export GO111MODULE=on
## disable the Go module checksum database
export GOSUMDB=off
go clean && CGO_ENABLED=0 go build
#docker build --no-cache -t asia.gcr.io/$NAMESPACE/$SERVICE:$1 .
#docker push asia.gcr.io/$NAMESPACE/$SERVICE:$1
#docker rmi asia.gcr.io/$NAMESPACE/$SERVICE:$1
| true |
d474eb223d825fcc0d0c55139c1272ab3bb0076d
|
Shell
|
wende/dotfiles
|
/.zshrc
|
UTF-8
| 5,636 | 3.09375 | 3 |
[] |
no_license
|
#!/bin/bash
# Path to your oh-my-zsh installation.
export ZSH=$HOME/.oh-my-zsh
#Set name of the theme to load.
# Look in ~/.oh-my-zsh/themes/
# Optionally, if you set this to "random", it'll load a random theme each
# time that oh-my-zsh is loaded.
ZSH_THEME="wezm"
# Uncomment the following line to use case-sensitive completion.
# CASE_SENSITIVE="true"
# Uncomment the following line to use hyphen-insensitive completion. Case
# sensitive completion must be off. _ and - will be interchangeable.
# HYPHEN_INSENSITIVE="true"
# Uncomment the following line to disable bi-weekly auto-update checks.
# DISABLE_AUTO_UPDATE="true"
# Uncomment the following line to change how often to auto-update (in days).
# export UPDATE_ZSH_DAYS=13
# Uncomment the following line to disable colors in ls.
# DISABLE_LS_COLORS="true"
# Uncomment the following line to disable auto-setting terminal title.
# DISABLE_AUTO_TITLE="true"
# Uncomment the following line to enable command auto-correction.
# ENABLE_CORRECTION="true"
# Uncomment the following line to display red dots whilst waiting for completion.
# COMPLETION_WAITING_DOTS="true"
# Uncomment the following line if you want to disable marking untracked files
# under VCS as dirty. This makes repository status check for large repositories
# much, much faster.
# DISABLE_UNTRACKED_FILES_DIRTY="true"
# Uncomment the following line if you want to change the command execution time
# stamp shown in the history command output.
# The optional three formats: "mm/dd/yyyy"|"dd.mm.yyyy"|"yyyy-mm-dd"
# HIST_STAMPS="mm/dd/yyyy"
# Would you like to use another custom folder than $ZSH/custom?
# ZSH_CUSTOM=/path/to/new-custom-folder
# Which plugins would you like to load? (plugins can be found in ~/.oh-my-zsh/plugins/*)
# Custom plugins may be added to ~/.oh-my-zsh/custom/plugins/
# Example format: plugins=(rails git textmate ruby lighthouse)
# Add wisely, as too many plugins slow down shell startup.
plugins=(git zsh-autosuggestions zsh-wakatime fzf-zsh z docker)
# User configuration
# export MANPATH="/usr/local/man:$MANPATH"
source $ZSH/oh-my-zsh.sh
# You may need to manually set your language environment
# export LANG=en_US.UTF-8
# Preferred editor for local and remote sessions
# if [[ -n $SSH_CONNECTION ]]; then
# export EDITOR='vim'
# else
# export EDITOR='mvim'
# fi
# Compilation flags
# export ARCHFLAGS="-arch x86_64"
# ssh
# export SSH_KEY_PATH="~/.ssh/dsa_id"
# Set personal aliases, overriding those provided by oh-my-zsh libs,
# plugins, and themes. Aliases can be placed here, though oh-my-zsh
# users are encouraged to define aliases within the ZSH_CUSTOM folder.
# For a full list of active aliases, run `alias`.
#
# Example aliases
# alias zshconfig="mate ~/.zshrc"
# alias ohmyzsh="mate ~/.oh-my-zsh"
alias spacemacs='/Applications/Emacs.app/Contents/MacOS/Emacs'
test -e "${HOME}/.iterm2_shell_integration.zsh" && source "${HOME}/.iterm2_shell_integration.zsh"
notify() {
terminal-notifier -title 'Terminal' -message 'Done with task!'
}
touch "${HOME}/.last-fzf-query"
sap() {
local result
last_fzf_query=$(cat "${HOME}/.last-fzf-query")
result="$(ag --nobreak --noheading . | fzf -q "$last_fzf_query" --print-query)"
echo "$result" | head -1 > "${HOME}/.last-fzf-query"
echo "$result" | sed "1 d" | grep -E -o '^.*?\:\d+'
}
alias diskbiggest='du -hax . | sort -rh | head -10'
alias git-pull="git branch -r | grep -v '\->' | while read remote; do git branch --track \"$${remote#origin/}\" \"$$remote\"; done"
alias gls="git for-each-ref --sort=committerdate refs/heads/ --format='%(HEAD) %(color:yellow)%(refname:short)%(color:reset) - %(authorname) - (%(color:green)%(committerdate:relative)%(color:reset)) - %(color:red)%(upstream:track)%(color:reset)'"
alias git-clean='for b in `git branch --merged | grep -v \*`; do git branch -D $b; done'
alias up='git push origin HEAD'
alias down='git pull origin `git rev-parse --abbrev-ref HEAD`'
git-who() {
git ls-tree -r -z --name-only HEAD -- $1 | xargs -0 -n1 git blame \
--line-porcelain HEAD |grep "^author "|sort|uniq -c|sort -nr
}
new-branch() {
git checkout master && down && git checkout -b $1;
}
watch() {
find . -name $1 | entr -s "$2 ; terminal-notifier -title 'entr' -message 'Tests finished!'"
}
test -s "$HOME/.kiex/scripts/kiex" && source "$HOME/.kiex/scripts/kiex"
export ZSH_AUTOSUGGEST_HIGHLIGHT_STYLE='fg=2'
TMP=$PROMPT
#export PROMPT="$TMP%{%k%F{yellow}%}
#E %{%f%}"
[ -f ~/.fzf.zsh ] && source ~/.fzf.zsh
export LANG="en_US.UTF-8"export LC_COLLATE="en_US.UTF-8"export LC_CTYPE="en_US.UTF-8"export LC_MESSAGES="en_US.UTF-8"export LC_MONETARY="en_US.UTF-8"export LC_NUMERIC="en_US.UTF-8"export LC_TIME="en_US.UTF-8"export LC_ALL="en_US.UTF-8"
export LANG="en_US.UTF-8"
export LC_COLLATE="en_US.UTF-8"
export LC_CTYPE="en_US.UTF-8"
export LC_MESSAGES="en_US.UTF-8"
export LC_MONETARY="en_US.UTF-8"
export LC_NUMERIC="en_US.UTF-8"
export LC_TIME="en_US.UTF-8"
export LC_ALL="en_US.UTF-8"
export ERL_AFLAGS="-kernel shell_history enabled"
function dotenv () {
	# Run a command with the KEY=VALUE pairs from .env added to its environment.
	# (unquoted expansion so each VAR=value becomes its own argument to env)
	env $(cat .env | xargs) "$@"
}
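# Example (assuming a hypothetical .env containing lines like PORT=8080):
#   dotenv npm start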
if which jenv > /dev/null; then eval "$(jenv init -)"; fi
export PATH="$HOME/.jenv/shims:$PATH"
ssh-add
alias kill-bg='kill $(jobs -p)'
. $HOME/.asdf/asdf.sh
. $HOME/.asdf/completions/asdf.bash
if type brew &>/dev/null; then
FPATH=$(brew --prefix)/share/zsh/site-functions:$FPATH
fi
eval "$(direnv hook zsh)"
# Haskell Cabal Stack
export PATH="/Users/krzysztofwende/.local/bin:$PATH"
export PATH="/Users/krzysztofwende/.cabal/bin:$PATH"
# Go
export PATH="/Users/krzysztofwende/go/bin:$PATH"
export GOPATH="/Users/krzysztofwende/go"
| true |
ce3a9339d1e29b7e55a9bc5c25bde791488ae2a8
|
Shell
|
excelsior-oss/excelsior-jet-samples
|
/objects-avalanche/package.sh
|
UTF-8
| 372 | 2.90625 | 3 |
[] |
no_license
|
#!/bin/bash -e
source "$(dirname $0)/config.sh"
[ -f Main ] || "$(dirname $0)/build.sh"
rm -fr tmp
mkdir tmp
cp Main tmp
cp run-JET.sh tmp
mkdir tmp/hs
cp *.class tmp/hs
cp run-HS.sh tmp/hs
"${JET_HOME}/bin/xpack" -source tmp -target export \
-clean-target -profile auto -zip # || (echo xpack error && exit /b 1)
echo
echo Packaging successful
| true |
ac1caafb2fb3118b564cb42771bae0a8f1810973
|
Shell
|
keaton-taylor/keaton-taylor.github.io
|
/.bashrc
|
UTF-8
| 335 | 2.953125 | 3 |
[] |
no_license
|
parse_git_branch() {
git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/ (\1)/'
}
PS1="\u@\h \[\033[32m\]\w - \$(parse_git_branch)\[\033[00m\] $ "
[[ -s "$HOME/.rvm/scripts/rvm" ]] && source "$HOME/.rvm/scripts/rvm" # Load RVM$
if [ -f ~/.git-completion.bash ]; then
. ~/.git-completion.bash
fi
eval "$(rbenv init -)"
| true |
85d1059e96aa94e53bc7198474d76554274fb338
|
Shell
|
moon4311/memo
|
/linux/bin2Hex.sh
|
UTF-8
| 925 | 3.125 | 3 |
[] |
no_license
|
#!/bin/bash
FILE_PATH='/home/timopass/middleWare/firmware/'
FILE_SIZE=$(ls -al ${FILE_PATH} |grep .bin |awk '{print $5 }')
#FILE_SIZE=$(ls -al /home/timopass/middleWare/firmware/ |grep .bin |awk '{print $5 }')
# If field $8 of `ls -al` isn't the file name, choose A or B:
#  A. add to .bash_profile -> alias ls='ls --color=auto --time-style=long-iso'
#  B. change $8 -> $9
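# (Illustration with hypothetical output:
#  default style : -rw-r--r-- 1 user user 925 Jan  3 12:00 fw.bin      -> name is $9
#  long-iso style: -rw-r--r-- 1 user user 925 2020-01-03 12:00 fw.bin  -> name is $8)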
FILE_NAME=$(ls -al $FILE_PATH |grep .bin |awk '{print $9 }')
if [ "$FILE_NAME" = "" ]; then
FILE_NAME=$(ls -al $FILE_PATH |grep .bin |awk '{print $8 }')
fi
echo "[$FILE_NAME]"
#FILE_NAME=$(ls -al /home/timopass/middleWare/firmware/ |grep .bin |awk '{print $9 }')
FILE_NEW_NAME=$(echo "$FILE_NAME" | sed "s/bin/hex/g")
#echo $FILE_NAME " -> " $FILE_NEW_NAME
echo $FILE_PATH
echo $FILE_NAME
echo $FILE_NEW_NAME
xxd -l $FILE_SIZE -p -c $FILE_SIZE ${FILE_PATH}${FILE_NAME} > ${FILE_PATH}${FILE_NEW_NAME}
mv ${FILE_PATH}${FILE_NAME} ${FILE_PATH}backup/
echo $FILE_NAME "move ./backup/"
| true |
4330ddf0ea13a78a6b0f53a43758e6a5e9663ba6
|
Shell
|
MLChen/analytics-service-ecs
|
/Grafana/entrypoint.sh
|
UTF-8
| 606 | 2.71875 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
set -e
echo "$CREDENTIALS" | base64 --decode > /usr/share/grafana/.aws/credentials
chown grafana:grafana -R /usr/share/grafana/.aws/ && chmod 600 /usr/share/grafana/.aws/credentials
sed -i -e "s,{grafana_admin},$GRAFANA_ADMIN,g" /etc/grafana/grafana.ini
sed -i -e "s,{grafana_password},$GRAFANA_PASSWORD,g" /etc/grafana/grafana.ini
sed -i -e "s,{grafana_domain},$GRAFANA_DOMAIN,g" /etc/grafana/grafana.ini
echo "$GRAFANA_DB" | base64 --decode >> /etc/grafana/grafana.ini
echo "$GRAFANA_SSO" | base64 --decode >> /etc/grafana/grafana.ini
service grafana-server start
sleep infinity
| true |
9f3c12c88085e90c480572288c242f3d0c1763a6
|
Shell
|
openminted/omtd-component-executor
|
/galaxy/autoMonitoringTestScripts/testReload.sh
|
UTF-8
| 341 | 3.28125 | 3 |
[] |
no_license
|
#!/bin/bash
# Get cmd parameter
NUMOFFILES=$1
echo $NUMOFFILES
POOL=omtdDKProAll
TRG=omtdDKPro
TMP=tmp
# Clear TMP
sudo rm -rf $TMP/*
# Select a random subset of files from the pool and copy them to TMP
# (null-delimited so file names with spaces survive the pipeline)
find $POOL -type f -print0 | shuf -z -n $NUMOFFILES | sort -z | sudo xargs -0 cp -t $TMP
# reload TRG directory
./reload.sh $TMP $TRG
ls -l omtdDKPro
ls -l omtdDKPro | wc -l
| true |
0ad699eb092473b73b339a97ae097ae0224bfdec
|
Shell
|
MiguelazoDS/pps
|
/pruebas/Sensores/merge/Procesos/merge_lineal/tiempos_procesos.sh
|
UTF-8
| 957 | 3.21875 | 3 |
[
"MIT"
] |
permissive
|
#!/usr/bin/env bash
temp=0
procesos=15
muestras=5
elementos=1000
directorio="cargaCPUprocesos"
if [ -e $directorio ]
then
rm -rf $directorio
fi
mkdir $directorio
# Processes
for i in $(eval echo {1..$procesos})
do
nombrearchivo=./$directorio/loadcpu$i
# Run the CPU-load script in the background
./loadcpu.sh $nombrearchivo &
# Samples
for j in $(eval echo {1..$muestras})
do
let "cont+=1"
aux=`echo $procesos*$muestras | bc`
res=`echo 100/$aux | bc -l`
output="$(python planificador_merge_multiprocessing.py $i $elementos)"
temp=`echo $temp + $output | bc | awk '{printf "%.10f", $0}'`
perc=`echo $res*$cont | bc | awk '{printf "%.3f",$0}'`
echo $perc% done
done
# Kill the CPU-load process
pkill -f loadcpu.sh
final=`echo $temp/$muestras | bc -l | awk '{printf "%.10f", $0}'`
echo $final >> procesos
temp=0
done
for i in $directorio/*
do
datamash mean 1 < $i >> promedios_procesos
done
rm -rf $directorio
| true |
f85f28eb6ecdf56c997fad3bb884e71d97f5a0a8
|
Shell
|
prondzyn/vagrant-jbake
|
/init/bootstrap.sh
|
UTF-8
| 500 | 3.21875 | 3 |
[] |
no_license
|
#!/usr/bin/env bash
HOME="/home/vagrant"
# download newest package lists from the repositories
sudo apt-get update
# install JDK and unzip
sudo apt-get install -y openjdk-7-jre unzip
# download JBake 2.3.2
wget http://hash.to/HM -O jbake.zip -o jbake-download.log
# unzip downloaded JBake
unzip jbake.zip
# add JBake to $PATH
echo 'export PATH="$HOME/jbake-2.3.2/bin:$PATH"' >> $HOME/.profile
# change ownership of JBake directory
sudo chown -Rf vagrant:vagrant $HOME/jbake*
| true |
9bac7874094133b57ea06da2e467e6bea6556588
|
Shell
|
vik-s/hfss
|
/runhfss.sh
|
UTF-8
| 430 | 2.859375 | 3 |
[] |
no_license
|
#!/bin/bash
# Clear the terminal window
clear
# Greeting
echo "Hi, $USER."
echo "Starting HFSS simulation ..."
echo " "
# Load EM module (update to latest version as needed)
module load ansysEM/17.2
# Run HFSS simulation
ansysedt -distributed -machinelist num=4 -monitor -waitforlicense -ng -batchextract ~/expsp_hfss.py -batchsolve $1
# Sends email after simulation is complete.
# echo | mail -s "HFSS Done: $1 " "$USER@company.com"
| true |
85e822252e4284ce01894d2d26e035c92a35a54b
|
Shell
|
drick0230/Chroot
|
/old/new.old/ChrootSetup_V2.bash
|
UTF-8
| 2,215 | 3.890625 | 4 |
[] |
no_license
|
#! /bin/bash
###########################################################################
# Script : ChrootSetup_V2.bash
#
# Description : Creates a user whose home directory serves as a chroot,
# and installs Bash inside it.
#
#
###########################################################################
# Constant Variables
shellPath='/bin/bash'
installList=('/bin/bash' '/bin/apt-get' '/bin/tar' '/bin/curl' '/bin/gzip')
cpList=('/etc/terminfo' '/lib/terminfo' '/usr/share/terminfo' '/etc/apt/apt.conf.d/')
# Ask for the username and password
read -p "Username: " username
read -s -p "Password: " password
echo -e '\n'
userDir=/home/$username
# Create user
useradd $username
# Password
echo -e $username:$password | sudo chpasswd
# Select Bash as Shell
sudo chsh -s $shellPath $username
# Create the user's home directory
sudo mkdir $userDir
# Give ownership of the home directory to root
sudo chown root:root $userDir
sudo chmod 0755 $userDir
# Create the Linux directory tree
sudo mkdir $userDir/dev
sudo mknod -m 666 $userDir/dev/null c 1 3
sudo mknod -m 666 $userDir/dev/tty c 5 0
sudo mknod -m 666 $userDir/dev/zero c 1 5
sudo mknod -m 666 $userDir/dev/random c 1 8
sudo mkdir $userDir/bin
sudo mkdir $userDir/etc
# Install the programs from the main system
for installPath in "${installList[@]}";
do
sudo cp -v -r --parents $installPath $userDir # Copy the program binary
list=($(ldd $installPath | tr ' ' '\n' | grep "/lib")) # List of dependencies
for i in "${list[@]}"; do sudo cp --parents "$i" "${userDir}"; done # Copy dependencies into the chroot
done
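# (Illustration, hypothetical `ldd /bin/bash` output:
#    linux-vdso.so.1 (0x...)
#    libtinfo.so.6 => /lib/x86_64-linux-gnu/libtinfo.so.6 (0x...)
#    /lib64/ld-linux-x86-64.so.2 (0x...)
#  The tr/grep pipeline keeps only the /lib... paths, and `cp --parents`
#  recreates each full path under $userDir so the dynamic linker can find
#  the libraries inside the chroot.)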
# Copy directories from the main system
for cpPath in "${cpList[@]}";do sudo cp -r --parents "$cpPath" "${userDir}"; done
# Configure SSH
# Copy user info into the chroot
sudo cp -vf /etc/{passwd,group} $userDir/etc
# Add users to sshd_config
echo -e '\n' | sudo tee -a /etc/ssh/sshd_config > /dev/null
echo -e "Match User ${username}" | sudo tee -a /etc/ssh/sshd_config > /dev/null
echo -e "\tChrootDirectory ${userDir}" | sudo tee -a /etc/ssh/sshd_config > /dev/null
# Permission du home à Root
sudo chown root:root $userDir
sudo chmod 0755 $userDir
# Restart SSH service (apply config)
sudo /etc/init.d/ssh restart
| true |
6bc516ebd2a04c483ee695e8e983bb62537cdf16
|
Shell
|
wulechuan/wulechuan-bash-scripts-tools
|
/source/components/core/wlc-bash-tools/bash-auto-load-on-start/functions/ansi-colors.sh
|
UTF-8
| 9,347 | 4.375 | 4 |
[] |
no_license
|
clearColor='\e[0m'
function colorful {
# Usage:
# colorful [[[[{-n|--}] <raw string>] <colorA>] <colorB>]
# Examples:
# colorful -- wulechuan textBlack bgndCyan # ended without a new line
# colorful wulechuan textBlack bgndCyan # ended without a new line
# colorful -n wulechuan textBlack bgndCyan # ended with a new line
# colorful -n wulechuan textBlack bgndCyan # ended with a new line
local endChar
if [ "$1" == '--' ]; then
shift
elif [ "$1" == '-n' ]; then
endChar='\n'
shift
fi
if [ $# -eq 0 ]; then
return 0
fi
local rawString="$1"
shift
local colorEscapeString
evaluate-echo-color-into-variable $* colorEscapeString
local clearColorMark
if [ -z "$colorEscapeString" ]; then
        clearColorMark='' # intentionally skip the "clear color" action
    else
        clearColorMark=$clearColor # the color should be cleared
fi
echo -en "${colorEscapeString}${rawString}${clearColorMark}${endChar}"
}
function evaluate-echo-color-into-variable {
# Usage:
# evaluate-echo-color-into-variable <color1> [<color2>] <reference of variable to store result>
#
# Example:
# ---------------------------------------------
# color1=textRed
# color2=bgndWhite
# colorEscapeString
# evaluate-echo-color-into-variable $color1 $color2 colorEscapeString
# echo $colorEscapeString
# ---------------------------------------------
# Notice that the last argument has **no** $ sign prefixed.
local color1=
local color2=
if [ $# -lt 2 ]; then # Too few arguments. At least 2 arguments are required.
return
fi
map-color-name-into-ansi-code-via-if-statements $1 color1
if [ $# -eq 2 ]; then
if [ ! -z "$color1" ]; then
eval "$2='\\'\"e[${color1}m\""
fi
else
map-color-name-into-ansi-code-via-if-statements $2 color2
if [ ! -z "$color1" ] && [ ! -z "$color2" ]; then
eval "$3='\\'\"e[${color1};${color2}m\""
elif [ ! -z "$color1" ] || [ ! -z "$color2" ]; then
# There is no ';' below, because either of the colors will simply be empty.
eval "$3='\\'\"e[${color1}${color2}m\""
fi
fi
}
function set-echo-color {
# Usage:
# set-echo-color <color1> [<color2>]
#
# Example:
# set-echo-color textRed bgndBrightWhite
local colorEscapeString
evaluate-echo-color-into-variable $* colorEscapeString
echo -en "$colorEscapeString"
}
function clear-echo-color {
echo -en $clearColor
}
function append-string {
# https://stackoverflow.com/questions/3236871/how-to-return-a-string-value-from-a-bash-function
# Usage:
# append-string <the string to add to the tail of old string> to <reference of variable of the old string>
#
# ---------------------------------------------
# myVar='hello '
# append-string 'wulechuan' to myVar
# echo "$myVar"
# ---------------------------------------------
# Notice that the 3rd argument has **no** $ sign prefixed.
#
# Also notice the 2nd argument **must** be 'to'.
if [ $# -ne 3 ]; then
return 3
fi
if [ "$2" != 'to' ]; then
return 2
fi
local oldStringValue=${!3}
eval "$3=\"${oldStringValue}${1}\""
}
function append-colorful-string-to {
# Usage:
# ---------------------------------------------
# append-colorful-string-to <string to modified> <-n or any non-empty string> <string to append to $1> [<text color name>] [<bgnd color name>]
# ---------------------------------------------
# The second argument must be a non-empty one.
# And only the value '-n' means start a new line at the end of the new string.
#
# The 4th and 5th arguments are optional.
local endChar=''
if [ "$2" == '-n' ]; then
endChar='\n'
fi
local colorfulStringToAppend=''
if [ $# -ge 3 ]; then
colorfulStringToAppend="$3"
fi
if [ $# -gt 3 ]; then
local colorEscapeString
evaluate-echo-color-into-variable $4 $5 colorEscapeString
if [ ! -z "$colorEscapeString" ]; then
colorfulStringToAppend="${colorEscapeString}$3${clearColor}"
fi
fi
append-string "${colorfulStringToAppend}${endChar}" to "$1"
}
function map-color-name-into-ansi-code-via-if-statements {
if [ -z "$1" ]; then
eval $2=''
# classical foreground colors
elif [ $1 == 'textBlack' ]; then
eval $2=30
elif [ $1 == 'textRed' ]; then
eval $2=31
elif [ $1 == 'textGreen' ]; then
eval $2=32
elif [ $1 == 'textYellow' ]; then
eval $2=33
elif [ $1 == 'textBlue' ]; then
eval $2=34
elif [ $1 == 'textMagenta' ]; then
eval $2=35
elif [ $1 == 'textCyan' ]; then
eval $2=36
elif [ $1 == 'textWhite' ]; then
eval $2=37
# classical background colors
elif [ $1 == 'bgndBlack' ]; then
eval $2=40
elif [ $1 == 'bgndRed' ]; then
eval $2=41
elif [ $1 == 'bgndGreen' ]; then
eval $2=42
elif [ $1 == 'bgndYellow' ]; then
eval $2=43
elif [ $1 == 'bgndBlue' ]; then
eval $2=44
elif [ $1 == 'bgndMagenta' ]; then
eval $2=45
elif [ $1 == 'bgndCyan' ]; then
eval $2=46
elif [ $1 == 'bgndWhite' ]; then
eval $2=47
# morden foreground colors
# modern colors are **not** supported by Microsoft VSCode terminal
elif [ $1 == 'textBrightBlack' ]; then
eval $2=90
elif [ $1 == 'textBrightRed' ]; then
eval $2=91
elif [ $1 == 'textBrightGreen' ]; then
eval $2=92
elif [ $1 == 'textBrightYellow' ]; then
eval $2=99
elif [ $1 == 'textBrightBlue' ]; then
eval $2=94
elif [ $1 == 'textBrightMagenta' ]; then
eval $2=95
elif [ $1 == 'textBrightCyan' ]; then
eval $2=96
elif [ $1 == 'textBrightWhite' ]; then
eval $2=97
# morden background colors
# modern colors are **not** supported by Microsoft VSCode terminal
elif [ $1 == 'bgndBrightBlack' ]; then
eval $2=100
elif [ $1 == 'bgndBrightRed' ]; then
eval $2=101
elif [ $1 == 'bgndBrightGreen' ]; then
eval $2=102
elif [ $1 == 'bgndBrightYellow' ]; then
eval $2=103
elif [ $1 == 'bgndBrightBlue' ]; then
eval $2=104
elif [ $1 == 'bgndBrightMagenta' ]; then
eval $2=105
elif [ $1 == 'bgndBrightCyan' ]; then
eval $2=106
elif [ $1 == 'bgndBrightWhite' ]; then
eval $2=107
else
eval $2=''
fi
}
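# Example of calling the mapper directly (a minimal sketch; 'code' is a
# hypothetical variable name, and \e[...m is the standard SGR escape sequence):
# code=''
# map-color-name-into-ansi-code-via-if-statements textRed code
# echo -e "\e[${code}mred text\e[0m"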
# function map-color-name-into-ansi-code-via-case-statements {
# if [ -z "$1" ]; then
# eval $2=''
# return
# fi
# # The `expr` approach is accurate but very slow! It was thus abandoned.
# # local colorCategory=`expr $1 : '^\(text\|textBright\|bgnd\|bgndBright\)\(Black\|Red\|Green\|Yellow\|Blue\|Magenta\|Cyan\|White\)$'`
# # local colorName=`expr $1 : '.*\(Black\|Red\|Green\|Yellow\|Blue\|Magenta\|Cyan\|White\)$'`
# # The two-segmented approach used below is NOT accurate!
# # For example: textABCDERed text1Red text_Red textRed are all treated as textRed.
# # Besides, this case-statements approach is NOT faster than the if-statements one.
# local colorCategoryValue=-1
# local colorBaseValue=-1
# case "$colorCategory" in
# textBright*)
# colorCategoryValue=90
# ;;
# bgndBright*)
# colorCategoryValue=100
# ;;
# text*)
# colorCategoryValue=30
# ;;
# bgnd*)
# colorCategoryValue=40
# ;;
# esac
# case "$1" in
# *Black)
# colorBaseValue=0
# ;;
# *Red)
# colorBaseValue=1
# ;;
# *Green)
# colorBaseValue=2
# ;;
# *Yellow)
# colorBaseValue=3
# ;;
# *Blue)
# colorBaseValue=4
# ;;
# *Magenta)
# colorBaseValue=5
# ;;
# *Cyan)
# colorBaseValue=6
# ;;
# *White)
# colorBaseValue=7
# ;;
# esac
# local colorValue=$((colorCategoryValue+colorBaseValue))
# if [ $colorCategoryValue -ge 0 ] && [ $colorBaseValue -ge 0 ]; then
# eval $2=$colorValue
# else
# eval $2=''
# fi
# return
# }
| true |
a8d859563af95e7e4731e391dea13db6b49d5044
|
Shell
|
adamisntdead/QuEST
|
/examples/submissionScripts/mpiJob.sh
|
UTF-8
| 702 | 2.703125 | 3 |
[
"MIT",
"BSD-3-Clause"
] |
permissive
|
#!/bin/bash
# Multi-node job using one process with 16 threads per node
# ---------------- EDIT ----------------
# select four nodes, one task each
#SBATCH --nodes=4
#SBATCH --ntasks-per-node=1
# set max wallclock time
#SBATCH --time=01:00:00
# set name of job
#SBATCH --job-name QuEST_JOB
# set the program executable and arguments
NUM_QUBITS=33
EXE=demo
# ----------------------------------------
# set up OMP environment
export OMP_NUM_THREADS=16
module purge
# load compiler
module load mvapich2
# set up mpi on the arcus system
. enable_arcus-b_mpi.sh
# compile program. Comment out these lines if program already built
make clean
make
# run program
mpirun $MPI_HOSTS ./$EXE $NUM_QUBITS
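# Submit this script through SLURM, e.g.:
# sbatch mpiJob.sh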
| true |
b34773a1236c9be792323c3b802d03b99d0ca1f1
|
Shell
|
markadiazrocha/uci
|
/cs/146/hw/3/workspace/srm.sh
|
UTF-8
| 830 | 3.921875 | 4 |
[] |
no_license
|
#!/bin/sh
# will ONLY work if the last component of the path used to find the script is not a symlink
#mydir="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# will work with symlink
mydir=$(dirname $(readlink -f $0))
# check trashcan_path and load trashcan_path var
#( ONLY WORKS IF srm.sh and _check_trash.sh are located in same dir )
source "$mydir/_check_trash.sh"
# Just a simple mv command to move files to $trashcan_path/
# If mv fails, its error messages are silenced
mv "$@" "$trashcan_path/" 2> /dev/null
# Check whether the mv command was successful (much easier than filtering invalid
# arguments, because mv itself already does all the work)
# Print error if mv command failed.
# Print error if mv command failed.
if [ $? -ne 0 ]; then
echo "ERROR: invalid arguments; make sure files exist and that any specified options are valid"
fi
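# Example (hypothetical files):
# ./srm.sh notes.txt old_logs/
# (both arguments are moved into "$trashcan_path/" instead of being deleted)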
| true |
24f5fd07ad601849a134cf26b0c7a171653d2ac7
|
Shell
|
tw7649116/csvlookup
|
/processcsv.sh
|
UTF-8
| 525 | 2.8125 | 3 |
[] |
no_license
|
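# Cleaning pipeline for a messy CSV (a reading of the uncommented code below):
# 1. strip non-printable characters, keeping newlines
# 2. join quoted fields that were split across lines (embedded newline -> space)
# 3. mask commas inside quoted fields with the placeholder @@@@@
# 4. drop empty lines, report min/max field counts, and save the result as <input>_op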
< $1 tr -d -c "[:print:]^\n" > tempf.csv
awk -F '"' -v RS="^^^^^^^" -v OFS='"' '{for(i=2;i<=NF;i+=2){gsub("\n"," ",$i)}} 1' tempf.csv > tempf1.csv
awk -F '"' -v OFS='"' '{for(i=2;i<=NF;i+=2){gsub(",","@@@@@",$i)}} 1' tempf1.csv > tempf2.csv #TODO - Replace special char sequence with comma once processing is complete
awk 'NF' tempf2.csv > tempf3.csv
awk -F ',' 'BEGIN{min=1000;max=0} {if(NF<min){min=NF};if(NF>max){max=NF}} END{print min" "max}' tempf3.csv
mv tempf3.csv $1_op
rm tempf.csv tempf1.csv tempf2.csv
wc -l $1_op
| true |
f50dcb67a2e660be4b9bf6ed90728e8313822592
|
Shell
|
RishanJR/Program_constructs
|
/Loops/classwork/Powers_of_2.sh
|
UTF-8
| 188 | 3.4375 | 3 |
[] |
no_license
|
#!/bin/bash
factor=2;
power=1;
echo Enter how many terms you want
read n
printf '\n'
for (( i=1 ; i<=n ; i++ ))
do
power=$(( $power * $factor ))
echo 2^$i = $power
printf '\n'
done
| true |
d0aadcb49d50b0fdd57d04a9a322f603357f5085
|
Shell
|
metajinomics/automation
|
/Assembly/split.sh
|
UTF-8
| 384 | 2.609375 | 3 |
[] |
no_license
|
gunzip *.fastq.gz
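# Demultiplex interleaved FASTQ: paste flattens each 4-line record onto one line,
# then awk routes " 1:N" headers to *._R1_001.fastq and " 2:N" headers to
# *._R2_001.fastq; one command is generated per file and run via GNU parallel.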
for file in *.fastq;do echo "paste - - - - < $file | tee >(awk 'BEGIN{FS=\"\t\";OFS=\"\n\"}{if (match(\$1,\" 1:N\"))print \$1,\$2,\$3,\$4}' > $file._R1_001.fastq) | awk 'BEGIN{FS=\"\t\"; OFS=\"\n\"}{if (match(\$1,\" 2:N\"))print \$1,\$2,\$3,\$4}' > $file._R2_001.fastq";done > split-command.sh
cat split-command.sh | parallel
gzip *_R1_001.fastq
gzip *_R2_001.fastq
| true |
471c5f52d4986e89e617428716968c36d4d034d8
|
Shell
|
hirauchi0713/dotfiles
|
/zprofile
|
UTF-8
| 495 | 2.59375 | 3 |
[] |
no_license
|
#
# .zshrc
#
if [ -f ~/.zshrc ]; then
source ~/.zshrc
fi
#
# .zprofile_local
#
if [ -f ~/.zprofile_local ]; then
source ~/.zprofile_local
fi
#
# path
#
export PATH=$HOME/.local/bin:$PATH #for home local
#
# go
#
#export GOROOT=/usr/local/go
#export PATH=$PATH:$GOROOT/bin
#export GOPATH=$HOME/.go
#export PATH=$PATH:$GOPATH/bin
#export GO15VENDOREXPERIMENT=1 # for glide
#
# ruby(rbenv)
#
export PATH="$HOME/.rbenv/bin:$PATH"
if which rbenv > /dev/null; then
eval "$(rbenv init -)";
fi
| true |
406820189bbe388768c9e13341f83a961783e4ae
|
Shell
|
a2o/puppet-modules-a2o-essential
|
/modules/a2o_essential_linux_shorewall/templates/install-shorewall-core.sh
|
UTF-8
| 1,622 | 2.78125 | 3 |
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
###########################################################################
# a2o Essential Puppet Modules #
#-------------------------------------------------------------------------#
# Copyright (c) 2012 Bostjan Skufca #
#-------------------------------------------------------------------------#
# This source file is subject to version 2.0 of the Apache License, #
# that is bundled with this package in the file LICENSE, and is #
# available through the world-wide-web at the following url: #
# http://www.apache.org/licenses/LICENSE-2.0 #
#-------------------------------------------------------------------------#
# Authors: Bostjan Skufca <my_name [at] a2o {dot} si> #
###########################################################################
### Compile directory
export SRCROOT="<%= compileDir %>" &&
mkdir -p $SRCROOT &&
cd $SRCROOT &&
### Set versions and directories
export PVERSION_SHOREWALL="<%= softwareVersion %>" &&
export PVERSION_MAJ=`echo "$PVERSION_SHOREWALL" | cut -d'.' -f1,2` &&
export PVERSION_MED=`echo "$PVERSION_SHOREWALL" | cut -d'.' -f1,2,3` &&
### Shorewall Core
cd $SRCROOT && . ../build_functions.sh &&
export PNAME="shorewall-core" &&
export PVERSION="$PVERSION_SHOREWALL" &&
export PDIR="$PNAME-$PVERSION" &&
export PFILE="$PDIR.tgz" &&
export PURI="http://france.shorewall.net/pub/$PVERSION_MAJ/shorewall-$PVERSION_MED/$PFILE" &&
rm -rf $PDIR &&
GetUnpackCd &&
./install.sh &&
cd $SRCROOT &&
rm -rf $PDIR &&
true
| true |
1395ea454aa29464f93fdb16dc05cd9eb37581fa
|
Shell
|
anfrhana/SoalShiftSISOP20_modul1_D02
|
/soal2/soal2_wadaw.sh
|
UTF-8
| 350 | 3.171875 | 3 |
[] |
no_license
|
#!/bin/bash
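# Rename a .txt file by rotating its letters back by the file's
# modification hour (a Caesar-style shift), read via stat below.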
lower=abcdefghijklmnopqrstuvwxyz
lower=$lower$lower
upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ
upper=$upper$upper
name="${1%.txt}" # strip the .txt extension; tr -d '.txt' would delete every '.', 't' and 'x' character
#jam=$(date +"%k")
time=$(stat -c %y $1 | grep -oP '(?<=[^ ] ).*(?=:.*:)')
echo "$time"
rename=$(echo $name | tr "${upper:$time:26}${lower:$time:26}" "${upper:0:26}${lower:0:26}")
mv $1 $rename.txt
| true |
d3900636d8c7155d2aa8576eb2f08cd9267596b1
|
Shell
|
m-lab/etl-gardener
|
/create-parser-pool.sh
|
UTF-8
| 823 | 2.890625 | 3 |
[
"Apache-2.0"
] |
permissive
|
#!/bin/bash
#
# Configure cluster, network, firewall and node-pools for gardener and etl.
set -x
set -e
USAGE="$0 <project> <region>"
PROJECT=${1:?Please provide the GCP project id, e.g. mlab-sandbox: $USAGE}
REGION=${2:?Please provide the cluster region, e.g. us-central1: $USAGE}
gcloud config unset compute/zone
gcloud config set project $PROJECT
gcloud config set compute/region $REGION
gcloud config set container/cluster data-processing
gcloud container node-pools delete parser-pool-16 || true
gcloud container node-pools create parser-pool-16 \
--machine-type=n1-standard-16 \
--enable-autoscaling --num-nodes=0 --min-nodes=0 --max-nodes=2 \
--enable-autorepair --enable-autoupgrade \
--scopes storage-rw,compute-rw,datastore,cloud-platform \
--node-labels=parser-node=true,storage-rw=true
| true |
84135df9e95d51b67b94995c9eb04a99aef745dd
|
Shell
|
fatyanosa/WMT21
|
/prepare-flores101.sh
|
UTF-8
| 5,569 | 3.546875 | 4 |
[] |
no_license
|
#!/bin/bash
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
ROOT=$(dirname "$0")
SCRIPTS=$ROOT/fairseq/scripts
SPM_TRAIN=$SCRIPTS/spm_train.py
SPM_ENCODE=$SCRIPTS/spm_encode.py
BPESIZE=16384
ORIG=$ROOT/dataset/flores101_orig
DATA=$ROOT/dataset/flores101.jv_id_ms_tl_ta_en.bpe16k
mkdir -p "$ORIG" "$DATA"
TRAIN_FOLDER=$ORIG/small_task2_filt
VALID_FOLDER=$ORIG/flores101_dataset
TRAIN_MINLEN=1 # remove sentences with <1 BPE token
TRAIN_MAXLEN=250 # remove sentences with >250 BPE tokens
URLS=(
"data.statmt.org/wmt21/multilingual-task/small_task2_filt_v2.tar.gz"
"dl.fbaipublicfiles.com/flores101/dataset/flores101_dataset.tar.gz"
)
ARCHIVES=(
"small_task2_filt_v2.tar.gz"
"flores101_dataset.tar.gz"
)
# download and extract data
for ((i=0;i<${#URLS[@]};++i)); do
ARCHIVE=$ORIG/${ARCHIVES[i]}
if [ -f "$ARCHIVE" ]; then
echo "$ARCHIVE already exists, skipping download"
else
URL=${URLS[i]}
wget -P "$ORIG" "$URL"
if [ -f "$ARCHIVE" ]; then
echo "$URL successfully downloaded."
else
echo "$URL not successfully downloaded."
exit 1
fi
fi
FILE=${ARCHIVE%.tar.gz} # strip .tar.gz (the original ${ARCHIVE: -4} kept only the last 4 characters)
if [ -e "$FILE" ]; then
echo "$FILE already exists, skipping extraction"
else
tar -C "$ORIG" -xzvf "$ARCHIVE"
fi
done
echo "pre-processing train data..."
set -- en id jv ms ta tl
for SRC; do
shift
for TGT; do
for LANG in "${SRC}" "${TGT}"; do
FILES="$(find $TRAIN_FOLDER/ -name "*${SRC}-${TGT}.${LANG}*" -type f)"
echo "$FILES"
: > "$DATA/train.${SRC}-${TGT}.${LANG}" # truncate once, then append, so multiple matches are concatenated rather than overwritten
for FILE in $FILES; do
cat "$FILE" \
>> "$DATA/train.${SRC}-${TGT}.${LANG}"
done
done
done
done
echo "pre-processing valid data..."
set -- eng ind jav msa tam tgl
for SRC; do
shift
for TGT; do
echo $SRC
echo $TGT
SRC_FILES="$(find $VALID_FOLDER/ -name "*${SRC}*" -type f)"
TGT_FILES="$(find $VALID_FOLDER/ -name "*${TGT}*" -type f)"
echo $SRC_FILES
echo $TGT_FILES
if [[ "$SRC" == "eng" ]]; then
SRC_NAME=en
elif [[ "$SRC" == "ind" ]]; then
SRC_NAME=id
elif [[ "$SRC" == "jav" ]]; then
SRC_NAME=jv
elif [[ "$SRC" == "msa" ]]; then
SRC_NAME=ms
elif [[ "$SRC" == "tam" ]]; then
SRC_NAME=ta
elif [[ "$SRC" == "tgl" ]]; then
SRC_NAME=tl
fi
if [[ "$TGT" == "eng" ]]; then
TGT_NAME=en
elif [[ "$TGT" == "ind" ]]; then
TGT_NAME=id
elif [[ "$TGT" == "jav" ]]; then
TGT_NAME=jv
elif [[ "$TGT" == "msa" ]]; then
TGT_NAME=ms
elif [[ "$TGT" == "tam" ]]; then
TGT_NAME=ta
elif [[ "$TGT" == "tgl" ]]; then
TGT_NAME=tl
fi
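# A more compact alternative to the two chains above (sketch; needs bash 4+ associative arrays):
# declare -A ISO=( [eng]=en [ind]=id [jav]=jv [msa]=ms [tam]=ta [tgl]=tl )
# SRC_NAME=${ISO[$SRC]}; TGT_NAME=${ISO[$TGT]}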
i=0
for SRC_FILE in $SRC_FILES; do
cat "${SRC_FILE}" > "$DATA/valid${i}.${SRC_NAME}-${TGT_NAME}.${SRC_NAME}"
((i=i+1))
done
i=0
for TGT_FILE in $TGT_FILES; do
cat "${TGT_FILE}" > "$DATA/valid${i}.${SRC_NAME}-${TGT_NAME}.${TGT_NAME}"
((i=i+1))
done
done
done
# learn BPE with sentencepiece
TRAIN_FILES=$(find $DATA/ -name "train*" -type f | tr "\n" ",")
echo "learning joint BPE over ${TRAIN_FILES}..."
python "$SPM_TRAIN" \
--input=$TRAIN_FILES \
--model_prefix=$DATA/sentencepiece.bpe \
--vocab_size=$BPESIZE \
--character_coverage=0.9887 \
--model_type=bpe
# encode train/valid
echo "encoding train with learned BPE..."
set -- en id jv ms ta tl
for SRC; do
shift
for TGT; do
python "$SPM_ENCODE" \
--model "$DATA/sentencepiece.bpe.model" \
--output_format=piece \
--inputs $DATA/train.${SRC}-${TGT}.${SRC} $DATA/train.${SRC}-${TGT}.${TGT} \
--outputs $DATA/train.bpe.${SRC}-${TGT}.${SRC} $DATA/train.bpe.${SRC}-${TGT}.${TGT} \
--min-len $TRAIN_MINLEN --max-len $TRAIN_MAXLEN
done
done
echo "encoding valid with learned BPE..."
set -- en id jv ms ta tl
for SRC; do
shift
for TGT; do
echo $SRC
echo $TGT
for i in 0 1
do
python "$SPM_ENCODE" \
--model "$DATA/sentencepiece.bpe.model" \
--output_format=piece \
--inputs $DATA/valid${i}.${SRC}-${TGT}.${SRC} $DATA/valid${i}.${SRC}-${TGT}.${TGT} \
--outputs $DATA/valid${i}.bpe.${SRC}-${TGT}.${SRC} $DATA/valid${i}.bpe.${SRC}-${TGT}.${TGT}
done
done
done
# Binarize the dataset
tail -n +4 dataset/flores101.jv_id_ms_tl_ta_en.bpe16k/sentencepiece.bpe.vocab | cut -f1 | sed 's/$/ 100/g' > dataset/fairseq.vocab
cd fairseq
TEXT=$ROOT/../dataset/flores101.jv_id_ms_tl_ta_en.bpe16k
DATA=$ROOT/../dataset/data-bin/flores101.jv_id_ms_tl_ta_en.bpe16k
mkdir -p "$DATA"
set -- en id jv ms ta tl
for SRC; do
shift
for TGT; do
SRC_DICT="$(find $DATA/ -name "dict.${SRC}.txt" -type f)"
TGT_DICT="$(find $DATA/ -name "dict.${TGT}.txt" -type f)"
fairseq-preprocess --source-lang $SRC --target-lang $TGT --trainpref $TEXT/train.bpe.${SRC}-${TGT} --validpref $TEXT/valid0.bpe.${SRC}-${TGT},$TEXT/valid1.bpe.${SRC}-${TGT} --srcdict $ROOT/../dataset/fairseq.vocab --tgtdict $ROOT/../dataset/fairseq.vocab --destdir $DATA --workers 16
done
done
| true |