repo
stringlengths 7
41
| pr_number
int64 1
5.65k
| filename
stringlengths 7
92
| file_before
stringlengths 39
76.9k
| file_after
stringlengths 63
76.9k
|
---|---|---|---|---|
maximeh/lacie-uboot | 1 | lacie_uboot/ubootshell.py | #! /usr/bin/python -B
# -*- coding: utf-8 -*-
'''
ubootshell allow you to discuss with the netconsol of u-boot.
'''
# Author: Maxime Hadjinlian (C) 2013
# maxime.hadjinlian@gmail.com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from math import floor
import logging
import os
import readline
# for history and elaborate line editing when using raw_input
# see http://docs.python.org/library/functions.html#raw_input
from select import select
import socket
from struct import pack
import sys
from time import sleep
sys.dont_write_bytecode = True
from network import iface_info, find_free_ip, is_valid_mac, is_valid_ipv4, ipcomm_info
class Ubootshell(object):
'''
An instance of UBootshell is a session with the netconsole shell.
'''
def __init__(self):
'''Sets some defaults'''
# IP assigned to the target device (filled in by setup_network()).
self.ip_target = None
# Broadcast address of the local interface (filled in by setup_network()).
self.bcast_addr = None
# Target MAC; "00:00:00:00:00:00" means "catch the first device seen".
self.mac_target = None
self.send_port = 4446  # UDP port the LUMP packet is broadcast to
self.receive_port = 4445  # UDP port used by ipcomm_info() after reboot
self.uboot_port = 6666  # UDP port of the U-Boot netconsole
self.lump_timeout = 120  # retry budget for the LUMP loop (iterations)
self.script = None  # optional command-script path (see load_script())
self.do_wait = False  # wait for full reboot (see wait_at_reboot())
self.progress = False  # show a progress bar instead of command output
self.debug = False  # verbose error logging in invoke()
def load_script(self, path_file):
    '''
    If we don't want an interactive shell, but a script to be executed.

    Registers *path_file* for later execution by run().  A non-existent
    path is rejected with an error: the original code logged the error
    but recorded the path anyway, so run() crashed later on open().
    '''
    if not os.path.exists(path_file):
        logging.error("%s does not exists." % path_file)
        return
    self.script = path_file
def do_progress(self, new_progress):
'''
If set to true, we will print a progress bar instead of the output.
'''
# Stored as-is; only its truthiness is consulted by run()/invoke().
self.progress = new_progress
# Renders e.g. "[=======             ] 75%" in place (carriage return,
# no newline) until percent reaches 100.
def print_progress(self, width, percent):
    '''Draw or refresh a *width*-character progress bar at *percent*%.'''
    done = int(floor(width * (percent / 100.0)))
    todo = int(floor(width - done))
    bar = ''.join(['[', '=' * done, ' ' * todo, ']'])
    sys.stdout.write("%s %d%%\r" % (bar, percent))
    if percent >= 100:
        sys.stdout.write("\n")
    sys.stdout.flush()
def wait_at_reboot(self, wait):
'''
Set to true, we will wait for the device to reboot completely
'''
# Consulted by run(); waiting also requires a non-zero target MAC.
self.do_wait = wait
def setup_network(self, net_dict):
'''
Give a dict with the following values to setup your network :
{
'iface': 'ethX' # default : eth0
'bcast_addr' : '255.255.255.0' # The broadcast address if you need to set it
'mac_target' : '00:00:00:00:00:00' # The mac of your product
'ip_target' : '192.168.1.1' # The ip to assign to the product
}

Returns 1 on any validation error, None on success.  The dict is
mutated in place (missing keys are filled with defaults).
NOTE(review): the docstring documents a 'bcast_addr' key, but the code
never reads it -- the broadcast address always comes from iface_info().
'''
if ('mac_target' not in net_dict) or (net_dict['mac_target'] is None):
logging.info("WARNING : The first product to reboot will be catched !")
logging.info("It may not be yours if multiple product reboot at the "
"same time on your network.")
# All-zero MAC means "match whichever device answers first".
net_dict['mac_target'] = "00:00:00:00:00:00"
try:
ip, mac, netmask, bcast = iface_info(net_dict['iface'])
except (IOError, TypeError):
logging.error("Your network interface is not reachable."
" Is %s correct ?" % net_dict['iface'])
return 1
# This IP is used afterwards when TFTP'ing files
if ('ip_target' not in net_dict) or (net_dict['ip_target'] is None):
if sys.platform == "darwin":
# Auto-discovery is not available on OS X; an explicit IP is required.
logging.error("You need to specify an IP to assign to the device.")
return 1
net_dict['ip_target'] = find_free_ip(net_dict['iface'], ip, mac, netmask)
# Check MAC and IP value.
if not is_valid_mac(net_dict['mac_target']):
logging.error("Your MAC address is not in the proper format."
"\'00:00:00:00:00:00\' format is awaited."
"You gave %s" % net_dict['mac_target'])
return 1
self.mac_target = net_dict['mac_target']
if not is_valid_ipv4(bcast):
logging.error("Your Broadcast IP is not in the proper format."
"\'W.X.Y.Z\' format is awaited."
"You gave %s" % bcast)
return 1
self.bcast_addr = bcast
if not is_valid_ipv4(net_dict['ip_target']):
logging.error("Your product IP is not in the proper format."
"\'W.X.Y.Z\' format is awaited."
"You gave %s" % net_dict['ip_target'])
return 1
self.ip_target = net_dict['ip_target']
def send_lump(self):
'''
It will ask the users to reboot the target manually and then
it will send LUMP packet to a target during 60s.

Returns True once the "Marvell>> " prompt is received, False when the
retry budget runs out, None on bind failure or interruption.
NOTE(review): the budget is self.lump_timeout (120) loop iterations,
not literally 60 seconds -- confirm the intended duration.
'''
# Create an array with 6 cases, each one is a member (int) of the MAC
fields_macdest = [int(x, 16) for x in self.mac_target.split(':')]
# Create an array with 4 cases, each one is a member (int) of the IP
fields_ip = [int(x) for x in self.ip_target.split('.')]
# Note : The empty MAC are 8 bytes in length according to the reverse
# engineering done with WireShark. Don't know why exactly...
# The packet is a TLV-style blob of big-endian fields tagged with the
# ASCII strings "LUMP", "MACD", "MAC@", "IPS", "IP@", "MACS".
pkt = pack('!I' # LUMP
'L' # Length of LUMP
'I' # MACD
'L' # Length of MACD
'I' # MAC@
'L' # Length of MAC@ field
'2x' # fill space because MAC take only 6 bytes
'6s' # MAC address of target
'I' # IPS
'L' # Length of IPS
'I' # IP@
'L' # Length of IP@
'4s' # IP of the target
'I' # MACS
'L' # Length of MACS
'I' # MAC address of source
'L' # Length of MAC@
'8x', # Empty MAC
0x4C554D50, # LUMP
0x44,
0x4D414344, # MACD
0x10,
0x4D414340, # MAC
0x8,
pack('!6B', *fields_macdest), # int[] -> byte[]
0x49505300, # IPS
0x0C,
0x49504000, # IP
0x4,
pack('!4B', *fields_ip), # int[] -> byte[]
0x4D414353, # MACS
0x10,
0x4D414340, # MAC
0x8)
logging.debug("Sending some LUMP / Ctrl-C, "
"waiting for the NAS to start up")
logging.info("Please /!\HARD/!\ reboot the device /!\NOW/!\ ")
timeout = 0
socket.setdefaulttimeout(60)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
try:
sock.bind(('', self.uboot_port))
except socket.error, err:
logging.error("Couldn't be a udp server on port %d : %s",
self.uboot_port, err)
sock.close()
return None
lump_ok = False
while lump_ok is False and timeout < self.lump_timeout:
# Broadcast the LUMP, then break into U-Boot with a Ctrl-C byte.
sock.sendto(pkt, (self.bcast_addr, self.send_port))
sleep(0.2) # Wait for the device to process the LUMP
#Send Ctrl-C (Code ASCII 3 for EXT equivalent of SIGINT for Unix)
sock.sendto('\3', (self.bcast_addr, self.uboot_port))
srecv = select([sock], [], [], 1)
# data
if not srecv[0]:
# NOTE(review): this 'continue' skips the timeout increment below,
# so a silent network keeps the loop running indefinitely -- confirm.
continue
try:
serv_data = sock.recvfrom(1024)
# Ignore datagrams that do not originate from the target's IP.
if serv_data[1][0] != self.ip_target:
continue
serv_data = serv_data[0]
# check when prompt (Marvell>>) is available,
# then out to the next while to input command and send them !
if "Marvell>> " == serv_data:
lump_ok = True
break
except (socket.error, KeyboardInterrupt, SystemExit):
return None
timeout += 1
if timeout >= self.lump_timeout:
logging.debug("Sending LUMP for %ds, no response !",
self.lump_timeout)
lump_ok = False
sock.close()
return lump_ok
def invoke(self, cmd, display=True):
'''
send a cmd

Sends *cmd* to the netconsole and echoes the reply until the
"Marvell>> " prompt (or the Override question) comes back.
Returns 42 when the session should stay open, 0 when it is over
('exit'/'reset' sent, or the local port could not be bound).
'''
# Empty command, nothing to do here
if cmd == "":
return 42
exit_list = ['exit', 'reset']
override = 'Override Env parameters? (y/n)'
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if cmd in exit_list:
# Both commands reboot the device: fire and forget, no reply expected.
cmd = 'reset'
cmd = pack('!' + str(len(cmd)) + 's1s', cmd, '\x0A')
sock.sendto(cmd, (self.ip_target, self.uboot_port))
sock.close()
return 0
sock.settimeout(10.0)
try:
sock.bind(('', self.uboot_port))
except socket.error, err:
# NOTE(review): the message reports self.receive_port, but the bind
# above uses self.uboot_port -- the logged port number looks wrong.
logging.error("Can't open %d port. (Error : %s)",
self.receive_port, err)
sock.close()
return 0
#we want to send a cmd to the nas and get the reply in ans
#every command is completed by \n !
command = pack('!' + str(len(cmd)) + 's1s', cmd, '\x0A')
sock.sendto(command, (self.ip_target, self.uboot_port))
prompt = False
len_command = 0
# Don't try to wait for a prompt with bootm
if cmd == 'bootm':
# NOTE(review): only a bare 'bootm' matches here; 'bootm <addr>' with
# arguments still falls through to the prompt-wait loop (a later
# revision of this file uses "'bootm' in cmd" instead).
sock.close()
return 42
while prompt is False:
srecv = select([sock], [], [], 0.5)
# data
if not srecv[0]:
continue
try:
data = sock.recvfrom(1024)
# Ignore datagrams that do not come from the target.
if data[1][0] != self.ip_target:
continue
recv_data = data[0]
# check when prompt (Marvell>>) is available,
if ("Marvell>> " == recv_data or override == recv_data):
if override == recv_data:
print recv_data
prompt = True
# When sending a command U-Boot return the commands
# char by char, we do this so we don't display it.
elif len_command < len(command):
len_command += 1
else:
# to handle the printenv and other case
# when answer is given one letter at a time...
if display:
write = sys.stdout.write
write(str(recv_data))
sys.stdout.flush()
except (socket.error, KeyboardInterrupt, SystemExit) as err:
if self.debug:
# NOTE(review): reports self.receive_port although traffic
# actually flows on self.uboot_port.
logging.error("Sending command %s on %d : %s",
cmd, self.receive_port, err)
sock.close()
return 42
def run(self):
'''
Either we execute the script or we create an interactive shell
to the netconsole.

Returns 0 on success, 1 when the LUMP handshake or the post-reboot
IP discovery failed.
'''
if not self.send_lump():
logging.debug("LUMP was not sent/receveid by the target")
return 1
if self.script is not None:
# NOTE(review): 'r+' opens read/write although the file is only read.
with open(self.script, 'r+') as script:
script_cmd = script.readlines()
if self.progress:
# setup progress_bar
p_width = 60
# Python 2 integer division: per-command step is truncated.
p_pas = p_width / len(script_cmd)
p_percent = 0
for cmd in script_cmd:
if self.progress:
# update the bar
self.print_progress(p_width, p_percent)
p_percent += p_pas
# Skip blank lines and script comments.
if cmd == '\n' or cmd.startswith('#'):
continue
if not self.progress:
print cmd.strip() + " => ",
self.invoke(cmd.strip(), display=not self.progress)
sleep(1) # it seems uboot doesn't like being shaked a bit
if self.progress:
self.print_progress(p_width, 100)
# You can't wait if there is no MAC.
if self.do_wait and (self.mac_target != "00:00:00:00:00:00"):
# Some command output may be stuck in the pipe
sys.stdout.flush()
# WAIT FOR THE DEVICE TO BOOT
logging.info("Waiting for your product to reboot...")
sleep(60 * 7) # Wait 7mn, it should give the device time to boot.
# This is done to avoid spamming the network with packet while we are sure it is not necessary.
ip = ipcomm_info(self.receive_port, self.mac_target, self.ip_target)
if ip is None:
logging.info("Timeout : Unable to get your product IP.")
return 1
logging.info("Your product is available at %s" % ip)
return 0
# Interactive mode: loop until invoke() returns 0 ('exit'/'reset').
exit_code = 42
while(exit_code):
exit_code = self.invoke(raw_input("Marvell>> "), display=True)
return 0
def main():
''' launch everything

Parses the command line, configures logging and the network, then
hands control to Ubootshell.run().  Returns 0 on success, 1 on
network-setup failure.
'''
# Imported locally so importing this module stays side-effect free.
import argparse
from argparse import RawTextHelpFormatter
parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter)
parser.add_argument('script', metavar='file', type=str, nargs='?',
help=argparse.SUPPRESS)
parser.add_argument("-m", "--mac", dest="mac", action="store",
default=None,
help="Address MAC of the targeted device "
"(00:00:00:00:00:00)"
)
parser.add_argument("-i", "--iface", dest="iface", action="store",
default="eth0",
help="Interface to use to send LUMP packet to.\n"
"Default is eth0.\n"
)
parser.add_argument("--ip", dest="force_ip", action="store",
default=None,
help="Specify the IP address to assign to the device."
)
parser.add_argument("-p", "--progress", dest="progress",
action="store_const", default=False, const=True,
help="Print a pretty progress bar,"
" use with a script shebang only.")
parser.add_argument("-w", "--wait", dest="wait", action="store_const",
default=False, const=True,
help="Wait for the product to boot.\n"
"Note : Require the -m/--mac option to be set.\n")
parser.add_argument("-D", "--debug", dest="loglevel", action="store_const",
const=logging.DEBUG, help="Output debugging information")
session = Ubootshell()
# -D is scanned by hand so logging is configured before parse_args().
# NOTE(review): options.loglevel is set by argparse but never read.
if '-D' in sys.argv or '--debug' in sys.argv:
session.debug = True
logging.basicConfig(level=logging.DEBUG, format='%(message)s')
else:
logging.basicConfig(level=logging.INFO, format='%(message)s')
options = parser.parse_args()
setup = {'mac_target': options.mac, 'iface': options.iface}
if options.force_ip is not None:
setup['ip_target'] = options.force_ip
# setup_network() returns 1 (truthy) on failure, None on success.
if session.setup_network(setup):
return 1
session.wait_at_reboot(options.wait)
session.do_progress(options.progress)
if options.script is not None and os.path.isfile(options.script):
session.load_script(options.script)
session.run()
return 0
if __name__ == '__main__':
if sys.platform != "win32":
# Root is required -- presumably for interface queries / binding done
# by the network helpers; confirm against network.iface_info.
if os.geteuid() != 0:
print "You must be administrator/root to run this program."
sys.exit(1)
try:
sys.exit(main())
except (KeyboardInterrupt, EOFError, SystemExit, KeyError):
# Exit quietly on Ctrl-C / EOF.  NOTE(review): swallowing KeyError
# here may also hide genuine bugs.
pass
| #! /usr/bin/python -B
# -*- coding: utf-8 -*-
'''
ubootshell allow you to discuss with the netconsol of u-boot.
'''
# Author: Maxime Hadjinlian (C) 2013
# maxime.hadjinlian@gmail.com
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from math import floor
import logging
import os
import readline
# for history and elaborate line editing when using raw_input
# see http://docs.python.org/library/functions.html#raw_input
from select import select
import socket
from struct import pack
import sys
from time import sleep
sys.dont_write_bytecode = True
from network import iface_info, find_free_ip, is_valid_mac, is_valid_ipv4, ipcomm_info
class Ubootshell(object):
'''
An instance of UBootshell is a session with the netconsole shell.
'''
def __init__(self):
'''Sets some defaults'''
# IP to assign to the target (set by setup_network()).
self.ip_target = None
# Broadcast address of the local interface (set by setup_network()).
self.bcast_addr = None
# Target MAC; all-zero means "catch the first device that reboots".
self.mac_target = None
self.send_port = 4446  # UDP destination port for the LUMP broadcast
self.receive_port = 4445  # UDP port for ipcomm_info() after reboot
self.uboot_port = 6666  # UDP port of the U-Boot netconsole
self.lump_timeout = 120  # retry budget (iterations) for send_lump()
self.script = None  # optional script path (load_script())
self.do_wait = False  # wait for reboot to finish (wait_at_reboot())
self.progress = False  # progress bar instead of command output
self.debug = False  # verbose error logging in invoke()
def load_script(self, path_file):
    '''
    If we don't want an interactive shell, but a script to be executed.

    Registers *path_file* for later execution by run().  A non-existent
    path is rejected with an error: the original code logged the error
    but recorded the path anyway, so run() crashed later on open().
    '''
    if not os.path.exists(path_file):
        logging.error("%s does not exists." % path_file)
        return
    self.script = path_file
def do_progress(self, new_progress):
'''
If set to true, we will print a progress bar instead of the output.
'''
# Stored as-is; only its truthiness is consulted by run()/invoke().
self.progress = new_progress
# Renders e.g. "[=======             ] 75%" in place (carriage return,
# no newline) until percent reaches 100.
def print_progress(self, width, percent):
    '''Draw or refresh a *width*-character progress bar at *percent*%.'''
    done = int(floor(width * (percent / 100.0)))
    todo = int(floor(width - done))
    bar = ''.join(['[', '=' * done, ' ' * todo, ']'])
    sys.stdout.write("%s %d%%\r" % (bar, percent))
    if percent >= 100:
        sys.stdout.write("\n")
    sys.stdout.flush()
def wait_at_reboot(self, wait):
'''
Set to true, we will wait for the device to reboot completely
'''
# Consulted by run(); waiting also requires a non-zero target MAC.
self.do_wait = wait
def setup_network(self, net_dict):
'''
Give a dict with the following values to setup your network :
{
'iface': 'ethX' # default : eth0
'bcast_addr' : '255.255.255.0' # The broadcast address if you need to set it
'mac_target' : '00:00:00:00:00:00' # The mac of your product
'ip_target' : '192.168.1.1' # The ip to assign to the product
}

Returns 1 on any validation error, None on success.  The dict is
mutated in place (missing keys are filled with defaults).
NOTE(review): the docstring documents a 'bcast_addr' key, but the code
never reads it -- the broadcast address always comes from iface_info().
'''
if ('mac_target' not in net_dict) or (net_dict['mac_target'] is None):
logging.info("WARNING : The first product to reboot will be catched !")
logging.info("It may not be yours if multiple product reboot at the "
"same time on your network.")
# All-zero MAC means "match whichever device answers first".
net_dict['mac_target'] = "00:00:00:00:00:00"
try:
ip, mac, netmask, bcast = iface_info(net_dict['iface'])
except (IOError, TypeError):
logging.error("Your network interface is not reachable."
" Is %s correct ?" % net_dict['iface'])
return 1
# This IP is used afterwards when TFTP'ing files
if ('ip_target' not in net_dict) or (net_dict['ip_target'] is None):
if sys.platform == "darwin":
# Auto-discovery is not available on OS X; an explicit IP is required.
logging.error("You need to specify an IP to assign to the device.")
return 1
net_dict['ip_target'] = find_free_ip(net_dict['iface'], ip, mac, netmask)
# Check MAC and IP value.
if not is_valid_mac(net_dict['mac_target']):
logging.error("Your MAC address is not in the proper format."
"\'00:00:00:00:00:00\' format is awaited."
"You gave %s" % net_dict['mac_target'])
return 1
self.mac_target = net_dict['mac_target']
if not is_valid_ipv4(bcast):
logging.error("Your Broadcast IP is not in the proper format."
"\'W.X.Y.Z\' format is awaited."
"You gave %s" % bcast)
return 1
self.bcast_addr = bcast
if not is_valid_ipv4(net_dict['ip_target']):
logging.error("Your product IP is not in the proper format."
"\'W.X.Y.Z\' format is awaited."
"You gave %s" % net_dict['ip_target'])
return 1
self.ip_target = net_dict['ip_target']
def send_lump(self):
'''
It will ask the users to reboot the target manually and then
it will send LUMP packet to a target during 60s.

Returns True once the "Marvell>> " prompt is received, False when the
retry budget runs out, None on bind failure or interruption.
NOTE(review): the budget is self.lump_timeout (120) loop iterations,
not literally 60 seconds -- confirm the intended duration.
'''
# Create an array with 6 cases, each one is a member (int) of the MAC
fields_macdest = [int(x, 16) for x in self.mac_target.split(':')]
# Create an array with 4 cases, each one is a member (int) of the IP
fields_ip = [int(x) for x in self.ip_target.split('.')]
# Note : The empty MAC are 8 bytes in length according to the reverse
# engineering done with WireShark. Don't know why exactly...
# The packet is a TLV-style blob of big-endian fields tagged with the
# ASCII strings "LUMP", "MACD", "MAC@", "IPS", "IP@", "MACS".
pkt = pack('!I' # LUMP
'L' # Length of LUMP
'I' # MACD
'L' # Length of MACD
'I' # MAC@
'L' # Length of MAC@ field
'2x' # fill space because MAC take only 6 bytes
'6s' # MAC address of target
'I' # IPS
'L' # Length of IPS
'I' # IP@
'L' # Length of IP@
'4s' # IP of the target
'I' # MACS
'L' # Length of MACS
'I' # MAC address of source
'L' # Length of MAC@
'8x', # Empty MAC
0x4C554D50, # LUMP
0x44,
0x4D414344, # MACD
0x10,
0x4D414340, # MAC
0x8,
pack('!6B', *fields_macdest), # int[] -> byte[]
0x49505300, # IPS
0x0C,
0x49504000, # IP
0x4,
pack('!4B', *fields_ip), # int[] -> byte[]
0x4D414353, # MACS
0x10,
0x4D414340, # MAC
0x8)
logging.debug("Sending some LUMP / Ctrl-C, "
"waiting for the NAS to start up")
logging.info("Please /!\HARD/!\ reboot the device /!\NOW/!\ ")
timeout = 0
socket.setdefaulttimeout(60)
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
try:
sock.bind(('', self.uboot_port))
except socket.error, err:
logging.error("Couldn't be a udp server on port %d : %s",
self.uboot_port, err)
sock.close()
return None
lump_ok = False
while lump_ok is False and timeout < self.lump_timeout:
# Broadcast the LUMP, then break into U-Boot with a Ctrl-C byte.
sock.sendto(pkt, (self.bcast_addr, self.send_port))
sleep(0.2) # Wait for the device to process the LUMP
#Send Ctrl-C (Code ASCII 3 for EXT equivalent of SIGINT for Unix)
sock.sendto('\3', (self.bcast_addr, self.uboot_port))
srecv = select([sock], [], [], 1)
# data
if not srecv[0]:
# NOTE(review): this 'continue' skips the timeout increment below,
# so a silent network keeps the loop running indefinitely -- confirm.
continue
try:
serv_data = sock.recvfrom(1024)
# Ignore datagrams that do not originate from the target's IP.
if serv_data[1][0] != self.ip_target:
continue
serv_data = serv_data[0]
# check when prompt (Marvell>>) is available,
# then out to the next while to input command and send them !
if "Marvell>> " == serv_data:
lump_ok = True
break
except (socket.error, KeyboardInterrupt, SystemExit):
return None
timeout += 1
if timeout >= self.lump_timeout:
logging.debug("Sending LUMP for %ds, no response !",
self.lump_timeout)
lump_ok = False
sock.close()
return lump_ok
def invoke(self, cmd, display=True):
'''
send a cmd

Sends *cmd* to the netconsole and echoes the reply until the
"Marvell>> " prompt (or the Override question) comes back.
Returns 42 when the session should stay open, 0 when it is over
('exit'/'reset' sent, or the local port could not be bound).
'''
# Empty command, nothing to do here
if cmd == "":
return 42
exit_list = ['exit', 'reset']
override = 'Override Env parameters? (y/n)'
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if cmd in exit_list:
# Both commands reboot the device: fire and forget, no reply expected.
cmd = 'reset'
cmd = pack('!' + str(len(cmd)) + 's1s', cmd, '\x0A')
sock.sendto(cmd, (self.ip_target, self.uboot_port))
sock.close()
return 0
sock.settimeout(10.0)
try:
sock.bind(('', self.uboot_port))
except socket.error, err:
# NOTE(review): the message reports self.receive_port, but the bind
# above uses self.uboot_port -- the logged port number looks wrong.
logging.error("Can't open %d port. (Error : %s)",
self.receive_port, err)
sock.close()
return 0
#we want to send a cmd to the nas and get the reply in ans
#every command is completed by \n !
command = pack('!' + str(len(cmd)) + 's1s', cmd, '\x0A')
sock.sendto(command, (self.ip_target, self.uboot_port))
prompt = False
len_command = 0
# Don't try to wait for a prompt with bootm
# Substring match so 'bootm <addr>' is caught too.  NOTE(review): it
# also matches any command that merely contains 'bootm' -- confirm.
if 'bootm' in cmd:
sock.close()
return 42
while prompt is False:
srecv = select([sock], [], [], 0.5)
# data
if not srecv[0]:
continue
try:
data = sock.recvfrom(1024)
# Ignore datagrams that do not come from the target.
if data[1][0] != self.ip_target:
continue
recv_data = data[0]
# check when prompt (Marvell>>) is available,
if ("Marvell>> " == recv_data or override == recv_data):
if override == recv_data:
print recv_data
prompt = True
# When sending a command U-Boot return the commands
# char by char, we do this so we don't display it.
elif len_command < len(command):
len_command += 1
else:
# to handle the printenv and other case
# when answer is given one letter at a time...
if display:
write = sys.stdout.write
write(str(recv_data))
sys.stdout.flush()
except (socket.error, KeyboardInterrupt, SystemExit) as err:
if self.debug:
# NOTE(review): reports self.receive_port although traffic
# actually flows on self.uboot_port.
logging.error("Sending command %s on %d : %s",
cmd, self.receive_port, err)
sock.close()
return 42
def run(self):
'''
Either we execute the script or we create an interactive shell
to the netconsole.

Returns 0 on success, 1 when the LUMP handshake or the post-reboot
IP discovery failed.
'''
if not self.send_lump():
logging.debug("LUMP was not sent/receveid by the target")
return 1
if self.script is not None:
# NOTE(review): 'r+' opens read/write although the file is only read.
with open(self.script, 'r+') as script:
script_cmd = script.readlines()
if self.progress:
# setup progress_bar
p_width = 60
# Python 2 integer division: per-command step is truncated.
p_pas = p_width / len(script_cmd)
p_percent = 0
for cmd in script_cmd:
if self.progress:
# update the bar
self.print_progress(p_width, p_percent)
p_percent += p_pas
# Skip blank lines and script comments.
if cmd == '\n' or cmd.startswith('#'):
continue
if not self.progress:
print cmd.strip() + " => ",
self.invoke(cmd.strip(), display=not self.progress)
sleep(1) # it seems uboot doesn't like being shaked a bit
if self.progress:
self.print_progress(p_width, 100)
# You can't wait if there is no MAC.
if self.do_wait and (self.mac_target != "00:00:00:00:00:00"):
# Some command output may be stuck in the pipe
sys.stdout.flush()
# WAIT FOR THE DEVICE TO BOOT
logging.info("Waiting for your product to reboot...")
sleep(60 * 7) # Wait 7mn, it should give the device time to boot.
# This is done to avoid spamming the network with packet while we are sure it is not necessary.
ip = ipcomm_info(self.receive_port, self.mac_target, self.ip_target)
if ip is None:
logging.info("Timeout : Unable to get your product IP.")
return 1
logging.info("Your product is available at %s" % ip)
return 0
# Interactive mode: loop until invoke() returns 0 ('exit'/'reset').
exit_code = 42
while(exit_code):
exit_code = self.invoke(raw_input("Marvell>> "), display=True)
return 0
def main():
''' launch everything

Parses the command line, configures logging and the network, then
hands control to Ubootshell.run().  Returns 0 on success, 1 on
network-setup failure.
'''
# Imported locally so importing this module stays side-effect free.
import argparse
from argparse import RawTextHelpFormatter
parser = argparse.ArgumentParser(formatter_class=RawTextHelpFormatter)
parser.add_argument('script', metavar='file', type=str, nargs='?',
help=argparse.SUPPRESS)
parser.add_argument("-m", "--mac", dest="mac", action="store",
default=None,
help="Address MAC of the targeted device "
"(00:00:00:00:00:00)"
)
parser.add_argument("-i", "--iface", dest="iface", action="store",
default="eth0",
help="Interface to use to send LUMP packet to.\n"
"Default is eth0.\n"
)
parser.add_argument("--ip", dest="force_ip", action="store",
default=None,
help="Specify the IP address to assign to the device."
)
parser.add_argument("-p", "--progress", dest="progress",
action="store_const", default=False, const=True,
help="Print a pretty progress bar,"
" use with a script shebang only.")
parser.add_argument("-w", "--wait", dest="wait", action="store_const",
default=False, const=True,
help="Wait for the product to boot.\n"
"Note : Require the -m/--mac option to be set.\n")
parser.add_argument("-D", "--debug", dest="loglevel", action="store_const",
const=logging.DEBUG, help="Output debugging information")
session = Ubootshell()
# -D is scanned by hand so logging is configured before parse_args().
# NOTE(review): options.loglevel is set by argparse but never read.
if '-D' in sys.argv or '--debug' in sys.argv:
session.debug = True
logging.basicConfig(level=logging.DEBUG, format='%(message)s')
else:
logging.basicConfig(level=logging.INFO, format='%(message)s')
options = parser.parse_args()
setup = {'mac_target': options.mac, 'iface': options.iface}
if options.force_ip is not None:
setup['ip_target'] = options.force_ip
# setup_network() returns 1 (truthy) on failure, None on success.
if session.setup_network(setup):
return 1
session.wait_at_reboot(options.wait)
session.do_progress(options.progress)
if options.script is not None and os.path.isfile(options.script):
session.load_script(options.script)
session.run()
return 0
if __name__ == '__main__':
if sys.platform != "win32":
# Root is required -- presumably for interface queries / binding done
# by the network helpers; confirm against network.iface_info.
if os.geteuid() != 0:
print "You must be administrator/root to run this program."
sys.exit(1)
try:
sys.exit(main())
except (KeyboardInterrupt, EOFError, SystemExit, KeyError):
# Exit quietly on Ctrl-C / EOF.  NOTE(review): swallowing KeyError
# here may also hide genuine bugs.
pass
|
jbevain/cecil | 949 | Mono.Cecil/BaseAssemblyResolver.cs | //
// Author:
// Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using Mono.Collections.Generic;
namespace Mono.Cecil {
public delegate AssemblyDefinition AssemblyResolveEventHandler (object sender, AssemblyNameReference reference);
// Event payload for AssemblyResolveEventHandler: carries the reference
// that is being resolved.
public sealed class AssemblyResolveEventArgs : EventArgs {
readonly AssemblyNameReference reference;
// The assembly name reference being resolved.
public AssemblyNameReference AssemblyReference {
get { return reference; }
}
public AssemblyResolveEventArgs (AssemblyNameReference reference)
{
this.reference = reference;
}
}
#if !NET_CORE
[Serializable]
#endif
// Thrown when every resolution strategy (search dirs, TPA/GAC, framework
// dirs, ResolveFailure subscribers) fails for a given reference.
public sealed class AssemblyResolutionException : FileNotFoundException {
readonly AssemblyNameReference reference;
// The reference that could not be resolved.
public AssemblyNameReference AssemblyReference {
get { return reference; }
}
public AssemblyResolutionException (AssemblyNameReference reference)
: this (reference, null)
{
}
public AssemblyResolutionException (AssemblyNameReference reference, Exception innerException)
: base (string.Format ("Failed to resolve assembly: '{0}'", reference), innerException)
{
this.reference = reference;
}
#if !NET_CORE
// Serialization constructor required because the type is [Serializable].
AssemblyResolutionException (
System.Runtime.Serialization.SerializationInfo info,
System.Runtime.Serialization.StreamingContext context)
: base (info, context)
{
}
#endif
}
public abstract class BaseAssemblyResolver : IAssemblyResolver {
static readonly bool on_mono = Type.GetType ("Mono.Runtime") != null;
readonly Collection<string> directories;
#if NET_CORE
// Maps file names of available trusted platform assemblies to their full paths.
// Internal for testing.
internal static readonly Lazy<Dictionary<string, string>> TrustedPlatformAssemblies = new Lazy<Dictionary<string, string>> (CreateTrustedPlatformAssemblyMap);
#else
Collection<string> gac_paths;
#endif
// Appends a directory to the list probed first by Resolve().
public void AddSearchDirectory (string directory)
{
directories.Add (directory);
}
// Removes a previously registered search directory (no-op if absent).
public void RemoveSearchDirectory (string directory)
{
directories.Remove (directory);
}
// Returns a defensive copy of the registered search directories, so
// callers cannot mutate the resolver's internal list.
public string [] GetSearchDirectories ()
{
var directories = new string [this.directories.size];
Array.Copy (this.directories.items, directories, directories.Length);
return directories;
}
public event AssemblyResolveEventHandler ResolveFailure;
// Default search path: the current directory and its "bin" subdirectory.
protected BaseAssemblyResolver ()
{
directories = new Collection<string> (2) { ".", "bin" };
}
// Reads the module at 'file' and returns its assembly, wiring this
// resolver into the parameters when the caller supplied none.
AssemblyDefinition GetAssembly (string file, ReaderParameters parameters)
{
if (parameters.AssemblyResolver == null)
parameters.AssemblyResolver = this;
return ModuleDefinition.ReadModule (file, parameters).Assembly;
}
// Convenience overload: resolve with default ReaderParameters.
public virtual AssemblyDefinition Resolve (AssemblyNameReference name)
{
return Resolve (name, new ReaderParameters ());
}
// Resolves 'name' by probing, in order: the registered search directories,
// then (netcore) the trusted platform assemblies, or (full framework) the
// framework directory for version-less references, the mscorlib special
// case, the GAC, and the framework directory again.  As a last resort the
// ResolveFailure event may supply the assembly; otherwise throws.
public virtual AssemblyDefinition Resolve (AssemblyNameReference name, ReaderParameters parameters)
{
Mixin.CheckName (name);
Mixin.CheckParameters (parameters);
var assembly = SearchDirectory (name, directories, parameters);
if (assembly != null)
return assembly;
if (name.IsRetargetable) {
// if the reference is retargetable, zero it
name = new AssemblyNameReference (name.Name, Mixin.ZeroVersion) {
PublicKeyToken = Empty<byte>.Array,
};
}
#if NET_CORE
assembly = SearchTrustedPlatformAssemblies (name, parameters);
if (assembly != null)
return assembly;
#else
var framework_dir = Path.GetDirectoryName (typeof (object).Module.FullyQualifiedName);
// On Mono, facade assemblies live in a "Facades" subdirectory.
var framework_dirs = on_mono
? new [] { framework_dir, Path.Combine (framework_dir, "Facades") }
: new [] { framework_dir };
if (IsZero (name.Version)) {
assembly = SearchDirectory (name, framework_dirs, parameters);
if (assembly != null)
return assembly;
}
if (name.Name == "mscorlib") {
assembly = GetCorlib (name, parameters);
if (assembly != null)
return assembly;
}
assembly = GetAssemblyInGac (name, parameters);
if (assembly != null)
return assembly;
assembly = SearchDirectory (name, framework_dirs, parameters);
if (assembly != null)
return assembly;
#endif
// Last chance: let ResolveFailure subscribers provide the assembly.
if (ResolveFailure != null) {
assembly = ResolveFailure (this, name);
if (assembly != null)
return assembly;
}
throw new AssemblyResolutionException (name);
}
#if NET_CORE
// Looks 'name' up in the TRUSTED_PLATFORM_ASSEMBLIES map; WinRT
// references are never resolved this way.
AssemblyDefinition SearchTrustedPlatformAssemblies (AssemblyNameReference name, ReaderParameters parameters)
{
if (name.IsWindowsRuntime)
return null;
if (TrustedPlatformAssemblies.Value.TryGetValue (name.Name, out string path))
return GetAssembly (path, parameters);
return null;
}
// Builds a case-insensitive map from assembly simple name (file name
// WITHOUT extension) to full path, from the runtime's
// TRUSTED_PLATFORM_ASSEMBLIES app-domain data.  Only .dll entries are
// kept; any failure reading the data yields an empty map.
static Dictionary<string, string> CreateTrustedPlatformAssemblyMap ()
{
var result = new Dictionary<string, string> (StringComparer.OrdinalIgnoreCase);
string paths;
try {
paths = (string) AppDomain.CurrentDomain.GetData ("TRUSTED_PLATFORM_ASSEMBLIES");
} catch {
paths = null;
}
if (paths == null)
return result;
foreach (var path in paths.Split (Path.PathSeparator))
if (string.Equals (Path.GetExtension (path), ".dll", StringComparison.OrdinalIgnoreCase))
result [Path.GetFileNameWithoutExtension (path)] = path;
return result;
}
#endif
// Probes each directory for '<name>.<ext>' (".winmd"/".dll" for WinRT
// references, ".exe"/".dll" otherwise).  Files that exist but are not
// valid PE images are skipped rather than failing the whole search.
protected virtual AssemblyDefinition SearchDirectory (AssemblyNameReference name, IEnumerable<string> directories, ReaderParameters parameters)
{
var extensions = name.IsWindowsRuntime ? new [] { ".winmd", ".dll" } : new [] { ".exe", ".dll" };
foreach (var directory in directories) {
foreach (var extension in extensions) {
string file = Path.Combine (directory, name.Name + extension);
if (!File.Exists (file))
continue;
try {
return GetAssembly (file, parameters);
} catch (System.BadImageFormatException) {
continue;
}
}
}
return null;
}
// A version counts as "zero" only when all four components are exactly 0
// (a Version built without Build/Revision has them as -1, so it is not zero).
static bool IsZero (Version version)
{
	if (version.Major != 0 || version.Minor != 0)
		return false;
	return version.Build == 0 && version.Revision == 0;
}
#if !NET_CORE
// Resolves mscorlib for a specific version.  If the requested version is
// the running one (or zero), the loaded corlib is reused; otherwise the
// matching framework version directory is derived from the running
// corlib's location and probed for mscorlib.dll.
AssemblyDefinition GetCorlib (AssemblyNameReference reference, ReaderParameters parameters)
{
var version = reference.Version;
var corlib = typeof (object).Assembly.GetName ();
if (corlib.Version == version || IsZero (version))
return GetAssembly (typeof (object).Module.FullyQualifiedName, parameters);
// Grandparent of the running mscorlib: the directory holding all the
// per-version framework folders.
var path = Directory.GetParent (
Directory.GetParent (
typeof (object).Module.FullyQualifiedName).FullName
).FullName;
if (on_mono) {
if (version.Major == 1)
path = Path.Combine (path, "1.0");
else if (version.Major == 2) {
// 2.x with MajorRevision 5 is the Silverlight/Moonlight profile.
if (version.MajorRevision == 5)
path = Path.Combine (path, "2.1");
else
path = Path.Combine (path, "2.0");
} else if (version.Major == 4)
path = Path.Combine (path, "4.0");
else
throw new NotSupportedException ("Version not supported: " + version);
} else {
switch (version.Major) {
case 1:
// MajorRevision 3300 distinguishes .NET 1.0 from 1.1.
if (version.MajorRevision == 3300)
path = Path.Combine (path, "v1.0.3705");
else
path = Path.Combine (path, "v1.1.4322");
break;
case 2:
path = Path.Combine (path, "v2.0.50727");
break;
case 4:
path = Path.Combine (path, "v4.0.30319");
break;
default:
throw new NotSupportedException ("Version not supported: " + version);
}
}
var file = Path.Combine (path, "mscorlib.dll");
if (File.Exists (file))
return GetAssembly (file, parameters);
// Mono also ships API-only reference assemblies in "<version>-api".
if (on_mono && Directory.Exists (path + "-api")) {
file = Path.Combine (path + "-api", "mscorlib.dll");
if (File.Exists (file))
return GetAssembly (file, parameters);
}
return null;
}
static Collection<string> GetGacPaths ()
{
if (on_mono)
return GetDefaultMonoGacPaths ();
var paths = new Collection<string> (2);
var windir = Environment.GetEnvironmentVariable ("WINDIR");
if (windir == null)
return paths;
paths.Add (Path.Combine (windir, "assembly"));
paths.Add (Path.Combine (windir, Path.Combine ("Microsoft.NET", "assembly")));
return paths;
}
static Collection<string> GetDefaultMonoGacPaths ()
{
var paths = new Collection<string> (1);
var gac = GetCurrentMonoGac ();
if (gac != null)
paths.Add (gac);
var gac_paths_env = Environment.GetEnvironmentVariable ("MONO_GAC_PREFIX");
if (string.IsNullOrEmpty (gac_paths_env))
return paths;
var prefixes = gac_paths_env.Split (Path.PathSeparator);
foreach (var prefix in prefixes) {
if (string.IsNullOrEmpty (prefix))
continue;
var gac_path = Path.Combine (Path.Combine (Path.Combine (prefix, "lib"), "mono"), "gac");
if (Directory.Exists (gac_path) && !paths.Contains (gac))
paths.Add (gac_path);
}
return paths;
}
static string GetCurrentMonoGac ()
{
return Path.Combine (
Directory.GetParent (
Path.GetDirectoryName (typeof (object).Module.FullyQualifiedName)).FullName,
"gac");
}
AssemblyDefinition GetAssemblyInGac (AssemblyNameReference reference, ReaderParameters parameters)
{
if (reference.PublicKeyToken == null || reference.PublicKeyToken.Length == 0)
return null;
if (gac_paths == null)
gac_paths = GetGacPaths ();
if (on_mono)
return GetAssemblyInMonoGac (reference, parameters);
return GetAssemblyInNetGac (reference, parameters);
}
AssemblyDefinition GetAssemblyInMonoGac (AssemblyNameReference reference, ReaderParameters parameters)
{
for (int i = 0; i < gac_paths.Count; i++) {
var gac_path = gac_paths [i];
var file = GetAssemblyFile (reference, string.Empty, gac_path);
if (File.Exists (file))
return GetAssembly (file, parameters);
}
return null;
}
AssemblyDefinition GetAssemblyInNetGac (AssemblyNameReference reference, ReaderParameters parameters)
{
var gacs = new [] { "GAC_MSIL", "GAC_32", "GAC_64", "GAC" };
var prefixes = new [] { string.Empty, "v4.0_" };
for (int i = 0; i < gac_paths.Count; i++) {
for (int j = 0; j < gacs.Length; j++) {
var gac = Path.Combine (gac_paths [i], gacs [j]);
var file = GetAssemblyFile (reference, prefixes [i], gac);
if (Directory.Exists (gac) && File.Exists (file))
return GetAssembly (file, parameters);
}
}
return null;
}
static string GetAssemblyFile (AssemblyNameReference reference, string prefix, string gac)
{
var gac_folder = new StringBuilder ()
.Append (prefix)
.Append (reference.Version)
.Append ("__");
for (int i = 0; i < reference.PublicKeyToken.Length; i++)
gac_folder.Append (reference.PublicKeyToken [i].ToString ("x2"));
return Path.Combine (
Path.Combine (
Path.Combine (gac, reference.Name), gac_folder.ToString ()),
reference.Name + ".dll");
}
#endif
public void Dispose ()
{
Dispose (true);
GC.SuppressFinalize (this);
}
protected virtual void Dispose (bool disposing)
{
}
}
}
| //
// Author:
// Jb Evain (jbevain@gmail.com)
//
// Copyright (c) 2008 - 2015 Jb Evain
// Copyright (c) 2008 - 2011 Novell, Inc.
//
// Licensed under the MIT/X11 license.
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using System.Text;
using Mono.Collections.Generic;
namespace Mono.Cecil {
public delegate AssemblyDefinition AssemblyResolveEventHandler (object sender, AssemblyNameReference reference);
public sealed class AssemblyResolveEventArgs : EventArgs {
readonly AssemblyNameReference reference;
public AssemblyNameReference AssemblyReference {
get { return reference; }
}
public AssemblyResolveEventArgs (AssemblyNameReference reference)
{
this.reference = reference;
}
}
#if !NET_CORE
[Serializable]
#endif
public sealed class AssemblyResolutionException : FileNotFoundException {
readonly AssemblyNameReference reference;
public AssemblyNameReference AssemblyReference {
get { return reference; }
}
public AssemblyResolutionException (AssemblyNameReference reference)
: this (reference, null)
{
}
public AssemblyResolutionException (AssemblyNameReference reference, Exception innerException)
: base (string.Format ("Failed to resolve assembly: '{0}'", reference), innerException)
{
this.reference = reference;
}
#if !NET_CORE
AssemblyResolutionException (
System.Runtime.Serialization.SerializationInfo info,
System.Runtime.Serialization.StreamingContext context)
: base (info, context)
{
}
#endif
}
public abstract class BaseAssemblyResolver : IAssemblyResolver {
static readonly bool on_mono = Type.GetType ("Mono.Runtime") != null;
readonly Collection<string> directories;
#if NET_CORE
// Maps file names of available trusted platform assemblies to their full paths.
// Internal for testing.
internal static readonly Lazy<Dictionary<string, string>> TrustedPlatformAssemblies = new Lazy<Dictionary<string, string>> (CreateTrustedPlatformAssemblyMap);
#else
Collection<string> gac_paths;
#endif
public void AddSearchDirectory (string directory)
{
directories.Add (directory);
}
public void RemoveSearchDirectory (string directory)
{
directories.Remove (directory);
}
public string [] GetSearchDirectories ()
{
var directories = new string [this.directories.size];
Array.Copy (this.directories.items, directories, directories.Length);
return directories;
}
public event AssemblyResolveEventHandler ResolveFailure;
protected BaseAssemblyResolver ()
{
directories = new Collection<string> (2) { ".", "bin" };
}
AssemblyDefinition GetAssembly (string file, ReaderParameters parameters)
{
if (parameters.AssemblyResolver == null)
parameters.AssemblyResolver = this;
return ModuleDefinition.ReadModule (file, parameters).Assembly;
}
public virtual AssemblyDefinition Resolve (AssemblyNameReference name)
{
return Resolve (name, new ReaderParameters ());
}
public virtual AssemblyDefinition Resolve (AssemblyNameReference name, ReaderParameters parameters)
{
Mixin.CheckName (name);
Mixin.CheckParameters (parameters);
var assembly = SearchDirectory (name, directories, parameters);
if (assembly != null)
return assembly;
if (name.IsRetargetable) {
// if the reference is retargetable, zero it
name = new AssemblyNameReference (name.Name, Mixin.ZeroVersion) {
PublicKeyToken = Empty<byte>.Array,
};
}
#if NET_CORE
assembly = SearchTrustedPlatformAssemblies (name, parameters);
if (assembly != null)
return assembly;
#else
var framework_dir = Path.GetDirectoryName (typeof (object).Module.FullyQualifiedName);
var framework_dirs = on_mono
? new [] { framework_dir, Path.Combine (framework_dir, "Facades") }
: new [] { framework_dir };
if (IsZero (name.Version)) {
assembly = SearchDirectory (name, framework_dirs, parameters);
if (assembly != null)
return assembly;
}
if (name.Name == "mscorlib") {
assembly = GetCorlib (name, parameters);
if (assembly != null)
return assembly;
}
assembly = GetAssemblyInGac (name, parameters);
if (assembly != null)
return assembly;
assembly = SearchDirectory (name, framework_dirs, parameters);
if (assembly != null)
return assembly;
#endif
if (ResolveFailure != null) {
assembly = ResolveFailure (this, name);
if (assembly != null)
return assembly;
}
throw new AssemblyResolutionException (name);
}
#if NET_CORE
AssemblyDefinition SearchTrustedPlatformAssemblies (AssemblyNameReference name, ReaderParameters parameters)
{
if (name.IsWindowsRuntime)
return null;
if (TrustedPlatformAssemblies.Value.TryGetValue (name.Name, out string path))
return GetAssembly (path, parameters);
return null;
}
static Dictionary<string, string> CreateTrustedPlatformAssemblyMap ()
{
var result = new Dictionary<string, string> (StringComparer.OrdinalIgnoreCase);
string paths;
try {
paths = (string) AppDomain.CurrentDomain.GetData ("TRUSTED_PLATFORM_ASSEMBLIES");
} catch {
paths = null;
}
if (paths == null)
return result;
foreach (var path in paths.Split (Path.PathSeparator))
if (string.Equals (Path.GetExtension (path), ".dll", StringComparison.OrdinalIgnoreCase))
result [Path.GetFileNameWithoutExtension (path)] = path;
return result;
}
#endif
protected virtual AssemblyDefinition SearchDirectory (AssemblyNameReference name, IEnumerable<string> directories, ReaderParameters parameters)
{
var extensions = name.IsWindowsRuntime ? new [] { ".winmd", ".dll" } : new [] { ".dll", ".exe" };
foreach (var directory in directories) {
foreach (var extension in extensions) {
string file = Path.Combine (directory, name.Name + extension);
if (!File.Exists (file))
continue;
try {
return GetAssembly (file, parameters);
} catch (System.BadImageFormatException) {
continue;
}
}
}
return null;
}
static bool IsZero (Version version)
{
return version.Major == 0 && version.Minor == 0 && version.Build == 0 && version.Revision == 0;
}
#if !NET_CORE
AssemblyDefinition GetCorlib (AssemblyNameReference reference, ReaderParameters parameters)
{
var version = reference.Version;
var corlib = typeof (object).Assembly.GetName ();
if (corlib.Version == version || IsZero (version))
return GetAssembly (typeof (object).Module.FullyQualifiedName, parameters);
var path = Directory.GetParent (
Directory.GetParent (
typeof (object).Module.FullyQualifiedName).FullName
).FullName;
if (on_mono) {
if (version.Major == 1)
path = Path.Combine (path, "1.0");
else if (version.Major == 2) {
if (version.MajorRevision == 5)
path = Path.Combine (path, "2.1");
else
path = Path.Combine (path, "2.0");
} else if (version.Major == 4)
path = Path.Combine (path, "4.0");
else
throw new NotSupportedException ("Version not supported: " + version);
} else {
switch (version.Major) {
case 1:
if (version.MajorRevision == 3300)
path = Path.Combine (path, "v1.0.3705");
else
path = Path.Combine (path, "v1.1.4322");
break;
case 2:
path = Path.Combine (path, "v2.0.50727");
break;
case 4:
path = Path.Combine (path, "v4.0.30319");
break;
default:
throw new NotSupportedException ("Version not supported: " + version);
}
}
var file = Path.Combine (path, "mscorlib.dll");
if (File.Exists (file))
return GetAssembly (file, parameters);
if (on_mono && Directory.Exists (path + "-api")) {
file = Path.Combine (path + "-api", "mscorlib.dll");
if (File.Exists (file))
return GetAssembly (file, parameters);
}
return null;
}
static Collection<string> GetGacPaths ()
{
if (on_mono)
return GetDefaultMonoGacPaths ();
var paths = new Collection<string> (2);
var windir = Environment.GetEnvironmentVariable ("WINDIR");
if (windir == null)
return paths;
paths.Add (Path.Combine (windir, "assembly"));
paths.Add (Path.Combine (windir, Path.Combine ("Microsoft.NET", "assembly")));
return paths;
}
static Collection<string> GetDefaultMonoGacPaths ()
{
var paths = new Collection<string> (1);
var gac = GetCurrentMonoGac ();
if (gac != null)
paths.Add (gac);
var gac_paths_env = Environment.GetEnvironmentVariable ("MONO_GAC_PREFIX");
if (string.IsNullOrEmpty (gac_paths_env))
return paths;
var prefixes = gac_paths_env.Split (Path.PathSeparator);
foreach (var prefix in prefixes) {
if (string.IsNullOrEmpty (prefix))
continue;
var gac_path = Path.Combine (Path.Combine (Path.Combine (prefix, "lib"), "mono"), "gac");
if (Directory.Exists (gac_path) && !paths.Contains (gac))
paths.Add (gac_path);
}
return paths;
}
static string GetCurrentMonoGac ()
{
return Path.Combine (
Directory.GetParent (
Path.GetDirectoryName (typeof (object).Module.FullyQualifiedName)).FullName,
"gac");
}
AssemblyDefinition GetAssemblyInGac (AssemblyNameReference reference, ReaderParameters parameters)
{
if (reference.PublicKeyToken == null || reference.PublicKeyToken.Length == 0)
return null;
if (gac_paths == null)
gac_paths = GetGacPaths ();
if (on_mono)
return GetAssemblyInMonoGac (reference, parameters);
return GetAssemblyInNetGac (reference, parameters);
}
AssemblyDefinition GetAssemblyInMonoGac (AssemblyNameReference reference, ReaderParameters parameters)
{
for (int i = 0; i < gac_paths.Count; i++) {
var gac_path = gac_paths [i];
var file = GetAssemblyFile (reference, string.Empty, gac_path);
if (File.Exists (file))
return GetAssembly (file, parameters);
}
return null;
}
AssemblyDefinition GetAssemblyInNetGac (AssemblyNameReference reference, ReaderParameters parameters)
{
var gacs = new [] { "GAC_MSIL", "GAC_32", "GAC_64", "GAC" };
var prefixes = new [] { string.Empty, "v4.0_" };
for (int i = 0; i < gac_paths.Count; i++) {
for (int j = 0; j < gacs.Length; j++) {
var gac = Path.Combine (gac_paths [i], gacs [j]);
var file = GetAssemblyFile (reference, prefixes [i], gac);
if (Directory.Exists (gac) && File.Exists (file))
return GetAssembly (file, parameters);
}
}
return null;
}
static string GetAssemblyFile (AssemblyNameReference reference, string prefix, string gac)
{
var gac_folder = new StringBuilder ()
.Append (prefix)
.Append (reference.Version)
.Append ("__");
for (int i = 0; i < reference.PublicKeyToken.Length; i++)
gac_folder.Append (reference.PublicKeyToken [i].ToString ("x2"));
return Path.Combine (
Path.Combine (
Path.Combine (gac, reference.Name), gac_folder.ToString ()),
reference.Name + ".dll");
}
#endif
public void Dispose ()
{
Dispose (true);
GC.SuppressFinalize (this);
}
protected virtual void Dispose (bool disposing)
{
}
}
}
|
lukeredpath/simpleconfig | 23 | test/test_helper.rb | require 'rubygems'
require 'test/unit'
require 'mocha'
$:.unshift File.expand_path('../../lib', __FILE__)
require 'simple_config'
| require 'rubygems'
require 'test/unit'
require 'mocha'
require 'mocha/mini_test'
$:.unshift File.expand_path('../../lib', __FILE__)
require 'simple_config'
|
kof/xLazyLoader | 2 | src/jquery.xLazyLoader.js | /*
* xLazyLoader 1.5 - Plugin for jQuery
*
* Load js, css and images asynchron and get different callbacks
*
* Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
* and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
*
* Depends:
* jquery.js
*
* Copyright (c) 2010 Oleg Slobodskoi (jsui.de)
*/
;(function($){
$.xLazyLoader = function ( method, options ) {
if ( typeof method == 'object' ) {
options = method;
method = 'init';
};
new xLazyLoader()[method](options);
};
$.xLazyLoader.defaults = {
js: [], css: [], img: [],
jsKey: null, cssKey: null, imgKey: null,
name: null,
timeout: 20000,
//success callback for all files
success: $.noop,
//error callback - by load errors / timeout
error: $.noop,
//complete callbck - by success or errors
complete: $.noop,
//success callback for each file
each: $.noop
};
var head = document.getElementsByTagName("head")[0];
function xLazyLoader ()
{
var self = this,
s,
loaded = [],
errors = [],
tTimeout,
cssTimeout,
toLoad,
files = []
;
this.init = function ( options )
{
if ( !options ) return;
s = $.extend({}, $.xLazyLoader.defaults, options);
toLoad = {js: s.js, css: s.css, img: s.img};
$.each(toLoad, function( type, f ){
if ( typeof f == 'string' )
f = f.split(',');
files = files.concat(f);
});
if ( !files.length ) {
dispatchCallbacks('error');
return;
};
if (s.timeout) {
tTimeout = setTimeout(function(){
var handled = loaded.concat(errors);
/* search for unhandled files */
$.each(files, function(i, file){
$.inArray(file, handled) == -1 && errors.push(file);
});
dispatchCallbacks('error');
}, s.timeout);
};
$.each(toLoad, function(type, urls){
if ( $.isArray(urls) )
$.each( urls, function(i, url){
load(type, url);
});
else if (typeof urls == 'string')
load(type, urls);
});
};
this.js = function ( src, callback, name, key )
{
var $script = $('script[src*="'+src+'"]');
if ( $script.length ) {
$script.attr('pending') ? $script.bind('scriptload',callback) : callback();
return;
};
var s = document.createElement('script');
s.setAttribute("type","text/javascript");
s.setAttribute('charset', 'UTF-8');
s.setAttribute("src", src + key);
s.setAttribute('id', name);
s.setAttribute('pending', 1);
// Mozilla only
s.onerror = addError;
$(s).bind('scriptload',function(){
$(this).removeAttr('pending');
callback();
//unbind load event
//timeout because of pending callbacks
setTimeout(function(){
$(s).unbind('scriptload');
},10);
});
// jQuery doesn't handles onload event special for script tag,
var done = false;
s.onload = s.onreadystatechange = function() {
if ( !done && ( !this.readyState || /loaded|complete/.test(this.readyState) ) ) {
done = true;
// Handle memory leak in IE
s.onload = s.onreadystatechange = null;
$(s).trigger('scriptload');
};
};
head.appendChild(s);
};
this.css = function ( href, callback, name, key )
{
if ( $('link[href*="'+href+'"]').length ) {
callback();
return;
};
var link = $('<link rel="stylesheet" type="text/css" media="all" href="'+ href + key + '" id="'+name+'"></link>')[0];
if ( $.browser.msie ) {
link.onreadystatechange = function () {
/loaded|complete/.test(link.readyState) && callback();
};
} else if ( $.browser.opera ) {
link.onload = callback;
} else {
/*
* Mozilla, Safari, Chrome
* unfortunately it is inpossible to check if the stylesheet is really loaded or it is "HTTP/1.0 400 Bad Request"
* the only way to do this is to check if some special properties were set, so there is no error callback for stylesheets -
* it fires alway success
*
* There is also no access to sheet properties by crossdomain stylesheets,
* so we fire callback immediately
*/
var hostname = location.hostname.replace('www.',''),
hrefHostname = /http:/.test(href) ? /^(\w+:)?\/\/([^\/?#]+)/.exec( href )[2] : hostname;
hostname != hrefHostname && $.browser.mozilla ?
callback()
:
//stylesheet is from the same domain or it is not firefox
(function(){
try {
link.sheet.cssRules;
} catch (e) {
cssTimeout = setTimeout(arguments.callee, 20);
return;
};
callback();
})();
};
head.appendChild(link);
};
this.img = function ( src, callback, name, key )
{
var img = new Image();
img.onload = callback;
img.onerror = addError;
img.src = src + key;
};
/* It works only for css */
this.disable = function ( name )
{
$('#lazy-loaded-'+name, head).attr('disabled', 'disabled');
};
/* It works only for css */
this.enable = function ( name )
{
$('#lazy-loaded-'+name, head).removeAttr('disabled');
};
/*
* By removing js tag, script ist still living in browser memory,
* css will be really destroyed
*/
this.destroy = function ( name )
{
$('#lazy-loaded-'+name, head).remove();
};
function load ( type, url ) {
self[type](url, function(status) {
status == 'error' ? errors.push(url) : loaded.push(url) && s.each(url);
checkProgress();
}, 'lazy-loaded-'+ (s.name ? s.name : new Date().getTime()), s[type+'Key'] ? '?key='+s[type+'Key'] : '' );
};
function dispatchCallbacks ( status ) {
s.complete(status, loaded, errors);
s[status]( status=='error' ? errors : loaded);
clearTimeout(tTimeout);
clearTimeout(cssTimeout);
};
function checkProgress () {
if (loaded.length == files.length) dispatchCallbacks('success')
else if (loaded.length+errors.length == files.length) dispatchCallbacks('error');
};
function addError () {
errors.push(this.src);
checkProgress();
};
};
})(jQuery); | /*
* xLazyLoader 1.5 - Plugin for jQuery
*
* Load js, css and images asynchron and get different callbacks
*
* Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
* and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
*
* Depends:
* jquery.js
*
* Copyright (c) 2010 Oleg Slobodskoi (jsui.de)
*/
;(function($){
$.xLazyLoader = function ( method, options ) {
if ( typeof method == 'object' ) {
options = method;
method = 'init';
};
new xLazyLoader()[method](options);
};
$.xLazyLoader.defaults = {
js: [], css: [], img: [],
jsKey: null, cssKey: null, imgKey: null,
name: null,
timeout: 20000,
//success callback for all files
success: $.noop,
//error callback - by load errors / timeout
error: $.noop,
//complete callbck - by success or errors
complete: $.noop,
//success callback for each file
each: $.noop
};
var head = document.getElementsByTagName("head")[0];
function xLazyLoader ()
{
var self = this,
s,
loaded = [],
errors = [],
tTimeout,
cssTimeout,
toLoad,
files = []
;
this.init = function ( options )
{
if ( !options ) return;
s = $.extend({}, $.xLazyLoader.defaults, options);
toLoad = {js: s.js, css: s.css, img: s.img};
$.each(toLoad, function( type, f ){
if ( typeof f == 'string' )
f = f.split(',');
files = files.concat(f);
});
if ( !files.length ) {
dispatchCallbacks('error');
return;
};
if (s.timeout) {
tTimeout = setTimeout(function(){
var handled = loaded.concat(errors);
/* search for unhandled files */
$.each(files, function(i, file){
$.inArray(file, handled) == -1 && errors.push(file);
});
dispatchCallbacks('error');
}, s.timeout);
};
$.each(toLoad, function(type, urls){
if ( $.isArray(urls) )
$.each( urls, function(i, url){
load(type, url);
});
else if (typeof urls == 'string')
load(type, urls);
});
};
this.js = function ( src, callback, name, key )
{
var $script = $('script[src*="'+src+'"]');
if ( $script.length ) {
$script.attr('pending') ? $script.bind('scriptload',callback) : callback();
return;
};
var s = document.createElement('script');
s.setAttribute("type","text/javascript");
s.setAttribute('charset', 'UTF-8');
s.setAttribute("src", src + key);
s.setAttribute('id', name);
s.setAttribute('pending', 1);
// Mozilla only
s.onerror = addError;
$(s).bind('scriptload',function(){
$(this).removeAttr('pending');
callback();
//unbind load event
//timeout because of pending callbacks
setTimeout(function(){
$(s).unbind('scriptload');
},10);
});
// jQuery doesn't handles onload event special for script tag,
var done = false;
s.onload = s.onreadystatechange = function() {
if ( !done && ( !this.readyState || /loaded|complete/.test(this.readyState) ) ) {
done = true;
// Handle memory leak in IE
s.onload = s.onreadystatechange = null;
$(s).trigger('scriptload');
};
};
head.appendChild(s);
};
this.css = function ( href, callback, name, key )
{
if ( $('link[href*="'+href+'"]').length ) {
callback();
return;
};
var link = $('<link rel="stylesheet" type="text/css" media="all" href="'+ href + key + '" id="'+name+'"></link>')[0];
if ( $.browser.msie ) {
link.onreadystatechange = function (){
if (link.readyState == "loaded" || link.readyState == "complete") {
link.onreadystatechange = null;
callback();
}
}
} else if ( $.browser.opera ) {
link.onload = callback;
} else {
/*
* Mozilla, Safari, Chrome
* unfortunately it is inpossible to check if the stylesheet is really loaded or it is "HTTP/1.0 400 Bad Request"
* the only way to do this is to check if some special properties were set, so there is no error callback for stylesheets -
* it fires alway success
*
* There is also no access to sheet properties by crossdomain stylesheets,
* so we fire callback immediately
*/
var hostname = location.hostname.replace('www.',''),
hrefHostname = /http:/.test(href) ? /^(\w+:)?\/\/([^\/?#]+)/.exec( href )[2] : hostname;
hostname != hrefHostname && $.browser.mozilla ?
callback()
:
//stylesheet is from the same domain or it is not firefox
(function(){
try {
link.sheet.cssRules;
} catch (e) {
cssTimeout = setTimeout(arguments.callee, 20);
return;
};
callback();
})();
};
head.appendChild(link);
};
this.img = function ( src, callback, name, key )
{
var img = new Image();
img.onload = callback;
img.onerror = addError;
img.src = src + key;
};
/* It works only for css */
this.disable = function ( name )
{
$('#lazy-loaded-'+name, head).attr('disabled', 'disabled');
};
/* It works only for css */
this.enable = function ( name )
{
$('#lazy-loaded-'+name, head).removeAttr('disabled');
};
/*
* By removing js tag, script ist still living in browser memory,
* css will be really destroyed
*/
this.destroy = function ( name )
{
$('#lazy-loaded-'+name, head).remove();
};
function load ( type, url ) {
self[type](url, function(status) {
status == 'error' ? errors.push(url) : loaded.push(url) && s.each(url);
checkProgress();
}, 'lazy-loaded-'+ (s.name ? s.name : new Date().getTime()), s[type+'Key'] ? '?key='+s[type+'Key'] : '' );
};
function dispatchCallbacks ( status ) {
s.complete(status, loaded, errors);
s[status]( status=='error' ? errors : loaded);
clearTimeout(tTimeout);
clearTimeout(cssTimeout);
};
function checkProgress () {
if (loaded.length == files.length) dispatchCallbacks('success')
else if (loaded.length+errors.length == files.length) dispatchCallbacks('error');
};
function addError () {
errors.push(this.src);
checkProgress();
};
};
})(jQuery); |
Uninett/PyMetric | 10 | model.py | import networkx as nx
from pajek import read_pajek
import utils
import distutils.version
class Model:
def __init__(self, graph, config, debug=False):
self.graph = graph
self.config = config
self.debug = debug
self.G = self._make_weighted_copy()
self._refresh_betweenness()
self.linkloads = {}
self.all_paths = {}
self.paths_using_edge = {}
self.linkload_parts = {}
self._refresh_all_paths()
def refresh_linkloads(self):
if not self.config.get('use_linkloads'): return False
self.linkloads = utils.read_linkloads(self.graph,
self.config.get('linkloads_host'),
self.config.get('linkloads_url'))
if not self.linkloads: return False
self.linkload_parts = {}
return True
def has_linkloads(self):
return len(self.linkloads.keys()) > 0
def get_in_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(v,u)])
def get_out_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(u,v)])
def get_betweenness(self, top=None):
if not top:
return self.betweenness
bc = self.betweenness
toplist = sorted(bc, lambda x,y: cmp(bc[x], bc[y]))
toplist.reverse()
return toplist[:top]
def get_edge_betweenness(self, top=None):
if not top:
return self.edge_betweenness
ebc = self.edge_betweenness
toplist = sorted(ebc,
lambda (x1,y1), (x2, y2): cmp(ebc[(x1, y1)], ebc[(x2, y2)]))
toplist.reverse()
return toplist[:top]
def uneven_metrics(self):
G = self.G
return filter(lambda x: G[x[0]][x[1]] != G[x[1]][x[0]],
G.edges())
def get_total_in_load(self, node, G=None, loads=None):
sum = 0
if not loads: loads = self.linkloads
if not G: G = self.graph
for neighbor in G[node]:
sum += loads[neighbor, node]
return sum
def get_total_out_load(self, node, G=None, loads=None):
sum = 0
if not loads: loads = self.linkloads
if not G: G = self.graph
for neighbor in G[node]:
sum += loads[node, neighbor]
return sum
def get_transit_links(self, u, v):
paths = self.nodes_and_paths_using_edge(u,v,self.G, True)[1]
return paths.keys()
def nodes_and_paths_using_edge(self, u, v, G=None, transit_only=False):
import time
stime = time.time()
if not G:
G = self.G
if not transit_only and (G == self.G or G == self.graph) and (u,v) in self.paths_using_edge:
return self.paths_using_edge[(u,v)]
candidates = set()
retpaths = {}
#print " Finding candidates (%s secs)" % (time.time() - stime)
for node in G:
if node == v: continue
paths = self.path(node, v, G)
if not paths: continue
for path in paths[1]:
if path[-2] == u:
candidates.add(node)
#print " Done. (%s secs)" % (time.time() - stime)
for node in candidates:
for dest in (set(G.nodes()) - candidates):
paths = self.path(node, dest, G)
if not paths: continue
paths = paths[1]
for path in paths:
edges = zip(path, path[1:])
if (u,v) in edges:
if (node,dest) not in retpaths:
if transit_only:
if node not in (u,v) and dest not in (u,v):
retpaths[(node,dest)] = [path]
else:
retpaths[(node,dest)] = [path]
else:
if transit_only:
if node not in (u,v) and dest not in (u,v):
retpaths[(node,dest)].append(path)
else:
retpaths[(node,dest)].append(path)
#print " Returning (%s secs)" % (time.time() - stime)
if not transit_only:
self.paths_using_edge[(u,v)] = (candidates, retpaths)
return candidates, retpaths
def get_link_load_part(self, u, v, loads=None, G=None):
import time
stime = time.time()
use_cache = False
if not G:
G = self.G
if not loads:
loads = self.linkloads
if loads == self.linkloads:
use_cache = True
#print " Cache is possible, keys:"
#print " %s" % self.linkload_parts.keys()
if use_cache and (u,v) in self.linkload_parts:
#print " Returning from cache (%s secs)" % (time.time() - stime)
return self.linkload_parts[(u,v)]
#print " Finding nodes_and_paths (%s, %s) (%s secs)" % (u,v,time.time()-stime)
nodes, pathlist = self.nodes_and_paths_using_edge(u, v, G)
#print " Nodes: %s -- Pathlist: %s" % (nodes, pathlist)
#print " Done. (%s secs)" % (time.time()-stime)
partloads = {}
counts = {}
for paths in pathlist.values():
numpaths = len(paths)
pathloads = {}
for path in paths:
#print " Finding path_loads (%s, %s) (%s secs)" % (u,v,time.time()-stime)
edges = self.get_path_loads(u, v, path, numpaths, loads, G)
for (s,t) in edges:
if (s,t) not in pathloads:
pathloads[(s,t)] = edges[(s,t)]
else:
pathloads[(s,t)] += edges[(s,t)]
partloads.update(pathloads)
for (s,t) in partloads:
try:
assert float(partloads[(s,t)]) -1 <= float(loads[(s,t)])
except:
print "Assertion failed for (%s,%s) %s > %s" \
% (s,t, partloads[(s,t)], loads[(s,t)])
#print " Returning (%s secs)" % (time.time()-stime)
if use_cache:
self.linkload_parts[(u,v)] = partloads
return partloads
def get_path_loads(self, u, v, path,
numpaths=1,
loads=None,
G=None):
if not loads:
loads = self.linkloads
if not G:
G = self.G
edges = zip(path, path[1:])
if path[0] == u:
pass
elif path[-1] == v:
edges.reverse()
elif (u,v) not in edges:
print "Invalid call:"
print "get_path_loads: (%s -> %s) [%s]" % (u,v,path)
return False
else:
path1, path2 = [], []
i = 0
while path[i] != v:
path1.append(path[i])
i += 1
path1.append(v)
path2.append(u)
path2.append(v)
i += 1
for node in path[i:]:
path2.append(node)
#print "Splitting call in two: %s, %s" % (path1, path2)
res1 = self.get_path_loads(u,v,path1,numpaths,loads,G)
res2 = self.get_path_loads(u,v,path2,numpaths,loads,G)
res1.update(res2)
return res1
#print "get_path_loads: (%s -> %s) [%s]" % (u,v,path)
cr = utils.calc_ratio
ndio = utils.node_diff_in_out
loadmatrix = {}
loadmatrix[u] = {'out': loads[(u,v)] / float(numpaths)}
loadmatrix[v] = { 'in': loads[(u,v)] / float(numpaths)}
for i in range(len(edges)):
(s,t) = edges[i]
#print "Looking at [%s] (%s,%s)" % (i,s,t)
if s in loadmatrix:
if not 'out' in loadmatrix[s]:
loadmatrix[s]['out'] = loadmatrix[s]['in'] * cr(G, loads,
s, t, True, False)
#print "Load(in) :", loadmatrix[s]['in']
#print "Load(out):", loadmatrix[s]['out']
loadmatrix[t] = {'in': loadmatrix[s]['out']}
elif t in loadmatrix:
if not 'in' in loadmatrix[t]:
newpath = path[:]
#print "Newpath before slice: %s" % newpath
newpath = newpath[-(i+2):]
#print "Doing self(newpath: %s)" % newpath
pathcalc = self.get_path_loads(newpath[0], newpath[1],
newpath, numpaths, loads, G)
loadmatrix[t]['in'] = pathcalc[(newpath[-2], newpath[-1])]
loadmatrix[s] = {'out': loadmatrix[t]['in']}
else:
print "Can't find loaddata for (%s,%s)" % (s,t)
edges = zip(path, path[1:])
retloads = {}
for (s,t) in edges:
retloads[(s,t)] = loadmatrix[s]['out']
return retloads
def get_link_info(self, u, v):
G = self.G
if not G.has_edge(u,v): return {}
bc = self.edge_betweenness
retinfo = {}
edgedata = self.graph[u][v]
name = ""
capacity = 0
if 'c' in edgedata:
capacity = edgedata['c']
if 'l' in edgedata:
name = edgedata['l']
utilization = "NA"
if capacity != 0 and (u,v) in self.linkloads:
utilization = "%.2f%%" % (self.get_link_utilization(u,v)*100)
load = "NA"
if (u,v) in self.linkloads:
load = "%.2f Mbit/s" % (self.get_out_link_load(u,v)/float(1024))
retinfo['name'] = name
retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
% (bc[(u,v)], (bc[(u,v)]/max(bc.values()))*100,
(bc[(u,v)]/(sum(bc.values())/len(bc)))*100)
retinfo['capacity'] = utils.cap2str(capacity)
retinfo['load'] = load
retinfo['utilization'] = utilization
return retinfo
    def get_node_info(self, node):
        """Return a dict of display-ready facts about *node*.

        Keys: 'name', 'degree', 'links', 'neighbors', 'longest paths',
        'eccentricity', 'betweenness'.  Returns {} for unknown nodes.
        """
        G = self.graph
        if node not in G.nodes(): return {}
        bc = self.betweenness
        retinfo = {}
        retinfo['name'] = node
        retinfo['degree'] = G.out_degree(node)
        # One "linkname (metric)" string per outgoing edge.
        retinfo['links'] = map(lambda x: x[2]['l'] + \
                               " (" + str(int(x[2]['value'])) + ")",
                               G.edges(node, data=True))
        retinfo['neighbors'] = G.neighbors(node)
        retinfo['longest paths'] = self.get_max_cost_paths(nodes=[node])
        retinfo['eccentricity'] = nx.eccentricity(G, node)
        retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
                                 % (bc[node], (bc[node]/max(bc.values()))*100,
                                    (bc[node]/(sum(bc.values())/len(bc)))*100)
        return retinfo
    def get_max_cost_paths(self, top=8, nodes=None):
        """Return up to *top* strings describing the most expensive
        shortest paths, optionally restricted to the given source *nodes*.

        Each entry reads "A <-> B (cost)"; when the reverse path has the
        same cost only one direction is reported.
        """
        sources = self.G.nodes()
        if nodes:
            sources = nodes
        pathcosts = {}
        retval = []
        for source in sources:
            # [1] selects the distance dict from (predecessors, distances).
            costs = nx.dijkstra_predecessor_and_distance(self.G, source)[1]
            for dest in costs:
                pathcosts[(source, dest)] = costs[dest]
        # NOTE: cmp= is Python-2-only; sort ascending, then reverse.
        spathcosts = sorted(pathcosts,
                            cmp=lambda x,y: cmp(pathcosts[x], pathcosts[y]))
        spathcosts.reverse()
        sp = spathcosts
        pc = pathcosts
        seen = {}
        for (u,v) in sp[:top]:
            # Skip the reverse direction when it was already reported.
            if (u,v) in seen and pc[(u,v)] == pc[(v,u)]: continue
            retval.append("%s (%s)" % (" <-> ".join([u,v]), pc[(u,v)]))
            seen[(u,v)] = True
            if (v,u) in sp and pc[(u,v)] == pc[(v,u)]:
                seen[(v,u)] = True
        return retval
def get_node_groups(self, threshold=0.095, n=10, nodes=None, path=None):
groups = {}
bc = self.betweenness
top = self.get_betweenness(top=n)
for node in self.G.nodes():
if nodes != None and node not in nodes:
continue
if bc[node] > threshold or node in top:
if path and node == path[0]:
if not 'mainstart' in groups:
groups['mainstart'] = [node]
else:
groups['mainstart'].append(node)
elif path and node == path[-1]:
if not 'mainstop' in groups:
groups['mainstop'] = [node]
else:
groups['mainstop'].append(node)
elif path and node in path:
if not 'mainpath' in groups:
groups['mainpath'] = [node]
else:
groups['mainpath'].append(node)
else:
if not 'main' in groups:
groups['main'] = [node]
else:
groups['main'].append(node)
else:
if path and node == path[0]:
if not 'normalstart' in groups:
groups['normalstart'] = [node]
else:
groups['normalstart'].append(node)
elif path and node == path[-1]:
if not 'normalstop' in groups:
groups['normalstop'] = [node]
else:
groups['normalstop'].append(node)
elif path and node in path:
if not 'normalpath' in groups:
groups['normalpath'] = [node]
else:
groups['normalpath'].append(node)
else:
if not 'normal' in groups:
groups['normal'] = [node]
else:
groups['normal'].append(node)
return [(groups[k], k) for k in groups]
def get_path_capacity(self, path, as_string=False, slowest_only=False):
path_links = zip(path, path[1:])
slowest = None
if slowest_only:
slowest = min([self.get_link_capacity(u,v)
for (u,v) in path_links])
if as_string:
return utils.cap2str(slowest)
return slowest
return [self.get_link_capacity(u,v,as_string) for (u,v) in path_links]
def get_link_capacity(self, u, v, as_string=False):
if not self.graph.has_edge(u,v):
return False
linkinfo = self.graph[u][v]
if not 'c' in linkinfo:
if as_string:
return "Unknown"
return False
if as_string:
return utils.cap2str(int(linkinfo['c']))
return int(linkinfo['c'])
    def get_link_utilization(self, u, v):
        """Return load/capacity for link u->v as a float ratio.

        get_link_capacity may return False (-> float 0.0), which is caught
        as a ZeroDivisionError and reported; 0.0 is returned in that case.
        """
        try:
            utilz = self.get_out_link_load(u,v)/float(self.get_link_capacity(u,v))
        except ZeroDivisionError:
            print "Warning: Could not get link capacity for link:"
            print "%s => %s" % (u,v)
            utilz = 0.0
        return utilz
def get_link_utilizations(self):
utils = {}
for (u,v) in self.G.edges():
utils[(u,v)] = self.get_link_utilization(u,v)
return utils
def has_capacity_info(self):
for (u,v) in self.graph.edges():
if 'c' in self.graph[u][v]:
return True
return False
    def get_edge_groups(self, threshold=0.020, n=20, edges=None, path=None):
        """Partition edges into display groups.

        An edge is "main*" when both directions exceed the betweenness
        *threshold* or it ranks among the top *n*; otherwise "normal*".
        *path* may be a node list or, for equal-cost multipath, a list of
        node lists (first = primary, rest = alternates).  When *edges* is
        given, only those edges are classified.  Returns a list of
        (edgelist, groupname) pairs with (u, v, data) tuples.
        """
        groups, mpath_edges, rpath_edges = {}, [], []
        multi = False
        mpath = path
        if path != None:
            # Multipath input: primary path plus alternate paths; edges are
            # collected in both directions so (u,v) and (v,u) both match.
            if type(path[0]) == type([]):
                if len(path) > 1: multi = True
                mpath = path[0]
                for p in path[1:]:
                    rpath_edges += zip(p, p[1:])
                    rpath_edges += zip(p[1:], p)
            mpath_edges = zip(mpath, mpath[1:])
            mpath_edges += zip(mpath[1:], mpath)
        ebc = self.edge_betweenness
        top = self.get_edge_betweenness(top=n)
        for (u, v, d) in self.G.edges(data=True):
            if edges != None and (u, v) not in edges:
                continue
            if (ebc[(u,v)] > threshold and ebc[(v,u)] > threshold) \
                   or (u,v) in top:
                #print "Path: %s, multi: %s, (%s,%s), %s" % (path, multi, u,v,mpath_edges)
                if (path != None) and (not multi) and ((u,v) in mpath_edges):
                    if 'mainpath' not in groups:
                        groups['mainpath'] = [(u,v,d)]
                    else:
                        groups['mainpath'].append((u,v,d))
                elif multi and mpath_edges and (u,v) in mpath_edges \
                         and (u,v) not in rpath_edges:
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                elif mpath_edges and (u,v) in mpath_edges:
                    if 'mainpath' not in groups:
                        groups['mainpath'] = [(u,v,d)]
                    else:
                        groups['mainpath'].append((u,v,d))
                elif rpath_edges and (u,v) in rpath_edges:
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                else:
                    if 'main' not in groups:
                        groups['main'] = [(u,v,d)]
                    else:
                        groups['main'].append((u,v,d))
            else:
                if path != None and not multi and (u,v) in mpath_edges:
                    if 'normalpath' not in groups:
                        groups['normalpath'] = [(u,v,d)]
                    else:
                        groups['normalpath'].append((u,v,d))
                elif multi and mpath_edges and (u,v) in mpath_edges \
                         and (u,v) not in rpath_edges:
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                elif mpath_edges and (u,v) in mpath_edges:
                    if 'normalpath' not in groups:
                        groups['normalpath'] = [(u,v,d)]
                    else:
                        groups['normalpath'].append((u,v,d))
                elif rpath_edges and (u,v) in rpath_edges:
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                else:
                    if 'normal' not in groups:
                        groups['normal'] = [(u,v,d)]
                    else:
                        groups['normal'].append((u,v,d))
        return [(groups[k], k) for k in groups]
def get_nodes(self):
return self.G.nodes()
def get_areas(self, nodes):
na = self.graph.node_attr
areas = {}
for n in nodes:
if 'area' in na[n]:
areas[n] = na[n]['area']
else:
areas[n] = None
return areas
def get_positions(self, nodes):
na = self.graph.node_attr
pos = {}
for n in nodes:
pos[n] = (float(na[n]['x']), float(na[n]['y']))
return pos
    def get_stats(self):
        """Return a dict of whole-topology statistics (node/edge counts,
        radius, diameter, center, periphery, density, reciprocity, mean
        shortest-path length, longest paths, top-20 transit nodes)."""
        top = self.get_betweenness(top=20)
        stats = {}
        stats["nodes"] = nx.number_of_nodes(self.graph)
        stats["edges"] = nx.number_of_edges(self.graph)
        stats["radius"] = nx.radius(self.graph)
        stats["diameter"] = nx.diameter(self.graph)
        stats["center"] = nx.center(self.graph)
        stats["periphery"] = nx.periphery(self.graph)
        stats["density"] = nx.density(self.graph)
        stats["reciprocity"] = utils.reciprocity(self.graph)
        stats["mean length"] = nx.average_shortest_path_length(self.graph)
        stats["longest paths"] = self.get_max_cost_paths()
        stats["top 20 transit"] = top
        return stats
    def path(self, source, dest, G=None):
        """Return (cost, paths) for all equal-cost shortest paths from
        *source* to *dest* in *G* (default: the weighted graph, for which
        precomputed predecessor/distance data is reused).

        Returns (False, None) when *dest* is unreachable.
        """
        if not G:
            G = self.graph
        if G == self.graph:
            # Cached (predecessors, distances) from _refresh_all_paths.
            preds, costs = self.all_paths[source]
        else:
            preds, costs = nx.dijkstra_predecessor_and_distance(G, source)
        if not dest in costs:
            return False, None
        def _get_paths(preds, path, paths, dest):
            # Walk the predecessor DAG backwards from dest, branching on
            # every equal-cost predecessor; completed paths are collected
            # in *paths* (in reverse order).
            if dest in path:
                return
            path.append(dest)
            if len(preds[dest]) == 0:
                paths.append(path)
                return
            for newdest in preds[dest]:
                _get_paths(preds, path[:], paths, newdest)
            return paths
        paths = _get_paths(preds, [], [], dest)
        for path in paths:
            path.reverse()
        return costs[dest], paths
    def refresh_from_file(self, filename):
        """Reload the topology from a Pajek file and rebuild all derived
        state (weighted copy, betweenness, shortest-path caches)."""
        self.graph = read_pajek(filename)
        self.G = self._make_weighted_copy()
        self.linkloads = {}
        self._refresh_betweenness()
        self._refresh_all_paths()
    def _make_weighted_copy(self):
        """Return a copy of the attribute graph where every edge carries
        only a 'weight' attribute (taken from 'value').

        The remove/add pair deliberately strips all other edge attributes
        ('c', 'l', ...) from the copy.
        """
        G = self.graph.copy()
        for (u,v,d) in G.edges(data=True):
            G.remove_edge(u,v)
            G.add_edge(u,v,weight=d['value'])
        return G
    def _refresh_betweenness(self):
        """Recompute node load centrality and edge betweenness.

        The keyword for weighted centrality changed across networkx
        releases; versions > 1.5 take weight='weight'.
        """
        self.betweenness = None
        if distutils.version.StrictVersion(nx.__version__) > distutils.version.StrictVersion("1.5"):
            self.betweenness = nx.load_centrality(self.G, weight='weight')
        else:
            self.betweenness = nx.load_centrality(self.G, weighted_edges=True)
        # NOTE(review): weight=True looks suspect -- newer networkx expects
        # an attribute name such as weight='weight'; confirm intended.
        self.edge_betweenness = nx.edge_betweenness(self.G, normalized=True, weight=True)
    def _refresh_all_paths(self):
        """Cache Dijkstra predecessor/distance data per node, and the set
        of node pairs whose shortest paths traverse each edge."""
        for node in self.G:
            self.all_paths[node] = nx.dijkstra_predecessor_and_distance(self.G, node)
        for edge in self.G.edges():
            self.paths_using_edge[edge[0], edge[1]] = \
                self.nodes_and_paths_using_edge(edge[0], edge[1], self.G)
    def _routeselection(self, paths):
        """Narrow a set of equal-cost *paths* by area preference.

        Hop by hop, paths whose next hop leaves the current area while a
        competing path stays inside it are dropped; the surviving paths
        are returned.  If no area data is available, *paths* is returned
        unchanged.
        """
        p_attr = {}
        pathnodes = reduce(lambda x,y: x+y, paths)
        areas = self.get_areas(pathnodes)
        if not areas:
            return paths
        for i in range(len(paths)):
            # areahops[j] is True when hop j stays within one area.
            areahops = map(lambda x: areas[x[0]] == areas[x[1]],
                           zip(paths[i], paths[i][1:]))
            p_attr[i] = {'areahops': areahops, 'candidate': True}
        for hop in range(1, max([2] + [len(p) for p in paths]) - 2):
            diff = False
            last_hop = None
            last_areahop = None
            for i, path in enumerate(paths):
                if hop+1 > len(path) - 1: continue
                if p_attr[i]['candidate'] == False: continue
                pathhop = (path[hop], path[hop+1])
                pathah = p_attr[i]['areahops'][hop]
                print "Comparing %s to %s and %s to %s (hop %s)" \
                      % (pathhop, last_hop, pathah, last_areahop, hop)
                if last_hop == None:
                    last_hop = pathhop
                    last_areahop = pathah
                elif pathhop != last_hop:
                    # Two candidates diverge here with different area
                    # behavior: filter on this hop.
                    if pathah != last_areahop:
                        diff = True
                        print "breaking at hop %s" % hop
                        break
            if diff:
                for i in range(len(paths)):
                    if hop > len(paths[i]) - 1: continue
                    print "Looking at path %s with areahops %s, index %s" \
                          % (paths[i], p_attr[i]['areahops'], hop)
                    if p_attr[i]['areahops'][hop] != True:
                        p_attr[i]['candidate'] = False
                diff = False
        return [paths[i] for i in range(len(paths)) if p_attr[i]['candidate']]
class Simulation:
    """What-if simulation on a copy of the model's weighted graph:
    metric changes, link failures and router failures can be applied,
    inspected and undone without touching the underlying model."""
    # Scenario-change type tags stored in self.changes entries.
    SC_METRIC = 1
    SC_LINKFAIL = 2
    SC_ROUTERFAIL = 4
    def __init__(self, model, debug=False):
        """Create an inactive simulation working on a copy of model.G."""
        self.model = model
        self.graph = model.G.copy()
        self.active = False
        self.changes = []         # recorded scenario changes (dicts)
        self._refresh_betweenness()
        self.debug = debug
        self.acnodes = set()      # anycast member nodes
        self.acgroups = {}        # source node -> anycast group members
        self.all_paths = {}
        self._refresh_all_paths()
        # Shares the model's load data until a change recomputes it.
        self.linkloads = self.model.linkloads
    def get_stats(self):
        """Return a dict of statistics for the simulated topology
        (same keys as the model-side get_stats)."""
        bc = self.betweenness
        # NOTE: cmp function passed positionally -- Python-2-only sort API.
        top = sorted(bc, lambda x,y: cmp(bc[x], bc[y]))
        top.reverse()
        stats = {}
        stats["nodes"] = nx.number_of_nodes(self.graph)
        stats["edges"] = nx.number_of_edges(self.graph)
        stats["radius"] = nx.radius(self.graph)
        stats["diameter"] = nx.diameter(self.graph)
        stats["center"] = nx.center(self.graph)
        stats["periphery"] = nx.periphery(self.graph)
        stats["density"] = nx.density(self.graph)
        stats["reciprocity"] = utils.reciprocity(self.graph)
        stats["mean length"] = nx.average_shortest_path_length(self.graph)
        stats["longest paths"] = self.get_max_cost_paths()
        stats["top 20 transit"] = top[0:20]
        return stats
def get_link_utilization(self, u, v):
return self.get_out_link_load(u,v)/float(self.model.get_link_capacity(u,v))
def get_link_utilizations(self):
utils = {}
for (u,v) in self.graph.edges():
utils[(u,v)] = self.get_link_utilization(u,v)
return utils
def get_in_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(v,u)])
def get_out_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(u,v)])
    def get_node_info(self, node):
        """Return a dict of display-ready facts about *node* in the
        simulated topology; {} for unknown nodes.

        Link names come from the unmodified model graph so they survive
        simulated attribute-stripping; an anycast entry is added when
        anycast nodes are configured ('*' marks membership).
        """
        G = self.graph
        if node not in G.nodes(): return {}
        bc = self.betweenness
        retinfo = {}
        retinfo['name'] = node
        retinfo['degree'] = G.out_degree(node)
        retinfo['links'] = map(lambda x: self.model.graph.get_edge_data(x[0], x[1])['l']\
                               + " (" + str(int(x[2])) + ")",
                               G.edges(node, data=True))
        retinfo['neighbors'] = G.neighbors(node)
        retinfo['longest paths'] = self.get_max_cost_paths(nodes=[node])
        retinfo['eccentricity'] = nx.eccentricity(G, node)
        retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
                                 % (bc[node], (bc[node]/max(bc.values()))*100,
                                    (bc[node]/(sum(bc.values())/len(bc)))*100)
        if self.acnodes:
            acstr = " and ".join(self.acgroups[node])
            retinfo['anycast group'] = acstr
            if node in self.acnodes:
                retinfo['anycast group'] += '*'
        return retinfo
    def get_link_info(self, u, v):
        """Return a dict of display-ready facts about simulated link u->v
        (same keys as the model-side get_link_info); {} when absent."""
        G = self.graph
        if not G.has_edge(u,v): return {}
        bc = self.edge_betweenness
        retinfo = {}
        # Attribute data is read from the unmodified model graph.
        edgedata = self.model.graph[u][v]
        name = ""
        capacity = 0
        if 'c' in edgedata:
            capacity = edgedata['c']
        if 'l' in edgedata:
            name = edgedata['l']
        utilization = "NA"
        if capacity != 0 and (u,v) in self.linkloads:
            utilization = "%.2f%%" % (self.get_link_utilization(u,v)*100)
        load = "NA"
        if (u,v) in self.linkloads:
            # /1024 suggests loads are stored in Kbit/s -- TODO confirm.
            load = "%.2f Mbit/s" % (self.get_out_link_load(u,v)/float(1024))
        retinfo['name'] = name
        retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
                                 % (bc[(u,v)], (bc[(u,v)]/max(bc.values()))*100,
                                    (bc[(u,v)]/(sum(bc.values())/len(bc)))*100)
        retinfo['capacity'] = utils.cap2str(capacity)
        retinfo['load'] = load
        retinfo['utilization'] = utilization
        return retinfo
def get_transit_links(self, u, v):
paths = self.model.nodes_and_paths_using_edge(u,v,self.graph, True)[1]
return paths.keys()
    def get_max_cost_paths(self, top=8, nodes=None):
        """Return up to *top* strings describing the most expensive
        shortest paths in the simulated graph, optionally restricted to
        the given source *nodes* (same format as the model-side twin)."""
        sources = self.graph.nodes()
        if nodes:
            sources = nodes
        pathcosts = {}
        retval = []
        for source in sources:
            # [1] selects the distance dict from (predecessors, distances).
            costs = nx.dijkstra_predecessor_and_distance(self.graph, source)[1]
            for dest in costs:
                pathcosts[(source, dest)] = costs[dest]
        # NOTE: cmp= is Python-2-only; sort ascending, then reverse.
        spathcosts = sorted(pathcosts,
                            cmp=lambda x,y: cmp(pathcosts[x], pathcosts[y]))
        spathcosts.reverse()
        sp = spathcosts
        pc = pathcosts
        seen = {}
        for (u,v) in sp[:top]:
            # Skip the reverse direction when it was already reported.
            if (u,v) in seen and pc[(u,v)] == pc[(v,u)]: continue
            retval.append("%s (%s)" % (" <-> ".join([u,v]), pc[(u,v)]))
            seen[(u,v)] = True
            if (v,u) in sp and pc[(u,v)] == pc[(v,u)]:
                seen[(v,u)] = True
        return retval
    def start(self):
        """Enter simulation mode: reset the working graph to a fresh copy
        of the model and clear all recorded changes and effects."""
        self.active = True
        self.graph = self.model.G.copy()
        self._refresh_betweenness()
        self.changes = []
        self.effects = []
        self.linkloads = self.model.linkloads
def stop(self):
self.acnodes = set()
self.acgroups = {}
self.active = False
def get_changes(self):
return self.changes
    def get_changes_strings(self, commands=False):
        """Return human-readable descriptions of the recorded changes,
        or re-playable command strings when *commands* is True."""
        strings = []
        for change in self.changes:
            if change['type'] == Simulation.SC_METRIC:
                connector = "->"
                bidirstr = " one-way"
                if change['bidir']:
                    connector = "<->"
                    bidirstr = ""
                if commands:
                    strings.append("metric %s %s %s%s"\
                                   % (change['pair'][0], change['pair'][1],
                                      change['metrics'][1], bidirstr))
                    continue
                strings.append("Metric for %s%s%s [%s->%s]"\
                               % (change['pair'][0], connector, change['pair'][1],
                                  change['metrics'][0], change['metrics'][1]))
            elif change['type'] == Simulation.SC_LINKFAIL:
                if commands:
                    strings.append("linkfail %s %s"\
                                   % (change['pair'][0], change['pair'][1]))
                    continue
                strings.append("Link failure between %s and %s" \
                               % (change['pair'][0], change['pair'][1]))
            elif change['type'] == Simulation.SC_ROUTERFAIL:
                if commands:
                    strings.append("routerfail %s"\
                                   % (change['node']))
                    continue
                strings.append("Router failure of %s" \
                               % (change['node']))
        return strings
def uneven_metrics(self):
G = self.graph
return filter(lambda x: G[x[0]][x[1]] != G[x[1]][x[0]],
G.edges())
def has_changes(self):
return len(self.changes) > 0
    def no_changes(self):
        """Return the NUMBER of recorded changes.

        NOTE(review): despite the name suggesting a boolean "no changes?",
        this returns the count ("no." as in "number of"); callers rely on
        the current behavior, so the name is kept.
        """
        return len(self.changes)
def get_effects(self):
return self.effects
def get_effects_node(self, node):
if not node in self.effects: return {}
return self.effects[node]
def get_effects_summary(self):
dstsummary, srcsummary = {}, {}
for source in self.effects:
no_changes = 0
for dest in self.effects[source].keys():
ddiffs = self.effects[source][dest]
no_changes += len(ddiffs)
if dest in dstsummary:
dstsummary[dest].append(source)
else:
dstsummary[dest] = [source]
if source in srcsummary:
srcsummary[source].append(dest)
else:
srcsummary[source] = [dest]
return srcsummary, dstsummary
def get_nodes(self):
return self.graph.nodes()
def get_betweenness(self, top=None):
if not top:
return self.betweenness
bc = self.betweenness
toplist = sorted(bc, lambda x,y: cmp(bc[x], bc[y]))
toplist.reverse()
return toplist[:top]
def get_edge_betweenness(self, top=None):
if not top:
return self.edge_betweenness
ebc = self.edge_betweenness
toplist = sorted(ebc, lambda (x1,y1), (x2, y2): cmp(ebc[(x1, y1)], ebc[(x2, y2)]))
toplist.reverse()
return toplist[:top]
def get_anycast_groups_by_source(self):
return self.acgroups
def get_anycast_group(self, node):
if node not in self.acnodes:
return None
return filter(lambda x: node in self.acgroups[x], self.acgroups.keys())
def get_anycast_nodes(self):
return list(self.acnodes)
    def add_anycast_nodes(self, nodes):
        """Add *nodes* to the anycast set and recompute group membership."""
        self.acnodes.update(nodes)
        self._refresh_anycast()
    def remove_anycast_nodes(self, nodes):
        """Remove *nodes* from the anycast set (silently ignoring
        non-members) and recompute group membership."""
        for n in nodes:
            self.acnodes.discard(n)
        self._refresh_anycast()
def get_node_groups(self, threshold=0.095, n=10, path=None):
groups = {}
bc = self.betweenness
top = self.get_betweenness(top=n)
for node in self.graph.nodes():
if bc[node] > threshold or node in top:
if path and node == path[0]:
if not 'mainstart' in groups:
groups['mainstart'] = [node]
else:
groups['mainstart'].append(node)
elif path and node == path[-1]:
if not 'mainstop' in groups:
groups['mainstop'] = [node]
else:
groups['mainstop'].append(node)
elif path and node in path:
if not 'mainpath' in groups:
groups['mainpath'] = [node]
else:
groups['mainpath'].append(node)
else:
if not 'main' in groups:
groups['main'] = [node]
else:
groups['main'].append(node)
else:
if path and node == path[0]:
if not 'normalstart' in groups:
groups['normalstart'] = [node]
else:
groups['normalstart'].append(node)
elif path and node == path[-1]:
if not 'normalstop' in groups:
groups['normalstop'] = [node]
else:
groups['normalstop'].append(node)
elif path and node in path:
if not 'normalpath' in groups:
groups['normalpath'] = [node]
else:
groups['normalpath'].append(node)
else:
if not 'normal' in groups:
groups['normal'] = [node]
else:
groups['normal'].append(node)
return [(groups[k], k) for k in groups]
    def get_diff_edge_groups(self, path, spath, threshold=0.01, n=20):
        """Classify edges for display when diffing the simulated topology
        against the model topology.

        *path* is the model's path(s) and *spath* the simulation's path(s)
        between the same endpoints; each may be a node list or, for
        equal-cost multipath, a list of node lists (first = primary).
        Group names combine a betweenness class (main/normal) with a path
        role: 'upath'/'ualtpath' = shared by both topologies,
        'path'/'altpath' = simulation-only, 'opath'/'oaltpath' = old
        (model-only).  Returns a list of (edgelist, groupname) pairs.
        """
        groups = {}
        #print "get_diff_edge_groups called (%s, %s)" % (path, spath)
        smpath_edges, srpath_edges = [], []
        mpath_edges, rpath_edges = [], []
        smpath = spath
        mpath = path
        # NOTE(review): smulti/multi are assigned below but never read.
        if type(spath[0]) == type([]):
            if len(spath) > 1: smulti = True
            smpath = spath[0]
            for p in spath[1:]:
                srpath_edges += zip(p, p[1:])
                srpath_edges += zip(p[1:], p)
        if type(path[0]) == type([]):
            if len(path) > 1: multi = True
            mpath = path[0]
            for p in path[1:]:
                rpath_edges += zip(p, p[1:])
                rpath_edges += zip(p[1:], p)
        # Edge lists are built in both directions so (u,v)/(v,u) both match.
        mpath_edges = zip(mpath, mpath[1:])
        mpath_edges += zip(mpath[1:], mpath)
        smpath_edges = zip(smpath, smpath[1:])
        smpath_edges += zip(smpath[1:], smpath)
        # o = only in the old/model path, u = shared by old and new.
        mopath_edges = list(set(mpath_edges) - set(smpath_edges))
        mupath_edges = list(set(mpath_edges).intersection(set(smpath_edges)))
        ropath_edges = list(set(rpath_edges) - set(srpath_edges))
        #rupath_edges = list(set(rpath_edges).intersection(set(srpath_edges)))
        #print "mpath: %s" % mpath_edges
        #print "rpath: %s" % rpath_edges
        #print "smpath: %s" % smpath_edges
        #print "srpath: %s" % srpath_edges
        a = set(srpath_edges) ^ set(smpath_edges)
        b = set(rpath_edges) ^ set(mpath_edges)
        if not srpath_edges and not rpath_edges:
            a = set()
            b = set()
        c = set(srpath_edges).intersection((a & b))
        d = set(smpath_edges).intersection((a & b))
        rupath_edges = list(c|d)
        #rupath_edges = list(set(srpath_edges).intersection((a & b)))
        #print "mupath: %s" % mupath_edges
        #print "rupath: %s" % rupath_edges
        ebc = self.edge_betweenness
        top = self.get_edge_betweenness(top=n)
        # Edges present only in the model (removed by the simulation).
        redges = list(set(self.model.G.edges()) \
                      - set(self.graph.edges()))
        for (u, v, d) in self.graph.edges(data=True):
            debug = False
            #if u == 'oslo-gw' or v == 'oslo-gw': debug = True
            if debug: print "Looking at (%s, %s, %s)" % (u, v, d)
            if (u,v) in redges:
                if debug: print "In redges...ignoring"
                continue
            if (ebc[(u,v)] > threshold and ebc[(v,u)] > threshold) \
                   or (u,v) in top:
                if debug: print "Is main edge"
                if (u,v) in mupath_edges and (u,v) not in rupath_edges:
                    if debug: print "Is mupath_edge"
                    if 'mainupath' not in groups:
                        groups['mainupath'] = [(u,v,d)]
                    else:
                        groups['mainupath'].append((u,v,d))
                elif (u,v) in rupath_edges:
                    if debug: print "Is rupath_edge"
                    if 'mainualtpath' not in groups:
                        groups['mainualtpath'] = [(u,v,d)]
                    else:
                        groups['mainualtpath'].append((u,v,d))
                elif (u,v) in smpath_edges and srpath_edges and (u,v) not in srpath_edges:
                    if debug: print "Is smpath_edge (not sr)"
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                elif (u,v) in smpath_edges:
                    if debug: print "Is smpath_edge"
                    if 'mainpath' not in groups:
                        groups['mainpath'] = [(u,v,d)]
                    else:
                        groups['mainpath'].append((u,v,d))
                elif (u,v) in srpath_edges:
                    if debug: print "Is srpath_edge"
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                elif (u,v) in mopath_edges:
                    if debug: print "Is mopath_edge"
                    if 'mainopath' not in groups:
                        groups['mainopath'] = [(u,v,d)]
                    else:
                        groups['mainopath'].append((u,v,d))
                elif (u,v) in ropath_edges:
                    if debug: print "Is ropath_edge"
                    if 'mainoaltpath' not in groups:
                        groups['mainoaltpath'] = [(u,v,d)]
                    else:
                        groups['mainoaltpath'].append((u,v,d))
                else:
                    if debug: print "Is notpath_edge"
                    if 'main' not in groups:
                        groups['main'] = [(u,v,d)]
                    else:
                        groups['main'].append((u,v,d))
            else:
                if debug: print "Is normal edge"
                if (u,v) in mupath_edges and (u,v) not in rupath_edges:
                    if debug: print "Is mupath_edge"
                    if 'normalupath' not in groups:
                        groups['normalupath'] = [(u,v,d)]
                    else:
                        groups['normalupath'].append((u,v,d))
                elif (u,v) in rupath_edges:
                    if debug: print "Is rupath_edge"
                    if 'normalualtpath' not in groups:
                        groups['normalualtpath'] = [(u,v,d)]
                    else:
                        groups['normalualtpath'].append((u,v,d))
                elif (u,v) in smpath_edges and srpath_edges and (u,v) not in srpath_edges:
                    if debug: print "Is smpath_edge (not sr)"
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                # NOTE(review): this branch repeats the rupath_edges test
                # handled above and is therefore unreachable.
                elif (u,v) in rupath_edges:
                    if debug: print "Is rupath_edge"
                    if 'normalualtpath' not in groups:
                        groups['normalualtpath'] = [(u,v,d)]
                    else:
                        groups['normalualtpath'].append((u,v,d))
                elif (u,v) in smpath_edges:
                    if debug: print "Is smpath_edge"
                    if 'normalpath' not in groups:
                        groups['normalpath'] = [(u,v,d)]
                    else:
                        groups['normalpath'].append((u,v,d))
                elif (u,v) in srpath_edges:
                    if debug: print "Is srpath_edge"
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                elif (u,v) in mopath_edges:
                    if debug: print "Is mopath_edge"
                    if 'normalopath' not in groups:
                        groups['normalopath'] = [(u,v,d)]
                    else:
                        groups['normalopath'].append((u,v,d))
                elif (u,v) in ropath_edges:
                    if debug: print "Is ropath_edge"
                    if 'normaloaltpath' not in groups:
                        groups['normaloaltpath'] = [(u,v,d)]
                    else:
                        groups['normaloaltpath'].append((u,v,d))
                else:
                    if debug: print "Is notpath_edge"
                    if 'normal' not in groups:
                        groups['normal'] = [(u,v,d)]
                    else:
                        groups['normal'].append((u,v,d))
        # Model-only edges are classified by the model and folded into the
        # "old path" groups.
        redge_data = self.model.get_edge_groups(edges=redges, path=path)
        for (edges, etype) in redge_data:
            if etype == 'mainpath':
                if 'mainopath' in groups:
                    groups['mainopath'] += edges
                else:
                    groups['mainopath'] = edges
            elif etype == 'mainaltpath':
                if 'mainoaltpath' in groups:
                    groups['mainoaltpath'] += edges
                else:
                    groups['mainoaltpath'] = edges
            elif etype == 'normalpath':
                if 'normalopath' in groups:
                    groups['normalopath'] += edges
                else:
                    groups['normalopath'] = edges
            elif etype == 'normalaltpath':
                if 'normaloaltpath' in groups:
                    groups['normaloaltpath'] += edges
                else:
                    groups['normaloaltpath'] = edges
        return [(groups[k], k) for k in groups]
    def get_diff_node_groups(self, path, spath, threshold=0.095, n=10):
        """Classify nodes for display when diffing the simulated topology
        against the model topology.

        *path* is the model's node path and *spath* the simulation's.
        Group names combine a betweenness class (main/normal) with a role:
        'start'/'stop' = path endpoints, 'upath' = on both paths,
        'path' = simulation-only, 'opath' = old (model-only).  Nodes
        removed by the simulation are classified via the model and merged
        into the 'opath' groups.  Returns (nodelist, groupname) pairs.
        """
        groups = {}
        bc = self.betweenness
        top = self.get_betweenness(top=n)
        # o = only on the old path, u = shared by both paths.
        opath = list(set(path) - set(spath))
        upath = list(set(path).intersection(set(spath)))
        # Nodes present only in the model (removed by the simulation).
        rnodes = list(set(self.model.G.nodes()) - set(self.graph.nodes()))
        for node in self.graph.nodes():
            if node in rnodes: continue
            if bc[node] > threshold or node in top:
                if node == path[0]:
                    if not 'mainstart' in groups:
                        groups['mainstart'] = [node]
                    else:
                        groups['mainstart'].append(node)
                elif node == path[-1]:
                    if not 'mainstop' in groups:
                        groups['mainstop'] = [node]
                    else:
                        groups['mainstop'].append(node)
                elif node in upath:
                    if not 'mainupath' in groups:
                        groups['mainupath'] = [node]
                    else:
                        groups['mainupath'].append(node)
                elif node in spath:
                    if not 'mainpath' in groups:
                        groups['mainpath'] = [node]
                    else:
                        groups['mainpath'].append(node)
                elif node in opath:
                    if not 'mainopath' in groups:
                        groups['mainopath'] = [node]
                    else:
                        groups['mainopath'].append(node)
                else:
                    if not 'main' in groups:
                        groups['main'] = [node]
                    else:
                        groups['main'].append(node)
            else:
                if node == path[0]:
                    if not 'normalstart' in groups:
                        groups['normalstart'] = [node]
                    else:
                        groups['normalstart'].append(node)
                elif node == path[-1]:
                    if not 'normalstop' in groups:
                        groups['normalstop'] = [node]
                    else:
                        groups['normalstop'].append(node)
                elif node in upath:
                    if not 'normalupath' in groups:
                        groups['normalupath'] = [node]
                    else:
                        groups['normalupath'].append(node)
                elif node in spath:
                    if not 'normalpath' in groups:
                        groups['normalpath'] = [node]
                    else:
                        groups['normalpath'].append(node)
                elif node in opath:
                    if not 'normalopath' in groups:
                        groups['normalopath'] = [node]
                    else:
                        groups['normalopath'].append(node)
                else:
                    if not 'normal' in groups:
                        groups['normal'] = [node]
                    else:
                        groups['normal'].append(node)
        # Removed nodes are classified by the model and folded into the
        # "old path" groups.
        rnode_data = self.model.get_node_groups(nodes=rnodes, path=path)
        for (nodes, ntype) in rnode_data:
            if ntype == 'mainpath':
                if 'mainopath' in groups:
                    groups['mainopath'] += nodes
                else:
                    groups['mainopath'] = nodes
            elif ntype == 'normalpath':
                if 'normalopath' in groups:
                    groups['normalopath'] += nodes
                else:
                    groups['normalopath'] = nodes
        return [(groups[k], k) for k in groups]
def get_edge_groups(self, threshold=0.01, n=20, path=None):
groups, mpath_edges, rpath_edges = {}, [], []
multi = False
mpath = path
if path != None:
if type(path[0]) == type([]):
if len(path) > 1: multi = True
mpath = path[0]
for p in path[1:]:
rpath_edges += zip(p, p[1:])
rpath_edges += zip(p[1:], p)
mpath_edges = zip(mpath, mpath[1:])
mpath_edges += zip(mpath[1:], mpath)
ebc = self.edge_betweenness
top = self.get_edge_betweenness(top=n)
for (u, v, d) in self.graph.edges(data=True):
if (ebc[(u,v)] > threshold and ebc[(v,u)] > threshold) \
or (u,v) in top:
if (path != None) and (not multi) and ((u,v) in mpath_edges):
if 'mainpath' not in groups:
groups['mainpath'] = [(u,v,d)]
else:
groups['mainpath'].append((u,v,d))
elif multi and mpath_edges and (u,v) in mpath_edges \
and (u,v) not in rpath_edges:
if 'mainaltpath' not in groups:
groups['mainaltpath'] = [(u,v,d)]
else:
groups['mainaltpath'].append((u,v,d))
elif mpath_edges and (u,v) in mpath_edges:
if 'mainpath' not in groups:
groups['mainpath'] = [(u,v,d)]
else:
groups['mainpath'].append((u,v,d))
elif rpath_edges and (u,v) in rpath_edges:
if 'mainaltpath' not in groups:
groups['mainaltpath'] = [(u,v,d)]
else:
groups['mainaltpath'].append((u,v,d))
else:
if 'main' not in groups:
groups['main'] = [(u,v,d)]
else:
groups['main'].append((u,v,d))
else:
if (path != None) and (not multi) and ((u,v) in mpath_edges):
if 'normalpath' not in groups:
groups['normalpath'] = [(u,v,d)]
else:
groups['normalpath'].append((u,v,d))
elif multi and mpath_edges and (u,v) in mpath_edges \
and (u,v) not in rpath_edges:
if 'normalaltpath' not in groups:
groups['normalaltpath'] = [(u,v,d)]
else:
groups['normalaltpath'].append((u,v,d))
elif mpath_edges and (u,v) in mpath_edges:
if 'normalpath' not in groups:
groups['normalpath'] = [(u,v,d)]
else:
groups['normalpath'].append((u,v,d))
elif rpath_edges and (u,v) in rpath_edges:
if 'normalaltpath' not in groups:
groups['normalaltpath'] = [(u,v,d)]
else:
groups['normalaltpath'].append((u,v,d))
else:
if 'normal' not in groups:
groups['normal'] = [(u,v,d)]
else:
groups['normal'].append((u,v,d))
return [(groups[k], k) for k in groups]
def has_effects(self):
return len(self.effects) > 0
    def linkfail(self, n1, n2, record=True):
        """Fail the link between n1 and n2 (both directions).

        Records the change (with the old metrics, for undo) unless
        record=False, then recomputes betweenness, anycast groups,
        effects and link loads.  Returns False when the link is absent.
        """
        if not self.graph.has_edge(n1,n2):
            return False
        # Saved so undo() can restore both directional metrics.
        metrics = (self.graph[n1][n2]['weight'], self.graph[n2][n1]['weight'])
        if record:
            self.changes.append({'type': Simulation.SC_LINKFAIL, 'pair': (n1,n2),
                                 'metrics': metrics})
        self.graph.remove_edge(n1, n2)
        self.graph.remove_edge(n2, n1)
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
    def routerfail(self, n1, record=True):
        """Fail router *n1*: remove it and all its incident edges.

        The removed edges (with attributes) are recorded for undo unless
        record=False; derived state is then recomputed.  Returns False
        when the node does not exist.
        """
        if not self.graph.has_node(n1):
            return False
        removed_edges = []
        # NOTE(review): edges are removed while iterating neighbors();
        # safe if neighbors() returns a list (networkx 1.x) -- confirm.
        for node in self.graph.neighbors(n1):
            removed_edges.append((n1, node, self.graph[n1][node]))
            self.graph.remove_edge(n1, node)
            removed_edges.append((node, n1, self.graph[node][n1]))
            self.graph.remove_edge(node, n1)
        self.graph.remove_node(n1)
        if record:
            self.changes.append({'type': Simulation.SC_ROUTERFAIL, 'node': n1,
                                 'edges': removed_edges})
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
    def change_metric(self, n1, n2, metric, record=True, bidir=None):
        """Set the metric of link n1->n2 to *metric*.

        If the reverse metric currently equals the forward one the change
        is applied bidirectionally, unless bidir=False or record=False
        (undo replays one direction at a time).  Records the change for
        undo unless record=False, then recomputes derived state.
        Returns False when the link does not exist.
        """
        bidirectional = False
        metric = float(metric)
        if not self.graph.has_edge(n1, n2):
            return False
        old_metric = self.graph[n1][n2]['weight']
        if old_metric == self.graph[n2][n1]['weight']:
            bidirectional = True
        if bidir == False:
            bidirectional = False
        if not record:
            bidirectional = False
        self.graph.remove_edge(n1, n2)
        self.graph.add_edge(n1,n2, weight=metric)
        if bidirectional or bidir:
            self.graph.remove_edge(n2, n1)
            self.graph.add_edge(n2,n1, weight=metric)
        if record:
            self.changes.append({'type': Simulation.SC_METRIC, 'pair': (n1, n2),
                                 'metrics': (int(old_metric), int(metric)),
                                 'bidir': bidirectional or bidir})
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
    def path(self, source, dest, G=None):
        """Return (cost, paths) for all equal-cost shortest paths from
        *source* to *dest* in *G* (default: the simulated graph, for which
        cached predecessor/distance data is reused).

        Returns (False, None) when *dest* is unreachable.
        """
        if not G:
            G = self.graph
        if G == self.graph:
            # Cached (predecessors, distances) from _refresh_all_paths.
            preds, costs = self.all_paths[source]
        else:
            preds, costs = nx.dijkstra_predecessor_and_distance(G, source)
        if not dest in costs:
            return False, None
        def _get_paths(preds, path, paths, dest):
            # Walk the predecessor DAG backwards from dest, branching on
            # every equal-cost predecessor; completed (reversed) paths are
            # collected in *paths*.
            if dest in path:
                return
            path.append(dest)
            if len(preds[dest]) == 0:
                paths.append(path)
                return
            for newdest in preds[dest]:
                _get_paths(preds, path[:], paths, newdest)
            return paths
        paths = _get_paths(preds, [], [], dest)
        for path in paths:
            path.reverse()
        return costs[dest], paths
    def undo(self, change_no):
        """Undo recorded change number *change_no* (1-based).

        Metric changes are replayed with the old value, failed links are
        re-added with their saved metrics, failed routers are restored
        with all saved edges.  Derived state is recomputed.  Returns
        False for an out-of-range change number.
        """
        if change_no > len(self.changes) or change_no < 1:
            return False
        idx = change_no - 1
        change = self.changes[idx]
        if change['type'] == Simulation.SC_METRIC:
            (u, v) = change['pair']
            w = change['metrics'][0]
            # record=False so the undo itself is not recorded as a change.
            self.change_metric(u, v, w, record=False)
            if change['bidir']:
                self.change_metric(v, u, w, record=False)
        elif change['type'] == Simulation.SC_LINKFAIL:
            (u, v) = change['pair']
            (m1, m2) = change['metrics']
            self.graph.add_edge(u, v, weight=m1)
            self.graph.add_edge(v, u, weight=m2)
        elif change['type'] == Simulation.SC_ROUTERFAIL:
            router = change['node']
            edges = change['edges']
            self.graph.add_node(router)
            for (u, v, w) in edges:
                # w is the saved edge-attribute dict.
                self.graph.add_edge(u, v, **w)
        del self.changes[idx]
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
def is_active(self):
return self.active
def reroute(self, start, stop, via, equal=False, timeout=2*60):
import time
debug = False
max_metric = self.model.config.get('max_metric')
G = self.graph
H = self.graph.copy()
I = self.graph.copy()
K = self.graph.copy()
success = False
results = {}
target_no_paths = [1]
if equal:
target_no_paths = range(2,10) + [1]
ocost, opaths = self.path(start, stop)
cost1, paths1 = self.path(start, via)
if not start in G.nodes() \
or not stop in G.nodes() \
or not via in G.nodes():
print "Invalid nodename"
return []
for path in paths1:
for node in path:
if node == stop:
print "Path to via-node is through stop-node."
print "Exiting"
return (success, results)
if node == via:
continue
H.remove_node(node)
J = H.copy()
cost2, paths2 = self.path(via, stop, H)
S = set(reduce(lambda x,y: x+y, opaths))
U = set(reduce(lambda x,y: x+y, paths1))
V = set(reduce(lambda x,y: x+y, paths2))
A = V.copy()
for node in V:
A.update(G.neighbors(node))
allowed_nodes = list(U.union(A))
if debug: print "Parameters:"
if debug: print "S: %s" % (S)
if debug: print "U: %s" % (U)
if debug: print "V: %s" % (V)
if debug: print "Allowed nodes: %s" % (allowed_nodes)
finished = False
neighbor_inc = 1
start_t = time.time()
while not finished:
if time.time() - start_t >= timeout:
finished = True
success = False
print "Timed out!"
return (success, results)
ocost, opaths = self.path(start, stop, K)
W = set(reduce(lambda x,y: x+y, opaths))
cost1, paths1 = self.path(start, via, I)
cost2, paths2 = self.path(via, stop, J)
if debug: print "Opath now: %s" % opaths
if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
if via in W and len(opaths) in target_no_paths:
if debug: print "Success!"
finished = True
success = True
continue
nochange = True
if debug: print "Negative adjustment loop"
for path1 in paths1:
for (u,v) in zip(path1, path1[1:]):
w = I[u][v]['weight']
if debug: print "Considering (%s,%s,%s) (-1)" % (u,v,w)
if u == start or u == stop or v == start or v == stop:
if debug: print "A: Bad effect = False"
bad_effect = False
minmax = False
if debug: print "Inner negative adjustment loop"
while (not bad_effect) and (not minmax):
w = w - 1
if w < 1:
if debug: print "Reached minimum metric..."
minmax = True
break
I.add_edge(u,v,weight=w)
K.add_edge(u,v,weight=w)
effects = self._refresh_effects(G, I)
if debug: print "B: Bad effect = False"
bad_effect = False
for src in effects:
if src not in allowed_nodes:
for dest in effects[src]:
#print dest
if dest not in allowed_nodes:
bad_effect = True
if not bad_effect:
ocost, opaths = self.path(start, stop, K)
W = set(reduce(lambda x,y: x+y, opaths))
cost1, paths1 = self.path(start, via, I)
if debug: print "Opath now: %s" % opaths
if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
if via in W and len(opaths) in target_no_paths:
if debug: print "Success!"
finished = True
success = True
break
if minmax:
if debug: print "A2: Bad effect = 2"
bad_effect = 2
else:
w = w - 1
if w < 1:
if debug: print "Reached minimum metric..."
continue
I.add_edge(u,v,weight=w)
K.add_edge(u,v,weight=w)
effects = self._refresh_effects(G, I)
if debug: print "C: Bad effect = False"
bad_effect = False
for src in effects:
if src not in allowed_nodes:
for dest in effects[src]:
#print dest
if dest not in allowed_nodes:
bad_effect = True
if bad_effect == True:
I.add_edge(u,v,weight=w+1)
K.add_edge(u,v,weight=w+1)
continue
elif bad_effect == 2:
continue
else:
if debug: print "A: nochange = False"
nochange = False
ocost, opaths = self.path(start, stop, K)
W = set(reduce(lambda x,y: x+y, opaths))
cost1, paths1 = self.path(start, via, I)
if debug: print "Opath now: %s" % opaths
if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
if via in W and len(opaths) in target_no_paths:
if debug: print "Success!"
finished = True
success = True
continue
if debug: print "Positive adjustment loop"
for opath in opaths:
for (u,v) in zip(opath, opath[1:]):
if u in V and v in V: continue
w = I[u][v]['weight']
if debug: print "Considering (%s,%s,%s) (+1)" % (u,v,w)
if u == start or u == stop or v == start or v == stop:
if debug: print "D: Bad effect = False"
bad_effect = False
minmax = False
if debug: print "Inner positive adjustment loop"
while (not bad_effect) and (not minmax):
w = w + 1
if w > max_metric:
if debug: print "Reached maximum metric..."
minmax = True
continue
I.add_edge(u,v,weight=w)
K.add_edge(u,v,weight=w)
effects = self._refresh_effects(G, I)
if debug: print "E: Bad effect = False"
bad_effect = False
for src in effects:
if src not in allowed_nodes:
for dest in effects[src]:
#print dest
if dest not in allowed_nodes:
bad_effect = True
if not bad_effect:
ocost, opaths = self.path(start, stop, K)
W = set(reduce(lambda x,y: x+y, opaths))
cost1, paths1 = self.path(start, via, I)
if debug: print "Opath now: %s" % opaths
if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
if via in W and len(opaths) in target_no_paths:
if debug: print "Success!"
finished = True
success = True
break
if minmax:
if debug: print "D2: Bad effect = 2"
bad_effect = 2
else:
w = w + 1
if w > max_metric:
if debug: print "Reached maximum metric..."
continue
I.add_edge(u,v,weight=w)
K.add_edge(u,v,weight=w)
effects = self._refresh_effects(G, I)
if debug: print "F: Bad effect = False"
bad_effect = False
for src in effects:
if src not in allowed_nodes:
for dest in effects[src]:
#print dest
if dest not in allowed_nodes:
bad_effect = True
if bad_effect == True:
I.add_edge(u,v,weight=w-1)
K.add_edge(u,v,weight=w-1)
continue
elif bad_effect == 2:
continue
else:
if debug: print "B: nochange = False"
nochange = False
ocost, opaths = self.path(start, stop, K)
W = set(reduce(lambda x,y: x+y, opaths))
cost1, paths1 = self.path(start, via, I)
if debug: print "Opath now: %s" % opaths
if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
if via in W and len(opaths) in target_no_paths:
if debug: print "Success!"
finished = True
success = True
continue
if debug: print "2nd negative adjustment loop"
for path2 in paths2:
for (u,v) in zip(path2, path2[1:]):
w = J[u][v]['weight']
if debug: print "Considering (%s,%s,%s) (-1)" % (u,v,w)
w = w - 1
if w < 1:
if debug: print "Reached minimum metric..."
continue
J.add_edge(u,v,weight=w)
K.add_edge(u,v,weight=w)
effects = self._refresh_effects(H, J)
if debug: print "G: Bad effect = False"
bad_effect = False
for src in effects:
if src not in allowed_nodes:
for dest in effects[src]:
#print dest
if dest not in allowed_nodes:
bad_effect = True
if bad_effect:
J.add_edge(u,v,weight=w+1)
K.add_edge(u,v,weight=w+1)
continue
else:
if debug: print "C: nochange = False"
nochange = False
if debug: print "Considering increasing allowed nodes"
if nochange:
if neighbor_inc > 2:
if debug: print "No solution found"
finished = True
success = False
continue
append_nodes = []
for node in allowed_nodes:
append_nodes += G.neighbors(node)
if debug: print "Increasing set of nodes"
allowed_nodes += append_nodes
neighbor_inc += 1
else:
if debug: print "nochange was False, so going on"
for (u,v,w) in K.edges(data=True):
if (u,v) in results: continue
old_w = G[u][v]['weight']
if old_w != w:
results[(u,v)] = w
results[(v,u)] = w
#for (u,v,w) in J.edges():
# if (u,v) in results: continue
# old_w = H.get_edge(u,v)
# if old_w != w:
# results[(u,v)] = w
# results[(v,u)] = w
return (success, results)
    def minimal_link_costs(self):
        """Greedily reduce symmetric link metrics as far as possible without
        changing any shortest path in the current graph.

        Returns a copy of self.graph with the minimized weights; the working
        graph itself is left untouched.
        """
        debug = False
        ebc = self.edge_betweenness
        G = self.graph
        H = self.graph.copy()
        # Try heaviest links first; break ties by edge betweenness.
        edges = sorted(H.edges(data=True), cmp=lambda x,y: cmp(y[2]['weight'], x[2]['weight']) \
                or cmp(ebc[(x[0],x[1])],
                       ebc[(y[0],y[1])]))
        finished = False
        while not finished:
            adjustment_found = False
            for (u,v,w) in edges:
                w = w['weight']
                # Only consider links whose metric is already symmetric.
                if not w == H[v][u]['weight']:
                    continue
                old_w = G[u][v]['weight']
                if debug: print "Considering (%s,%s)" % (u,v)
                count = 1
                while count:
                    # Decrement both directions; keep going while no routing change.
                    w = w - 1
                    count = 0
                    if w < 1: continue
                    if debug: print "Trying metrics..",
                    H.add_edge(u,v,weight=w)
                    H.add_edge(v,u,weight=w)
                    effects = self._refresh_effects(G, H)
                    if effects:
                        # The decrement changed some shortest path: roll back.
                        if abs(old_w - w) < 2:
                            H.add_edge(u,v,weight=old_w)
                            H.add_edge(v,u,weight=old_w)
                        else:
                            H.add_edge(u,v,weight=w+1)
                            H.add_edge(v,u,weight=w+1)
                        if debug: print "failed! (%s->%s)" % (old_w, w+1)
                    else:
                        count = 1
                        adjustment_found = True
                        if debug: print "ok"
            if not adjustment_found:
                finished = True
        return H
    def _refresh_betweenness(self):
        """Recompute the node and edge centrality caches for self.graph."""
        self.betweenness = None
        # networkx changed the keyword from `weighted_edges` to `weight` after 1.5.
        if distutils.version.StrictVersion(nx.__version__) > distutils.version.StrictVersion("1.5"):
            self.betweenness = nx.load_centrality(self.graph, weight='weight')
        else:
            self.betweenness = nx.load_centrality(self.graph, weighted_edges=True)
        # NOTE(review): `weight=True` looks suspicious -- newer networkx expects an
        # edge-attribute *name* (e.g. weight='weight'); confirm against the nx version in use.
        self.edge_betweenness = nx.edge_betweenness(self.graph, normalized=True, weight=True)
    def _refresh_effects(self, OG=None, NG=None):
        """Diff shortest-path predecessor trees between old graph OG and new
        graph NG.

        Returns {source: {dest: [{'old': preds, 'new': preds}]}} for every
        (source, dest) pair whose routing changed; empty dict when nothing did.
        Defaults: OG = the model's base graph, NG = this object's graph.
        """
        self._refresh_all_paths()
        if not OG:
            OG = self.model.G
        if not NG:
            NG = self.graph
        diff_paths = {}
        sources = OG.nodes()
        for source in sources:
            diff_by_dst = {}
            # Skip sources that no longer exist in the new graph.
            if not source in NG: continue
            # Use the cached predecessor trees when the graphs are the cached ones.
            opreds = self.model.all_paths[source][0]
            if OG != self.model.G:
                opreds = nx.dijkstra_predecessor_and_distance(OG, source)[0]
            npreds = self.all_paths[source][0]
            if NG != self.graph:
                npreds = nx.dijkstra_predecessor_and_distance(NG, source)[0]
            for dest in opreds:
                if not dest in npreds:
                    # Destination became unreachable in the new graph.
                    diff_by_dst[dest] = [{'old': opreds[dest], 'new': []}]
                    continue
                diff_res = self._path_cmp(opreds[dest], npreds[dest])
                if diff_res:
                    if dest in diff_by_dst:
                        diff_by_dst[dest].append(diff_res)
                    else:
                        diff_by_dst[dest] = [diff_res]
            if diff_by_dst.keys():
                diff_paths[source] = diff_by_dst
        #print diff_paths
        return diff_paths
def _path_cmp(self, oldpaths, newpaths):
if cmp(oldpaths, newpaths) != 0:
return {'old': oldpaths, 'new': newpaths}
return None
    def _refresh_anycast(self):
        """Recompute self.acgroups: for every node, the anycast member(s) it
        would reach, i.e. all members tied at the minimum shortest-path cost."""
        accosts = {}
        acgroups = {}
        for source in self.graph.nodes():
            if source in self.acnodes:
                # An anycast member always serves itself.
                acgroups[source] = [source]
                continue
            lengths = nx.single_source_dijkstra_path_length(self.graph, source)
            for dest in self.acnodes:
                if dest not in lengths:
                    # Unreachable anycast member: ignore.
                    continue
                else:
                    cost = lengths[dest]
                if not source in accosts:
                    accosts[source] = cost
                    acgroups[source] = [dest]
                elif cost == accosts[source]:
                    # Tie: this member joins the group.
                    acgroups[source] += [dest]
                elif cost < accosts[source]:
                    # Strictly closer member replaces the group.
                    accosts[source] = cost
                    acgroups[source] = [dest]
        self.acgroups = acgroups
    def _apply_load_changes(self,effects):
        """Translate routing-change `effects` (from _refresh_effects) into
        per-edge load adjustments.

        Traffic is first subtracted from the edges of paths that are no longer
        used ("negative" adjustments), then re-added along the corresponding
        new paths ("positive" adjustments). Returns {(u, v): delta}.
        """
        import time
        stime = time.time()
        via_edges_seen = {}
        adjustments = {}
        after_adjustments = {}
        traverse_edges = []
        old_path_parts = {}
        old_paths = {}
        # Collect every distinct (via-node, destination) pair whose old
        # routing is affected.
        for node in effects:
            for dest in effects[node]:
                no_old = len(effects[node][dest][0]['old'])
                no_new = len(effects[node][dest][0]['new'])
                old_vias = effects[node][dest][0]['old']
                new_vias = effects[node][dest][0]['new']
                for vianode in old_vias:
                    if (vianode, dest) in via_edges_seen:
                        continue
                    #print " Considering viapath (%s, %s)" % (vianode, dest)
                    traverse_edges.append((vianode,dest))
                    via_edges_seen[(vianode,dest)] = True
        #print "Viapaths found (%s secs)" % (time.time() - stime)
        # Negative adjustments: remove the old load contributions.
        for (vianode, dest) in traverse_edges:
            old_paths = self.model.nodes_and_paths_using_edge(vianode,dest)[1]
            # reduce_old(node, dest)
            loads = self.model.linkloads
            G = self.model.graph
            #print "Finding load parts for (%s, %s) (%s secs)" % (vianode, dest,
            #                                                     time.time() - stime)
            old_path_load_parts = self.model.get_link_load_part(vianode, dest)
            old_path_parts[(vianode, dest)] = old_path_load_parts.copy()
            for (u,v) in old_path_load_parts:
                change = -old_path_load_parts[(u,v)]
                # Keep the most negative (largest-magnitude) adjustment per edge.
                if (u,v) in adjustments:
                    if change < adjustments[(u,v)]:
                        #if u == 'porsgrunn-gw' or v == 'porsgrunn-gw':
                        #print "  Setting (%s, %s) to %s (<%s)" % (u,v, change,
                        #                                          adjustments[(u,v)])
                        adjustments[(u,v)] = change
                        if u in effects:
                            if dest in effects[u] \
                                   and vianode in effects[u][dest][0]['old']:
                                new_paths = self.path(vianode,dest)[1]
                                if new_paths == None:
                                    new_paths = [[]]
                                no_new_paths = len(effects[u][dest][0]['new'])
                                no_old_paths = len(effects[u][dest][0]['old'])
                                deduct = 0
                                for npath in new_paths:
                                    edges = zip(npath, npath[1:])
                                    if (v,u) in edges:
                                        deduct = old_path_parts[(vianode, dest)][(u,v)]
                                        # NOTE(review): no_old_paths/no_new_paths is
                                        # *integer* division under Python 2 before the
                                        # float() conversion -- confirm this is intended.
                                        deduct *= float(no_old_paths/no_new_paths)
                                        if (v,u) in after_adjustments:
                                            if -deduct < after_adjustments[(v,u)]:
                                                after_adjustments[(v,u)] = -deduct
                                                #print "Deducting %s from (%s,%s)" % (deduct, v,u)
                                        else:
                                            after_adjustments[(v,u)] = -deduct
                                            #print "Deducting %s from (%s,%s)" % (deduct, v,u)
                else:
                    #if u == 'porsgrunn-gw' or v == 'porsgrunn-gw':
                    #print "  Setting (%s, %s) to %s" % (u,v, change)
                    adjustments[(u,v)] = change
                    if u in effects:
                        if dest in effects[u] \
                               and vianode in effects[u][dest][0]['old']:
                            new_paths = self.path(vianode,dest)[1]
                            if new_paths == None:
                                new_paths = [[]]
                            no_new_paths = len(effects[u][dest][0]['new'])
                            no_old_paths = len(effects[u][dest][0]['old'])
                            deduct = 0
                            for npath in new_paths:
                                edges = zip(npath, npath[1:])
                                if (v,u) in edges:
                                    deduct = old_path_parts[(vianode, dest)][(u,v)]
                                    # NOTE(review): integer division, as above -- confirm.
                                    deduct *= float(no_old_paths/no_new_paths)
                                    if (v,u) in after_adjustments:
                                        if -deduct < after_adjustments[(v,u)]:
                                            after_adjustments[(v,u)] = -deduct
                                            #print "Deducting %s from (%s,%s)" % (deduct, v,u)
                                    else:
                                        after_adjustments[(v,u)] = -deduct
                                        #print "Deducting %s from (%s,%s)" % (deduct, v, u)
        #print "Negative adjustments complete (%s secs)" % (time.time() - stime)
        # Positive adjustments: re-apply the removed load along the new paths.
        pos_changes = {}
        for (vianode, dest) in traverse_edges:
            old_paths = self.model.nodes_and_paths_using_edge(vianode,dest)[1]
            for (n1, n2) in old_paths:
                if not (n1 in self.graph and n2 in self.graph): continue
                if not (n2 == dest or n1 == vianode): continue
                new_paths = self.path(n1,n2)[1]
                if new_paths == None:
                    new_paths = [[]]
                opaths = old_paths[(n1,n2)]
                ofirst_edges = []
                # The edge adjacent to the changed end carries the load to move.
                for opath in opaths:
                    if n2 == dest:
                        ofirst_edges.append((opath[0], opath[1]))
                    else:
                        ofirst_edges.append((opath[-2], opath[-1]))
                old = 0
                for oedge in ofirst_edges:
                    if oedge in old_path_parts[(vianode, dest)]:
                        old += old_path_parts[(vianode, dest)][oedge]
                if old == 0: continue
                #print "Applying old load %s to new path (%s,%s)" \
                #      % (old, n1, n2)
                for path in new_paths:
                    edges = zip(path, path[1:])
                    for (u,v) in edges:
                        if (u,v) not in pos_changes:
                            pos_changes[(u,v)] = old
                        else:
                            # Keep the maximum positive change per edge.
                            if old > pos_changes[(u,v)]:
                                pos_changes[(u,v)] = old
        #print "Positive adjustments complete (%s secs)" % (time.time() - stime)
        # Merge positive changes (and any deferred deductions) into the result.
        for (u,v) in pos_changes:
            #if (u,v) == ('trd-gw1', 'hovedbygget-gw1'):
            #    print "  Adjusting (%s, %s) += %s" % (u, v, pos_changes[(u,v)])
            if (u,v) not in adjustments:
                if (u,v) not in after_adjustments:
                    adjustments[(u,v)] = pos_changes[(u,v)]
                else:
                    adjustments[(u,v)] = pos_changes[(u,v)] \
                                         + after_adjustments[(u,v)]
            else:
                if (u,v) not in after_adjustments:
                    adjustments[(u,v)] += pos_changes[(u,v)]
                else:
                    adjustments[(u,v)] += pos_changes[(u,v)] \
                                          + after_adjustments[(u,v)]
        #print "Returning adjustments (%s secs)" % (time.time() - stime)
        return adjustments
    def _refresh_linkload(self):
        """Re-derive the simulated link loads from the model's measured loads
        by applying the adjustments implied by the current routing changes."""
        self.linkloads = self.model.linkloads.copy()
        newloads = self.model.linkloads.copy()
        # Nothing to do without measured loads.
        if not self.linkloads: return
        effects = self.effects
        adjustments = self._apply_load_changes(effects)
        for (u,v) in adjustments:
            if adjustments[(u,v)] == 0: continue
            # Only adjust edges that still exist in the simulated graph.
            if self.graph.has_edge(u,v):
                #print "Final adjustment for (%s, %s) += %s" % (u,v, adjustments[(u,v)])
                if (u,v) in newloads:
                    newloads[(u,v)] += adjustments[(u,v)]
                else:
                    newloads[(u,v)] = adjustments[(u,v)]
        # Sanity check: a negative load means the adjustment model over-subtracted.
        for (u,v) in sorted(newloads):
            if newloads[(u,v)] < 0:
                print "Assertion failed for load on (%s,%s): %s" \
                      % (u,v, newloads[(u,v)])
        self.linkloads = newloads
    def _refresh_all_paths(self):
        """Cache the Dijkstra (predecessors, distances) pair for every node."""
        for node in self.graph:
            self.all_paths[node] = nx.dijkstra_predecessor_and_distance(self.graph, node)
import networkx as nx
from pajek import read_pajek
import utils
import distutils.version
class Model:
    """Static view of the routed network: the metric-weighted graph plus
    cached shortest paths, centrality scores and measured link loads."""
    def __init__(self, graph, config, debug=False):
        # graph: parsed topology whose edges carry 'value' (metric); config: dict-like.
        self.graph = graph
        self.config = config
        self.debug = debug
        # G is the working copy with 'value' exposed as the 'weight' attribute.
        self.G = self._make_weighted_copy()
        self._refresh_betweenness()
        self.linkloads = {}       # measured load per directed edge (u, v)
        self.all_paths = {}       # node -> (predecessors, distances) cache
        self.paths_using_edge = {}  # (u, v) -> (nodes, paths) cache
        self.linkload_parts = {}  # (u, v) -> per-edge load contribution cache
        self._refresh_all_paths()
    def refresh_linkloads(self):
        """Fetch fresh link-load measurements; return True on success."""
        if not self.config.get('use_linkloads'): return False
        self.linkloads = utils.read_linkloads(self.graph,
                                              self.config.get('linkloads_host'),
                                              self.config.get('linkloads_url'))
        if not self.linkloads: return False
        # The per-edge load-part cache derives from linkloads: invalidate it.
        self.linkload_parts = {}
        return True
def has_linkloads(self):
return len(self.linkloads.keys()) > 0
def get_in_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(v,u)])
def get_out_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(u,v)])
def get_betweenness(self, top=None):
if not top:
return self.betweenness
bc = self.betweenness
toplist = sorted(bc, lambda x,y: cmp(bc[x], bc[y]))
toplist.reverse()
return toplist[:top]
def get_edge_betweenness(self, top=None):
if not top:
return self.edge_betweenness
ebc = self.edge_betweenness
toplist = sorted(ebc,
lambda (x1,y1), (x2, y2): cmp(ebc[(x1, y1)], ebc[(x2, y2)]))
toplist.reverse()
return toplist[:top]
def uneven_metrics(self):
G = self.G
return filter(lambda x: G[x[0]][x[1]] != G[x[1]][x[0]],
G.edges())
def get_total_in_load(self, node, G=None, loads=None):
sum = 0
if not loads: loads = self.linkloads
if not G: G = self.graph
for neighbor in G[node]:
sum += loads[neighbor, node]
return sum
def get_total_out_load(self, node, G=None, loads=None):
sum = 0
if not loads: loads = self.linkloads
if not G: G = self.graph
for neighbor in G[node]:
sum += loads[node, neighbor]
return sum
def get_transit_links(self, u, v):
paths = self.nodes_and_paths_using_edge(u,v,self.G, True)[1]
return paths.keys()
    def nodes_and_paths_using_edge(self, u, v, G=None, transit_only=False):
        """Find every node and every (source, dest) path that routes over the
        directed edge (u, v).

        Returns (candidates, {(source, dest): [paths]}). With transit_only,
        only pairs where neither endpoint is u or v are kept. Results for the
        cached graphs are memoized in self.paths_using_edge.
        """
        import time
        stime = time.time()
        if not G:
            G = self.G
        if not transit_only and (G == self.G or G == self.graph) and (u,v) in self.paths_using_edge:
            return self.paths_using_edge[(u,v)]
        candidates = set()
        retpaths = {}
        #print "  Finding candidates (%s secs)" % (time.time() - stime)
        # A node is a candidate if one of its shortest paths to v ends with (u, v).
        for node in G:
            if node == v: continue
            paths = self.path(node, v, G)
            if not paths: continue
            for path in paths[1]:
                if path[-2] == u:
                    candidates.add(node)
        #print "  Done. (%s secs)" % (time.time() - stime)
        # Collect every candidate->non-candidate path that actually crosses (u, v).
        for node in candidates:
            for dest in (set(G.nodes()) - candidates):
                paths = self.path(node, dest, G)
                if not paths: continue
                paths = paths[1]
                for path in paths:
                    edges = zip(path, path[1:])
                    if (u,v) in edges:
                        if (node,dest) not in retpaths:
                            if transit_only:
                                if node not in (u,v) and dest not in (u,v):
                                    retpaths[(node,dest)] = [path]
                            else:
                                retpaths[(node,dest)] = [path]
                        else:
                            if transit_only:
                                if node not in (u,v) and dest not in (u,v):
                                    retpaths[(node,dest)].append(path)
                            else:
                                retpaths[(node,dest)].append(path)
        #print "  Returning (%s secs)" % (time.time() - stime)
        if not transit_only:
            self.paths_using_edge[(u,v)] = (candidates, retpaths)
        return candidates, retpaths
def get_link_load_part(self, u, v, loads=None, G=None):
import time
stime = time.time()
use_cache = False
if not G:
G = self.G
if not loads:
loads = self.linkloads
if loads == self.linkloads:
use_cache = True
#print " Cache is possible, keys:"
#print " %s" % self.linkload_parts.keys()
if use_cache and (u,v) in self.linkload_parts:
#print " Returning from cache (%s secs)" % (time.time() - stime)
return self.linkload_parts[(u,v)]
#print " Finding nodes_and_paths (%s, %s) (%s secs)" % (u,v,time.time()-stime)
nodes, pathlist = self.nodes_and_paths_using_edge(u, v, G)
#print " Nodes: %s -- Pathlist: %s" % (nodes, pathlist)
#print " Done. (%s secs)" % (time.time()-stime)
partloads = {}
counts = {}
for paths in pathlist.values():
numpaths = len(paths)
pathloads = {}
for path in paths:
#print " Finding path_loads (%s, %s) (%s secs)" % (u,v,time.time()-stime)
edges = self.get_path_loads(u, v, path, numpaths, loads, G)
for (s,t) in edges:
if (s,t) not in pathloads:
pathloads[(s,t)] = edges[(s,t)]
else:
pathloads[(s,t)] += edges[(s,t)]
partloads.update(pathloads)
for (s,t) in partloads:
try:
assert float(partloads[(s,t)]) -1 <= float(loads[(s,t)])
except:
print "Assertion failed for (%s,%s) %s > %s" \
% (s,t, partloads[(s,t)], loads[(s,t)])
#print " Returning (%s secs)" % (time.time()-stime)
if use_cache:
self.linkload_parts[(u,v)] = partloads
return partloads
    def get_path_loads(self, u, v, path,
                       numpaths=1,
                       loads=None,
                       G=None):
        """Propagate the load observed on edge (u, v) along `path`, estimating
        the contribution on each of the path's edges.

        numpaths divides the (u, v) load among equal-cost paths. Returns
        {(s, t): load} for every edge of `path`, or False on an invalid call.
        """
        if not loads:
            loads = self.linkloads
        if not G:
            G = self.G
        edges = zip(path, path[1:])
        if path[0] == u:
            # Path starts at the measured edge: walk forwards.
            pass
        elif path[-1] == v:
            # Path ends at the measured edge: walk backwards.
            edges.reverse()
        elif (u,v) not in edges:
            print "Invalid call:"
            print "get_path_loads: (%s -> %s) [%s]" % (u,v,path)
            return False
        else:
            # (u, v) lies in the middle: split into two half-paths and recurse.
            path1, path2 = [], []
            i = 0
            while path[i] != v:
                path1.append(path[i])
                i += 1
            path1.append(v)
            path2.append(u)
            path2.append(v)
            i += 1
            for node in path[i:]:
                path2.append(node)
            #print "Splitting call in two: %s, %s" % (path1, path2)
            res1 = self.get_path_loads(u,v,path1,numpaths,loads,G)
            res2 = self.get_path_loads(u,v,path2,numpaths,loads,G)
            res1.update(res2)
            return res1
        #print "get_path_loads: (%s -> %s) [%s]" % (u,v,path)
        cr = utils.calc_ratio
        ndio = utils.node_diff_in_out
        loadmatrix = {}
        # Seed the walk with the per-ECMP-share of the measured (u, v) load.
        loadmatrix[u] = {'out': loads[(u,v)] / float(numpaths)}
        loadmatrix[v] = { 'in': loads[(u,v)] / float(numpaths)}
        for i in range(len(edges)):
            (s,t) = edges[i]
            #print "Looking at [%s] (%s,%s)" % (i,s,t)
            if s in loadmatrix:
                if not 'out' in loadmatrix[s]:
                    # Scale the inbound load by s's measured in/out ratio toward t.
                    loadmatrix[s]['out'] = loadmatrix[s]['in'] * cr(G, loads,
                                                                    s, t, True, False)
                #print "Load(in) :", loadmatrix[s]['in']
                #print "Load(out):", loadmatrix[s]['out']
                loadmatrix[t] = {'in': loadmatrix[s]['out']}
            elif t in loadmatrix:
                if not 'in' in loadmatrix[t]:
                    # Walking backwards: resolve t's inbound load by recursing
                    # on the tail of the path.
                    newpath = path[:]
                    #print "Newpath before slice: %s" % newpath
                    newpath = newpath[-(i+2):]
                    #print "Doing self(newpath: %s)" % newpath
                    pathcalc = self.get_path_loads(newpath[0], newpath[1],
                                                   newpath, numpaths, loads, G)
                    loadmatrix[t]['in'] = pathcalc[(newpath[-2], newpath[-1])]
                loadmatrix[s] = {'out': loadmatrix[t]['in']}
            else:
                print "Can't find loaddata for (%s,%s)" % (s,t)
        edges = zip(path, path[1:])
        retloads = {}
        for (s,t) in edges:
            retloads[(s,t)] = loadmatrix[s]['out']
        return retloads
    def get_link_info(self, u, v):
        """Human-readable summary of edge (u, v): name, betweenness,
        capacity, load and utilization. Empty dict when the edge is absent."""
        G = self.G
        if not G.has_edge(u,v): return {}
        bc = self.edge_betweenness
        retinfo = {}
        edgedata = self.graph[u][v]
        name = ""
        capacity = 0
        if 'c' in edgedata:
            capacity = edgedata['c']
        if 'l' in edgedata:
            name = edgedata['l']
        utilization = "NA"
        if capacity != 0 and (u,v) in self.linkloads:
            utilization = "%.2f%%" % (self.get_link_utilization(u,v)*100)
        load = "NA"
        if (u,v) in self.linkloads:
            # Loads are stored in Kbit/s; display as Mbit/s.
            load = "%.2f Mbit/s" % (self.get_out_link_load(u,v)/float(1024))
        retinfo['name'] = name
        retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
                                 % (bc[(u,v)], (bc[(u,v)]/max(bc.values()))*100,
                                    (bc[(u,v)]/(sum(bc.values())/len(bc)))*100)
        retinfo['capacity'] = utils.cap2str(capacity)
        retinfo['load'] = load
        retinfo['utilization'] = utilization
        return retinfo
    def get_node_info(self, node):
        """Human-readable summary of `node`: degree, links, neighbors,
        longest paths, eccentricity, betweenness. Empty dict when unknown."""
        G = self.graph
        if node not in G.nodes(): return {}
        bc = self.betweenness
        retinfo = {}
        retinfo['name'] = node
        retinfo['degree'] = G.out_degree(node)
        # "linkname (metric)" for every outgoing edge.
        retinfo['links'] = map(lambda x: x[2]['l'] + \
                               " (" + str(int(x[2]['value'])) + ")",
                               G.edges(node, data=True))
        retinfo['neighbors'] = [x for x in G.neighbors(node)]
        retinfo['longest paths'] = self.get_max_cost_paths(nodes=[node])
        retinfo['eccentricity'] = nx.eccentricity(G, node)
        retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
                                 % (bc[node], (bc[node]/max(bc.values()))*100,
                                    (bc[node]/(sum(bc.values())/len(bc)))*100)
        return retinfo
    def get_max_cost_paths(self, top=8, nodes=None):
        """The `top` most expensive shortest paths, as display strings
        "u <-> v (cost)". With `nodes`, only paths starting there."""
        sources = self.G.nodes()
        if nodes:
            sources = nodes
        pathcosts = {}
        retval = []
        for source in sources:
            costs = nx.dijkstra_predecessor_and_distance(self.G, source)[1]
            for dest in costs:
                pathcosts[(source, dest)] = costs[dest]
        spathcosts = sorted(pathcosts,
                            cmp=lambda x,y: cmp(pathcosts[x], pathcosts[y]))
        spathcosts.reverse()
        sp = spathcosts
        pc = pathcosts
        seen = {}
        for (u,v) in sp[:top]:
            # Report symmetric-cost pairs only once.
            if (u,v) in seen and pc[(u,v)] == pc[(v,u)]: continue
            retval.append("%s (%s)" % (" <-> ".join([u,v]), pc[(u,v)]))
            seen[(u,v)] = True
            if (v,u) in sp and pc[(u,v)] == pc[(v,u)]:
                seen[(v,u)] = True
        return retval
def get_node_groups(self, threshold=0.095, n=10, nodes=None, path=None):
groups = {}
bc = self.betweenness
top = self.get_betweenness(top=n)
for node in self.G.nodes():
if nodes != None and node not in nodes:
continue
if bc[node] > threshold or node in top:
if path and node == path[0]:
if not 'mainstart' in groups:
groups['mainstart'] = [node]
else:
groups['mainstart'].append(node)
elif path and node == path[-1]:
if not 'mainstop' in groups:
groups['mainstop'] = [node]
else:
groups['mainstop'].append(node)
elif path and node in path:
if not 'mainpath' in groups:
groups['mainpath'] = [node]
else:
groups['mainpath'].append(node)
else:
if not 'main' in groups:
groups['main'] = [node]
else:
groups['main'].append(node)
else:
if path and node == path[0]:
if not 'normalstart' in groups:
groups['normalstart'] = [node]
else:
groups['normalstart'].append(node)
elif path and node == path[-1]:
if not 'normalstop' in groups:
groups['normalstop'] = [node]
else:
groups['normalstop'].append(node)
elif path and node in path:
if not 'normalpath' in groups:
groups['normalpath'] = [node]
else:
groups['normalpath'].append(node)
else:
if not 'normal' in groups:
groups['normal'] = [node]
else:
groups['normal'].append(node)
return [(groups[k], k) for k in groups]
def get_path_capacity(self, path, as_string=False, slowest_only=False):
path_links = zip(path, path[1:])
slowest = None
if slowest_only:
slowest = min([self.get_link_capacity(u,v)
for (u,v) in path_links])
if as_string:
return utils.cap2str(slowest)
return slowest
return [self.get_link_capacity(u,v,as_string) for (u,v) in path_links]
def get_link_capacity(self, u, v, as_string=False):
if not self.graph.has_edge(u,v):
return False
linkinfo = self.graph[u][v]
if not 'c' in linkinfo:
if as_string:
return "Unknown"
return False
if as_string:
return utils.cap2str(int(linkinfo['c']))
return int(linkinfo['c'])
def get_link_utilization(self, u, v):
try:
utilz = self.get_out_link_load(u,v)/float(self.get_link_capacity(u,v))
except ZeroDivisionError:
print "Warning: Could not get link capacity for link:"
print "%s => %s" % (u,v)
utilz = 0.0
return utilz
def get_link_utilizations(self):
utils = {}
for (u,v) in self.G.edges():
utils[(u,v)] = self.get_link_utilization(u,v)
return utils
def has_capacity_info(self):
for (u,v) in self.graph.edges():
if 'c' in self.graph[u][v]:
return True
return False
    def get_edge_groups(self, threshold=0.020, n=20, edges=None, path=None):
        """Partition edges into display groups by centrality and membership
        in the given path(s).

        `path` may be a single node list or a list of equal-cost paths; in
        the multi-path case the first entry is the "main" path and the rest
        are alternates. Returns [(edge_list, group_name)] with group names in
        {main, normal} x {'', path, altpath}.
        """
        groups, mpath_edges, rpath_edges = {}, [], []
        multi = False
        mpath = path
        if path != None:
            if type(path[0]) == type([]):
                # A list of paths: first is main, remainder are alternates.
                if len(path) > 1: multi = True
                mpath = path[0]
                for p in path[1:]:
                    # Record alternate-path edges in both directions.
                    rpath_edges += zip(p, p[1:])
                    rpath_edges += zip(p[1:], p)
            mpath_edges = zip(mpath, mpath[1:])
            mpath_edges += zip(mpath[1:], mpath)
        ebc = self.edge_betweenness
        top = self.get_edge_betweenness(top=n)
        for (u, v, d) in self.G.edges(data=True):
            if edges != None and (u, v) not in edges:
                continue
            # "main" when both directions exceed the threshold or the edge is top-n.
            if (ebc[(u,v)] > threshold and ebc[(v,u)] > threshold) \
                   or (u,v) in top:
                #print "Path: %s, multi: %s, (%s,%s), %s" % (path, multi, u,v,mpath_edges)
                if (path != None) and (not multi) and ((u,v) in mpath_edges):
                    if 'mainpath' not in groups:
                        groups['mainpath'] = [(u,v,d)]
                    else:
                        groups['mainpath'].append((u,v,d))
                elif multi and mpath_edges and (u,v) in mpath_edges \
                         and (u,v) not in rpath_edges:
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                elif mpath_edges and (u,v) in mpath_edges:
                    if 'mainpath' not in groups:
                        groups['mainpath'] = [(u,v,d)]
                    else:
                        groups['mainpath'].append((u,v,d))
                elif rpath_edges and (u,v) in rpath_edges:
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                else:
                    if 'main' not in groups:
                        groups['main'] = [(u,v,d)]
                    else:
                        groups['main'].append((u,v,d))
            else:
                if path != None and not multi and (u,v) in mpath_edges:
                    if 'normalpath' not in groups:
                        groups['normalpath'] = [(u,v,d)]
                    else:
                        groups['normalpath'].append((u,v,d))
                elif multi and mpath_edges and (u,v) in mpath_edges \
                         and (u,v) not in rpath_edges:
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                elif mpath_edges and (u,v) in mpath_edges:
                    if 'normalpath' not in groups:
                        groups['normalpath'] = [(u,v,d)]
                    else:
                        groups['normalpath'].append((u,v,d))
                elif rpath_edges and (u,v) in rpath_edges:
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                else:
                    if 'normal' not in groups:
                        groups['normal'] = [(u,v,d)]
                    else:
                        groups['normal'].append((u,v,d))
        return [(groups[k], k) for k in groups]
    def get_nodes(self):
        """All node names of the weighted working graph."""
        return self.G.nodes()
def get_areas(self, nodes):
na = self.graph.node_attr
areas = {}
for n in nodes:
if 'area' in na[n]:
areas[n] = na[n]['area']
else:
areas[n] = None
return areas
def get_positions(self, nodes):
na = self.graph.node_attr
pos = {}
for n in nodes:
pos[n] = (float(na[n]['x']), float(na[n]['y']))
return pos
    def get_stats(self):
        """Summary statistics of the topology (size, radius, density, ...)."""
        top = self.get_betweenness(top=20)
        stats = {}
        stats["nodes"] = nx.number_of_nodes(self.graph)
        stats["edges"] = nx.number_of_edges(self.graph)
        stats["radius"] = nx.radius(self.graph)
        stats["diameter"] = nx.diameter(self.graph)
        stats["center"] = nx.center(self.graph)
        stats["periphery"] = nx.periphery(self.graph)
        stats["density"] = nx.density(self.graph)
        stats["reciprocity"] = utils.reciprocity(self.graph)
        stats["mean length"] = nx.average_shortest_path_length(self.graph)
        stats["longest paths"] = self.get_max_cost_paths()
        stats["top 20 transit"] = top
        return stats
    def path(self, source, dest, G=None):
        """All equal-cost shortest paths from source to dest in G
        (default self.G).

        Returns (cost, [node_list, ...]) or (False, None) when dest is
        unreachable.
        """
        if not G:
            G = self.G
        if G == self.G:
            # Reuse the precomputed predecessor tree for the base graph.
            preds, costs = self.all_paths[source]
        else:
            preds, costs = nx.dijkstra_predecessor_and_distance(G, source)
        if not dest in costs:
            return False, None
        def _get_paths(preds, path, paths, dest):
            # Walk the predecessor DAG backwards, branching at ECMP splits.
            if dest in path:
                return
            path.append(dest)
            if len(preds[dest]) == 0:
                # Reached the source: one complete (reversed) path.
                paths.append(path)
                return
            for newdest in preds[dest]:
                _get_paths(preds, path[:], paths, newdest)
            return paths
        paths = _get_paths(preds, [], [], dest)
        for path in paths:
            path.reverse()
        return costs[dest], paths
    def refresh_from_file(self, filename):
        """Reload the topology from a Pajek file and rebuild all caches."""
        self.graph = read_pajek(filename)
        self.G = self._make_weighted_copy()
        # Measured loads no longer match the new topology.
        self.linkloads = {}
        self._refresh_betweenness()
        self._refresh_all_paths()
    def _make_weighted_copy(self):
        """Copy self.graph, re-adding each edge with its 'value' attribute
        exposed as 'weight' so networkx shortest-path functions can use it."""
        G = self.graph.copy()
        # Safe in networkx 1.x, where edges(data=True) returns a list.
        for (u,v,d) in G.edges(data=True):
            G.remove_edge(u,v)
            G.add_edge(u,v,weight=d['value'])
        return G
    def _refresh_betweenness(self):
        """Recompute node and edge centrality caches for the weighted graph."""
        self.betweenness = None
        # networkx changed the keyword from `weighted_edges` to `weight` after 1.5.
        if distutils.version.StrictVersion(nx.__version__) > distutils.version.StrictVersion("1.5"):
            self.betweenness = nx.load_centrality(self.G, weight='weight')
        else:
            self.betweenness = nx.load_centrality(self.G, weighted_edges=True)
        # NOTE(review): `weight=True` looks suspicious -- newer networkx expects an
        # edge-attribute *name* (e.g. weight='weight'); confirm against the nx version in use.
        self.edge_betweenness = nx.edge_betweenness(self.G, normalized=True, weight=True)
    def _refresh_all_paths(self):
        """Rebuild the per-node shortest-path cache and the per-edge
        paths-using-edge cache."""
        for node in self.G:
            self.all_paths[node] = nx.dijkstra_predecessor_and_distance(self.G, node)
        for edge in self.G.edges():
            self.paths_using_edge[edge[0], edge[1]] = \
                self.nodes_and_paths_using_edge(edge[0], edge[1], self.G)
    def _routeselection(self, paths):
        """Filter a set of equal-cost paths by IS-IS-style area preference:
        at hops where candidate paths diverge in whether they stay inside one
        area, drop the paths that leave the area. Returns surviving paths."""
        p_attr = {}
        pathnodes = reduce(lambda x,y: x+y, paths)
        areas = self.get_areas(pathnodes)
        if not areas:
            # No area information: nothing to select on.
            return paths
        for i in range(len(paths)):
            # areahops[j] is True when hop j stays within one area.
            areahops = map(lambda x: areas[x[0]] == areas[x[1]],
                           zip(paths[i], paths[i][1:]))
            p_attr[i] = {'areahops': areahops, 'candidate': True}
        for hop in range(1, max([2] + [len(p) for p in paths]) - 2):
            diff = False
            last_hop = None
            last_areahop = None
            for i, path in enumerate(paths):
                if hop+1 > len(path) - 1: continue
                if p_attr[i]['candidate'] == False: continue
                pathhop = (path[hop], path[hop+1])
                pathah = p_attr[i]['areahops'][hop]
                print "Comparing %s to %s and %s to %s (hop %s)" \
                      % (pathhop, last_hop, pathah, last_areahop, hop)
                if last_hop == None:
                    last_hop = pathhop
                    last_areahop = pathah
                elif pathhop != last_hop:
                    # Paths diverge here AND differ in area behavior.
                    if pathah != last_areahop:
                        diff = True
                        print "breaking at hop %s" % hop
                        break
            if diff:
                # Disqualify every path that leaves the area at this hop.
                for i in range(len(paths)):
                    if hop > len(paths[i]) - 1: continue
                    print "Looking at path %s with areahops %s, index %s" \
                          % (paths[i], p_attr[i]['areahops'], hop)
                    if p_attr[i]['areahops'][hop] != True:
                        p_attr[i]['candidate'] = False
                diff = False
        return [paths[i] for i in range(len(paths)) if p_attr[i]['candidate']]
class Simulation:
    """What-if layer on top of Model: applies metric changes, link/router
    failures and anycast groups to a private copy of the weighted graph."""
    # Scenario-change type constants (bitmask-style).
    SC_METRIC = 1
    SC_LINKFAIL = 2
    SC_ROUTERFAIL = 4
    def __init__(self, model, debug=False):
        self.model = model
        # Private working copy; the model's graph is never mutated.
        self.graph = model.G.copy()
        self.active = False
        self.changes = []    # applied scenario changes
        self._refresh_betweenness()
        self.debug = debug
        self.acnodes = set() # anycast member nodes
        self.acgroups = {}   # node -> reachable anycast member(s)
        self.all_paths = {}
        self._refresh_all_paths()
        self.linkloads = self.model.linkloads
    def get_stats(self):
        """Summary statistics of the simulated topology."""
        bc = self.betweenness
        # Rank nodes by betweenness, highest first (Python 2 cmp-style sort).
        top = sorted(bc, lambda x,y: cmp(bc[x], bc[y]))
        top.reverse()
        stats = {}
        stats["nodes"] = nx.number_of_nodes(self.graph)
        stats["edges"] = nx.number_of_edges(self.graph)
        stats["radius"] = nx.radius(self.graph)
        stats["diameter"] = nx.diameter(self.graph)
        stats["center"] = nx.center(self.graph)
        stats["periphery"] = nx.periphery(self.graph)
        stats["density"] = nx.density(self.graph)
        stats["reciprocity"] = utils.reciprocity(self.graph)
        stats["mean length"] = nx.average_shortest_path_length(self.graph)
        stats["longest paths"] = self.get_max_cost_paths()
        stats["top 20 transit"] = top[0:20]
        return stats
def get_link_utilization(self, u, v):
return self.get_out_link_load(u,v)/float(self.model.get_link_capacity(u,v))
def get_link_utilizations(self):
utils = {}
for (u,v) in self.graph.edges():
utils[(u,v)] = self.get_link_utilization(u,v)
return utils
def get_in_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(v,u)])
def get_out_link_load(self, u,v):
if not (u,v) in self.linkloads:
return False
return int(self.linkloads[(u,v)])
    def get_node_info(self, node):
        """Human-readable summary of `node` in the simulated topology,
        including its anycast group when anycast is active."""
        G = self.graph
        if node not in G.nodes(): return {}
        bc = self.betweenness
        retinfo = {}
        retinfo['name'] = node
        retinfo['degree'] = G.out_degree(node)
        # "linkname (metric)"; the link name lives on the model's base graph.
        retinfo['links'] = map(lambda x: self.model.graph.get_edge_data(x[0], x[1])['l']\
                               + " (" + str(int(x[2]['weight'])) + ")",
                               G.edges(node, data=True))
        retinfo['neighbors'] = [x for x in G.neighbors(node)]
        retinfo['longest paths'] = self.get_max_cost_paths(nodes=[node])
        retinfo['eccentricity'] = nx.eccentricity(G, node)
        retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
                                 % (bc[node], (bc[node]/max(bc.values()))*100,
                                    (bc[node]/(sum(bc.values())/len(bc)))*100)
        if self.acnodes:
            acstr = " and ".join(self.acgroups[node])
            retinfo['anycast group'] = acstr
            # '*' marks the node as an anycast member itself.
            if node in self.acnodes:
                retinfo['anycast group'] += '*'
        return retinfo
    def get_link_info(self, u, v):
        """Return a dict of display info for directed link (u, v): label,
        betweenness, capacity, current load and utilization.

        Returns an empty dict if the link does not exist.
        """
        G = self.graph
        if not G.has_edge(u,v): return {}
        bc = self.edge_betweenness
        retinfo = {}
        # Link metadata lives on the *model* graph: 'c' = capacity,
        # 'l' = human-readable link label.
        edgedata = self.model.graph[u][v]
        name = ""
        capacity = 0
        if 'c' in edgedata:
            capacity = edgedata['c']
        if 'l' in edgedata:
            name = edgedata['l']
        utilization = "NA"
        if capacity != 0 and (u,v) in self.linkloads:
            utilization = "%.2f%%" % (self.get_link_utilization(u,v)*100)
        load = "NA"
        if (u,v) in self.linkloads:
            # Load appears to be stored in Kbit/s; /1024 yields Mbit/s.
            # TODO(review): confirm the stored unit against the collector.
            load = "%.2f Mbit/s" % (self.get_out_link_load(u,v)/float(1024))
        retinfo['name'] = name
        retinfo['betweenness'] = "%.3f (%.2f%% of max, %.2f%% of avg)" \
                              % (bc[(u,v)], (bc[(u,v)]/max(bc.values()))*100,
                                 (bc[(u,v)]/(sum(bc.values())/len(bc)))*100)
        retinfo['capacity'] = utils.cap2str(capacity)
        retinfo['load'] = load
        retinfo['utilization'] = utilization
        return retinfo
def get_transit_links(self, u, v):
paths = self.model.nodes_and_paths_using_edge(u,v,self.graph, True)[1]
return paths.keys()
def get_max_cost_paths(self, top=8, nodes=None):
sources = self.graph.nodes()
if nodes:
sources = nodes
pathcosts = {}
retval = []
for source in sources:
costs = nx.dijkstra_predecessor_and_distance(self.graph, source)[1]
for dest in costs:
pathcosts[(source, dest)] = costs[dest]
spathcosts = sorted(pathcosts,
cmp=lambda x,y: cmp(pathcosts[x], pathcosts[y]))
spathcosts.reverse()
sp = spathcosts
pc = pathcosts
seen = {}
for (u,v) in sp[:top]:
if (u,v) in seen and pc[(u,v)] == pc[(v,u)]: continue
retval.append("%s (%s)" % (" <-> ".join([u,v]), pc[(u,v)]))
seen[(u,v)] = True
if (v,u) in sp and pc[(u,v)] == pc[(v,u)]:
seen[(v,u)] = True
return retval
    def start(self):
        """Activate the simulation on a private copy of the model graph.

        Snapshots the model topology and link loads so simulated changes
        never touch the live model, and resets derived state.
        """
        self.active = True
        self.graph = self.model.G.copy()
        # Centrality must be recomputed after the graph snapshot.
        self._refresh_betweenness()
        self.changes = []
        self.effects = []
        self.linkloads = self.model.linkloads
def stop(self):
self.acnodes = set()
self.acgroups = {}
self.active = False
    def get_changes(self):
        """Return the list of recorded simulation changes (dicts)."""
        return self.changes
    def get_changes_strings(self, commands=False):
        """Return a human-readable string for every recorded change.

        With commands=True, produce the equivalent re-playable command
        strings ("metric ...", "linkfail ...", "routerfail ...") instead of
        descriptive text.
        """
        strings = []
        for change in self.changes:
            if change['type'] == Simulation.SC_METRIC:
                connector = "->"
                bidirstr = " one-way"
                if change['bidir']:
                    connector = "<->"
                    bidirstr = ""
                if commands:
                    strings.append("metric %s %s %s%s"\
                                   % (change['pair'][0], change['pair'][1],
                                      change['metrics'][1], bidirstr))
                    continue
                # metrics is (old, new).
                strings.append("Metric for %s%s%s [%s->%s]"\
                               % (change['pair'][0], connector, change['pair'][1],
                                  change['metrics'][0], change['metrics'][1]))
            elif change['type'] == Simulation.SC_LINKFAIL:
                if commands:
                    strings.append("linkfail %s %s"\
                                   % (change['pair'][0], change['pair'][1]))
                    continue
                strings.append("Link failure between %s and %s" \
                               % (change['pair'][0], change['pair'][1]))
            elif change['type'] == Simulation.SC_ROUTERFAIL:
                if commands:
                    strings.append("routerfail %s"\
                                   % (change['node']))
                    continue
                strings.append("Router failure of %s" \
                               % (change['node']))
        return strings
def uneven_metrics(self):
G = self.graph
return filter(lambda x: G[x[0]][x[1]] != G[x[1]][x[0]],
G.edges())
def has_changes(self):
return len(self.changes) > 0
    def no_changes(self):
        """Return the number of recorded changes.

        NOTE(review): despite the name this returns a count, not a boolean
        ("no" here reads as "number of"); see has_changes() for the
        boolean form.  Confirm callers expect an int.
        """
        return len(self.changes)
    def get_effects(self):
        """Return the routing effects of the current changes, as produced
        by _refresh_effects() ({source: {dest: [diffs]}})."""
        return self.effects
def get_effects_node(self, node):
if not node in self.effects: return {}
return self.effects[node]
def get_effects_summary(self):
dstsummary, srcsummary = {}, {}
for source in self.effects:
no_changes = 0
for dest in self.effects[source].keys():
ddiffs = self.effects[source][dest]
no_changes += len(ddiffs)
if dest in dstsummary:
dstsummary[dest].append(source)
else:
dstsummary[dest] = [source]
if source in srcsummary:
srcsummary[source].append(dest)
else:
srcsummary[source] = [dest]
return srcsummary, dstsummary
    def get_nodes(self):
        """Return the nodes of the simulation graph."""
        return self.graph.nodes()
def get_betweenness(self, top=None):
if not top:
return self.betweenness
bc = self.betweenness
toplist = sorted(bc, lambda x,y: cmp(bc[x], bc[y]))
toplist.reverse()
return toplist[:top]
def get_edge_betweenness(self, top=None):
if not top:
return self.edge_betweenness
ebc = self.edge_betweenness
toplist = sorted(ebc, lambda (x1,y1), (x2, y2): cmp(ebc[(x1, y1)], ebc[(x2, y2)]))
toplist.reverse()
return toplist[:top]
    def get_anycast_groups_by_source(self):
        """Return the mapping {source node: [closest anycast node(s)]}."""
        return self.acgroups
def get_anycast_group(self, node):
if node not in self.acnodes:
return None
return filter(lambda x: node in self.acgroups[x], self.acgroups.keys())
    def get_anycast_nodes(self):
        """Return the configured anycast nodes as a list."""
        return list(self.acnodes)
    def add_anycast_nodes(self, nodes):
        """Add `nodes` to the anycast set and recompute group membership."""
        self.acnodes.update(nodes)
        self._refresh_anycast()
def remove_anycast_nodes(self, nodes):
for n in nodes:
self.acnodes.discard(n)
self._refresh_anycast()
def get_node_groups(self, threshold=0.095, n=10, path=None):
groups = {}
bc = self.betweenness
top = self.get_betweenness(top=n)
for node in self.graph.nodes():
if bc[node] > threshold or node in top:
if path and node == path[0]:
if not 'mainstart' in groups:
groups['mainstart'] = [node]
else:
groups['mainstart'].append(node)
elif path and node == path[-1]:
if not 'mainstop' in groups:
groups['mainstop'] = [node]
else:
groups['mainstop'].append(node)
elif path and node in path:
if not 'mainpath' in groups:
groups['mainpath'] = [node]
else:
groups['mainpath'].append(node)
else:
if not 'main' in groups:
groups['main'] = [node]
else:
groups['main'].append(node)
else:
if path and node == path[0]:
if not 'normalstart' in groups:
groups['normalstart'] = [node]
else:
groups['normalstart'].append(node)
elif path and node == path[-1]:
if not 'normalstop' in groups:
groups['normalstop'] = [node]
else:
groups['normalstop'].append(node)
elif path and node in path:
if not 'normalpath' in groups:
groups['normalpath'] = [node]
else:
groups['normalpath'].append(node)
else:
if not 'normal' in groups:
groups['normal'] = [node]
else:
groups['normal'].append(node)
return [(groups[k], k) for k in groups]
    def get_diff_edge_groups(self, path, spath, threshold=0.01, n=20):
        """Partition edges into display groups for a path-diff view.

        `path` is the path (or list of equal-cost paths) in the changed
        topology, `spath` the corresponding path(s) in the original one.
        Edges get a 'main'/'normal' prefix by edge-betweenness and a
        suffix describing their diff role: 'upath' (unchanged), 'ualtpath'
        (unchanged alternate), 'path'/'altpath' (new), 'opath'/'oaltpath'
        (old only).  Removed-from-topology edges are merged in from the
        model at the end.  Returns a list of ([(u, v, data)], groupname)
        tuples.
        """
        groups = {}
        #print "get_diff_edge_groups called (%s, %s)" % (path, spath)
        smpath_edges, srpath_edges = [], []
        mpath_edges, rpath_edges = [], []
        # smpath/mpath: primary path in the original/changed topology;
        # the remaining equal-cost paths feed the *rpath edge sets.
        smpath = spath
        mpath = path
        if type(spath[0]) == type([]):
            # NOTE(review): smulti is assigned but never read below.
            if len(spath) > 1: smulti = True
            smpath = spath[0]
            for p in spath[1:]:
                srpath_edges += zip(p, p[1:])
                srpath_edges += zip(p[1:], p)
        if type(path[0]) == type([]):
            if len(path) > 1: multi = True
            mpath = path[0]
            for p in path[1:]:
                rpath_edges += zip(p, p[1:])
                rpath_edges += zip(p[1:], p)
        # Edge lists hold both directions of every path hop.
        mpath_edges = zip(mpath, mpath[1:])
        mpath_edges += zip(mpath[1:], mpath)
        smpath_edges = zip(smpath, smpath[1:])
        smpath_edges += zip(smpath[1:], smpath)
        # o = only in the new primary path, u = shared by both primaries.
        mopath_edges = list(set(mpath_edges) - set(smpath_edges))
        mupath_edges = list(set(mpath_edges).intersection(set(smpath_edges)))
        ropath_edges = list(set(rpath_edges) - set(srpath_edges))
        #rupath_edges = list(set(rpath_edges).intersection(set(srpath_edges)))
        #print "mpath: %s" % mpath_edges
        #print "rpath: %s" % rpath_edges
        #print "smpath: %s" % smpath_edges
        #print "srpath: %s" % srpath_edges
        # Shared alternate-path edges: symmetric differences isolate the
        # edges that moved between primary and alternate roles.
        a = set(srpath_edges) ^ set(smpath_edges)
        b = set(rpath_edges) ^ set(mpath_edges)
        if not srpath_edges and not rpath_edges:
            a = set()
            b = set()
        c = set(srpath_edges).intersection((a & b))
        d = set(smpath_edges).intersection((a & b))
        rupath_edges = list(c|d)
        #rupath_edges = list(set(srpath_edges).intersection((a & b)))
        #print "mupath: %s" % mupath_edges
        #print "rupath: %s" % rupath_edges
        ebc = self.edge_betweenness
        top = self.get_edge_betweenness(top=n)
        # Edges present in the model but removed from the simulation graph.
        redges = list(set(self.model.G.edges()) \
                      - set(self.graph.edges()))
        for (u, v, d) in self.graph.edges(data=True):
            debug = False
            #if u == 'oslo-gw' or v == 'oslo-gw': debug = True
            if debug: print "Looking at (%s, %s, %s)" % (u, v, d)
            if (u,v) in redges:
                if debug: print "In redges...ignoring"
                continue
            if (ebc[(u,v)] > threshold and ebc[(v,u)] > threshold) \
                   or (u,v) in top:
                if debug: print "Is main edge"
                if (u,v) in mupath_edges and (u,v) not in rupath_edges:
                    if debug: print "Is mupath_edge"
                    if 'mainupath' not in groups:
                        groups['mainupath'] = [(u,v,d)]
                    else:
                        groups['mainupath'].append((u,v,d))
                elif (u,v) in rupath_edges:
                    if debug: print "Is rupath_edge"
                    if 'mainualtpath' not in groups:
                        groups['mainualtpath'] = [(u,v,d)]
                    else:
                        groups['mainualtpath'].append((u,v,d))
                elif (u,v) in smpath_edges and srpath_edges and (u,v) not in srpath_edges:
                    if debug: print "Is smpath_edge (not sr)"
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                elif (u,v) in smpath_edges:
                    if debug: print "Is smpath_edge"
                    if 'mainpath' not in groups:
                        groups['mainpath'] = [(u,v,d)]
                    else:
                        groups['mainpath'].append((u,v,d))
                elif (u,v) in srpath_edges:
                    if debug: print "Is srpath_edge"
                    if 'mainaltpath' not in groups:
                        groups['mainaltpath'] = [(u,v,d)]
                    else:
                        groups['mainaltpath'].append((u,v,d))
                elif (u,v) in mopath_edges:
                    if debug: print "Is mopath_edge"
                    if 'mainopath' not in groups:
                        groups['mainopath'] = [(u,v,d)]
                    else:
                        groups['mainopath'].append((u,v,d))
                elif (u,v) in ropath_edges:
                    if debug: print "Is ropath_edge"
                    if 'mainoaltpath' not in groups:
                        groups['mainoaltpath'] = [(u,v,d)]
                    else:
                        groups['mainoaltpath'].append((u,v,d))
                else:
                    if debug: print "Is notpath_edge"
                    if 'main' not in groups:
                        groups['main'] = [(u,v,d)]
                    else:
                        groups['main'].append((u,v,d))
            else:
                if debug: print "Is normal edge"
                if (u,v) in mupath_edges and (u,v) not in rupath_edges:
                    if debug: print "Is mupath_edge"
                    if 'normalupath' not in groups:
                        groups['normalupath'] = [(u,v,d)]
                    else:
                        groups['normalupath'].append((u,v,d))
                elif (u,v) in rupath_edges:
                    if debug: print "Is rupath_edge"
                    if 'normalualtpath' not in groups:
                        groups['normalualtpath'] = [(u,v,d)]
                    else:
                        groups['normalualtpath'].append((u,v,d))
                elif (u,v) in smpath_edges and srpath_edges and (u,v) not in srpath_edges:
                    if debug: print "Is smpath_edge (not sr)"
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                elif (u,v) in rupath_edges:
                    if debug: print "Is rupath_edge"
                    if 'normalualtpath' not in groups:
                        groups['normalualtpath'] = [(u,v,d)]
                    else:
                        groups['normalualtpath'].append((u,v,d))
                elif (u,v) in smpath_edges:
                    if debug: print "Is smpath_edge"
                    if 'normalpath' not in groups:
                        groups['normalpath'] = [(u,v,d)]
                    else:
                        groups['normalpath'].append((u,v,d))
                elif (u,v) in srpath_edges:
                    if debug: print "Is srpath_edge"
                    if 'normalaltpath' not in groups:
                        groups['normalaltpath'] = [(u,v,d)]
                    else:
                        groups['normalaltpath'].append((u,v,d))
                elif (u,v) in mopath_edges:
                    if debug: print "Is mopath_edge"
                    if 'normalopath' not in groups:
                        groups['normalopath'] = [(u,v,d)]
                    else:
                        groups['normalopath'].append((u,v,d))
                elif (u,v) in ropath_edges:
                    if debug: print "Is ropath_edge"
                    if 'normaloaltpath' not in groups:
                        groups['normaloaltpath'] = [(u,v,d)]
                    else:
                        groups['normaloaltpath'].append((u,v,d))
                else:
                    if debug: print "Is notpath_edge"
                    if 'normal' not in groups:
                        groups['normal'] = [(u,v,d)]
                    else:
                        groups['normal'].append((u,v,d))
        # Merge in the removed edges, classified by the model.
        redge_data = self.model.get_edge_groups(edges=redges, path=path)
        for (edges, etype) in redge_data:
            if etype == 'mainpath':
                if 'mainopath' in groups:
                    groups['mainopath'] += edges
                else:
                    groups['mainopath'] = edges
            elif etype == 'mainaltpath':
                if 'mainoaltpath' in groups:
                    groups['mainoaltpath'] += edges
                else:
                    groups['mainoaltpath'] = edges
            elif etype == 'normalpath':
                if 'normalopath' in groups:
                    groups['normalopath'] += edges
                else:
                    groups['normalopath'] = edges
            elif etype == 'normalaltpath':
                if 'normaloaltpath' in groups:
                    groups['normaloaltpath'] += edges
                else:
                    groups['normaloaltpath'] = edges
        return [(groups[k], k) for k in groups]
def get_diff_node_groups(self, path, spath, threshold=0.095, n=10):
groups = {}
bc = self.betweenness
top = self.get_betweenness(top=n)
opath = list(set(path) - set(spath))
upath = list(set(path).intersection(set(spath)))
rnodes = list(set(self.model.G.nodes()) - set(self.graph.nodes()))
for node in self.graph.nodes():
if node in rnodes: continue
if bc[node] > threshold or node in top:
if node == path[0]:
if not 'mainstart' in groups:
groups['mainstart'] = [node]
else:
groups['mainstart'].append(node)
elif node == path[-1]:
if not 'mainstop' in groups:
groups['mainstop'] = [node]
else:
groups['mainstop'].append(node)
elif node in upath:
if not 'mainupath' in groups:
groups['mainupath'] = [node]
else:
groups['mainupath'].append(node)
elif node in spath:
if not 'mainpath' in groups:
groups['mainpath'] = [node]
else:
groups['mainpath'].append(node)
elif node in opath:
if not 'mainopath' in groups:
groups['mainopath'] = [node]
else:
groups['mainopath'].append(node)
else:
if not 'main' in groups:
groups['main'] = [node]
else:
groups['main'].append(node)
else:
if node == path[0]:
if not 'normalstart' in groups:
groups['normalstart'] = [node]
else:
groups['normalstart'].append(node)
elif node == path[-1]:
if not 'normalstop' in groups:
groups['normalstop'] = [node]
else:
groups['normalstop'].append(node)
elif node in upath:
if not 'normalupath' in groups:
groups['normalupath'] = [node]
else:
groups['normalupath'].append(node)
elif node in spath:
if not 'normalpath' in groups:
groups['normalpath'] = [node]
else:
groups['normalpath'].append(node)
elif node in opath:
if not 'normalopath' in groups:
groups['normalopath'] = [node]
else:
groups['normalopath'].append(node)
else:
if not 'normal' in groups:
groups['normal'] = [node]
else:
groups['normal'].append(node)
rnode_data = self.model.get_node_groups(nodes=rnodes, path=path)
for (nodes, ntype) in rnode_data:
if ntype == 'mainpath':
if 'mainopath' in groups:
groups['mainopath'] += nodes
else:
groups['mainopath'] = nodes
elif ntype == 'normalpath':
if 'normalopath' in groups:
groups['normalopath'] += nodes
else:
groups['normalopath'] = nodes
return [(groups[k], k) for k in groups]
def get_edge_groups(self, threshold=0.01, n=20, path=None):
groups, mpath_edges, rpath_edges = {}, [], []
multi = False
mpath = path
if path != None:
if type(path[0]) == type([]):
if len(path) > 1: multi = True
mpath = path[0]
for p in path[1:]:
rpath_edges += zip(p, p[1:])
rpath_edges += zip(p[1:], p)
mpath_edges = zip(mpath, mpath[1:])
mpath_edges += zip(mpath[1:], mpath)
ebc = self.edge_betweenness
top = self.get_edge_betweenness(top=n)
for (u, v, d) in self.graph.edges(data=True):
if (ebc[(u,v)] > threshold and ebc[(v,u)] > threshold) \
or (u,v) in top:
if (path != None) and (not multi) and ((u,v) in mpath_edges):
if 'mainpath' not in groups:
groups['mainpath'] = [(u,v,d)]
else:
groups['mainpath'].append((u,v,d))
elif multi and mpath_edges and (u,v) in mpath_edges \
and (u,v) not in rpath_edges:
if 'mainaltpath' not in groups:
groups['mainaltpath'] = [(u,v,d)]
else:
groups['mainaltpath'].append((u,v,d))
elif mpath_edges and (u,v) in mpath_edges:
if 'mainpath' not in groups:
groups['mainpath'] = [(u,v,d)]
else:
groups['mainpath'].append((u,v,d))
elif rpath_edges and (u,v) in rpath_edges:
if 'mainaltpath' not in groups:
groups['mainaltpath'] = [(u,v,d)]
else:
groups['mainaltpath'].append((u,v,d))
else:
if 'main' not in groups:
groups['main'] = [(u,v,d)]
else:
groups['main'].append((u,v,d))
else:
if (path != None) and (not multi) and ((u,v) in mpath_edges):
if 'normalpath' not in groups:
groups['normalpath'] = [(u,v,d)]
else:
groups['normalpath'].append((u,v,d))
elif multi and mpath_edges and (u,v) in mpath_edges \
and (u,v) not in rpath_edges:
if 'normalaltpath' not in groups:
groups['normalaltpath'] = [(u,v,d)]
else:
groups['normalaltpath'].append((u,v,d))
elif mpath_edges and (u,v) in mpath_edges:
if 'normalpath' not in groups:
groups['normalpath'] = [(u,v,d)]
else:
groups['normalpath'].append((u,v,d))
elif rpath_edges and (u,v) in rpath_edges:
if 'normalaltpath' not in groups:
groups['normalaltpath'] = [(u,v,d)]
else:
groups['normalaltpath'].append((u,v,d))
else:
if 'normal' not in groups:
groups['normal'] = [(u,v,d)]
else:
groups['normal'].append((u,v,d))
return [(groups[k], k) for k in groups]
def has_effects(self):
return len(self.effects) > 0
    def linkfail(self, n1, n2, record=True):
        """Simulate failure of the link between n1 and n2 (both directions).

        With record=True the change is logged (with the pre-failure
        metrics, so undo() can restore them).  All derived state is then
        recomputed.  Returns False if the link does not exist.
        """
        if not self.graph.has_edge(n1,n2):
            return False
        # Remember both directions' metrics for undo().
        metrics = (self.graph[n1][n2]['weight'], self.graph[n2][n1]['weight'])
        if record:
            self.changes.append({'type': Simulation.SC_LINKFAIL, 'pair': (n1,n2),
                                 'metrics': metrics})
        self.graph.remove_edge(n1, n2)
        self.graph.remove_edge(n2, n1)
        # Refresh order matters: effects depend on refreshed paths, and
        # link loads depend on the computed effects.
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
    def routerfail(self, n1, record=True):
        """Simulate failure of router n1.

        Removes n1 and all its incident edges from the simulation graph;
        with record=True the removed edges (with their data) are logged so
        undo() can restore them.  Returns False if n1 is unknown.
        """
        if not self.graph.has_node(n1):
            return False
        removed_edges = []
        for node in self.graph.neighbors(n1):
            # Save both directions with their edge data for undo().
            removed_edges.append((n1, node, self.graph[n1][node]))
            self.graph.remove_edge(n1, node)
            removed_edges.append((node, n1, self.graph[node][n1]))
            self.graph.remove_edge(node, n1)
        self.graph.remove_node(n1)
        if record:
            self.changes.append({'type': Simulation.SC_ROUTERFAIL, 'node': n1,
                                 'edges': removed_edges})
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
    def change_metric(self, n1, n2, metric, record=True, bidir=None):
        """Change the metric of link (n1, n2) to `metric`.

        If the link currently has the same metric in both directions the
        change is applied bidirectionally by default; bidir=False forces
        one-way, bidir=True forces both ways.  With record=True the change
        is logged for undo().  Returns False if the link does not exist.
        """
        bidirectional = False
        metric = float(metric)
        if not self.graph.has_edge(n1, n2):
            return False
        old_metric = self.graph[n1][n2]['weight']
        # Symmetric links default to a bidirectional change.
        if old_metric == self.graph[n2][n1]['weight']:
            bidirectional = True
        if bidir == False:
            bidirectional = False
        if not record:
            # Internal (un-recorded) calls, e.g. from undo(), replay each
            # direction separately.
            bidirectional = False
        self.graph.remove_edge(n1, n2)
        self.graph.add_edge(n1,n2, weight=metric)
        if bidirectional or bidir:
            self.graph.remove_edge(n2, n1)
            self.graph.add_edge(n2,n1, weight=metric)
        if record:
            self.changes.append({'type': Simulation.SC_METRIC, 'pair': (n1, n2),
                                 'metrics': (int(old_metric), int(metric)),
                                 'bidir': bidirectional or bidir})
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
    def path(self, source, dest, G=None):
        """Return (cost, paths) for all shortest paths source -> dest.

        G defaults to the simulation graph, for which predecessor and
        distance data is cached in self.all_paths; other graphs are
        recomputed on the fly.  Returns (False, None) when dest is
        unreachable.  Each returned path is a node list from source to
        dest.
        """
        if not G:
            G = self.graph
        if G == self.graph:
            # Cached (predecessors, distances) for the active topology.
            preds, costs = self.all_paths[source]
        else:
            preds, costs = nx.dijkstra_predecessor_and_distance(G, source)
        if not dest in costs:
            return False, None
        def _get_paths(preds, path, paths, dest):
            # Walk the predecessor DAG backwards from dest, branching at
            # every equal-cost predecessor; `path` is built in reverse.
            if dest in path:
                return
            path.append(dest)
            if len(preds[dest]) == 0:
                paths.append(path)
                return
            for newdest in preds[dest]:
                _get_paths(preds, path[:], paths, newdest)
            return paths
        paths = _get_paths(preds, [], [], dest)
        for path in paths:
            path.reverse()
        return costs[dest], paths
    def undo(self, change_no):
        """Undo recorded change number `change_no` (1-indexed).

        Reverses the change on the simulation graph, removes it from the
        change log and refreshes all derived state.  Returns False for an
        out-of-range change number.
        """
        if change_no > len(self.changes) or change_no < 1:
            return False
        idx = change_no - 1
        change = self.changes[idx]
        if change['type'] == Simulation.SC_METRIC:
            (u, v) = change['pair']
            # metrics is (old, new); restore the old value.  record=False
            # keeps the undo itself out of the change log.
            w = change['metrics'][0]
            self.change_metric(u, v, w, record=False)
            if change['bidir']:
                self.change_metric(v, u, w, record=False)
        elif change['type'] == Simulation.SC_LINKFAIL:
            (u, v) = change['pair']
            (m1, m2) = change['metrics']
            self.graph.add_edge(u, v, weight=m1)
            self.graph.add_edge(v, u, weight=m2)
        elif change['type'] == Simulation.SC_ROUTERFAIL:
            router = change['node']
            edges = change['edges']
            self.graph.add_node(router)
            for (u, v, w) in edges:
                # w is the saved edge-data dict.
                self.graph.add_edge(u, v, **w)
        del self.changes[idx]
        self._refresh_betweenness()
        self._refresh_anycast()
        self.effects = self._refresh_effects()
        self._refresh_linkload()
        return True
    def is_active(self):
        """Return True while the simulation is started (see start()/stop())."""
        return self.active
    def reroute(self, start, stop, via, equal=False, timeout=2*60):
        """Search for metric changes that force traffic start -> stop
        through `via`.

        Iteratively nudges metrics down along start->via and via->stop and
        up along the current start->stop path, rejecting any adjustment
        that affects routing outside the allowed node set, until the
        start->stop path passes through `via` (with equal=True, as one of
        2..10 equal-cost paths).  Gives up after `timeout` seconds.

        Working graphs: G = current topology, H = G without the start->via
        path interior (to isolate via->stop), I = start->via adjustments,
        J = via->stop adjustments, K = all adjustments combined.

        Returns (success, {(u, v): new_metric, ...}).
        """
        import time
        debug = False
        max_metric = self.model.config.get('max_metric')
        G = self.graph
        H = self.graph.copy()
        I = self.graph.copy()
        K = self.graph.copy()
        success = False
        results = {}
        target_no_paths = [1]
        if equal:
            target_no_paths = range(2,10) + [1]
        ocost, opaths = self.path(start, stop)
        cost1, paths1 = self.path(start, via)
        if not start in G.nodes() \
               or not stop in G.nodes() \
               or not via in G.nodes():
            print "Invalid nodename"
            return []
        # Remove the interior of every start->via path from H so that
        # via->stop paths cannot double back through it.
        for path in paths1:
            for node in path:
                if node == stop:
                    print "Path to via-node is through stop-node."
                    print "Exiting"
                    return (success, results)
                if node == via:
                    continue
                H.remove_node(node)
        J = H.copy()
        cost2, paths2 = self.path(via, stop, H)
        # S/U/V: node sets of the original, start->via and via->stop paths.
        S = set(reduce(lambda x,y: x+y, opaths))
        U = set(reduce(lambda x,y: x+y, paths1))
        V = set(reduce(lambda x,y: x+y, paths2))
        A = V.copy()
        for node in V:
            A.update(G.neighbors(node))
        # Adjustments may only change routing within these nodes.
        allowed_nodes = list(U.union(A))
        if debug: print "Parameters:"
        if debug: print "S: %s" % (S)
        if debug: print "U: %s" % (U)
        if debug: print "V: %s" % (V)
        if debug: print "Allowed nodes: %s" % (allowed_nodes)
        finished = False
        neighbor_inc = 1
        start_t = time.time()
        while not finished:
            if time.time() - start_t >= timeout:
                finished = True
                success = False
                print "Timed out!"
                return (success, results)
            ocost, opaths = self.path(start, stop, K)
            W = set(reduce(lambda x,y: x+y, opaths))
            cost1, paths1 = self.path(start, via, I)
            cost2, paths2 = self.path(via, stop, J)
            if debug: print "Opath now: %s" % opaths
            if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
            if via in W and len(opaths) in target_no_paths:
                if debug: print "Success!"
                finished = True
                success = True
                continue
            nochange = True
            # Phase 1: lower metrics along the start->via paths.
            if debug: print "Negative adjustment loop"
            for path1 in paths1:
                for (u,v) in zip(path1, path1[1:]):
                    w = I[u][v]['weight']
                    if debug: print "Considering (%s,%s,%s) (-1)" % (u,v,w)
                    if u == start or u == stop or v == start or v == stop:
                        # Endpoint edges: keep lowering until it causes a
                        # disallowed side-effect or hits the minimum.
                        if debug: print "A: Bad effect = False"
                        bad_effect = False
                        minmax = False
                        if debug: print "Inner negative adjustment loop"
                        while (not bad_effect) and (not minmax):
                            w = w - 1
                            if w < 1:
                                if debug: print "Reached minimum metric..."
                                minmax = True
                                break
                            I.add_edge(u,v,weight=w)
                            K.add_edge(u,v,weight=w)
                            effects = self._refresh_effects(G, I)
                            if debug: print "B: Bad effect = False"
                            bad_effect = False
                            for src in effects:
                                if src not in allowed_nodes:
                                    for dest in effects[src]:
                                        #print dest
                                        if dest not in allowed_nodes:
                                            bad_effect = True
                            if not bad_effect:
                                ocost, opaths = self.path(start, stop, K)
                                W = set(reduce(lambda x,y: x+y, opaths))
                                cost1, paths1 = self.path(start, via, I)
                                if debug: print "Opath now: %s" % opaths
                                if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
                                if via in W and len(opaths) in target_no_paths:
                                    if debug: print "Success!"
                                    finished = True
                                    success = True
                                    break
                        if minmax:
                            if debug: print "A2: Bad effect = 2"
                            bad_effect = 2
                    else:
                        w = w - 1
                        if w < 1:
                            if debug: print "Reached minimum metric..."
                            continue
                        I.add_edge(u,v,weight=w)
                        K.add_edge(u,v,weight=w)
                        effects = self._refresh_effects(G, I)
                        if debug: print "C: Bad effect = False"
                        bad_effect = False
                        for src in effects:
                            if src not in allowed_nodes:
                                for dest in effects[src]:
                                    #print dest
                                    if dest not in allowed_nodes:
                                        bad_effect = True
                    if bad_effect == True:
                        # Roll back the last decrement.
                        I.add_edge(u,v,weight=w+1)
                        K.add_edge(u,v,weight=w+1)
                        continue
                    elif bad_effect == 2:
                        continue
                    else:
                        if debug: print "A: nochange = False"
                        nochange = False
            ocost, opaths = self.path(start, stop, K)
            W = set(reduce(lambda x,y: x+y, opaths))
            cost1, paths1 = self.path(start, via, I)
            if debug: print "Opath now: %s" % opaths
            if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
            if via in W and len(opaths) in target_no_paths:
                if debug: print "Success!"
                finished = True
                success = True
                continue
            # Phase 2: raise metrics along the current start->stop paths.
            if debug: print "Positive adjustment loop"
            for opath in opaths:
                for (u,v) in zip(opath, opath[1:]):
                    if u in V and v in V: continue
                    w = I[u][v]['weight']
                    if debug: print "Considering (%s,%s,%s) (+1)" % (u,v,w)
                    if u == start or u == stop or v == start or v == stop:
                        if debug: print "D: Bad effect = False"
                        bad_effect = False
                        minmax = False
                        if debug: print "Inner positive adjustment loop"
                        while (not bad_effect) and (not minmax):
                            w = w + 1
                            if w > max_metric:
                                if debug: print "Reached maximum metric..."
                                minmax = True
                                continue
                            I.add_edge(u,v,weight=w)
                            K.add_edge(u,v,weight=w)
                            effects = self._refresh_effects(G, I)
                            if debug: print "E: Bad effect = False"
                            bad_effect = False
                            for src in effects:
                                if src not in allowed_nodes:
                                    for dest in effects[src]:
                                        #print dest
                                        if dest not in allowed_nodes:
                                            bad_effect = True
                            if not bad_effect:
                                ocost, opaths = self.path(start, stop, K)
                                W = set(reduce(lambda x,y: x+y, opaths))
                                cost1, paths1 = self.path(start, via, I)
                                if debug: print "Opath now: %s" % opaths
                                if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
                                if via in W and len(opaths) in target_no_paths:
                                    if debug: print "Success!"
                                    finished = True
                                    success = True
                                    break
                        if minmax:
                            if debug: print "D2: Bad effect = 2"
                            bad_effect = 2
                    else:
                        w = w + 1
                        if w > max_metric:
                            if debug: print "Reached maximum metric..."
                            continue
                        I.add_edge(u,v,weight=w)
                        K.add_edge(u,v,weight=w)
                        effects = self._refresh_effects(G, I)
                        if debug: print "F: Bad effect = False"
                        bad_effect = False
                        for src in effects:
                            if src not in allowed_nodes:
                                for dest in effects[src]:
                                    #print dest
                                    if dest not in allowed_nodes:
                                        bad_effect = True
                    if bad_effect == True:
                        # Roll back the last increment.
                        I.add_edge(u,v,weight=w-1)
                        K.add_edge(u,v,weight=w-1)
                        continue
                    elif bad_effect == 2:
                        continue
                    else:
                        if debug: print "B: nochange = False"
                        nochange = False
            ocost, opaths = self.path(start, stop, K)
            W = set(reduce(lambda x,y: x+y, opaths))
            cost1, paths1 = self.path(start, via, I)
            if debug: print "Opath now: %s" % opaths
            if debug: print "Comparing %s+%s to %s" % (cost2, cost1, ocost)
            if via in W and len(opaths) in target_no_paths:
                if debug: print "Success!"
                finished = True
                success = True
                continue
            # Phase 3: lower metrics along the via->stop paths (on J/H).
            if debug: print "2nd negative adjustment loop"
            for path2 in paths2:
                for (u,v) in zip(path2, path2[1:]):
                    w = J[u][v]['weight']
                    if debug: print "Considering (%s,%s,%s) (-1)" % (u,v,w)
                    w = w - 1
                    if w < 1:
                        if debug: print "Reached minimum metric..."
                        continue
                    J.add_edge(u,v,weight=w)
                    K.add_edge(u,v,weight=w)
                    effects = self._refresh_effects(H, J)
                    if debug: print "G: Bad effect = False"
                    bad_effect = False
                    for src in effects:
                        if src not in allowed_nodes:
                            for dest in effects[src]:
                                #print dest
                                if dest not in allowed_nodes:
                                    bad_effect = True
                    if bad_effect:
                        J.add_edge(u,v,weight=w+1)
                        K.add_edge(u,v,weight=w+1)
                        continue
                    else:
                        if debug: print "C: nochange = False"
                        nochange = False
            # If nothing changed this round, widen the allowed node set
            # (at most twice) before giving up.
            if debug: print "Considering increasing allowed nodes"
            if nochange:
                if neighbor_inc > 2:
                    if debug: print "No solution found"
                    finished = True
                    success = False
                    continue
                append_nodes = []
                for node in allowed_nodes:
                    append_nodes += G.neighbors(node)
                if debug: print "Increasing set of nodes"
                allowed_nodes += append_nodes
                neighbor_inc += 1
            else:
                if debug: print "nochange was False, so going on"
        # Collect every metric that differs from the original topology.
        for (u,v,w) in K.edges(data=True):
            if (u,v) in results: continue
            old_w = G[u][v]['weight']
            if old_w != w:
                results[(u,v)] = w
                results[(v,u)] = w
        #for (u,v,w) in J.edges():
        #    if (u,v) in results: continue
        #    old_w = H.get_edge(u,v)
        #    if old_w != w:
        #        results[(u,v)] = w
        #        results[(v,u)] = w
        return (success, results)
    def minimal_link_costs(self):
        """Return a copy of the graph with symmetric link metrics lowered
        as far as possible without changing any routing decision.

        Only links with equal metrics in both directions are considered.
        Each candidate is repeatedly decremented (both directions) and the
        change is kept only if it causes no routing effects; otherwise it
        is rolled back.  Iterates until a full pass makes no adjustment.
        """
        debug = False
        ebc = self.edge_betweenness
        G = self.graph
        H = self.graph.copy()
        # Python 2 cmp-chain: sort by weight descending, then by edge
        # betweenness ascending as a tie-breaker.
        edges = sorted(H.edges(data=True), cmp=lambda x,y: cmp(y[2]['weight'], x[2]['weight']) \
                                       or cmp(ebc[(x[0],x[1])],
                                              ebc[(y[0],y[1])]))
        finished = False
        while not finished:
            adjustment_found = False
            for (u,v,w) in edges:
                w = w['weight']
                # Skip asymmetric links.
                if not w == H[v][u]['weight']:
                    continue
                old_w = G[u][v]['weight']
                if debug: print "Considering (%s,%s)" % (u,v)
                count = 1
                while count:
                    w = w - 1
                    count = 0
                    if w < 1: continue
                    if debug: print "Trying metrics..",
                    H.add_edge(u,v,weight=w)
                    H.add_edge(v,u,weight=w)
                    effects = self._refresh_effects(G, H)
                    if effects:
                        # Roll back; near the original value restore it
                        # exactly, otherwise back off by one step.
                        if abs(old_w - w) < 2:
                            H.add_edge(u,v,weight=old_w)
                            H.add_edge(v,u,weight=old_w)
                        else:
                            H.add_edge(u,v,weight=w+1)
                            H.add_edge(v,u,weight=w+1)
                        if debug: print "failed! (%s->%s)" % (old_w, w+1)
                    else:
                        count = 1
                        adjustment_found = True
                        if debug: print "ok"
            if not adjustment_found:
                finished = True
        return H
    def _refresh_betweenness(self):
        """Recompute node and edge centrality for the current graph."""
        self.betweenness = None
        # networkx changed the keyword from weighted_edges to weight
        # around version 1.5; support both.
        if distutils.version.StrictVersion(nx.__version__) > distutils.version.StrictVersion("1.5"):
            self.betweenness = nx.load_centrality(self.graph, weight='weight')
        else:
            self.betweenness = nx.load_centrality(self.graph, weighted_edges=True)
        # NOTE(review): weight=True looks suspect -- newer networkx expects
        # an edge-attribute *name* (e.g. 'weight') here; confirm against the
        # networkx version in use.
        self.edge_betweenness = nx.edge_betweenness(self.graph, normalized=True, weight=True)
    def _refresh_effects(self, OG=None, NG=None):
        """Compare shortest-path predecessors between old graph OG and new
        graph NG (defaults: the model graph vs. the simulation graph).

        Returns {source: {dest: [{'old': preds, 'new': preds}]}} for every
        (source, dest) pair whose routing changed.
        """
        self._refresh_all_paths()
        if not OG:
            OG = self.model.G
        if not NG:
            NG = self.graph
        diff_paths = {}
        sources = OG.nodes()
        for source in sources:
            diff_by_dst = {}
            if not source in NG: continue
            # Use cached predecessor data for the canonical graphs;
            # recompute for ad-hoc graph arguments.
            opreds = self.model.all_paths[source][0]
            if OG != self.model.G:
                opreds = nx.dijkstra_predecessor_and_distance(OG, source)[0]
            npreds = self.all_paths[source][0]
            if NG != self.graph:
                npreds = nx.dijkstra_predecessor_and_distance(NG, source)[0]
            for dest in opreds:
                if not dest in npreds:
                    # Destination became unreachable in the new graph.
                    diff_by_dst[dest] = [{'old': opreds[dest], 'new': []}]
                    continue
                diff_res = self._path_cmp(opreds[dest], npreds[dest])
                if diff_res:
                    if dest in diff_by_dst:
                        diff_by_dst[dest].append(diff_res)
                    else:
                        diff_by_dst[dest] = [diff_res]
            if diff_by_dst.keys():
                diff_paths[source] = diff_by_dst
        #print diff_paths
        return diff_paths
def _path_cmp(self, oldpaths, newpaths):
if cmp(oldpaths, newpaths) != 0:
return {'old': oldpaths, 'new': newpaths}
return None
def _refresh_anycast(self):
accosts = {}
acgroups = {}
for source in self.graph.nodes():
if source in self.acnodes:
acgroups[source] = [source]
continue
lengths = nx.single_source_dijkstra_path_length(self.graph, source)
for dest in self.acnodes:
if dest not in lengths:
continue
else:
cost = lengths[dest]
if not source in accosts:
accosts[source] = cost
acgroups[source] = [dest]
elif cost == accosts[source]:
acgroups[source] += [dest]
elif cost < accosts[source]:
accosts[source] = cost
acgroups[source] = [dest]
self.acgroups = acgroups
    def _apply_load_changes(self,effects):
        """Estimate per-link load adjustments implied by routing `effects`.

        For every (vianode, dest) hop that appears in an effect's 'old'
        predecessor set, the load previously carried via that hop is first
        subtracted from the old path's links, then re-applied along the
        corresponding new shortest paths.  Returns {(u, v): delta} of load
        adjustments; the actual link loads are updated by the caller
        (_refresh_linkload).
        """
        import time
        stime = time.time()
        via_edges_seen = {}
        adjustments = {}
        after_adjustments = {}
        traverse_edges = []
        old_path_parts = {}
        old_paths = {}
        # Collect each distinct (old-via-node, dest) hop exactly once.
        for node in effects:
            for dest in effects[node]:
                no_old = len(effects[node][dest][0]['old'])
                no_new = len(effects[node][dest][0]['new'])
                old_vias = effects[node][dest][0]['old']
                new_vias = effects[node][dest][0]['new']
                for vianode in old_vias:
                    if (vianode, dest) in via_edges_seen:
                        continue
                    #print " Considering viapath (%s, %s)" % (vianode, dest)
                    traverse_edges.append((vianode,dest))
                    via_edges_seen[(vianode,dest)] = True
        #print "Viapaths found (%s secs)" % (time.time() - stime)
        # Negative phase: remove the load the old paths carried.
        for (vianode, dest) in traverse_edges:
            old_paths = self.model.nodes_and_paths_using_edge(vianode,dest)[1]
            # reduce_old(node, dest)
            loads = self.model.linkloads
            G = self.model.graph
            #print "Finding load parts for (%s, %s) (%s secs)" % (vianode, dest,
            #                                                   time.time() - stime)
            old_path_load_parts = self.model.get_link_load_part(vianode, dest)
            old_path_parts[(vianode, dest)] = old_path_load_parts.copy()
            for (u,v) in old_path_load_parts:
                change = -old_path_load_parts[(u,v)]
                if (u,v) in adjustments:
                    # Keep the most negative (largest removal) seen.
                    if change < adjustments[(u,v)]:
                        #if u == 'porsgrunn-gw' or v == 'porsgrunn-gw':
                            #print " Setting (%s, %s) to %s (<%s)" % (u,v, change,
                            #                                    adjustments[(u,v)])
                        adjustments[(u,v)] = change
                        if u in effects:
                            if dest in effects[u] \
                                   and vianode in effects[u][dest][0]['old']:
                                new_paths = self.path(vianode,dest)[1]
                                if new_paths == None:
                                    new_paths = [[]]
                                no_new_paths = len(effects[u][dest][0]['new'])
                                no_old_paths = len(effects[u][dest][0]['old'])
                                deduct = 0
                                # If a new path reverses this edge, schedule
                                # a compensating deduction scaled by the
                                # old/new path-count ratio.
                                for npath in new_paths:
                                    edges = zip(npath, npath[1:])
                                    if (v,u) in edges:
                                        deduct = old_path_parts[(vianode, dest)][(u,v)]
                                        deduct *= float(no_old_paths/no_new_paths)
                                        if (v,u) in after_adjustments:
                                            if -deduct < after_adjustments[(v,u)]:
                                                after_adjustments[(v,u)] = -deduct
                                                #print "Deducting %s from (%s,%s)" % (deduct, v,u)
                                        else:
                                            after_adjustments[(v,u)] = -deduct
                                            #print "Deducting %s from (%s,%s)" % (deduct, v,u)
                else:
                    #if u == 'porsgrunn-gw' or v == 'porsgrunn-gw':
                        #print " Setting (%s, %s) to %s" % (u,v, change)
                    adjustments[(u,v)] = change
                    if u in effects:
                        if dest in effects[u] \
                               and vianode in effects[u][dest][0]['old']:
                            new_paths = self.path(vianode,dest)[1]
                            if new_paths == None:
                                new_paths = [[]]
                            no_new_paths = len(effects[u][dest][0]['new'])
                            no_old_paths = len(effects[u][dest][0]['old'])
                            deduct = 0
                            for npath in new_paths:
                                edges = zip(npath, npath[1:])
                                if (v,u) in edges:
                                    deduct = old_path_parts[(vianode, dest)][(u,v)]
                                    deduct *= float(no_old_paths/no_new_paths)
                                    if (v,u) in after_adjustments:
                                        if -deduct < after_adjustments[(v,u)]:
                                            after_adjustments[(v,u)] = -deduct
                                            #print "Deducting %s from (%s,%s)" % (deduct, v,u)
                                    else:
                                        after_adjustments[(v,u)] = -deduct
                                        #print "Deducting %s from (%s,%s)" % (deduct, v, u)
        #print "Negative adjustments complete (%s secs)" % (time.time() - stime)
        # Positive phase: re-apply the removed load along the new paths.
        pos_changes = {}
        for (vianode, dest) in traverse_edges:
            old_paths = self.model.nodes_and_paths_using_edge(vianode,dest)[1]
            for (n1, n2) in old_paths:
                if not (n1 in self.graph and n2 in self.graph): continue
                if not (n2 == dest or n1 == vianode): continue
                new_paths = self.path(n1,n2)[1]
                if new_paths == None:
                    new_paths = [[]]
                opaths = old_paths[(n1,n2)]
                # The first (or last) hop of each old path identifies which
                # load part belonged to this (n1, n2) pair.
                ofirst_edges = []
                for opath in opaths:
                    if n2 == dest:
                        ofirst_edges.append((opath[0], opath[1]))
                    else:
                        ofirst_edges.append((opath[-2], opath[-1]))
                old = 0
                for oedge in ofirst_edges:
                    if oedge in old_path_parts[(vianode, dest)]:
                        old += old_path_parts[(vianode, dest)][oedge]
                if old == 0: continue
                #print "Applying old load %s to new path (%s,%s)" \
                #      % (old, n1, n2)
                for path in new_paths:
                    edges = zip(path, path[1:])
                    for (u,v) in edges:
                        # Keep the largest load any pair pushes onto (u, v).
                        if (u,v) not in pos_changes:
                            pos_changes[(u,v)] = old
                        else:
                            if old > pos_changes[(u,v)]:
                                pos_changes[(u,v)] = old
        #print "Positive adjustments complete (%s secs)" % (time.time() - stime)
        # Combine the negative, positive and compensating adjustments.
        for (u,v) in pos_changes:
            #if (u,v) == ('trd-gw1', 'hovedbygget-gw1'):
            #    print " Adjusting (%s, %s) += %s" % (u, v, pos_changes[(u,v)])
            if (u,v) not in adjustments:
                if (u,v) not in after_adjustments:
                    adjustments[(u,v)] = pos_changes[(u,v)]
                else:
                    adjustments[(u,v)] = pos_changes[(u,v)] \
                                         + after_adjustments[(u,v)]
            else:
                if (u,v) not in after_adjustments:
                    adjustments[(u,v)] += pos_changes[(u,v)]
                else:
                    adjustments[(u,v)] += pos_changes[(u,v)] \
                                          + after_adjustments[(u,v)]
        #print "Returning adjustments (%s secs)" % (time.time() - stime)
        return adjustments
def _refresh_linkload(self):
self.linkloads = self.model.linkloads.copy()
newloads = self.model.linkloads.copy()
if not self.linkloads: return
effects = self.effects
adjustments = self._apply_load_changes(effects)
for (u,v) in adjustments:
if adjustments[(u,v)] == 0: continue
if self.graph.has_edge(u,v):
#print "Final adjustment for (%s, %s) += %s" % (u,v, adjustments[(u,v)])
if (u,v) in newloads:
newloads[(u,v)] += adjustments[(u,v)]
else:
newloads[(u,v)] = adjustments[(u,v)]
for (u,v) in sorted(newloads):
if newloads[(u,v)] < 0:
print "Assertion failed for load on (%s,%s): %s" \
% (u,v, newloads[(u,v)])
self.linkloads = newloads
def _refresh_all_paths(self):
for node in self.graph:
self.all_paths[node] = nx.dijkstra_predecessor_and_distance(self.graph, node)
|
nateleavitt/infusionsoft | 65 | lib/infusionsoft/client/invoice.rb | module Infusionsoft
class Client
# The Invoice service allows you to manage eCommerce transactions.
module Invoice
# Creates a blank order with no items.
#
# @param [Integer] contact_id
# @param [String] description the name this order will display
# @param [Date] order_date
# @param [Integer] lead_affiliate_id 0 should be used if none
# @param [Integer] sale_affiliate_id 0 should be used if none
# @return [Integer] returns the invoice id
def invoice_create_blank_order(contact_id, description, order_date, lead_affiliate_id,
sale_affiliate_id)
response = get('InvoiceService.createBlankOrder', contact_id, description, order_date,
lead_affiliate_id, sale_affiliate_id)
end
# Adds a line item to an order. This used to add a Product to an order as well as
# any other sort of charge/discount.
#
# @param [Integer] invoice_id
# @param [Integer] product_id
# @param [Integer] type UNKNOWN = 0, SHIPPING = 1, TAX = 2, SERVICE = 3, PRODUCT = 4,
# UPSELL = 5, FINANCECHARGE = 6, SPECIAL = 7
# @param [Float] price
# @param [Integer] quantity
# @param [String] description a full description of the line item
# @param [String] notes
# @return [Boolean] returns true/false if it was added successfully or not
def invoice_add_order_item(invoice_id, product_id, type, price, quantity, description, notes)
response = get('InvoiceService.addOrderItem', invoice_id, product_id, type, price,
quantity, description, notes)
end
# This will cause a credit card to be charged for the amount currently due on an invoice.
#
# @param [Integer] invoice_id
# @param [String] notes a note about the payment
# @param [Integer] credit_card_id
# @param [Integer] merchant_account_id
# @param [Boolean] bypass_commission
# @return [Hash] containing the following keys {'Successful' => [Boolean],
# 'Code' => [String], 'RefNum' => [String], 'Message' => [String]}
def invoice_charge_invoice(invoice_id, notes, credit_card_id, merchant_account_id,
bypass_commissions)
response = get('InvoiceService.chargeInvoice', invoice_id, notes, credit_card_id,
merchant_account_id, bypass_commissions)
end
# Deletes the specified subscription from the database, as well as all invoices
# tied to the subscription.
#
# @param [Integer] cprogram_id the id of the subscription being deleted
# @return [Boolean]
def invoice_delete_subscription(cprogram_id)
response = get('InvoiceService.deleteSubscription', cprogram_id)
end
# Creates a subscription for a contact. Subscriptions are billing automatically
# by infusionsoft within the next six hours. If you want to bill immediately you
# will need to utilize the create_invoice_for_recurring and then
# charge_invoice method to accomplish this.
#
# @param [Integer] contact_id
# @param [Boolean] allow_duplicate
# @param [Integer] cprogram_id the subscription id
# @param [Integer] merchant_account_id
# @param [Integer] credit_card_id
# @param [Integer] affiliate_id
# @param [Integer] days_till_charge number of days you want to wait till it's charged
def invoice_add_recurring_order(contact_id, allow_duplicate, cprogram_id,
merchant_account_id, credit_card_id, affiliate_id,
days_till_charge)
api_logger.warn "[DEPRECATION WARNING]: The invoice_add_subscription method more fully complies with Infusionsoft's published API documents. User is advised to review Infusionsoft's API and this gem's documentation for changes in parameters."
response = get('InvoiceService.addRecurringOrder', contact_id,
allow_duplicate, cprogram_id, merchant_account_id, credit_card_id,
affiliate_id, days_till_charge)
end
################### This is a replacement method for invoice_add_recurring_order
# in order to fully support and comply with the Infusionsoft API documentation.
#
#
# @param [Integer] contact_id
# @param [Boolean] allow_duplicate
# @param [Integer] cprogram_id the subscription id
# @param [Integer] qty
# @param [Float] price
# @param [Boolean] allow_tax
# @param [Integer] merchant_account_id
# @param [Integer] credit_card_id
# @param [Integer] affiliate_id
# @param [Integer] days_till_charge number of days you want to wait till it's charged
def invoice_add_subscription(contact_id, allow_duplicate, cprogram_id,
qty, price, allow_tax,
merchant_account_id, credit_card_id, affiliate_id,
days_till_charge)
response = get('InvoiceService.addRecurringOrder', contact_id,
allow_duplicate, cprogram_id, qty, price, allow_tax, merchant_account_id, credit_card_id,
affiliate_id, days_till_charge)
end
# This modifies the commissions being earned on a particular subscription.
# This does not affect previously generated invoices for this subscription.
#
# @param [Integer] recurring_order_id
# @param [Integer] affiliate_id
# @param [Float] amount
# @param [Integer] paryout_type how commissions will be earned (possible options are
# 4 - up front earning, 5 - upon customer payment) typically this is 5
# @return [Boolean]
def invoice_add_recurring_commission_override(recurring_order_id, affiliate_id,
amount, payout_type, description)
response = get('InvoiceService.addRecurringCommissionOverride', recurring_order_id,
affiliate_id, amount, payout_type, description)
end
# Adds a payment to an invoice without actually processing a charge through a merchant.
#
# @param [Integer] invoice_id
# @param [Float] amount
# @param [Date] date
# @param [String] type Cash, Check, Credit Card, Money Order, PayPal, etc.
# @param [String] description an area useful for noting payment details such as check number
# @param [Boolean] bypass_commissions
# @return [Boolean]
def invoice_add_manual_payment(invoice_id, amount, date, type, description, bypass_commissions)
response = get('InvoiceService.addManualPayment', invoice_id, amount, date, type,
description, bypass_commissions)
end
# This will create an invoice for all charges due on a Subscription. If the
# subscription has three billing cycles that are due, it will create one
# invoice with all three items attached.
#
# @param [Integer] recurring_order_id
# @return [Integer] returns the id of the invoice that was created
def invoice_create_invoice_for_recurring(recurring_order_id)
response = get('InvoiceService.createInvoiceForRecurring', recurring_order_id)
end
# Adds a payment plan to an existing invoice.
#
# @param [Integer] invoice_id
# @param [Boolean]
# @param [Integer] credit_card_id
# @param [Integer] merchant_account_id
# @param [Integer] days_between_retry the number of days Infusionsoft should wait
# before re-attempting to charge a failed payment
# @param [Integer] max_retry the maximum number of charge attempts
# @param [Float] initial_payment_ammount the amount of the very first charge
# @param [Date] initial_payment_date
# @param [Date] plan_start_date
# @param [Integer] number_of_payments the number of payments in this payplan (does not include
# initial payment)
# @param [Integer] days_between_payments the number of days between each payment
# @return [Boolean]
def invoice_add_payment_plan(invoice_id, auto_charge, credit_card_id,
merchant_account_id, days_between_retry, max_retry,
initial_payment_amount, initial_payment_date, plan_start_date,
number_of_payments, days_between_payments)
response = get('InvoiceService.addPaymentPlan', invoice_id, auto_charge,
credit_card_id, merchant_account_id, days_between_retry, max_retry,
initial_payment_amount, initial_payment_date, plan_start_date, number_of_payments,
days_between_payments)
end
# Calculates the amount owed for a given invoice.
#
# @param [Integer] invoice_id
# @return [Float]
def invoice_calculate_amount_owed(invoice_id)
response = get('InvoiceService.calculateAmountOwed', invoice_id)
end
# Retrieve all Payment Types currently setup under the Order Settings section of Infusionsoft.
#
# @return [Array]
def invoice_get_all_payment_otpions
response = get('InvoiceService.getAllPaymentOptions')
end
# Retrieves all payments for a given invoice.
#
# @param [Integer] invoice_id
# @return [Array<Hash>] returns an array of payments
def invoice_get_payments(invoice_id)
response = get('Invoice.getPayments', invoice_id)
end
# Locates an existing card in the system for a contact, using the last 4 digits.
#
# @param [Integer] contact_id
# @param [Integer] last_four
# @return [Integer] returns the id of the credit card
def invoice_locate_existing_card(contact_id, last_four)
response = get('InvoiceService.locateExistingCard', contact_id, last_four)
end
# Calculates tax, and places it onto the given invoice.
#
# @param [Integer] invoice_id
# @return [Boolean]
def invoice_recalculate_tax(invoice_id)
response = get('InvoiceService.recalculateTax', invoice_id)
end
# This will validate a credit card in the system.
#
# @param [Integer] credit_card_id if the card is already in the system
# @return [Hash] returns a hash { 'Valid' => false, 'Message' => 'Card is expired' }
def invoice_validate_card(credit_card_id)
response = get('InvoiceService.validateCreditCard', credit_card_id)
end
# This will validate a credit card by passing in values of the
# card directly (this card doesn't have to be added to the system).
#
# @param [Hash] data
# @return [Hash] returns a hash { 'Valid' => false, 'Message' => 'Card is expired' }
def invoice_validate_card(data)
response = get('InvoiceService.validateCreditCard', data)
end
# Retrieves the shipping options currently setup for the Infusionsoft shopping cart.
#
# @return [Array]
def invoice_get_all_shipping_options
response = get('Invoice.getAllShippingOptions')
end
# Changes the next bill date on a subscription.
#
# @param [Integer] job_recurring_id this is the subscription id on the contact
# @param [Date] next_bill_date
# @return [Boolean]
def invoice_update_recurring_next_bill_date(job_recurring_id, next_bill_date)
response = get('InvoiceService.updateJobRecurringNextBillDate', job_recurring_id, next_bill_date)
end
# Adds a commission override to a one time order, using a combination of percentage
# and hard-coded amounts.
#
# @param [Integer] invoice_id
# @param [Integer] affiliate_id
# @param [Integer] product_id
# @param [Integer] percentage
# @param [Float] amount
# @param [Integer] payout_type how commision should be earned (4 - up front in full, 5 - upon
# customer payment
# @param [String] description a note about this commission
# @param [Date] date the commission was generated, not necessarily earned
# @return [Boolean]
def invoice_add_order_commission_override(invoice_id, affiliate_id, product_id, percentage,
amount, payout_type, description, date)
response = get('InvoiceService.addOrderCommissionOverride', invoice_id, affiliate_id,
product_id, percentage, amount, payout_type, description, date)
end
# Deprecated - Adds a recurring order to the database.
def invoice_add_recurring_order_with_price(contact_id, allow_duplicate, cprogram_id, qty,
price, allow_tax, merchant_account_id,
credit_card_id, affiliate_id, days_till_charge)
response = get('InvoiceService.addRecurringOrder', contact_id, allow_duplicate,
cprogram_id, qty, price, allow_tax, merchant_account_id, credit_card_id,
affiliate_id, days_till_charge)
end
# Deprecated - returns the invoice id from a one time order.
def invoice_get_invoice_id(order_id)
response = get('InvoiceService.getinvoice_id', order_id)
end
end
end
end
| module Infusionsoft
class Client
# The Invoice service allows you to manage eCommerce transactions.
module Invoice
# Creates a blank order with no items.
#
# @param [Integer] contact_id
# @param [String] description the name this order will display
# @param [Date] order_date
# @param [Integer] lead_affiliate_id 0 should be used if none
# @param [Integer] sale_affiliate_id 0 should be used if none
# @return [Integer] returns the invoice id
def invoice_create_blank_order(contact_id, description, order_date, lead_affiliate_id,
sale_affiliate_id)
response = get('InvoiceService.createBlankOrder', contact_id, description, order_date,
lead_affiliate_id, sale_affiliate_id)
end
# Adds a line item to an order. This used to add a Product to an order as well as
# any other sort of charge/discount.
#
# @param [Integer] invoice_id
# @param [Integer] product_id
# @param [Integer] type UNKNOWN = 0, SHIPPING = 1, TAX = 2, SERVICE = 3, PRODUCT = 4,
# UPSELL = 5, FINANCECHARGE = 6, SPECIAL = 7
# @param [Float] price
# @param [Integer] quantity
# @param [String] description a full description of the line item
# @param [String] notes
# @return [Boolean] returns true/false if it was added successfully or not
def invoice_add_order_item(invoice_id, product_id, type, price, quantity, description, notes)
response = get('InvoiceService.addOrderItem', invoice_id, product_id, type, price,
quantity, description, notes)
end
# This will cause a credit card to be charged for the amount currently due on an invoice.
#
# @param [Integer] invoice_id
# @param [String] notes a note about the payment
# @param [Integer] credit_card_id
# @param [Integer] merchant_account_id
# @param [Boolean] bypass_commission
# @return [Hash] containing the following keys {'Successful' => [Boolean],
# 'Code' => [String], 'RefNum' => [String], 'Message' => [String]}
def invoice_charge_invoice(invoice_id, notes, credit_card_id, merchant_account_id,
bypass_commissions)
response = get('InvoiceService.chargeInvoice', invoice_id, notes, credit_card_id,
merchant_account_id, bypass_commissions)
end
# Deletes the specified subscription from the database, as well as all invoices
# tied to the subscription.
#
# @param [Integer] cprogram_id the id of the subscription being deleted
# @return [Boolean]
def invoice_delete_subscription(cprogram_id)
response = get('InvoiceService.deleteSubscription', cprogram_id)
end
# Creates a subscription for a contact. Subscriptions are billing automatically
# by infusionsoft within the next six hours. If you want to bill immediately you
# will need to utilize the create_invoice_for_recurring and then
# charge_invoice method to accomplish this.
#
# @param [Integer] contact_id
# @param [Boolean] allow_duplicate
# @param [Integer] cprogram_id the subscription id
# @param [Integer] merchant_account_id
# @param [Integer] credit_card_id
# @param [Integer] affiliate_id
# @param [Integer] days_till_charge number of days you want to wait till it's charged
def invoice_add_recurring_order(contact_id, allow_duplicate, cprogram_id,
merchant_account_id, credit_card_id, affiliate_id,
days_till_charge)
api_logger.warn "[DEPRECATION WARNING]: The invoice_add_subscription method more fully complies with Infusionsoft's published API documents. User is advised to review Infusionsoft's API and this gem's documentation for changes in parameters."
response = get('InvoiceService.addRecurringOrder', contact_id,
allow_duplicate, cprogram_id, merchant_account_id, credit_card_id,
affiliate_id, days_till_charge)
end
################### This is a replacement method for invoice_add_recurring_order
# in order to fully support and comply with the Infusionsoft API documentation.
#
#
# @param [Integer] contact_id
# @param [Boolean] allow_duplicate
# @param [Integer] cprogram_id the subscription id
# @param [Integer] qty
# @param [Float] price
# @param [Boolean] allow_tax
# @param [Integer] merchant_account_id
# @param [Integer] credit_card_id
# @param [Integer] affiliate_id
# @param [Integer] days_till_charge number of days you want to wait till it's charged
def invoice_add_subscription(contact_id, allow_duplicate, cprogram_id,
qty, price, allow_tax,
merchant_account_id, credit_card_id, affiliate_id,
days_till_charge)
response = get('InvoiceService.addRecurringOrder', contact_id,
allow_duplicate, cprogram_id, qty, price, allow_tax, merchant_account_id, credit_card_id,
affiliate_id, days_till_charge)
end
# This modifies the commissions being earned on a particular subscription.
# This does not affect previously generated invoices for this subscription.
#
# @param [Integer] recurring_order_id
# @param [Integer] affiliate_id
# @param [Float] amount
# @param [Integer] paryout_type how commissions will be earned (possible options are
# 4 - up front earning, 5 - upon customer payment) typically this is 5
# @return [Boolean]
def invoice_add_recurring_commission_override(recurring_order_id, affiliate_id,
amount, payout_type, description)
response = get('InvoiceService.addRecurringCommissionOverride', recurring_order_id,
affiliate_id, amount, payout_type, description)
end
# Adds a payment to an invoice without actually processing a charge through a merchant.
#
# @param [Integer] invoice_id
# @param [Float] amount
# @param [Date] date
# @param [String] type Cash, Check, Credit Card, Money Order, PayPal, etc.
# @param [String] description an area useful for noting payment details such as check number
# @param [Boolean] bypass_commissions
# @return [Boolean]
def invoice_add_manual_payment(invoice_id, amount, date, type, description, bypass_commissions)
response = get('InvoiceService.addManualPayment', invoice_id, amount, date, type,
description, bypass_commissions)
end
# This will create an invoice for all charges due on a Subscription. If the
# subscription has three billing cycles that are due, it will create one
# invoice with all three items attached.
#
# @param [Integer] recurring_order_id
# @return [Integer] returns the id of the invoice that was created
def invoice_create_invoice_for_recurring(recurring_order_id)
response = get('InvoiceService.createInvoiceForRecurring', recurring_order_id)
end
# Adds a payment plan to an existing invoice.
#
# @param [Integer] invoice_id
# @param [Boolean]
# @param [Integer] credit_card_id
# @param [Integer] merchant_account_id
# @param [Integer] days_between_retry the number of days Infusionsoft should wait
# before re-attempting to charge a failed payment
# @param [Integer] max_retry the maximum number of charge attempts
# @param [Float] initial_payment_ammount the amount of the very first charge
# @param [Date] initial_payment_date
# @param [Date] plan_start_date
# @param [Integer] number_of_payments the number of payments in this payplan (does not include
# initial payment)
# @param [Integer] days_between_payments the number of days between each payment
# @return [Boolean]
def invoice_add_payment_plan(invoice_id, auto_charge, credit_card_id,
merchant_account_id, days_between_retry, max_retry,
initial_payment_amount, initial_payment_date, plan_start_date,
number_of_payments, days_between_payments)
response = get('InvoiceService.addPaymentPlan', invoice_id, auto_charge,
credit_card_id, merchant_account_id, days_between_retry, max_retry,
initial_payment_amount, initial_payment_date, plan_start_date, number_of_payments,
days_between_payments)
end
# Calculates the amount owed for a given invoice.
#
# @param [Integer] invoice_id
# @return [Float]
def invoice_calculate_amount_owed(invoice_id)
response = get('InvoiceService.calculateAmountOwed', invoice_id)
end
# Retrieve all Payment Types currently setup under the Order Settings section of Infusionsoft.
#
# @return [Array]
def invoice_get_all_payment_otpions
response = get('InvoiceService.getAllPaymentOptions')
end
# Retrieves all payments for a given invoice.
#
# @param [Integer] invoice_id
# @return [Array<Hash>] returns an array of payments
def invoice_get_payments(invoice_id)
response = get('Invoice.getPayments', invoice_id)
end
# Locates an existing card in the system for a contact, using the last 4 digits.
#
# @param [Integer] contact_id
# @param [Integer] last_four
# @return [Integer] returns the id of the credit card
def invoice_locate_existing_card(contact_id, last_four)
response = get('InvoiceService.locateExistingCard', contact_id, last_four)
end
# Calculates tax, and places it onto the given invoice.
#
# @param [Integer] invoice_id
# @return [Boolean]
def invoice_recalculate_tax(invoice_id)
response = get('InvoiceService.recalculateTax', invoice_id)
end
# This will validate a credit card in the system.
#
# @param [Integer] credit_card_id if the card is already in the system
# @return [Hash] returns a hash { 'Valid' => false, 'Message' => 'Card is expired' }
def invoice_validate_card(credit_card_id)
response = get('InvoiceService.validateCreditCard', credit_card_id)
end
# This will validate a credit card by passing in values of the
# card directly (this card doesn't have to be added to the system).
#
# @param [Hash] data
# @return [Hash] returns a hash { 'Valid' => false, 'Message' => 'Card is expired' }
def invoice_validate_card(data)
response = get('InvoiceService.validateCreditCard', data)
end
# Retrieves the shipping options currently setup for the Infusionsoft shopping cart.
#
# @return [Array]
def invoice_get_all_shipping_options
response = get('Invoice.getAllShippingOptions')
end
# Changes the next bill date on a subscription.
#
# @param [Integer] job_recurring_id this is the subscription id on the contact
# @param [Date] next_bill_date
# @return [Boolean]
def invoice_update_recurring_next_bill_date(job_recurring_id, next_bill_date)
response = get('InvoiceService.updateJobRecurringNextBillDate', job_recurring_id, next_bill_date)
end
# Adds a commission override to a one time order, using a combination of percentage
# and hard-coded amounts.
#
# @param [Integer] invoice_id
# @param [Integer] affiliate_id
# @param [Integer] product_id
# @param [Integer] percentage
# @param [Float] amount
# @param [Integer] payout_type how commision should be earned (4 - up front in full, 5 - upon
# customer payment
# @param [String] description a note about this commission
# @param [Date] date the commission was generated, not necessarily earned
# @return [Boolean]
def invoice_add_order_commission_override(invoice_id, affiliate_id, product_id, percentage,
amount, payout_type, description, date)
response = get('InvoiceService.addOrderCommissionOverride', invoice_id, affiliate_id,
product_id, percentage, amount, payout_type, description, date)
end
# adding a manual payment for an invoice.
# This can be useful when the payments are not handled by Infusionsoft
# but you still needs to makethe invoice as paid
#
# @param [Integer] invoice_id
# @param [Float] amount
# @param [Date] date (time)
# @param [String] payment_type
# - E.g 'Credit Card'
# @param [String] description
# @param [Boolean] bypass_commissions (Whether this payment
# should count towards affiliate commissions.)
def add_manual_payment(invoice_id, amount, date, payment_type, description, bypass_commission)
response = get('InvoiceService.addManualPayment', invoice_id, amount,
date, payment_type, description, bypass_commission)
end
# Deprecated - Adds a recurring order to the database.
def invoice_add_recurring_order_with_price(contact_id, allow_duplicate, cprogram_id, qty,
price, allow_tax, merchant_account_id,
credit_card_id, affiliate_id, days_till_charge)
response = get('InvoiceService.addRecurringOrder', contact_id, allow_duplicate,
cprogram_id, qty, price, allow_tax, merchant_account_id, credit_card_id,
affiliate_id, days_till_charge)
end
# Deprecated - returns the invoice id from a one time order.
def invoice_get_invoice_id(order_id)
response = get('InvoiceService.getinvoice_id', order_id)
end
end
end
end
|
gunderwonder/postmark-swiftmailer | 1 | postmark_swiftmailer.php | <?php
/**
* @licence http://www.opensource.org/licenses/bsd-license.php New BSD Licence
* @author Øystein Riiser Gundersen <oysteinrg@gmail.com>
*/
/**
* A SwiftMailer transport implementation for the
* {@link http://postmarkapp.com/ Postmark} email delivery API for transactional
* email.
*
* Postmark is *not* for bulk email, but multiple recipients are still supported
* by posting the email once for each address.
*
* Bcc and Cc headers are silently ignored as these are not supported by Postmark.
*
* Usage:
* <code>
* $transport = Swift_PostmarkTransport::newInstance('YOUR-POSTMARK-API-KEY')
* $mailer = Swift_Mailer::newInstance($transport);
* $message = Swift_Message::newInstance('Wonderful Subject')
* ->setFrom(array('sender@mydomain.com' => 'John Doe'))
* ->setTo(array('receiver@otherdomain.org' => 'Jane Doe'))
* ->setBody('Here is the message itself');
* $mailer->send($message);
* </code>
*
* @package Swift
* @subpackage Postmark
*/
class Swift_PostmarkTransport implements Swift_Transport {
/** @var string */
const POSTMARK_URI = 'http://api.postmarkapp.com/email';
/** @var string */
protected $postmark_api_token = NULL;
/** @var array */
protected $IGNORED_HEADERS = array('Content-Type', 'Date');
/** @var array */
protected $UNSUPPORTED_HEADERS = array('Bcc', 'Cc');
/**
* @param string $postmark_api_token Postmark API key
* @param string|array $from Postmark sender signature email
* @param string $postmark_uri Postmark HTTP service URI
*/
public function __construct($postmark_api_token, $from = NULL, $postmark_uri = NULL) {
$this->postmark_api_token = $postmark_api_token;
$this->postmark_uri = is_null($postmark_uri) ? self::POSTMARK_URI : $postmark_uri;
$this->postmark_from_signature = $from;
}
public static function newInstance($postmark_api_token, $from = NULL, $postmark_uri = NULL) {
return new Swift_PostmarkTransport($postmark_api_token, $from, $postmark_uri);
}
public function isStarted() { return false; }
public function start() { }
public function stop() { }
/**
* @param Swift_Mime_Message $message
* @param string $mime_type
* @return Swift_Mime_MimePart
*/
protected function getMIMEPart(Swift_Mime_Message $message, $mime_type) {
$html_part = NULL;
foreach ($message->getChildren() as $part) {
if (strpos($part->getContentType(), 'text/html') === 0)
$html_part = $part;
}
return $html_part;
}
/**
* @param Swift_Mime_Message $message
* @param string $mime_type
* @return Swift_Mime_Message
*/
protected function processHeaders(Swift_Mime_Headerset $headers) {
foreach ($this->IGNORED_HEADERS as $header_name)
$headers->remove($header_name);
foreach ($this->UNSUPPORTED_HEADERS as $header_name)
if ($headers->has($header_name))
throw new Swift_PostmarkTransportException(
"Postmark does not support the '{$header_name}' header"
);
return $headers;
}
/**
* @param Swift_Mime_Message $message
* @param string $mime_type
* @return array
*/
protected function buildMessageData(Swift_Mime_Message $message) {
$headers = $this->processHeaders($message->getHeaders());
$message_data = array();
$message_data['Subject'] = $headers->get('Subject')->getFieldBody();
$headers->remove('Subject');
$message_data['From'] = $headers->get('From')->getFieldBody();
$headers->remove('From');
$message_data['ReplyTo'] = $message->getReplyTo();
$message_data['TextBody'] = $message->getBody();
if (!is_null($html_part = $this->getMIMEPart($message, 'text/html')))
$message_data['HtmlBody'] = $html_part->getBody();
$extra_headers = array();
foreach ($headers as $header) {
$extra_headers[] = array(
'Name' => $header->getFieldName(),
'Value' => $header->getFieldBody()
);
}
if (!empty($extra_headers))
$message_data['Headers'] = $extra_headers;
return $message_data;
}
/**
* @return array
*/
protected function headers() {
return array(
'Accept: application/json',
'Content-Type: application/json',
'X-Postmark-Server-Token: ' . $this->postmark_api_token
);
}
/**
* @param array $message_data
* @return array
*/
protected function post(array $message_data) {
$curl = curl_init();
curl_setopt_array($curl, array(
CURLOPT_URL => self::POSTMARK_URI,
CURLOPT_POST => true,
CURLOPT_HTTPHEADER => $this->headers(),
CURLOPT_POSTFIELDS => json_encode($message_data),
CURLOPT_RETURNTRANSFER => true
));
$response = curl_exec($curl);
if ($response === false)
$this->fail('Postmark delivery failed: ' . curl_error($curl));
$response_code = curl_getinfo($curl, CURLINFO_HTTP_CODE);
return array($response_code, @json_decode($response, true));
}
protected function fail($message) {
throw new Swift_TransportException($message);
}
/**
* @param Swift_Mime_Message $message
* @param array $failed_recipients
* @return int
*/
public function send(Swift_Mime_Message $message, &$failed_recipients = NULL) {
if (!is_null($this->postmark_from_signature))
$message->setFrom($this->postmark_from);
$failed_recipients = (array)$failed_recipients;
$message_data = $this->buildMessageData($message);
$send_count = 0;
$recipients = $message->getHeaders()->get('To');
$addresses = $recipients->getAddresses();
foreach ($recipients->getNameAddressStrings() as $i => $recipient) {
$message_data['To'] = $recipient;
list($response_code, $response) = $this->post($message_data);
if ($response_code != 200) {
$failed_recipients[] = $addresses[$i];
$this->fail(
"Postmark delivery failed with HTTP status code {$response_code}. " .
"Postmark said: '{$response['Message']}'"
);
} else {
$send_count++;
}
}
return $send_count;
}
public function registerPlugin(Swift_Events_EventListener $plugin) {
// TODO
}
}
| <?php
/**
* @licence http://www.opensource.org/licenses/bsd-license.php New BSD Licence
* @author Øystein Riiser Gundersen <oysteinrg@gmail.com>
*/
/**
* A SwiftMailer transport implementation for the
* {@link http://postmarkapp.com/ Postmark} email delivery API for transactional
* email.
*
* Postmark is *not* for bulk email, but multiple recipients are still supported
* by posting the email once for each address.
*
* Bcc and Cc headers are silently ignored as these are not supported by Postmark.
*
* Usage:
* <code>
* $transport = Swift_PostmarkTransport::newInstance('YOUR-POSTMARK-API-KEY')
* $mailer = Swift_Mailer::newInstance($transport);
* $message = Swift_Message::newInstance('Wonderful Subject')
* ->setFrom(array('sender@mydomain.com' => 'John Doe'))
* ->setTo(array('receiver@otherdomain.org' => 'Jane Doe'))
* ->setBody('Here is the message itself');
* $mailer->send($message);
* </code>
*
* @package Swift
* @subpackage Postmark
*/
class Swift_PostmarkTransport implements Swift_Transport {

	/** @var string Default Postmark HTTP API endpoint. */
	const POSTMARK_URI = 'http://api.postmarkapp.com/email';

	/** @var string|NULL Postmark server API token used to authenticate requests. */
	protected $postmark_api_token = NULL;

	/** @var string|array|NULL Sender signature forced onto every message, if set. */
	protected $postmark_from_signature = NULL;

	/** @var string Endpoint actually used when posting (constructor may override). */
	protected $postmark_uri;

	/** @var array Headers silently removed before posting (Postmark sets these itself). */
	protected $IGNORED_HEADERS = array('Content-Type', 'Date');

	/** @var array Headers that abort delivery with an exception when present. */
	protected $UNSUPPORTED_HEADERS = array();

	/**
	 * @param string $postmark_api_token Postmark API key
	 * @param string|array $from Postmark sender signature email
	 * @param string $postmark_uri Postmark HTTP service URI
	 */
	public function __construct($postmark_api_token, $from = NULL, $postmark_uri = NULL) {
		$this->postmark_api_token = $postmark_api_token;
		$this->postmark_uri = is_null($postmark_uri) ? self::POSTMARK_URI : $postmark_uri;
		$this->postmark_from_signature = $from;
	}

	/**
	 * Factory following SwiftMailer's newInstance() convention.
	 *
	 * @param string $postmark_api_token Postmark API key
	 * @param string|array $from Postmark sender signature email
	 * @param string $postmark_uri Postmark HTTP service URI
	 * @return Swift_PostmarkTransport
	 */
	public static function newInstance($postmark_api_token, $from = NULL, $postmark_uri = NULL) {
		return new Swift_PostmarkTransport($postmark_api_token, $from, $postmark_uri);
	}

	// The HTTP transport is stateless: there is no persistent connection
	// to start or stop.
	public function isStarted() { return false; }
	public function start() { }
	public function stop() { }

	/**
	 * Return the last MIME part of $message whose content type starts with
	 * $mime_type, or NULL when none matches.
	 *
	 * @param Swift_Mime_Message $message
	 * @param string $mime_type e.g. 'text/html'
	 * @return Swift_Mime_MimePart|NULL
	 */
	protected function getMIMEPart(Swift_Mime_Message $message, $mime_type) {
		$matched_part = NULL;
		foreach ($message->getChildren() as $part) {
			// FIX: honour the $mime_type argument; the original hard-coded
			// 'text/html' and ignored the parameter. Last match wins, as before.
			if (strpos($part->getContentType(), $mime_type) === 0)
				$matched_part = $part;
		}
		return $matched_part;
	}

	/**
	 * Strip headers Postmark manages itself and reject headers it cannot
	 * accept at all.
	 *
	 * @param Swift_Mime_Headerset $headers
	 * @return Swift_Mime_Headerset the same (mutated) header set
	 * @throws Swift_PostmarkTransportException on an unsupported header
	 */
	protected function processHeaders(Swift_Mime_Headerset $headers) {
		foreach ($this->IGNORED_HEADERS as $header_name)
			$headers->remove($header_name);
		foreach ($this->UNSUPPORTED_HEADERS as $header_name)
			if ($headers->has($header_name))
				throw new Swift_PostmarkTransportException(
					"Postmark does not support the '{$header_name}' header"
				);
		return $headers;
	}

	/**
	 * Translate a SwiftMailer message into the associative-array shape the
	 * Postmark JSON API expects (Subject, From, TextBody, HtmlBody, Headers,
	 * Attachments, ...). The 'To' key is filled in per recipient by send().
	 *
	 * @param Swift_Mime_Message $message
	 * @return array
	 */
	protected function buildMessageData(Swift_Mime_Message $message) {
		$headers = $this->processHeaders($message->getHeaders());
		$message_data = array();
		// Subject and From become top-level API fields, so drop the headers
		// to keep them out of the extra-headers list below.
		$message_data['Subject'] = $headers->get('Subject')->getFieldBody();
		$headers->remove('Subject');
		$message_data['From'] = $headers->get('From')->getFieldBody();
		$headers->remove('From');
		$message_data['ReplyTo'] = $message->getReplyTo();
		$message_data['TextBody'] = $message->getBody();
		if (!is_null($html_part = $this->getMIMEPart($message, 'text/html')))
			$message_data['HtmlBody'] = $html_part->getBody();
		// Remaining headers are forwarded verbatim as name/value pairs.
		$extra_headers = array();
		foreach ($headers as $header) {
			$extra_headers[] = array(
				'Name' => $header->getFieldName(),
				'Value' => $header->getFieldBody()
			);
		}
		if (!empty($extra_headers))
			$message_data['Headers'] = $extra_headers;
		// Postmark wants attachment bodies base64-encoded.
		$attachments = array();
		foreach ($message->getChildren() as $child) {
			if (get_class($child) == 'Swift_Attachment') {
				$attachments[] = array(
					'Name' => $child->getFilename(),
					'Content' => base64_encode($child->getBody()),
					'ContentType' => $child->getContentType()
				);
			}
		}
		$message_data['Attachments'] = $attachments;
		return $message_data;
	}

	/**
	 * HTTP request headers for the Postmark API, including authentication.
	 *
	 * @return array
	 */
	protected function headers() {
		return array(
			'Accept: application/json',
			'Content-Type: application/json',
			'X-Postmark-Server-Token: ' . $this->postmark_api_token
		);
	}

	/**
	 * POST the JSON-encoded message to the Postmark API.
	 *
	 * @param array $message_data
	 * @return array list($http_status_code, $decoded_json_body_or_null)
	 * @throws Swift_TransportException when the HTTP request itself fails
	 */
	protected function post(array $message_data) {
		$curl = curl_init();
		curl_setopt_array($curl, array(
			// FIX: use the configured endpoint; the original used
			// self::POSTMARK_URI, silently ignoring the constructor's
			// $postmark_uri override.
			CURLOPT_URL => $this->postmark_uri,
			CURLOPT_POST => true,
			CURLOPT_HTTPHEADER => $this->headers(),
			CURLOPT_POSTFIELDS => json_encode($message_data),
			CURLOPT_RETURNTRANSFER => true
		));
		$response = curl_exec($curl);
		if ($response === false)
			$this->fail('Postmark delivery failed: ' . curl_error($curl));
		$response_code = curl_getinfo($curl, CURLINFO_HTTP_CODE);
		return array($response_code, @json_decode($response, true));
	}

	/**
	 * Abort delivery with a transport-level exception.
	 *
	 * @param string $message
	 * @throws Swift_TransportException always
	 */
	protected function fail($message) {
		throw new Swift_TransportException($message);
	}

	/**
	 * Deliver $message via Postmark, posting once per 'To' recipient.
	 *
	 * NOTE: fail() throws, so delivery aborts on the first rejected
	 * recipient; addresses attempted and rejected are collected in
	 * $failed_recipients.
	 *
	 * @param Swift_Mime_Message $message
	 * @param array $failed_recipients filled (by reference) with failed addresses
	 * @return int number of recipients successfully posted for
	 */
	public function send(Swift_Mime_Message $message, &$failed_recipients = NULL) {
		// FIX: read $postmark_from_signature (set in the constructor); the
		// original read the never-defined $postmark_from property.
		if (!is_null($this->postmark_from_signature))
			$message->setFrom($this->postmark_from_signature);
		$failed_recipients = (array)$failed_recipients;
		$message_data = $this->buildMessageData($message);
		$send_count = 0;
		$recipients = $message->getHeaders()->get('To');
		$addresses = $recipients->getAddresses();
		foreach ($recipients->getNameAddressStrings() as $i => $recipient) {
			$message_data['To'] = $recipient;
			list($response_code, $response) = $this->post($message_data);
			if ($response_code != 200) {
				$failed_recipients[] = $addresses[$i];
				$this->fail(
					"Postmark delivery failed with HTTP status code {$response_code}. " .
					"Postmark said: '{$response['Message']}'"
				);
			} else {
				$send_count++;
			}
		}
		return $send_count;
	}

	/**
	 * Required by Swift_Transport; event plugins are not supported, no-op.
	 *
	 * @param Swift_Events_EventListener $plugin ignored
	 */
	public function registerPlugin(Swift_Events_EventListener $plugin) {
		// TODO: wire listener events into send() if plugin support is needed
	}
}
|
animaux/lang_german | 1 | lang/lang.de.php | <?php
// Language-pack metadata: display name, translator contact details and the
// release date of this German translation (consumed by Symphony's lang API).
$about = array(
'name' => 'Deutsch',
'author' => array(
'name' => 'Nils Hörrmann',
'email' => 'post@nilshoerrmann.de',
'website' => 'http://www.nilshoerrmann.de'
),
'release-date' => '2010-02-09',
);
/**
* Symphony Core
*/
$dictionary = array(
'"%1$s" contains invalid XML. The following error was returned: <code>%2$s</code>' =>
'"%1$s" enthält ungültiges XML. Der folgende Fehler wurde zurückgegeben: <code>%2$s</code>',
'%1$s – %2$s' =>
'%1$s – %2$s',
'%1$s – %2$s – %3$s' =>
'%1$s – %2$s – %3$s',
'%1$s Allow remote login via <a href="%2$s">%2$s</a>' =>
'%1$s Remotezugriff über <a href="%2$s">%2$s</a> erlauben',
'%s Allow selection of multiple authors' =>
'%s Erlaube Auswahl mehrerer Autoren.',
'%s Allow selection of multiple options' =>
'%s Erlaube Mehrfachauswahl.',
'%s Checked by default' =>
'%s Standardmäßig ausgewählt.',
'%s Hide this section from the Publish menu' =>
'%s Diesen Bereich nicht im Menü anzeigen.',
'%s HTML-encode text' =>
'%s Ausgabe HTML-konform kodieren.',
'%s is not a valid object. Failed to append to XML.' =>
'%s ist kein gültiges Objekt. Es konnte dem XML nicht hinzugefügt werden.',
'%s Make this a required field' =>
'%s Dieses Feld verpflichtend machen.',
'%s Redirect to 404 page when no results are found' =>
'%s Auf 404-Fehlerseite umleiten, wenn keine Ergebnisse gefunden werden können.',
'%s Select current user by default' =>
'%s Den aktuellen Benutzer vorauswählen',
'%s Show column' =>
'%s In der Übersicht anzeigen',
'← Previous' =>
'← Vorherige',
'\'%s\' contains invalid data. Please check the contents.' =>
'\'%s\' enthält ungültige Daten. Bitte überprüfen Sie den Inhalt.',
'\'%s\' is a required field.' =>
'\'%s\' ist ein Pflichtfeld.',
' (<b>Notice that it is possible to get mixtures of success and failure messages when using the "Allow Multiple" option</b>)' =>
' (<b>Bitte beachten Sie, dass Sie möglicherweise eine Mischung aus Fehler- und Erfolgsrückmeldungen erhalten, wenn Sie die die Erstellung mehrerer Einträge zulassen.</b>)',
'<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr> Processor' =>
'<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr>-Prozessor',
'<abbr title="PHP: Hypertext Pre-processor">PHP</abbr> 5.1 or above' =>
'<abbr title="PHP: Hypertext Pre-processor">PHP</abbr> 5.1 oder höher',
'<acronym title="Universal Resource Locator">URL</acronym>' =>
'<acronym title="Universal Resource Locator">URL</acronym>',
'<acronym title="Universal Resource Locator">URL</acronym> Parameters' =>
'<acronym title="Universal Resource Locator">URL-</acronym>Parameter',
'<a href="%1$s" title="Show debug view for %2$s">Line %3$d</a>' =>
'<a href="%1$s" title="Wechsle in den Debug-Modus für %2$s">Zeile %3$d</a>',
'<a href="%s" title="Show debug view">Compile</a>' =>
'<a href="%s" title="Zeige Debug-Modus">Kompiliere</a>',
'A 403 type page already exists.' =>
'Es existiert bereits eine 403-Fehlerseite.',
'A 404 type page already exists.' =>
'Es existiert bereits eine 404-Fehlerseite.',
'Aardvarks' =>
'Erdferkel',
'about 1 hour ago' =>
'vor etwa einer Stunde',
'about {$hours} hours ago' =>
'vor etwa {$hours} Stunden',
'Access Denied' =>
'Zugriff verweigert',
'A database error occurred while attempting to reorder.' =>
'Beim Neuordnen ist ein Datenbankfehler aufgetreten.',
'A Data source with the name <code>%s</code> name already exists' =>
'Eine Datenquelle mit dem Namen <code>%s</code> existiert bereits',
'Add an Author' =>
'Autor hinzufügen',
'Add a new author' =>
'Neuen Autor hinzufügen',
'Add item' =>
'Hinzufügen',
'Admin Only' =>
'Nur Administratoren',
'Advanced Configuration' =>
'Erweiterte Einstellungen',
'A field with that element name already exists. Please choose another.' =>
'Ein Feld mit diesem Elementnamen existiert bereits. Bitte wählen Sie einen anderen.',
'A file with the name %1$s already exists in %2$s. Please rename the file first, or choose another.' =>
'Eine Datei mit dem Namen %1$s existiert bereits in %2$s. Bitte benennen Sie die Datei zuerst um oder wählen Sie eine andere.',
'All of these fields can be set dynamically using the exact field name of another field in the form as shown below in the example form:' =>
'Alle diese Felder können dynamisch befüllt werden, indem Sie den genauen Feldnamen eines anderen Feldes des Formulares verwenden, wie das nachfolgende Beispiel zeigt:',
'Allow Multiple' =>
'Mehrere zulassen',
'a minute ago' =>
'vor einer Minute',
'An email containing a customised login link has been sent. It will expire in 2 hours.' =>
'Eine E-Mail mit personalisierten Anmeldedaten wurden verschickt. Sie verliert in zwei Stunden ihre Gültigkeit.',
'An empty result will be returned when this parameter does not have a value. Do not wrap the parameter with curly-braces.' =>
'Wenn dieser Parameter keinen Wert hat, wird ein leeres Ergebnis ausgegeben. Umschließen Sie den Parameter nicht mit geschweiften Klammern.',
'An error occurred during installation. You can view you log <a href="install-log.txt">here</a> for more details.' =>
'Während der Installation ist ein Fehler aufgetreten. Sie können das <a href="install-log.txt">Installations-Logbuch</a> für weitere Informationen einsehen.',
'An error occurred while processing this form. <a href="#error">See below for details.</a>' =>
'Beim Verarbeiten dieses Formulars ist ein Fehler aufgetreten. <a href="#error">Details siehe unten.</a>',
'An Event with the name <code>%s</code> name already exists' =>
'Ein Ereignis mit dem Namen <code>%s</code> existiert bereits',
'A new password has been requested for your account. Login using the following link, and change your password via the Authors area:' =>
'Ein neues Passwort wurde für Ihren Zugang angefordert. Sie können sich anmelden, indem Sie nachfolgendem Link folgen, und dann Ihr Passwort im Autorenbereich ändern:',
'An existing <code>/workspace</code> directory was found at this location. Symphony will use this workspace.' =>
'An diesem Ort wurde ein bereits existierendes <code>/workspace</code>-Verzeichnis gefunden. Symphony wird diesen Workspace verwenden.',
'An index type page already exists.' =>
'Es existiert bereits eine Index-Seite.',
'An unknown database occurred while attempting to create the section.' =>
'Es ist ein unbekannter Datenbankfehler beim Erstellen des Bereiches aufgetreten.',
'A page number must be set' =>
'Eine Seitenzahl muss festgelegt werden',
'A page with that handle already exists' =>
'Es existiert bereits eine Seite mit diesem Bezeichner.',
'A page with that title already exists' =>
'Es existiert bereits eine Seite mit diesem Titel.',
'Apply' =>
'Anwenden',
'A result limit must be set' =>
'Eine Ergebnisobergrenze muss festgelegt werden',
'Are you sure you want to {$action}?' =>
'Sind Sie sicher, dass Sie {$action} wollen?',
'Are you sure you want to {$action} {$count} items?' =>
'Sind Sie sicher, dass sie {$count} Einträge {$action} wollen?',
'Are you sure you want to {$action} {$name}?' =>
'Sind Sie sicher, dass sie {$name} {$action} wollen?',
'ascending' =>
'aufsteigend',
'A Section with the name <code>%s</code> name already exists' =>
'Es existiert bereits ein Bereich mit dem Namen <code>%s</code>',
'at' =>
'um',
'At least one source must be specified, dynamic or static.' =>
'Mindestens eine Quelle, dynamisch oder statisch, muss festgelegt werden.',
'Author' =>
'Autor',
'Author created at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Authors</a>' =>
'Der Autor wurde um %1$s erstellt. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Autoren anzeigen.</a>',
'Author ID' =>
'Autor-ID',
'Authors' =>
'Autoren',
'authors' =>
'autoren',
'Author updated at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Authors</a>' =>
'Der Autor wurde um %1$s aktualisiert. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Autoren anzeigen.</a>',
'A Utility with that name already exists. Please choose another.' =>
'Es existiert bereits ein Baustein mit diesem Namen. Bitte wählen Sie einen anderen.',
'Best Regards,' =>
'Mit freundlichen Grüßen,',
'Big' =>
'Dick',
'Birds' =>
'Vögel',
'Blueprints' =>
'Blaupausen',
'Body' =>
'Daten',
'Body is a required field.' =>
'Der Datenbereich ist ein Pflichtfeld.',
'Bugs' =>
'Käfer',
'Can\'t open file %s' =>
'Datei %s konnte nicht geöffnet werden',
'Cats' =>
'Katzen',
'Change Password' =>
'Passwort ändern',
'Checkbox' =>
'Kontrollkästchen',
'Coconut' =>
'Kokosnuss',
'Cold' =>
'Kalt',
'Components' =>
'Komponenten',
'Confirm New Password' =>
'Neues Passwort wiederholen',
'Confirm Password' =>
'Passwort wiederholen',
'Could not %1$s %2$s, there was a problem loading the object. Check the driver class exists.' =>
'%2$s konnte aufgrund eines Problems beim Laden des Objektes nicht %1$s werden. Überprüfen Sie, ob die Treiberklasse existiert.',
'Could not add directory "%s".' =>
'Das Verzeichnis "%s" konnte nicht hinzugefügt werden.',
'Could not add file "%s".' =>
'Die Datei "%s" konnte nicht hinzugefügt werden.',
'Could not find Data Source <code>%s</code>. If the Data Source was provided by an Extensions, ensure that it is installed, and enabled.' =>
'Die Datenquelle <code>%s</code> konnte nicht gefunden werden. Wenn diese Datenquelle von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Could not find Event <code>%s</code>. If the Event was provided by an Extensions, ensure that it is installed, and enabled.' =>
'Das Ereignis <code>%s</code> konnte nicht gefunden werden. Wenn dieses Ereignis von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Could not find extension at location %s' =>
'Die Erweiterung konnte nicht unter %s gefunden werden.',
'Could not find Field <code>%1$s</code> at <code>%2$s</code>. If the Field was provided by an Extension, ensure that it is installed, and enabled.' =>
'Das Feld <code>%1$s</code> konnte nicht unter <code>%2$s</code> gefunden werden. Wenn dieses Feld von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Could not find Text Formatter <code>%s</code>. If the Text Formatter was provided by an Extensions, ensure that it is installed, and enabled.' =>
'Der Textformatierer <code>%s</code> konnte nicht gefunden werden. Wenn der Textformatierer von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Create a new data source' =>
'Neue Datenquelle erstellen',
'Create a new entry' =>
'Neuen Eintrag erstellen',
'Create a new event' =>
'Neues Ereignis erstellen',
'Create a new page' =>
'Neue Seite erstellen',
'Create a new utility' =>
'Neuen Baustein erstellen',
'Create a section' =>
'Bereich erstellen',
'Create Author' =>
'Autor erstellen',
'Create Data Source' =>
'Datenquelle erstellen',
'Create Entry' =>
'Eintrag erstellen',
'Create Event' =>
'Ereignis erstellen',
'Create New' =>
'Neu erstellen',
'Create Page' =>
'Seite erstellen',
'Create Utility' =>
'Baustein erstellen',
'Customise how Date and Time values are displayed throughout the Administration interface.' =>
'Passen Sie an, wie Datums- und Zeitangaben innerhalb des Administrationsbereichs dargestellt werden.',
'Custom XML' =>
'Benutzerdefiniertes XML',
'Database' =>
'Datenbank',
'Database Connection' =>
'Datenbankverbindung',
'Database Error' =>
'Datenbankfehler',
'Data retrieved from the Symphony support server is decompressed with the ZLib compression library.' =>
'Daten, die vom Symphony-Supportserver empfangen werden, werden mit der ZLib-Kompression-Bibliothek dekomprimiert.',
'Data Source' =>
'Datenquelle',
'Data source output grouping is not supported by the <code>%s</code> field' =>
'Ergebnisgruppierung für Datenquellen wird vom Feld <code>%s</code> nicht unterstützt',
'Data Sources' =>
'Datenquellen',
'Date' =>
'Datum',
'Date and Time' =>
'Datum und Zeit',
'Date Format' =>
'Datumsformat',
'Dear <!-- RECIPIENT NAME -->,' =>
'Liebe(r) <!-- RECIPIENT NAME -->,',
'Default Section' =>
'Standardbereich',
'Delete' =>
'Löschen',
'Delete Entries' =>
'Einträge löschen',
'Delete this author' =>
'Diesen Autor löschen',
'Delete this data source' =>
'Diese Datenquelle löschen',
'Delete this entry' =>
'Diesen Eintrag löschen',
'Delete this event' =>
'Dieses Ereignis löschen',
'Delete this page' =>
'Diese Seite löschen',
'Delete this section' =>
'Diesen Bereich löschen',
'Delete this utility' =>
'Diesen Baustein löschen',
'descending' =>
'absteigend',
'Description' =>
'Beschreibung',
'Destination Directory' =>
'Zielordner',
'Destination folder, <code>%s</code>, is not writable. Please check permissions.' =>
'Das Zielverzeichnis <code>%s</code> ist nicht beschreibbar. Bitte überprüfen Sie die Zugriffsrechte.',
'Developer' =>
'Entwickler',
'Directories' =>
'Verzeichnisse',
'Disable' =>
'Deaktivieren',
'Dogs' =>
'Hunde',
'Dynamic Options' =>
'Dynamische Optionen',
'Dynamic XML' =>
'Dynamisches XML',
'dynamic_xml' =>
'dynamisches_xml',
'E-mail address entered is invalid' =>
'Die eingegebene E-Mail-Adresse ist ungültig',
'E-mail address is required' =>
'Die E-Mail-Adresse ist eine Pflichtangabe',
'Edit' =>
'Bearbeiten',
'Email' =>
'E-Mail',
'Email Address' =>
'E-Mail-Adresse',
'Enable' =>
'Aktivieren',
'Enabled' =>
'Aktiviert',
'Encumbered' =>
'belastet',
'Enter your email address to be sent a remote login link with further instructions for logging in.' =>
'Geben Sie Ihre E-Mail-Adresse an, um einen Link mit weiteren Erläuterungen zur Anmeldung zugesandt zu bekommen.',
'Entries' =>
'Einträge',
'Entry created at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Entries</a>' =>
'Der Eintrag wurde um %1$s erstellt. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Einträge anzeigen.</a>',
'Entry created successfully.' =>
'Eintrag erfolgreich erstellt.',
'Entry edited successfully.' =>
'Eintrag erfolgreich bearbeitet.',
'Entry encountered errors when saving.' =>
'Beim Speichern des Eintrags sind Fehler aufgetreten.',
'Entry limit specified was not a valid type. String or Integer expected.' =>
'Die festgelegte Obergrenze entspricht keinem gültigen Typ. String oder Integer erwartet. ',
'Entry updated at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Entries</a>' =>
'Dieser Eintrag wurde um %1$s aktualisiert. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Einträge anzeigen.</a>',
'Entry [created | edited] successfully.' =>
'Eintrag erfolgreich [erstellt | bearbeitet].',
'Environment Settings' =>
'Umgebungseinstellungen',
'Error creating field object with id %1$d, for filtering in data source "%2$s". Check this field exists.' =>
'Beim Erstellen des Feld-Objekts mit der ID %1$d, das zum Filtern der Datenquelle "%2$s" verwendet wird, ist ein Fehler aufgetreten. Überprüfen Sie, ob das Feld existiert.',
'Essentials' =>
'Grundangaben',
'Event created at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Events</a>' =>
'Dieses Ereignis wurde um %1$s erstellt. <a href="%2$s">Ein neues erstellen?</a> <a href="%3$s">Alle Ereignisse anzeigen.</a>',
'Events' =>
'Ereignisse',
'Event updated at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Events</a>' =>
'Dieses Ereignis wurde um %1$s aktualisiert. <a href="%2$s">Ein neues erstellen?</a> <a href="%3$s">Alle Ereignisse anzeigen.</a>',
'Example Front-end Form Markup' =>
'Beispiel-Frontend-Formular',
'Existing Values' =>
'Existierende Werte',
'Extensions' =>
'Erweiterungen',
'Failed to delete <code>%s</code>. Please check permissions.' =>
'<code>%s</code> konnte nicht gelöscht werden. Bitte überprüfen Sie die Zugriffsrechte.',
'Failed to write Data source to <code>%s</code>. Please check permissions.' =>
'Datenquelle konnte nicht unter <code>%s</code> gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte.',
'Failed to write Event to <code>%s</code>. Please check permissions.' =>
'<code>%s</code> konnte nicht gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte.',
'Fields' =>
'Felder',
'File chosen in "%1$s" exceeds the maximum allowed upload size of %2$s, specified by Symphony.' =>
'Die gewählte Datei überschreitet die für Symphony festgelegte maximale Uploadgröße von %2$s.',
'File chosen in "%1$s" exceeds the maximum allowed upload size of %2$s specified by your host.' =>
'Die gewählte Datei überschreitet die von Ihrem Host festgelegte maximale Uploadgröße von %2$s.',
'File chosen in \'%s\' does not match allowable file types for that field.' =>
'Die in \'%s\' ausgewählte Datei entspricht keinem erlaubten Dateityp für dieses Feld.',
'File chosen in \'%s\' was only partially uploaded due to an error.' =>
'Die in \'%s\' ausgewählte Datei wurde aufgrund eines Fehlers nur teilweise hochgeladen.',
'Files' =>
'Dateien',
'File Upload' =>
'Dateiupload',
'Filter %s by' =>
'%s filtern mit',
'Filter Authors by' =>
'Autoren filtern mit',
'Filter Navigation by' =>
'Navigation filtern mit',
'Filter Results' =>
'Ergebnisfilter',
'Filter Rules' =>
'Filterregeln',
'First' =>
'Erste',
'First Name' =>
'Vorname',
'First name is required' =>
'Der Vorname ist eine Pflichtangabe',
'Forbidden' =>
'Verboten',
'Formatting' =>
'Formatierung',
'General' =>
'Allgemein',
'Group By' =>
'Gruppieren nach',
'Hairy' =>
'Haarig',
'Handle' =>
'Bezeichner',
'Hi %s,' =>
'Hi %s,',
'Host' =>
'Host',
'Hot' =>
'Heiß',
'ID' =>
'ID',
'Included Elements' =>
'Eingebundene Elemente',
'Installation Failure' =>
'Installation fehlgeschlagen',
'Install Symphony' =>
'Symphony installieren',
'Invalid element name. Must be valid QName.' =>
'Ungültiger Elementname. Muss ein gültiger QName sein.',
'Invalid Entry ID specified. Could not create Entry object.' =>
'Ungültige Eintrags-ID angegeben. Eintragsobjekt konnte nicht erstellt werden.',
'It looks like your trying to create an entry. Perhaps you want fields first? <a href="%s">Click here to create some.</a>' =>
'Anscheinend versuchen Sie einen neuen Eintrag zu erstellen. Vielleicht möchten Sie vorher Felder anlegen. <a href="%s">Klicken Sie hier um Felder anzulegen.</a>',
'It will expire in 2 hours. If you did not ask for a new password, please disregard this email.' =>
'Es wird in zwei Stunden ablaufen. Falls Sie kein neues Passwort angefordert haben, ignorieren Sie bitte diese Nachricht.',
'just now' =>
'gerade eben',
'Label' =>
'Bezeichnung',
'Large' =>
'Groß',
'Last' =>
'Letzte',
'Last Name' =>
'Nachname',
'Last name is required' =>
'Der Nachname ist eine Pflichtangabe',
'Last Seen' =>
'Letzter Besuch',
'Leave password fields blank to keep the current password' =>
'Lassen Sie das Passwortfeld leer, um das derzeitige Passwort zu behalten',
'Leave these fields unless you are sure they need to be changed.' =>
'Belassen Sie diese Felder wie sie sind, es sei denn, Sie sind sich sicher, dass sie geändert werden müssen.',
'Line %s' =>
'Zeile %s',
'list of comma author usernames.' =>
'Kommagetrennte Liste der Autoren-Benutzernamen.',
'Login' =>
'Anmeldung',
'Login Details' =>
'Anmeldedaten',
'Logout' =>
'Abmelden',
'Long Description <i>Optional</i>' =>
'Lange Beschreibung <i>optional</i>',
'Lumpy' =>
'Pummelig',
'Main content' =>
'Hauptbereich',
'Make sure that you delete <code>' =>
'Stellen Sie sicher, dass Sie <code>',
'Make textarea %s rows tall' =>
'Stelle Textfeld %s Zeilen hoch dar.',
'Men' =>
'Männer',
'Message' =>
'Nachricht',
'Missing Requirements' =>
'Fehlende Voraussetzungen',
'Monkeys' =>
'Affen',
'Must be a valid number' =>
'Muss eine gültige Zahl sein',
'Must be a valid number or parameter' =>
'Muss eine gültige Zahl oder ein gültiger Parameter sein',
'Must be greater than zero' =>
'Muss größer als Null sein',
'My<abbr title="Structured Query Language">SQL</abbr> 4.1 or above' =>
'My<abbr title="Structured Query Language">SQL</abbr> 4.1 oder höher',
'MySQL Error (%1$s): %2$s in query "%3$s"' =>
'MySQL-Fehler (%1$s): %2$s in query "%3$s"',
'Name is a required field.' =>
'Name ist ein Pflichtfeld.',
'Namespace' =>
'Namensraum',
'Namespace Declarations <i>Optional</i>' =>
'Namensraumdeklarationen <i>optional</i>',
'navigation' =>
'navigation',
'Navigation' =>
'Navigation',
'Navigation Group' =>
'Navigationsgruppe',
'New Password' =>
'Neues Passwort',
'New Symphony Account Password' =>
'Neues Passwort für Ihren Symphony-Zugang',
'Next →' =>
'Nächste →',
'No' =>
'Nein',
'None' =>
'Keine Angaben',
'None found.' =>
'Keine Einträge.',
'No records found.' =>
'Keine Einträge gefunden.',
'No suitable engine object found' =>
'Es konnte kein ausreichendes Engine-Objekt gefunden werden.',
'No suitable XSLT processor was found.' =>
'Es konnte kein ausreichender XSLT-Prozessor gefunden werden.',
'No valid recipients found. Check send-email[recipient] field.' =>
'Es konnten keine Empfänger gefunden werden. Überprüfen Sie das Feld send-email[recipient].',
'Old Password' =>
'Altes Passwort',
'Once installed, you will be able to login to the Symphony admin with these user details.' =>
'Sobald die Installation abgeschlossen ist, können Sie mit diesen Zugangsdaten auf Symphony zugreifen.',
'One or more pages could not be deleted. Please check permissions on <code>/workspace/pages</code>.' =>
'Eine oder mehrere Seiten konnten nicht gelöscht werden. Bitte überprüfen Sie die Rechte für <code>/workspace/pages</code>.',
'Optional' =>
'optional',
'Output Options' =>
'Ausgabeoptionen',
'Outstanding Requirements' =>
'Fehlende Anforderungen',
'Page %1$s of %2$s' =>
'Seite %1$s von %2$s',
'Page could not be deleted because it does not exist.' =>
'Die Seite konnten nicht gelöscht werden, weil sie nicht existiert.',
'Page could not be deleted because it has children.' =>
'Die Seite konnte nicht gelöscht werden, weil sie Unterseiten hat.',
'Page could not be written to disk. Please check permissions on <code>/workspace/pages</code>.' =>
'Die Seite konnte nicht auf der Festplatte gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte für <code>/workspace/pages</code>.',
'Page ID' =>
'Seiten-ID',
'Page Not Found' =>
'Seite konnte nicht gefunden werden',
'Page not found' =>
'Seite nicht gefunden',
'Page Resources' =>
'Seitenbasis',
'Pages' =>
'Seiten',
'Page Settings' =>
'Seiteneinstellungen',
'Page Type' =>
'Seitentyp',
'Parameter Output' =>
'Ausgabeparameter',
'Parent Page' =>
'Übergeordnete Seite',
'Password' =>
'Passwort',
'Password is required' =>
'Das Passwort ist eine Pflichtangabe',
'Passwords did not match' =>
'Passworteingabe stimmte nicht überein',
'Permission Settings' =>
'Zugriffseinstellungen',
'Personal Information' =>
'Persönliche Informationen',
'Pigs' =>
'Schweine',
'Pirates' =>
'Piraten',
'Placement' =>
'Platzierung',
'Please <a href="%s">login</a> to view this page.' =>
'Bitte <a href="%s">melden Sie sich an</a>, um diese Seite zu sehen.',
'Please add the following personal details for this user.' =>
'Bitte ergänzen Sie die nachfolgenden persönlichen Informationen des Nutzers.',
'Please provide Symphony with access to a database.' =>
'Bitte räumen Sie Symphony einen Datenbankzugang ein.',
'Port' =>
'Port',
'Preferences' =>
'Einstellungen',
'Preferences saved.' =>
'Die Einstellungen wurden gespeichert.',
'random' =>
'zufällig',
'Recipient username was invalid' =>
'Empfängername war ungültig',
'Region' =>
'Region',
'Remove File' =>
'Datei entfernen',
'Remove selected items' =>
'Auswahl entfernen',
'Reordering was unsuccessful.' =>
'Das Neusortieren ist fehlgeschlagen.',
'Required URL Parameter <i>Optional</i>' =>
'Verpflichtende URL-Parameter <i>optional</i>',
'Root Path' =>
'Wurzelpfad',
'Round' =>
'Rund',
'Save Changes' =>
'Änderungen speichern',
'Section created at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Sections</a>' =>
'Der Bereich wurde um %1$s erstellt. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Bereiche anzeigen.</a>',
'Section is invalid' =>
'Der Bereich ist ungültig',
'sections' =>
'bereiche',
'Sections' =>
'Bereiche',
'Section updated at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Sections</a>' =>
'Der Bereich wurde um %1$s aktualisiert. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Bereiche anzeigen.</a>',
'Select Box' =>
'Auswahlfeld',
'Send Email' =>
'E-Mail verschicken',
'Send Email Filter' =>
'E-Mail-Versandfilter',
'Set %s' =>
'Setze %s',
'Show a maximum of %s results' =>
'Zeige maximal %s Ergebnisse',
'Show page %s of results' =>
'Zeige Seite %s der Ergebnisse',
'Sidebar' =>
'Seitenleiste',
'Small' =>
'Klein',
'Some errors were encountered while attempting to save.' =>
'Beim Versuch zu speichern sind Fehler aufgetreten.',
'Sort By' =>
'Sortieren nach',
'Sort by %1$s %2$s' =>
'Sortiere nach %1$s %2$s',
'Sorting and Limiting' =>
'Sortierung und Begrenzung',
'Sort Order' =>
'Sortierreihenfolge',
'Source' =>
'Quelle',
'Static Options' =>
'Statische Optionen',
'Static XML' =>
'Statisches XML',
'static_xml' =>
'statisches_xml',
'Status' =>
'Status',
'Submit' =>
'Abschicken',
'Success and Failure XML Examples' =>
'Erfolgs- und Fehlerbeispiele',
'Suggestion List' =>
'Vorschlagsliste',
'Symphony' =>
'Symphony',
'Symphony Concierge' =>
'Symphony-Concierge',
'Symphony Database Error' =>
'Symphony-Datenbankfehler',
'Symphony does not have write permission to the <code>/manifest</code> directory. Please modify permission settings on this directory and its contents to allow this, such as with a recursive <code>chmod -R</code> command.' =>
'Symphony hat keine Schreibrechte für das Verzeichnis <code>/manifest</code>. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses und seiner Inhalte an, zum Beispiel mit einen rekursiven <code>chmod -R</code> Kommando.',
'Symphony does not have write permission to the <code>/symphony</code> directory. Please modify permission settings on this directory. This is necessary only during installation, and can be reverted once installation is complete.' =>
'Symphony hat keine Schreibrechte für das Verzeichnis <code>/symphony</code>. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses an. Diese Änderung ist nur während der Installation nötig und kann danach rückgängig gemacht werden.',
'Symphony does not have write permission to the existing <code>/workspace</code> directory. Please modify permission settings on this directory and its contents to allow this, such as with a recursive <code>chmod -R</code> command.' =>
'Symphony hat keine Schreibrechte für das Verzeichnis <code>/workspace</code>. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses und seiner Inhalte an, zum Beispiel mit einen rekursiven <code>chmod -R</code> Kommando.',
'Symphony does not have write permission to the root directory. Please modify permission settings on this directory. This is necessary only if you are not including a workspace, and can be reverted once installation is complete.' =>
'Symphony hat keine Schreibreiche für das Wurzelverzeichnis. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses an. Diese Änderung ist nur nötig, wenn Sie keinen Workspace einbinden und kann nach der Installation rückgängig gemacht werden.',
'Symphony does not have write permission to the temporary <code>htaccess</code> file. Please modify permission settings on this file so it can be written to, and renamed.' =>
'Symphony hat keine Schreibrechte für die temporäre <code>.htaccess</code>-Datei. Bitte passen Sie die Zugriffsrechte dieser Datei so an, dass sie umbenannt und beschrieben werden kann.',
'Symphony is ready to be installed at the following location.' =>
'Symphony ist bereit für die Installation an nachfolgendem Ort.',
'Symphony needs an XSLT processor such as Lib<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr> or Sablotron to build pages.' =>
'Symphony benötigt einen XSLT-Prozessor wie Lib<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr> oder Sablotron, um Seiten erzeugen zu können.',
'Symphony needs a recent version of <abbr title="PHP: Hypertext Pre-processor">PHP</abbr>.' =>
'Symphony benötigt eine aktuelle <abbr title="PHP: Hypertext Pre-processor">PHP</abbr>-Version.',
'Symphony needs a recent version of My<abbr title="Structured Query Language">SQL</abbr>.' =>
'Symphony benötigt eine aktuelle My<abbr title="Structured Query Language">SQL</abbr>-Version.',
'Symphony needs permission to read and write both files and directories.' =>
'Symphony benötigt Lese- und Schreibrechte für Dateien und Verzeichnisse.',
'Symphony needs the following requirements satisfied before installation can proceed.' =>
'Symphony benötigt folgende Voraussetzungen bevor die Installation fortgesetzt werden kann.',
'Symphony normally specifies UTF-8 character encoding for database entries. With compatibility mode enabled, Symphony will instead use the default character encoding of your database.' =>
'Symphony verwendet normalerweise UTF-8-Zeichenkodierung für Datenbankeinträge. Im Kompatibilitätsmodus verwendet Symphony stattdessen die Standardzeichenkodierung Ihrer Datenbank.',
'Symphony requires <code>MySQL 4.1</code> or greater to work, however version <code>%s</code> was detected. This requirement must be met before installation can proceed.' =>
'Symphony benötigt <code>MySQL 4.1</code> oder neuer, allerdings wurde Version <code>%s</code> erkannt. Ohne die benötigte Version kann die Installation nicht fortgesetzt werden.',
'Symphony requires <code>MySQL 4.1</code> or greater to work. This requirement must be met before installation can proceed.' =>
'Symphony benötigt <code>MySQL 4.1</code> oder neuer. Ohne die benötigte Version kann die Installation nicht fortgesetzt werden.',
'Symphony was unable to connect to the specified database. You may need to modify host or port settings.' =>
'Symphony war nicht in der Lage eine Verbindung zur angegebenen Datenbank aufzubauen. Möglicherweise müssen Sie Ihre Host- oder Port-Einstellungen anpassen.',
'System' =>
'System',
'System Author' =>
'Systemautor',
'System Date' =>
'Systemdatum',
'System ID' =>
'System-ID',
'Table Prefix' =>
'Tabellenprefix',
'Tag List' =>
'Tag-Liste',
'Template' =>
'Vorlage',
'Textarea' =>
'Textfeld',
'Text Input' =>
'Eingabefeld',
'The date specified in \'%s\' is invalid.' =>
'Das angegebene Datum \'%s\' ist ungültig.',
'The entry you are looking for could not be found.' =>
'Der von Ihnen gesuchte Eintrag konnte nicht gefunden werden.',
'The following is an example of what is returned if any filters fail:' =>
'Nachfolgendes Beispiel zeigt das Ergebnis, wenn ein Filter einen Fehler ausgibt:',
'The page you requested does not exist.' =>
'Die aufgerufene Seite existiert nicht.',
'The page you requested to edit does not exist.' =>
'Die Seite, die Sie bearbeiten möchten, existiert nicht.',
'The password and confirmation did not match. Please retype your password.' =>
'Das Passwort und dessen Wiederholung stimmten nicht überein. Bitte geben Sie Ihr Passwort erneut ein.',
'There appears to be an existing <code>.htaccess</code> file in the <code>/symphony</code> directory.' =>
'Es scheint bereits eine <code>.htaccess</code>-Datei innerhalb des Verzeichnisses <code>/symphony</code> zu existieren.',
'There appears to be an existing <code>.htaccess</code> file in the Symphony install location. To avoid name clashes, you will need to delete or rename this file.' =>
'Es scheint bereits eine <code>.htaccess</code>-Datei innerhalb Ihrer Symphony-Installation zu existieren. Um Überschneidungen zu vermeiden, müssen Sie diese löschen oder umbenennen.',
'There is already a field of type <code>%s</code>. There can only be one per section.' =>
'Es existiert bereits ein Feld des Typs <code>%s</code>. Es ist für jeden Bereich nur eines zulässig.',
'There was an error while trying to upload the file <code>%1$s</code> to the target directory <code>%2$s</code>.' =>
'Beim Hochladen der Datei <code>%1$s</code> in den Zielordner <code>%2$s</code> ist ein Fehler aufgetreten.',
'There was a problem locating your account. Please check that you are using the correct email address.' =>
'Es gab Schwierigkeiten Ihren Benutzerzugang zuzuordnen. Überprüfen Sie bitte, ob Sie die richtige E-Mail-Adresse angegeben haben.',
'There were some problems while attempting to save. Please check below for problem fields.' =>
'Beim Speichern sind einige Fehler aufgetreten. Bitte überprüfen Sie die betroffenen Felder.',
'The section associated with the data source <code>%s</code> could not be found.' =>
'Der mit der Datenquelle <code>%s</code> verbundene Bereich konnte nicht gefunden werden.',
'The Section you are looking, <code>%s</code> for could not be found.' =>
'Der von Ihnen gesuchte Bereich <code>%s</code> konnte nicht gefunden werden.',
'The Section you are looking for could not be found.' =>
'Der von Ihnen gesuchte Bereich konnte nicht gefunden werden.',
'The send email filter, upon the event successfully saving the entry, takes input from the form and send an email to the desired recipient. <b>This filter currently does not work with the "Allow Multiple" option.</b> The following are the recognised fields:' =>
'Der Filter zum Versenden von E-Mail schickt, sofern das Ereignis erfolgreich gespeichert werden konnte, alle Eingaben des Formulars an den gewünschten Empfänger. <b>Dieser Filter funktioniert derzeit nicht, wenn Sie die Erstellung mehrerer Einträge zulassen.</b> Folgende Felder werden vom Filter erkannt:',
'The supplied password was rejected. <a href="%s">Retrieve password?</a>' =>
'Das übermittelte Passwort wurde nicht akzeptiert. <a href="%s">Ein neues anfordern?</a>',
'The supplied password was rejected. Make sure it is not empty and that password matches password confirmation.' =>
'Das verwendete Passwort wurde nicht akzeptiert. Stellen Sie sicher, dass das Passwort nicht leer ist und dass es der Passwort-Bestätigung entspricht.',
'The Symphony configuration file, <code>/manifest/config.php</code>, is not writable. You will not be able to save changes to preferences.' =>
'Die Symphony-Konfigurationsdatei <code>/manifest/config.php</code> ist nicht beschreibbar. Die Änderungen der Voreinstellungen werden nicht gespeichert.',
'The Symphony Team' =>
'Ihr Symphony-Team',
'The table prefix <code><!-- TABLE-PREFIX --></code> is already in use. Please choose a different prefix to use with Symphony.' =>
'Der Tabellenprefix <code><!-- TABLE-PREFIX --></code> ist bereits in Benutzung. Bitte wählen Sie einen anderen Prefix, der in Verbindung mit Symphony verwendet werden soll.',
'This document is not well formed. The following error was returned: <code>%s</code>' =>
'Dieses Dokument ist nicht wohlgeformt. Folgender Fehler wurde zurückgegeben: <code>%s</code>',
'This event will not be processed if any of these rules return true.' =>
'Dieses Ereignis wird nicht ausgeführt werden, wenn eine dieser Regel wahr zurückgibt.',
'This is a courtesy email to notify you that an entry was created on the %1$s section. You can edit the entry by going to: %2$s' =>
'Diese E-Mail möchte Sie darüber informieren, dass ein Eintrag im Bereich %1$s erstellt wurde. Sie können diesen bearbeiten, indem Sie folgende Seite aufrufen: %2$s',
'This is an example of the form markup you can use on your frontend:' =>
'Dies ist ein Beispiel, das Sie für Ihr Frontend-Formular nutzen können:',
'This is a required field' =>
'Dieses Feld ist verpflichtend.',
'This is a required field.' =>
'Dies ist ein Pflichtfeld.',
'This is not a valid email address. You must provide an email address since you will need it if you forget your password.' =>
'Dies ist keine gültige E-Mail-Adresse. Sie müssen eine E-Mail-Adresse angeben, da Sie diese benötigen, falls Sie Ihr Passwort vergessen sollten.',
'This page could not be rendered due to the following XSLT processing errors.' =>
'Diese Seite konnte aufgrund nachfolgender XSLT-Verarbeitungsfehler nicht dargestellt werden.',
'Time Format' =>
'Zeitformat',
'Title' =>
'Titel',
'Title is a required field' =>
'Titel ist ein Pflichtfeld',
'To edit an existing entry, include the entry ID value of the entry in the form. This is best as a hidden field like so:' =>
'Um einen existierenden Eintrag zu bearbeiten, müssen Sie die Eintrags-ID im Formular einbinden. Dies geht am besten mit einem versteckten Feld:',
'To redirect to a different location upon a successful save, include the redirect location in the form. This is best as a hidden field like so, where the value is the URL to redirect to:' =>
'Um nach erfolgreichem Speichern zu einer anderen Adresse weiterzuleiten, müssen Sie das Umleitungsziel im Formular einbinden. Dies geht am besten mit einem versteckten Feld, wobei der Wert der Ziel-URL entspricht:',
'Two custom fields have the same element name. All element names must be unique.' =>
'Zwei Felder haben den selben Elementnamen. Alle Elementnamen müssen eindeutig sein.',
'Type' =>
'Typ',
'Unable to remove file - %s' =>
'Datei konnte nicht entfernt werden – %s',
'Uninstall' =>
'Deinstallieren',
'Unknown Entry' =>
'Unbekannter Eintrag',
'Unknown errors occurred while attempting to save. Please check your <a href="%s">activity log</a>.' =>
'Ein unbekannter Fehler ist beim Speichern aufgetreten. Bitte überprüfen Sie Ihr <a href="%s">Systemlogbuch</a>.',
'Unknown errors where encountered when saving.' =>
'Beim Speichern sind unbekannte Fehler aufgetreten.',
'Unknown Section' =>
'Unbekannter Bereich',
'Untitled' =>
'Ohne Titel',
'Update Symphony' =>
'Symphony Aktualisierung',
'Uploading \'%s\' failed. Could not write temporary file to disk.' =>
'Das Hochladen von \'%s\' ist fehlgeschlagen. Die temporäre Datei konnte nicht gespeichert werden.',
'Uploading \'%s\' failed. File upload stopped by extension.' =>
'Das Hochladen von \'%s\' ist fehlgeschlagen. Der Vorgang wurde von einer Erweiterung unterbrochen.',
'URI' =>
'URI',
'URL' =>
'URL',
'URL Handle' =>
'URL-Bezeichner',
'URL Parameters' =>
'URL-Parameter',
'Use <code>{$param}</code> syntax to filter by page parameters.' =>
'Verwenden Sie <code>{$param}</code>, um mit Seitenparametern zu filtern.',
'Use <code>{$param}</code> syntax to limit by page parameters.' =>
'Verwenden Sie <code>{$param}</code>, um mit Seitenparametern zu begrenzen.',
'Use <code>{$param}</code> syntax to specify dynamic portions of the URL.' =>
'Verwenden Sie <code>{$param}</code>, um dynamische Teile der URL festzulegen.',
'Use an XPath expression to select which elements from the source XML to include.' =>
'Benutzen Sie einen X-Path-Ausdruck, um die einzubindenden Elemente der XML-Quelle auszuwählen.',
'Use compatibility mode' =>
'Verwende Kompatibilitätsmodus',
'Use Field' =>
'Zu verwendendes Feld',
'User Information' =>
'Benutzerinformationen',
'Username' =>
'Benutzername',
'Username is already taken' =>
'Dieser Benutzername ist bereits vergeben',
'Username is required' =>
'Der Benutzername ist eine Pflichtangabe',
'User type' =>
'Benutzergruppe',
'Utilities' =>
'Bausteine',
'Utility' =>
'Baustein',
'Utility could not be written to disk. Please check permissions on <code>/workspace/utilities</code>.' =>
'Baustein konnte nicht auf der Festplatte gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte für <code>/workspace/utilities</code>.',
'Utility created at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Utilities</a>' =>
'Der Baustein wurde um %1$s erstellt. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Bausteine anzeigen.</a>',
'Utility updated at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Utilities</a>' =>
'Der Baustein wurde um %1$s aktualisiert. <a href="%2$s">Einen neuen erstellen?</a> <a href="%3$s">Alle Bausteine anzeigen.</a>',
'Validation Rule <i>Optional</i>' =>
'Validierungsregel <i>optional</i>',
'Value' =>
'Wert',
'Version' =>
'Version',
'Version %s' =>
'Version %s',
'Viewing %1$s - %2$s of %3$s entries' =>
'Zeige %1$s - %2$s von %3$s Einträgen',
'Weasels' =>
'Wiesel',
'Website Name' =>
'Webseitenname',
'Website Preferences' =>
'Webseiteneinstellungen',
'When an error occurs during saving, due to either missing or invalid fields, the following XML will be returned' =>
'Wenn beim Speichern ein Fehler auftritt, weil Felder fehlen oder ungültig sind, wird nachfolgendes XML ausgeben',
'When saved successfully, the following XML will be returned:' =>
'Nach erfolgreicher Speicherung, wird nachfolgendes XML ausgegeben:',
'With Selected...' =>
'Auswahl …',
'Women' =>
'Frauen',
'Worms' =>
'Würmer',
'Wrong password. Enter old one to change email address.' =>
'Falsches Passwort. Geben Sie Ihr altes Passwort zum Ändern der E-Mail-Adresse ein.',
'Wrong password. Enter old password to change it.' =>
'Falsches Passwort. Geben Sie Ihr altes Passwort zum Ändern ein.',
'XML' =>
'XML',
'XML is invalid' =>
'XML ist ungültig',
'XML Output' =>
'XML-Ausgabe',
'XML returned is invalid.' =>
'Zurückgegebenes XML ist ungültig.',
'XSLT Processing Error' =>
'XSLT-Verarbeitungsfehler',
'Yes' =>
'Ja',
'You are already using the most recent version of Symphony. There is no need to run the installer, and can be safely deleted.' =>
'Sie verwenden bereits die aktuellste Version von Symphony. Es ist nicht nötig das Installationsprogramm laufen zu lassen, es kann sicher entfernt werden.',
'You are not authorised to access this page.' =>
'Sie sind nicht berechtigt diese Seite zu besuchen.',
'You are not authorised to access this section.' =>
'Sie sind nicht autorisiert auf diesen Bereich zuzugreifen',
'You are not using the most recent version of Symphony. This update is only compatible with Symphony 2.' =>
'Sie verwenden nicht die aktuellste Version von Symphony. Diese Aktualisierung ist nur mit Symphony 2 kompatibel.',
'You must enter a Password. This will be your Symphony login information.' =>
'Sie müssen ein Passwort anlegen, welches für Sie bei der Symphony-Anmeldung verwendet werden soll.',
'You must enter a Username. This will be your Symphony login information.' =>
'Sie müssen einen Benutzernamen anlegen, welcher für Sie bei der Symphony-Anmeldung verwendet werden soll.',
'You must enter your name.' =>
'Sie müssen Ihren Namen angeben.',
'ZLib Compression Library' =>
'ZLib-Compression-Library',
'[Symphony] A new entry was created on %s' =>
'[Symphony] Ein neuer Eintrag auf %s wurde erstellt',
'{$minutes} minutes ago' =>
'vor {$minutes} Minuten',
'%s Include a count of entries in associated sections' =>
'%s Zeige Gesamtzahl der Einträge in verknüpften Bereichen',
'%s Pre-populate this field with today\'s date' =>
'%s Dieses Feld mit dem heutigen Datum vorbelegen.',
'Authors can set up a differing language in their profiles.' =>
'Autoren können in Ihren Profilen eine abweichende Sprache einstellen.',
'Children' =>
'Kinder',
'Choose only one. Created if does not exist' =>
'Wählen Sie nur eine Gruppe. Diese wird bei Bedarf erstellt.',
'Create Section' =>
'Bereich erstellen',
'Custom Preferences' =>
'Benutzerdefinierte Einstellungen',
'Data source created at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Data sources</a>' =>
'Diese Datenquelle wurde um %1$s erstellt. <a href="%2$s">Eine neue erstellen?</a> <a href="%3$s">Alle Datenquellen anzeigen.</a>',
'Data source updated at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Data sources</a>' =>
'Diese Datenquelle wurde um %1$s aktualisiert. <a href="%2$s">Eine neue erstellen?</a> <a href="%3$s">Alle Datenquellen anzeigen.</a>',
'Daylight savings time' =>
'Sommerzeit',
'Directory <code>%s</code> does not exist.' =>
'Der Ordner <code>%s</code> existiert nicht.',
'Edit Configuration' =>
'Einstellungen bearbeiten',
'Edit Page Confguration' =>
'Seiteneinstellungen bearbeiten',
'Edit Page Template' =>
'Seitenvorlage bearbeiten',
'Edit Template' =>
'Vorlage bearbeiten',
'It looks like you\'re trying to create an entry. Perhaps you want fields first? <a href="%s">Click here to create some.</a>' =>
'Anscheinend versuchen Sie einen Eintrag zu erstellen. Vielleicht möchten Sie zunächst Felder erstellen. <a href="%s">Klicken Sie hier, um welche zu erstellen.</a>',
'Language' =>
'Sprache',
'Local' =>
'Lokal',
'Name' =>
'Name',
'No <code>/symphony</code> directory was found at this location. Please upload the contents of Symphony\'s install package here.' =>
'An diesem Ort konnte kein Verzeichnis <code>/symphony</code> gefunden werden. Bitte laden Sie den Inhalt des Symphony-Installationspakets hierher hoch.',
'Page created at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Pages</a>' =>
'Diese Seite wurde um %1$s erstellt. <a href="%2$s">Eine neue erstellen?</a> <a href="%3$s">Alle Seiten anzeigen</a>',
'Page updated at %1$s. <a href="%2$s">Create another?</a> <a href="%3$s">View all Pages</a>' =>
'Diese Seite wurde um %1$s aktualisiert. <a href="%2$s">Eine neue erstellen?</a> <a href="%3$s">Alle Seiten anzeigen</a>',
'Page updated at %s. <a href="%s">View all Pages</a>' =>
'Diese Seite wurde um %1$s aktualisiert. <a href="%2$s">Alle Seiten anzeigen</a>',
'System Default' =>
'Systemstandard',
'System Language' =>
'Systemsprache',
'The destination directory, <code>%s</code>, does not exist.' =>
'Der Zielordner <code>%s</code> existiert nicht.',
'The parameter <code id="output-param-name">$ds-%s</code> will be created with this field\'s value for XSLT or other data sources to use.' =>
'Der Parameter <code id="output-param-name">$ds-%s</code> wird auf Grundlage des Wertes dieses Feldes erstellt und kann im XSLT oder in anderen Datenquellen verwendet werden.',
'The Section you are looking for, <code>%s</code>, could not be found.' =>
'Der von Ihnen gesuchte Bereich <code>%s</code> konnte nicht gefunden werden.',
'User Type' =>
'Benutzertyp',
'Text Formatter' =>
'Textformatierung',
'Example XML' =>
'Beispiel-XML',
'released on' =>
'veröffentlicht am',
);
/**
* Transliterations
*/
// Transliteration table: PCRE pattern => ASCII replacement, applied when
// converting titles to URL-safe handles. Uses the German convention of
// expanding umlauts and ligatures (Ä -> Ae, ö -> oe, Ü -> Ue, ß -> ss).
$transliterations = array(
// Alphabetical
'/À/' => 'A', '/Á/' => 'A', '/Â/' => 'A', '/Ã/' => 'A', '/Ä/' => 'Ae',
'/Å/' => 'A', '/Ā/' => 'A', '/Ą/' => 'A', '/Ă/' => 'A', '/Æ/' => 'Ae',
'/Ç/' => 'C', '/Ć/' => 'C', '/Č/' => 'C', '/Ĉ/' => 'C', '/Ċ/' => 'C',
'/Ď/' => 'D', '/Đ/' => 'D', '/Ð/' => 'D', '/È/' => 'E', '/É/' => 'E',
'/Ê/' => 'E', '/Ë/' => 'E', '/Ē/' => 'E', '/Ę/' => 'E', '/Ě/' => 'E',
'/Ĕ/' => 'E', '/Ė/' => 'E', '/Ĝ/' => 'G', '/Ğ/' => 'G', '/Ġ/' => 'G',
'/Ģ/' => 'G', '/Ĥ/' => 'H', '/Ħ/' => 'H', '/Ì/' => 'I', '/Í/' => 'I',
'/Î/' => 'I', '/Ï/' => 'I', '/Ī/' => 'I', '/Ĩ/' => 'I', '/Ĭ/' => 'I',
'/Į/' => 'I', '/İ/' => 'I', '/IJ/' => 'Ij', '/Ĵ/' => 'J', '/Ķ/' => 'K',
'/Ł/' => 'L', '/Ľ/' => 'L', '/Ĺ/' => 'L', '/Ļ/' => 'L', '/Ŀ/' => 'L',
'/Ñ/' => 'N', '/Ń/' => 'N', '/Ň/' => 'N', '/Ņ/' => 'N', '/Ŋ/' => 'N',
'/Ò/' => 'O', '/Ó/' => 'O', '/Ô/' => 'O', '/Õ/' => 'O', '/Ö/' => 'Oe',
'/Ø/' => 'O', '/Ō/' => 'O', '/Ő/' => 'O', '/Ŏ/' => 'O', '/Œ/' => 'Oe',
'/Ŕ/' => 'R', '/Ř/' => 'R', '/Ŗ/' => 'R', '/Ś/' => 'S', '/Š/' => 'S',
'/Ş/' => 'S', '/Ŝ/' => 'S', '/Ș/' => 'S', '/Ť/' => 'T', '/Ţ/' => 'T',
'/Ŧ/' => 'T', '/Ț/' => 'T', '/Ù/' => 'U', '/Ú/' => 'U', '/Û/' => 'U',
'/Ü/' => 'Ue', '/Ū/' => 'U', '/Ů/' => 'U', '/Ű/' => 'U', '/Ŭ/' => 'U',
'/Ũ/' => 'U', '/Ų/' => 'U', '/Ŵ/' => 'W', '/Ý/' => 'Y', '/Ŷ/' => 'Y',
'/Ÿ/' => 'Y', '/Y/' => 'Y', '/Ź/' => 'Z', '/Ž/' => 'Z', '/Ż/' => 'Z',
'/Þ/' => 'T',
'/à/' => 'a', '/á/' => 'a', '/â/' => 'a', '/ã/' => 'a', '/ä/' => 'ae',
'/å/' => 'a', '/ā/' => 'a', '/ą/' => 'a', '/ă/' => 'a', '/æ/' => 'ae',
'/ç/' => 'c', '/ć/' => 'c', '/č/' => 'c', '/ĉ/' => 'c', '/ċ/' => 'c',
'/ď/' => 'd', '/đ/' => 'd', '/ð/' => 'd', '/è/' => 'e', '/é/' => 'e',
'/ê/' => 'e', '/ë/' => 'e', '/ē/' => 'e', '/ę/' => 'e', '/ě/' => 'e',
'/ĕ/' => 'e', '/ė/' => 'e', '/ĝ/' => 'g', '/ğ/' => 'g', '/ġ/' => 'g',
'/ģ/' => 'g', '/ĥ/' => 'h', '/ħ/' => 'h', '/ì/' => 'i', '/í/' => 'i',
'/î/' => 'i', '/ï/' => 'i', '/ī/' => 'i', '/ĩ/' => 'i', '/ĭ/' => 'i',
'/į/' => 'i', '/ı/' => 'i', '/ij/' => 'ij', '/ĵ/' => 'j', '/ķ/' => 'k',
'/ł/' => 'l', '/ľ/' => 'l', '/ĺ/' => 'l', '/ļ/' => 'l', '/ŀ/' => 'l',
'/ñ/' => 'n', '/ń/' => 'n', '/ň/' => 'n', '/ņ/' => 'n', '/ŋ/' => 'n',
'/ò/' => 'o', '/ó/' => 'o', '/ô/' => 'o', '/õ/' => 'o', '/ö/' => 'oe',
'/ø/' => 'o', '/ō/' => 'o', '/ő/' => 'o', '/ŏ/' => 'o', '/œ/' => 'oe',
'/ŕ/' => 'r', '/ř/' => 'r', '/ŗ/' => 'r', '/ś/' => 's', '/š/' => 's',
'/ş/' => 's', '/ŝ/' => 's', '/ș/' => 's', '/ť/' => 't', '/ţ/' => 't',
'/ŧ/' => 't', '/ț/' => 't', '/ù/' => 'u', '/ú/' => 'u', '/û/' => 'u',
'/ü/' => 'ue', '/ū/' => 'u', '/ů/' => 'u', '/ű/' => 'u', '/ŭ/' => 'u',
'/ũ/' => 'u', '/ų/' => 'u', '/ŵ/' => 'w', '/ý/' => 'y', '/ŷ/' => 'y',
'/ÿ/' => 'y', '/y/' => 'y', '/ź/' => 'z', '/ž/' => 'z', '/ż/' => 'z',
'/þ/' => 't', '/ß/' => 'ss', '/ſ/' => 'ss', '/ƒ/' => 'f', '/ĸ/' => 'k',
'/ʼn/' => 'n',
// Symbolic: punctuation that is dropped (null) or normalised to ASCII
'/\(/' => null, '/\)/' => null, '/,/' => null,
'/–/' => '-', '/-/' => '-', '/„/' => '"',
'/“/' => '"', '/”/' => '"', '/—/' => '-',
'/¿/' => null, '/‽/' => null, '/¡/' => null,
// Ampersands: '&' becomes the German word 'und'; the (?!&) lookahead
// skips escaped/double ampersands, and the anchored variants control
// where the joining hyphens are placed.
'/^&(?!&)$/' => 'und',
'/^&(?!&)/' => 'und-',
'/&(?!&)&/' => '-und',
'/&(?!&)/' => '-und-',
);
| <?php
// Language-pack metadata: display name for the language selector plus
// translator credits and the pack's release date.
$about = array(
'name' => 'Deutsch',
'author' => array(
'name' => 'Nils Hörrmann',
'email' => 'post@nilshoerrmann.de',
'website' => 'http://www.nilshoerrmann.de'
),
'release-date' => '2010-02-09',
);
/**
* Symphony Core
*/
$dictionary = array(
'"%1$s" contains invalid XML. The following error was returned: <code>%2$s</code>' =>
'"%1$s" enthält ungültiges XML. Der folgende Fehler wurde zurückgegeben: <code>%2$s</code>',
'%1$s – %2$s' =>
'%1$s – %2$s',
'%1$s – %2$s – %3$s' =>
'%1$s – %2$s – %3$s',
'%1$s Allow remote login via <a href="%2$s">%2$s</a>' =>
'%1$s Remotezugriff über <a href="%2$s">%2$s</a> erlauben',
'%s Allow selection of multiple authors' =>
'%s Erlaube Auswahl mehrerer Autoren.',
'%s Allow selection of multiple options' =>
'%s Erlaube Mehrfachauswahl.',
'%s Checked by default' =>
'%s Standardmäßig ausgewählt.',
'%s Hide this section from the Publish menu' =>
'%s Diesen Bereich nicht im Menü anzeigen.',
'%s HTML-encode text' =>
'%s Ausgabe HTML-konform kodieren.',
'%s is not a valid object. Failed to append to XML.' =>
'%s ist kein gültiges Objekt. Es konnte dem XML nicht hinzugefügt werden.',
'%s Make this a required field' =>
'%s Dieses Feld verpflichtend machen.',
'%s Redirect to 404 page when no results are found' =>
'%s Auf 404-Fehlerseite umleiten, wenn keine Ergebnisse gefunden werden können.',
'%s Select current user by default' =>
'%s Den aktuellen Benutzer vorauswählen',
'%s Show column' =>
'%s In der Übersicht anzeigen',
'← Previous' =>
'← Vorherige',
'\'%s\' contains invalid data. Please check the contents.' =>
'\'%s\' enthält ungültige Daten. Bitte überprüfen Sie den Inhalt.',
'\'%s\' is a required field.' =>
'\'%s\' ist ein Pflichtfeld.',
' (<b>Notice that it is possible to get mixtures of success and failure messages when using the "Allow Multiple" option</b>)' =>
' (<b>Bitte beachten Sie, dass Sie möglicherweise eine Mischung aus Fehler- und Erfolgsrückmeldungen erhalten, wenn Sie die Erstellung mehrerer Einträge zulassen.</b>)',
'<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr> Processor' =>
'<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr>-Prozessor',
'<abbr title="PHP: Hypertext Pre-processor">PHP</abbr> 5.1 or above' =>
'<abbr title="PHP: Hypertext Pre-processor">PHP</abbr> 5.1 oder höher',
'<acronym title="Universal Resource Locator">URL</acronym>' =>
'<acronym title="Universal Resource Locator">URL</acronym>',
'<acronym title="Universal Resource Locator">URL</acronym> Parameters' =>
'<acronym title="Universal Resource Locator">URL-</acronym>Parameter',
'<a href="%1$s" title="Show debug view for %2$s">Line %3$d</a>' =>
'<a href="%1$s" title="Wechsle in den Debug-Modus für %2$s">Zeile %3$d</a>',
'<a href="%s" title="Show debug view">Compile</a>' =>
'<a href="%s" title="Zeige Debug-Modus">Kompiliere</a>',
'A 403 type page already exists.' =>
'Es existiert bereits eine 403-Fehlerseite.',
'A 404 type page already exists.' =>
'Es existiert bereits eine 404-Fehlerseite.',
'Aardvarks' =>
'Erdferkel',
'about 1 hour ago' =>
'vor etwa einer Stunde',
'about {$hours} hours ago' =>
'vor etwa {$hours} Stunden',
'Access Denied' =>
'Zugriff verweigert',
'A database error occurred while attempting to reorder.' =>
'Beim Neuordnen ist ein Datenbankfehler aufgetreten.',
'A Data source with the name <code>%s</code> name already exists' =>
'Eine Datenquelle mit dem Namen <code>%s</code> existiert bereits',
'Add an Author' =>
'Autor hinzufügen',
'Add a new author' =>
'Neuen Autor hinzufügen',
'Add item' =>
'Hinzufügen',
'Admin Only' =>
'Nur Administratoren',
'Advanced Configuration' =>
'Erweiterte Einstellungen',
'A field with that element name already exists. Please choose another.' =>
'Ein Feld mit diesem Elementnamen existiert bereits. Bitte wählen Sie einen anderen.',
'A file with the name %1$s already exists in %2$s. Please rename the file first, or choose another.' =>
'Eine Datei mit dem Namen %1$s existiert bereits in %2$s. Bitte benennen Sie die Datei zuerst um oder wählen Sie eine andere.',
'All of these fields can be set dynamically using the exact field name of another field in the form as shown below in the example form:' =>
'Alle diese Felder können dynamisch befüllt werden, indem Sie den genauen Feldnamen eines anderen Feldes des Formulares verwenden, wie das nachfolgende Beispiel zeigt:',
'Allow Multiple' =>
'Mehrere zulassen',
'a minute ago' =>
'vor einer Minute',
'An email containing a customised login link has been sent. It will expire in 2 hours.' =>
'Eine E-Mail mit personalisierten Anmeldedaten wurden verschickt. Sie verliert in zwei Stunden ihre Gültigkeit.',
'An empty result will be returned when this parameter does not have a value. Do not wrap the parameter with curly-braces.' =>
'Wenn dieser Parameter keinen Wert hat, wird ein leeres Ergebnis ausgegeben. Umschließen Sie den Parameter nicht mit geschweiften Klammern.',
'An error occurred during installation. You can view you log <a href="install-log.txt">here</a> for more details.' =>
'Während der Installation ist ein Fehler aufgetreten. Sie können das <a href="install-log.txt">Installations-Logbuch</a> für weitere Informationen einsehen.',
'An error occurred while processing this form. <a href="#error">See below for details.</a>' =>
'Beim Verarbeiten dieses Formulars ist ein Fehler aufgetreten. <a href="#error">Details siehe unten.</a>',
'An Event with the name <code>%s</code> name already exists' =>
'Ein Ereignis mit dem Namen <code>%s</code> existiert bereits',
'A new password has been requested for your account. Login using the following link, and change your password via the Authors area:' =>
'Ein neues Passwort wurde für Ihren Zugang angefordert. Sie können sich anmelden, indem Sie nachfolgendem Link folgen, und dann Ihr Passwort im Autorenbereich ändern:',
'An existing <code>/workspace</code> directory was found at this location. Symphony will use this workspace.' =>
'An diesem Ort wurde ein bereits existierendes <code>/workspace</code>-Verzeichnis gefunden. Symphony wird diesen Workspace verwenden.',
'An index type page already exists.' =>
'Es existiert bereits eine Index-Seite.',
'An unknown database occurred while attempting to create the section.' =>
'Es ist ein unbekannter Datenbankfehler beim Erstellen des Bereiches aufgetreten.',
'A page number must be set' =>
'Eine Seitenzahl muss festgelegt werden',
'A page with that handle already exists' =>
'Es existiert bereits eine Seite mit diesem Bezeichner.',
'A page with that title already exists' =>
'Es existiert bereits eine Seite mit diesem Titel.',
'Apply' =>
'Anwenden',
'A result limit must be set' =>
'Eine Ergebnisobergrenze muss festgelegt werden',
'Are you sure you want to {$action}?' =>
'Sind Sie sicher, dass Sie {$action} wollen?',
'Are you sure you want to {$action} {$count} items?' =>
'Sind Sie sicher, dass sie {$count} Einträge {$action} wollen?',
'Are you sure you want to {$action} {$name}?' =>
'Sind Sie sicher, dass sie {$name} {$action} wollen?',
'ascending' =>
'aufsteigend',
'A Section with the name <code>%s</code> name already exists' =>
'Es existiert bereits ein Bereich mit dem Namen <code>%s</code>',
'at' =>
'um',
'At least one source must be specified, dynamic or static.' =>
'Mindestens eine Quelle, dynamisch oder statisch, muss festgelegt werden.',
'Author' =>
'Autor',
'Author created at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Authors</a>' =>
'Der Autor wurde um %1$s erstellt. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Autoren anzeigen.</a>',
'Author ID' =>
'Autor-ID',
'Authors' =>
'Autoren',
'authors' =>
'autoren',
'Author updated at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Authors</a>' =>
'Der Autor wurde um %1$s aktualisiert. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Autoren anzeigen.</a>',
'A Utility with that name already exists. Please choose another.' =>
'Es existiert bereits ein Baustein mit diesem Namen. Bitte wählen Sie einen anderen.',
'Best Regards,' =>
'Mit freundlichen Grüßen,',
'Big' =>
'Dick',
'Birds' =>
'Vögel',
'Blueprints' =>
'Blaupausen',
'Body' =>
'Daten',
'Body is a required field.' =>
'Der Datenbereich ist ein Pflichtfeld.',
'Bugs' =>
'Käfer',
'Can\'t open file %s' =>
'Datei %s konnte nicht geöffnet werden',
'Cats' =>
'Katzen',
'Change Password' =>
'Passwort ändern',
'Checkbox' =>
'Kontrollkästchen',
'Coconut' =>
'Kokosnuss',
'Cold' =>
'Kalt',
'Components' =>
'Komponenten',
'Confirm New Password' =>
'Neues Passwort wiederholen',
'Confirm Password' =>
'Passwort wiederholen',
'Could not %1$s %2$s, there was a problem loading the object. Check the driver class exists.' =>
'%2$s konnte aufgrund eines Problems beim Laden des Objektes nicht %1$s werden. Überprüfen Sie, ob die Treiberklasse existiert.',
'Could not add directory "%s".' =>
'Das Verzeichnis "%s" konnte nicht hinzugefügt werden.',
'Could not add file "%s".' =>
'Die Datei "%s" konnte nicht hinzugefügt werden.',
'Could not find Data Source <code>%s</code>. If the Data Source was provided by an Extensions, ensure that it is installed, and enabled.' =>
'Die Datenquelle <code>%s</code> konnte nicht gefunden werden. Wenn diese Datenquelle von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Could not find Event <code>%s</code>. If the Event was provided by an Extensions, ensure that it is installed, and enabled.' =>
'Das Ereignis <code>%s</code> konnte nicht gefunden werden. Wenn dieses Ereignis von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Could not find extension at location %s' =>
'Die Erweiterung konnte nicht unter %s gefunden werden.',
'Could not find Field <code>%1$s</code> at <code>%2$s</code>. If the Field was provided by an Extension, ensure that it is installed, and enabled.' =>
'Das Feld <code>%1$s</code> konnte nicht unter <code>%2$s</code> gefunden werden. Wenn dieses Feld von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Could not find Text Formatter <code>%s</code>. If the Text Formatter was provided by an Extensions, ensure that it is installed, and enabled.' =>
'Der Textformatierer <code>%s</code> konnte nicht gefunden werden. Wenn der Textformatierer von einer Erweiterung bereitgestellt wurde, überprüfen Sie, dass diese installiert und aktiviert ist.',
'Create a new data source' =>
'Neue Datenquelle erstellen',
'Create a new entry' =>
'Neuen Eintrag erstellen',
'Create a new event' =>
'Neues Ereignis erstellen',
'Create a new page' =>
'Neue Seite erstellen',
'Create a new utility' =>
'Neuen Baustein erstellen',
'Create a section' =>
'Bereich erstellen',
'Create Author' =>
'Autor erstellen',
'Create Data Source' =>
'Datenquelle erstellen',
'Create Entry' =>
'Eintrag erstellen',
'Create Event' =>
'Ereignis erstellen',
'Create New' =>
'Neu erstellen',
'Create Page' =>
'Seite erstellen',
'Create Utility' =>
'Baustein erstellen',
'Customise how Date and Time values are displayed throughout the Administration interface.' =>
'Passen Sie an, wie Datums- und Zeitangaben innerhalb des Administrationsbereichs dargestellt werden.',
'Custom XML' =>
'Benutzerdefiniertes XML',
'Database' =>
'Datenbank',
'Database Connection' =>
'Datenbankverbindung',
'Database Error' =>
'Datenbankfehler',
'Data retrieved from the Symphony support server is decompressed with the ZLib compression library.' =>
'Daten, die vom Symphony-Supportserver empfangen werden, werden mit der ZLib-Kompression-Bibliothek dekomprimiert.',
'Data Source' =>
'Datenquelle',
'Data source output grouping is not supported by the <code>%s</code> field' =>
'Ergebnisgruppierung für Datenquellen wird vom Feld <code>%s</code> nicht unterstützt',
'Data Sources' =>
'Datenquellen',
'Date' =>
'Datum',
'Date and Time' =>
'Datum und Zeit',
'Date Format' =>
'Datumsformat',
'Dear <!-- RECIPIENT NAME -->,' =>
'Liebe(r) <!-- RECIPIENT NAME -->,',
'Default Section' =>
'Standardbereich',
'Delete' =>
'Löschen',
'Delete Entries' =>
'Einträge löschen',
'Delete this author' =>
'Diesen Autor löschen',
'Delete this data source' =>
'Diese Datenquelle löschen',
'Delete this entry' =>
'Diesen Eintrag löschen',
'Delete this event' =>
'Dieses Ereignis löschen',
'Delete this page' =>
'Diese Seite löschen',
'Delete this section' =>
'Diesen Bereich löschen',
'Delete this utility' =>
'Diesen Baustein löschen',
'descending' =>
'absteigend',
'Description' =>
'Beschreibung',
'Destination Directory' =>
'Zielordner',
'Destination folder, <code>%s</code>, is not writable. Please check permissions.' =>
'Das Zielverzeichnis <code>%s</code> ist nicht beschreibbar. Bitte überprüfen Sie die Zugriffsrechte.',
'Developer' =>
'Entwickler',
'Directories' =>
'Verzeichnisse',
'Disable' =>
'Deaktivieren',
'Dogs' =>
'Hunde',
'Dynamic Options' =>
'Dynamische Optionen',
'Dynamic XML' =>
'Dynamisches XML',
'dynamic_xml' =>
'dynamisches_xml',
'E-mail address entered is invalid' =>
'Die eingegebene E-Mail-Adresse ist ungültig',
'E-mail address is required' =>
'Die E-Mail-Adresse ist eine Pflichtangabe',
'Edit' =>
'Bearbeiten',
'Email' =>
'E-Mail',
'Email Address' =>
'E-Mail-Adresse',
'Enable' =>
'Aktivieren',
'Enabled' =>
'Aktiviert',
'Encumbered' =>
'belastet',
'Enter your email address to be sent a remote login link with further instructions for logging in.' =>
'Geben Sie Ihre E-Mail-Adresse an, um einen Link mit weiteren Erläuterungen zur Anmeldung zugesandt zu bekommen.',
'Entries' =>
'Einträge',
'Entry created at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Entries</a>' =>
'Der Eintrag wurde um %1$s erstellt. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Einträge anzeigen.</a>',
'Entry created successfully.' =>
'Eintrag erfolgreich erstellt.',
'Entry edited successfully.' =>
'Eintrag erfolgreich bearbeitet.',
'Entry encountered errors when saving.' =>
'Beim Speichern des Eintrags sind Fehler aufgetreten.',
'Entry limit specified was not a valid type. String or Integer expected.' =>
'Die festgelegte Obergrenze entspricht keinem gültigen Typ. String oder Integer erwartet. ',
'Entry updated at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Entries</a>' =>
'Dieser Eintrag wurde um %1$s aktualisiert. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Einträge anzeigen.</a>',
'Entry [created | edited] successfully.' =>
'Eintrag erfolgreich [erstellt | bearbeitet].',
'Environment Settings' =>
'Umgebungseinstellungen',
'Error creating field object with id %1$d, for filtering in data source "%2$s". Check this field exists.' =>
'Beim Erstellen des Feld-Objekts mit der ID %1$d, das zum Filtern der Datenquelle "%2$s" verwendet wird, ist ein Fehler aufgetreten. Überprüfen Sie, ob das Feld existiert.',
'Essentials' =>
'Grundangaben',
'Event created at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Events</a>' =>
'Dieses Ereignis wurde um %1$s erstellt. <a href="%2$s" accesskey="c">Ein neues erstellen?</a> <a href="%3$s" accesskey="a">Alle Ereignisse anzeigen.</a>',
'Events' =>
'Ereignisse',
'Event updated at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Events</a>' =>
'Dieses Ereignis wurde um %1$s aktualisiert. <a href="%2$s" accesskey="c">Ein neues erstellen?</a> <a href="%3$s" accesskey="a">Alle Ereignisse anzeigen.</a>',
'Example Front-end Form Markup' =>
'Beispiel-Frontend-Formular',
'Existing Values' =>
'Existierende Werte',
'Extensions' =>
'Erweiterungen',
'Failed to delete <code>%s</code>. Please check permissions.' =>
'<code>%s</code> konnte nicht gelöscht werden. Bitte überprüfen Sie die Zugriffsrechte.',
'Failed to write Data source to <code>%s</code>. Please check permissions.' =>
'Datenquelle konnte nicht unter <code>%s</code> gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte.',
'Failed to write Event to <code>%s</code>. Please check permissions.' =>
'<code>%s</code> konnte nicht gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte.',
'Fields' =>
'Felder',
'File chosen in "%1$s" exceeds the maximum allowed upload size of %2$s, specified by Symphony.' =>
'Die in "%1$s" gewählte Datei überschreitet die für Symphony festgelegte maximale Uploadgröße von %2$s.',
'File chosen in "%1$s" exceeds the maximum allowed upload size of %2$s specified by your host.' =>
'Die in "%1$s" gewählte Datei überschreitet die von Ihrem Host festgelegte maximale Uploadgröße von %2$s.',
'File chosen in \'%s\' does not match allowable file types for that field.' =>
'Die in \'%s\' ausgewählte Datei entspricht keinem erlaubten Dateityp für dieses Feld.',
'File chosen in \'%s\' was only partially uploaded due to an error.' =>
'Die in \'%s\' ausgewählte Datei wurde aufgrund eines Fehlers nur teilweise hochgeladen.',
'Files' =>
'Dateien',
'File Upload' =>
'Dateiupload',
'Filter %s by' =>
'%s filtern mit',
'Filter Authors by' =>
'Autoren filtern mit',
'Filter Navigation by' =>
'Navigation filtern mit',
'Filter Results' =>
'Ergebnisfilter',
'Filter Rules' =>
'Filterregeln',
'First' =>
'Erste',
'First Name' =>
'Vorname',
'First name is required' =>
'Der Vorname ist eine Pflichtangabe',
'Forbidden' =>
'Verboten',
'Formatting' =>
'Formatierung',
'General' =>
'Allgemein',
'Group By' =>
'Gruppieren nach',
'Hairy' =>
'Haarig',
'Handle' =>
'Bezeichner',
'Hi %s,' =>
'Hi %s,',
'Host' =>
'Host',
'Hot' =>
'Heiß',
'ID' =>
'ID',
'Included Elements' =>
'Eingebundene Elemente',
'Installation Failure' =>
'Installation fehlgeschlagen',
'Install Symphony' =>
'Symphony installieren',
'Invalid element name. Must be valid QName.' =>
'Ungültiger Elementname. Muss ein gültiger QName sein.',
'Invalid Entry ID specified. Could not create Entry object.' =>
'Ungültige Eintrags-ID angegeben. Eintragsobjekt konnte nicht erstellt werden.',
'It looks like your trying to create an entry. Perhaps you want fields first? <a href="%s">Click here to create some.</a>' =>
'Anscheinend versuchen Sie einen neuen Eintrag zu erstellen. Vielleicht möchten Sie vorher Felder anlegen. <a href="%s">Klicken Sie hier um Felder anzulegen.</a>',
'It will expire in 2 hours. If you did not ask for a new password, please disregard this email.' =>
'Es wird in zwei Stunden ablaufen. Falls Sie kein neues Passwort angefordert haben, ignorieren Sie bitte diese Nachricht.',
'just now' =>
'gerade eben',
'Label' =>
'Bezeichnung',
'Large' =>
'Groß',
'Last' =>
'Letzte',
'Last Name' =>
'Nachname',
'Last name is required' =>
'Der Nachname ist eine Pflichtangabe',
'Last Seen' =>
'Letzter Besuch',
'Leave password fields blank to keep the current password' =>
'Lassen Sie das Passwortfeld leer, um das derzeitige Passwort zu behalten',
'Leave these fields unless you are sure they need to be changed.' =>
'Belassen Sie diese Felder wie sie sind, es sei denn, Sie sind sich sicher, dass sie geändert werden müssen.',
'Line %s' =>
'Zeile %s',
'list of comma author usernames.' =>
'Kommagetrennte Liste der Autoren-Benutzernamen.',
'Login' =>
'Anmeldung',
'Login Details' =>
'Anmeldedaten',
'Logout' =>
'Abmelden',
'Long Description <i>Optional</i>' =>
'Lange Beschreibung <i>optional</i>',
'Lumpy' =>
'Pummelig',
'Main content' =>
'Hauptbereich',
'Make sure that you delete <code>' =>
'Stellen Sie sicher, dass Sie <code>',
'Make textarea %s rows tall' =>
'Stelle Textfeld %s Zeilen hoch dar.',
'Men' =>
'Männer',
'Message' =>
'Nachricht',
'Missing Requirements' =>
'Fehlende Voraussetzungen',
'Monkeys' =>
'Affen',
'Must be a valid number' =>
'Muss eine gültige Zahl sein',
'Must be a valid number or parameter' =>
'Muss eine gültige Zahl oder ein gültiger Parameter sein',
'Must be greater than zero' =>
'Muss größer als Null sein',
'My<abbr title="Structured Query Language">SQL</abbr> 4.1 or above' =>
'My<abbr title="Structured Query Language">SQL</abbr> 4.1 oder höher',
'MySQL Error (%1$s): %2$s in query "%3$s"' =>
'MySQL-Fehler (%1$s): %2$s in query "%3$s"',
'Name is a required field.' =>
'Name ist ein Pflichtfeld.',
'Namespace' =>
'Namensraum',
'Namespace Declarations <i>Optional</i>' =>
'Namensraumdeklarationen <i>optional</i>',
'navigation' =>
'navigation',
'Navigation' =>
'Navigation',
'Navigation Group' =>
'Navigationsgruppe',
'New Password' =>
'Neues Passwort',
'New Symphony Account Password' =>
'Neues Passwort für Ihren Symphony-Zugang',
'Next →' =>
'Nächste →',
'No' =>
'Nein',
'None' =>
'Keine Angaben',
'None found.' =>
'Keine Einträge.',
'No records found.' =>
'Keine Einträge gefunden.',
'No suitable engine object found' =>
'Es konnte kein ausreichendes Engine-Objekt gefunden werden.',
'No suitable XSLT processor was found.' =>
'Es konnte kein ausreichender XSLT-Prozessor gefunden werden.',
'No valid recipients found. Check send-email[recipient] field.' =>
'Es konnten keine Empfänger gefunden werden. Überprüfen Sie das Feld send-email[recipient].',
'Old Password' =>
'Altes Passwort',
'Once installed, you will be able to login to the Symphony admin with these user details.' =>
'Sobald die Installation abgeschlossen ist, können Sie mit diesen Zugangsdaten auf Symphony zugreifen.',
'One or more pages could not be deleted. Please check permissions on <code>/workspace/pages</code>.' =>
'Eine oder mehrere Seiten konnten nicht gelöscht werden. Bitte überprüfen Sie die Rechte für <code>/workspace/pages</code>.',
'Optional' =>
'optional',
'Output Options' =>
'Ausgabeoptionen',
'Outstanding Requirements' =>
'Fehlende Anforderungen',
'Page %1$s of %2$s' =>
'Seite %1$s von %2$s',
'Page could not be deleted because it does not exist.' =>
'Die Seite konnten nicht gelöscht werden, weil sie nicht existiert.',
'Page could not be deleted because it has children.' =>
'Die Seite konnte nicht gelöscht werden, weil sie Unterseiten hat.',
'Page could not be written to disk. Please check permissions on <code>/workspace/pages</code>.' =>
'Die Seite konnte nicht auf der Festplatte gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte für <code>/workspace/pages</code>.',
'Page ID' =>
'Seiten-ID',
'Page Not Found' =>
'Seite konnte nicht gefunden werden',
'Page not found' =>
'Seite nicht gefunden',
'Page Resources' =>
'Seitenbasis',
'Pages' =>
'Seiten',
'Page Settings' =>
'Seiteneinstellungen',
'Page Type' =>
'Seitentyp',
'Parameter Output' =>
'Ausgabeparameter',
'Parent Page' =>
'Übergeordnete Seite',
'Password' =>
'Passwort',
'Password is required' =>
'Das Passwort ist eine Pflichtangabe',
'Passwords did not match' =>
'Passworteingabe stimmte nicht überein',
'Permission Settings' =>
'Zugriffseinstellungen',
'Personal Information' =>
'Persönliche Informationen',
'Pigs' =>
'Schweine',
'Pirates' =>
'Piraten',
'Placement' =>
'Platzierung',
'Please <a href="%s">login</a> to view this page.' =>
'Bitte <a href="%s">melden Sie sich an</a>, um diese Seite zu sehen.',
'Please add the following personal details for this user.' =>
'Bitte ergänzen Sie die nachfolgenden persönlichen Informationen des Nutzers.',
'Please provide Symphony with access to a database.' =>
'Bitte räumen Sie Symphony einen Datenbankzugang ein.',
'Port' =>
'Port',
'Preferences' =>
'Einstellungen',
'Preferences saved.' =>
'Die Einstellungen wurden gespeichert.',
'random' =>
'zufällig',
'Recipient username was invalid' =>
'Empfängername war ungültig',
'Region' =>
'Region',
'Remove File' =>
'Datei entfernen',
'Remove selected items' =>
'Auswahl entfernen',
'Reordering was unsuccessful.' =>
'Das Neusortieren ist fehlgeschlagen.',
'Required URL Parameter <i>Optional</i>' =>
'Verpflichtende URL-Parameter <i>optional</i>',
'Root Path' =>
'Wurzelpfad',
'Round' =>
'Rund',
'Save Changes' =>
'Änderungen speichern',
'Section created at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Sections</a>' =>
'Der Bereich wurde um %1$s erstellt. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Bereiche anzeigen.</a>',
'Section is invalid' =>
'Der Bereich ist ungültig',
'sections' =>
'bereiche',
'Sections' =>
'Bereiche',
'Section updated at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Sections</a>' =>
'Der Bereich wurde um %1$s aktualisiert. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Bereiche anzeigen.</a>',
'Select Box' =>
'Auswahlfeld',
'Send Email' =>
'E-Mail verschicken',
'Send Email Filter' =>
'E-Mail-Versandfilter',
'Set %s' =>
'Setze %s',
'Show a maximum of %s results' =>
'Zeige maximal %s Ergebnisse',
'Show page %s of results' =>
'Zeige Seite %s der Ergebnisse',
'Sidebar' =>
'Seitenleiste',
'Small' =>
'Klein',
'Some errors were encountered while attempting to save.' =>
'Beim Versuch zu speichern sind Fehler aufgetreten.',
'Sort By' =>
'Sortieren nach',
'Sort by %1$s %2$s' =>
'Sortiere nach %1$s %2$s',
'Sorting and Limiting' =>
'Sortierung und Begrenzung',
'Sort Order' =>
'Sortierreihenfolge',
'Source' =>
'Quelle',
'Static Options' =>
'Statische Optionen',
'Static XML' =>
'Statisches XML',
'static_xml' =>
'statisches_xml',
'Status' =>
'Status',
'Submit' =>
'Abschicken',
'Success and Failure XML Examples' =>
'Erfolgs- und Fehlerbeispiele',
'Suggestion List' =>
'Vorschlagsliste',
'Symphony' =>
'Symphony',
'Symphony Concierge' =>
'Symphony-Concierge',
'Symphony Database Error' =>
'Symphony-Datenbankfehler',
'Symphony does not have write permission to the <code>/manifest</code> directory. Please modify permission settings on this directory and its contents to allow this, such as with a recursive <code>chmod -R</code> command.' =>
'Symphony hat keine Schreibrechte für das Verzeichnis <code>/manifest</code>. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses und seiner Inhalte an, zum Beispiel mit einen rekursiven <code>chmod -R</code> Kommando.',
'Symphony does not have write permission to the <code>/symphony</code> directory. Please modify permission settings on this directory. This is necessary only during installation, and can be reverted once installation is complete.' =>
'Symphony hat keine Schreibrechte für das Verzeichnis <code>/symphony</code>. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses an. Diese Änderung ist nur während der Installation nötig und kann danach rückgängig gemacht werden.',
'Symphony does not have write permission to the existing <code>/workspace</code> directory. Please modify permission settings on this directory and its contents to allow this, such as with a recursive <code>chmod -R</code> command.' =>
'Symphony hat keine Schreibrechte für das Verzeichnis <code>/workspace</code>. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses und seiner Inhalte an, zum Beispiel mit einen rekursiven <code>chmod -R</code> Kommando.',
'Symphony does not have write permission to the root directory. Please modify permission settings on this directory. This is necessary only if you are not including a workspace, and can be reverted once installation is complete.' =>
'Symphony hat keine Schreibrechte für das Wurzelverzeichnis. Bitte passen Sie die Zugriffsrechte dieses Verzeichnisses an. Diese Änderung ist nur nötig, wenn Sie keinen Workspace einbinden und kann nach der Installation rückgängig gemacht werden.',
'Symphony does not have write permission to the temporary <code>htaccess</code> file. Please modify permission settings on this file so it can be written to, and renamed.' =>
'Symphony hat keine Schreibrechte für die temporäre <code>.htaccess</code>-Datei. Bitte passen Sie die Zugriffsrechte dieser Datei so an, dass sie umbenannt und beschrieben werden kann.',
'Symphony is ready to be installed at the following location.' =>
'Symphony ist bereit für die Installation an nachfolgendem Ort.',
'Symphony needs an XSLT processor such as Lib<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr> or Sablotron to build pages.' =>
'Symphony benötigt einen XSLT-Prozessor wie Lib<abbr title="eXtensible Stylesheet Language Transformation">XSLT</abbr> oder Sablotron um Seiten erzeugen zu können.',
'Symphony needs a recent version of <abbr title="PHP: Hypertext Pre-processor">PHP</abbr>.' =>
'Symphony benötigt eine aktuelle <abbr title="PHP: Hypertext Pre-processor">PHP</abbr>-Version.',
'Symphony needs a recent version of My<abbr title="Structured Query Language">SQL</abbr>.' =>
'Symphony benötigt eine aktuelle My<abbr title="Structured Query Language">SQL</abbr>-Version.',
'Symphony needs permission to read and write both files and directories.' =>
'Symphony benötigt Lese- und Schreibrechte für Dateien und Verzeichnisse.',
'Symphony needs the following requirements satisfied before installation can proceed.' =>
'Symphony benötigt folgende Voraussetzungen bevor die Installation fortgesetzt werden kann.',
'Symphony normally specifies UTF-8 character encoding for database entries. With compatibility mode enabled, Symphony will instead use the default character encoding of your database.' =>
'Symphony verwendet normalerweise UTF-8-Zeichenkodierung für Datenbankeinträge. Im Kompatibilitätsmodus verwendet Symphony anstelle die Standardzeichenkodierung ihrer Datenbank.',
'Symphony requires <code>MySQL 4.1</code> or greater to work, however version <code>%s</code> was detected. This requirement must be met before installation can proceed.' =>
'Symphony benötigt <code>MySQL 4.1</code> oder neuer, allerdings wurde Version <code>%s</code> erkannt. Ohne die benötigte Version kann die Installation nicht fortgesetzt werden.',
'Symphony requires <code>MySQL 4.1</code> or greater to work. This requirement must be met before installation can proceed.' =>
'Symphony benötigt <code>MySQL 4.1</code> oder neuer. Ohne die benötigte Version kann die Installation nicht fortgesetzt werden.',
'Symphony was unable to connect to the specified database. You may need to modify host or port settings.' =>
'Symphony war nicht in der Lage eine Verbindung zur angegebenen Datenbank aufzubauen. Möglicherweise müssen Sie Ihre Host- oder Port-Einstellungen anpassen.',
'System' =>
'System',
'System Author' =>
'Systemautor',
'System Date' =>
'Systemdatum',
'System ID' =>
'System-ID',
'Table Prefix' =>
'Tabellenprefix',
'Tag List' =>
'Tag-Liste',
'Template' =>
'Vorlage',
'Textarea' =>
'Textfeld',
'Text Input' =>
'Eingabefeld',
'The date specified in \'%s\' is invalid.' =>
'Das angegebene Datum \'%s\' ist ungültig.',
'The entry you are looking for could not be found.' =>
'Der von Ihnen gesuchte Eintrag konnte nicht gefunden werden.',
'The following is an example of what is returned if any filters fail:' =>
'Nachfolgendes Beispiel zeigt das Ergebnis, wenn ein Filter einen Fehler ausgibt:',
'The page you requested does not exist.' =>
'Die aufgerufene Seite existiert nicht.',
'The page you requested to edit does not exist.' =>
'Die Seite, die Sie bearbeiten möchten, existiert nicht.',
'The password and confirmation did not match. Please retype your password.' =>
'Das Passwort und dessen Wiederholung stimmten nicht überein. Bitte geben Sie Ihr Passwort erneut ein.',
'There appears to be an existing <code>.htaccess</code> file in the <code>/symphony</code> directory.' =>
'Es scheint bereits eine <code>.htaccess</code>-Datei innerhalb des Verzeichnisses <code>/symphony</code> zu existieren.',
'There appears to be an existing <code>.htaccess</code> file in the Symphony install location. To avoid name clashes, you will need to delete or rename this file.' =>
'Es scheint bereits eine <code>.htaccess</code>-Datei innerhalb Ihrer Symphony-Installation zu existieren. Um Überschneidungen zu vermeiden, müssen Sie diese löschen oder umbenennen.',
'There is already a field of type <code>%s</code>. There can only be one per section.' =>
'Es existiert bereits ein Feld des Typs <code>%s</code>. Es ist für jeden Bereich nur eines zulässig.',
'There was an error while trying to upload the file <code>%1$s</code> to the target directory <code>%2$s</code>.' =>
'Beim Hochladen der Datei <code>%1$s</code> in den Zielordner <code>%2$s</code> ist ein Fehler aufgetreten.',
'There was a problem locating your account. Please check that you are using the correct email address.' =>
'Es gab Schwierigkeiten Ihren Benutzerzugang zuzuordnen. Überprüfen Sie bitte, ob Sie die richtige E-Mail-Adresse angegeben haben.',
'There were some problems while attempting to save. Please check below for problem fields.' =>
'Beim Speichern sind einige Fehler aufgetreten. Bitte überprüfen Sie die betroffenen Felder.',
'The section associated with the data source <code>%s</code> could not be found.' =>
'Der mit der Datenquelle <code>%s</code> verbundene Bereich konnte nicht gefunden werden.',
'The Section you are looking, <code>%s</code> for could not be found.' =>
'Der von Ihnen gesuchte Bereich <code>%s</code> konnte nicht gefunden werden.',
'The Section you are looking for could not be found.' =>
'Der von Ihnen gesuchte Bereich konnte nicht gefunden werden.',
'The send email filter, upon the event successfully saving the entry, takes input from the form and send an email to the desired recipient. <b>This filter currently does not work with the "Allow Multiple" option.</b> The following are the recognised fields:' =>
'Der Filter zum Versenden von E-Mail schickt, sofern das Ereignis erfolgreich gespeichert werden konnte, alle Eingaben des Formulars an den gewünschten Empfänger. <b>Dieser Filter funktioniert derzeit nicht, wenn Sie die Erstellung mehrerer Einträge zulassen.</b> Folgende Felder werden vom Filter erkannt:',
'The supplied password was rejected. <a href="%s">Retrieve password?</a>' =>
'Das übermittelte Passwort wurde nicht akzeptiert. <a href="%s">Ein neues anfordern?</a>',
'The supplied password was rejected. Make sure it is not empty and that password matches password confirmation.' =>
'Das verwendete Passwort wurde nicht akzeptiert. Stellen Sie sicher, dass das Passwort nicht leer ist und dass es der Passwortbestätigung entspricht.',
'The Symphony configuration file, <code>/manifest/config.php</code>, is not writable. You will not be able to save changes to preferences.' =>
'Die Symphony-Konfigurationsdatei <code>/manifest/config.php</code> ist nicht beschreibbar. Die Änderungen der Voreinstellungen werden nicht gespeichert.',
'The Symphony Team' =>
'Ihr Symphony-Team',
'The table prefix <code><!-- TABLE-PREFIX --></code> is already in use. Please choose a different prefix to use with Symphony.' =>
'Der Tabellenprefix <code><!-- TABLE-PREFIX --></code> ist bereits in Benutzung. Bitte wählen Sie einen anderen Prefix, der in Verbindung mit Symphony verwendet werden soll.',
'This document is not well formed. The following error was returned: <code>%s</code>' =>
'Dieses Dokument ist nicht wohlgeformt. Folgender Fehler wurde zurückgegeben: <code>%s</code>',
'This event will not be processed if any of these rules return true.' =>
'Dieses Ereignis wird nicht ausgeführt werden, wenn eine dieser Regeln wahr zurückgibt.',
'This is a courtesy email to notify you that an entry was created on the %1$s section. You can edit the entry by going to: %2$s' =>
'Diese E-Mail möchte Sie darüber informieren, dass ein Eintrag im Bereich %1$s erstellt wurde. Sie können diesen bearbeiten, indem Sie folgende Seite aufrufen: %2$s',
'This is an example of the form markup you can use on your frontend:' =>
'Dies ist ein Beispiel, das Sie für Ihr Frontend-Formular nutzen können:',
'This is a required field' =>
'Dieses Feld ist verpflichtend.',
'This is a required field.' =>
'Dies ist ein Pflichtfeld.',
'This is not a valid email address. You must provide an email address since you will need it if you forget your password.' =>
'Dies ist keine gültige E-Mail-Adresse. Sie müssen eine E-Mail-Adresse angeben, da Sie diese benötigen, falls Sie Ihr Passwort vergessen sollten.',
'This page could not be rendered due to the following XSLT processing errors.' =>
'Diese Seite konnte aufgrund nachfolgender XSLT-Verarbeitungsfehler nicht dargestellt werden.',
'Time Format' =>
'Zeitformat',
'Title' =>
'Titel',
'Title is a required field' =>
'Titel ist ein Pflichtfeld',
'To edit an existing entry, include the entry ID value of the entry in the form. This is best as a hidden field like so:' =>
'Um einen existierenden Eintrag zu bearbeiten, müssen Sie die Eintrags-ID im Formular einbinden. Dies geht am besten mit einem versteckten Feld:',
'To redirect to a different location upon a successful save, include the redirect location in the form. This is best as a hidden field like so, where the value is the URL to redirect to:' =>
'Um nach erfolgreichem Speichern zu einer anderen Adresse weiterzuleiten, müssen Sie das Umleitungsziel im Formular einbinden. Dies geht am besten mit einem versteckten Feld, wobei der Wert der Ziel-URL entspricht:',
'Two custom fields have the same element name. All element names must be unique.' =>
'Zwei Felder haben den selben Elementnamen. Alle Elementnamen müssen eindeutig sein.',
'Type' =>
'Typ',
'Unable to remove file - %s' =>
'Datei konnte nicht entfernt werden – %s',
'Uninstall' =>
'Deinstallieren',
'Unknown Entry' =>
'Unbekannter Eintrag',
'Unknown errors occurred while attempting to save. Please check your <a href="%s">activity log</a>.' =>
'Ein unbekannter Fehler ist beim Speichern aufgetreten. Bitte überprüfen Sie Ihr <a href="%s">Systemlogbuch</a>.',
'Unknown errors where encountered when saving.' =>
'Beim Speichern sind unbekannte Fehler aufgetreten.',
'Unknown Section' =>
'Unbekannter Bereich',
'Untitled' =>
'Ohne Titel',
'Update Symphony' =>
'Symphony Aktualisierung',
'Uploading \'%s\' failed. Could not write temporary file to disk.' =>
'Das Hochladen von \'%s\' ist fehlgeschlagen. Die temporäre Datei konnte nicht gespeichert werden.',
'Uploading \'%s\' failed. File upload stopped by extension.' =>
'Das Hochladen von \'%s\' ist fehlgeschlagen. Der Vorgang wurde von einer Erweiterung unterbrochen.',
'URI' =>
'URI',
'URL' =>
'URL',
'URL Handle' =>
'URL-Bezeichner',
'URL Parameters' =>
'URL-Parameter',
'Use <code>{$param}</code> syntax to filter by page parameters.' =>
'Verwenden Sie <code>{$param}</code>, um mit Seitenparametern zu filtern.',
'Use <code>{$param}</code> syntax to limit by page parameters.' =>
'Verwenden Sie <code>{$param}</code>, um mit Seitenparametern zu begrenzen.',
'Use <code>{$param}</code> syntax to specify dynamic portions of the URL.' =>
'Verwenden Sie <code>{$param}</code>, um dynamische Teile der URL festzulegen.',
'Use an XPath expression to select which elements from the source XML to include.' =>
'Benutzen Sie einen X-Path-Ausdruck, um die einzubinden Elemente der XML-Quelle auszuwählen.',
'Use compatibility mode' =>
'Verwende Kompatibilitätsmodus',
'Use Field' =>
'Zu verwendendes Feld',
'User Information' =>
'Benutzerinformationen',
'Username' =>
'Benutzername',
'Username is already taken' =>
'Dieser Benutzername ist bereits vergeben',
'Username is required' =>
'Der Benutzername ist eine Pflichtangabe',
'User type' =>
'Benutzergruppe',
'Utilities' =>
'Bausteine',
'Utility' =>
'Baustein',
'Utility could not be written to disk. Please check permissions on <code>/workspace/utilities</code>.' =>
'Baustein konnte nicht auf der Festplatte gespeichert werden. Bitte überprüfen Sie die Zugriffsrechte für <code>/workspace/utilities</code>.',
'Utility created at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Utilities</a>' =>
'Der Baustein wurde um %1$s erstellt. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Bausteine anzeigen.</a>',
'Utility updated at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Utilities</a>' =>
'Der Baustein wurde um %1$s aktualisiert. <a href="%2$s" accesskey="c">Einen neuen erstellen?</a> <a href="%3$s" accesskey="a">Alle Bausteine anzeigen.</a>',
'Validation Rule <i>Optional</i>' =>
'Validierungsregel <i>optional</i>',
'Value' =>
'Wert',
'Version' =>
'Version',
'Version %s' =>
'Version %s',
'Viewing %1$s - %2$s of %3$s entries' =>
'Zeige %1$s - %2$s von %3$s Einträgen',
'Weasels' =>
'Wiesel',
'Website Name' =>
'Webseitenname',
'Website Preferences' =>
'Webseiteneinstellungen',
'When an error occurs during saving, due to either missing or invalid fields, the following XML will be returned' =>
'Wenn beim Speichern ein Fehler auftritt, weil Felder fehlen oder ungültig sind, wird nachfolgendes XML ausgeben',
'When saved successfully, the following XML will be returned:' =>
'Nach erfolgreicher Speicherung, wird nachfolgendes XML ausgegeben:',
'With Selected...' =>
'Auswahl …',
'Women' =>
'Frauen',
'Worms' =>
'Würmer',
'Wrong password. Enter old one to change email address.' =>
'Falsches Passwort. Geben Sie Ihr altes Passwort zum Ändern der E-Mail-Adresse ein.',
'Wrong password. Enter old password to change it.' =>
'Falsches Passwort. Geben Sie Ihr altes Passwort zum Ändern ein.',
'XML' =>
'XML',
'XML is invalid' =>
'XML ist ungültig',
'XML Output' =>
'XML-Ausgabe',
'XML returned is invalid.' =>
'Zurückgegebenes XML ist ungültig',
'XSLT Processing Error' =>
'XSLT-Verarbeitungsfehler',
'Yes' =>
'Ja',
'You are already using the most recent version of Symphony. There is no need to run the installer, and can be safely deleted.' =>
'Sie verwenden bereits die aktuellste Version von Symphony. Es ist nicht nötig das Installationsprogramm laufen zu lassen, es kann sicher entfernt werden.',
'You are not authorised to access this page.' =>
'Sie sind nicht berechtigt diese Seite zu besuchen.',
'You are not authorised to access this section.' =>
'Sie sind nicht autorisiert, auf diesen Bereich zuzugreifen.',
'You are not using the most recent version of Symphony. This update is only compatible with Symphony 2.' =>
'Sie verwenden nicht die aktuellste Version von Symphony. Diese Aktualisierung ist nur mit Symphony 2 kompatibel.',
'You must enter a Password. This will be your Symphony login information.' =>
'Sie müssen ein Passwort anlegen, welches für Sie bei der Symphony-Anmeldung verwendet werden soll.',
'You must enter a Username. This will be your Symphony login information.' =>
'Sie müssen einen Benutzernamen anlegen, welcher für Sie bei der Symphony-Anmeldung verwendet werden soll.',
'You must enter your name.' =>
'Sie müssen Ihren Namen angeben.',
'ZLib Compression Library' =>
'ZLib-Compression-Library',
'[Symphony] A new entry was created on %s' =>
'[Symphony] Ein neuer Eintrag auf %s wurde erstellt',
'{$minutes} minutes ago' =>
'vor {$minutes} Minuten',
'%s Include a count of entries in associated sections' =>
'%s Zeige Gesamtzahl der Einträge in verknüpften Bereichen',
'%s Pre-populate this field with today\'s date' =>
'%s Dieses Feld mit dem heutigen Datum vorbelegen.',
'Authors can set up a differing language in their profiles.' =>
'Autoren können in Ihren Profilen eine abweichende Sprache einstellen.',
'Children' =>
'Kinder',
'Choose only one. Created if does not exist' =>
'Wählen Sie nur eine Gruppe. Diese wird bei Bedarf erstellt.',
'Create Section' =>
'Bereich erstellen',
'Custom Preferences' =>
'Benutzerdefinierte Einstellungen',
'Data source created at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Data sources</a>' =>
'Diese Datenquelle wurde um %1$s erstellt. <a href="%2$s" accesskey="c">Eine neue erstellen?</a> <a href="%3$s" accesskey="a">Alle Datenquellen anzeigen.</a>',
'Data source updated at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Data sources</a>' =>
'Diese Datenquelle wurde um %1$s aktualisiert. <a href="%2$s" accesskey="c">Eine neue erstellen?</a> <a href="%3$s" accesskey="a">Alle Datenquellen anzeigen.</a>',
'Daylight savings time' =>
'Sommerzeit',
'Directory <code>%s</code> does not exist.' =>
'Der Ordner <code>%s</code> existiert nicht.',
'Edit Configuration' =>
'Einstellungen bearbeiten',
'Edit Page Confguration' =>
'Seiteneinstellungen bearbeiten',
'Edit Page Template' =>
'Seitenvorlage bearbeiten',
'Edit Template' =>
'Vorlage bearbeiten',
'It looks like you\'re trying to create an entry. Perhaps you want fields first? <a href="%s">Click here to create some.</a>' =>
'Anscheinend versuchen Sie einen Eintrag zu erstellen. Vielleicht möchten Sie zunächst Felder erstellen. <a href="%s">Klicken Sie hier, um welche zu erstellen.</a>',
'Language' =>
'Sprache',
'Local' =>
'Lokal',
'Name' =>
'Name',
'No <code>/symphony</code> directory was found at this location. Please upload the contents of Symphony\'s install package here.' =>
'An diesem Ort konnte kein Verzeichnis <code>/symphony</code> gefunden werden. Bitte laden Sie den Inhalt des Symphony-Installationspakets hierher hoch.',
'Page created at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Pages</a>' =>
'Diese Seiten wurde um %1$s erstellt. <a href="%2$s" accesskey="c">Eine neue erstellen?</a> <a href="%3$s" accesskey="a">Alle Seiten anzeigen</a>',
'Page updated at %1$s. <a href="%2$s" accesskey="c">Create another?</a> <a href="%3$s" accesskey="a">View all Pages</a>' =>
'Diese Seite wurde um %1$s aktualisiert. <a href="%2$s" accesskey="c">Eine neue erstellen?</a> <a href="%3$s" accesskey="a">Alle Seiten anzeigen</a>',
'Page updated at %s. <a href="%s" accesskey="a">View all Pages</a>' =>
'Diese Seite wurde um %1$s aktualisiert. <a href="%2$s" accesskey="a">Alle Seiten anzeigen</a>',
'System Default' =>
'Systemstandard',
'System Language' =>
'Systemsprache',
'The destination directory, <code>%s</code>, does not exist.' =>
'Der Zielordner <code>%s</code> existiert nicht.',
'The parameter <code id="output-param-name">$ds-%s</code> will be created with this field\'s value for XSLT or other data sources to use.' =>
'Der Parameter <code id="output-param-name">$ds-%s</code> wird auf Grundlage des Wertes dieses Feldes erstellt und kann im XSLT oder in anderen Datenquellen verwendet werden.',
'The Section you are looking for, <code>%s</code>, could not be found.' =>
'Der von Ihnen gesuchte Bereich <code>%s</code> konnte nicht gefunden werden.',
'User Type' =>
'Benutzertyp',
'Text Formatter' =>
'Textformatierung',
'Example XML' =>
'Beispiel-XML',
'released on' =>
'veröffentlicht am',
);
/**
* Transliterations
*/
$transliterations = array(
// Alphabetical
'/À/' => 'A', '/Á/' => 'A', '/Â/' => 'A', '/Ã/' => 'A', '/Ä/' => 'Ae',
'/Å/' => 'A', '/Ā/' => 'A', '/Ą/' => 'A', '/Ă/' => 'A', '/Æ/' => 'Ae',
'/Ç/' => 'C', '/Ć/' => 'C', '/Č/' => 'C', '/Ĉ/' => 'C', '/Ċ/' => 'C',
'/Ď/' => 'D', '/Đ/' => 'D', '/Ð/' => 'D', '/È/' => 'E', '/É/' => 'E',
'/Ê/' => 'E', '/Ë/' => 'E', '/Ē/' => 'E', '/Ę/' => 'E', '/Ě/' => 'E',
'/Ĕ/' => 'E', '/Ė/' => 'E', '/Ĝ/' => 'G', '/Ğ/' => 'G', '/Ġ/' => 'G',
'/Ģ/' => 'G', '/Ĥ/' => 'H', '/Ħ/' => 'H', '/Ì/' => 'I', '/Í/' => 'I',
'/Î/' => 'I', '/Ï/' => 'I', '/Ī/' => 'I', '/Ĩ/' => 'I', '/Ĭ/' => 'I',
'/Į/' => 'I', '/İ/' => 'I', '/IJ/' => 'Ij', '/Ĵ/' => 'J', '/Ķ/' => 'K',
'/Ł/' => 'L', '/Ľ/' => 'L', '/Ĺ/' => 'L', '/Ļ/' => 'L', '/Ŀ/' => 'L',
'/Ñ/' => 'N', '/Ń/' => 'N', '/Ň/' => 'N', '/Ņ/' => 'N', '/Ŋ/' => 'N',
'/Ò/' => 'O', '/Ó/' => 'O', '/Ô/' => 'O', '/Õ/' => 'O', '/Ö/' => 'Oe',
'/Ø/' => 'O', '/Ō/' => 'O', '/Ő/' => 'O', '/Ŏ/' => 'O', '/Œ/' => 'Oe',
'/Ŕ/' => 'R', '/Ř/' => 'R', '/Ŗ/' => 'R', '/Ś/' => 'S', '/Š/' => 'S',
'/Ş/' => 'S', '/Ŝ/' => 'S', '/Ș/' => 'S', '/Ť/' => 'T', '/Ţ/' => 'T',
'/Ŧ/' => 'T', '/Ț/' => 'T', '/Ù/' => 'U', '/Ú/' => 'U', '/Û/' => 'U',
'/Ü/' => 'Ue', '/Ū/' => 'U', '/Ů/' => 'U', '/Ű/' => 'U', '/Ŭ/' => 'U',
'/Ũ/' => 'U', '/Ų/' => 'U', '/Ŵ/' => 'W', '/Ý/' => 'Y', '/Ŷ/' => 'Y',
'/Ÿ/' => 'Y', '/Y/' => 'Y', '/Ź/' => 'Z', '/Ž/' => 'Z', '/Ż/' => 'Z',
'/Þ/' => 'T',
'/à/' => 'a', '/á/' => 'a', '/â/' => 'a', '/ã/' => 'a', '/ä/' => 'ae',
'/å/' => 'a', '/ā/' => 'a', '/ą/' => 'a', '/ă/' => 'a', '/æ/' => 'ae',
'/ç/' => 'c', '/ć/' => 'c', '/č/' => 'c', '/ĉ/' => 'c', '/ċ/' => 'c',
'/ď/' => 'd', '/đ/' => 'd', '/ð/' => 'd', '/è/' => 'e', '/é/' => 'e',
'/ê/' => 'e', '/ë/' => 'e', '/ē/' => 'e', '/ę/' => 'e', '/ě/' => 'e',
'/ĕ/' => 'e', '/ė/' => 'e', '/ĝ/' => 'g', '/ğ/' => 'g', '/ġ/' => 'g',
'/ģ/' => 'g', '/ĥ/' => 'h', '/ħ/' => 'h', '/ì/' => 'i', '/í/' => 'i',
'/î/' => 'i', '/ï/' => 'i', '/ī/' => 'i', '/ĩ/' => 'i', '/ĭ/' => 'i',
'/į/' => 'i', '/ı/' => 'i', '/ij/' => 'ij', '/ĵ/' => 'j', '/ķ/' => 'k',
'/ł/' => 'l', '/ľ/' => 'l', '/ĺ/' => 'l', '/ļ/' => 'l', '/ŀ/' => 'l',
'/ñ/' => 'n', '/ń/' => 'n', '/ň/' => 'n', '/ņ/' => 'n', '/ŋ/' => 'n',
'/ò/' => 'o', '/ó/' => 'o', '/ô/' => 'o', '/õ/' => 'o', '/ö/' => 'oe',
'/ø/' => 'o', '/ō/' => 'o', '/ő/' => 'o', '/ŏ/' => 'o', '/œ/' => 'oe',
'/ŕ/' => 'r', '/ř/' => 'r', '/ŗ/' => 'r', '/ś/' => 's', '/š/' => 's',
'/ş/' => 's', '/ŝ/' => 's', '/ș/' => 's', '/ť/' => 't', '/ţ/' => 't',
'/ŧ/' => 't', '/ț/' => 't', '/ù/' => 'u', '/ú/' => 'u', '/û/' => 'u',
'/ü/' => 'ue', '/ū/' => 'u', '/ů/' => 'u', '/ű/' => 'u', '/ŭ/' => 'u',
'/ũ/' => 'u', '/ų/' => 'u', '/ŵ/' => 'w', '/ý/' => 'y', '/ŷ/' => 'y',
'/ÿ/' => 'y', '/y/' => 'y', '/ź/' => 'z', '/ž/' => 'z', '/ż/' => 'z',
'/þ/' => 't', '/ß/' => 'ss', '/ſ/' => 'ss', '/ƒ/' => 'f', '/ĸ/' => 'k',
'/ʼn/' => 'n',
// Symbolic
'/\(/' => null, '/\)/' => null, '/,/' => null,
'/–/' => '-', '/-/' => '-', '/„/' => '"',
'/“/' => '"', '/”/' => '"', '/—/' => '-',
'/¿/' => null, '/‽/' => null, '/¡/' => null,
// Ampersands
'/^&(?!&)$/' => 'und',
'/^&(?!&)/' => 'und-',
'/&(?!&)&/' => '-und',
'/&(?!&)/' => '-und-',
);
|
semsol/arc2 | 147 | extractors/ARC2_TwitterProfilePicExtractor.php | <?php
/*
@homepage <https://github.com/semsol/arc2>
@license W3C Software License and GPL
class: ARC2 Extractor
author: Benjamin Nowack
version: 2010-11-16
*/
ARC2::inc('RDFExtractor');
/**
 * Extracts the Twitter profile picture from a parsed HTML page and emits
 * it as an RDF triple (vCard resource --mf:photo--> image URL).
 */
class ARC2_TwitterProfilePicExtractor extends ARC2_RDFExtractor {

  function __construct($a, &$caller) {
    parent::__construct($a, $caller);
  }

  /* Register the FOAF and poshRDF namespaces this extractor emits. */
  function __init() {
    parent::__init();
    $this->a['ns']['foaf'] = 'http://xmlns.com/foaf/0.1/';
    $this->a['ns']['mf'] = 'http://poshrdf.org/ns/mf#';
  }

  /* Scan the parsed node list for <img id="profile-image"> and add an
     mf:photo triple pointing at its src URL. */
  function extractRDF() {
    $template_vals = array();
    $template = '';
    foreach ($this->nodes as $node) {
      $is_img = isset($node['tag']) && ($node['tag'] == 'img');
      if ($is_img && ($this->v('id', '', $node['a']) == 'profile-image')) {
        $template_vals['vcard_id'] = $this->getDocID($node) . '#resource(side/1/2/1)';
        $template .= '?vcard_id mf:photo <' . $node['a']['src'] . '> . ';
        break;
      }
    }
    if ($template) {
      /* $node still refers to the matched img node after the break. */
      $doc = $this->getFilledTemplate($template, $template_vals, $node['doc_base']);
      $this->addTs(ARC2::getTriplesFromIndex($doc));
    }
  }

}
| <?php
/*
@homepage <https://github.com/semsol/arc2>
@license W3C Software License and GPL
class: ARC2 Extractor
author: Benjamin Nowack
version: 2010-11-16
*/
ARC2::inc('RDFExtractor');
/**
 * Extracts the Twitter profile picture from a parsed HTML page and emits
 * it as an RDF triple (vCard resource --mf:photo--> image URL).
 */
class ARC2_TwitterProfilePicExtractor extends ARC2_RDFExtractor
{
    public function __construct($a, &$caller)
    {
        parent::__construct($a, $caller);
    }

    /**
     * Registers the FOAF and poshRDF namespaces used by the extractor.
     */
    public function __init()
    {
        parent::__init();
        $this->a['ns']['foaf'] = 'http://xmlns.com/foaf/0.1/';
        $this->a['ns']['mf'] = 'http://poshrdf.org/ns/mf#';
    }

    /**
     * Looks for the <img id="profile-image"> node and, when found, adds
     * an mf:photo triple pointing at its src URL.
     */
    public function extractRDF()
    {
        $t_vals = [];
        $t = '';
        foreach ($this->nodes as $n) {
            if (isset($n['tag']) && ('img' == $n['tag']) && ('profile-image' == $this->v('id', '', $n['a']))) {
                $t_vals['vcard_id'] = $this->getDocID($n).'#resource(side/1/2/1)';
                $t .= '?vcard_id mf:photo <'.$n['a']['src'].'> . ';
                break;
            }
        }
        if ($t) {
            // $n still points at the matched node after the break above.
            $doc = $this->getFilledTemplate($t, $t_vals, $n['doc_base']);
            $this->addTs(ARC2::getTriplesFromIndex($doc));
        }
    }
}
|
pycco-docs/pycco | 113 | tests/test_pycco.py | from __future__ import absolute_import
import copy
import os
import os.path
import tempfile
import time
import pytest
import pycco.generate_index as generate_index
import pycco.main as p
from hypothesis import assume, example, given
from hypothesis.strategies import booleans, choices, lists, none, text
from pycco.languages import supported_languages
try:
from unittest.mock import patch
except ImportError:
from mock import patch
PYTHON = supported_languages['.py']
PYCCO_SOURCE = 'pycco/main.py'
FOO_FUNCTION = """def foo():\n return True"""
def get_language(choice):
    """Pick one supported language via *choice*, a callable that selects one
    element from a sequence (e.g. a hypothesis ``choices()`` value)."""
    return choice(list(supported_languages.values()))
@given(lists(text()), text())
def test_shift(fragments, default):
    """shift() pops the head in place, or returns *default* on empty input."""
    if not fragments:
        assert p.shift(fragments, default) == default
    else:
        snapshot = list(fragments)
        first = p.shift(fragments, default)
        assert [first] + fragments == snapshot
@given(text(), booleans(), text(min_size=1))
@example("/foo", True, "0")
def test_destination(filepath, preserve_paths, outdir):
    """destination() must return an ``.html`` path rooted inside *outdir*."""
    dest = p.destination(
        filepath, preserve_paths=preserve_paths, outdir=outdir)
    assert dest.startswith(outdir)
    assert dest.endswith(".html")
@given(data(), text())
def test_parse(data, source):
    """parse() returns sections keyed by code_text/docs_text for any language.

    Fix: the hypothesis ``choices()`` strategy was deprecated and removed in
    hypothesis 4.0; draw the language through ``data()``/``sampled_from``
    instead.
    """
    lang = data.draw(sampled_from(list(supported_languages.values())))
    parsed = p.parse(source, lang)
    for section in parsed:
        assert {"code_text", "docs_text"} == set(section.keys())
def test_skip_coding_directive():
    """PEP 263 coding lines must be stripped from the parsed code output."""
    source = "# -*- coding: utf-8 -*-\n" + FOO_FUNCTION
    parsed = p.parse(source, PYTHON)
    for section in parsed:
        assert "coding" not in section['code_text']


def test_multi_line_leading_spaces():
    """Comment markers are removed consistently across a multi-line comment."""
    source = "# This is a\n# comment that\n# is indented\n"
    source += FOO_FUNCTION
    parsed = p.parse(source, PYTHON)
    # The resulting comment has leading spaces stripped out.
    assert parsed[0]["docs_text"] == "This is a\ncomment that\nis indented\n"


def test_comment_with_only_cross_ref():
    """A docstring that is only a [[...]] cross reference becomes an HTML link."""
    source = (
        '''# ==Link Target==\n\ndef test_link():\n    """[[testing.py#link-target]]"""\n    pass'''
    )
    sections = p.parse(source, PYTHON)
    p.highlight(sections, PYTHON, outdir=tempfile.gettempdir())
    assert sections[1][
        'docs_html'] == '<p><a href="testing.html#link-target">testing.py</a></p>'
@given(text(), booleans()) if False else @given(text(), text())
def test_get_language_specify_language(source, code):
    """An explicit language_name overrides detection; unknown names raise."""
    assert p.get_language(
        source, code, language_name="python") == supported_languages['.py']
    with pytest.raises(ValueError):
        p.get_language(source, code, language_name="non-existent")


@given(text() | none())
def test_get_language_bad_source(source):
    """Detection falls back to the code body when the path is unhelpful."""
    code = "#!/usr/bin/python\n"
    code += FOO_FUNCTION
    assert p.get_language(source, code) == PYTHON
    with pytest.raises(ValueError) as e:
        assert p.get_language(source, "badlang")
    msg = "Can't figure out the language!"
    try:
        assert e.value.message == msg
    except AttributeError:
        # Python 3 exceptions have no .message attribute.
        assert e.value.args[0] == msg


@given(text() | none())
def test_get_language_bad_code(code):
    """An unparseable code body still detects the language from the filename."""
    source = "test.py"
    assert p.get_language(source, code) == PYTHON
@given(text(max_size=64))
def test_ensure_directory(dir_name):
    """ensure_directory() must create the requested directory tree."""
    tempdir = os.path.join(tempfile.gettempdir(),
                           str(int(time.time())), dir_name)
    # Use sanitization from function, but only for housekeeping. We
    # pass in the unsanitized string to the function.
    safe_name = p.remove_control_chars(dir_name)
    # NOTE(review): os.access() on a not-yet-existing path is almost always
    # False, so this body is rarely entered -- confirm the guard's intent.
    if not os.path.isdir(safe_name) and os.access(safe_name, os.W_OK):
        p.ensure_directory(tempdir)
        assert os.path.isdir(safe_name)
def test_ensure_multiline_string_support():
    """Multi-line string literals must not be mistaken for comment sections."""
    code = '''x = """
multi-line-string
"""
y = z # comment
# *comment with formatting*
def x():
    """multi-line-string
    """'''
    docs_code_tuple_list = p.parse(code, PYTHON)
    assert docs_code_tuple_list[0]['docs_text'] == ''
    assert "#" not in docs_code_tuple_list[1]['docs_text']


def test_indented_block():
    """An indented block inside a docstring renders as <pre> HTML."""
    code = '''"""To install Pycco, simply
    pip install pycco
"""
'''
    parsed = p.parse(code, PYTHON)
    highlighted = p.highlight(parsed, PYTHON, outdir=tempfile.gettempdir())
    pre_block = highlighted[0]['docs_html']
    assert '<pre>' in pre_block
    assert '</pre>' in pre_block


def test_generate_documentation():
    """Smoke test: documenting pycco's own source must not raise."""
    p.generate_documentation(PYCCO_SOURCE, outdir=tempfile.gettempdir())
@given(booleans(), booleans(), data())
def test_process(preserve_paths, index, data):
    """process() must succeed for every language name and flag combination.

    Fix: the hypothesis ``choices()`` strategy was deprecated and removed in
    hypothesis 4.0; draw the language name via ``data()``/``sampled_from``.
    """
    lang_name = data.draw(
        sampled_from([lang["name"] for lang in supported_languages.values()]))
    p.process([PYCCO_SOURCE], preserve_paths=preserve_paths,
              index=index,
              outdir=tempfile.gettempdir(),
              language=lang_name)
@patch('pygments.lexers.guess_lexer')
def test_process_skips_unknown_languages(mock_guess_lexer):
    """With skip=True unknown languages are ignored; otherwise they raise."""
    class Name:
        name = 'this language does not exist'
    mock_guess_lexer.return_value = Name()
    with pytest.raises(ValueError):
        p.process(['LICENSE'], outdir=tempfile.gettempdir(), skip=False)
    p.process(['LICENSE'], outdir=tempfile.gettempdir(), skip=True)


# Strategies for well-formed path fragments: non-empty, bounded length.
one_or_more_chars = text(min_size=1, max_size=255)
paths = lists(one_or_more_chars, min_size=1, max_size=30)


@given(
    lists(paths, min_size=1, max_size=255),
    lists(one_or_more_chars, min_size=1, max_size=255)
)
def test_generate_index(path_lists, outdir_list):
    """generate_index() must cope with arbitrarily shaped path lists."""
    file_paths = [os.path.join(*path_list) for path_list in path_lists]
    outdir = os.path.join(*outdir_list)
    generate_index.generate_index(file_paths, outdir=outdir)
def test_flatten_sources(tmpdir):
    """_flatten_sources expands a directory into every file beneath it."""
    expected = []

    # A file directly in the base directory.
    root_file = tmpdir.join("test.py")
    root_file.write("#!/bin/env python")
    expected.append(str(root_file))

    # One file in each of several subdirectories.
    for name in ["foo", "bar", "buzz"]:
        nested = tmpdir.mkdir(name).join("test.py")
        nested.write("#!/bin/env python")
        expected.append(str(nested))

    # Flattening the base directory must surface every file exactly once.
    flattened = p._flatten_sources([str(tmpdir)])
    assert sorted(flattened) == sorted(expected)
| from __future__ import absolute_import
import copy
import os
import os.path
import tempfile
import time
import pytest
import pycco.generate_index as generate_index
import pycco.main as p
from hypothesis import assume, example, given
from hypothesis.strategies import booleans, lists, none, text, sampled_from, data
from pycco.languages import supported_languages
try:
from unittest.mock import patch
except ImportError:
from mock import patch
PYTHON = supported_languages['.py']
PYCCO_SOURCE = 'pycco/main.py'
FOO_FUNCTION = """def foo():\n return True"""
def get_language(data):
    """Draw one supported language from a hypothesis ``data()`` fixture."""
    return data.draw(sampled_from(list(supported_languages.values())))
@given(lists(text()), text())
def test_shift(fragments, default):
if fragments == []:
assert p.shift(fragments, default) == default
else:
fragments2 = copy.copy(fragments)
head = p.shift(fragments, default)
assert [head] + fragments == fragments2
@given(text(), booleans(), text(min_size=1))
@example("/foo", True, "0")
def test_destination(filepath, preserve_paths, outdir):
dest = p.destination(
filepath, preserve_paths=preserve_paths, outdir=outdir)
assert dest.startswith(outdir)
assert dest.endswith(".html")
@given(data(), text())
def test_parse(data, source):
    """parse() returns sections keyed by code_text/docs_text for any language."""
    lang = get_language(data)
    parsed = p.parse(source, lang)
    for s in parsed:
        assert {"code_text", "docs_text"} == set(s.keys())
def test_skip_coding_directive():
source = "# -*- coding: utf-8 -*-\n" + FOO_FUNCTION
parsed = p.parse(source, PYTHON)
for section in parsed:
assert "coding" not in section['code_text']
def test_multi_line_leading_spaces():
source = "# This is a\n# comment that\n# is indented\n"
source += FOO_FUNCTION
parsed = p.parse(source, PYTHON)
# The resulting comment has leading spaces stripped out.
assert parsed[0]["docs_text"] == "This is a\ncomment that\nis indented\n"
def test_comment_with_only_cross_ref():
source = (
'''# ==Link Target==\n\ndef test_link():\n """[[testing.py#link-target]]"""\n pass'''
)
sections = p.parse(source, PYTHON)
p.highlight(sections, PYTHON, outdir=tempfile.gettempdir())
assert sections[1][
'docs_html'] == '<p><a href="testing.html#link-target">testing.py</a></p>'
@given(text(), text())
def test_get_language_specify_language(source, code):
assert p.get_language(
source, code, language_name="python") == supported_languages['.py']
with pytest.raises(ValueError):
p.get_language(source, code, language_name="non-existent")
@given(text() | none())
def test_get_language_bad_source(source):
code = "#!/usr/bin/python\n"
code += FOO_FUNCTION
assert p.get_language(source, code) == PYTHON
with pytest.raises(ValueError) as e:
assert p.get_language(source, "badlang")
msg = "Can't figure out the language!"
try:
assert e.value.message == msg
except AttributeError:
assert e.value.args[0] == msg
@given(text() | none())
def test_get_language_bad_code(code):
source = "test.py"
assert p.get_language(source, code) == PYTHON
@given(text(max_size=64))
def test_ensure_directory(dir_name):
tempdir = os.path.join(tempfile.gettempdir(),
str(int(time.time())), dir_name)
# Use sanitization from function, but only for housekeeping. We
# pass in the unsanitized string to the function.
safe_name = p.remove_control_chars(dir_name)
if not os.path.isdir(safe_name) and os.access(safe_name, os.W_OK):
p.ensure_directory(tempdir)
assert os.path.isdir(safe_name)
def test_ensure_multiline_string_support():
code = '''x = """
multi-line-string
"""
y = z # comment
# *comment with formatting*
def x():
"""multi-line-string
"""'''
docs_code_tuple_list = p.parse(code, PYTHON)
assert docs_code_tuple_list[0]['docs_text'] == ''
assert "#" not in docs_code_tuple_list[1]['docs_text']
def test_indented_block():
code = '''"""To install Pycco, simply
pip install pycco
"""
'''
parsed = p.parse(code, PYTHON)
highlighted = p.highlight(parsed, PYTHON, outdir=tempfile.gettempdir())
pre_block = highlighted[0]['docs_html']
assert '<pre>' in pre_block
assert '</pre>' in pre_block
def test_generate_documentation():
p.generate_documentation(PYCCO_SOURCE, outdir=tempfile.gettempdir())
@given(booleans(), booleans(), data())
def test_process(preserve_paths, index, data):
    """process() must succeed for every language name and flag combination."""
    lang_name = data.draw(sampled_from([l["name"] for l in supported_languages.values()]))
    p.process([PYCCO_SOURCE], preserve_paths=preserve_paths,
              index=index,
              outdir=tempfile.gettempdir(),
              language=lang_name)
@patch('pygments.lexers.guess_lexer')
def test_process_skips_unknown_languages(mock_guess_lexer):
class Name:
name = 'this language does not exist'
mock_guess_lexer.return_value = Name()
with pytest.raises(ValueError):
p.process(['LICENSE'], outdir=tempfile.gettempdir(), skip=False)
p.process(['LICENSE'], outdir=tempfile.gettempdir(), skip=True)
one_or_more_chars = text(min_size=1, max_size=255)
paths = lists(one_or_more_chars, min_size=1, max_size=30)
@given(
lists(paths, min_size=1, max_size=255),
lists(one_or_more_chars, min_size=1, max_size=255)
)
def test_generate_index(path_lists, outdir_list):
file_paths = [os.path.join(*path_list) for path_list in path_lists]
outdir = os.path.join(*outdir_list)
generate_index.generate_index(file_paths, outdir=outdir)
def test_flatten_sources(tmpdir):
sources = [str(tmpdir)]
expected_sources = []
# Setup the base dir
td = tmpdir.join("test.py")
td.write("#!/bin/env python")
expected_sources.append(str(td))
# Make some more directories, each with a file present
for d in ["foo", "bar", "buzz"]:
dd = tmpdir.mkdir(d)
dummy_file = dd.join("test.py")
dummy_file.write("#!/bin/env python")
expected_sources.append(str(dummy_file))
# Get the flattened version of the base directory
flattened = p._flatten_sources(sources)
# Make sure that the lists are the same
assert sorted(expected_sources) == sorted(flattened)
|
rc0/mairix | 2 | search.c | /*
mairix - message index builder and finder for maildir folders.
**********************************************************************
* Copyright (C) Richard P. Curnow 2002,2003,2004,2005,2006
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
**********************************************************************
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <time.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <unistd.h>
#include <assert.h>
#include <dirent.h>
#include <errno.h>
/* Lame fix for systems where NAME_MAX isn't defined after including the above
* set of .h files (Solaris, FreeBSD so far). Probably grossly oversized but
* it'll do. */
#if !defined(NAME_MAX)
#define NAME_MAX 4096
#endif
#include "mairix.h"
#include "reader.h"
#include "memmac.h"
/* Set hits[i] = 1 for every message i in the delta-encoded posting list of
 * token hit_tok in table tt.  Message indices are stored as variable-length
 * increments terminated by an 0xff byte. */
static void mark_hits_in_table(struct read_db *db, struct toktable_db *tt, int hit_tok, char *hits)/*{{{*/
{
  /* mark files containing matched token */
  int idx;
  unsigned char *j, *first_char;
  idx = 0;
  first_char = (unsigned char *) db->data + tt->enc_offsets[hit_tok];
  for (j = first_char; *j != 0xff; ) {
    idx += read_increment(&j);  /* decode next index delta */
    assert(idx < db->n_msgs);
    hits[idx] = 1;
  }
}
/*}}}*/
/* Same as mark_hits_in_table, but for two-list token tables (toktable2_db),
 * walking the first encoded posting list (enc1_offsets). */
static void mark_hits_in_table2(struct read_db *db, struct toktable2_db *tt, int hit_tok, char *hits)/*{{{*/
{
  /* mark files containing matched token */
  int idx;
  unsigned char *j, *first_char;
  idx = 0;
  first_char = (unsigned char *) db->data + tt->enc1_offsets[hit_tok];
  for (j = first_char; *j != 0xff; ) {
    idx += read_increment(&j);  /* decode next index delta */
    assert(idx < db->n_msgs);
    hits[idx] = 1;
  }
}
/*}}}*/
/* See "Fast text searching with errors, Sun Wu and Udi Manber, TR 91-11,
University of Arizona. I have been informed that this algorithm is NOT
patented. This implementation of it is entirely the work of Richard P.
Curnow - I haven't looked at any related source (webglimpse, agrep etc) in
writing this.
*/
/* Build the bit-parallel (Wu-Manber) match vector for 'substring'.
 * On return, a[c] has bit i CLEARED iff substring[i] == c, and *hit has
 * only bit (len-1) cleared, so "~(state | *hit) != 0" detects a completed
 * match.  Patterns are limited to 31 characters so the state fits one
 * unsigned long word with room to shift; empty patterns are rejected. */
static void build_match_vector(char *substring, unsigned long *a, unsigned long *hit)/*{{{*/
{
  int len;
  char *p;
  int i;
  len = strlen(substring);
  if (len > 31 || len == 0) {
    fprintf(stderr, "Can't match patterns longer than 31 characters or empty\n");
    unlock_and_exit(2);
  }
  memset(a, 0xff, 256 * sizeof(unsigned long));
  for (p=substring, i=0; *p; p++, i++) {
    a[(unsigned int) *(unsigned char *)p] &= ~(1UL << i);
  }
  *hit = ~(1UL << (len-1));
  return;
}
/*}}}*/
/* Exact (zero-error) bit-parallel substring scan of 'token' against the
 * match vector 'a' / completion mask 'hit' built by build_match_vector().
 * With left_anchor set, matches may only begin at the first character.
 * Returns 1 on a match, 0 otherwise. */
static int substring_match_0(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  unsigned long state = ~0UL;
  unsigned long start_mask = 0UL;
  unsigned long next_start_mask = left_anchor ? 0x1UL : 0x0UL;
  char *cursor;

  for (cursor = token; *cursor; cursor++) {
    unsigned int ch = (unsigned int) *(unsigned char *) cursor;
    state = (state << 1) | start_mask | a[ch];
    if (~(state | hit)) {
      return 1;  /* the pattern's last position matched: hit */
    }
    /* After the first character, anchored scans forbid new match starts. */
    start_mask = next_start_mask;
  }
  return 0;
}
/*}}}*/
/* Bit-parallel scan of 'token' allowing up to 1 error (insertion, deletion
 * or substitution).  r0 is the exact-match state; r1 is the 1-error state,
 * combining the shift/match transition with the three error transitions
 * derived from the previous and current exact states (Wu-Manber). */
static int substring_match_1(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  int got_hit=0;
  char *p;
  unsigned long r0, r1, nr0;
  unsigned long anchor, anchor1;
  r0 = ~0;
  r1 = r0<<1;
  got_hit = 0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    nr0 = (r0<<1) | anchor | a[idx];
    r1 = ((r1<<1) | anchor | a[idx]) & ((r0 & nr0) << 1) & r0;
    r0 = nr0;
    if (~((r0 & r1) | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* As substring_match_1, but allowing up to 2 errors: r2 extends the error
 * cascade one level further.  Kept as a specialised variant so the state
 * words stay in registers. */
static int substring_match_2(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  int got_hit=0;
  char *p;
  unsigned long r0, r1, r2, nr0, nr1;
  unsigned long anchor, anchor1;
  r0 = ~0;
  r1 = r0<<1;
  r2 = r1<<1;
  got_hit = 0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    nr0 = (r0<<1) | anchor | a[idx];
    nr1 = ((r1<<1) | anchor | a[idx]) & ((r0 & nr0) << 1) & r0;
    r2 = ((r2<<1) | anchor | a[idx]) & ((r1 & nr1) << 1) & r1;
    r0 = nr0;
    r1 = nr1;
    if (~((r0 & r1 & r2) | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* As substring_match_2, but allowing up to 3 errors (r3 extends the error
 * cascade one level further). */
static int substring_match_3(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  int got_hit=0;
  char *p;
  unsigned long r0, r1, r2, r3, nr0, nr1, nr2;
  unsigned long anchor, anchor1;
  r0 = ~0;
  r1 = r0<<1;
  r2 = r1<<1;
  r3 = r2<<1;
  got_hit = 0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    nr0 = (r0<<1) | anchor | a[idx];
    nr1 = ((r1<<1) | anchor | a[idx]) & ((r0 & nr0) << 1) & r0;
    nr2 = ((r2<<1) | anchor | a[idx]) & ((r1 & nr1) << 1) & r1;
    r3 = ((r3<<1) | anchor | a[idx]) & ((r2 & nr2) << 1) & r2;
    r0 = nr0;
    r1 = nr1;
    r2 = nr2;
    if (~((r0 & r1 & r2 & r3) | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* Bit-parallel scan allowing up to max_errors errors (general case; the
 * specialised 0..3-error variants above keep their states in registers).
 * r and nr are caller-provided scratch arrays of (max_errors+1) words.
 * Returns 1 as soon as any error level completes a match, 0 otherwise.
 *
 * Fix: 'compo' was declared unsigned int while every state word is
 * unsigned long; on LP64 targets the assignment silently truncated the
 * combined state.  Harmless today only because patterns are capped at 31
 * characters, but a latent width bug -- use the full word. */
static int substring_match_general(unsigned long *a, unsigned long hit, int left_anchor, char *token, int max_errors, unsigned long *r, unsigned long *nr)/*{{{*/
{
  int got_hit=0;
  char *p;
  int j;
  unsigned long anchor, anchor1;
  r[0] = ~0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for (j=1; j<=max_errors; j++) {
    r[j] = r[j-1] << 1;
  }
  got_hit = 0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    int d;
    unsigned long compo;  /* AND of all error-level states this step */
    compo = nr[0] = ((r[0]<<1) | anchor | a[idx]);
    for (d=1; d<=max_errors; d++) {
      nr[d] = ((r[d]<<1) | anchor | a[idx])
        & ((r[d-1] & nr[d-1])<<1)
        & r[d-1];
      compo &= nr[d];
    }
    memcpy(r, nr, (1 + max_errors) * sizeof(unsigned long));
    if (~(compo | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* Approximate-match 'substring' (up to max_errors errors; left_anchor pins
 * the match to the start of a token) against every token in table tt, and
 * mark every message containing a matching token. */
static void match_substring_in_table(struct read_db *db, struct toktable_db *tt, char *substring, int max_errors, int left_anchor, char *hits)/*{{{*/
{
  int i, got_hit;
  unsigned long a[256];
  unsigned long *r=NULL, *nr=NULL;
  unsigned long hit;
  char *token;
  build_match_vector(substring, a, &hit);
  got_hit = 0;
  if (max_errors > 3) {
    /* Only the general matcher needs heap scratch state. */
    r = new_array(unsigned long, 1 + max_errors);
    nr = new_array(unsigned long, 1 + max_errors);
  }
  for (i=0; i<tt->n; i++) {
    token = db->data + tt->tok_offsets[i];
    switch (max_errors) {
      /* Optimise common cases for few errors to allow optimizer to keep bitmaps
       * in registers */
      case 0:
        got_hit = substring_match_0(a, hit, left_anchor, token);
        break;
      case 1:
        got_hit = substring_match_1(a, hit, left_anchor, token);
        break;
      case 2:
        got_hit = substring_match_2(a, hit, left_anchor, token);
        break;
      case 3:
        got_hit = substring_match_3(a, hit, left_anchor, token);
        break;
      default:
        got_hit = substring_match_general(a, hit, left_anchor, token, max_errors, r, nr);
        break;
    }
    if (got_hit) {
      mark_hits_in_table(db, tt, i, hits);
    }
  }
  if (r) free(r);
  if (nr) free(nr);
}
/*}}}*/
/* Clone of match_substring_in_table for two-list token tables
 * (toktable2_db); differs only in the hit-marking helper used. */
static void match_substring_in_table2(struct read_db *db, struct toktable2_db *tt, char *substring, int max_errors, int left_anchor, char *hits)/*{{{*/
{
  int i, got_hit;
  unsigned long a[256];
  unsigned long *r=NULL, *nr=NULL;
  unsigned long hit;
  char *token;
  build_match_vector(substring, a, &hit);
  got_hit = 0;
  if (max_errors > 3) {
    /* Only the general matcher needs heap scratch state. */
    r = new_array(unsigned long, 1 + max_errors);
    nr = new_array(unsigned long, 1 + max_errors);
  }
  for (i=0; i<tt->n; i++) {
    token = db->data + tt->tok_offsets[i];
    switch (max_errors) {
      /* Optimise common cases for few errors to allow optimizer to keep bitmaps
       * in registers */
      case 0:
        got_hit = substring_match_0(a, hit, left_anchor, token);
        break;
      case 1:
        got_hit = substring_match_1(a, hit, left_anchor, token);
        break;
      case 2:
        got_hit = substring_match_2(a, hit, left_anchor, token);
        break;
      case 3:
        got_hit = substring_match_3(a, hit, left_anchor, token);
        break;
      default:
        got_hit = substring_match_general(a, hit, left_anchor, token, max_errors, r, nr);
        break;
    }
    if (got_hit) {
      mark_hits_in_table2(db, tt, i, hits);
    }
  }
  if (r) free(r);
  if (nr) free(nr);
}
/*}}}*/
/* Approximate-match 'substring' against each message's path: the file path
 * for maildir/MH messages, or the containing mbox's path for mbox messages.
 * Dead messages never match.  Unlike the token-table matchers, this writes
 * hits[i] directly (0 or 1) per message. */
static void match_substring_in_paths(struct read_db *db, char *substring, int max_errors, int left_anchor, char *hits)/*{{{*/
{
  int i;
  unsigned long a[256];
  unsigned long *r=NULL, *nr=NULL;
  unsigned long hit;
  build_match_vector(substring, a, &hit);
  if (max_errors > 3) {
    /* Only the general matcher needs heap scratch state. */
    r = new_array(unsigned long, 1 + max_errors);
    nr = new_array(unsigned long, 1 + max_errors);
  }
  for (i=0; i<db->n_msgs; i++) {
    char *token = NULL;
    unsigned int mbix, msgix;
    switch (rd_msg_type(db, i)) {
      case DB_MSG_FILE:
        token = db->data + db->path_offsets[i];
        break;
      case DB_MSG_MBOX:
        decode_mbox_indices(db->path_offsets[i], &mbix, &msgix);
        token = db->data + db->mbox_paths_table[mbix];
        break;
      case DB_MSG_DEAD:
        hits[i] = 0; /* never match on dead paths */
        goto next_message;
    }
    assert(token);
    switch (max_errors) {
      /* Optimise common cases for few errors to allow optimizer to keep bitmaps
       * in registers */
      case 0:
        hits[i] = substring_match_0(a, hit, left_anchor, token);
        break;
      case 1:
        hits[i] = substring_match_1(a, hit, left_anchor, token);
        break;
      case 2:
        hits[i] = substring_match_2(a, hit, left_anchor, token);
        break;
      case 3:
        hits[i] = substring_match_3(a, hit, left_anchor, token);
        break;
      default:
        hits[i] = substring_match_general(a, hit, left_anchor, token, max_errors, r, nr);
        break;
    }
next_message:
    (void) 0;
  }
  if (r) free(r);
  if (nr) free(nr);
}
/*}}}*/
/* Mark all messages containing a token exactly equal to 'key' in table tt
 * (linear scan over the token table). */
static void match_string_in_table(struct read_db *db, struct toktable_db *tt, char *key, char *hits)/*{{{*/
{
  /* TODO : replace with binary search? */
  int i;
  for (i=0; i<tt->n; i++) {
    if (!strcmp(key, db->data + tt->tok_offsets[i])) {
      /* get all matching files */
      mark_hits_in_table(db, tt, i, hits);
    }
  }
}
/*}}}*/
/* Clone of match_string_in_table for two-list token tables (toktable2_db). */
static void match_string_in_table2(struct read_db *db, struct toktable2_db *tt, char *key, char *hits)/*{{{*/
{
  /* TODO : replace with binary search? */
  int i;
  for (i=0; i<tt->n; i++) {
    if (!strcmp(key, db->data + tt->tok_offsets[i])) {
      /* get all matching files */
      mark_hits_in_table2(db, tt, i, hits);
    }
  }
}
/*}}}*/
/* Parse a message size expression such as "1500", "32k" or "2M" into bytes.
 * Returns -1 (after printing a diagnostic) if no leading integer exists.
 * sscanf stops at the first non-digit, so trailing text beyond an optional
 * k/K/m/M suffix is ignored. */
static int parse_size_expr(char *x)/*{{{*/
{
  int result;
  int n;
  if (1 == sscanf(x, "%d%n", &result, &n)) {
    x += n;
    switch (*x) {
      case 'k':
      case 'K':
        result <<= 10;
        break;
      case 'm':
      case 'M':
        result <<= 20;
        break;
      default:
        break;
    }
    return result;
  } else {
    fprintf(stderr, "Could not parse message size expression <%s>\n", x);
    return -1;
  }
}
/*}}}*/
/* Split a size range argument ("min-", "-max", "min-max", or a bare
 * maximum, optionally preceded by ':') into its endpoints.
 * *has_start / *has_end report which endpoints were present.
 *
 * Fix: the no-dash branch re-parsed the original size_expr instead of x,
 * so a leading ':' (already skipped into x) made the parse fail.
 * Also simplified: parse_size_expr stops at the first non-numeric /
 * non-suffix character, so the start expression can be parsed in place
 * and the old heap-allocated copies are unnecessary. */
static void parse_size_range(char *size_expr, int *has_start, int *start, int *has_end, int *end)/*{{{*/
{
  char *x = size_expr;
  char *dash;
  if (*x == ':') x++;
  dash = strchr(x, '-');
  *has_start = *has_end = 0;
  if (dash) {
    if (dash > x) {
      /* "NNk-..." : the suffix (if any) directly follows the digits, so
       * parsing in place stops before the dash. */
      *start = parse_size_expr(x);
      *has_start = 1;
    }
    if (dash[1]) { /* dash not at end of arg */
      *end = parse_size_expr(dash + 1);
      *has_end = 1;
    }
  } else {
    *has_start = 0;
    *end = parse_size_expr(x);
    *has_end = 1;
  }
  return;
}
/*}}}*/
/* Mark hits[i] = 1 for every message whose size lies inside the range given
 * by size_expr.  Note both bounds are EXCLUSIVE (> start, < end), and the
 * endpoints are swapped if the user supplied them backwards. */
static void find_size_matches_in_table(struct read_db *db, char *size_expr, char *hits)/*{{{*/
{
  int start, end;
  int has_start, has_end, start_cond, end_cond;
  int i;
  start = end = -1; /* avoid compiler warning about uninitialised variables. */
  parse_size_range(size_expr, &has_start, &start, &has_end, &end);
  if (has_start && has_end) {
    /* Allow user to put the endpoints in backwards */
    if (start > end) {
      int temp = start;
      start = end;
      end = temp;
    }
  }
  for (i=0; i<db->n_msgs; i++) {
    start_cond = has_start ? (db->size_table[i] > start) : 1;
    end_cond = has_end ? (db->size_table[i] < end ) : 1;
    if (start_cond && end_cond) {
      hits[i] = 1;
    }
  }
}
/*}}}*/
/* Mark hits[i] = 1 for every message whose date lies inside the range
 * parsed from date_expr.  As with sizes, both bounds are EXCLUSIVE and
 * reversed endpoints are swapped.  A bad date expression exits with
 * status 2. */
static void find_date_matches_in_table(struct read_db *db, char *date_expr, char *hits)/*{{{*/
{
  time_t start, end;
  int has_start, has_end, start_cond, end_cond;
  int i;
  int status;
  status = scan_date_string(date_expr, &start, &has_start, &end, &has_end);
  if (status) {
    unlock_and_exit (2);
  }
  if (has_start && has_end) {
    /* Allow user to put the endpoints in backwards */
    if (start > end) {
      time_t temp = start;
      start = end;
      end = temp;
    }
  }
  for (i=0; i<db->n_msgs; i++) {
    start_cond = has_start ? (db->date_table[i] > start) : 1;
    end_cond = has_end ? (db->date_table[i] < end ) : 1;
    if (start_cond && end_cond) {
      hits[i] = 1;
    }
  }
}
/*}}}*/
/* Mark messages whose Seen/Replied/Flagged bits satisfy flag_expr.
 * Syntax: any of the letters s/S, r/R, f/F, each optionally preceded by
 * '-' to negate, e.g. "s-f" = seen AND not flagged.  Requiring and
 * forbidding the same flag matches nothing; unknown characters only
 * produce a warning. */
static void find_flag_matches_in_table(struct read_db *db, char *flag_expr, char *hits)/*{{{*/
{
  int pos_seen, neg_seen;
  int pos_replied, neg_replied;
  int pos_flagged, neg_flagged;
  int negate;
  char *p;
  int i;
  negate = 0;
  pos_seen = neg_seen = 0;
  pos_replied = neg_replied = 0;
  pos_flagged = neg_flagged = 0;
  for (p=flag_expr; *p; p++) {
    switch (*p) {
      case '-':
        /* applies to the next flag letter only */
        negate = 1;
        break;
      case 's':
      case 'S':
        if (negate) neg_seen = 1;
        else pos_seen = 1;
        negate = 0;
        break;
      case 'r':
      case 'R':
        if (negate) neg_replied = 1;
        else pos_replied = 1;
        negate = 0;
        break;
      case 'f':
      case 'F':
        if (negate) neg_flagged = 1;
        else pos_flagged = 1;
        negate = 0;
        break;
      default:
        fprintf(stderr, "Did not understand the character '%c' (0x%02x) in the flags argument F:%s\n",
            isprint(*p) ? *p : '.',
            (int) *(unsigned char *) p,
            flag_expr);
        break;
    }
  }
  /* A message matches only if every requested condition holds. */
  for (i=0; i<db->n_msgs; i++) {
    if ((!pos_seen || (db->msg_type_and_flags[i] & FLAG_SEEN)) &&
        (!neg_seen || !(db->msg_type_and_flags[i] & FLAG_SEEN)) &&
        (!pos_replied || (db->msg_type_and_flags[i] & FLAG_REPLIED)) &&
        (!neg_replied || !(db->msg_type_and_flags[i] & FLAG_REPLIED)) &&
        (!pos_flagged || (db->msg_type_and_flags[i] & FLAG_FLAGGED)) &&
        (!neg_flagged || !(db->msg_type_and_flags[i] & FLAG_FLAGGED))) {
      hits[i] = 1;
    }
  }
}
/*}}}*/
/* Build a unique maildir-style filename under output_dir for search result
 * 'token', placed in new/ or cur/, appending a ":2,FRS" info suffix to
 * mirror the source message's Flagged/Replied/Seen state.  Caller frees
 * the returned string. */
static char *mk_maildir_path(int token, char *output_dir, int is_in_new,
    int is_seen, int is_replied, int is_flagged)/*{{{*/
{
  char *result;
  char uniq_buf[48];
  int len;
  len = strlen(output_dir) + 64; /* oversize */
  result = new_array(char, len + 1 + sizeof(":2,FRS"));
  strcpy(result, output_dir);
  strcat(result, is_in_new ? "/new/" : "/cur/");
  sprintf(uniq_buf, "123456789.%d.mairix", token);
  strcat(result, uniq_buf);
  if (is_seen || is_replied || is_flagged) {
    strcat(result, ":2,");
  }
  /* Maildir info flags must appear in ASCII order: F, R, S. */
  if (is_flagged) strcat(result, "F");
  if (is_replied) strcat(result, "R");
  if (is_seen) strcat(result, "S");
  return result;
}
/*}}}*/
/* Build "<output_dir>/<token+1>" for an MH-style result folder.  Caller
 * frees the returned string.
 *
 * Fix: uniq_buf was 8 bytes and the result allocation strlen+10, but a
 * 32-bit message number formats to up to 10 digits plus NUL ('/' + 10 + 1
 * = 12 extra bytes needed) -- both could overflow for large indices. */
static char *mk_mh_path(int token, char *output_dir)/*{{{*/
{
  char *result;
  char uniq_buf[16]; /* up to 11 digits (incl. sign) + NUL */
  int len;
  len = strlen(output_dir) + 16; /* oversize: '/' + digits + NUL */
  result = new_array(char, len);
  strcpy(result, output_dir);
  strcat(result, "/");
  sprintf(uniq_buf, "%d", token+1);
  strcat(result, uniq_buf);
  return result;
}
/*}}}*/
/* Return 1 iff the penultimate path component of p is "new" -- i.e. the
 * file lives in a maildir's new/ subdirectory -- and 0 otherwise.
 * Paths with fewer than two leading components (e.g. "new/x" or a
 * bare filename) never qualify. */
static int looks_like_maildir_new_p(const char *p)/*{{{*/
{
  const char *end, *last_slash, *prev_slash;

  /* Walk to the terminating NUL, then back to the final '/'. */
  end = p;
  while (*end) end++;
  last_slash = end;
  while ((last_slash > p) && (*last_slash != '/')) last_slash--;
  if (last_slash <= p) return 0;   /* no directory component */

  /* Back up to the '/' that starts the penultimate component. */
  prev_slash = last_slash - 1;
  while ((prev_slash > p) && (*prev_slash != '/')) prev_slash--;
  if (prev_slash <= p) return 0;   /* penultimate component is unslashed */

  return strncmp(prev_slash, "/new/", 5) ? 0 : 1;
}
/*}}}*/
/* Link new_link to link_target in the results folder, honouring the global
 * do_hardlinks option: hard link when set, symlink otherwise.  Failures
 * are only reported (in verbose mode); a partial result set is acceptable.
 *
 * Fix: the old condition "(!do_hardlinks && symlink(...) < 0) || link(...)"
 * fell through to link() even after a SUCCESSFUL symlink, creating a
 * spurious EEXIST failure (and misleading perror output) per message. */
static void create_symlink(char *link_target, char *new_link)/*{{{*/
{
  int rc;
  if (do_hardlinks) {
    rc = link(link_target, new_link);
  } else {
    rc = symlink(link_target, new_link);
  }
  if (rc < 0) {
    if (verbose) {
      perror(do_hardlinks ? "link" : "symlink");
      fprintf(stderr, "Failed path <%s> -> <%s>\n", link_target, new_link);
    }
  }
}
/*}}}*/
/* Ensure the message just written to 'out' ends with a blank line, as
 * required between mbox messages.  'data'/'len' are the message bytes just
 * emitted; append one or two newlines depending on how it ended. */
static void mbox_terminate(const unsigned char *data, int len, FILE *out)/*{{{*/
{
  if (len == 0)
    fputs("\n", out);
  else if (len == 1) {
    if (data[0] != '\n')
      fputs("\n", out);
  }
  else if (data[len-1] != '\n')
    fputs("\n\n", out);   /* no trailing newline at all: need both */
  else if (data[len-2] != '\n')
    fputs("\n", out);     /* one trailing newline: add the blank line */
}
/*}}}*/
static void append_file_to_mbox(const char *path, FILE *out)/*{{{*/
{
  /* Append the single-message file at <path> to the open mbox stream
   * <out>, prefixed by a synthetic "From " separator line and an
   * X-source-folder header recording where it came from.  Does nothing
   * if the file cannot be mapped. */
  unsigned char *map;
  int map_len;
  create_ro_mapping(path, &map, &map_len);
  if (!map) return;
  fprintf(out, "From mairix@mairix Mon Jan 1 12:34:56 1970\n");
  fprintf(out, "X-source-folder: %s\n", path);
  fwrite(map, sizeof(unsigned char), map_len, out);
  mbox_terminate(map, map_len, out);
  free_ro_mapping(map, map_len);
}
/*}}}*/
/* Set to 1 by get_validated_mbox_msg() when a matched mbox message fails its
 * stored checksum (database stale w.r.t. the mbox file); cleared at the start
 * of do_search() and reported as a warning at the end of the search. */
static int had_failed_checksum;
/* Map the mbox file containing message <msg_index> and locate that message
 * inside the mapping.  On return, *mbox_data/*mbox_len describe the whole
 * mapped mbox (*mbox_data == NULL if mapping failed) and *msg_data/*msg_len
 * the message itself, clamped so it cannot run past the mapping;
 * *msg_data is left NULL when the stored checksum no longer matches the
 * file contents (database out of date), and had_failed_checksum is set. */
static void get_validated_mbox_msg(struct read_db *db, int msg_index,/*{{{*/
    int *mbox_index,
    unsigned char **mbox_data, int *mbox_len,
    unsigned char **msg_data, int *msg_len)
{
  /* msg_data==NULL if checksum mismatches */
  unsigned char *start;
  checksum_t csum;
  unsigned int mbi, msgi;
  *msg_data = NULL;
  *msg_len = 0;
  decode_mbox_indices(db->path_offsets[msg_index], &mbi, &msgi);
  *mbox_index = mbi;
  create_ro_mapping(db->data + db->mbox_paths_table[mbi], mbox_data, mbox_len);
  if (!*mbox_data) return;
  /* For mbox messages, mtime_table holds the byte offset of the message
   * within its mbox file. */
  start = *mbox_data + db->mtime_table[msg_index];
  /* Ensure that we don't run off the end of the mmap'd file */
  if (db->mtime_table[msg_index] >= *mbox_len)
    *msg_len = 0;
  else if (db->mtime_table[msg_index] + db->size_table[msg_index] >= *mbox_len)
    *msg_len = *mbox_len - db->mtime_table[msg_index];
  else
    *msg_len = db->size_table[msg_index];
  compute_checksum((char *)start, *msg_len, &csum);
  if (!memcmp((db->data + db->mbox_checksum_table[mbi] + (msgi * sizeof(checksum_t))), &csum, sizeof(checksum_t))) {
    *msg_data = start;
  } else {
    had_failed_checksum = 1;
  }
  return;
}
/*}}}*/
/* Append the checksum-validated message <msg_index> (which lives inside an
 * mbox folder) to the open mbox stream <out>, prefixed by a synthetic
 * "From " separator and an X-source-folder header.  Silently skips the
 * message if the mbox could not be mapped or the checksum failed. */
static void append_mboxmsg_to_mbox(struct read_db *db, int msg_index, FILE *out)/*{{{*/
{
  /* Need to common up code with try_copy_to_path */
  unsigned char *mbox_start, *msg_start;
  int mbox_len, msg_len;
  int mbox_index;
  get_validated_mbox_msg(db, msg_index, &mbox_index, &mbox_start, &mbox_len, &msg_start, &msg_len);
  if (msg_start) {
    /* Artificial from line, we don't have the envelope sender so this is
       going to be artificial anyway. */
    fprintf(out, "From mairix@mairix Mon Jan 1 12:34:56 1970\n");
    fprintf(out, "X-source-folder: %s\n",
        db->data + db->mbox_paths_table[mbox_index]);
    fwrite(msg_start, sizeof(unsigned char), msg_len, out);
    mbox_terminate(msg_start, msg_len, out);
  }
  if (mbox_start) {
    free_ro_mapping(mbox_start, mbox_len);
  }
}
/*}}}*/
/* Copy the checksum-validated mbox message <msg_index> into a new file at
 * <target_path> (used for maildir/MH output), prefixed by an
 * X-source-folder header naming the originating mbox.  Does nothing if the
 * mbox could not be mapped, the checksum failed, or the file can't be
 * opened for writing. */
static void try_copy_to_path(struct read_db *db, int msg_index, char *target_path)/*{{{*/
{
  unsigned char *data;
  int mbox_len, msg_len;
  int mbi;
  FILE *out;
  unsigned char *start;
  get_validated_mbox_msg(db, msg_index, &mbi, &data, &mbox_len, &start, &msg_len);
  if (start) {
    out = fopen(target_path, "wb");
    if (out) {
      fprintf(out, "X-source-folder: %s\n",
          db->data + db->mbox_paths_table[mbi]);
      fwrite(start, sizeof(char), msg_len, out);
      fclose(out);
    }
  }
  if (data) {
    free_ro_mapping(data, mbox_len);
  }
  return;
}
/*}}}*/
/* Fill in a msg_src describing a message located at <start>..<start+len>
 * inside the mbox <filename>, for use in diagnostics by the rfc822 parser.
 * NOTE: returns a pointer to a function-local static, so each call
 * overwrites the previous result; not reentrant. */
static struct msg_src *setup_mbox_msg_src(char *filename, off_t start, size_t len)/*{{{*/
{
  static struct msg_src result;
  result.type = MS_MBOX;
  result.filename = filename;
  result.start = start;
  result.len = len;
  return &result;
}
/*}}}*/
/* Decode the seen/replied/flagged bits stored for message <idx> into
 * separate 0/1 out-parameters. */
static void get_flags_from_file(struct read_db *db, int idx, int *is_seen, int *is_replied, int *is_flagged)
{
  unsigned int bits = db->msg_type_and_flags[idx];
  *is_seen    = (bits & FLAG_SEEN)    ? 1 : 0;
  *is_replied = (bits & FLAG_REPLIED) ? 1 : 0;
  *is_flagged = (bits & FLAG_FLAGGED) ? 1 : 0;
}
/* Evaluate the search expression given in <args> against database <db> and
 * emit the matching messages to <output_path> in format <ft> (maildir, MH,
 * mbox, raw path listing, or excerpt).  When <show_threads> is set, every
 * message sharing a thread id with a hit is promoted to a hit as well.
 * Returns 0 if at least one message matched, 1 otherwise (shell-friendly).
 * NOTE: the <verbose> parameter is not referenced in this function. */
static int do_search(struct read_db *db, char **args, char *output_path, int show_threads, enum folder_type ft, int verbose)/*{{{*/
{
  char *colon, *start_words;
  int do_body, do_subject, do_from, do_to, do_cc, do_date, do_size;
  int do_att_name;
  int do_flags;
  int do_path, do_msgid;
  char *key;
  char *hit0, *hit1, *hit2, *hit3;
  int i;
  int n_hits;
  int left_anchor;
  had_failed_checksum = 0;
  /* One match flag per message, at each level of the expression grammar. */
  hit0 = new_array(char, db->n_msgs);
  hit1 = new_array(char, db->n_msgs);
  hit2 = new_array(char, db->n_msgs);
  hit3 = new_array(char, db->n_msgs);
  /* Argument structure is
   * x:tokena+tokenb,~tokenc,tokend+tokene
   *
   * + (and) binds more tightly than ,
   * , (or) binds more tightly than separate args
   *
   *
   * hit1 gathers the tokens and'ed with +
   * hit2 gathers the tokens or'ed with ,
   * hit3 gathers the separate args and'ed with <gap>
   * */
  /* Everything matches until proven otherwise */
  memset(hit3, 1, db->n_msgs);
  while (*args) {
    /* key is a single argument, separate args are and-ed together */
    key = *args++;
    memset(hit2, 0, db->n_msgs);
    memset(hit1, 1, db->n_msgs);
    do_to = 0;
    do_cc = 0;
    do_from = 0;
    do_subject = 0;
    do_body = 0;
    do_date = 0;
    do_size = 0;
    do_path = 0;
    do_msgid = 0;
    do_att_name = 0;
    do_flags = 0;
    /* Parse the optional "<scope letters>:" prefix of this argument. */
    colon = strchr(key, ':');
    if (colon) {
      char *p;
      for (p=key; p<colon; p++) {
        switch(*p) {
          case 'b': do_body = 1; break;
          case 's': do_subject = 1; break;
          case 't': do_to = 1; break;
          case 'c': do_cc = 1; break;
          case 'f': do_from = 1; break;
          case 'r': do_to = do_cc = 1; break;            /* recipients */
          case 'a': do_to = do_cc = do_from = 1; break;  /* anyone */
          case 'd': do_date = 1; break;
          case 'z': do_size = 1; break;
          case 'p': do_path = 1; break;
          case 'm': do_msgid = 1; break;
          case 'n': do_att_name = 1; break;
          case 'F': do_flags = 1; break;
          default: fprintf(stderr, "Unknown key type <%c>\n", *p); break;
        }
      }
      start_words = 1 + colon;
    } else {
      /* No prefix: search the common text scopes. */
      do_body = do_subject = do_to = do_cc = do_from = 1;
      start_words = key;
    }
    if (do_date || do_size || do_flags) {
      /* Date/size/flag keys match against per-message tables rather than
       * the token tables; the whole argument is one expression. */
      memset(hit0, 0, db->n_msgs);
      if (do_date) {
        find_date_matches_in_table(db, start_words, hit0);
      } else if (do_size) {
        find_size_matches_in_table(db, start_words, hit0);
      } else if (do_flags) {
        find_flag_matches_in_table(db, start_words, hit0);
      }
      /* AND-combine match vectors */
      for (i=0; i<db->n_msgs; i++) {
        hit1[i] &= hit0[i];
      }
    } else {
      /*{{{ Scan over separate words within this argument */
      do {
        /* / = 'or' separator
         * , = 'and' separator */
        char *orsep;
        char *andsep;
        char *word, *orig_word, *lower_word;
        char *equal;
        char *p;
        int negate;
        int had_orsep;
        int max_errors;
        orsep = strchr(start_words, '/');
        andsep = strchr(start_words, ',');
        had_orsep = 0;
        if (andsep && (!orsep || (andsep < orsep))) {
          /* Copy the word up to the ',' separator, dropping whitespace. */
          char *p, *q;
          word = new_array(char, 1 + (andsep - start_words)); /* maybe oversize */
          for (p=word, q=start_words; q < andsep; q++) {
            if (!isspace(*(unsigned char *)q)) {
              *p++ = *q;
            }
          }
          *p = 0;
          start_words = andsep + 1;
        } else if (orsep) { /* comes before + if there's a + */
          /* Copy the word up to the '/' separator, dropping whitespace. */
          char *p, *q;
          word = new_array(char, 1 + (orsep - start_words)); /* maybe oversize */
          for (p=word, q=start_words; q < orsep; q++) {
            if (!isspace(*(unsigned char *)q)) {
              *p++ = *q;
            }
          }
          *p = 0;
          start_words = orsep + 1;
          had_orsep = 1;
        } else {
          /* Last (or only) word of the argument. */
          word = new_string(start_words);
          while (*start_words) ++start_words;
        }
        orig_word = word;
        if (word[0] == '~') {   /* ~word : negate this word's matches */
          negate = 1;
          word++;
        } else {
          negate = 0;
        }
        if (word[0] == '^') {   /* ^word : anchor match at token start */
          left_anchor = 1;
          word++;
        } else {
          left_anchor = 0;
        }
        /* word=N : substring match allowing up to N errors */
        equal = strchr(word, '=');
        if (equal && !do_msgid) {
          *equal = 0;
          max_errors = atoi(equal + 1);
          /* Extend this to do anchoring etc */
        } else {
          max_errors = 0; /* keep GCC quiet */
        }
        /* Canonicalise search string to lowercase, since the database has all
         * tokens handled that way. But not for path search! */
        lower_word = new_string(word);
        for (p=lower_word; *p; p++) {
          *p = tolower(*(unsigned char *)p);
        }
        memset(hit0, 0, db->n_msgs);
        if (equal) {
          /* Approximate (substring, <= max_errors) matching. */
          if (do_to) match_substring_in_table(db, &db->to, lower_word, max_errors, left_anchor, hit0);
          if (do_cc) match_substring_in_table(db, &db->cc, lower_word, max_errors, left_anchor, hit0);
          if (do_from) match_substring_in_table(db, &db->from, lower_word, max_errors, left_anchor, hit0);
          if (do_subject) match_substring_in_table(db, &db->subject, lower_word, max_errors, left_anchor, hit0);
          if (do_body) match_substring_in_table(db, &db->body, lower_word, max_errors, left_anchor, hit0);
          if (do_att_name) match_substring_in_table(db, &db->attachment_name, lower_word, max_errors, left_anchor, hit0);
          if (do_path) match_substring_in_paths(db, word, max_errors, left_anchor, hit0);
          if (do_msgid) match_substring_in_table2(db, &db->msg_ids, lower_word, max_errors, left_anchor, hit0);
        } else {
          /* Exact token matching. */
          if (do_to) match_string_in_table(db, &db->to, lower_word, hit0);
          if (do_cc) match_string_in_table(db, &db->cc, lower_word, hit0);
          if (do_from) match_string_in_table(db, &db->from, lower_word, hit0);
          if (do_subject) match_string_in_table(db, &db->subject, lower_word, hit0);
          if (do_body) match_string_in_table(db, &db->body, lower_word, hit0);
          if (do_att_name) match_string_in_table(db, &db->attachment_name, lower_word, hit0);
          /* FIXME */
          if (do_path) match_substring_in_paths(db, word, 0, left_anchor, hit0);
          if (do_msgid) match_string_in_table2(db, &db->msg_ids, lower_word, hit0);
        }
        free(lower_word);
        /* AND-combine match vectors */
        for (i=0; i<db->n_msgs; i++) {
          if (negate) {
            hit1[i] &= !hit0[i];
          } else {
            hit1[i] &= hit0[i];
          }
        }
        if (had_orsep) {
          /* OR-combine match vectors */
          for (i=0; i<db->n_msgs; i++) {
            hit2[i] |= hit1[i];
          }
          memset(hit1, 1, db->n_msgs);
        }
        free(orig_word);
      } while (*start_words);
      /*}}}*/
    }
    /* OR-combine match vectors */
    for (i=0; i<db->n_msgs; i++) {
      hit2[i] |= hit1[i];
    }
    /* AND-combine match vectors */
    for (i=0; i<db->n_msgs; i++) {
      hit3[i] &= hit2[i];
    }
  }
  n_hits = 0;
  if (show_threads) {/*{{{*/
    /* Promote every message whose thread id matches some hit. */
    char *tids;
    tids = new_array(char, db->n_msgs);
    memset(tids, 0, db->n_msgs);
    for (i=0; i<db->n_msgs; i++) {
      if (hit3[i]) {
        tids[db->tid_table[i]] = 1;
      }
    }
    for (i=0; i<db->n_msgs; i++) {
      if (tids[db->tid_table[i]]) {
        hit3[i] = 1;
      }
    }
    free(tids);
  }
  /*}}}*/
  /* Emit the hits in the requested output format. */
  switch (ft) {
    case FT_MAILDIR:/*{{{*/
      for (i=0; i<db->n_msgs; i++) {
        if (hit3[i]) {
          int is_seen, is_replied, is_flagged;
          get_flags_from_file(db, i, &is_seen, &is_replied, &is_flagged);
          switch (rd_msg_type(db, i)) {
            case DB_MSG_FILE:
              {
                char *target_path;
                char *message_path;
                int is_in_new;
                message_path = db->data + db->path_offsets[i];
                is_in_new = looks_like_maildir_new_p(message_path);
                target_path = mk_maildir_path(i, output_path, is_in_new, is_seen, is_replied, is_flagged);
                create_symlink(message_path, target_path);
                free(target_path);
                ++n_hits;
              }
              break;
            case DB_MSG_MBOX:
              {
                /* mbox hits are copied out as fresh files; unseen mail
                 * (!is_seen) is placed in new/ rather than cur/. */
                char *target_path = mk_maildir_path(i, output_path, !is_seen, is_seen, is_replied, is_flagged);
                try_copy_to_path(db, i, target_path);
                free(target_path);
                ++n_hits;
              }
              break;
            case DB_MSG_DEAD:
              break;
          }
        }
      }
      break;
      /*}}}*/
    case FT_MH:/*{{{*/
      for (i=0; i<db->n_msgs; i++) {
        if (hit3[i]) {
          switch (rd_msg_type(db, i)) {
            case DB_MSG_FILE:
              {
                char *target_path = mk_mh_path(i, output_path);
                create_symlink(db->data + db->path_offsets[i], target_path);
                free(target_path);
                ++n_hits;
              }
              break;
            case DB_MSG_MBOX:
              {
                char *target_path = mk_mh_path(i, output_path);
                try_copy_to_path(db, i, target_path);
                free(target_path);
                ++n_hits;
              }
              break;
            case DB_MSG_DEAD:
              break;
          }
        }
      }
      break;
      /*}}}*/
    case FT_MBOX:/*{{{*/
      {
        FILE *out;
        out = fopen(output_path, "ab");
        if (!out) {
          fprintf(stderr, "Cannot open output folder %s\n", output_path);
          unlock_and_exit(1);
        }
        for (i=0; i<db->n_msgs; i++) {
          if (hit3[i]) {
            switch (rd_msg_type(db, i)) {
              case DB_MSG_FILE:
                {
                  append_file_to_mbox(db->data + db->path_offsets[i], out);
                  ++n_hits;
                }
                break;
              case DB_MSG_MBOX:
                {
                  append_mboxmsg_to_mbox(db, i, out);
                  ++n_hits;
                }
                break;
              case DB_MSG_DEAD:
                break;
            }
          }
        }
        fclose(out);
      }
      break;
      /*}}}*/
    case FT_RAW:/*{{{*/
      /* Just list the paths (and mbox byte ranges) on stdout. */
      for (i=0; i<db->n_msgs; i++) {
        if (hit3[i]) {
          switch (rd_msg_type(db, i)) {
            case DB_MSG_FILE:
              {
                ++n_hits;
                printf("%s\n", db->data + db->path_offsets[i]);
              }
              break;
            case DB_MSG_MBOX:
              {
                unsigned int mbix, msgix;
                int start, len, after_end;
                start = db->mtime_table[i];
                len = db->size_table[i];
                after_end = start + len;
                ++n_hits;
                decode_mbox_indices(db->path_offsets[i], &mbix, &msgix);
                printf("mbox:%s [%d,%d)\n", db->data + db->mbox_paths_table[mbix], start, after_end);
              }
              break;
            case DB_MSG_DEAD:
              break;
          }
        }
      }
      break;
      /*}}}*/
    case FT_EXCERPT:/*{{{*/
      /* List each hit with a short header summary parsed from the message. */
      for (i=0; i<db->n_msgs; i++) {
        if (hit3[i]) {
          struct rfc822 *parsed = NULL;
          switch (rd_msg_type(db, i)) {
            case DB_MSG_FILE:
              {
                char *filename;
                ++n_hits;
                printf("---------------------------------\n");
                filename = db->data + db->path_offsets[i];
                printf("%s\n", filename);
                parsed = make_rfc822(filename);
              }
              break;
            case DB_MSG_MBOX:
              {
                unsigned int mbix, msgix;
                int start, len, after_end;
                unsigned char *mbox_start, *msg_start;
                int mbox_len, msg_len;
                int mbox_index;
                start = db->mtime_table[i];
                len = db->size_table[i];
                after_end = start + len;
                ++n_hits;
                printf("---------------------------------\n");
                decode_mbox_indices(db->path_offsets[i], &mbix, &msgix);
                printf("mbox:%s [%d,%d)\n", db->data + db->mbox_paths_table[mbix], start, after_end);
                get_validated_mbox_msg(db, i, &mbox_index, &mbox_start, &mbox_len, &msg_start, &msg_len);
                if (msg_start) {
                  enum data_to_rfc822_error error;
                  struct msg_src *msg_src;
                  msg_src = setup_mbox_msg_src(db->data + db->mbox_paths_table[mbix], start, msg_len);
                  parsed = data_to_rfc822(msg_src, (char *) msg_start, msg_len, &error);
                }
                if (mbox_start) {
                  free_ro_mapping(mbox_start, mbox_len);
                }
              }
              break;
            case DB_MSG_DEAD:
              break;
          }
          if (parsed) {
            char datebuf[64];
            struct tm *thetm;
            if (parsed->hdrs.to) printf("  To:         %s\n", parsed->hdrs.to);
            if (parsed->hdrs.cc) printf("  Cc:         %s\n", parsed->hdrs.cc);
            if (parsed->hdrs.from) printf("  From:       %s\n", parsed->hdrs.from);
            if (parsed->hdrs.subject) printf("  Subject:    %s\n", parsed->hdrs.subject);
            if (parsed->hdrs.message_id)
              printf("  Message-ID: %s\n", parsed->hdrs.message_id);
            thetm = gmtime(&parsed->hdrs.date);
            strftime(datebuf, sizeof(datebuf), "%a, %d %b %Y", thetm);
            printf("  Date:       %s\n", datebuf);
            free_rfc822(parsed);
          }
        }
      }
      break;
      /*}}}*/
    default:
      assert(0);
      break;
  }
  free(hit0);
  free(hit1);
  free(hit2);
  free(hit3);
  if ((ft != FT_RAW) && (ft != FT_EXCERPT)) {
    printf("Matched %d messages\n", n_hits);
  }
  fflush(stdout);
  if (had_failed_checksum) {
    fprintf(stderr,
            "WARNING : \n"
            "Matches were found in mbox folders but the message checksums failed.\n"
            "You may need to run mairix in indexing mode then repeat your search.\n");
  }
  /* Return error code 1 to the shell if no messages were matched. */
  return (n_hits == 0) ? 1 : 0;
}
/*}}}*/
static int directory_exists_remove_other(char *name)/*{{{*/
{
  /* Return 1 if <name> exists and is a directory.  If it exists but is
   * some other kind of object, try to unlink it so the caller can mkdir
   * in its place; in that case (and when nothing exists at all) return 0. */
  struct stat sb;
  if (stat(name, &sb) >= 0) {
    if (S_ISDIR(sb.st_mode)) {
      return 1;
    }
    unlink(name); /* clear the way for a subsequent mkdir */
  }
  return 0;
}
/*}}}*/
static void create_dir(char *path)/*{{{*/
{
  /* Create directory <path> (mode 0700), reporting the result on stderr;
   * a failure is fatal and releases the lock via unlock_and_exit(). */
  if (mkdir(path, 0700) >= 0) {
    fprintf(stderr, "Created directory %s\n", path);
    return;
  }
  fprintf(stderr, "Could not create directory %s\n", path);
  unlock_and_exit(2);
}
/*}}}*/
/* Ensure <path> is a usable maildir: create the top-level directory and its
 * cur/, new/ and tmp/ subdirectories as needed.  Plain files in the way are
 * unlinked first (see directory_exists_remove_other); creation failure is
 * fatal inside create_dir(). */
static void maybe_create_maildir(char *path)/*{{{*/
{
  char *subdir, *tailpos;
  int len;
  if (!directory_exists_remove_other(path)) {
    create_dir(path);
  }
  len = strlen(path);
  /* Build "<path>/XXX" once and rewrite only the 3-char tail each time. */
  subdir = new_array(char, len + 5);
  strcpy(subdir, path);
  strcpy(subdir+len, "/");
  tailpos = subdir + len + 1;
  strcpy(tailpos,"cur");
  if (!directory_exists_remove_other(subdir)) {
    create_dir(subdir);
  }
  strcpy(tailpos,"new");
  if (!directory_exists_remove_other(subdir)) {
    create_dir(subdir);
  }
  strcpy(tailpos,"tmp");
  if (!directory_exists_remove_other(subdir)) {
    create_dir(subdir);
  }
  free(subdir);
  return;
}
/*}}}*/
/* Delete the previous search results from <path>/<subdir>: every directory
 * entry that is a symlink (maildir/MH result) or regular file (copied mbox
 * message) is unlinked; other entry types are left alone.  A missing
 * directory is silently ignored. */
static void clear_maildir_subfolder(char *path, char *subdir)/*{{{*/
{
  char *sdir;
  char *fpath;
  int len;
  DIR *d;
  struct dirent *de;
  struct stat sb;
  len = strlen(path) + strlen(subdir);
  sdir = new_array(char, len + 2);
  fpath = new_array(char, len + 3 + NAME_MAX);
  strcpy(sdir, path);
  strcat(sdir, "/");
  strcat(sdir, subdir);
  d = opendir(sdir);
  if (d) {
    while ((de = readdir(d))) {
      strcpy(fpath, sdir);
      strcat(fpath, "/");
      strcat(fpath, de->d_name);
      if (lstat(fpath, &sb) >= 0) {
        /* Deal with both symlinks to maildir/MH messages as well as real files
         * where mbox messages have been written. */
        if (S_ISLNK(sb.st_mode) || S_ISREG(sb.st_mode)) {
          /* FIXME : Can you unlink from a directory while doing a readdir loop over it? */
          if (unlink(fpath) < 0) {
            fprintf(stderr, "Unlinking %s failed\n", fpath);
          }
        }
      }
    }
    closedir(d);
  }
  free(fpath);
  free(sdir);
}
/*}}}*/
/* Delete previous search results from the MH folder <path>: every entry
 * whose name passes valid_mh_filename_p and is a symlink or regular file
 * is unlinked.  A missing directory is silently ignored. */
static void clear_mh_folder(char *path)/*{{{*/
{
  char *fpath;
  int len;
  DIR *d;
  struct dirent *de;
  struct stat sb;
  len = strlen(path);
  fpath = new_array(char, len + 3 + NAME_MAX);
  d = opendir(path);
  if (d) {
    while ((de = readdir(d))) {
      if (valid_mh_filename_p(de->d_name)) {
        strcpy(fpath, path);
        strcat(fpath, "/");
        strcat(fpath, de->d_name);
        if (lstat(fpath, &sb) >= 0) {
          /* See under maildir above for explanation */
          if (S_ISLNK(sb.st_mode) || S_ISREG(sb.st_mode)) {
            /* FIXME : Can you unlink from a directory while doing a readdir loop over it? */
            if (unlink(fpath) < 0) {
              fprintf(stderr, "Unlinking %s failed\n", fpath);
            }
          }
        }
      }
    }
    closedir(d);
  }
  free(fpath);
}
/*}}}*/
/* Discard previous search results by removing the output mbox file.
 * The unlink() return value is intentionally not checked here — a missing
 * file is the common case (TODO confirm: other errors are also silent). */
static void clear_mbox_folder(char *path)/*{{{*/
{
  unlink(path);
}
/*}}}*/
/* Top-level search entry point: open the database at <database_path>,
 * prepare the output folder <complete_mfolder> for format <ft> (creating
 * maildir/MH structure as needed, and clearing previous results unless
 * <do_augment>), run do_search(), then clean up.  Returns do_search()'s
 * result (0 = something matched, 1 = nothing matched).
 * NOTE: takes ownership of <complete_mfolder> and frees it before return. */
int search_top(int do_threads, int do_augment, char *database_path, char *complete_mfolder, char **argv, enum folder_type ft, int verbose)/*{{{*/
{
  struct read_db *db;
  int result;
  db = open_db(database_path);
  switch (ft) {
    case FT_MAILDIR:
      maybe_create_maildir(complete_mfolder);
      break;
    case FT_MH:
      if (!directory_exists_remove_other(complete_mfolder)) {
        create_dir(complete_mfolder);
      }
      break;
    case FT_MBOX:
      /* Nothing to do */
      break;
    case FT_RAW:
    case FT_EXCERPT:
      break;
    default:
      assert(0);
  }
  if (!do_augment) {
    /* Remove any results left over from a previous search. */
    switch (ft) {
      case FT_MAILDIR:
        clear_maildir_subfolder(complete_mfolder, "new");
        clear_maildir_subfolder(complete_mfolder, "cur");
        break;
      case FT_MH:
        clear_mh_folder(complete_mfolder);
        break;
      case FT_MBOX:
        clear_mbox_folder(complete_mfolder);
        break;
      case FT_RAW:
      case FT_EXCERPT:
        break;
      default:
        assert(0);
    }
  }
  result = do_search(db, argv, complete_mfolder, do_threads, ft, verbose);
  free(complete_mfolder);
  close_db(db);
  return result;
}
/*}}}*/
| /*
mairix - message index builder and finder for maildir folders.
**********************************************************************
* Copyright (C) Richard P. Curnow 2002,2003,2004,2005,2006
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
**********************************************************************
*/
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <time.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <unistd.h>
#include <assert.h>
#include <dirent.h>
#include <errno.h>
/* Lame fix for systems where NAME_MAX isn't defined after including the above
* set of .h files (Solaris, FreeBSD so far). Probably grossly oversized but
* it'll do. */
#if !defined(NAME_MAX)
#define NAME_MAX 4096
#endif
#include "mairix.h"
#include "reader.h"
#include "memmac.h"
/* Set hits[m] = 1 for every message m containing token <hit_tok> of table
 * <tt>.  The per-token message list is stored as delta-encoded indices
 * (decoded by read_increment) terminated by a 0xff byte. */
static void mark_hits_in_table(struct read_db *db, struct toktable_db *tt, int hit_tok, char *hits)/*{{{*/
{
  /* mark files containing matched token */
  int idx;
  unsigned char *j, *first_char;
  idx = 0;
  first_char = (unsigned char *) db->data + tt->enc_offsets[hit_tok];
  for (j = first_char; *j != 0xff; ) {
    idx += read_increment(&j);
    assert(idx < db->n_msgs);
    hits[idx] = 1;
  }
}
/*}}}*/
/* As mark_hits_in_table(), but for a toktable2_db (e.g. message-ids): the
 * delta-encoded, 0xff-terminated message list lives at enc1_offsets. */
static void mark_hits_in_table2(struct read_db *db, struct toktable2_db *tt, int hit_tok, char *hits)/*{{{*/
{
  /* mark files containing matched token */
  int idx;
  unsigned char *j, *first_char;
  idx = 0;
  first_char = (unsigned char *) db->data + tt->enc1_offsets[hit_tok];
  for (j = first_char; *j != 0xff; ) {
    idx += read_increment(&j);
    assert(idx < db->n_msgs);
    hits[idx] = 1;
  }
}
/*}}}*/
/* See "Fast text searching with errors, Sun Wu and Udi Manber, TR 91-11,
University of Arizona. I have been informed that this algorithm is NOT
patented. This implementation of it is entirely the work of Richard P.
Curnow - I haven't looked at any related source (webglimpse, agrep etc) in
writing this.
*/
/* Build the shift-or tables for the Wu-Manber matcher: a[c] gets bit i
 * cleared iff character c occurs at pattern position i, and *hit is all
 * ones except bit len-1, so that ~(state | *hit) is non-zero exactly when
 * a state register has a complete match alive.  Patterns are limited to
 * 31 characters so the state fits in one unsigned long on all platforms;
 * longer (or empty) patterns abort the search. */
static void build_match_vector(char *substring, unsigned long *a, unsigned long *hit)/*{{{*/
{
  int len;
  char *p;
  int i;
  len = strlen(substring);
  if (len > 31 || len == 0) {
    fprintf(stderr, "Can't match patterns longer than 31 characters or empty\n");
    unlock_and_exit(2);
  }
  memset(a, 0xff, 256 * sizeof(unsigned long));
  for (p=substring, i=0; *p; p++, i++) {
    a[(unsigned int) *(unsigned char *)p] &= ~(1UL << i);
  }
  *hit = ~(1UL << (len-1));
  return;
}
/*}}}*/
/* Shift-or scan of <token> for the pattern encoded in a[]/hit by
 * build_match_vector, allowing no errors.  r0 holds one state bit per
 * pattern position (0 = that prefix is still alive); <left_anchor>
 * injects a 1 into bit 0 after the first character, forbidding matches
 * that start later in the token.  Returns 1 as soon as a match completes. */
static int substring_match_0(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  int got_hit=0;
  char *p;
  unsigned long r0;
  unsigned long anchor, anchor1;
  r0 = ~0;
  got_hit = 0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    r0 = (r0<<1) | anchor | a[idx];
    if (~(r0 | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* As substring_match_0, but tolerating up to one error (Wu-Manber
 * approximate matching): r0 tracks exact states, r1 states with one
 * error; the three AND-ed terms in the r1 update correspond to
 * substitution, insertion and deletion transitions. */
static int substring_match_1(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  int got_hit=0;
  char *p;
  unsigned long r0, r1, nr0;
  unsigned long anchor, anchor1;
  r0 = ~0;
  r1 = r0<<1;
  got_hit = 0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    nr0 = (r0<<1) | anchor | a[idx];
    r1 = ((r1<<1) | anchor | a[idx]) & ((r0 & nr0) << 1) & r0;
    r0 = nr0;
    if (~((r0 & r1) | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* As substring_match_1, but tolerating up to two errors: r2 tracks states
 * reached with two errors, cascaded from the one-error register r1. */
static int substring_match_2(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  int got_hit=0;
  char *p;
  unsigned long r0, r1, r2, nr0, nr1;
  unsigned long anchor, anchor1;
  r0 = ~0;
  r1 = r0<<1;
  r2 = r1<<1;
  got_hit = 0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    nr0 = (r0<<1) | anchor | a[idx];
    nr1 = ((r1<<1) | anchor | a[idx]) & ((r0 & nr0) << 1) & r0;
    r2 = ((r2<<1) | anchor | a[idx]) & ((r1 & nr1) << 1) & r1;
    r0 = nr0;
    r1 = nr1;
    if (~((r0 & r1 & r2) | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* As substring_match_2, but tolerating up to three errors (r3 cascaded
 * from r2).  Beyond three errors, substring_match_general() is used. */
static int substring_match_3(unsigned long *a, unsigned long hit, int left_anchor, char *token)/*{{{*/
{
  int got_hit=0;
  char *p;
  unsigned long r0, r1, r2, r3, nr0, nr1, nr2;
  unsigned long anchor, anchor1;
  r0 = ~0;
  r1 = r0<<1;
  r2 = r1<<1;
  r3 = r2<<1;
  got_hit = 0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    nr0 = (r0<<1) | anchor | a[idx];
    nr1 = ((r1<<1) | anchor | a[idx]) & ((r0 & nr0) << 1) & r0;
    nr2 = ((r2<<1) | anchor | a[idx]) & ((r1 & nr1) << 1) & r1;
    r3 = ((r3<<1) | anchor | a[idx]) & ((r2 & nr2) << 1) & r2;
    r0 = nr0;
    r1 = nr1;
    r2 = nr2;
    if (~((r0 & r1 & r2 & r3) | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* General form of the approximate matcher for an arbitrary error budget:
 * r[k] is the state register for exactly k errors (0 <= k <= max_errors);
 * <r> and <nr> are caller-provided scratch arrays of 1+max_errors longs.
 * Returns 1 as soon as any error level completes a match. */
static int substring_match_general(unsigned long *a, unsigned long hit, int left_anchor, char *token, int max_errors, unsigned long *r, unsigned long *nr)/*{{{*/
{
  int got_hit=0;
  char *p;
  int j;
  unsigned long anchor, anchor1;
  r[0] = ~0;
  anchor = 0;
  anchor1 = left_anchor ? 0x1 : 0x0;
  for (j=1; j<=max_errors; j++) {
    r[j] = r[j-1] << 1;
  }
  got_hit = 0;
  for(p=token; *p; p++) {
    int idx = (unsigned int) *(unsigned char *)p;
    int d;
    /* compo accumulates the AND of all levels; since patterns are capped
     * at 31 chars, the interesting bit always fits in unsigned int. */
    unsigned int compo;
    compo = nr[0] = ((r[0]<<1) | anchor | a[idx]);
    for (d=1; d<=max_errors; d++) {
      nr[d] = ((r[d]<<1) | anchor | a[idx])
        & ((r[d-1] & nr[d-1])<<1)
        & r[d-1];
      compo &= nr[d];
    }
    memcpy(r, nr, (1 + max_errors) * sizeof(unsigned long));
    if (~(compo | hit)) {
      got_hit = 1;
      break;
    }
    anchor = anchor1;
  }
  return got_hit;
}
/*}}}*/
/* Approximate-match <substring> (up to <max_errors> errors, optionally
 * left-anchored) against every token of table <tt>, OR-ing the messages of
 * each matching token into hits[].  Error counts 0-3 dispatch to the
 * specialized loops so the bitmaps can stay in registers; larger budgets
 * use heap scratch arrays and the general matcher. */
static void match_substring_in_table(struct read_db *db, struct toktable_db *tt, char *substring, int max_errors, int left_anchor, char *hits)/*{{{*/
{
  int i, got_hit;
  unsigned long a[256];
  unsigned long *r=NULL, *nr=NULL;
  unsigned long hit;
  char *token;
  build_match_vector(substring, a, &hit);
  got_hit = 0;
  if (max_errors > 3) {
    r = new_array(unsigned long, 1 + max_errors);
    nr = new_array(unsigned long, 1 + max_errors);
  }
  for (i=0; i<tt->n; i++) {
    token = db->data + tt->tok_offsets[i];
    switch (max_errors) {
      /* Optimise common cases for few errors to allow optimizer to keep bitmaps
       * in registers */
      case 0:
        got_hit = substring_match_0(a, hit, left_anchor, token);
        break;
      case 1:
        got_hit = substring_match_1(a, hit, left_anchor, token);
        break;
      case 2:
        got_hit = substring_match_2(a, hit, left_anchor, token);
        break;
      case 3:
        got_hit = substring_match_3(a, hit, left_anchor, token);
        break;
      default:
        got_hit = substring_match_general(a, hit, left_anchor, token, max_errors, r, nr);
        break;
    }
    if (got_hit) {
      mark_hits_in_table(db, tt, i, hits);
    }
  }
  if (r) free(r);
  if (nr) free(nr);
}
/*}}}*/
/* As match_substring_in_table(), but for a toktable2_db (message-ids);
 * hits are marked via mark_hits_in_table2(). */
static void match_substring_in_table2(struct read_db *db, struct toktable2_db *tt, char *substring, int max_errors, int left_anchor, char *hits)/*{{{*/
{
  int i, got_hit;
  unsigned long a[256];
  unsigned long *r=NULL, *nr=NULL;
  unsigned long hit;
  char *token;
  build_match_vector(substring, a, &hit);
  got_hit = 0;
  if (max_errors > 3) {
    r = new_array(unsigned long, 1 + max_errors);
    nr = new_array(unsigned long, 1 + max_errors);
  }
  for (i=0; i<tt->n; i++) {
    token = db->data + tt->tok_offsets[i];
    switch (max_errors) {
      /* Optimise common cases for few errors to allow optimizer to keep bitmaps
       * in registers */
      case 0:
        got_hit = substring_match_0(a, hit, left_anchor, token);
        break;
      case 1:
        got_hit = substring_match_1(a, hit, left_anchor, token);
        break;
      case 2:
        got_hit = substring_match_2(a, hit, left_anchor, token);
        break;
      case 3:
        got_hit = substring_match_3(a, hit, left_anchor, token);
        break;
      default:
        got_hit = substring_match_general(a, hit, left_anchor, token, max_errors, r, nr);
        break;
    }
    if (got_hit) {
      mark_hits_in_table2(db, tt, i, hits);
    }
  }
  if (r) free(r);
  if (nr) free(nr);
}
/*}}}*/
/* Approximate-match <substring> against each message's folder path (the
 * file path for file messages, the containing mbox path for mbox messages).
 * Unlike the token-table matchers, this OVERWRITES hits[i] per message
 * rather than OR-ing into it; dead messages are forced to 0. */
static void match_substring_in_paths(struct read_db *db, char *substring, int max_errors, int left_anchor, char *hits)/*{{{*/
{
  int i;
  unsigned long a[256];
  unsigned long *r=NULL, *nr=NULL;
  unsigned long hit;
  build_match_vector(substring, a, &hit);
  if (max_errors > 3) {
    r = new_array(unsigned long, 1 + max_errors);
    nr = new_array(unsigned long, 1 + max_errors);
  }
  for (i=0; i<db->n_msgs; i++) {
    char *token = NULL;
    unsigned int mbix, msgix;
    switch (rd_msg_type(db, i)) {
      case DB_MSG_FILE:
        token = db->data + db->path_offsets[i];
        break;
      case DB_MSG_MBOX:
        decode_mbox_indices(db->path_offsets[i], &mbix, &msgix);
        token = db->data + db->mbox_paths_table[mbix];
        break;
      case DB_MSG_DEAD:
        hits[i] = 0; /* never match on dead paths */
        goto next_message;
    }
    assert(token);
    switch (max_errors) {
      /* Optimise common cases for few errors to allow optimizer to keep bitmaps
       * in registers */
      case 0:
        hits[i] = substring_match_0(a, hit, left_anchor, token);
        break;
      case 1:
        hits[i] = substring_match_1(a, hit, left_anchor, token);
        break;
      case 2:
        hits[i] = substring_match_2(a, hit, left_anchor, token);
        break;
      case 3:
        hits[i] = substring_match_3(a, hit, left_anchor, token);
        break;
      default:
        hits[i] = substring_match_general(a, hit, left_anchor, token, max_errors, r, nr);
        break;
    }
next_message:
    (void) 0;
  }
  if (r) free(r);
  if (nr) free(nr);
}
/*}}}*/
static void match_string_in_table(struct read_db *db, struct toktable_db *tt, char *key, char *hits)/*{{{*/
{
  /* Exact-match <key> against every token in table <tt>, marking all
   * messages that contain a matching token in hits[]. */
  /* TODO : replace with binary search? */
  int idx;
  for (idx=0; idx<tt->n; idx++) {
    char *tok = db->data + tt->tok_offsets[idx];
    if (strcmp(key, tok) == 0) {
      /* get all matching files */
      mark_hits_in_table(db, tt, idx, hits);
    }
  }
}
/*}}}*/
static void match_string_in_table2(struct read_db *db, struct toktable2_db *tt, char *key, char *hits)/*{{{*/
{
  /* Exact-match <key> against every token in table <tt> (toktable2
   * variant), marking all messages that contain a matching token. */
  /* TODO : replace with binary search? */
  int idx;
  for (idx=0; idx<tt->n; idx++) {
    char *tok = db->data + tt->tok_offsets[idx];
    if (strcmp(key, tok) == 0) {
      /* get all matching files */
      mark_hits_in_table2(db, tt, idx, hits);
    }
  }
}
/*}}}*/
static int parse_size_expr(char *x)/*{{{*/
{
  /* Parse a message-size expression: a decimal number optionally followed
   * by 'k'/'K' (KiB) or 'm'/'M' (MiB).  Returns the size in bytes, or -1
   * (after a message on stderr) if no number could be read. */
  int value;
  int consumed;
  if (sscanf(x, "%d%n", &value, &consumed) != 1) {
    fprintf(stderr, "Could not parse message size expression <%s>\n", x);
    return -1;
  }
  switch (x[consumed]) {
    case 'k':
    case 'K':
      value <<= 10;
      break;
    case 'm':
    case 'M':
      value <<= 20;
      break;
    default:
      break;
  }
  return value;
}
/*}}}*/
/* Split a size-range argument into its endpoints.  Accepts "START-END",
 * "START-", "-END" (a leading ':' is skipped), or a bare value which is
 * treated as an upper bound only.  Each endpoint present is parsed with
 * parse_size_expr() and reported via *has_start/*start, *has_end/*end. */
static void parse_size_range(char *size_expr, int *has_start, int *start, int *has_end, int *end)/*{{{*/
{
  char *x = size_expr;
  char *dash;
  int len;
  if (*x == ':') x++;
  len = strlen(x);
  dash = strchr(x, '-');
  *has_start = *has_end = 0;
  if (dash) {
    char *p, *q;
    if (dash > x) {
      /* Copy the part before the dash into a fresh string. */
      char *s;
      s = new_array(char, dash - x + 1);
      for (p=s, q=x; q<dash; ) *p++ = *q++;
      *p = 0;
      *start = parse_size_expr(s);
      *has_start = 1;
      free(s);
    }
    if (dash[1]) { /* dash not at end of arg */
      char *e;
      e = new_array(char, (x + len) - dash);
      for (p=e, q=dash+1; *q; ) *p++ = *q++;
      *p = 0;
      *end = parse_size_expr(e);
      *has_end = 1;
      free(e);
    }
  } else {
    /* No dash: a single value acts as an upper bound. */
    *has_start = 0;
    *end = parse_size_expr(size_expr);
    *has_end = 1;
  }
  return;
}
/*}}}*/
/* Mark in hits[] every message whose size lies inside the range given by
 * <size_expr>.  Endpoints are EXCLUSIVE (strict < / >), reversed endpoints
 * are swapped, and a missing endpoint leaves that side unbounded. */
static void find_size_matches_in_table(struct read_db *db, char *size_expr, char *hits)/*{{{*/
{
  int start, end;
  int has_start, has_end, start_cond, end_cond;
  int i;
  start = end = -1; /* avoid compiler warning about uninitialised variables. */
  parse_size_range(size_expr, &has_start, &start, &has_end, &end);
  if (has_start && has_end) {
    /* Allow user to put the endpoints in backwards */
    if (start > end) {
      int temp = start;
      start = end;
      end = temp;
    }
  }
  for (i=0; i<db->n_msgs; i++) {
    start_cond = has_start ? (db->size_table[i] > start) : 1;
    end_cond   = has_end   ? (db->size_table[i] < end  ) : 1;
    if (start_cond && end_cond) {
      hits[i] = 1;
    }
  }
}
/*}}}*/
/* Mark in hits[] every message whose date lies inside the range described
 * by <date_expr> (parsed by scan_date_string; a parse failure aborts the
 * search).  Endpoints are EXCLUSIVE and are swapped if given reversed. */
static void find_date_matches_in_table(struct read_db *db, char *date_expr, char *hits)/*{{{*/
{
  time_t start, end;
  int has_start, has_end, start_cond, end_cond;
  int i;
  int status;
  status = scan_date_string(date_expr, &start, &has_start, &end, &has_end);
  if (status) {
    unlock_and_exit (2);
  }
  if (has_start && has_end) {
    /* Allow user to put the endpoints in backwards */
    if (start > end) {
      time_t temp = start;
      start = end;
      end = temp;
    }
  }
  for (i=0; i<db->n_msgs; i++) {
    start_cond = has_start ? (db->date_table[i] > start) : 1;
    end_cond   = has_end   ? (db->date_table[i] < end  ) : 1;
    if (start_cond && end_cond) {
      hits[i] = 1;
    }
  }
}
/*}}}*/
/* Mark in hits[] every message whose flag bits satisfy <flag_expr>: the
 * characters 's'/'S', 'r'/'R', 'f'/'F' require the seen/replied/flagged
 * bit respectively, and a preceding '-' requires that bit to be CLEAR.
 * All stated conditions must hold simultaneously; unknown characters are
 * reported on stderr and ignored. */
static void find_flag_matches_in_table(struct read_db *db, char *flag_expr, char *hits)/*{{{*/
{
  int pos_seen, neg_seen;
  int pos_replied, neg_replied;
  int pos_flagged, neg_flagged;
  int negate;
  char *p;
  int i;
  negate = 0;
  pos_seen = neg_seen = 0;
  pos_replied = neg_replied = 0;
  pos_flagged = neg_flagged = 0;
  for (p=flag_expr; *p; p++) {
    switch (*p) {
      case '-':
        negate = 1; /* applies to the next flag character only */
        break;
      case 's':
      case 'S':
        if (negate) neg_seen = 1;
        else pos_seen = 1;
        negate = 0;
        break;
      case 'r':
      case 'R':
        if (negate) neg_replied = 1;
        else pos_replied = 1;
        negate = 0;
        break;
      case 'f':
      case 'F':
        if (negate) neg_flagged = 1;
        else pos_flagged = 1;
        negate = 0;
        break;
      default:
        fprintf(stderr, "Did not understand the character '%c' (0x%02x) in the flags argument F:%s\n",
                isprint(*p) ? *p : '.',
                (int) *(unsigned char *) p,
                flag_expr);
        break;
    }
  }
  for (i=0; i<db->n_msgs; i++) {
    if ((!pos_seen || (db->msg_type_and_flags[i] & FLAG_SEEN)) &&
        (!neg_seen || !(db->msg_type_and_flags[i] & FLAG_SEEN)) &&
        (!pos_replied || (db->msg_type_and_flags[i] & FLAG_REPLIED)) &&
        (!neg_replied || !(db->msg_type_and_flags[i] & FLAG_REPLIED)) &&
        (!pos_flagged || (db->msg_type_and_flags[i] & FLAG_FLAGGED)) &&
        (!neg_flagged || !(db->msg_type_and_flags[i] & FLAG_FLAGGED))) {
      hits[i] = 1;
    }
  }
}
/*}}}*/
static char *mk_maildir_path(int token, char *output_dir, int is_in_new,
    int is_seen, int is_replied, int is_flagged)/*{{{*/
{
  /* Build a heap-allocated maildir message path of the form
   * "<output_dir>/{new,cur}/123456789.<token>.mairix[:2,FRS]",
   * appending the standard maildir info suffix when any flag is set. */
  char suffix[48];
  char *path;
  int capacity;
  capacity = strlen(output_dir) + 64; /* generous upper bound */
  path = new_array(char, capacity + 1 + sizeof(":2,FRS"));
  sprintf(suffix, "123456789.%d.mairix", token);
  strcpy(path, output_dir);
  strcat(path, is_in_new ? "/new/" : "/cur/");
  strcat(path, suffix);
  if (is_flagged || is_replied || is_seen) {
    /* Flag characters must appear in ASCII order after ":2," */
    strcat(path, ":2,");
    if (is_flagged) strcat(path, "F");
    if (is_replied) strcat(path, "R");
    if (is_seen) strcat(path, "S");
  }
  return path;
}
/*}}}*/
static char *mk_mh_path(int token, char *output_dir)/*{{{*/
{
  /* Return a heap-allocated MH message path "<output_dir>/<token+1>".
   * Bug fix: the old uniq_buf[8] / strlen+10 sizing overflowed once
   * token+1 needed 8 or more digits (>= 10,000,000 hits); size the
   * buffers for the widest possible int instead. */
  char *result;
  char uniq_buf[16]; /* fits "-2147483648" plus NUL */
  int len;
  len = strlen(output_dir) + sizeof(uniq_buf) + 2; /* oversize */
  result = new_array(char, len);
  strcpy(result, output_dir);
  strcat(result, "/");
  sprintf(uniq_buf, "%d", token+1);
  strcat(result, uniq_buf);
  return result;
}
/*}}}*/
static int looks_like_maildir_new_p(const char *p)/*{{{*/
{
const char *s1, *s2;
s2 = p;
while (*s2) s2++;
while ((s2 > p) && (*s2 != '/')) s2--;
if (s2 <= p) return 0;
s1 = s2 - 1;
while ((s1 > p) && (*s1 != '/')) s1--;
if (s1 <= p) return 0;
if (!strncmp(s1, "/new/", 5)) {
return 1;
} else {
return 0;
}
}
/*}}}*/
static void create_symlink(char *link_target, char *new_link)/*{{{*/
{
if ((!do_hardlinks && symlink(link_target, new_link) < 0) || link(link_target, new_link)) {
if (verbose) {
perror("symlink");
fprintf(stderr, "Failed path <%s> -> <%s>\n", link_target, new_link);
}
}
}
/*}}}*/
static void mbox_terminate(const unsigned char *data, int len, FILE *out)/*{{{*/
{
if (len == 0)
fputs("\n", out);
else if (len == 1) {
if (data[0] != '\n')
fputs("\n", out);
}
else if (data[len-1] != '\n')
fputs("\n\n", out);
else if (data[len-2] != '\n')
fputs("\n", out);
}
/*}}}*/
static void append_file_to_mbox(const char *path, FILE *out)/*{{{*/
{
unsigned char *data;
int len;
create_ro_mapping(path, &data, &len);
if (data) {
fprintf(out, "From mairix@mairix Mon Jan 1 12:34:56 1970\n");
fprintf(out, "X-source-folder: %s\n", path);
fwrite (data, sizeof(unsigned char), len, out);
mbox_terminate(data, len, out);
free_ro_mapping(data, len);
}
return;
}
/*}}}*/
static int had_failed_checksum;
static void get_validated_mbox_msg(struct read_db *db, int msg_index,/*{{{*/
int *mbox_index,
unsigned char **mbox_data, int *mbox_len,
unsigned char **msg_data, int *msg_len)
{
/* msg_data==NULL if checksum mismatches */
unsigned char *start;
checksum_t csum;
unsigned int mbi, msgi;
*msg_data = NULL;
*msg_len = 0;
decode_mbox_indices(db->path_offsets[msg_index], &mbi, &msgi);
*mbox_index = mbi;
create_ro_mapping(db->data + db->mbox_paths_table[mbi], mbox_data, mbox_len);
if (!*mbox_data) return;
start = *mbox_data + db->mtime_table[msg_index];
/* Ensure that we don't run off the end of the mmap'd file */
if (db->mtime_table[msg_index] >= *mbox_len)
*msg_len = 0;
else if (db->mtime_table[msg_index] + db->size_table[msg_index] >= *mbox_len)
*msg_len = *mbox_len - db->mtime_table[msg_index];
else
*msg_len = db->size_table[msg_index];
compute_checksum((char *)start, *msg_len, &csum);
if (!memcmp((db->data + db->mbox_checksum_table[mbi] + (msgi * sizeof(checksum_t))), &csum, sizeof(checksum_t))) {
*msg_data = start;
} else {
had_failed_checksum = 1;
}
return;
}
/*}}}*/
static void append_mboxmsg_to_mbox(struct read_db *db, int msg_index, FILE *out)/*{{{*/
{
/* Need to common up code with try_copy_to_path */
unsigned char *mbox_start, *msg_start;
int mbox_len, msg_len;
int mbox_index;
get_validated_mbox_msg(db, msg_index, &mbox_index, &mbox_start, &mbox_len, &msg_start, &msg_len);
if (msg_start) {
/* Artificial from line, we don't have the envelope sender so this is
going to be artificial anyway. */
fprintf(out, "From mairix@mairix Mon Jan 1 12:34:56 1970\n");
fprintf(out, "X-source-folder: %s\n",
db->data + db->mbox_paths_table[mbox_index]);
fwrite(msg_start, sizeof(unsigned char), msg_len, out);
mbox_terminate(msg_start, msg_len, out);
}
if (mbox_start) {
free_ro_mapping(mbox_start, mbox_len);
}
}
/*}}}*/
static void try_copy_to_path(struct read_db *db, int msg_index, char *target_path)/*{{{*/
{
unsigned char *data;
int mbox_len, msg_len;
int mbi;
FILE *out;
unsigned char *start;
get_validated_mbox_msg(db, msg_index, &mbi, &data, &mbox_len, &start, &msg_len);
if (start) {
out = fopen(target_path, "wb");
if (out) {
fprintf(out, "X-source-folder: %s\n",
db->data + db->mbox_paths_table[mbi]);
fwrite(start, sizeof(char), msg_len, out);
fclose(out);
}
}
if (data) {
free_ro_mapping(data, mbox_len);
}
return;
}
/*}}}*/
static struct msg_src *setup_mbox_msg_src(char *filename, off_t start, size_t len)/*{{{*/
{
static struct msg_src result;
result.type = MS_MBOX;
result.filename = filename;
result.start = start;
result.len = len;
return &result;
}
/*}}}*/
static void get_flags_from_file(struct read_db *db, int idx, int *is_seen, int *is_replied, int *is_flagged)
{
*is_seen = (db->msg_type_and_flags[idx] & FLAG_SEEN) ? 1 : 0;
*is_replied = (db->msg_type_and_flags[idx] & FLAG_REPLIED) ? 1 : 0;
*is_flagged = (db->msg_type_and_flags[idx] & FLAG_FLAGGED) ? 1 : 0;
}
static void string_tolower(char *str)
{
char *p;
for (p=str; *p; p++) {
*p = tolower(*(unsigned char *)p);
}
}
static int do_search(struct read_db *db, char **args, char *output_path, int show_threads, enum folder_type ft, int verbose)/*{{{*/
{
char *colon, *start_words;
int do_body, do_subject, do_from, do_to, do_cc, do_date, do_size;
int do_att_name;
int do_flags;
int do_path, do_msgid;
char *key;
char *hit0, *hit1, *hit2, *hit3;
int i;
int n_hits;
int left_anchor;
had_failed_checksum = 0;
hit0 = new_array(char, db->n_msgs);
hit1 = new_array(char, db->n_msgs);
hit2 = new_array(char, db->n_msgs);
hit3 = new_array(char, db->n_msgs);
/* Argument structure is
* x:tokena+tokenb,~tokenc,tokend+tokene
*
* + (and) binds more tightly than ,
* , (or) binds more tightly than separate args
*
*
* hit1 gathers the tokens and'ed with +
* hit2 gathers the tokens or'ed with ,
* hit3 gathers the separate args and'ed with <gap>
* */
/* Everything matches until proven otherwise */
memset(hit3, 1, db->n_msgs);
while (*args) {
/* key is a single argument, separate args are and-ed together */
key = *args++;
memset(hit2, 0, db->n_msgs);
memset(hit1, 1, db->n_msgs);
do_to = 0;
do_cc = 0;
do_from = 0;
do_subject = 0;
do_body = 0;
do_date = 0;
do_size = 0;
do_path = 0;
do_msgid = 0;
do_att_name = 0;
do_flags = 0;
colon = strchr(key, ':');
if (colon) {
char *p;
for (p=key; p<colon; p++) {
switch(*p) {
case 'b': do_body = 1; break;
case 's': do_subject = 1; break;
case 't': do_to = 1; break;
case 'c': do_cc = 1; break;
case 'f': do_from = 1; break;
case 'r': do_to = do_cc = 1; break;
case 'a': do_to = do_cc = do_from = 1; break;
case 'd': do_date = 1; break;
case 'z': do_size = 1; break;
case 'p': do_path = 1; break;
case 'm': do_msgid = 1; break;
case 'n': do_att_name = 1; break;
case 'F': do_flags = 1; break;
default: fprintf(stderr, "Unknown key type <%c>\n", *p); break;
}
}
if (do_msgid && (p-key) > 1) {
fprintf(stderr, "Message-ID key <m> can't be used with other keys\n");
unlock_and_exit(2);
}
start_words = 1 + colon;
} else {
do_body = do_subject = do_to = do_cc = do_from = 1;
start_words = key;
}
if (do_date || do_size || do_flags) {
memset(hit0, 0, db->n_msgs);
if (do_date) {
find_date_matches_in_table(db, start_words, hit0);
} else if (do_size) {
find_size_matches_in_table(db, start_words, hit0);
} else if (do_flags) {
find_flag_matches_in_table(db, start_words, hit0);
}
/* AND-combine match vectors */
for (i=0; i<db->n_msgs; i++) {
hit1[i] &= hit0[i];
}
} else if (do_msgid) {
char *lower_word = new_string(start_words);
string_tolower(lower_word);
memset(hit0, 0, db->n_msgs);
match_string_in_table2(db, &db->msg_ids, lower_word, hit0);
free(lower_word);
/* AND-combine match vectors */
for (i=0; i<db->n_msgs; i++) {
hit1[i] &= hit0[i];
}
} else {
/*{{{ Scan over separate words within this argument */
do {
/* / = 'or' separator
* , = 'and' separator */
char *orsep;
char *andsep;
char *word, *orig_word, *lower_word;
char *equal;
char *p;
int negate;
int had_orsep;
int max_errors;
orsep = strchr(start_words, '/');
andsep = strchr(start_words, ',');
had_orsep = 0;
if (andsep && (!orsep || (andsep < orsep))) {
char *p, *q;
word = new_array(char, 1 + (andsep - start_words)); /* maybe oversize */
for (p=word, q=start_words; q < andsep; q++) {
if (!isspace(*(unsigned char *)q)) {
*p++ = *q;
}
}
*p = 0;
start_words = andsep + 1;
} else if (orsep) { /* comes before + if there's a + */
char *p, *q;
word = new_array(char, 1 + (orsep - start_words)); /* maybe oversize */
for (p=word, q=start_words; q < orsep; q++) {
if (!isspace(*(unsigned char *)q)) {
*p++ = *q;
}
}
*p = 0;
start_words = orsep + 1;
had_orsep = 1;
} else {
word = new_string(start_words);
while (*start_words) ++start_words;
}
orig_word = word;
if (word[0] == '~') {
negate = 1;
word++;
} else {
negate = 0;
}
if (word[0] == '^') {
left_anchor = 1;
word++;
} else {
left_anchor = 0;
}
equal = strchr(word, '=');
if (equal) {
*equal = 0;
max_errors = atoi(equal + 1);
/* Extend this to do anchoring etc */
} else {
max_errors = 0; /* keep GCC quiet */
}
/* Canonicalise search string to lowercase, since the database has all
* tokens handled that way. But not for path search! */
lower_word = new_string(word);
string_tolower(lower_word);
memset(hit0, 0, db->n_msgs);
if (equal) {
if (do_to) match_substring_in_table(db, &db->to, lower_word, max_errors, left_anchor, hit0);
if (do_cc) match_substring_in_table(db, &db->cc, lower_word, max_errors, left_anchor, hit0);
if (do_from) match_substring_in_table(db, &db->from, lower_word, max_errors, left_anchor, hit0);
if (do_subject) match_substring_in_table(db, &db->subject, lower_word, max_errors, left_anchor, hit0);
if (do_body) match_substring_in_table(db, &db->body, lower_word, max_errors, left_anchor, hit0);
if (do_att_name) match_substring_in_table(db, &db->attachment_name, lower_word, max_errors, left_anchor, hit0);
if (do_path) match_substring_in_paths(db, word, max_errors, left_anchor, hit0);
} else {
if (do_to) match_string_in_table(db, &db->to, lower_word, hit0);
if (do_cc) match_string_in_table(db, &db->cc, lower_word, hit0);
if (do_from) match_string_in_table(db, &db->from, lower_word, hit0);
if (do_subject) match_string_in_table(db, &db->subject, lower_word, hit0);
if (do_body) match_string_in_table(db, &db->body, lower_word, hit0);
if (do_att_name) match_string_in_table(db, &db->attachment_name, lower_word, hit0);
/* FIXME */
if (do_path) match_substring_in_paths(db, word, 0, left_anchor, hit0);
}
free(lower_word);
/* AND-combine match vectors */
for (i=0; i<db->n_msgs; i++) {
if (negate) {
hit1[i] &= !hit0[i];
} else {
hit1[i] &= hit0[i];
}
}
if (had_orsep) {
/* OR-combine match vectors */
for (i=0; i<db->n_msgs; i++) {
hit2[i] |= hit1[i];
}
memset(hit1, 1, db->n_msgs);
}
free(orig_word);
} while (*start_words);
/*}}}*/
}
/* OR-combine match vectors */
for (i=0; i<db->n_msgs; i++) {
hit2[i] |= hit1[i];
}
/* AND-combine match vectors */
for (i=0; i<db->n_msgs; i++) {
hit3[i] &= hit2[i];
}
}
n_hits = 0;
if (show_threads) {/*{{{*/
char *tids;
tids = new_array(char, db->n_msgs);
memset(tids, 0, db->n_msgs);
for (i=0; i<db->n_msgs; i++) {
if (hit3[i]) {
tids[db->tid_table[i]] = 1;
}
}
for (i=0; i<db->n_msgs; i++) {
if (tids[db->tid_table[i]]) {
hit3[i] = 1;
}
}
free(tids);
}
/*}}}*/
switch (ft) {
case FT_MAILDIR:/*{{{*/
for (i=0; i<db->n_msgs; i++) {
if (hit3[i]) {
int is_seen, is_replied, is_flagged;
get_flags_from_file(db, i, &is_seen, &is_replied, &is_flagged);
switch (rd_msg_type(db, i)) {
case DB_MSG_FILE:
{
char *target_path;
char *message_path;
int is_in_new;
message_path = db->data + db->path_offsets[i];
is_in_new = looks_like_maildir_new_p(message_path);
target_path = mk_maildir_path(i, output_path, is_in_new, is_seen, is_replied, is_flagged);
create_symlink(message_path, target_path);
free(target_path);
++n_hits;
}
break;
case DB_MSG_MBOX:
{
char *target_path = mk_maildir_path(i, output_path, !is_seen, is_seen, is_replied, is_flagged);
try_copy_to_path(db, i, target_path);
free(target_path);
++n_hits;
}
break;
case DB_MSG_DEAD:
break;
}
}
}
break;
/*}}}*/
case FT_MH:/*{{{*/
for (i=0; i<db->n_msgs; i++) {
if (hit3[i]) {
switch (rd_msg_type(db, i)) {
case DB_MSG_FILE:
{
char *target_path = mk_mh_path(i, output_path);
create_symlink(db->data + db->path_offsets[i], target_path);
free(target_path);
++n_hits;
}
break;
case DB_MSG_MBOX:
{
char *target_path = mk_mh_path(i, output_path);
try_copy_to_path(db, i, target_path);
free(target_path);
++n_hits;
}
break;
case DB_MSG_DEAD:
break;
}
}
}
break;
/*}}}*/
case FT_MBOX:/*{{{*/
{
FILE *out;
out = fopen(output_path, "ab");
if (!out) {
fprintf(stderr, "Cannot open output folder %s\n", output_path);
unlock_and_exit(1);
}
for (i=0; i<db->n_msgs; i++) {
if (hit3[i]) {
switch (rd_msg_type(db, i)) {
case DB_MSG_FILE:
{
append_file_to_mbox(db->data + db->path_offsets[i], out);
++n_hits;
}
break;
case DB_MSG_MBOX:
{
append_mboxmsg_to_mbox(db, i, out);
++n_hits;
}
break;
case DB_MSG_DEAD:
break;
}
}
}
fclose(out);
}
break;
/*}}}*/
case FT_RAW:/*{{{*/
for (i=0; i<db->n_msgs; i++) {
if (hit3[i]) {
switch (rd_msg_type(db, i)) {
case DB_MSG_FILE:
{
++n_hits;
printf("%s\n", db->data + db->path_offsets[i]);
}
break;
case DB_MSG_MBOX:
{
unsigned int mbix, msgix;
int start, len, after_end;
start = db->mtime_table[i];
len = db->size_table[i];
after_end = start + len;
++n_hits;
decode_mbox_indices(db->path_offsets[i], &mbix, &msgix);
printf("mbox:%s [%d,%d)\n", db->data + db->mbox_paths_table[mbix], start, after_end);
}
break;
case DB_MSG_DEAD:
break;
}
}
}
break;
/*}}}*/
case FT_EXCERPT:/*{{{*/
for (i=0; i<db->n_msgs; i++) {
if (hit3[i]) {
struct rfc822 *parsed = NULL;
switch (rd_msg_type(db, i)) {
case DB_MSG_FILE:
{
char *filename;
++n_hits;
printf("---------------------------------\n");
filename = db->data + db->path_offsets[i];
printf("%s\n", filename);
parsed = make_rfc822(filename);
}
break;
case DB_MSG_MBOX:
{
unsigned int mbix, msgix;
int start, len, after_end;
unsigned char *mbox_start, *msg_start;
int mbox_len, msg_len;
int mbox_index;
start = db->mtime_table[i];
len = db->size_table[i];
after_end = start + len;
++n_hits;
printf("---------------------------------\n");
decode_mbox_indices(db->path_offsets[i], &mbix, &msgix);
printf("mbox:%s [%d,%d)\n", db->data + db->mbox_paths_table[mbix], start, after_end);
get_validated_mbox_msg(db, i, &mbox_index, &mbox_start, &mbox_len, &msg_start, &msg_len);
if (msg_start) {
enum data_to_rfc822_error error;
struct msg_src *msg_src;
msg_src = setup_mbox_msg_src(db->data + db->mbox_paths_table[mbix], start, msg_len);
parsed = data_to_rfc822(msg_src, (char *) msg_start, msg_len, &error);
}
if (mbox_start) {
free_ro_mapping(mbox_start, mbox_len);
}
}
break;
case DB_MSG_DEAD:
break;
}
if (parsed) {
char datebuf[64];
struct tm *thetm;
if (parsed->hdrs.to) printf(" To: %s\n", parsed->hdrs.to);
if (parsed->hdrs.cc) printf(" Cc: %s\n", parsed->hdrs.cc);
if (parsed->hdrs.from) printf(" From: %s\n", parsed->hdrs.from);
if (parsed->hdrs.subject) printf(" Subject: %s\n", parsed->hdrs.subject);
if (parsed->hdrs.message_id)
printf(" Message-ID: %s\n", parsed->hdrs.message_id);
thetm = gmtime(&parsed->hdrs.date);
strftime(datebuf, sizeof(datebuf), "%a, %d %b %Y", thetm);
printf(" Date: %s\n", datebuf);
free_rfc822(parsed);
}
}
}
break;
/*}}}*/
default:
assert(0);
break;
}
free(hit0);
free(hit1);
free(hit2);
free(hit3);
if ((ft != FT_RAW) && (ft != FT_EXCERPT)) {
printf("Matched %d messages\n", n_hits);
}
fflush(stdout);
if (had_failed_checksum) {
fprintf(stderr,
"WARNING : \n"
"Matches were found in mbox folders but the message checksums failed.\n"
"You may need to run mairix in indexing mode then repeat your search.\n");
}
/* Return error code 1 to the shell if no messages were matched. */
return (n_hits == 0) ? 1 : 0;
}
/*}}}*/
static int directory_exists_remove_other(char *name)/*{{{*/
{
struct stat sb;
if (stat(name, &sb) < 0) {
return 0;
}
if (S_ISDIR(sb.st_mode)) {
return 1;
} else {
/* Try to remove. */
unlink(name);
return 0;
}
}
/*}}}*/
static void create_dir(char *path)/*{{{*/
{
if (mkdir(path, 0700) < 0) {
fprintf(stderr, "Could not create directory %s\n", path);
unlock_and_exit(2);
}
fprintf(stderr, "Created directory %s\n", path);
return;
}
/*}}}*/
static void maybe_create_maildir(char *path)/*{{{*/
{
char *subdir, *tailpos;
int len;
if (!directory_exists_remove_other(path)) {
create_dir(path);
}
len = strlen(path);
subdir = new_array(char, len + 5);
strcpy(subdir, path);
strcpy(subdir+len, "/");
tailpos = subdir + len + 1;
strcpy(tailpos,"cur");
if (!directory_exists_remove_other(subdir)) {
create_dir(subdir);
}
strcpy(tailpos,"new");
if (!directory_exists_remove_other(subdir)) {
create_dir(subdir);
}
strcpy(tailpos,"tmp");
if (!directory_exists_remove_other(subdir)) {
create_dir(subdir);
}
free(subdir);
return;
}
/*}}}*/
static void clear_maildir_subfolder(char *path, char *subdir)/*{{{*/
{
char *sdir;
char *fpath;
int len;
DIR *d;
struct dirent *de;
struct stat sb;
len = strlen(path) + strlen(subdir);
sdir = new_array(char, len + 2);
fpath = new_array(char, len + 3 + NAME_MAX);
strcpy(sdir, path);
strcat(sdir, "/");
strcat(sdir, subdir);
d = opendir(sdir);
if (d) {
while ((de = readdir(d))) {
strcpy(fpath, sdir);
strcat(fpath, "/");
strcat(fpath, de->d_name);
if (lstat(fpath, &sb) >= 0) {
/* Deal with both symlinks to maildir/MH messages as well as real files
* where mbox messages have been written. */
if (S_ISLNK(sb.st_mode) || S_ISREG(sb.st_mode)) {
/* FIXME : Can you unlink from a directory while doing a readdir loop over it? */
if (unlink(fpath) < 0) {
fprintf(stderr, "Unlinking %s failed\n", fpath);
}
}
}
}
closedir(d);
}
free(fpath);
free(sdir);
}
/*}}}*/
static void clear_mh_folder(char *path)/*{{{*/
{
char *fpath;
int len;
DIR *d;
struct dirent *de;
struct stat sb;
len = strlen(path);
fpath = new_array(char, len + 3 + NAME_MAX);
d = opendir(path);
if (d) {
while ((de = readdir(d))) {
if (valid_mh_filename_p(de->d_name)) {
strcpy(fpath, path);
strcat(fpath, "/");
strcat(fpath, de->d_name);
if (lstat(fpath, &sb) >= 0) {
/* See under maildir above for explanation */
if (S_ISLNK(sb.st_mode) || S_ISREG(sb.st_mode)) {
/* FIXME : Can you unlink from a directory while doing a readdir loop over it? */
if (unlink(fpath) < 0) {
fprintf(stderr, "Unlinking %s failed\n", fpath);
}
}
}
}
}
closedir(d);
}
free(fpath);
}
/*}}}*/
static void clear_mbox_folder(char *path)/*{{{*/
{
unlink(path);
}
/*}}}*/
int search_top(int do_threads, int do_augment, char *database_path, char *complete_mfolder, char **argv, enum folder_type ft, int verbose)/*{{{*/
{
struct read_db *db;
int result;
db = open_db(database_path);
switch (ft) {
case FT_MAILDIR:
maybe_create_maildir(complete_mfolder);
break;
case FT_MH:
if (!directory_exists_remove_other(complete_mfolder)) {
create_dir(complete_mfolder);
}
break;
case FT_MBOX:
/* Nothing to do */
break;
case FT_RAW:
case FT_EXCERPT:
break;
default:
assert(0);
}
if (!do_augment) {
switch (ft) {
case FT_MAILDIR:
clear_maildir_subfolder(complete_mfolder, "new");
clear_maildir_subfolder(complete_mfolder, "cur");
break;
case FT_MH:
clear_mh_folder(complete_mfolder);
break;
case FT_MBOX:
clear_mbox_folder(complete_mfolder);
break;
case FT_RAW:
case FT_EXCERPT:
break;
default:
assert(0);
}
}
result = do_search(db, argv, complete_mfolder, do_threads, ft, verbose);
free(complete_mfolder);
close_db(db);
return result;
}
/*}}}*/
|
jbroadway/sitellite | 12 | inc/app/sitellite/boxes/util/snippets/access.php | sitellite_access = public
sitellite_action = on
sitellite_status = approved
sitellite_inline = on
| ; <?php /*
sitellite_access = master
sitellite_action = on
sitellite_status = approved
sitellite_inline = on
; */
|
sidorares/nodejs-mysql-native | 65 | examples/websql.js | // websql example adapted from http://html5demos.com/database-rollback
// original code (c)
var webdb = require('../lib/mysql-native/wrappers/websql');
var db = webdb.openDatabase('test');
db.transaction(function (tx) {
tx.executeSql("CREATE TABLE `foo` (`id` int(20) DEFAULT NULL, `text` blob ) ENGINE=InnoDB");
tx.executeSql('INSERT INTO foo (id, text) VALUES (1, "foobar")');
});
db.transaction(function (tx) {
tx.executeSql('SELECT * FROM foo', [], function (tx, results) {
console.log('found rows (should be 1): ' + util.inspect(results));
}, function (tx, err) {
console.log('select* failed: ' + err.message);
});
});
db.transaction(function (tx) {
tx.executeSql('DROP TABLE foo');
// known to fail - so should rollback the DROP statement
tx.executeSql('INSERT INTO foo (id, text) VALUES (1, "foobar")', [],
function(tx, rs)
{
console.log("insrted " + util.inspect(rs));
});
}, function (err) {
util.puts('should be rolling back caused by: ' + err.message);
});
db.transaction(function (tx) {
tx.executeSql('SELECT * FROM foo', [], function (tx, results) {
console.log('found rows (should be 1): ' + util.inspect(results));
}, function (tx, err) {
console.log('select* failed: ' + err.message);
});
});
| // websql example adapted from http://html5demos.com/database-rollback
// original code (c)
var webdb = require('../lib/mysql-native/wrappers/websql');
var util = require('util');
var db = webdb.openDatabase('test');
db.transaction(function (tx) {
tx.executeSql("CREATE TABLE `foo` (`id` int(20) DEFAULT NULL, `text` blob ) ENGINE=InnoDB");
tx.executeSql('INSERT INTO foo (id, text) VALUES (1, "foobar")');
});
db.transaction(function (tx) {
tx.executeSql('SELECT * FROM foo', [], function (tx, results) {
console.log('found rows (should be 1): ' + util.inspect(results));
}, function (tx, err) {
console.log('select* failed: ' + err.message);
});
});
db.transaction(function (tx) {
tx.executeSql('DROP TABLE foo');
// known to fail - so should rollback the DROP statement
tx.executeSql('INSERT INTO foo (id, text) VALUES (1, "foobar")', [],
function(tx, rs)
{
console.log("insrted " + util.inspect(rs));
});
}, function (err) {
util.puts('should be rolling back caused by: ' + err.message);
});
db.transaction(function (tx) {
tx.executeSql('SELECT * FROM foo', [], function (tx, results) {
console.log('found rows (should be 1): ' + util.inspect(results));
}, function (tx, err) {
console.log('select* failed: ' + err.message);
});
});
|
votesmart/python-votesmart | 8 | votesmart.py | """ Python library for interacting with Project Vote Smart API.
Project Vote Smart's API (http://www.votesmart.org/services_api.php)
provides rich biographical data, including data on votes, committee
assignments, and much more.
"""
__author__ = "James Turk <jturk@sunlightfoundation.com>"
__version__ = "0.3.2"
__copyright__ = "Copyright (c) 2009 Sunlight Labs"
__license__ = "BSD"
import urllib, urllib2
try:
import json
except ImportError:
import simplejson as json
class VotesmartApiError(Exception):
""" Exception for Sunlight API errors """
class VotesmartApiObject(object):
def __init__(self, d):
self.__dict__ = d
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.__dict__)
class Address(object):
def __init__(self, d):
self.__dict__.update(d['address'])
self.__dict__.update(d['phone'])
self.__dict__.update(d['notes'])
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.__dict__)
class WebAddress(VotesmartApiObject):
def __str__(self):
return self.webAddress
class Bio(object):
def __init__(self, d):
#self.__dict__.update(d['election'])
#self.__dict__.update(d['office'])
self.__dict__.update(d['candidate'])
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.__dict__)
class AddlBio(VotesmartApiObject):
def __str__(self):
return ': '.join((self.name, self.data))
class Candidate(VotesmartApiObject):
def __str__(self):
return ' '.join((self.firstName, self.lastName))
class CommitteeType(VotesmartApiObject):
def __str__(self):
return self.name
class Committee(VotesmartApiObject):
def __str__(self):
return self.name
class CommitteeDetail(VotesmartApiObject):
def __str__(self):
return self.name
class CommitteeMember(VotesmartApiObject):
def __str__(self):
return ' '.join((self.title, self.firstName, self.lastName))
class District(VotesmartApiObject):
def __str__(self):
return self.name
class Election(VotesmartApiObject):
def __init__(self, d):
stages = d.pop('stage', None)
self.__dict__ = d
if stages:
self.stages = _result_to_obj(ElectionStage, stages)
def __str__(self):
return self.name
class ElectionStage(VotesmartApiObject):
def __str__(self):
return '%s (%s)' % (self.name, self.electionDate)
class Official(VotesmartApiObject):
def __str__(self):
return ' '.join((self.title, self.firstName, self.lastName))
class LeadershipPosition(VotesmartApiObject):
def __str__(self):
return self.name
class Locality(VotesmartApiObject):
def __str__(self):
return self.name
class Measure(VotesmartApiObject):
def __str__(self):
return self.title
class MeasureDetail(VotesmartApiObject):
def __str__(self):
return self.title
class OfficeType(VotesmartApiObject):
def __str__(self):
return ': '.join((self.officeTypeId, self.name))
class OfficeBranch(VotesmartApiObject):
def __str__(self):
return ': '.join((self.officeBranchId, self.name))
class OfficeLevel(VotesmartApiObject):
def __str__(self):
return ': '.join((self.officeLevelId, self.name))
class Office(VotesmartApiObject):
def __str__(self):
return self.name
class Category(VotesmartApiObject):
def __str__(self):
return ': '.join((self.categoryId, self.name))
class Sig(VotesmartApiObject):
def __str__(self):
return ': '.join((self.sigId, self.name))
class SigDetail(VotesmartApiObject):
def __str__(self):
return self.name
class Rating(VotesmartApiObject):
def __str__(self):
return self.ratingText
class State(VotesmartApiObject):
def __str__(self):
return ' '.join((self.stateId, self.name))
class StateDetail(VotesmartApiObject):
def __str__(self):
return ' '.join((self.stateId, self.name))
class BillSponsor(VotesmartApiObject):
def __str__(self):
return self.name
class BillAction(VotesmartApiObject):
def __str__(self):
return ' - '.join((self.statusDate, self.stage))
class BillAmendment(VotesmartApiObject):
def __str__(self):
return self.title
class BillDetail(VotesmartApiObject):
def __init__(self, d):
sponsors = d.pop('sponsors')
actions = d.pop('actions')
amendments = d.pop('amendments') # ammendments -- sic
self.sponsors = _result_to_obj(BillSponsor, sponsors['sponsor'])
self.actions = _result_to_obj(BillAction, actions['action'])
if amendments:
self.amendments = _result_to_obj(BillAmendment, amendments['amendment'])
self.__dict__.update(d)
class BillActionDetail(VotesmartApiObject):
def __str__(self):
return self.officialTitle
class Bill(VotesmartApiObject):
def __str__(self):
return ' '.join((self.billNumber, self.title))
class Vote(VotesmartApiObject):
def __str__(self):
return ': '.join((self.candidateName, self.action))
class Veto(VotesmartApiObject):
def __str__(self):
return ' '.join((self.billNumber, self.billTitle))
def _result_to_obj(cls, result):
if isinstance(result, dict):
return [cls(result)]
else:
# the if o predicate is important, sometimes they return empty strings
return [cls(o) for o in result if o]
class votesmart(object):
apikey = None
@staticmethod
def _apicall(func, params):
if votesmart.apikey is None:
raise VotesmartApiError('Missing Project Vote Smart apikey')
params = dict([(k,v) for (k,v) in params.iteritems() if v])
url = 'http://api.votesmart.org/%s?o=JSON&key=%s&%s' % (func,
votesmart.apikey, urllib.urlencode(params))
try:
response = urllib2.urlopen(url).read()
obj = json.loads(response)
if 'error' in obj:
raise VotesmartApiError(obj['error']['errorMessage'])
else:
return obj
except urllib2.HTTPError, e:
raise VotesmartApiError(e)
except ValueError, e:
raise VotesmartApiError('Invalid Response')
class address(object):
@staticmethod
def getCampaign(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getCampaign', params)
return _result_to_obj(Address, result['address']['office'])
@staticmethod
def getCampaignWebAddress(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getCampaignWebAddress', params)
return _result_to_obj(WebAddress, result['webaddress']['address'])
@staticmethod
def getCampaignByElection(electionId):
params = {'electionId': electionId}
result = votesmart._apicall('Address.getCampaignByElection', params)
return _result_to_obj(Address, result['address']['office'])
@staticmethod
def getOffice(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getOffice', params)
return _result_to_obj(Address, result['address']['office'])
@staticmethod
def getOfficeWebAddress(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('Address.getOfficeWebAddress', params)
return _result_to_obj(WebAddress, result['webaddress']['address'])
#@staticmethod
#def getOfficeByOfficeState(officeId, stateId=None):
# params = {'officeId': officeId, 'stateId': stateId}
# result = votesmart._apicall('Address.getOfficeByOfficeState', params)
# return _result_to_obj(Address, result['address']['office'])
class candidatebio(object):
@staticmethod
def getBio(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('CandidateBio.getBio', params)
return Bio(result['bio'])
@staticmethod
def getAddlBio(candidateId):
params = {'candidateId': candidateId}
result = votesmart._apicall('CandidateBio.getAddlBio', params)
return _result_to_obj(AddlBio,
result['addlBio']['additional']['item'])
class candidates(object):
@staticmethod
def getByOfficeState(officeId, stateId=None, electionYear=None):
params = {'officeId': officeId, 'stateId':stateId, 'electionYear': electionYear}
result = votesmart._apicall('Candidates.getByOfficeState', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByOfficeTypeState(officeTypeId, stateId=None, electionYear=None):
params = {'officeTypeId': officeTypeId, 'stateId':stateId, 'electionYear': electionYear}
result = votesmart._apicall('Candidates.getByOfficeTypeState', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByLastname(lastName, electionYear=None):
params = {'lastName': lastName, 'electionYear':electionYear}
result = votesmart._apicall('Candidates.getByLastname', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByLevenstein(lastName, electionYear=None):
params = {'lastName': lastName, 'electionYear':electionYear}
result = votesmart._apicall('Candidates.getByLevenstein', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByElection(electionId):
params = {'electionId': electionId}
result = votesmart._apicall('Candidates.getByElection', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByDistrict(districtId, electionYear=None):
params = {'districtId': districtId, 'electionYear':electionYear}
result = votesmart._apicall('Candidates.getByDistrict', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
@staticmethod
def getByZip(zip5, zip4=None):
params = {'zip4': zip4, 'zip5': zip5}
result = votesmart._apicall('Candidates.getByZip', params)
return _result_to_obj(Candidate, result['candidateList']['candidate'])
class committee(object):
    """Wrappers for the Committee.* API methods."""

    @staticmethod
    def getTypes():
        resp = votesmart._apicall('Committee.getTypes', {})
        return _result_to_obj(CommitteeType, resp['committeeTypes']['type'])

    @staticmethod
    def getCommitteesByTypeState(typeId=None, stateId=None):
        resp = votesmart._apicall('Committee.getCommitteesByTypeState',
                                  dict(typeId=typeId, stateId=stateId))
        return _result_to_obj(Committee, resp['committees']['committee'])

    @staticmethod
    def getCommittee(committeeId):
        resp = votesmart._apicall('Committee.getCommittee',
                                  dict(committeeId=committeeId))
        return CommitteeDetail(resp['committee'])

    @staticmethod
    def getCommitteeMembers(committeeId):
        resp = votesmart._apicall('Committee.getCommitteeMembers',
                                  dict(committeeId=committeeId))
        return _result_to_obj(CommitteeMember, resp['committeeMembers']['member'])

class district(object):
    """Wrappers for the District.* API methods."""

    @staticmethod
    def getByOfficeState(officeId, stateId, districtName=None):
        resp = votesmart._apicall('District.getByOfficeState',
                                  dict(officeId=officeId, stateId=stateId,
                                       districtName=districtName))
        return _result_to_obj(District, resp['districtList']['district'])

    @staticmethod
    def getByZip(zip5, zip4=None):
        resp = votesmart._apicall('District.getByZip',
                                  dict(zip5=zip5, zip4=zip4))
        return _result_to_obj(District, resp['districtList']['district'])

class election(object):
    """Wrappers for the Election.* API methods."""

    @staticmethod
    def getElection(electionId):
        resp = votesmart._apicall('Election.getElection',
                                  dict(electionId=electionId))
        return Election(resp['elections']['election'])

    @staticmethod
    def getElectionByYearState(year, stateId=None):
        resp = votesmart._apicall('Election.getElectionByYearState',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Election, resp['elections']['election'])

    @staticmethod
    def getElectionByZip(zip5, zip4=None, year=None):
        resp = votesmart._apicall('Election.getElectionByZip',
                                  dict(zip5=zip5, zip4=zip4, year=year))
        return _result_to_obj(Election, resp['elections']['election'])

    @staticmethod
    def getStageCandidates(electionId, stageId, party=None,
                           districtId=None, stateId=None):
        resp = votesmart._apicall('Election.getStageCandidates',
                                  dict(electionId=electionId, stageId=stageId,
                                       party=party, districtId=districtId,
                                       stateId=stateId))
        return _result_to_obj(Candidate, resp['stageCandidates']['candidate'])
class leadership(object):
    """Wrappers for the Leadership.* API methods."""

    @staticmethod
    def getPositions(stateId=None, officeId=None):
        resp = votesmart._apicall('Leadership.getPositions',
                                  dict(stateId=stateId, officeId=officeId))
        return _result_to_obj(LeadershipPosition, resp['leadership']['position'])

    # NOTE: getCandidates was commented out in the original source; kept as-is.
    #@staticmethod
    #def getCandidates(leadershipId, stateId=None):
    #    params = {'leadershipId':leadershipId, 'stateId':stateId}
    #    result = votesmart._apicall('Leadership.getCandidates', params)
    #    return result['leaders']['leader']

class local(object):
    """Wrappers for the Local.* API methods."""

    @staticmethod
    def getCounties(stateId):
        resp = votesmart._apicall('Local.getCounties', dict(stateId=stateId))
        return _result_to_obj(Locality, resp['counties']['county'])

    @staticmethod
    def getCities(stateId):
        resp = votesmart._apicall('Local.getCities', dict(stateId=stateId))
        return _result_to_obj(Locality, resp['cities']['city'])

    @staticmethod
    def getOfficials(localId):
        resp = votesmart._apicall('Local.getOfficials', dict(localId=localId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

class measure(object):
    """Wrappers for the Measure.* API methods."""

    @staticmethod
    def getMeasuresByYearState(year, stateId):
        resp = votesmart._apicall('Measure.getMeasuresByYearState',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Measure, resp['measures']['measure'])

    @staticmethod
    def getMeasure(measureId):
        resp = votesmart._apicall('Measure.getMeasure',
                                  dict(measureId=measureId))
        return MeasureDetail(resp['measure'])

class npat(object):
    """Wrappers for the Npat.* API methods."""

    @staticmethod
    def getNpat(candidateId):
        resp = votesmart._apicall('Npat.getNpat', dict(candidateId=candidateId))
        return resp['npat']
class office(object):
    """Wrappers for the Office.* API methods."""

    @staticmethod
    def getTypes():
        resp = votesmart._apicall('Office.getTypes', {})
        return _result_to_obj(OfficeType, resp['officeTypes']['type'])

    @staticmethod
    def getBranches():
        resp = votesmart._apicall('Office.getBranches', {})
        return _result_to_obj(OfficeBranch, resp['branches']['branch'])

    @staticmethod
    def getLevels():
        resp = votesmart._apicall('Office.getLevels', {})
        return _result_to_obj(OfficeLevel, resp['levels']['level'])

    @staticmethod
    def getOfficesByType(typeId):
        resp = votesmart._apicall('Office.getOfficesByType',
                                  dict(typeId=typeId))
        return _result_to_obj(Office, resp['offices']['office'])

    @staticmethod
    def getOfficesByLevel(levelId):
        resp = votesmart._apicall('Office.getOfficesByLevel',
                                  dict(levelId=levelId))
        return _result_to_obj(Office, resp['offices']['office'])

    @staticmethod
    def getOfficesByTypeLevel(typeId, levelId):
        resp = votesmart._apicall('Office.getOfficesByTypeLevel',
                                  dict(typeId=typeId, levelId=levelId))
        return _result_to_obj(Office, resp['offices']['office'])

    @staticmethod
    def getOfficesByBranchLevel(branchId, levelId):
        resp = votesmart._apicall('Office.getOfficesByBranchLevel',
                                  dict(branchId=branchId, levelId=levelId))
        return _result_to_obj(Office, resp['offices']['office'])

class officials(object):
    """Wrappers for the Officials.* API methods."""

    @staticmethod
    def getStatewide(stateId=None):
        resp = votesmart._apicall('Officials.getStatewide',
                                  dict(stateId=stateId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByOfficeState(officeId, stateId=None):
        resp = votesmart._apicall('Officials.getByOfficeState',
                                  dict(officeId=officeId, stateId=stateId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByLastname(lastName):
        resp = votesmart._apicall('Officials.getByLastname',
                                  dict(lastName=lastName))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByLevenstein(lastName):
        resp = votesmart._apicall('Officials.getByLevenstein',
                                  dict(lastName=lastName))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByElection(electionId):
        resp = votesmart._apicall('Officials.getByElection',
                                  dict(electionId=electionId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByDistrict(districtId):
        resp = votesmart._apicall('Officials.getByDistrict',
                                  dict(districtId=districtId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByZip(zip5, zip4=None):
        resp = votesmart._apicall('Officials.getByZip',
                                  dict(zip5=zip5, zip4=zip4))
        return _result_to_obj(Official, resp['candidateList']['candidate'])
class rating(object):
    """Wrappers for the Rating.* API methods."""

    @staticmethod
    def getCategories(stateId=None):
        """Rating categories, optionally limited to a state."""
        params = {'stateId': stateId}
        result = votesmart._apicall('Rating.getCategories', params)
        return _result_to_obj(Category, result['categories']['category'])

    @staticmethod
    def getSigList(categoryId, stateId=None):
        """Special-interest groups that rate a category."""
        params = {'categoryId': categoryId, 'stateId': stateId}
        result = votesmart._apicall('Rating.getSigList', params)
        return _result_to_obj(Sig, result['sigs']['sig'])

    @staticmethod
    def getSig(sigId):
        """Detail for a single special-interest group."""
        params = {'sigId': sigId}
        result = votesmart._apicall('Rating.getSig', params)
        return SigDetail(result['sig'])

    @staticmethod
    def getCandidateRating(candidateId, sigId=None):
        """Ratings for a candidate.

        sigId now defaults to None (generalization): _apicall drops falsy
        parameters, so omitting it queries ratings from every group while
        existing two-argument callers are unaffected.
        """
        params = {'candidateId': candidateId, 'sigId': sigId}
        result = votesmart._apicall('Rating.getCandidateRating', params)
        return _result_to_obj(Rating, result['candidateRating']['rating'])
class state(object):
    """Wrappers for the State.* API methods."""

    @staticmethod
    def getStateIDs():
        resp = votesmart._apicall('State.getStateIDs', {})
        return _result_to_obj(State, resp['stateList']['list']['state'])

    @staticmethod
    def getState(stateId):
        resp = votesmart._apicall('State.getState', dict(stateId=stateId))
        return StateDetail(resp['state']['details'])

class votes(object):
    """Wrappers for the Votes.* API methods."""

    @staticmethod
    def getCategories(year, stateId=None):
        resp = votesmart._apicall('Votes.getCategories',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Category, resp['categories']['category'])

    @staticmethod
    def getBill(billId):
        resp = votesmart._apicall('Votes.getBill', dict(billId=billId))
        return BillDetail(resp['bill'])

    @staticmethod
    def getBillAction(actionId):
        resp = votesmart._apicall('Votes.getBillAction',
                                  dict(actionId=actionId))
        return BillActionDetail(resp['action'])

    @staticmethod
    def getBillActionVotes(actionId):
        resp = votesmart._apicall('Votes.getBillActionVotes',
                                  dict(actionId=actionId))
        return _result_to_obj(Vote, resp['votes']['vote'])

    @staticmethod
    def getBillActionVoteByOfficial(actionId, candidateId):
        resp = votesmart._apicall('Votes.getBillActionVoteByOfficial',
                                  dict(actionId=actionId,
                                       candidateId=candidateId))
        return Vote(resp['votes']['vote'])

    @staticmethod
    def getByBillNumber(billNumber):
        resp = votesmart._apicall('Votes.getByBillNumber',
                                  dict(billNumber=billNumber))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByCategoryYearState(categoryId, year, stateId=None):
        resp = votesmart._apicall('Votes.getBillsByCategoryYearState',
                                  dict(categoryId=categoryId, year=year,
                                       stateId=stateId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByYearState(year, stateId=None):
        resp = votesmart._apicall('Votes.getBillsByYearState',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByOfficialYearOffice(candidateId, year, officeId=None):
        resp = votesmart._apicall('Votes.getBillsByOfficialYearOffice',
                                  dict(candidateId=candidateId, year=year,
                                       officeId=officeId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByCandidateCategoryOffice(candidateId, categoryId, officeId=None):
        resp = votesmart._apicall('Votes.getBillsByCandidateCategoryOffice',
                                  dict(candidateId=candidateId,
                                       categoryId=categoryId,
                                       officeId=officeId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsBySponsorYear(candidateId, year):
        resp = votesmart._apicall('Votes.getBillsBySponsorYear',
                                  dict(candidateId=candidateId, year=year))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsBySponsorCategory(candidateId, categoryId):
        resp = votesmart._apicall('Votes.getBillsBySponsorCategory',
                                  dict(candidateId=candidateId,
                                       categoryId=categoryId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByStateRecent(stateId=None, amount=None):
        resp = votesmart._apicall('Votes.getBillsByStateRecent',
                                  dict(stateId=stateId, amount=amount))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getVetoes(candidateId):
        resp = votesmart._apicall('Votes.getVetoes',
                                  dict(candidateId=candidateId))
        return _result_to_obj(Veto, resp['vetoes']['veto'])
| """ Python library for interacting with Project Vote Smart API.
Project Vote Smart's API (http://www.votesmart.org/services_api.php)
provides rich biographical data, including data on votes, committee
assignments, and much more.
"""
__author__ = "James Turk <jturk@sunlightfoundation.com>"
__version__ = "0.3.2"
__copyright__ = "Copyright (c) 2009 Sunlight Labs"
__license__ = "BSD"
import urllib, urllib2
try:
import json
except ImportError:
import simplejson as json
class VotesmartApiError(Exception):
    """Raised for errors reported by the Project Vote Smart API."""

class VotesmartApiObject(object):
    """Base class that exposes an API result dict as object attributes."""
    def __init__(self, d):
        # Adopt the response dict directly as the instance namespace.
        self.__dict__ = d
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.__dict__)
class Address(object):
    """Flattens an address record's address/phone/notes sub-dicts."""

    def __init__(self, d):
        # Merge the sections in the same order as the original code,
        # so later sections win on key collisions.
        for section in ('address', 'phone', 'notes'):
            self.__dict__.update(d[section])

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.__dict__)
class WebAddress(VotesmartApiObject):
    """A single web address entry; str() returns the URL."""
    def __str__(self):
        return self.webAddress

class Bio(object):
    """Candidate bio; flattens only the 'candidate' section of the payload."""
    def __init__(self, d):
        # election/office sections were deliberately left commented out
        # in the original source; only candidate data is exposed.
        #self.__dict__.update(d['election'])
        #self.__dict__.update(d['office'])
        self.__dict__.update(d['candidate'])
    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.__dict__)

class AddlBio(VotesmartApiObject):
    """Additional bio fact; str() is 'name: data'."""
    def __str__(self):
        return ': '.join((self.name, self.data))

class Candidate(VotesmartApiObject):
    """Candidate record; str() is 'firstName lastName'."""
    def __str__(self):
        return ' '.join((self.firstName, self.lastName))

class CommitteeType(VotesmartApiObject):
    """Committee type; str() is its name."""
    def __str__(self):
        return self.name

class Committee(VotesmartApiObject):
    """Committee summary; str() is its name."""
    def __str__(self):
        return self.name

class CommitteeDetail(VotesmartApiObject):
    """Committee detail; str() is its name."""
    def __str__(self):
        return self.name

class CommitteeMember(VotesmartApiObject):
    """Committee member; str() is 'title firstName lastName'."""
    def __str__(self):
        return ' '.join((self.title, self.firstName, self.lastName))

class District(VotesmartApiObject):
    """Electoral district; str() is its name."""
    def __str__(self):
        return self.name

class Election(VotesmartApiObject):
    """Election record; nested 'stage' entries become ElectionStage objects."""
    def __init__(self, d):
        # Pop the stages first so they don't land as a raw dict attribute.
        stages = d.pop('stage', None)
        self.__dict__ = d
        if stages:
            self.stages = _result_to_obj(ElectionStage, stages)
    def __str__(self):
        return self.name

class ElectionStage(VotesmartApiObject):
    """Election stage; str() is 'name (electionDate)'."""
    def __str__(self):
        return '%s (%s)' % (self.name, self.electionDate)

class Official(VotesmartApiObject):
    """Elected official; str() is 'title firstName lastName'."""
    def __str__(self):
        return ' '.join((self.title, self.firstName, self.lastName))

class LeadershipPosition(VotesmartApiObject):
    """Leadership position; str() is its name."""
    def __str__(self):
        return self.name

class Locality(VotesmartApiObject):
    """City or county; str() is its name."""
    def __str__(self):
        return self.name

class Measure(VotesmartApiObject):
    """Ballot measure summary; str() is its title."""
    def __str__(self):
        return self.title

class MeasureDetail(VotesmartApiObject):
    """Ballot measure detail; str() is its title."""
    def __str__(self):
        return self.title

class OfficeType(VotesmartApiObject):
    """Office type; str() is 'officeTypeId: name'."""
    def __str__(self):
        return ': '.join((self.officeTypeId, self.name))

class OfficeBranch(VotesmartApiObject):
    """Office branch; str() is 'officeBranchId: name'."""
    def __str__(self):
        return ': '.join((self.officeBranchId, self.name))

class OfficeLevel(VotesmartApiObject):
    """Office level; str() is 'officeLevelId: name'."""
    def __str__(self):
        return ': '.join((self.officeLevelId, self.name))

class Office(VotesmartApiObject):
    """Office; str() is its name."""
    def __str__(self):
        return self.name

class Category(VotesmartApiObject):
    """Rating category; str() is 'categoryId: name'."""
    def __str__(self):
        return ': '.join((self.categoryId, self.name))

class Sig(VotesmartApiObject):
    """Special-interest group summary; str() is 'sigId: name'."""
    def __str__(self):
        return ': '.join((self.sigId, self.name))

class SigDetail(VotesmartApiObject):
    """Special-interest group detail; str() is its name."""
    def __str__(self):
        return self.name

class Rating(VotesmartApiObject):
    """One group's rating of a candidate; str() is the rating text."""
    def __str__(self):
        return self.ratingText

class State(VotesmartApiObject):
    """State summary; str() is 'stateId name'."""
    def __str__(self):
        return ' '.join((self.stateId, self.name))

class StateDetail(VotesmartApiObject):
    """State detail; str() is 'stateId name'."""
    def __str__(self):
        return ' '.join((self.stateId, self.name))

class BillSponsor(VotesmartApiObject):
    """Bill sponsor; str() is the sponsor's name."""
    def __str__(self):
        return self.name

class BillAction(VotesmartApiObject):
    """Bill action; str() is 'statusDate - stage'."""
    def __str__(self):
        return ' - '.join((self.statusDate, self.stage))

class BillAmendment(VotesmartApiObject):
    """Bill amendment; str() is its title."""
    def __str__(self):
        return self.title
class BillDetail(VotesmartApiObject):
    """Bill detail; sponsors/actions/amendments are lifted into object lists."""

    def __init__(self, d):
        # Fix: pop with a default so payloads missing a section don't raise
        # KeyError; the original only truth-guarded 'amendments' and still
        # required every key to be present.
        sponsors = d.pop('sponsors', None)
        actions = d.pop('actions', None)
        amendments = d.pop('amendments', None)  # 'ammendments' -- sic in API
        if sponsors:
            self.sponsors = _result_to_obj(BillSponsor, sponsors['sponsor'])
        if actions:
            self.actions = _result_to_obj(BillAction, actions['action'])
        if amendments:
            self.amendments = _result_to_obj(BillAmendment, amendments['amendment'])
        self.__dict__.update(d)
class BillActionDetail(VotesmartApiObject):
    """Bill action detail; str() is the bill's official title."""
    def __str__(self):
        return self.officialTitle

class Bill(VotesmartApiObject):
    """Bill summary; str() is 'billNumber title'."""
    def __str__(self):
        return ' '.join((self.billNumber, self.title))

class Vote(VotesmartApiObject):
    """A single official's vote; str() is 'candidateName: action'."""
    def __str__(self):
        return ': '.join((self.candidateName, self.action))

class Veto(VotesmartApiObject):
    """A veto; str() is 'billNumber billTitle'."""
    def __str__(self):
        return ' '.join((self.billNumber, self.billTitle))
def _result_to_obj(cls, result):
if isinstance(result, dict):
return [cls(result)]
else:
# the if o predicate is important, sometimes they return empty strings
return [cls(o) for o in result if o]
class votesmart(object):
apikey = None
@staticmethod
def _apicall(func, params):
if votesmart.apikey is None:
raise VotesmartApiError('Missing Project Vote Smart apikey')
params = dict([(k,v) for (k,v) in params.iteritems() if v])
url = 'http://api.votesmart.org/%s?o=JSON&key=%s&%s' % (func,
votesmart.apikey, urllib.urlencode(params))
try:
response = urllib2.urlopen(url).read()
obj = json.loads(response)
if 'error' in obj:
raise VotesmartApiError(obj['error']['errorMessage'])
else:
return obj
except urllib2.HTTPError, e:
raise VotesmartApiError(e)
except ValueError, e:
raise VotesmartApiError('Invalid Response')
class address(object):
    """Wrappers for the Address.* API methods."""

    @staticmethod
    def getCampaign(candidateId):
        resp = votesmart._apicall('Address.getCampaign',
                                  dict(candidateId=candidateId))
        return _result_to_obj(Address, resp['address']['office'])

    @staticmethod
    def getCampaignWebAddress(candidateId):
        resp = votesmart._apicall('Address.getCampaignWebAddress',
                                  dict(candidateId=candidateId))
        return _result_to_obj(WebAddress, resp['webaddress']['address'])

    @staticmethod
    def getCampaignByElection(electionId):
        resp = votesmart._apicall('Address.getCampaignByElection',
                                  dict(electionId=electionId))
        return _result_to_obj(Address, resp['address']['office'])

    @staticmethod
    def getOffice(candidateId):
        resp = votesmart._apicall('Address.getOffice',
                                  dict(candidateId=candidateId))
        return _result_to_obj(Address, resp['address']['office'])

    @staticmethod
    def getOfficeWebAddress(candidateId):
        resp = votesmart._apicall('Address.getOfficeWebAddress',
                                  dict(candidateId=candidateId))
        return _result_to_obj(WebAddress, resp['webaddress']['address'])

    # NOTE: getOfficeByOfficeState was commented out in the original; kept as-is.
    #@staticmethod
    #def getOfficeByOfficeState(officeId, stateId=None):
    #    params = {'officeId': officeId, 'stateId': stateId}
    #    result = votesmart._apicall('Address.getOfficeByOfficeState', params)
    #    return _result_to_obj(Address, result['address']['office'])
class candidatebio(object):
    """Wrappers for the CandidateBio.* API methods."""

    @staticmethod
    def getBio(candidateId):
        resp = votesmart._apicall('CandidateBio.getBio',
                                  dict(candidateId=candidateId))
        return Bio(resp['bio'])

    @staticmethod
    def getAddlBio(candidateId):
        resp = votesmart._apicall('CandidateBio.getAddlBio',
                                  dict(candidateId=candidateId))
        return _result_to_obj(AddlBio, resp['addlBio']['additional']['item'])
class candidates(object):
    """Wrappers for the Candidates.* API methods."""

    @staticmethod
    def getByOfficeState(officeId, stateId=None, electionYear=None):
        resp = votesmart._apicall('Candidates.getByOfficeState',
                                  dict(officeId=officeId, stateId=stateId,
                                       electionYear=electionYear))
        return _result_to_obj(Candidate, resp['candidateList']['candidate'])

    @staticmethod
    def getByOfficeTypeState(officeTypeId, stateId=None, electionYear=None):
        resp = votesmart._apicall('Candidates.getByOfficeTypeState',
                                  dict(officeTypeId=officeTypeId,
                                       stateId=stateId,
                                       electionYear=electionYear))
        return _result_to_obj(Candidate, resp['candidateList']['candidate'])

    @staticmethod
    def getByLastname(lastName, electionYear=None):
        resp = votesmart._apicall('Candidates.getByLastname',
                                  dict(lastName=lastName,
                                       electionYear=electionYear))
        return _result_to_obj(Candidate, resp['candidateList']['candidate'])

    @staticmethod
    def getByLevenstein(lastName, electionYear=None):
        resp = votesmart._apicall('Candidates.getByLevenstein',
                                  dict(lastName=lastName,
                                       electionYear=electionYear))
        return _result_to_obj(Candidate, resp['candidateList']['candidate'])

    @staticmethod
    def getByElection(electionId):
        resp = votesmart._apicall('Candidates.getByElection',
                                  dict(electionId=electionId))
        return _result_to_obj(Candidate, resp['candidateList']['candidate'])

    @staticmethod
    def getByDistrict(districtId, electionYear=None):
        resp = votesmart._apicall('Candidates.getByDistrict',
                                  dict(districtId=districtId,
                                       electionYear=electionYear))
        return _result_to_obj(Candidate, resp['candidateList']['candidate'])

    @staticmethod
    def getByZip(zip5, zip4=None):
        resp = votesmart._apicall('Candidates.getByZip',
                                  dict(zip5=zip5, zip4=zip4))
        return _result_to_obj(Candidate, resp['candidateList']['candidate'])
class committee(object):
    """Wrappers for the Committee.* API methods."""

    @staticmethod
    def getTypes():
        resp = votesmart._apicall('Committee.getTypes', {})
        return _result_to_obj(CommitteeType, resp['committeeTypes']['type'])

    @staticmethod
    def getCommitteesByTypeState(typeId=None, stateId=None):
        resp = votesmart._apicall('Committee.getCommitteesByTypeState',
                                  dict(typeId=typeId, stateId=stateId))
        return _result_to_obj(Committee, resp['committees']['committee'])

    @staticmethod
    def getCommittee(committeeId):
        resp = votesmart._apicall('Committee.getCommittee',
                                  dict(committeeId=committeeId))
        return CommitteeDetail(resp['committee'])

    @staticmethod
    def getCommitteeMembers(committeeId):
        resp = votesmart._apicall('Committee.getCommitteeMembers',
                                  dict(committeeId=committeeId))
        return _result_to_obj(CommitteeMember, resp['committeeMembers']['member'])

class district(object):
    """Wrappers for the District.* API methods."""

    @staticmethod
    def getByOfficeState(officeId, stateId, districtName=None):
        resp = votesmart._apicall('District.getByOfficeState',
                                  dict(officeId=officeId, stateId=stateId,
                                       districtName=districtName))
        return _result_to_obj(District, resp['districtList']['district'])

    @staticmethod
    def getByZip(zip5, zip4=None):
        resp = votesmart._apicall('District.getByZip',
                                  dict(zip5=zip5, zip4=zip4))
        return _result_to_obj(District, resp['districtList']['district'])

class election(object):
    """Wrappers for the Election.* API methods."""

    @staticmethod
    def getElection(electionId):
        resp = votesmart._apicall('Election.getElection',
                                  dict(electionId=electionId))
        return Election(resp['elections']['election'])

    @staticmethod
    def getElectionByYearState(year, stateId=None):
        resp = votesmart._apicall('Election.getElectionByYearState',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Election, resp['elections']['election'])

    @staticmethod
    def getElectionByZip(zip5, zip4=None, year=None):
        resp = votesmart._apicall('Election.getElectionByZip',
                                  dict(zip5=zip5, zip4=zip4, year=year))
        return _result_to_obj(Election, resp['elections']['election'])

    @staticmethod
    def getStageCandidates(electionId, stageId, party=None,
                           districtId=None, stateId=None):
        resp = votesmart._apicall('Election.getStageCandidates',
                                  dict(electionId=electionId, stageId=stageId,
                                       party=party, districtId=districtId,
                                       stateId=stateId))
        return _result_to_obj(Candidate, resp['stageCandidates']['candidate'])

class leadership(object):
    """Wrappers for the Leadership.* API methods."""

    @staticmethod
    def getPositions(stateId=None, officeId=None):
        resp = votesmart._apicall('Leadership.getPositions',
                                  dict(stateId=stateId, officeId=officeId))
        return _result_to_obj(LeadershipPosition, resp['leadership']['position'])

    # NOTE: getCandidates was commented out in the original source; kept as-is.
    #@staticmethod
    #def getCandidates(leadershipId, stateId=None):
    #    params = {'leadershipId':leadershipId, 'stateId':stateId}
    #    result = votesmart._apicall('Leadership.getCandidates', params)
    #    return result['leaders']['leader']
class local(object):
    """Wrappers for the Local.* API methods."""

    @staticmethod
    def getCounties(stateId):
        resp = votesmart._apicall('Local.getCounties', dict(stateId=stateId))
        return _result_to_obj(Locality, resp['counties']['county'])

    @staticmethod
    def getCities(stateId):
        resp = votesmart._apicall('Local.getCities', dict(stateId=stateId))
        return _result_to_obj(Locality, resp['cities']['city'])

    @staticmethod
    def getOfficials(localId):
        resp = votesmart._apicall('Local.getOfficials', dict(localId=localId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

class measure(object):
    """Wrappers for the Measure.* API methods."""

    @staticmethod
    def getMeasuresByYearState(year, stateId):
        resp = votesmart._apicall('Measure.getMeasuresByYearState',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Measure, resp['measures']['measure'])

    @staticmethod
    def getMeasure(measureId):
        resp = votesmart._apicall('Measure.getMeasure',
                                  dict(measureId=measureId))
        return MeasureDetail(resp['measure'])

class npat(object):
    """Wrappers for the Npat.* API methods."""

    @staticmethod
    def getNpat(candidateId):
        resp = votesmart._apicall('Npat.getNpat', dict(candidateId=candidateId))
        return resp['npat']

class office(object):
    """Wrappers for the Office.* API methods."""

    @staticmethod
    def getTypes():
        resp = votesmart._apicall('Office.getTypes', {})
        return _result_to_obj(OfficeType, resp['officeTypes']['type'])

    @staticmethod
    def getBranches():
        resp = votesmart._apicall('Office.getBranches', {})
        return _result_to_obj(OfficeBranch, resp['branches']['branch'])

    @staticmethod
    def getLevels():
        resp = votesmart._apicall('Office.getLevels', {})
        return _result_to_obj(OfficeLevel, resp['levels']['level'])

    @staticmethod
    def getOfficesByType(typeId):
        resp = votesmart._apicall('Office.getOfficesByType',
                                  dict(typeId=typeId))
        return _result_to_obj(Office, resp['offices']['office'])

    @staticmethod
    def getOfficesByLevel(levelId):
        resp = votesmart._apicall('Office.getOfficesByLevel',
                                  dict(levelId=levelId))
        return _result_to_obj(Office, resp['offices']['office'])

    @staticmethod
    def getOfficesByTypeLevel(typeId, levelId):
        resp = votesmart._apicall('Office.getOfficesByTypeLevel',
                                  dict(typeId=typeId, levelId=levelId))
        return _result_to_obj(Office, resp['offices']['office'])

    @staticmethod
    def getOfficesByBranchLevel(branchId, levelId):
        resp = votesmart._apicall('Office.getOfficesByBranchLevel',
                                  dict(branchId=branchId, levelId=levelId))
        return _result_to_obj(Office, resp['offices']['office'])
class officials(object):
    """Wrappers for the Officials.* API methods."""

    @staticmethod
    def getStatewide(stateId=None):
        resp = votesmart._apicall('Officials.getStatewide',
                                  dict(stateId=stateId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByOfficeState(officeId, stateId=None):
        resp = votesmart._apicall('Officials.getByOfficeState',
                                  dict(officeId=officeId, stateId=stateId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByLastname(lastName):
        resp = votesmart._apicall('Officials.getByLastname',
                                  dict(lastName=lastName))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByLevenstein(lastName):
        resp = votesmart._apicall('Officials.getByLevenstein',
                                  dict(lastName=lastName))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByElection(electionId):
        resp = votesmart._apicall('Officials.getByElection',
                                  dict(electionId=electionId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByDistrict(districtId):
        resp = votesmart._apicall('Officials.getByDistrict',
                                  dict(districtId=districtId))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

    @staticmethod
    def getByZip(zip5, zip4=None):
        resp = votesmart._apicall('Officials.getByZip',
                                  dict(zip5=zip5, zip4=zip4))
        return _result_to_obj(Official, resp['candidateList']['candidate'])

class rating(object):
    """Wrappers for the Rating.* API methods."""

    @staticmethod
    def getCategories(stateId=None):
        resp = votesmart._apicall('Rating.getCategories',
                                  dict(stateId=stateId))
        return _result_to_obj(Category, resp['categories']['category'])

    @staticmethod
    def getSigList(categoryId, stateId=None):
        resp = votesmart._apicall('Rating.getSigList',
                                  dict(categoryId=categoryId, stateId=stateId))
        return _result_to_obj(Sig, resp['sigs']['sig'])

    @staticmethod
    def getSig(sigId):
        resp = votesmart._apicall('Rating.getSig', dict(sigId=sigId))
        return SigDetail(resp['sig'])

    @staticmethod
    def getCandidateRating(candidateId, sigId=None):
        # sigId is optional; _apicall drops falsy params from the query.
        resp = votesmart._apicall('Rating.getCandidateRating',
                                  dict(candidateId=candidateId, sigId=sigId))
        return _result_to_obj(Rating, resp['candidateRating']['rating'])
class state(object):
    """Wrappers for the State.* API methods."""

    @staticmethod
    def getStateIDs():
        resp = votesmart._apicall('State.getStateIDs', {})
        return _result_to_obj(State, resp['stateList']['list']['state'])

    @staticmethod
    def getState(stateId):
        resp = votesmart._apicall('State.getState', dict(stateId=stateId))
        return StateDetail(resp['state']['details'])

class votes(object):
    """Wrappers for the Votes.* API methods."""

    @staticmethod
    def getCategories(year, stateId=None):
        resp = votesmart._apicall('Votes.getCategories',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Category, resp['categories']['category'])

    @staticmethod
    def getBill(billId):
        resp = votesmart._apicall('Votes.getBill', dict(billId=billId))
        return BillDetail(resp['bill'])

    @staticmethod
    def getBillAction(actionId):
        resp = votesmart._apicall('Votes.getBillAction',
                                  dict(actionId=actionId))
        return BillActionDetail(resp['action'])

    @staticmethod
    def getBillActionVotes(actionId):
        resp = votesmart._apicall('Votes.getBillActionVotes',
                                  dict(actionId=actionId))
        return _result_to_obj(Vote, resp['votes']['vote'])

    @staticmethod
    def getBillActionVoteByOfficial(actionId, candidateId):
        resp = votesmart._apicall('Votes.getBillActionVoteByOfficial',
                                  dict(actionId=actionId,
                                       candidateId=candidateId))
        return Vote(resp['votes']['vote'])

    @staticmethod
    def getByBillNumber(billNumber):
        resp = votesmart._apicall('Votes.getByBillNumber',
                                  dict(billNumber=billNumber))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByCategoryYearState(categoryId, year, stateId=None):
        resp = votesmart._apicall('Votes.getBillsByCategoryYearState',
                                  dict(categoryId=categoryId, year=year,
                                       stateId=stateId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByYearState(year, stateId=None):
        resp = votesmart._apicall('Votes.getBillsByYearState',
                                  dict(year=year, stateId=stateId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByOfficialYearOffice(candidateId, year, officeId=None):
        resp = votesmart._apicall('Votes.getBillsByOfficialYearOffice',
                                  dict(candidateId=candidateId, year=year,
                                       officeId=officeId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByCandidateCategoryOffice(candidateId, categoryId, officeId=None):
        resp = votesmart._apicall('Votes.getBillsByCandidateCategoryOffice',
                                  dict(candidateId=candidateId,
                                       categoryId=categoryId,
                                       officeId=officeId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsBySponsorYear(candidateId, year):
        resp = votesmart._apicall('Votes.getBillsBySponsorYear',
                                  dict(candidateId=candidateId, year=year))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsBySponsorCategory(candidateId, categoryId):
        resp = votesmart._apicall('Votes.getBillsBySponsorCategory',
                                  dict(candidateId=candidateId,
                                       categoryId=categoryId))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getBillsByStateRecent(stateId=None, amount=None):
        resp = votesmart._apicall('Votes.getBillsByStateRecent',
                                  dict(stateId=stateId, amount=amount))
        return _result_to_obj(Bill, resp['bills']['bill'])

    @staticmethod
    def getVetoes(candidateId):
        resp = votesmart._apicall('Votes.getVetoes',
                                  dict(candidateId=candidateId))
        return _result_to_obj(Veto, resp['vetoes']['veto'])
|
bluedynamics/bda.ldap | 2 | setup.py | from setuptools import setup, find_packages
import sys, os
version = '2.0'
shortdesc = "LDAP convenience library"
longdesc = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
longdesc += open(os.path.join(os.path.dirname(__file__), 'LICENSE.rst')).read()

setup(name='bda.ldap',
      version=version,
      description=shortdesc,
      long_description=longdesc,
      classifiers=[
          'Development Status :: 4 - Beta',
          'Environment :: Web Environment',
          'Framework :: Zope2',
          'License :: OSI Approved :: GNU General Public License (GPL)',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
      ], # Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
      keywords='',
      author='BlueDynamics Alliance',
      author_email='dev@bluedynamics.com',
      # Fix: the old svn.plone.org repository is gone; project moved to GitHub.
      url='https://github.com/bluedynamics/bda.ldap',
      license='General Public Licence',
      packages=find_packages('src'),
      package_dir={'': 'src'},
      namespace_packages=['bda'],
      include_package_data=True,
      zip_safe=False,
      install_requires=[
          'setuptools',
          'python-ldap',
          'zodict',
          'bda.cache',
      ],
      extras_require={
          'test': [
              'interlude',
              'plone.testing',
              'unittest2',
              'zope.configuration',
              'zope.testing',
          ]
      },
      entry_points="""
      [console_scripts]
      testldap = bda.ldap.main:slapd
      """,
      )
| from setuptools import setup, find_packages
import sys, os
version = '2.0'
shortdesc = "LDAP convenience library"
longdesc = open(os.path.join(os.path.dirname(__file__), 'README.rst')).read()
longdesc += open(os.path.join(os.path.dirname(__file__), 'LICENSE.rst')).read()
setup(name='bda.ldap',
version=version,
description=shortdesc,
long_description=longdesc,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Zope2',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
], # Get strings from http://www.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='BlueDynamics Alliance',
author_email='dev@bluedynamics.com',
url='https://github.com/bluedynamics/bda.ldap',
license='General Public Licence',
packages=find_packages('src'),
package_dir = {'': 'src'},
namespace_packages=['bda'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
'python-ldap',
'zodict',
'bda.cache',
],
extras_require={
'test': [
'interlude',
'plone.testing',
'unittest2',
'zope.configuration',
'zope.testing',
]
},
entry_points="""
[console_scripts]
testldap = bda.ldap.main:slapd
""",
)
|
civascu/hue | 1 | apps/about/setup.py | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
setup(
name = "about",
version = "0.9",
author = "Hue",
url = 'http://github.com/cloudera/hue',
description = "Displays 'about' info for easier trouble-shooting",
packages = find_packages('src'),
package_dir = {'': 'src'},
install_requires = ['setuptools', 'desktop'],
entry_points = { 'desktop.sdk.application': 'about=about' },
)
| # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
from hueversion import VERSION
setup(
name = "about",
version = VERSION,
author = "Hue",
url = 'http://github.com/cloudera/hue',
description = "Displays 'about' info for easier trouble-shooting",
packages = find_packages('src'),
package_dir = {'': 'src'},
install_requires = ['setuptools', 'desktop'],
entry_points = { 'desktop.sdk.application': 'about=about' },
)
|
mtarnovan/settingslogic | 1 | lib/settingslogic.rb | require "yaml"
require "erb"
# A simple settings solution using a YAML file. See README for more information.
class Settingslogic < Hash
class MissingSetting < StandardError; end
class << self
def source(value = nil)
if value.nil?
@source
else
@source = value
end
end
def namespace(value = nil)
if value.nil?
@namespace
else
@namespace = value
end
end
def default_namespace(value = nil)
if value.nil?
@default_namespace || 'defaults'
else
@default_namespace = value
end
end
def key_by_path(key_path, separator = ".")
# Settings.get_nested_key('some.nested.setting')
tmp = instance
key_path.split(separator).each do |k|
if tmp[k].respond_to?("[]") && !tmp[k].nil?
tmp = tmp[k]
else
return nil
end
end
tmp
end
def [](key)
# Setting.key.value or Setting[:key][:value] or Setting['key']['value']
fetch(key.to_s,nil)
end
def []=(key,val)
# Setting[:key] = 'value' for dynamic settings
store(key.to_s,val)
end
def load!
instance
true
end
def reload!
@instance = nil
load!
end
private
def instance
@instance ||= new
end
def method_missing(name, *args, &block)
instance.send(name, *args, &block)
end
end
# Initializes a new settings object. You can initialize an object in any of the following ways:
#
# Settings.new(:application) # will look for config/application.yml
# Settings.new("application.yaml") # will look for application.yaml
# Settings.new("/var/configs/application.yml") # will look for /var/configs/application.yml
# Settings.new(:config1 => 1, :config2 => 2)
#
# Basically if you pass a symbol it will look for that file in the configs directory of your rails app,
# if you are using this in rails. If you pass a string it should be an absolute path to your settings file.
# Then you can pass a hash, and it just allows you to access the hash via methods.
def initialize(hash_or_file = self.class.source, section = nil)
case hash_or_file
when Hash
self.replace hash_or_file
else
hash = YAML.load(ERB.new(File.read(hash_or_file)).result).to_hash
default_hash = hash[self.class.default_namespace] || {}
hash = hash[self.class.namespace] if self.class.namespace
self.replace default_hash.deep_merge(hash)
end
@section = section || hash_or_file # so end of error says "in application.yml"
create_accessors!
end
# Called for dynamically-defined keys, and also the first key deferenced at the top-level, if load! is not used.
# Otherwise, create_accessors! (called by new) will have created actual methods for each key.
def method_missing(key, *args, &block)
begin
value = fetch(key.to_s)
rescue IndexError
raise MissingSetting, "Missing setting '#{key}' in #{@section}"
end
value.is_a?(Hash) ? self.class.new(value, "'#{key}' section in #{@section}") : value
end
private
# This handles naming collisions with Sinatra/Vlad/Capistrano. Since these use a set()
# helper that defines methods in Object, ANY method_missing ANYWHERE picks up the Vlad/Sinatra
# settings! So settings.deploy_to title actually calls Object.deploy_to (from set :deploy_to, "host"),
# rather than the app_yml['deploy_to'] hash. Jeezus.
def create_accessors!
self.each do |key,val|
# Use instance_eval/class_eval because they're actually more efficient than define_method{}
# http://stackoverflow.com/questions/185947/ruby-definemethod-vs-def
# http://bmorearty.wordpress.com/2009/01/09/fun-with-rubys-instance_eval-and-class_eval/
self.class.class_eval <<-EndEval
def #{key}
return @#{key} if @#{key} # cache (performance)
value = fetch('#{key}')
@#{key} = value.is_a?(Hash) ? self.class.new(value, "'#{key}' section in #{@section}") : value
end
EndEval
end
end
end
| require "yaml"
require "erb"
# A simple settings solution using a YAML file. See README for more information.
class Settingslogic < Hash
class MissingSetting < StandardError; end
class << self
def source(value = nil)
if value.nil?
@source
else
@source = value
end
end
def namespace(value = nil)
if value.nil?
@namespace
else
@namespace = value
end
end
def default_namespace(value = nil)
if value.nil?
@default_namespace || 'defaults'
else
@default_namespace = value
end
end
def key_by_path(key_path, separator = ".")
# Settings.get_nested_key('some.nested.setting')
tmp = instance
key_path.split(separator).each do |k|
if tmp[k].respond_to?("[]") && !tmp[k].nil?
tmp = tmp[k]
else
return nil
end
end
tmp
end
def [](key)
# Setting.key.value or Setting[:key][:value] or Setting['key']['value']
fetch(key.to_s,nil)
end
def []=(key,val)
# Setting[:key] = 'value' for dynamic settings
store(key.to_s,val)
end
def load!
instance
true
end
def reload!
@instance = nil
load!
end
private
def instance
@instance ||= new
end
def method_missing(name, *args, &block)
instance.send(name, *args, &block)
end
end
# Initializes a new settings object. You can initialize an object in any of the following ways:
#
# Settings.new(:application) # will look for config/application.yml
# Settings.new("application.yaml") # will look for application.yaml
# Settings.new("/var/configs/application.yml") # will look for /var/configs/application.yml
# Settings.new(:config1 => 1, :config2 => 2)
#
# Basically if you pass a symbol it will look for that file in the configs directory of your rails app,
# if you are using this in rails. If you pass a string it should be an absolute path to your settings file.
# Then you can pass a hash, and it just allows you to access the hash via methods.
def initialize(hash_or_file = self.class.source, section = nil)
case hash_or_file
when Hash
self.replace hash_or_file
else
hash = YAML.load(ERB.new(File.read(hash_or_file)).result).to_hash
default_hash = hash[self.class.default_namespace] || {}
hash = hash[self.class.namespace] if self.class.namespace
self.replace default_hash.deep_merge(hash)
end
@section = section || hash_or_file # so end of error says "in application.yml"
create_accessors!
end
def [](key)
# @settings.key.value or @settings[:key][:value] or @settings['key']['value']
super(key.to_s)
end
# Called for dynamically-defined keys, and also the first key deferenced at the top-level, if load! is not used.
# Otherwise, create_accessors! (called by new) will have created actual methods for each key.
def method_missing(key, *args, &block)
begin
value = fetch(key.to_s)
rescue IndexError
raise MissingSetting, "Missing setting '#{key}' in #{@section}"
end
value.is_a?(Hash) ? self.class.new(value, "'#{key}' section in #{@section}") : value
end
private
# This handles naming collisions with Sinatra/Vlad/Capistrano. Since these use a set()
# helper that defines methods in Object, ANY method_missing ANYWHERE picks up the Vlad/Sinatra
# settings! So settings.deploy_to title actually calls Object.deploy_to (from set :deploy_to, "host"),
# rather than the app_yml['deploy_to'] hash. Jeezus.
def create_accessors!
self.each do |key,val|
unless key.to_s.first.to_i.to_s == key.to_s.first # key.first.digit?
# Use instance_eval/class_eval because they're actually more efficient than define_method{}
# http://stackoverflow.com/questions/185947/ruby-definemethod-vs-def
# http://bmorearty.wordpress.com/2009/01/09/fun-with-rubys-instance_eval-and-class_eval/
self.class.class_eval <<-EndEval
def #{key}
return @#{key} if @#{key} # cache (performance)
value = fetch('#{key}')
@#{key} = value.is_a?(Hash) ? self.class.new(value, "'#{key}' section in #{@section}") : value
end
EndEval
end
end
end
end
|
jehiah/json2csv | 32 | main_test.go | package main
import (
"bytes"
"encoding/csv"
"github.com/bmizerany/assert"
"io/ioutil"
"log"
"os"
"testing"
)
func TestGetTopic(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": 1, "b": "asdf\n"}
{"a" : null}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a", "c"}, false)
output := buf.String()
assert.Equal(t, output, "1,\n,\n")
}
func TestGetLargeInt(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": 1356998399}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a"}, false)
output := buf.String()
assert.Equal(t, output, "1356998399\n")
}
func TestGetFloat(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": 1356998399.32}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a"}, false)
output := buf.String()
assert.Equal(t, output, "1356998399.320000\n")
}
func TestGetNested(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": {"b": "asdf"}}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a.b"}, false)
output := buf.String()
assert.Equal(t, output, "asdf\n")
}
func TestHeader(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": "b"}
{"a": "c"}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a"}, true)
output := buf.String()
assert.Equal(t, output, "a\nb\nc\n")
}
| package main
import (
"bytes"
"encoding/csv"
"io/ioutil"
"log"
"os"
"testing"
"github.com/bmizerany/assert"
)
func TestGetTopic(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": 1, "b": "asdf\n"}
{"a" : null}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a", "c"}, false)
output := buf.String()
assert.Equal(t, output, "1,\n,\n")
}
func TestGetLargeInt(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": 1356998399}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a"}, false)
output := buf.String()
assert.Equal(t, output, "1356998399\n")
}
func TestGetFloat(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": 1356998399.32}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a"}, false)
output := buf.String()
assert.Equal(t, output, "1356998399.320000\n")
}
func TestGetNested(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": {"b": "asdf"}}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a.b"}, false)
output := buf.String()
assert.Equal(t, output, "asdf\n")
}
func TestHeader(t *testing.T) {
log.SetOutput(ioutil.Discard)
defer log.SetOutput(os.Stdout)
reader := bytes.NewBufferString(`{"a": "b"}
{"a": "c"}`)
buf := bytes.NewBuffer([]byte{})
writer := csv.NewWriter(buf)
json2csv(reader, writer, []string{"a"}, true)
output := buf.String()
assert.Equal(t, output, "a\nb\nc\n")
}
|
reid/upstage | 2 | build/upstage-min.js | YUI.add("upstage-blank",function(h){var a=h.Upstage,g="upstage-curtain",b="blank:on",e="blank:off",d,f=false;a.on("start",function(){var i=h.Node.create("<div id='"+g+"'></div>");i.setStyles({background:"#000",position:"absolute",top:0,left:0,width:"100%",height:"100%",zIndex:"100",display:"none"});h.one("body").append(i);d=i;d.on("click",h.bind(a.fire,a,"blank:off"));});a.on("blank",function(){if(!f){a.fire(b);}else{a.fire(e);}});function c(){return h.error("Curtain not found.");}a.on(b,function(){if(!d){return c();}d.setStyle("display","block");f=true;});a.on(e,function(){if(!d){return c();}d.setStyle("display","none");f=false;});},"@VERSION@",{requires:["upstage-slideshow","node"]});YUI.add("upstage-controls",function(e){var a=e.Upstage,c=e.Node.create,d="boundingBox";function b(){b.superclass.constructor.apply(this,arguments);}b.NAME="controls";b.ATTRS={height:{value:37},total:{value:e.all(".slide").size(),readOnly:true},footer:{value:""},slide:{value:1}};b.HTML_PARSER={footer:function(f){return f.one(".credit").get("innerHTML");}};e.extend(b,e.Widget,{renderUI:function(){var g=this.get("contentBox");if(!g){return e.error("controls: contentBox is missing");}var f=c("<div class='nav'></div>");f.appendChild(c("<a class='prev' href='#'>←</a>"));f.appendChild(c("<a class='currentSlide'>0/0</a>"));f.appendChild(c("<a class='next' href='#'>→</a>"));g.appendChild(f);g.setStyles({"height":0,"display":"block"});g.transition({duration:0.2,easing:"ease-out",height:this.get("height")+"px"});},bindUI:function(){var f=this.get(d);f.one(".prev").on("click",e.bind(a.fire,a,"warp",-1));f.one(".next").on("click",e.bind(a.fire,a,"warp",1));a.on("navigate",e.bind("set",this,"slide"));a.on("navigate",e.bind("syncUI",this));},syncUI:function(f){var g=this.get(d);g.one(".currentSlide").setContent(this.get("slide")+"/"+this.get("total"));g.one(".credit").setContent(this.get("footer"));}});a.on("start",function(){new 
b({srcNode:"#ft"}).render();});},"@VERSION@",{requires:["upstage-slideshow","widget","transition","node"]});YUI.add("upstage-gesture",function(h){var b=h.Upstage;var e=10;var a=500;function g(i,j,k){b.publish(i,{emitFacade:true,defaultFn:h.bind(b.fire,b,j,k)});}g("ui:tap","warp",1);g("ui:heldtap","position",1);g("ui:swipeleft","warp",1);g("ui:swiperight","warp",-1);var f=h.bind(b.fire,b);function c(o,k){var n=o.getData("gestureX"),i=k.pageX;if((n-i)>e){f("ui:swipeleft",o);}else{if((i-n)>e){f("ui:swiperight",o);}else{var m=o.getData("gestureDate").getTime(),j=(new Date).getTime(),l=j-m;if(l>a){f("ui:heldtap",l);}else{f("ui:tap",l);}}}}function d(i){switch(i.target.get("tagName").toUpperCase()){case"A":case"INPUT":case"BUTTON":case"VIDEO":case"OBJECT":return;}i.preventDefault();var j=i.currentTarget;j.once("selectstart",function(k){k.preventDefault();});j.setData("gestureX",i.pageX);j.setData("gestureDate",new Date);j.once("gesturemoveend",h.bind(c,this,j));}b.on("start",function(){h.one("body").delegate("gesturemovestart",d,".slide");});},"@VERSION@",{requires:["upstage-slideshow","event-move"]});YUI.add("upstage-keyboard",function(c){var a=c.Upstage;function b(d){if(d.keyCode!=66){a.fire("blank:off");}switch(d.keyCode){case 32:case 34:case 39:case 40:a.fire("warp",1);break;case 33:case 37:case 38:a.fire("warp",-1);break;case 36:a.fire("position",1);break;case 35:a.fire("position",9999);break;case 66:a.fire("blank");break;}}a.on("start",function(){c.on("key",b,document,"down:");});},"@VERSION@",{requires:["upstage-slideshow","node","event"]});YUI.add("upstage-l10n",function(d){d.namespace("UpstageL10N");var b=d.UpstageL10N,c=d.Intl,a="upstage-l10n";b.setActiveLang=function(e){return c.setLang(a,e);};b.getActiveLang=function(){return c.getLang(a);};b.get=function(e,f){return c.get(a,e,f);};b.add=function(g,e){var f=c.add(a,g,e);if(!b.getActiveLang()){b.setActiveLang(g);}return 
f;};},"@VERSION@",{requires:["intl"],lang:["en"]});YUI.add("upstage-permalink",function(f){var b=f.Upstage,c=f.Selection.getText;f.HistoryHash.hashPrefix="!";var d=new f.HistoryHash,e,g;b.on("start",function(){e=f.one("title");g=c(e);b.fire("position",d.get("slide")||1);});b.on("navigate",function(h){d.addValue("slide",h);});b.on("transition",function(m){var l=m.details[1],i=l.getData("slide"),h=b.L10N.get("Slide"),j;if(i==1){j=g;}else{var k=l.one("h1");if(k){j=c(k);}if(!j){j=h+" "+l.getData("slide");}j=g+": "+j;}e.setContent(j);});function a(h){if(h&&h.newVal){h=h.newVal;}else{h=1;}b.fire("position",h);}d.on("slideChange",a);d.on("slideRemove",a);},"@VERSION@",{requires:["upstage-slideshow","node","history","selection"]});YUI.add("upstage-slideshow",function(b){b.namespace("Upstage");var a=b.Upstage;b.augment(a,b.EventTarget);a.on("start",function(){b.mix(a,{L10N:b.UpstageL10N});b.all(".slide").each(function(d,c){c++;d.set("id","slide"+c);d.setData("slide",c);});a.fire("position",1);});a.on("warp",function(d,e){if(e&&e.halt){e.halt();}var c=a.currentSlide+parseInt(d,10);a.fire("position",c);});a.on("position",function(c){c=Math.max(1,c);c=Math.min(c,b.all(".slide").size());var d=a.currentSlide||1;a.currentSlide=parseInt(c,10);if(d!=c){a.fire("navigate",c);a.fire("transition",b.one("#slide"+d),b.one("#slide"+c));}});a.publish("transition",{emitFacade:true,defaultFn:function(e){var d=e.details[0],c=e.details[1];d.setStyle("display","none");c.setStyle("display","block");}});},"@VERSION@",{requires:["upstage-l10n","oop","node","event-custom"]});YUI.add("upstage-transition-fade",function(d){var b=false;var a=new d.AsyncQueue;function c(){b=false;a.stop();}d.Upstage.on("transition",function(g){if(b){d.all(".slide").setStyles({"opacity":"1","display":"none"});return c();}g.preventDefault();b=true;var 
f=g.details[0],e=g.details[1];a.add(function(){a.pause();f.transition({duration:0.2,easing:"ease-out",opacity:0},d.bind(a.run,a));});a.add(function(){a.pause();e.setStyles({"opacity":"0","display":"block"});e.transition({duration:0.2,easing:"ease-out",opacity:1},d.bind(a.run,a));});a.add(function(){f.setStyles({"display":"none","opacity":"1"});
});a.add(c);a.run();});},"@VERSION@",{requires:["upstage-slideshow","transition","async-queue"]});YUI.add("upstage",function(a){},"@VERSION@",{requires:["upstage-slideshow","upstage-controls","upstage-keyboard","upstage-blank","upstage-gesture","upstage-permalink"]}); | YUI.add("upstage-controls",function(e){var a=e.Upstage,c=e.Node.create,d="boundingBox";function b(){b.superclass.constructor.apply(this,arguments);}b.NAME="controls";b.ATTRS={height:{value:37},total:{value:e.all(".slide").size(),readOnly:true},footer:{value:""},slide:{value:1}};b.HTML_PARSER={footer:function(f){return f.one(".credit").get("innerHTML");}};e.extend(b,e.Widget,{renderUI:function(){var g=this.get("contentBox");if(!g){return e.error("controls: contentBox is missing");}var f=c("<div class='nav'></div>");f.appendChild(c("<a class='prev' href='#'>←</a>"));f.appendChild(c("<a class='currentSlide'>0/0</a>"));f.appendChild(c("<a class='next' href='#'>→</a>"));g.appendChild(f);g.setStyles({"height":0,"display":"block"});g.transition({duration:0.2,easing:"ease-out",height:this.get("height")+"px"});},bindUI:function(){var f=this.get(d);f.one(".prev").on("click",e.bind(a.fire,a,"warp",-1));f.one(".next").on("click",e.bind(a.fire,a,"warp",1));a.on("navigate",e.bind("set",this,"slide"));a.on("navigate",e.bind("syncUI",this));},syncUI:function(f){var g=this.get(d);g.one(".currentSlide").setContent(this.get("slide")+"/"+this.get("total"));g.one(".credit").setContent(this.get("footer"));}});a.on("start",function(){new b({srcNode:"#ft"}).render();});},"@VERSION@",{requires:["upstage-slideshow","widget","transition","node"]});YUI.add("upstage-gesture",function(h){var b=h.Upstage;var e=10;var a=500;function g(i,j,k){b.publish(i,{emitFacade:true,defaultFn:h.bind(b.fire,b,j,k)});}g("ui:tap","warp",1);g("ui:heldtap","position",1);g("ui:swipeleft","warp",1);g("ui:swiperight","warp",-1);var f=h.bind(b.fire,b);function c(o,k){var 
n=o.getData("gestureX"),i=k.pageX;if((n-i)>e){f("ui:swipeleft",o);}else{if((i-n)>e){f("ui:swiperight",o);}else{var m=o.getData("gestureDate").getTime(),j=(new Date).getTime(),l=j-m;if(l>a){f("ui:heldtap",l);}else{f("ui:tap",l);}}}}function d(i){switch(i.target.get("tagName").toUpperCase()){case"A":case"INPUT":case"BUTTON":case"VIDEO":case"OBJECT":return;}i.preventDefault();var j=i.currentTarget;j.once("selectstart",function(k){k.preventDefault();});j.setData("gestureX",i.pageX);j.setData("gestureDate",new Date);j.once("gesturemoveend",h.bind(c,this,j));}b.on("start",function(){h.one("body").delegate("gesturemovestart",d,".slide");});},"@VERSION@",{requires:["upstage-slideshow","event-move"]});YUI.add("upstage-keyboard",function(c){var a=c.Upstage;function b(d){if(d.keyCode!=66){a.fire("blank:off");}switch(d.keyCode){case 32:case 34:case 39:case 40:a.fire("warp",1);break;case 33:case 37:case 38:a.fire("warp",-1);break;case 36:a.fire("position",1);break;case 35:a.fire("position",9999);break;case 66:a.fire("blank");break;}}a.on("start",function(){c.on("key",b,document,"down:");});},"@VERSION@",{requires:["upstage-slideshow","node","event"]});YUI.add("upstage-l10n",function(d){d.namespace("UpstageL10N");var b=d.UpstageL10N,c=d.Intl,a="upstage-l10n";b.setActiveLang=function(e){return c.setLang(a,e);};b.getActiveLang=function(){return c.getLang(a);};b.get=function(e,f){return c.get(a,e,f);};b.add=function(g,e){var f=c.add(a,g,e);if(!b.getActiveLang()){b.setActiveLang(g);}return f;};},"@VERSION@",{lang:["en"],requires:["intl"]});YUI.add("upstage-permalink",function(f){var b=f.Upstage,c=f.Selection.getText;f.HistoryHash.hashPrefix="!";var d=new f.HistoryHash,e,g;b.on("start",function(){e=f.one("title");g=c(e);b.fire("position",d.get("slide")||1);});b.on("navigate",function(h){d.addValue("slide",h);});b.on("transition",function(m){var l=m.details[1],i=l.getData("slide"),h=b.L10N.get("Slide"),j;if(i==1){j=g;}else{var k=l.one("h1");if(k){j=c(k);}if(!j){j=h+" 
"+l.getData("slide");}j=g+": "+j;}e.setContent(j);});function a(h){if(h&&h.newVal){h=h.newVal;}else{h=1;}b.fire("position",h);}d.on("slideChange",a);d.on("slideRemove",a);},"@VERSION@",{requires:["upstage-slideshow","node","history","selection"]});YUI.add("upstage-slideshow",function(b){b.namespace("Upstage");var a=b.Upstage;b.augment(a,b.EventTarget);a.on("start",function(){b.mix(a,{L10N:b.UpstageL10N});b.all(".slide").each(function(d,c){c++;d.set("id","slide"+c);d.setData("slide",c);});a.fire("position",1);});a.on("warp",function(d,e){if(e&&e.halt){e.halt();}var c=a.currentSlide+parseInt(d,10);a.fire("position",c);});a.on("position",function(c){c=Math.max(1,c);c=Math.min(c,b.all(".slide").size());var d=a.currentSlide||1;a.currentSlide=parseInt(c,10);if(d!=c){a.fire("navigate",c);a.fire("transition",b.one("#slide"+d),b.one("#slide"+c));}});a.publish("transition",{emitFacade:true,defaultFn:function(e){var d=e.details[0],c=e.details[1];d.setStyle("display","none");c.setStyle("display","block");}});},"@VERSION@",{requires:["upstage-l10n","oop","node","event-custom"]});YUI.add("upstage-transition-fade",function(d){var b=false;var a=new d.AsyncQueue;function c(){b=false;a.stop();}d.Upstage.on("transition",function(g){if(b){d.all(".slide").setStyles({"opacity":"1","display":"none"});return c();}g.preventDefault();b=true;var f=g.details[0],e=g.details[1];a.add(function(){a.pause();f.transition({duration:0.2,easing:"ease-out",opacity:0},d.bind(a.run,a));});a.add(function(){a.pause();e.setStyles({"opacity":"0","display":"block"});e.transition({duration:0.2,easing:"ease-out",opacity:1},d.bind(a.run,a));});a.add(function(){f.setStyles({"display":"none","opacity":"1"});});a.add(c);a.run();});},"@VERSION@",{requires:["upstage-slideshow","transition","async-queue"]});YUI.add("upstage",function(a){},"@VERSION@",{requires:["upstage-slideshow","upstage-controls","upstage-keyboard","upstage-blank","upstage-gesture","upstage-permalink"]}); |
jgehring/rsvndump | 10 | web/index.html | <!DOCTYPE html>
<html>
<head>
<meta content="text/html; charset=ISO-8859-1" http-equiv="content-type">
<meta name="keywords" content="rsvndump, rsvndump homepage, Subversion, dump, remote, backup">
<link href="stylesheet.css" rel="stylesheet" type="text/css">
<title>rsvndump - remote Subversion repository dump</title>
</head>
<body>
<h2>rsvndump - remote Subversion repository dump</h2>
<div class="body">
<div class="logos">
<table><tr>
<td><a href="http://flattr.com/thing/344002/rsvndump">
<img src="flattrbutton.png" alt="Flattr this" border=0/>
</a></td>
<td>
<span style="padding-left:110px"></span>
</td>
</tr></table>
</div>
<!-- Github ribbon -->
<a href="https://github.com/jgehring/rsvndump">
<img style="position: absolute; top: 0px; right: 0px; border: 0;" src="https://camo.githubusercontent.com/38ef81f8aca64bb9a64448d0d70f1308ef5341ab/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f72696768745f6461726b626c75655f3132313632312e706e67" alt="Fork me on GitHub" data-canonical-src="https://s3.amazonaws.com/github/ribbons/forkme_right_darkblue_121621.png">
</a>
<div class="index">
<ul>
<li><a href="#about">About</a></li>
<li><a href="#news">News</a></li>
<li><a href="#content">Content</a></li>
</ul>
</div>
<div class="main">
<hr />
<a name="about"><h3>About</h3></a>
<p>rsvndump is a command line tool that is able to dump a
Subversion repository that resides on a remote server.
All data is dumped in the format that can be read an written by
<i>svnadmin load/dump</i>, so the data which is produced
can easily be imported into a new Subversion repository.</p>
<p>rsvndump supports most of the functionality of the normal
<i>svn</i> client program and <i>svnadmin</i>, e.g. authentication,
SSL-support, incremental dumps and support for dumping text
deltas instead of full text.</p>
<p>Starting with Subversion 1.7 in October 2011, the official distribution already
contains a <a href="http://subversion.apache.org/docs/release-notes/1.7.html#svnrdump">remote dumpfile tool</a>
called <i>svnrdump</i>. While both tools serve the same purpose,
rsvndump is also able to dump a subdirectory of a repository by
resolving copy operations as needed, even without read access to
the repository root. Furthermore, rsvndump may work better with
old Subversion servers (i.e. versions prior to 1.5).
<p>Further information about rsvndump's behavior and possible
differences to the output generated by <i>svnadmin dump</i>
and <i>svnrdump</i> can be found at the <a href="manpage.html">man page</a>.</p>
<hr />
<a name="news"><h3>News</h3></a>
<ul>
<li><h4>February 15th, 2022</h4>
<p>rsvndump 0.6.1 is now <a href="#content">available</a>, fixing a
bug where sub-directories which matching prefixes would be included in
the dump () and improving the autoconf setup. Many thanks to <a href="https://github.com/hartwork">@hartwork</a>
and <a href="https://github.com/wmbaum">@wmbaum</a> for their contributions!</p>
<li><h4>May 14th, 2012</h4>
<p>rsvndump 0.6 is now <a href="#content">available</a>,
offering new features and fixing several bugs. The version
path and property storages have been redesigned, resulting
in a notable speedup for certain use cases. An issue that could have
led to unloadable dumps when dumping subdirectories has been resolved.
Furthermore, revision range handling in incremental mode and a build system issue
(<a href="http://sourceforge.net/tracker/?func=detail&aid=3515292&group_id=268687&atid=1143180">#3515292</a>)
have been fixed.<br/>
The full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.</p>
<li><h4>March 11th, 2012</h4>
<p>rsvndump 0.5.8 is now <a href="#content">available</a>,
fixing two serious bugs reported by Valentin Haenel
(<a href="https://sourceforge.net/tracker/?func=detail&aid=3483858&group_id=268687&atid=1143180">#3483858</a> and
<a
href="https://sourceforge.net/tracker/?func=detail&aid=3489637&group_id=268687&atid=1143180">#3489637</a>).
Both issues could result in invalid dump output
under certain circumstances. Please consult the
<a href="ChangeLog">ChangeLog</a> and the respective bug reports
for further details about those issues.<br/>
As usual, the source package is is available as a tarball
compressed using either gzip or bzip2 as well as in zip
format.</p>
<li><h4>November 21st, 2011</h4>
<p>rsvndump 0.5.7 is now <a href="#content">available</a>,
fixing possibly invalid dump output when run with
<tt>--keep-revnums</tt>. Furthermore, the command-line flags
<tt>--dry-run</tt> and <tt>--obfuscate</tt> have been added
to ease the submission of dumps for bug reports. A build
system fix from Sebastian Pipping has been applied as well
(<a href="http://sourceforge.net/tracker/?func=detail&aid=3369622&group_id=268687&atid=1143180">#3369622</a>).
The full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.</p>
<li><h4>July 15th, 2011</h4>
<p>rsvndump 0.5.6 is now <a href="#content">available</a>. This
release includes performance improvements for revision range
determination and path hash calculations. A bug that could cause
unloadable dumps under certain conditions as well as a possible
segmentation fault have been fixed. The CodeBlocks project file
for Compilation on Windows has been fixed, too.
The full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.</p>
</p>I've recently set up a <a href="http://flattr.com">Flattr</a>
account and added an <a href="http://flattr.com/thing/344002/rsvndump">entry for rsvndump</a>.
So if you like the program, please feel free to flattr it :)
</p></li>
<li><h4>March 3rd, 2011</h4>
<p>rsvndump 0.5.5 is now <a href="#content">available</a>. This
release fixes a bug introduced in 0.5.3
(<a href="http://sourceforge.net/tracker/?func=detail&aid=2997558&group_id=268687&atid=1143180">#2997558</a>)
which could led to crashes when dumping a
subdirectory. Additionally, performance issues caused by
unnecessary stats for copied revisions have been fixed. The
full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.<br /></p></li>
</ul>
<p>Older news items can be found in the <a href="news-archive.html">news archive</a>.</p>
<hr />
<a name="content"><h3>Content</h3></a>
<ul>
<li><h4>Downloads:</h4>
<ul>
<li><b><a href="http://prdownloads.sourceforge.net/rsvndump/rsvndump-0.6.1.tar.gz">rsvndump-0.6.1.tar.gz</a></b> - Source code distribution</li>
<li><b><a href="http://prdownloads.sourceforge.net/rsvndump/rsvndump-0.6.1.tar.bz2">rsvndump-0.6.1.tar.bz2</a></b> - Source code distribution</li>
<li><b><a href="http://prdownloads.sourceforge.net/rsvndump/rsvndump-0.6.1.zip">rsvndump-0.6.1.zip</a></b> - Source code distribution</li>
</ul>
<p>Previous releases can be found on rsvndump's <a href="http://sourceforge.net/projects/rsvndump/files/">file list at SourceForge</a>.</p>
</li>
<li><h4>Distribution packages/ports:</h4>
<p><b>NOTE</b>: These are not maintained by myself. Thanks go out to the respective packagers!</p>
<ul>
<li><b><a href="https://aur.archlinux.org/packages/rsvndump">ArchLinux (AUR)</a></b></li>
<li><b><a href="https://packages.fedoraproject.org/pkgs/rsvndump/rsvndump/">Fedora</a></b></li>
<li><b><a href="https://www.freshports.org/devel/rsvndump/">FreeBSD</a></b></li>
<li><b><a href="http://packages.gentoo.org/package/dev-vcs/rsvndump">Gentoo</a></b></li>
<li><b><a href="http://download.opensuse.org/repositories/devel:/tools:/scm:/svn/">openSUSE</a></b></li>
<li>... <a href="https://pkgs.org/search/?q=rsvndump">and others</a></li>
</ul>
</li>
<li><h4>Documentation:</h4>
<ul>
<li><a href="README">README</a> - Requirements and general information</li>
<li><a href="README-win32.txt">README.win32</a> - Build instructions for Windows</li>
<li><a href="manpage.html">man page</a> - Online version of the man page</li>
<li><a href="ChangeLog">ChangeLog</a> - The rsvndump change log</li>
<li><a href="http://www.gnu.org/licenses/gpl-3.0.txt">COPYING</a> - The GPL v3, under which rsvndump is released</li>
</ul>
</li>
<li><h4>Hosted:</h4>
<ul>
<li>The <a href="http://github.com/jgehring/rsvndump">Git repository</a> with issue tracker at GitHub</li>
<li>The <a href="http://sourceforge.net/projects/rsvndump">project page</a> at SourceForge</li>
<li>The <a href="http://freshmeat.net/projects/rsvndump">project page</a> at Freshmeat</li>
<li>The <a href="http://ohloh.net/p/rsvndump">project page</a> at Ohloh</li>
<li>The <a href="http://flattr.com/thing/344002/rsvndump">thing page</a> at Flattr for
micropayment donations</li>
</ul>
</li>
</ul>
</div>
<hr />
<table width="100%">
<tr><td align="left" width="150"><a href="http://jgehring.net">Jonas Gehring</a></td>
<td align="right"><i>Last update: $DATE</i></td></tr>
</table>
</div>
</body>
</html>
| <!DOCTYPE html>
<html>
<head>
<meta content="text/html; charset=ISO-8859-1" http-equiv="content-type">
<meta name="keywords" content="rsvndump, rsvndump homepage, Subversion, dump, remote, backup">
<link href="stylesheet.css" rel="stylesheet" type="text/css">
<title>rsvndump - remote Subversion repository dump</title>
</head>
<body>
<h2>rsvndump - remote Subversion repository dump</h2>
<div class="body">
<!-- Github ribbon -->
<a href="https://github.com/jgehring/rsvndump">
<img style="position: absolute; top: 0px; right: 0px; border: 0;" src="https://camo.githubusercontent.com/38ef81f8aca64bb9a64448d0d70f1308ef5341ab/68747470733a2f2f73332e616d617a6f6e6177732e636f6d2f6769746875622f726962626f6e732f666f726b6d655f72696768745f6461726b626c75655f3132313632312e706e67" alt="Fork me on GitHub" data-canonical-src="https://s3.amazonaws.com/github/ribbons/forkme_right_darkblue_121621.png">
</a>
<div class="index">
<ul>
<li><a href="#about">About</a></li>
<li><a href="#news">News</a></li>
<li><a href="#content">Content</a></li>
</ul>
</div>
<div class="main">
<hr />
<a name="about"><h3>About</h3></a>
<p>rsvndump is a command line tool that is able to dump a
Subversion repository that resides on a remote server.
All data is dumped in the format that can be read an written by
<i>svnadmin load/dump</i>, so the data which is produced
can easily be imported into a new Subversion repository.</p>
<p>rsvndump supports most of the functionality of the normal
<i>svn</i> client program and <i>svnadmin</i>, e.g. authentication,
SSL-support, incremental dumps and support for dumping text
deltas instead of full text.</p>
<p>Starting with Subversion 1.7 in October 2011, the official distribution already
contains a <a href="http://subversion.apache.org/docs/release-notes/1.7.html#svnrdump">remote dumpfile tool</a>
called <i>svnrdump</i>. While both tools serve the same purpose,
rsvndump is also able to dump a subdirectory of a repository by
resolving copy operations as needed, even without read access to
the repository root. Furthermore, rsvndump may work better with
old Subversion servers (i.e. versions prior to 1.5).
<p>Further information about rsvndump's behavior and possible
differences to the output generated by <i>svnadmin dump</i>
and <i>svnrdump</i> can be found at the <a href="manpage.html">man page</a>.</p>
<hr />
<a name="news"><h3>News</h3></a>
<ul>
<li><h4>February 15th, 2022</h4>
<p>rsvndump 0.6.1 is now <a href="#content">available</a>, fixing a
bug where sub-directories which matching prefixes would be included in
the dump () and improving the autoconf setup. Many thanks to <a href="https://github.com/hartwork">@hartwork</a>
and <a href="https://github.com/wmbaum">@wmbaum</a> for their contributions!</p>
<li><h4>May 14th, 2012</h4>
<p>rsvndump 0.6 is now <a href="#content">available</a>,
offering new features and fixing several bugs. The version
path and property storages have been redesigned, resulting
in a notable speedup for certain use cases. An issue that could have
led to unloadable dumps when dumping subdirectories has been resolved.
Furthermore, revision range handling in incremental mode and a build system issue
(<a href="http://sourceforge.net/tracker/?func=detail&aid=3515292&group_id=268687&atid=1143180">#3515292</a>)
have been fixed.<br/>
The full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.</p>
<li><h4>March 11th, 2012</h4>
<p>rsvndump 0.5.8 is now <a href="#content">available</a>,
fixing two serious bugs reported by Valentin Haenel
(<a href="https://sourceforge.net/tracker/?func=detail&aid=3483858&group_id=268687&atid=1143180">#3483858</a> and
<a
href="https://sourceforge.net/tracker/?func=detail&aid=3489637&group_id=268687&atid=1143180">#3489637</a>).
Both issues could result in invalid dump output
under certain circumstances. Please consult the
<a href="ChangeLog">ChangeLog</a> and the respective bug reports
for further details about those issues.<br/>
As usual, the source package is is available as a tarball
compressed using either gzip or bzip2 as well as in zip
format.</p>
<li><h4>November 21st, 2011</h4>
<p>rsvndump 0.5.7 is now <a href="#content">available</a>,
fixing possibly invalid dump output when run with
<tt>--keep-revnums</tt>. Furthermore, the command-line flags
<tt>--dry-run</tt> and <tt>--obfuscate</tt> have been added
to ease the submission of dumps for bug reports. A build
system fix from Sebastian Pipping has been applied as well
(<a href="http://sourceforge.net/tracker/?func=detail&aid=3369622&group_id=268687&atid=1143180">#3369622</a>).
The full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.</p>
<li><h4>July 15th, 2011</h4>
<p>rsvndump 0.5.6 is now <a href="#content">available</a>. This
release includes performance improvements for revision range
determination and path hash calculations. A bug that could cause
unloadable dumps under certain conditions as well as a possible
segmentation fault have been fixed. The CodeBlocks project file
for Compilation on Windows has been fixed, too.
The full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.</p></li>
<li><h4>March 3rd, 2011</h4>
<p>rsvndump 0.5.5 is now <a href="#content">available</a>. This
release fixes a bug introduced in 0.5.3
(<a href="http://sourceforge.net/tracker/?func=detail&aid=2997558&group_id=268687&atid=1143180">#2997558</a>)
which could led to crashes when dumping a
subdirectory. Additionally, performance issues caused by
unnecessary stats for copied revisions have been fixed. The
full list of changes can be found in the
<a href="ChangeLog">ChangeLog</a>. As usual, the source package is
is available as a tarball compressed using either gzip or bzip2 as
well as in zip format.<br /></p></li>
</ul>
<p>Older news items can be found in the <a href="news-archive.html">news archive</a>.</p>
<hr />
<a name="content"><h3>Content</h3></a>
<ul>
<li><h4>Downloads:</h4>
<ul>
<li><b><a href="http://prdownloads.sourceforge.net/rsvndump/rsvndump-0.6.1.tar.gz">rsvndump-0.6.1.tar.gz</a></b> - Source code distribution</li>
<li><b><a href="http://prdownloads.sourceforge.net/rsvndump/rsvndump-0.6.1.tar.bz2">rsvndump-0.6.1.tar.bz2</a></b> - Source code distribution</li>
<li><b><a href="http://prdownloads.sourceforge.net/rsvndump/rsvndump-0.6.1.zip">rsvndump-0.6.1.zip</a></b> - Source code distribution</li>
</ul>
<p>Previous releases can be found on rsvndump's <a href="http://sourceforge.net/projects/rsvndump/files/">file list at SourceForge</a>.</p>
</li>
<li><h4>Distribution packages/ports:</h4>
<p><b>NOTE</b>: These are not maintained by myself. Thanks go out to the respective packagers!</p>
<ul>
<li><b><a href="https://aur.archlinux.org/packages/rsvndump">ArchLinux (AUR)</a></b></li>
<li><b><a href="https://packages.fedoraproject.org/pkgs/rsvndump/rsvndump/">Fedora</a></b></li>
<li><b><a href="https://www.freshports.org/devel/rsvndump/">FreeBSD</a></b></li>
<li><b><a href="http://packages.gentoo.org/package/dev-vcs/rsvndump">Gentoo</a></b></li>
<li><b><a href="http://download.opensuse.org/repositories/devel:/tools:/scm:/svn/">openSUSE</a></b></li>
<li>... <a href="https://pkgs.org/search/?q=rsvndump">and others</a></li>
</ul>
</li>
<li><h4>Documentation:</h4>
<ul>
<li><a href="README">README</a> - Requirements and general information</li>
<li><a href="README-win32.txt">README.win32</a> - Build instructions for Windows</li>
<li><a href="manpage.html">man page</a> - Online version of the man page</li>
<li><a href="ChangeLog">ChangeLog</a> - The rsvndump change log</li>
<li><a href="http://www.gnu.org/licenses/gpl-3.0.txt">COPYING</a> - The GPL v3, under which rsvndump is released</li>
</ul>
</li>
<li><h4>Hosted:</h4>
<ul>
<li>The <a href="http://github.com/jgehring/rsvndump">Git repository</a> with issue tracker at GitHub</li>
<li>The <a href="http://sourceforge.net/projects/rsvndump">project page</a> at SourceForge</li>
<li>The <a href="http://freshmeat.net/projects/rsvndump">project page</a> at Freshmeat</li>
<li>The <a href="http://ohloh.net/p/rsvndump">project page</a> at Ohloh</li>
</ul>
</li>
</ul>
</div>
<hr />
<table width="100%">
<tr><td align="left" width="150"><a href="http://jgehring.net">Jonas Gehring</a></td>
<td align="right"><i>Last update: $DATE</i></td></tr>
</table>
</div>
</body>
</html>
|
jk2l/redmine_time_display | 2 | lib/issue_patch.rb | require_dependency 'issue'
module IssuePatch
def self.included(base) # :nodoc:
base.extend(ClassMethods)
base.send(:include, InstanceMethods)
# Same as typing in the class
base.class_eval do
unloadable
end
end
module ClassMethods
end
module InstanceMethods
def helper_spent_hours
@helper_spent_hours = TimeEntry.sum :hours, :conditions => "user_id != #{assigned_to_id.to_i} AND issue_id = '#{id}'"
end
def assigned_spent_hours
@assigned_spent_hours = TimeEntry.sum :hours, :conditions => "user_id = #{assigned_to_id.to_i} AND issue_id = '#{id}'"
end
def self_spent_hours
@self_spent_hours ||= time_entries.sum(:hours) || 0.00
end
def subtask_spent_hours
time = 0.00
i.descendants.each do |i|
time = time + i.time_entries.sum(:hours)
end
@subtask_spent_hours = time.to_f
end
def estimated_spent
if estimated_hours == nil or estimated_hours == 0
@esimtated_spent = "NA"
else
@estimated_spent = (assigned_spent_hours/estimated_hours*100).to_i
end
end
end
end
| require_dependency 'issue'
module IssuePatch
def self.included(base) # :nodoc:
base.extend(ClassMethods)
base.send(:include, InstanceMethods)
# Same as typing in the class
base.class_eval do
unloadable
end
end
module ClassMethods
end
module InstanceMethods
def helper_spent_hours
@helper_spent_hours = TimeEntry.sum :hours, :conditions => "user_id != #{assigned_to_id.to_i} AND issue_id = '#{id}'"
end
def assigned_spent_hours
@assigned_spent_hours = TimeEntry.sum :hours, :conditions => "user_id = #{assigned_to_id.to_i} AND issue_id = '#{id}'"
end
def self_spent_hours
@self_spent_hours ||= time_entries.sum(:hours) || 0.00
end
def subtask_spent_hours
time = 0.00
descendants.each do |i|
time = time + i.time_entries.sum(:hours)
end
@subtask_spent_hours = time.to_f
end
def estimated_spent
if estimated_hours == nil or estimated_hours == 0
@esimtated_spent = "NA"
else
@estimated_spent = (assigned_spent_hours/estimated_hours*100).to_i
end
end
end
end
|
rails/acts_as_list | 13 | init.rb | $:.unshift "#{File.dirname(__FILE__)}/lib"
require 'active_record/acts/list'
ActiveRecord::Base.class_eval { include ActiveRecord::Acts::List }
| $:.unshift "#{File.dirname(__FILE__)}/lib"
require 'bundler/setup'
require 'active_record'
require 'active_record/acts/list'
ActiveRecord::Base.class_eval { include ActiveRecord::Acts::List }
|
nbari/DALMP | 4 | tests/test_sessions_base.php | <?php
/**
* abstract class to test sessions instances
*
* @author Nicolas Embriz <nbari@dalmp.com>
* @package DALMP
* @license BSD License
* @version 3.0
*/
abstract class test_sessions_base extends PHPUnit_Framework_TestCase {
abstract public function testAttributes();
public function getSessionData() {
return "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.";
}
public function testOpen() {
$this->assertTrue($this->sess->open(True, True));
}
/**
* @depends testOpen
*/
public function testWrite() {
for ($i = 0; $i < 100; $i++) {
$this->assertTrue($this->sess->write(sha1("sid_{$i}"), $this->getSessionData()));
}
}
/**
* @depends testWrite
*/
public function testRead() {
for ($i = 0; $i < 100; $i++) {
$this->assertEquals($this->getSessionData(), $this->sess->read(sha1("sid_{$i}")));
}
}
/**
* @depends testWrite
*/
public function testDestroy() {
for ($i = 0; $i < 100; $i++) {
$this->assertTrue($this->sess->destroy(sha1("sid_{$i}")));
}
}
/**
* @depends testOpen
*/
public function testClose() {
$this->assertTrue($this->sess->close());
}
/**
* @depends testClose
*/
public function testGC() {
$this->assertTrue($this->sess->gc(True));
}
public function testWriteRef() {
for ($i = 0; $i < 100; $i++) {
$GLOBALS['UID'] = sha1($i);
$this->assertTrue($this->sess->write(sha1("sid_{$i}"), $this->getSessionData()));
}
}
public function testgetSessionsRefs() {
$this->assertEquals(100, count($this->sess->getSessionsRefs()));
}
public function testgetSessionRef() {
for ($i = 0; $i < 100; $i++) {
$this->assertEquals(1, count($this->sess->getSessionRef(sha1($i))));
}
}
public function testdelSessionRef() {
for ($i = 0; $i < 100; $i++) {
$this->assertTrue($this->sess->delSessionRef(sha1($i)));
$this->assertEquals(array(), $this->sess->getSessionRef(sha1($i)));
}
}
}
| <?php
/**
* abstract class to test sessions instances
*
* @author Nicolas Embriz <nbari@dalmp.com>
* @package DALMP
* @license BSD License
* @version 3.0
*/
abstract class test_sessions_base extends PHPUnit_Framework_TestCase {
abstract public function testAttributes();
public function getSessionData() {
return "Lorem Ipsum is simply dummy text of the printing and typesetting industry. Lorem Ipsum has been the industry's standard dummy text ever since the 1500s, when an unknown printer took a galley of type and scrambled it to make a type specimen book. It has survived not only five centuries, but also the leap into electronic typesetting, remaining essentially unchanged. It was popularised in the 1960s with the release of Letraset sheets containing Lorem Ipsum passages, and more recently with desktop publishing software like Aldus PageMaker including versions of Lorem Ipsum.";
}
public function testOpen() {
$this->assertTrue($this->sess->open(True, True));
}
/**
* @depends testOpen
*/
public function testWrite() {
for ($i = 0; $i < 100; $i++) {
$this->assertTrue($this->sess->write(sha1("sid_{$i}"), $this->getSessionData()));
}
}
/**
* @depends testWrite
*/
public function testRead() {
for ($i = 0; $i < 100; $i++) {
$this->assertEquals($this->getSessionData(), $this->sess->read(sha1("sid_{$i}")));
}
}
/**
* @depends testWrite
*/
public function testDestroy() {
for ($i = 0; $i < 100; $i++) {
$this->assertTrue($this->sess->destroy(sha1("sid_{$i}")));
}
}
/**
* @depends testOpen
*/
public function testClose() {
$this->assertTrue($this->sess->close());
}
/**
* @depends testClose
*/
public function testGC() {
$this->assertTrue($this->sess->gc(True));
}
public function testWriteRef() {
for ($i = 0; $i < 100; $i++) {
$GLOBALS['UID'] = sha1($i);
$this->assertTrue($this->sess->write(sha1("sid_{$i}"), $this->getSessionData()));
}
}
public function testGetSessionsRefs() {
$this->assertEquals(100, count($this->sess->getSessionsRefs()));
}
public function testGetSessionRef() {
for ($i = 0; $i < 100; $i++) {
$this->assertEquals(1, count($this->sess->getSessionRef(sha1($i))));
}
}
public function testDelSessionRef() {
for ($i = 0; $i < 100; $i++) {
$this->assertTrue($this->sess->delSessionRef(sha1($i)));
$this->assertEquals(array(), $this->sess->getSessionRef(sha1($i)));
}
}
}
|
ptarjan/node-cache | 110 | test.js | /* global describe, it, before, beforeEach, afterEach */
'use strict';
var chai = require('chai'),
expect = chai.expect,
sinon = require('sinon'),
sinonChai = require('sinon-chai'),
Cache = require('./index').Cache,
cache = new Cache(),
clock;
chai.use(sinonChai);
describe('node-cache', function() {
beforeEach(function() {
clock = sinon.useFakeTimers();
cache.clear();
});
afterEach(function() {
clock.restore();
});
describe('put()', function() {
before(function() {
cache.debug(false);
});
it('should allow adding a new item to the cache', function() {
expect(function() {
cache.put('key', 'value');
}).to.not.throw();
});
it('should allow adding a new item to the cache with a timeout', function() {
expect(function() {
cache.put('key', 'value', 100);
}).to.not.throw();
});
it('should allow adding a new item to the cache with a timeout callback', function() {
expect(function() {
cache.put('key', 'value', 100, function() {});
}).to.not.throw();
});
it('should throw an error given a non-numeric timeout', function() {
expect(function() {
cache.put('key', 'value', 'foo');
}).to.throw();
});
it('should throw an error given a timeout of NaN', function() {
expect(function() {
cache.put('key', 'value', NaN);
}).to.throw();
});
it('should throw an error given a timeout of 0', function() {
expect(function() {
cache.put('key', 'value', 0);
}).to.throw();
});
it('should throw an error given a negative timeout', function() {
expect(function() {
cache.put('key', 'value', -100);
}).to.throw();
});
it('should throw an error given a non-function timeout callback', function() {
expect(function() {
cache.put('key', 'value', 100, 'foo');
}).to.throw();
});
it('should cause the timeout callback to fire once the cache item expires', function() {
var spy = sinon.spy();
cache.put('key', 'value', 1000, spy);
clock.tick(999);
expect(spy).to.not.have.been.called;
clock.tick(1);
expect(spy).to.have.been.calledOnce.and.calledWith('key', 'value');
});
it('should override the timeout callback on a new put() with a different timeout callback', function() {
var spy1 = sinon.spy();
var spy2 = sinon.spy();
cache.put('key', 'value', 1000, spy1);
clock.tick(999);
cache.put('key', 'value', 1000, spy2)
clock.tick(1001);
expect(spy1).to.not.have.been.called;
expect(spy2).to.have.been.calledOnce.and.calledWith('key', 'value');
});
it('should cancel the timeout callback on a new put() without a timeout callback', function() {
var spy = sinon.spy();
cache.put('key', 'value', 1000, spy);
clock.tick(999);
cache.put('key', 'value');
clock.tick(1);
expect(spy).to.not.have.been.called;
});
it('should return the cached value', function() {
expect(cache.put('key', 'value')).to.equal('value');
});
});
describe('del()', function() {
before(function() {
cache.debug(false);
});
it('should return false given a key for an empty cache', function() {
expect(cache.del('miss')).to.be.false;
});
it('should return false given a key not in a non-empty cache', function() {
cache.put('key', 'value');
expect(cache.del('miss')).to.be.false;
});
it('should return true given a key in the cache', function() {
cache.put('key', 'value');
expect(cache.del('key')).to.be.true;
});
it('should remove the provided key from the cache', function() {
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
expect(cache.del('key')).to.be.true;
expect(cache.get('key')).to.be.null;
});
it('should decrement the cache size by 1', function() {
cache.put('key', 'value');
expect(cache.size()).to.equal(1);
expect(cache.del('key')).to.be.true;
expect(cache.size()).to.equal(0);
});
it('should not remove other keys in the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.get('key1')).to.equal('value1');
expect(cache.get('key2')).to.equal('value2');
expect(cache.get('key3')).to.equal('value3');
cache.del('key1');
expect(cache.get('key1')).to.be.null;
expect(cache.get('key2')).to.equal('value2');
expect(cache.get('key3')).to.equal('value3');
});
it('should only delete a key from the cache once even if called multiple times in a row', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
cache.del('key1');
cache.del('key1');
cache.del('key1');
expect(cache.size()).to.equal(2);
});
it('should handle deleting keys which were previously deleted and then re-added to the cache', function() {
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
cache.del('key');
expect(cache.get('key')).to.be.null;
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
cache.del('key');
expect(cache.get('key')).to.be.null;
});
it('should return true given an non-expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(999);
expect(cache.del('key')).to.be.true;
});
it('should return false given an expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(1000);
expect(cache.del('key')).to.be.false;
});
it('should cancel the timeout callback for the deleted key', function() {
var spy = sinon.spy();
cache.put('key', 'value', 1000, spy);
cache.del('key');
clock.tick(1000);
expect(spy).to.not.have.been.called;
});
it('should handle deletion of many items', function(done) {
clock.restore();
var num = 1000;
for(var i = 0; i < num; i++){
cache.put('key' + i, i, 1000);
}
expect(cache.size()).to.equal(num);
setTimeout(function(){
expect(cache.size()).to.equal(0);
done();
}, 1000);
});
});
describe('clear()', function() {
before(function() {
cache.debug(false);
});
it('should have no effect given an empty cache', function() {
expect(cache.size()).to.equal(0);
cache.clear();
expect(cache.size()).to.equal(0);
});
it('should remove all existing keys in the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
cache.clear();
expect(cache.size()).to.equal(0);
});
it('should remove the keys in the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.get('key1')).to.equal('value1');
expect(cache.get('key2')).to.equal('value2');
expect(cache.get('key3')).to.equal('value3');
cache.clear();
expect(cache.get('key1')).to.be.null;
expect(cache.get('key2')).to.be.null;
expect(cache.get('key3')).to.be.null;
});
it('should reset the cache size to 0', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
cache.clear();
expect(cache.size()).to.equal(0);
});
it('should reset the debug cache hits', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(1);
cache.clear();
expect(cache.hits()).to.equal(0);
});
it('should reset the debug cache misses', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('miss1');
expect(cache.misses()).to.equal(1);
cache.clear();
expect(cache.misses()).to.equal(0);
});
it('should cancel the timeout callbacks for all existing keys', function() {
var spy1 = sinon.spy();
var spy2 = sinon.spy();
var spy3 = sinon.spy();
cache.put('key1', 'value1', 1000, spy1);
cache.put('key2', 'value2', 1000, spy2);
cache.put('key3', 'value3', 1000, spy3);
cache.clear();
clock.tick(1000);
expect(spy1).to.not.have.been.called;
expect(spy2).to.not.have.been.called;
expect(spy3).to.not.have.been.called;
});
});
describe('get()', function() {
before(function() {
cache.debug(false);
});
it('should return null given a key for an empty cache', function() {
expect(cache.get('miss')).to.be.null;
});
it('should return null given a key not in a non-empty cache', function() {
cache.put('key', 'value');
expect(cache.get('miss')).to.be.null;
});
it('should return the corresponding value of a key in the cache', function() {
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
});
it('should return the latest corresponding value of a key in the cache', function() {
cache.put('key', 'value1');
cache.put('key', 'value2');
cache.put('key', 'value3');
expect(cache.get('key')).to.equal('value3');
});
it('should handle various types of cache keys', function() {
var keys = [null, undefined, NaN, true, false, 0, 1, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY, '', 'a', [], {}, [1, 'a', false], {a:1,b:'a',c:false}, function() {}];
keys.forEach(function(key, index) {
var value = 'value' + index;
cache.put(key, value);
expect(cache.get(key)).to.deep.equal(value);
});
});
it('should handle various types of cache values', function() {
var values = [null, undefined, NaN, true, false, 0, 1, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY, '', 'a', [], {}, [1, 'a', false], {a:1,b:'a',c:false}, function() {}];
values.forEach(function(value, index) {
var key = 'key' + index;
cache.put(key, value);
expect(cache.get(key)).to.deep.equal(value);
});
});
it('should not set a timeout given no expiration time', function() {
cache.put('key', 'value');
clock.tick(1000);
expect(cache.get('key')).to.equal('value');
});
it('should return the corresponding value of a non-expired key in the cache', function() {
cache.put('key', 'value', 1000);
clock.tick(999);
expect(cache.get('key')).to.equal('value');
});
it('should return null given an expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(1000);
expect(cache.get('key')).to.be.null;
});
it('should return null given an expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(1000);
expect(cache.get('key')).to.be.null;
});
it('should return null given a key which is a property on the Object prototype', function() {
expect(cache.get('toString')).to.be.null;
});
it('should allow reading the value for a key which is a property on the Object prototype', function() {
cache.put('toString', 'value');
expect(cache.get('toString')).to.equal('value');
});
});
describe('size()', function() {
before(function() {
cache.debug(false);
});
it('should return 0 given a fresh cache', function() {
expect(cache.size()).to.equal(0);
});
it('should return 1 after adding a single item to the cache', function() {
cache.put('key', 'value');
expect(cache.size()).to.equal(1);
});
it('should return 3 after adding three items to the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
});
it('should not multi-count duplicate items added to the cache', function() {
cache.put('key', 'value1');
expect(cache.size()).to.equal(1);
cache.put('key', 'value2');
expect(cache.size()).to.equal(1);
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.size()).to.equal(1);
clock.tick(999);
expect(cache.size()).to.equal(1);
clock.tick(1);
expect(cache.size()).to.equal(0);
});
});
describe('memsize()', function() {
before(function() {
cache.debug(false);
});
it('should return 0 given a fresh cache', function() {
expect(cache.memsize()).to.equal(0);
});
it('should return 1 after adding a single item to the cache', function() {
cache.put('key', 'value');
expect(cache.memsize()).to.equal(1);
});
it('should return 3 after adding three items to the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.memsize()).to.equal(3);
});
it('should not multi-count duplicate items added to the cache', function() {
cache.put('key', 'value1');
expect(cache.memsize()).to.equal(1);
cache.put('key', 'value2');
expect(cache.memsize()).to.equal(1);
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.memsize()).to.equal(1);
clock.tick(999);
expect(cache.memsize()).to.equal(1);
clock.tick(1);
expect(cache.memsize()).to.equal(0);
});
});
describe('debug()', function() {
it('should not count cache hits when false', function() {
cache.debug(false);
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(0);
});
it('should not count cache misses when false', function() {
cache.debug(false);
cache.put('key', 'value');
cache.get('miss1');
expect(cache.misses()).to.equal(0);
});
it('should count cache hits when true', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(1);
});
it('should count cache misses when true', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('miss1');
expect(cache.misses()).to.equal(1);
});
});
describe('hits()', function() {
before(function() {
cache.debug(true);
});
it('should return 0 given an empty cache', function() {
expect(cache.hits()).to.equal(0);
});
it('should return 0 given a non-empty cache which has not been accessed', function() {
cache.put('key', 'value');
expect(cache.hits()).to.equal(0);
});
it('should return 0 given a non-empty cache which has had only misses', function() {
cache.put('key', 'value');
cache.get('miss1');
cache.get('miss2');
cache.get('miss3');
expect(cache.hits()).to.equal(0);
});
it('should return 1 given a non-empty cache which has had a single hit', function() {
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(1);
});
it('should return 3 given a non-empty cache which has had three hits on the same key', function() {
cache.put('key', 'value');
cache.get('key');
cache.get('key');
cache.get('key');
expect(cache.hits()).to.equal(3);
});
it('should return 3 given a non-empty cache which has had three hits across many keys', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
cache.get('key1');
cache.get('key2');
cache.get('key3');
expect(cache.hits()).to.equal(3);
});
it('should return the correct value after a sequence of hits and misses', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
cache.get('key1');
cache.get('miss');
cache.get('key3');
expect(cache.hits()).to.equal(2);
});
it('should not count hits for expired keys', function() {
cache.put('key', 'value', 1000);
cache.get('key');
expect(cache.hits()).to.equal(1);
clock.tick(999);
cache.get('key');
expect(cache.hits()).to.equal(2);
clock.tick(1);
cache.get('key');
expect(cache.hits()).to.equal(2);
});
});
describe('misses()', function() {
before(function() {
cache.debug(true);
});
it('should return 0 given an empty cache', function() {
expect(cache.misses()).to.equal(0);
});
it('should return 0 given a non-empty cache which has not been accessed', function() {
cache.put('key', 'value');
expect(cache.misses()).to.equal(0);
});
it('should return 0 given a non-empty cache which has had only hits', function() {
cache.put('key', 'value');
cache.get('key');
cache.get('key');
cache.get('key');
expect(cache.misses()).to.equal(0);
});
it('should return 1 given a non-empty cache which has had a single miss', function() {
cache.put('key', 'value');
cache.get('miss');
expect(cache.misses()).to.equal(1);
});
it('should return 3 given a non-empty cache which has had three misses', function() {
cache.put('key', 'value');
cache.get('miss1');
cache.get('miss2');
cache.get('miss3');
expect(cache.misses()).to.equal(3);
});
it('should return the correct value after a sequence of hits and misses', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
cache.get('key1');
cache.get('miss');
cache.get('key3');
expect(cache.misses()).to.equal(1);
});
it('should count misses for expired keys', function() {
cache.put('key', 'value', 1000);
cache.get('key');
expect(cache.misses()).to.equal(0);
clock.tick(999);
cache.get('key');
expect(cache.misses()).to.equal(0);
clock.tick(1);
cache.get('key');
expect(cache.misses()).to.equal(1);
});
});
describe('keys()', function() {
before(function() {
cache.debug(false);
});
it('should return an empty array given an empty cache', function() {
expect(cache.keys()).to.deep.equal([]);
});
it('should return a single key after adding a single item to the cache', function() {
cache.put('key', 'value');
expect(cache.keys()).to.deep.equal(['key']);
});
it('should return 3 keys after adding three items to the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.keys()).to.deep.equal(['key1', 'key2', 'key3']);
});
it('should not multi-count duplicate items added to the cache', function() {
cache.put('key', 'value1');
expect(cache.keys()).to.deep.equal(['key']);
cache.put('key', 'value2');
expect(cache.keys()).to.deep.equal(['key']);
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.keys()).to.deep.equal(['key']);
clock.tick(999);
expect(cache.keys()).to.deep.equal(['key']);
clock.tick(1);
expect(cache.keys()).to.deep.equal([]);
});
});
describe('export()', function() {
var START_TIME = 10000;
var BASIC_EXPORT = JSON.stringify({
key: {
value: 'value',
expire: START_TIME + 1000,
},
});
before(function() {
cache.debug(false);
});
beforeEach(function() {
clock.tick(START_TIME);
});
it('should return an empty object given an empty cache', function() {
expect(cache.exportJson()).to.equal(JSON.stringify({}));
});
it('should return a single record after adding a single item to the cache', function() {
cache.put('key', 'value', 1000);
expect(cache.exportJson()).to.equal(BASIC_EXPORT);
});
it('should return multiple records with expiry', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'value1',
expire: 'NaN',
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
}));
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.exportJson()).to.equal(BASIC_EXPORT);
clock.tick(999);
expect(cache.exportJson()).to.equal(BASIC_EXPORT);
clock.tick(1);
expect(cache.exportJson()).to.equal(JSON.stringify({}));
});
});
describe('import()', function() {
var START_TIME = 10000;
var BASIC_EXPORT = JSON.stringify({
key: {
value: 'value',
expire: START_TIME + 1000,
},
});
before(function() {
cache.debug(false);
});
beforeEach(function() {
clock.tick(START_TIME);
});
it('should import an empty object into an empty cache', function() {
var exportedJson = cache.exportJson();
cache.clear();
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({}));
});
it('should import records into an empty cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
cache.clear();
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'value1',
expire: 'NaN',
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
}));
});
it('should import records into an already-existing cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
cache.put('key1', 'changed value', 5000);
cache.put('key3', 'value3', 500);
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'value1',
expire: 'NaN',
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
key3: {
value: 'value3',
expire: START_TIME + 500,
},
}));
});
it('should import records into an already-existing cache and skip duplicates', function() {
cache.debug(true);
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
cache.clear();
cache.put('key1', 'changed value', 5000);
cache.put('key3', 'value3', 500);
cache.importJson(exportedJson, { skipDuplicates: true });
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'changed value',
expire: START_TIME + 5000,
},
key3: {
value: 'value3',
expire: START_TIME + 500,
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
}));
});
it('should import with updated expire times', function() {
cache.put('key1', 'value1', 500);
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
var tickAmount = 750;
clock.tick(tickAmount);
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({
key2: {
value: 'value2',
expire: START_TIME + tickAmount + 250,
},
}));
});
it('should return the new size', function() {
cache.put('key1', 'value1', 500);
var exportedJson = cache.exportJson();
cache.clear();
cache.put('key2', 'value2', 1000);
expect(cache.size()).to.equal(1);
var size = cache.importJson(exportedJson);
expect(size).to.equal(2);
expect(cache.size()).to.equal(2);
});
});
describe('Cache()', function() {
it('should return a new cache instance when called', function() {
var cache1 = new Cache(),
cache2 = new Cache();
cache1.put('key', 'value1');
expect(cache1.keys()).to.deep.equal(['key']);
expect(cache2.keys()).to.deep.equal([]);
cache2.put('key', 'value2');
expect(cache1.get('key')).to.equal('value1');
expect(cache2.get('key')).to.equal('value2');
});
});
});
| /* global describe, it, before, beforeEach, afterEach */
'use strict';
var chai = require('chai'),
expect = chai.expect,
sinon = require('sinon'),
sinonChai = require('sinon-chai'),
Cache = require('./index').Cache,
cache = new Cache(),
clock;
chai.use(sinonChai);
describe('node-cache', function() {
beforeEach(function() {
clock = sinon.useFakeTimers();
cache.clear();
});
afterEach(function() {
clock.restore();
});
describe('put()', function() {
before(function() {
cache.debug(false);
});
it('should allow adding a new item to the cache', function() {
expect(function() {
cache.put('key', 'value');
}).to.not.throw();
});
it('should allow adding a new item to the cache with a timeout', function() {
expect(function() {
cache.put('key', 'value', 100);
}).to.not.throw();
});
it('should allow adding a new item to the cache with a timeout callback', function() {
expect(function() {
cache.put('key', 'value', 100, function() {});
}).to.not.throw();
});
it('should throw an error given a non-numeric timeout', function() {
expect(function() {
cache.put('key', 'value', 'foo');
}).to.throw();
});
it('should throw an error given a timeout of NaN', function() {
expect(function() {
cache.put('key', 'value', NaN);
}).to.throw();
});
it('should throw an error given a timeout of 0', function() {
expect(function() {
cache.put('key', 'value', 0);
}).to.throw();
});
it('should throw an error given a negative timeout', function() {
expect(function() {
cache.put('key', 'value', -100);
}).to.throw();
});
it('should throw an error given a timeout that is too large', function() {
expect(function() {
cache.put('key', 'value', 2147483648);
}).to.throw();
});
it('should throw an error given a non-function timeout callback', function() {
expect(function() {
cache.put('key', 'value', 100, 'foo');
}).to.throw();
});
it('should cause the timeout callback to fire once the cache item expires', function() {
var spy = sinon.spy();
cache.put('key', 'value', 1000, spy);
clock.tick(999);
expect(spy).to.not.have.been.called;
clock.tick(1);
expect(spy).to.have.been.calledOnce.and.calledWith('key', 'value');
});
it('should override the timeout callback on a new put() with a different timeout callback', function() {
var spy1 = sinon.spy();
var spy2 = sinon.spy();
cache.put('key', 'value', 1000, spy1);
clock.tick(999);
cache.put('key', 'value', 1000, spy2)
clock.tick(1001);
expect(spy1).to.not.have.been.called;
expect(spy2).to.have.been.calledOnce.and.calledWith('key', 'value');
});
it('should cancel the timeout callback on a new put() without a timeout callback', function() {
var spy = sinon.spy();
cache.put('key', 'value', 1000, spy);
clock.tick(999);
cache.put('key', 'value');
clock.tick(1);
expect(spy).to.not.have.been.called;
});
it('should return the cached value', function() {
expect(cache.put('key', 'value')).to.equal('value');
});
});
describe('del()', function() {
before(function() {
cache.debug(false);
});
it('should return false given a key for an empty cache', function() {
expect(cache.del('miss')).to.be.false;
});
it('should return false given a key not in a non-empty cache', function() {
cache.put('key', 'value');
expect(cache.del('miss')).to.be.false;
});
it('should return true given a key in the cache', function() {
cache.put('key', 'value');
expect(cache.del('key')).to.be.true;
});
it('should remove the provided key from the cache', function() {
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
expect(cache.del('key')).to.be.true;
expect(cache.get('key')).to.be.null;
});
it('should decrement the cache size by 1', function() {
cache.put('key', 'value');
expect(cache.size()).to.equal(1);
expect(cache.del('key')).to.be.true;
expect(cache.size()).to.equal(0);
});
it('should not remove other keys in the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.get('key1')).to.equal('value1');
expect(cache.get('key2')).to.equal('value2');
expect(cache.get('key3')).to.equal('value3');
cache.del('key1');
expect(cache.get('key1')).to.be.null;
expect(cache.get('key2')).to.equal('value2');
expect(cache.get('key3')).to.equal('value3');
});
it('should only delete a key from the cache once even if called multiple times in a row', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
cache.del('key1');
cache.del('key1');
cache.del('key1');
expect(cache.size()).to.equal(2);
});
it('should handle deleting keys which were previously deleted and then re-added to the cache', function() {
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
cache.del('key');
expect(cache.get('key')).to.be.null;
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
cache.del('key');
expect(cache.get('key')).to.be.null;
});
it('should return true given an non-expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(999);
expect(cache.del('key')).to.be.true;
});
it('should return false given an expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(1000);
expect(cache.del('key')).to.be.false;
});
it('should cancel the timeout callback for the deleted key', function() {
var spy = sinon.spy();
cache.put('key', 'value', 1000, spy);
cache.del('key');
clock.tick(1000);
expect(spy).to.not.have.been.called;
});
it('should handle deletion of many items', function(done) {
clock.restore();
var num = 1000;
for(var i = 0; i < num; i++){
cache.put('key' + i, i, 1000);
}
expect(cache.size()).to.equal(num);
setTimeout(function(){
expect(cache.size()).to.equal(0);
done();
}, 1000);
});
});
describe('clear()', function() {
before(function() {
cache.debug(false);
});
it('should have no effect given an empty cache', function() {
expect(cache.size()).to.equal(0);
cache.clear();
expect(cache.size()).to.equal(0);
});
it('should remove all existing keys in the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
cache.clear();
expect(cache.size()).to.equal(0);
});
it('should remove the keys in the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.get('key1')).to.equal('value1');
expect(cache.get('key2')).to.equal('value2');
expect(cache.get('key3')).to.equal('value3');
cache.clear();
expect(cache.get('key1')).to.be.null;
expect(cache.get('key2')).to.be.null;
expect(cache.get('key3')).to.be.null;
});
it('should reset the cache size to 0', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
cache.clear();
expect(cache.size()).to.equal(0);
});
it('should reset the debug cache hits', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(1);
cache.clear();
expect(cache.hits()).to.equal(0);
});
it('should reset the debug cache misses', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('miss1');
expect(cache.misses()).to.equal(1);
cache.clear();
expect(cache.misses()).to.equal(0);
});
it('should cancel the timeout callbacks for all existing keys', function() {
var spy1 = sinon.spy();
var spy2 = sinon.spy();
var spy3 = sinon.spy();
cache.put('key1', 'value1', 1000, spy1);
cache.put('key2', 'value2', 1000, spy2);
cache.put('key3', 'value3', 1000, spy3);
cache.clear();
clock.tick(1000);
expect(spy1).to.not.have.been.called;
expect(spy2).to.not.have.been.called;
expect(spy3).to.not.have.been.called;
});
});
describe('get()', function() {
before(function() {
cache.debug(false);
});
it('should return null given a key for an empty cache', function() {
expect(cache.get('miss')).to.be.null;
});
it('should return null given a key not in a non-empty cache', function() {
cache.put('key', 'value');
expect(cache.get('miss')).to.be.null;
});
it('should return the corresponding value of a key in the cache', function() {
cache.put('key', 'value');
expect(cache.get('key')).to.equal('value');
});
it('should return the latest corresponding value of a key in the cache', function() {
cache.put('key', 'value1');
cache.put('key', 'value2');
cache.put('key', 'value3');
expect(cache.get('key')).to.equal('value3');
});
it('should handle various types of cache keys', function() {
var keys = [null, undefined, NaN, true, false, 0, 1, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY, '', 'a', [], {}, [1, 'a', false], {a:1,b:'a',c:false}, function() {}];
keys.forEach(function(key, index) {
var value = 'value' + index;
cache.put(key, value);
expect(cache.get(key)).to.deep.equal(value);
});
});
it('should handle various types of cache values', function() {
var values = [null, undefined, NaN, true, false, 0, 1, Number.POSITIVE_INFINITY, Number.NEGATIVE_INFINITY, '', 'a', [], {}, [1, 'a', false], {a:1,b:'a',c:false}, function() {}];
values.forEach(function(value, index) {
var key = 'key' + index;
cache.put(key, value);
expect(cache.get(key)).to.deep.equal(value);
});
});
it('should not set a timeout given no expiration time', function() {
cache.put('key', 'value');
clock.tick(1000);
expect(cache.get('key')).to.equal('value');
});
it('should return the corresponding value of a non-expired key in the cache', function() {
cache.put('key', 'value', 1000);
clock.tick(999);
expect(cache.get('key')).to.equal('value');
});
it('should return null given an expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(1000);
expect(cache.get('key')).to.be.null;
});
it('should return null given an expired key', function() {
cache.put('key', 'value', 1000);
clock.tick(1000);
expect(cache.get('key')).to.be.null;
});
it('should return null given a key which is a property on the Object prototype', function() {
expect(cache.get('toString')).to.be.null;
});
it('should allow reading the value for a key which is a property on the Object prototype', function() {
cache.put('toString', 'value');
expect(cache.get('toString')).to.equal('value');
});
});
describe('size()', function() {
before(function() {
cache.debug(false);
});
it('should return 0 given a fresh cache', function() {
expect(cache.size()).to.equal(0);
});
it('should return 1 after adding a single item to the cache', function() {
cache.put('key', 'value');
expect(cache.size()).to.equal(1);
});
it('should return 3 after adding three items to the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.size()).to.equal(3);
});
it('should not multi-count duplicate items added to the cache', function() {
cache.put('key', 'value1');
expect(cache.size()).to.equal(1);
cache.put('key', 'value2');
expect(cache.size()).to.equal(1);
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.size()).to.equal(1);
clock.tick(999);
expect(cache.size()).to.equal(1);
clock.tick(1);
expect(cache.size()).to.equal(0);
});
});
describe('memsize()', function() {
before(function() {
cache.debug(false);
});
it('should return 0 given a fresh cache', function() {
expect(cache.memsize()).to.equal(0);
});
it('should return 1 after adding a single item to the cache', function() {
cache.put('key', 'value');
expect(cache.memsize()).to.equal(1);
});
it('should return 3 after adding three items to the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.memsize()).to.equal(3);
});
it('should not multi-count duplicate items added to the cache', function() {
cache.put('key', 'value1');
expect(cache.memsize()).to.equal(1);
cache.put('key', 'value2');
expect(cache.memsize()).to.equal(1);
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.memsize()).to.equal(1);
clock.tick(999);
expect(cache.memsize()).to.equal(1);
clock.tick(1);
expect(cache.memsize()).to.equal(0);
});
});
describe('debug()', function() {
it('should not count cache hits when false', function() {
cache.debug(false);
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(0);
});
it('should not count cache misses when false', function() {
cache.debug(false);
cache.put('key', 'value');
cache.get('miss1');
expect(cache.misses()).to.equal(0);
});
it('should count cache hits when true', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(1);
});
it('should count cache misses when true', function() {
cache.debug(true);
cache.put('key', 'value');
cache.get('miss1');
expect(cache.misses()).to.equal(1);
});
});
describe('hits()', function() {
before(function() {
cache.debug(true);
});
it('should return 0 given an empty cache', function() {
expect(cache.hits()).to.equal(0);
});
it('should return 0 given a non-empty cache which has not been accessed', function() {
cache.put('key', 'value');
expect(cache.hits()).to.equal(0);
});
it('should return 0 given a non-empty cache which has had only misses', function() {
cache.put('key', 'value');
cache.get('miss1');
cache.get('miss2');
cache.get('miss3');
expect(cache.hits()).to.equal(0);
});
it('should return 1 given a non-empty cache which has had a single hit', function() {
cache.put('key', 'value');
cache.get('key');
expect(cache.hits()).to.equal(1);
});
it('should return 3 given a non-empty cache which has had three hits on the same key', function() {
cache.put('key', 'value');
cache.get('key');
cache.get('key');
cache.get('key');
expect(cache.hits()).to.equal(3);
});
it('should return 3 given a non-empty cache which has had three hits across many keys', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
cache.get('key1');
cache.get('key2');
cache.get('key3');
expect(cache.hits()).to.equal(3);
});
it('should return the correct value after a sequence of hits and misses', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
cache.get('key1');
cache.get('miss');
cache.get('key3');
expect(cache.hits()).to.equal(2);
});
it('should not count hits for expired keys', function() {
cache.put('key', 'value', 1000);
cache.get('key');
expect(cache.hits()).to.equal(1);
clock.tick(999);
cache.get('key');
expect(cache.hits()).to.equal(2);
clock.tick(1);
cache.get('key');
expect(cache.hits()).to.equal(2);
});
});
describe('misses()', function() {
before(function() {
cache.debug(true);
});
it('should return 0 given an empty cache', function() {
expect(cache.misses()).to.equal(0);
});
it('should return 0 given a non-empty cache which has not been accessed', function() {
cache.put('key', 'value');
expect(cache.misses()).to.equal(0);
});
it('should return 0 given a non-empty cache which has had only hits', function() {
cache.put('key', 'value');
cache.get('key');
cache.get('key');
cache.get('key');
expect(cache.misses()).to.equal(0);
});
it('should return 1 given a non-empty cache which has had a single miss', function() {
cache.put('key', 'value');
cache.get('miss');
expect(cache.misses()).to.equal(1);
});
it('should return 3 given a non-empty cache which has had three misses', function() {
cache.put('key', 'value');
cache.get('miss1');
cache.get('miss2');
cache.get('miss3');
expect(cache.misses()).to.equal(3);
});
it('should return the correct value after a sequence of hits and misses', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
cache.get('key1');
cache.get('miss');
cache.get('key3');
expect(cache.misses()).to.equal(1);
});
it('should count misses for expired keys', function() {
cache.put('key', 'value', 1000);
cache.get('key');
expect(cache.misses()).to.equal(0);
clock.tick(999);
cache.get('key');
expect(cache.misses()).to.equal(0);
clock.tick(1);
cache.get('key');
expect(cache.misses()).to.equal(1);
});
});
describe('keys()', function() {
before(function() {
cache.debug(false);
});
it('should return an empty array given an empty cache', function() {
expect(cache.keys()).to.deep.equal([]);
});
it('should return a single key after adding a single item to the cache', function() {
cache.put('key', 'value');
expect(cache.keys()).to.deep.equal(['key']);
});
it('should return 3 keys after adding three items to the cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2');
cache.put('key3', 'value3');
expect(cache.keys()).to.deep.equal(['key1', 'key2', 'key3']);
});
it('should not multi-count duplicate items added to the cache', function() {
cache.put('key', 'value1');
expect(cache.keys()).to.deep.equal(['key']);
cache.put('key', 'value2');
expect(cache.keys()).to.deep.equal(['key']);
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.keys()).to.deep.equal(['key']);
clock.tick(999);
expect(cache.keys()).to.deep.equal(['key']);
clock.tick(1);
expect(cache.keys()).to.deep.equal([]);
});
});
describe('export()', function() {
var START_TIME = 10000;
var BASIC_EXPORT = JSON.stringify({
key: {
value: 'value',
expire: START_TIME + 1000,
},
});
before(function() {
cache.debug(false);
});
beforeEach(function() {
clock.tick(START_TIME);
});
it('should return an empty object given an empty cache', function() {
expect(cache.exportJson()).to.equal(JSON.stringify({}));
});
it('should return a single record after adding a single item to the cache', function() {
cache.put('key', 'value', 1000);
expect(cache.exportJson()).to.equal(BASIC_EXPORT);
});
it('should return multiple records with expiry', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'value1',
expire: 'NaN',
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
}));
});
it('should update when a key in the cache expires', function() {
cache.put('key', 'value', 1000);
expect(cache.exportJson()).to.equal(BASIC_EXPORT);
clock.tick(999);
expect(cache.exportJson()).to.equal(BASIC_EXPORT);
clock.tick(1);
expect(cache.exportJson()).to.equal(JSON.stringify({}));
});
});
describe('import()', function() {
var START_TIME = 10000;
var BASIC_EXPORT = JSON.stringify({
key: {
value: 'value',
expire: START_TIME + 1000,
},
});
before(function() {
cache.debug(false);
});
beforeEach(function() {
clock.tick(START_TIME);
});
it('should import an empty object into an empty cache', function() {
var exportedJson = cache.exportJson();
cache.clear();
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({}));
});
it('should import records into an empty cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
cache.clear();
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'value1',
expire: 'NaN',
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
}));
});
it('should import records into an already-existing cache', function() {
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
cache.put('key1', 'changed value', 5000);
cache.put('key3', 'value3', 500);
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'value1',
expire: 'NaN',
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
key3: {
value: 'value3',
expire: START_TIME + 500,
},
}));
});
it('should import records into an already-existing cache and skip duplicates', function() {
cache.debug(true);
cache.put('key1', 'value1');
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
cache.clear();
cache.put('key1', 'changed value', 5000);
cache.put('key3', 'value3', 500);
cache.importJson(exportedJson, { skipDuplicates: true });
expect(cache.exportJson()).to.equal(JSON.stringify({
key1: {
value: 'changed value',
expire: START_TIME + 5000,
},
key3: {
value: 'value3',
expire: START_TIME + 500,
},
key2: {
value: 'value2',
expire: START_TIME + 1000,
},
}));
});
it('should import with updated expire times', function() {
cache.put('key1', 'value1', 500);
cache.put('key2', 'value2', 1000);
var exportedJson = cache.exportJson();
var tickAmount = 750;
clock.tick(tickAmount);
cache.importJson(exportedJson);
expect(cache.exportJson()).to.equal(JSON.stringify({
key2: {
value: 'value2',
expire: START_TIME + tickAmount + 250,
},
}));
});
it('should return the new size', function() {
cache.put('key1', 'value1', 500);
var exportedJson = cache.exportJson();
cache.clear();
cache.put('key2', 'value2', 1000);
expect(cache.size()).to.equal(1);
var size = cache.importJson(exportedJson);
expect(size).to.equal(2);
expect(cache.size()).to.equal(2);
});
});
describe('Cache()', function() {
it('should return a new cache instance when called', function() {
var cache1 = new Cache(),
cache2 = new Cache();
cache1.put('key', 'value1');
expect(cache1.keys()).to.deep.equal(['key']);
expect(cache2.keys()).to.deep.equal([]);
cache2.put('key', 'value2');
expect(cache1.get('key')).to.equal('value1');
expect(cache2.get('key')).to.equal('value2');
});
});
});
|
JessicaMS/SDL-Stars | 1 | main.cpp | /*
Stars2 Application by Jessica Seibert
1/31/09/
A second version of my prior Stars application. This time, the stars
are drawn with a more platform independent SDL_FillRect, and
is encapsulated into a single header file. This header is developed
here independently from my Blocks application that utilizes it.
Built to by architecturally flexible and modular for future games.
_._
. ` .
. `.
.________.
`""``""'
` `
: : We are pillars of society. You can't run your computers, your fashion houses,
: : your publishing houses, your damn magazines, you can't do anything
; ; in culture without psychedelic people in key positions.
. . - Terence McKenna
' .
' .
. .
; :
. .
' ' mh
*/
//The headers
#include <iostream>
#include "SDL/SDL.h"
#include <string>
#include <math.h>
//Local includes
#include "timer.h"
#include "SDLwindow.h"
#include "stars.h"
//Global types and constants
#define WANT_TASTY_MUSHROOM 1 //Always
//The screen sttributes
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const int SCREEN_BPP = 32;
//Initialize whatever you must
bool GameInitialize()
{
//Initialize all SDL subsystems
if( SDL_Init( SDL_INIT_EVERYTHING ) == -1 )
{
return false;
}
//If everything initialized fine
return true;
}
//❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤
//function: main
//
//The land of tasty mushrooms. Accepts no arguments.
//❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤
int main( int argc, char* args[] )
{
SDL_Event event; //The event structure
Timer delta; //Keeps track of time
bool quit = false; //Quit flag
int msy, msx;
int ticks = 0; //Save the ticks!
cStarList* myStars;
myStars = new cStarList(100, 640, 480);
delta.start();
Window myWindow(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_BPP, "Stars!");
//Initialize SDL and stuff
if( GameInitialize() == false)
{
fprintf(stderr, "GameInitialize() failed: %s\n", SDL_GetError());
return 1;
}
msy = msx = 0;
srand(delta.get_ticks());
delta.start();
//While the user hasn't quit
while(WANT_TASTY_MUSHROOM && !quit)
{
//While there's events to handle
while( SDL_PollEvent( &event ) )
{
//Handle window events
myWindow.handle_events(event);
//Check for ESCAPE key... oh yeah, touch it
if( event.key.keysym.sym == SDLK_ESCAPE)
{
quit = true;
}
//OS-level quit signal, deal with it!
if( event.type == SDL_QUIT )
{
quit = true; //Quit the program
}
//If the mouse moved
if( event.type == SDL_MOUSEMOTION )
{
//Get the mouse offsets
msx = event.motion.x;
msy = event.motion.y;
}
if( event.type == SDL_MOUSEBUTTONDOWN)
{
//msdown = 1;
}
}
myStars->MoveStars(msx, msy, ticks);
SDL_FillRect( myWindow.screen, & myWindow.screen->clip_rect, SDL_MapRGB( myWindow.screen->format, 0x00, 0x00, 0x00 ) );
SDL_FillRect( myWindow.screen, & myWindow.screen->clip_rect, SDL_MapRGB( myWindow.screen->format, 0x00, 0x00, 0x00 ) );
myStars->DrawStars( myWindow.screen);
//Update the screen
if( SDL_Flip( myWindow.screen ) == -1 )
{
fprintf(stderr, "SDL_Flip() failed: %s\n", SDL_GetError());
return 1;
}
ticks = delta.get_ticks();
delta.start();
} //Farewell, game!
//myWindow.CleanUp();
//Clean up
delete myStars;
SDL_Quit();
fprintf(stdout, "Normal Quit: %s\n", SDL_GetError());
return 0;
}
| /*
Stars2 Application by Jessica Seibert
1/31/09/
A second version of my prior Stars application. This time, the stars
are drawn with a more platform independent SDL_FillRect, and
is encapsulated into a single header file.
_._
. ` .
. `.
.________.
`""``""'
` `
: :
: :
; ;
. .
' .
' .
. .
; :
. .
' ' mh
*/
//The headers
#include <iostream>
#include "SDL/SDL.h"
#include <string>
#include <math.h>
//Local includes
#include "timer.h"
#include "SDLwindow.h"
#include "StarList.h"
#ifndef StarData
#include "StarData.h"
#endif
//Global types and constants
#define WANT_TASTY_MUSHROOM 1 //Always
//The screen sttributes
const int SCREEN_WIDTH = 640;
const int SCREEN_HEIGHT = 480;
const int SCREEN_BPP = 32;
//Initialize whatever you must
bool GameInitialize()
{
//Initialize all SDL subsystems
if( SDL_Init( SDL_INIT_EVERYTHING ) == -1 )
{
return false;
}
//If everything initialized fine
return true;
}
//❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤
//function: main
//
//The land of tasty mushrooms. Accepts no arguments.
//❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤`•.¸¸.•´´¯`••.¸¸.•´´¯`•´❤
int main( int argc, char* args[] )
{
SDL_Event event; //The event structure
Timer delta; //Keeps track of time
bool quit = false; //Quit flag
int msy, msx;
int ticks = 0; //Save the ticks!
cStarList* myStars;
myStars = new cStarList(100, 640, 480);
delta.start();
Window myWindow(SCREEN_WIDTH, SCREEN_HEIGHT, SCREEN_BPP, "Stars!");
//Initialize SDL and stuff
if( GameInitialize() == false)
{
fprintf(stderr, "GameInitialize() failed: %s\n", SDL_GetError());
return 1;
}
msy = msx = 0;
srand(delta.get_ticks());
delta.start();
//While the user hasn't quit
while(WANT_TASTY_MUSHROOM && !quit)
{
//While there are events to handle
while( SDL_PollEvent( &event ) )
{
//Handle window events
myWindow.handle_events(event);
//Check for ESCAPE key... oh yeah, touch it
if( event.key.keysym.sym == SDLK_ESCAPE)
{
quit = true;
}
//OS-level quit signal, deal with it!
if( event.type == SDL_QUIT )
{
quit = true; //Quit the program
}
//If the mouse moved
if( event.type == SDL_MOUSEMOTION )
{
//Get the mouse offsets
msx = event.motion.x;
msy = event.motion.y;
}
}
myStars->MoveStars(msx, msy, ticks);
SDL_FillRect( myWindow.screen, & myWindow.screen->clip_rect, SDL_MapRGB( myWindow.screen->format, 0x00, 0x00, 0x00 ) );
SDL_FillRect( myWindow.screen, & myWindow.screen->clip_rect, SDL_MapRGB( myWindow.screen->format, 0x00, 0x00, 0x00 ) );
myStars->DrawStars( myWindow.screen);
//Update the screen
if( SDL_Flip( myWindow.screen ) == -1 )
{
fprintf(stderr, "SDL_Flip() failed: %s\n", SDL_GetError());
return 1;
}
ticks = delta.get_ticks();
delta.start();
} //Farewell, game!
//myWindow.CleanUp();
//Clean up
delete myStars;
SDL_Quit();
fprintf(stdout, "Normal Quit: %s\n", SDL_GetError());
return 0;
}
|
jney/jquery.pageless | 17 | example.rails3/app/models/article.rb | class Article < ActiveRecord::Base
end
| class Article < ActiveRecord::Base
attr_accessible :title, :body, :author
end
|
setiQuest/setiCode | 1 | src/sse-pkg/util/sonataInfoDisplay/screen.cpp | /*
* screen.cpp
*
* Project: OpenSonATA
* Version: 1.0
* Author: Jon Richards (current maintainer)
* The OpenSonATA code is the result of many programmers over many
* years.
*
* Manages the curses screen.
*
* Copyright 2010 The SETI Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by
* applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*
* Attribution must be: “Licensed through SETI” in all published
* uses of the software including analytics based on the software,
* combined and merged software, papers, articles, books, reports,
* web pages, etc.
*/
/**
* @file screen.cpp
* Manages the curses screen.
*/
#include "screen.h"
#include "utils.h"
bool Screen::m_resizeEventOccurred = false;
int Screen::m_newRows = -1;
int Screen::m_newCols = -1;
/** Define the bottom menu area to have no height. In future
* versions we will add a bottom menu, increase this to "1".
*/
#define BOTTOM_MENU_HEIGHT 0
/* Constructor. */
Screen::Screen()
{
Screen::m_resizeEventOccurred = false;
Screen::m_newRows = -1;
Screen::m_newCols = -1;
m_screenMode = screen_mode_components;
m_page = 1;
m_isNewMode = false;
m_rows = m_newRows;
m_cols = m_newCols;
(void) signal(SIGINT, Screen::finish); /* arrange interrupts to terminate */
(void) signal(SIGWINCH, Screen::screenResize); /* Catch the resize signal */
m_rows = m_newRows;
m_cols = m_newCols;
}
/* Destructor. */
Screen::~Screen()
{
}
/*
* Initialize the curses screen.
*/
void Screen::init()
{
initscr(); /* initialize the curses library */
keypad(stdscr, TRUE); /* enable keyboard mapping */
(void) nonl(); /* tell curses not to do NL->CR/NL on output */
(void) cbreak(); /* take input chars one at a time, no wait for \n */
(void) noecho();
timeout(1);
if (has_colors())
{
start_color();
/*
* Simple color assignment, often all we need. Color pair 0 cannot
* be redefined. This example uses the same value for the color
* pair as for the foreground color, though of course that is not
* necessary:
*/
init_pair(1, COLOR_RED, COLOR_BLACK);
init_pair(2, COLOR_GREEN, COLOR_BLACK);
init_pair(3, COLOR_YELLOW, COLOR_BLACK);
init_pair(4, COLOR_BLUE, COLOR_BLACK);
init_pair(5, COLOR_CYAN, COLOR_BLACK);
init_pair(6, COLOR_MAGENTA, COLOR_BLACK);
init_pair(7, COLOR_WHITE, COLOR_BLACK);
init_pair(8, COLOR_RED, COLOR_RED);
init_pair(9, COLOR_CYAN, COLOR_CYAN);
init_pair(10, COLOR_GREEN, COLOR_GREEN);
init_pair(11, COLOR_BLACK, COLOR_BLACK);
init_pair(12, COLOR_BLACK, COLOR_YELLOW);
}
drawBottomMenu();
}
/*
* Handles the screen rezise, which comes in as the result
* of a SIGWINCH signal.
* Executes the program "resize" to read in the terminal size.
*
* @param sig the signal.
*/
void Screen::screenResize(int sig)
{
Screen::m_resizeEventOccurred = true;
FILE *fp = popen("resize", "r");
if(fp)
{
char line[64];
memset(line, 0, 64);
while(fgets(line, sizeof(line)-1, fp))
{
if(strstr(line, "COLUMNS"))
{
Screen::m_newCols = atol(line + 16);
}
else if(strstr(line, "LINES"))
{
Screen::m_newRows = atol(line + 14);
}
memset(line, 0, 64);
}
fclose(fp);
}
}
/*
* Catch the control-c to quit screen mode then exit.
*
* @param sig the signal.
*/
void Screen::finish(int sig)
{
endwin();
exit(0);
}
/*
* Draws the bottom of the screen. The Function keys.
*/
void Screen::drawBottomMenu()
{
/** @todo Multiple report screens need to be implemented. Until this is working
* the bottom menu should not be displayed.
*/
/*
int centerPos = 0;
int width = (m_cols/(screen_mode_last - screen_mode_components +1));
for(int i = screen_mode_components; i<screen_mode_last; i++)
{
centerPos = (i+1)*width;
if(m_screenMode == i) attrset(COLOR_PAIR(6)); //magenta
else attrset(COLOR_PAIR(5)); //cyan
if(i == screen_mode_components)
{
move(m_rows-1, centerPos - (int)strlen("1-Status")/2);
addstr("F1 - Status");
}
if(i == screen_mode_signals)
{
move(m_rows-1, centerPos - (int)strlen("2-Signals")/2);
addstr("F2 - Signals");
}
if(i == screen_mode_errors)
{
move(m_rows-1, centerPos - (int)strlen("2-Errors")/2);
addstr("F3 - Errors");
}
if(i == screen_mode_summary)
{
move(m_rows-1, centerPos - (int)strlen("4-Summary")/2);
addstr("F4 - Summary");
}
attrset(COLOR_PAIR(0));
}
*/
return;
}
/*
* paint the screen.
*
* @param details the instance of the object containing the
* information to display on the screen.
*/
void Screen::paint(Details *details)
{
bool shouldRefresh = false;
string line;
if(Screen::m_resizeEventOccurred == true)
{
Screen::m_resizeEventOccurred = false;
m_rows = m_newRows;
m_cols = m_newCols;
shouldRefresh = true;
endwin();
init();
clear();
drawBottomMenu();
}
if(m_screenMode == details->getType())
{
if(m_page > details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT))
m_page = details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT);
if(m_isNewMode == true)
{
m_page = details->getCurrentPageNumber();
m_isNewMode = false;
clear();
shouldRefresh = true;
}
if(details->paint(m_page, m_cols, m_rows-BOTTOM_MENU_HEIGHT))
{
shouldRefresh = true;
}
}
//Print the page
line = "Page " + Utils::itos(m_page) + " of " +
Utils::itos(details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT));
move(0, m_cols-line.size());
addstr(line.c_str());
if(details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT) == m_page)
line = "8=PgDn";
else if(m_page == 1)
line = "9=PgUp";
else
line = "8=PgDn, 9=PgUp";
move(1, m_cols-line.size());
line += " ";
addstr(line.c_str() );
//Park cursor
move(m_rows-1, m_cols-1);
if(shouldRefresh == true)
{
refresh();
}
}
/*
* Process key presses.
*
* @param details the Details object to redraw of a key affects
* the screen.
*/
void Screen::processKey(Details *details)
{
char key[5];
int ch = getch();
//Print the keypress in the lower left corner
//For debugging keypresses
/*
if(ch != -1)
{
sprintf(key, "%d", ch);
move(m_rows-BOTTOM_MENU_HEIGHT, m_cols-4 );
addstr(" ");
move(m_rows-BOTTOM_MENU_HEIGHT, m_cols-4 );
addstr(key);
}
*/
/** @todo Add multiple screens that change based on key presses. */
/*
if(ch == 49)
{
m_screenMode = screen_mode_components;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
if(ch == 50)
{
m_screenMode = screen_mode_signals;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
if(ch == 51)
{
m_screenMode = screen_mode_errors;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
if(ch == 52)
{
m_screenMode = screen_mode_summary;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
*/
//Decrement the page if the '8' key is pressed.
if(ch == 56)
{
m_page--;
if(m_page <1 ) m_page = 1;
drawBottomMenu();
paint(details);
refresh();
}
//Increment the page if the '9' key is pressed.
if(ch == 57)
{
m_page++;
drawBottomMenu();
paint(details);
refresh();
}
}
| /*
* screen.cpp
*
* Project: OpenSonATA
* Version: 1.0
* Author: Jon Richards (current maintainer)
* The OpenSonATA code is the result of many programmers over many
* years.
*
* Manages the curses screen.
*
* Copyright 2010 The SETI Institute
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by
* applicable law or agreed to in writing, software distributed
* under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions
* and limitations under the License.
*
* Attribution must be: “Licensed through SETI” in all published
* uses of the software including analytics based on the software,
* combined and merged software, papers, articles, books, reports,
* web pages, etc.
*/
/**
* @file screen.cpp
* Manages the curses screen.
*/
#include "screen.h"
#include "utils.h"
bool Screen::m_resizeEventOccurred = false;
int Screen::m_newRows = -1;
int Screen::m_newCols = -1;
/** Define the bottom menu area to have no height. In future
* versions we will add a bottom menu, increase this to "1".
*/
#define BOTTOM_MENU_HEIGHT 0
/* Constructor. */
Screen::Screen()
{
Screen::m_resizeEventOccurred = false;
Screen::m_newRows = -1;
Screen::m_newCols = -1;
m_screenMode = screen_mode_components;
m_page = 1;
m_isNewMode = false;
m_rows = m_newRows;
m_cols = m_newCols;
(void) signal(SIGINT, Screen::finish); /* arrange interrupts to terminate */
(void) signal(SIGWINCH, Screen::screenResize); /* Catch the resize signal */
m_rows = m_newRows;
m_cols = m_newCols;
}
/* Destructor. */
Screen::~Screen()
{
}
/*
* Initialize the curses screen.
*/
void Screen::init()
{
initscr(); /* initialize the curses library */
keypad(stdscr, TRUE); /* enable keyboard mapping */
(void) nonl(); /* tell curses not to do NL->CR/NL on output */
(void) cbreak(); /* take input chars one at a time, no wait for \n */
(void) noecho();
timeout(1);
if (has_colors())
{
start_color();
/*
* Simple color assignment, often all we need. Color pair 0 cannot
* be redefined. This example uses the same value for the color
* pair as for the foreground color, though of course that is not
* necessary:
*/
init_pair(1, COLOR_RED, COLOR_BLACK);
init_pair(2, COLOR_GREEN, COLOR_BLACK);
init_pair(3, COLOR_YELLOW, COLOR_BLACK);
init_pair(4, COLOR_BLUE, COLOR_BLACK);
init_pair(5, COLOR_CYAN, COLOR_BLACK);
init_pair(6, COLOR_MAGENTA, COLOR_BLACK);
init_pair(7, COLOR_WHITE, COLOR_BLACK);
init_pair(8, COLOR_RED, COLOR_RED);
init_pair(9, COLOR_CYAN, COLOR_CYAN);
init_pair(10, COLOR_GREEN, COLOR_GREEN);
init_pair(11, COLOR_BLACK, COLOR_BLACK);
init_pair(12, COLOR_BLACK, COLOR_YELLOW);
}
drawBottomMenu();
}
/*
* Handles the screen rezise, which comes in as the result
* of a SIGWINCH signal.
* Uses ioctl(TIOCGWINSZ) to obtain the terminal size.
*
* @param sig the signal.
*/
void Screen::screenResize(int sig)
{
struct winsize size;
Screen::m_resizeEventOccurred = true;
if (ioctl(fileno(stdout), TIOCGWINSZ, &size) == 0)
{
Screen::m_newCols = size.ws_col;
Screen::m_newRows = size.ws_row;
}
}
/*
* Catch the control-c to quit screen mode then exit.
*
* @param sig the signal.
*/
void Screen::finish(int sig)
{
endwin();
exit(0);
}
/*
* Draws the bottom of the screen. The Function keys.
*/
void Screen::drawBottomMenu()
{
/** @todo Multiple report screens need to be implemented. Until this is working
* the bottom menu should not be displayed.
*/
/*
int centerPos = 0;
int width = (m_cols/(screen_mode_last - screen_mode_components +1));
for(int i = screen_mode_components; i<screen_mode_last; i++)
{
centerPos = (i+1)*width;
if(m_screenMode == i) attrset(COLOR_PAIR(6)); //magenta
else attrset(COLOR_PAIR(5)); //cyan
if(i == screen_mode_components)
{
move(m_rows-1, centerPos - (int)strlen("1-Status")/2);
addstr("F1 - Status");
}
if(i == screen_mode_signals)
{
move(m_rows-1, centerPos - (int)strlen("2-Signals")/2);
addstr("F2 - Signals");
}
if(i == screen_mode_errors)
{
move(m_rows-1, centerPos - (int)strlen("2-Errors")/2);
addstr("F3 - Errors");
}
if(i == screen_mode_summary)
{
move(m_rows-1, centerPos - (int)strlen("4-Summary")/2);
addstr("F4 - Summary");
}
attrset(COLOR_PAIR(0));
}
*/
return;
}
/*
* paint the screen.
*
* @param details the instance of the object containing the
* information to display on the screen.
*/
void Screen::paint(Details *details)
{
bool shouldRefresh = false;
string line;
if(Screen::m_resizeEventOccurred == true)
{
Screen::m_resizeEventOccurred = false;
m_rows = m_newRows;
m_cols = m_newCols;
shouldRefresh = true;
endwin();
init();
clear();
drawBottomMenu();
}
if(m_screenMode == details->getType())
{
if(m_page > details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT))
m_page = details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT);
if(m_isNewMode == true)
{
m_page = details->getCurrentPageNumber();
m_isNewMode = false;
clear();
shouldRefresh = true;
}
if(details->paint(m_page, m_cols, m_rows-BOTTOM_MENU_HEIGHT))
{
shouldRefresh = true;
}
}
//Print the page
line = "Page " + Utils::itos(m_page) + " of " +
Utils::itos(details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT));
move(0, m_cols-line.size());
addstr(line.c_str());
if(details->getNumPages(m_rows-BOTTOM_MENU_HEIGHT) == m_page)
line = "8=PgDn";
else if(m_page == 1)
line = "9=PgUp";
else
line = "8=PgDn, 9=PgUp";
move(1, m_cols-line.size());
line += " ";
addstr(line.c_str() );
//Park cursor
move(m_rows-1, m_cols-1);
if(shouldRefresh == true)
{
refresh();
}
}
/*
* Process key presses.
*
* @param details the Details object to redraw of a key affects
* the screen.
*/
void Screen::processKey(Details *details)
{
char key[5];
int ch = getch();
//Print the keypress in the lower left corner
//For debugging keypresses
/*
if(ch != -1)
{
sprintf(key, "%d", ch);
move(m_rows-BOTTOM_MENU_HEIGHT, m_cols-4 );
addstr(" ");
move(m_rows-BOTTOM_MENU_HEIGHT, m_cols-4 );
addstr(key);
}
*/
/** @todo Add multiple screens that change based on key presses. */
/*
if(ch == 49)
{
m_screenMode = screen_mode_components;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
if(ch == 50)
{
m_screenMode = screen_mode_signals;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
if(ch == 51)
{
m_screenMode = screen_mode_errors;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
if(ch == 52)
{
m_screenMode = screen_mode_summary;
m_isNewMode = true;
drawBottomMenu();
refresh();
}
*/
//Decrement the page if the '8' key is pressed.
if(ch == 56)
{
m_page--;
if(m_page <1 ) m_page = 1;
drawBottomMenu();
paint(details);
refresh();
}
//Increment the page if the '9' key is pressed.
if(ch == 57)
{
m_page++;
drawBottomMenu();
paint(details);
refresh();
}
}
|
cehoffman/sinatra-respond_to | 23 | lib/sinatra/respond_to/version.rb | module Sinatra
module RespondTo
Version = '0.9.0'
end
end
| module Sinatra
module RespondTo
Version = '0.9.1'
end
end
|
josegonzalez/cakephp-wysiwyg | 8 | View/Helper/WysiwygHelper.php | <?php
/**
* Wysiwyg is a helper for outputting .
* This helper REQUIRES the installation files for the wysiwyg helpers you will use
*
* @package cake
* @subpackage cake.app.plugins.wysiwyg.views.helpers
* @author: Jose Diaz-Gonzalez
* @version: 0.1
* @email: support@savant.be
* @site: http://josediazgonzalez.com
*/
App::uses('WysiwygAppHelper', 'Wysiwyg.View/Helper');
class WysiwygHelper extends WysiwygAppHelper {
/**
* Helper dependencies
*
* @public array
*/
public $helpers = array();
/**
* Default Helper to use
*
* @public string
**/
public $helper = '';
/**
* Array of whether a certain helper has been imported yet
*
*/
public $importedHelpers = array(
'Form' => false,
'Fck' => false,
'Jwysiwyg' => false,
'Nicedit' => false,
'Markitup' => false,
'Tinymce' => false
);
/**
* Array of defaults configuration for editors, specified when
* importing Wysiwyg in your controller. For example:
*
* public $helpers = array(
* 'editor' => 'Tinymce',
* 'editorDefaults' => array(
* 'theme_advanced_toolbar_align' => 'right',
* )
* );
*/
protected $_editorDefaults = array();
/**
* Sets the $this->helper to the helper configured in the session
*
* @return void
* @author Jose Diaz-Gonzalez
**/
public function __construct(View $View, $options) {
$this->_View = $View;
$this->request = $View->request;
$options = array_merge(array('editor' => 'tinymce'), $options);
if (isset($options['editorDefaults'])) {
$this->_editorDefaults = $options['editorDefaults'];
}
$this->changeEditor($options['editor']);
}
/**
* Changes the editor on the fly
*
* @param string $editor String name of editor, excluding the word 'Helper'
* @return void
* @author Jose Diaz-Gonzalez
**/
public function changeEditor($editor) {
$this->helper = ucfirst($editor);
$prefix = '';
if ($editor !== 'Form') {
$prefix = 'Wysiwyg.';
}
if (!$this->importedHelpers[$this->helper]) {
$this->importedHelpers[$this->helper] = true;
$this->helpers[] = $prefix . $this->helper;
$this->_helperMap = ObjectCollection::normalizeObjectArray($this->helpers);
}
}
/**
* Returns the appropriate input field element
*
* @param string $field - used to build input name for views,
* @param array $options Array of HTML attributes.
* @param array $editorOptions Array of editor attributes for this input field
* @return string
* @author Jose Diaz-Gonzalez
*/
public function input($field, $options = array(), $editorOptions = array()) {
$editorHelper = $this->helper;
$editorOptions = Set::merge($this->_editorDefaults, $editorOptions);
return $this->$editorHelper->input($field, $options, $editorOptions);
}
/**
* Returns the appropriate textarea element
*
* @param string $field - used to build input name for views,
* @param array $options Array of HTML attributes.
* @param array $editorOptions Array of editor attributes for this textarea
* @return string
* @author Jose Diaz-Gonzalez
*/
public function textarea($field, $options = array(), $editorOptions = array()) {
$editorHelper = $this->helper;
$editorOptions = Set::merge($this->_editorDefaults, $editorOptions);
return $this->$editorHelper->textarea($field, $options, $editorOptions);
}
} | <?php
/**
* Wysiwyg is a helper for outputting .
* This helper REQUIRES the installation files for the wysiwyg helpers you will use
*
* @package cake
* @subpackage cake.app.plugins.wysiwyg.views.helpers
* @author: Jose Diaz-Gonzalez
* @version: 0.1
* @email: support@savant.be
* @site: http://josediazgonzalez.com
*/
App::uses('WysiwygAppHelper', 'Wysiwyg.View/Helper');
class WysiwygHelper extends WysiwygAppHelper {
/**
* Helper dependencies
*
* @public array
*/
public $helpers = array();
/**
* Default Helper to use
*
* @public string
**/
public $helper = '';
/**
* Array of whether a certain helper has been imported yet
*
*/
public $importedHelpers = array(
'Form' => false,
'Fck' => false,
'Jwysiwyg' => false,
'Nicedit' => false,
'Markitup' => false,
'Tinymce' => false
);
/**
* Array of defaults configuration for editors, specified when
* importing Wysiwyg in your controller. For example:
*
* public $helpers = array(
* 'editor' => 'Tinymce',
* 'editorDefaults' => array(
* 'theme_advanced_toolbar_align' => 'right',
* )
* );
*/
protected $_editorDefaults = array();
/**
* Sets the $this->helper to the helper configured in the session
*
* @return void
* @author Jose Diaz-Gonzalez
**/
public function __construct(View $View, $options) {
$this->_View = $View;
$this->request = $View->request;
$options = array_merge(array('editor' => 'tinymce'), $options);
if (isset($options['editorDefaults'])) {
$this->_editorDefaults = $options['editorDefaults'];
}
$this->changeEditor($options['editor']);
}
/**
* Changes the editor on the fly
*
* @param string $editor String name of editor, excluding the word 'Helper'
* @return void
* @author Jose Diaz-Gonzalez
**/
public function changeEditor($editor) {
$this->helper = ucfirst($editor);
$prefix = '';
if ($editor !== 'Form') {
$prefix = 'Wysiwyg.';
}
if (!$this->importedHelpers[$this->helper]) {
$this->importedHelpers[$this->helper] = true;
$this->helpers[] = $prefix . $this->helper;
$this->_helperMap = ObjectCollection::normalizeObjectArray($this->helpers);
}
}
/**
* Returns the appropriate input field element
*
* @param string $field - used to build input name for views,
* @param array $options Array of HTML attributes.
* @param array $editorOptions Array of editor attributes for this input field
* @return string
* @author Jose Diaz-Gonzalez
*/
public function input($field = null, $options = array(), $editorOptions = array()) {
$editorHelper = $this->helper;
$editorOptions = Set::merge($this->_editorDefaults, $editorOptions);
return $this->$editorHelper->input($field, $options, $editorOptions);
}
/**
* Returns the appropriate textarea element
*
* @param string $field - used to build input name for views,
* @param array $options Array of HTML attributes.
* @param array $editorOptions Array of editor attributes for this textarea
* @return string
* @author Jose Diaz-Gonzalez
*/
public function textarea($field = null, $options = array(), $editorOptions = array()) {
$editorHelper = $this->helper;
$editorOptions = Set::merge($this->_editorDefaults, $editorOptions);
return $this->$editorHelper->textarea($field, $options, $editorOptions);
}
}
|
atmos/rack_hoptoad | 3 | lib/rack/hoptoad.rb | require 'rack'
require 'erb'
require 'toadhopper'
module Rack
# Catches all exceptions raised from the app it wraps and
# posts the results to hoptoad.
class Hoptoad
VERSION = '0.1.6'
class Error < StandardError; end
attr_accessor :api_key, :environment_filters, :report_under, :rack_environment, :notifier_class, :failsafe
def initialize(app, api_key = nil, rack_environment = 'RACK_ENV')
@app = app
@api_key = api_key
@report_under = %w(staging production)
@rack_environment = rack_environment
@environment_filters = %w(AWS_ACCESS_KEY AWS_SECRET_ACCESS_KEY AWS_ACCOUNT SSH_AUTH_SOCK)
@notifier_class = Toadhopper
@failsafe = $stderr
yield self if block_given?
end
def call(env)
status, headers, body =
begin
@app.call(env)
rescue StandardError, LoadError, SyntaxError => boom
notified = send_notification boom, env
env['hoptoad.notified'] = notified
raise
end
send_notification env['rack.exception'], env if env['rack.exception']
[status, headers, body]
end
def environment_filter_keys
@environment_filters.flatten
end
def environment_filter_regexps
environment_filter_keys.map do |key|
"^#{Regexp.escape(wrapped_key_for(key))}$"
end
end
private
def report?
report_under.include?(rack_env)
end
def send_notification(exception, env)
return true unless report?
request = Rack::Request.new(env)
options = {
:api_key => api_key,
:url => "#{request.scheme}://#{request.host}#{request.path}",
:params => request.params,
:framework_env => rack_env,
:notifier_name => 'Rack::Hoptoad',
:notifier_version => VERSION,
:environment => environment_data_for(env),
:session => env['rack.session']
}
if result = toadhopper.post!(exception, options, {'X-Hoptoad-Client-Name' => 'Rack::Hoptoad'})
if result.errors.empty?
true
else
raise Error, "Status: #{result.status} #{result.errors.inspect}"
end
else
raise Error, "No response from Toadhopper"
end
rescue Exception => e
return unless @failsafe
@failsafe.puts "Fail safe error caught: #{e.class}: #{e.message}"
@failsafe.puts e.backtrace
@failsafe.puts "Exception is #{exception.class}: #{exception.message}"
@failsafe.puts exception.backtrace
false
end
def rack_env
ENV[rack_environment] || 'development'
end
def toadhopper
toad = @notifier_class.new(api_key)
toad.filters = environment_filter_regexps
toad
end
def environment_data_for(env)
data = {}
ENV.each do |key,value|
data[wrapped_key_for(key)] = value.inspect
end
env.each do |key,value|
data["rack[#{key.inspect}]"] = value.inspect
end
data
end
def wrapped_key_for(key)
"ENV[#{key.inspect}]"
end
def extract_body(env)
if io = env['rack.input']
io.rewind if io.respond_to?(:rewind)
io.read
end
end
end
end
| require 'rack/hoptoad_version'
require 'rack'
require 'erb'
require 'toadhopper'
module Rack
# Catches all exceptions raised from the app it wraps and
# posts the results to hoptoad.
class Hoptoad
class Error < StandardError; end
attr_accessor :api_key, :environment_filters, :report_under, :rack_environment, :notifier_class, :failsafe
def initialize(app, api_key = nil, rack_environment = 'RACK_ENV')
@app = app
@api_key = api_key
@report_under = %w(staging production)
@rack_environment = rack_environment
@environment_filters = %w(AWS_ACCESS_KEY AWS_SECRET_ACCESS_KEY AWS_ACCOUNT SSH_AUTH_SOCK)
@notifier_class = Toadhopper
@failsafe = $stderr
yield self if block_given?
end
def call(env)
status, headers, body =
begin
@app.call(env)
rescue StandardError, LoadError, SyntaxError => boom
notified = send_notification boom, env
env['hoptoad.notified'] = notified
raise
end
send_notification env['rack.exception'], env if env['rack.exception']
[status, headers, body]
end
def environment_filter_keys
@environment_filters.flatten
end
def environment_filter_regexps
environment_filter_keys.map do |key|
"^#{Regexp.escape(wrapped_key_for(key))}$"
end
end
private
def report?
report_under.include?(rack_env)
end
def send_notification(exception, env)
return true unless report?
request = Rack::Request.new(env)
options = {
:api_key => api_key,
:url => "#{request.scheme}://#{request.host}#{request.path}",
:params => request.params,
:framework_env => rack_env,
:notifier_name => 'Rack::Hoptoad',
:notifier_version => VERSION,
:environment => environment_data_for(env),
:session => env['rack.session']
}
if result = toadhopper.post!(exception, options, {'X-Hoptoad-Client-Name' => 'Rack::Hoptoad'})
if result.errors.empty?
true
else
raise Error, "Status: #{result.status} #{result.errors.inspect}"
end
else
raise Error, "No response from Toadhopper"
end
rescue Exception => e
return unless @failsafe
@failsafe.puts "Fail safe error caught: #{e.class}: #{e.message}"
@failsafe.puts e.backtrace
@failsafe.puts "Exception is #{exception.class}: #{exception.message}"
@failsafe.puts exception.backtrace
false
end
def rack_env
ENV[rack_environment] || 'development'
end
def toadhopper
toad = @notifier_class.new(api_key)
toad.filters = environment_filter_regexps
toad
end
def environment_data_for(env)
data = {}
ENV.each do |key,value|
data[wrapped_key_for(key)] = value.inspect
end
env.each do |key,value|
data["rack[#{key.inspect}]"] = value.inspect
end
data
end
def wrapped_key_for(key)
"ENV[#{key.inspect}]"
end
def extract_body(env)
if io = env['rack.input']
io.rewind if io.respond_to?(:rewind)
io.read
end
end
end
end
|
ialexi/hedwig | 3 | docs/build/articles/controls/scroll/touch.html | <html><head><title>Docs
</title><meta http-equiv: 'Content-Type' content="text/html; charset=utf-8" />
<style type="text/css">
body {
font-family: "Lucida Sans", "Lucida Grande", Verdana, Arial, sans-serif;
margin: 0px;
margin-bottom:1em;
font-family: sans-serif;
font-size: 10px;
line-height:1.2;
}
.content { font-size: 14px; }
code {
font-family: Monaco, Inconsolata, Courier, fixed-width;
font-size: 12px;
}
pre code {
margin-right: 1em;
border: 1px solid #a0b0a0;
overflow-y: hidden;
overflow-x: auto;
background: #f5f9f5;
display:block;
padding: 1em;
}
a { text-decoration: none; color: rgb(50, 50, 155); }
.header {
background-color: rgb(38, 43, 50);
height: 60px;
padding-top:17px;
padding-bottom:17px;
padding-left:2em;
}
.header a.img {
float: left;
}
.header .here {
float:left;
margin-top:27px;
margin-left:5px;
color: rgb(200, 255, 200);
font-size: 25px;
}
.header span.here {
margin-top:34px;
font-size:15px;
color: white;
}
.header a.item {
float:right;
margin-top: 34px;
margin-right:10px;
color: white;
font-size:15px;
}
.header a.item:hover {
text-decoration: underline;
}
.content {
padding-top: 1em;
padding-left: 2em;
padding-right: 2em;
}
img { margin-left: auto; margin-right: auto; display: block; }
h1, h2, h3 { color: rgb(100, 155, 100); }
code .class { color: rgb(0, 0, 150); }
/*code .variable { color: rgb(10, 70, 10); }*/
code .comment { color: rgb(100, 150, 200); }
code .string { color: rgb(0, 100, 10); }
code .number { color: rgb(0, 0, 255); }
code .keyword, code .this { color: rgb(25, 110, 25); font-weight: bold; }
</style>
</head><body><div class="header"><a href="../../../index.html" class="img"><img src="../../../resources/logo.png" /></a><a href="../../../index.html" class="here">Documentation
</a><a href="../../../reference/index.html" class="item">SproutCore Reference
</a></div><div class="content"><h1>ScrollView Touch Support</h1>
<p>SproutCore's ScrollView comes with built-in support for touch-based
scrolling, including momentum and bouncing. In addition, it has (somewhat experimental)
support for scaling.</p>
<p>For many cases, just putting a view inside a ScrollView will "just work". Still, you may want
to set some settings.</p>
<h2>Bouncing</h2>
<p>By default, ScrollView will <em>always</em> bounce when scrolling vertically, regardless of the
content's height, but only bounce horizontally <em>if</em> the content is wider than the ScrollView.
This is controlled by two properties:</p>
<ul>
<li>alwaysBounceHorizontal, which defaults to NO.</li>
<li>alwaysBounceVertical, which defaults to YES.</li>
</ul>
<h2>Scaling</h2>
<p>ScrollView has support for scaling, which you can use through a few properties:</p>
<ul>
<li>canScale. Specifies whether the content may be scaled. If YES, using two fingers
(in that classic "pinch gesture") will zoom the content.</li>
<li>minimumScale: The minimum scale value. Default: 0.25.</li>
<li>maximumScale: The maximum scale value. Default: 2.0.</li>
</ul>
<p><a href='touch.js' class='demo'>touch.js</a></p>
</div><div class="footer"></div></body></html> | <html><head><title>Docs
</title><meta http-equiv: 'Content-Type' content="text/html; charset=utf-8" />
<style type="text/css">
body {
font-family: "Lucida Sans", "Lucida Grande", Verdana, Arial, sans-serif;
margin: 0px;
margin-bottom:1em;
font-family: sans-serif;
font-size: 10px;
line-height:1.2;
}
.content { font-size: 14px; }
code {
font-family: Monaco, Inconsolata, Courier, fixed-width;
font-size: 12px;
}
pre code {
margin-right: 1em;
border: 1px solid #a0b0a0;
overflow-y: hidden;
overflow-x: auto;
background: #f5f9f5;
display:block;
padding: 1em;
}
a { text-decoration: none; color: rgb(50, 50, 155); }
.header {
background-color: rgb(38, 43, 50);
height: 60px;
padding-top:17px;
padding-bottom:17px;
padding-left:2em;
}
.header a.img {
float: left;
}
.header .here {
float:left;
margin-top:27px;
margin-left:5px;
color: rgb(200, 255, 200);
font-size: 25px;
}
.header span.here {
margin-top:34px;
font-size:15px;
color: white;
}
.header a.item {
float:right;
margin-top: 34px;
margin-right:10px;
color: white;
font-size:15px;
}
.header a.item:hover {
text-decoration: underline;
}
.content {
padding-top: 1em;
padding-left: 2em;
padding-right: 2em;
}
img { margin-left: auto; margin-right: auto; display: block; }
h1, h2, h3 { color: rgb(100, 155, 100); }
code .class { color: rgb(0, 0, 150); }
/*code .variable { color: rgb(10, 70, 10); }*/
code .comment { color: rgb(100, 150, 200); }
code .string { color: rgb(0, 100, 10); }
code .number { color: rgb(0, 0, 255); }
code .keyword, code .this { color: rgb(25, 110, 25); font-weight: bold; }
</style>
</head><body><div class="header"><a href="../../../index.html" class="img"><img src="../../../resources/logo.png" /></a><a href="../../../index.html" class="here">Documentation
</a><a href="../../../reference/index.html" class="item">SproutCore Reference
</a></div><div class="content"><h1>ScrollView Touch Support</h1>
<p>SproutCore's ScrollView comes with built-in support for touch-based
scrolling, including momentum and bouncing. In addition, it has (somewhat experimental)
support for scaling.</p>
<p>For many cases, just putting a view inside a ScrollView will "just work". Still, you may want
to set some settings.</p>
<h2>Bouncing</h2>
<p>By default, ScrollView will <em>always</em> bounce when scrolling vertically, regardless of the
content's height, but only bounce horizontally <em>if</em> the content is wider than the ScrollView.
This is controlled by two properties:</p>
<ul>
<li>alwaysBounceHorizontal, which defaults to NO.</li>
<li>alwaysBounceVertical, which defaults to YES.</li>
</ul>
<h2>Scaling</h2>
<p>ScrollView has support for scaling, which you can use through a few properties:</p>
<ul>
<li>canScale. Specifies whether the content may be scaled. If YES, using two fingers
(in that classic "pinch gesture") will zoom the content.</li>
<li>minimumScale: The minimum scale value. Default: 0.25.</li>
<li>maximumScale: The maximum scale value. Default: 2.0.</li>
</ul>
<p><a href='touch.js' class='demo'>touch.js</a></p>
</div><div class="footer"></div></body></html> |
rodp/jquery.behavior | 1 | jquery.behavior.js | /*
* jquery.behavior JavaScript Library v0.1
* http://rodpetrovic.com/jquery/behavior
*
* Copyright (c) 2009 Rodoljub Petrović
* Licensed under the MIT
* http://www.opensource.org/licenses/mit-license.php
*
* Date: 2009-12-13
*/
(function ($) {
$.fn.behavior = function () {
var element = this;
var attach = function (cls, config) {
$(element).each(function () {
this.behavior = new cls(this, config);
});
return $(element);
}
var get = function (index) {
return $(element).get(index || 0).behavior;
}
var map = function (method, attributes) {
$(element).each(function () {
var obj = this.behavior;
if (method in obj) {
if (typeof obj[method] == 'function') {
obj[method].apply(obj, attributes);
} else {
obj[method] = attributes;
}
}
});
return $(element);
}
if (arguments.length > 0 && typeof arguments[0] == 'function') {
return attach(arguments[0], arguments.length > 1 ? arguments[1] : {});
} else if (arguments.length > 0 && typeof arguments[0] == 'string') {
return map(arguments[0], arguments.length > 1 ? arguments[1] : []);
} else if (arguments.length > 0 && typeof arguments[0] == 'number') {
return get(arguments[0]);
} else {
return get();
}
}
})(jQuery);
| /*
* jquery.behavior JavaScript Library v2.0
* http://rodpetrovic.com/jquery/behavior
*
* Copyright 2010, Rodoljub Petrović
* Licensed under the MIT
* http://www.opensource.org/licenses/mit-license.php
*
* Contributors:
* - Matjaž Lipuš
*
* Date: 2011-05-15
*/
/*jslint white: true, onevar: true, undef: true, nomen: true, regexp: true, plusplus: true, bitwise: true, newcap: true, strict: true, maxerr: 50, indent: 4 */
/*global jQuery */
(function ($, undef) {
"use strict";
function attach($jq, Behavior, config) {
$jq.each(function () {
if (!this.behavior) {
this.behavior = {};
}
$.extend(this.behavior, new Behavior(this, config));
});
}
function each($jq, property, attributes) {
$jq.each(function () {
var behavior = this.behavior;
if (behavior && behavior[property] !== undef) {
if (typeof behavior[property] === "function") {
behavior[property].apply(behavior, attributes || []);
} else {
behavior[property] = attributes;
}
}
});
}
$.fn.behavior = function (a, b) {
var type = typeof a;
if (type === "function") {
attach(this, a, b || {});
return this;
}
if (type === "string") {
each(this, a, b);
return this;
}
return this.get(a || 0).behavior;
};
}(jQuery)); |
ellenoneill/lootjestrekmachine | 4 | functions.php | <?php session_start();
ob_start();
$sql_username = "";
$sql_password = "";
$sql_host = "";
$sql_dbname = "";
$config['mail'] = ""; //Jouw e-mailadres
$config['website'] = ""; //De url naar je website
mysql_connect($sql_host, $sql_username, $sql_password);
mysql_select_db($sql_dbname);
function keygen($i)
{
$alfabet = array_merge(range('a', 'z'), range('A', 'Z'), range('0', '9'), range('0', '9'));
$actkey = "";
while(strlen($actkey) < $i)
{
$actkey .= $alfabet[array_rand($alfabet)];
}
return($actkey);
}
function mysql_is_unique($value, $table, $field)
{
$sql = "SELECT ".$field." FROM ".$table." WHERE ".$field." LIKE '".$value."' LIMIT 1";
$res = mysql_query($sql) or echo_mysql_error($sql);
if(mysql_num_rows($res) > 0)
{
return false;
}
else
{
return true;
}
}
function is_unieke_naam_in_groep($naam, $gid)
{
$sql = "SELECT id FROM mensen WHERE naam LIKE '".$naam."' AND groep_id = ".$gid." LIMIT 1";
$res = mysql_query($sql) or echo_mysql_error($sql);
if(mysql_num_rows($res) > 0)
{
return false;
}
else
{
return true;
}
}
function echo_mysql_error($sql)
{
echo '
<h2>MySQL error</h2>: '.mysql_error().'
<h3>Query:</h3>
<pre>'.$sql.'</pre>';
exit();
}
function form_login($username = null, $gid = null, $code = null)
{
$sql = "SELECT id, naam FROM groepen ORDER BY naam";
$res = mysql_query($sql) or echo_mysql_error($sql);
echo '
<form method="post" action="login.php">
<fieldset>
<legend>Inloggen</legend>
<p><label for="groepsnaam">Groepsnaam</label><br />
<select name="groepsnaam">
<option value="">Maak je keuze...</option>';
while($row = mysql_fetch_assoc($res))
{
if($gid == $row['id'])
{
echo '
<option value="'.$row['id'].'" selected="selected">'.$row['naam'].'</option>';
}
else
{
echo '
<option value="'.$row['id'].'">'.$row['naam'].'</option>';
}
}
echo '
</select><br />
<a href="register.php">Maak een nieuwe groep aan</a></p>
<p><label for="gebruikersnaam">Naam</label><br />
<input type="text" name="gebruikersnaam" value="'.$username.'" /></p>
<p><label for="inlogcode">Inlogcode</label><br />
<input type="password" name="inlogcode" value="'.$code.'" /></p>
<p><input type="submit" value="Log in" /></p>
</fieldset>
</form>';
}
function begin_pagina()
{
echo '
<!DOCTYPE html>
<meta charset="utf-8">
<title>Lootjes trekken</title>
<link rel="stylesheet" type="text/css" href="/static/css/universal.css" media="all">
<link rel="stylesheet" type="text/css" href="/static/css/default.css" media="all">
<link rel="stylesheet" type="text/css" href="/static/css/custom.css" media="all">
<h1 class="page-heading"><a href="login.php">De Lootjestrekmachine</a></h1>
';
}
function einde_pagina()
{
echo '
<p class="legal">© 2006-'.date("Y").', powered by de <a href="http://github.com/eliun/lootjestrekmachine">Lootjestrekmachine</a>, <a href="http://phphulp.nl/php/scripts/3/806/">scripting</a> door Herjan Treurniet, revisie door <a href="http://ellenoneill.nl">Ellen O\'Neill</a>, <a href="http://code.google.com/p/universal-ie6-css/">Universal CSS</a> van Andy Clarke. Aan deze website kunnen geen rechten ontleend worden. De makers van deze website zijn niet aansprakelijk voor de gevolgen van het gebruik van deze website. De diensten aangeboden op deze site zijn volkomen gratis, verwacht dan ook niet al te veel service.</p>
';
ob_end_flush();
}
function _make_url_clickable_cb($matches) {
$ret = '';
$url = $matches[2];
if ( empty($url) )
return $matches[0];
// removed trailing [.,;:] from URL
if ( in_array(substr($url, -1), array('.', ',', ';', ':')) === true ) {
$ret = substr($url, -1);
$url = substr($url, 0, strlen($url)-1);
}
return $matches[1] . "<a href=\"$url\" rel=\"nofollow\">$url</a>" . $ret;
}
function _make_web_ftp_clickable_cb($matches) {
$ret = '';
$dest = $matches[2];
$dest = 'http://' . $dest;
if ( empty($dest) )
return $matches[0];
// removed trailing [,;:] from URL
if ( in_array(substr($dest, -1), array('.', ',', ';', ':')) === true ) {
$ret = substr($dest, -1);
$dest = substr($dest, 0, strlen($dest)-1);
}
return $matches[1] . "<a href=\"$dest\" rel=\"nofollow\">$dest</a>" . $ret;
}
function _make_email_clickable_cb($matches) {
$email = $matches[2] . '@' . $matches[3];
return $matches[1] . "<a href=\"mailto:$email\">$email</a>";
}
function make_clickable($ret) {
$ret = ' ' . $ret;
// in testing, using arrays here was found to be faster
$ret = preg_replace_callback('#([\s>])([\w]+?://[\w\\x80-\\xff\#$%&~/.\-;:=,?@\[\]+]*)#is', '_make_url_clickable_cb', $ret);
$ret = preg_replace_callback('#([\s>])((www|ftp)\.[\w\\x80-\\xff\#$%&~/.\-;:=,?@\[\]+]*)#is', '_make_web_ftp_clickable_cb', $ret);
$ret = preg_replace_callback('#([\s>])([.0-9a-z_+-]+)@(([0-9a-z-]+\.)+[0-9a-z]{2,})#i', '_make_email_clickable_cb', $ret);
// this one is not in an array because we need it to run last, for cleanup of accidental links within links
$ret = preg_replace("#(<a( [^>]+?>|>))<a [^>]+?>([^>]+?)</a></a>#i", "$1$3</a>", $ret);
$ret = trim($ret);
return $ret;
}
?> | <?php session_start();
ob_start();
error_reporting( E_CORE_ERROR | E_CORE_WARNING | E_COMPILE_ERROR | E_ERROR | E_WARNING | E_PARSE | E_USER_ERROR | E_USER_WARNING | E_RECOVERABLE_ERROR );
$sql_username = "";
$sql_password = "";
$sql_host = "";
$sql_dbname = "";
$config['mail'] = ""; //Jouw e-mailadres
$config['website'] = ""; //De url naar je website
mysql_connect($sql_host, $sql_username, $sql_password);
mysql_select_db($sql_dbname);
function keygen($i)
{
$alfabet = array_merge(range('a', 'z'), range('A', 'Z'), range('0', '9'), range('0', '9'));
$actkey = "";
while(strlen($actkey) < $i)
{
$actkey .= $alfabet[array_rand($alfabet)];
}
return($actkey);
}
function mysql_is_unique($value, $table, $field)
{
$sql = "SELECT ".$field." FROM ".$table." WHERE ".$field." LIKE '".$value."' LIMIT 1";
$res = mysql_query($sql) or echo_mysql_error($sql);
if(mysql_num_rows($res) > 0)
{
return false;
}
else
{
return true;
}
}
function is_unieke_naam_in_groep($naam, $gid)
{
$sql = "SELECT id FROM mensen WHERE naam LIKE '".$naam."' AND groep_id = ".$gid." LIMIT 1";
$res = mysql_query($sql) or echo_mysql_error($sql);
if(mysql_num_rows($res) > 0)
{
return false;
}
else
{
return true;
}
}
function echo_mysql_error($sql)
{
echo '
<h2>MySQL error</h2>: '.mysql_error().'
<h3>Query:</h3>
<pre>'.$sql.'</pre>';
exit();
}
function form_login($username = null, $gid = null, $code = null)
{
$sql = "SELECT id, naam FROM groepen ORDER BY naam";
$res = mysql_query($sql) or echo_mysql_error($sql);
echo '
<form method="post" action="login.php">
<fieldset>
<legend>Inloggen</legend>
<p><label for="groepsnaam">Groepsnaam</label><br />
<select name="groepsnaam">
<option value="">Maak je keuze...</option>';
while($row = mysql_fetch_assoc($res))
{
if($gid == $row['id'])
{
echo '
<option value="'.$row['id'].'" selected="selected">'.$row['naam'].'</option>';
}
else
{
echo '
<option value="'.$row['id'].'">'.$row['naam'].'</option>';
}
}
echo '
</select><br />
<a href="register.php">Maak een nieuwe groep aan</a></p>
<p><label for="gebruikersnaam">Naam</label><br />
<input type="text" name="gebruikersnaam" value="'.$username.'" /></p>
<p><label for="inlogcode">Inlogcode</label><br />
<input type="password" name="inlogcode" value="'.$code.'" /></p>
<p><input type="submit" value="Log in" /></p>
</fieldset>
</form>';
}
function begin_pagina()
{
echo '
<!DOCTYPE html>
<meta charset="utf-8">
<title>Lootjes trekken</title>
<link rel="stylesheet" type="text/css" href="/static/css/universal.css" media="all">
<link rel="stylesheet" type="text/css" href="/static/css/default.css" media="all">
<link rel="stylesheet" type="text/css" href="/static/css/custom.css" media="all">
<h1 class="page-heading"><a href="login.php">De Lootjestrekmachine</a></h1>
';
}
function einde_pagina()
{
echo '
<p class="legal">© 2006-'.date("Y").', powered by de <a href="http://github.com/eliun/lootjestrekmachine">Lootjestrekmachine</a>, <a href="http://phphulp.nl/php/scripts/3/806/">scripting</a> door Herjan Treurniet, revisie door <a href="http://ellenoneill.nl">Ellen O\'Neill</a>, <a href="http://code.google.com/p/universal-ie6-css/">Universal CSS</a> van Andy Clarke. Aan deze website kunnen geen rechten ontleend worden. De makers van deze website zijn niet aansprakelijk voor de gevolgen van het gebruik van deze website. De diensten aangeboden op deze site zijn volkomen gratis, verwacht dan ook niet al te veel service.</p>
';
ob_end_flush();
}
function _make_url_clickable_cb($matches) {
$ret = '';
$url = $matches[2];
if ( empty($url) )
return $matches[0];
// removed trailing [.,;:] from URL
if ( in_array(substr($url, -1), array('.', ',', ';', ':')) === true ) {
$ret = substr($url, -1);
$url = substr($url, 0, strlen($url)-1);
}
return $matches[1] . "<a href=\"$url\" rel=\"nofollow\">$url</a>" . $ret;
}
function _make_web_ftp_clickable_cb($matches) {
$ret = '';
$dest = $matches[2];
$dest = 'http://' . $dest;
if ( empty($dest) )
return $matches[0];
// removed trailing [,;:] from URL
if ( in_array(substr($dest, -1), array('.', ',', ';', ':')) === true ) {
$ret = substr($dest, -1);
$dest = substr($dest, 0, strlen($dest)-1);
}
return $matches[1] . "<a href=\"$dest\" rel=\"nofollow\">$dest</a>" . $ret;
}
function _make_email_clickable_cb($matches) {
$email = $matches[2] . '@' . $matches[3];
return $matches[1] . "<a href=\"mailto:$email\">$email</a>";
}
function make_clickable($ret) {
$ret = ' ' . $ret;
// in testing, using arrays here was found to be faster
$ret = preg_replace_callback('#([\s>])([\w]+?://[\w\\x80-\\xff\#$%&~/.\-;:=,?@\[\]+]*)#is', '_make_url_clickable_cb', $ret);
$ret = preg_replace_callback('#([\s>])((www|ftp)\.[\w\\x80-\\xff\#$%&~/.\-;:=,?@\[\]+]*)#is', '_make_web_ftp_clickable_cb', $ret);
$ret = preg_replace_callback('#([\s>])([.0-9a-z_+-]+)@(([0-9a-z-]+\.)+[0-9a-z]{2,})#i', '_make_email_clickable_cb', $ret);
// this one is not in an array because we need it to run last, for cleanup of accidental links within links
$ret = preg_replace("#(<a( [^>]+?>|>))<a [^>]+?>([^>]+?)</a></a>#i", "$1$3</a>", $ret);
$ret = trim($ret);
return $ret;
}
?> |
akm/selectable_attr_rails | 1 | lib/selectable_attr_rails/helpers/check_box_group_helper.rb | require 'selectable_attr_rails/helpers/abstract_selection_helper'
module SelectableAttrRails::Helpers
module CheckBoxGroupHelper
class Builder < SelectableAttrRails::Helpers::AbstractSelectionBuilder
def initialize(object, object_name, method, options, template)
super(object, object_name, method, options, template)
@entry_hash_array ||= enum_hash_array_from_object
@param_name = "#{@base_name}_ids"
@check_box_options = @options.delete(:check_box) || {}
end
def each(&block)
@entry_hash_array.each do |entry_hash|
@entry_hash= entry_hash
@tag_value = @entry_hash[:id].to_s.gsub(/\s/, "_").gsub(/\W/, "")
@check_box_id = "#{@object_name}_#{@param_name}_#{@tag_value}"
yield(self)
end
end
def check_box(options = nil)
options = update_options({
:id => @check_box_id, :type => 'checkbox', :value => @tag_value,
:name => "#{@object_name}[#{@param_name}][]"
}, @check_box_options, options)
options[:checked] = 'checked' if @entry_hash[:select]
@template.content_tag("input", nil, options)
end
def label(text = nil, options = nil)
@template.content_tag("label", text || @entry_hash[:name],
update_options({:for => @check_box_id}, options))
end
end
module Base
def check_box_group(object_name, method, options = nil, &block)
object = (options || {})[:object] || instance_variable_get("@#{object_name}")
builder = Builder.new(object, object_name, method, options, @template)
if block_given?
yield(builder)
return nil
else
result = ''
builder.each do
result << builder.check_box
result << ' '
result << builder.label
result << ' '
end
return result
end
end
end
module FormBuilder
def check_box_group(method, options = nil, &block)
@template.check_box_group(@object_name, method,
(options || {}).merge(:object => @object), &block)
end
end
end
end
| require 'selectable_attr_rails/helpers/abstract_selection_helper'
module SelectableAttrRails::Helpers
module CheckBoxGroupHelper
class Builder < SelectableAttrRails::Helpers::AbstractSelectionBuilder
def initialize(object, object_name, method, options, template)
super(object, object_name, method, options, template)
@entry_hash_array ||= enum_hash_array_from_object
@param_name = "#{@base_name}_ids"
@check_box_options = @options.delete(:check_box) || {}
end
def each(&block)
@entry_hash_array.each do |entry_hash|
@entry_hash= entry_hash
@tag_value = @entry_hash[:id].to_s.gsub(/\s/, "_").gsub(/\W/, "")
@check_box_id = "#{@object_name}_#{@param_name}_#{@tag_value}"
yield(self)
end
end
def check_box(options = nil)
options = update_options({
:id => @check_box_id, :type => 'checkbox', :value => @tag_value,
:name => "#{@object_name}[#{@param_name}][]"
}, @check_box_options, options)
options[:checked] = 'checked' if @entry_hash[:select]
@template.content_tag("input", nil, options)
end
def label(text = nil, options = nil)
@template.content_tag("label", text || @entry_hash[:name],
update_options({:for => @check_box_id}, options))
end
end
module Base
def check_box_group(object_name, method, options = nil, &block)
object = (options || {})[:object] || instance_variable_get("@#{object_name}")
builder = Builder.new(object, object_name, method, options, self)
if block_given?
yield(builder)
return nil
else
result = ''
builder.each do
result << builder.check_box
result << ' '
result << builder.label
result << ' '
end
return result.respond_to?(:html_safe) ? result.html_safe : result
end
end
end
module FormBuilder
def check_box_group(method, options = nil, &block)
@template.check_box_group(@object_name, method,
(options || {}).merge(:object => @object), &block)
end
end
end
end
|
efoxepstein/stupid-machines | 1 | TuringMachineWithNoSemicolons.java | /**
* @author Eli Fox-Epstein
*/
public class TuringMachineWithNoSemicolons {
/* ARGS:
* 0: tape
* 1: state
* 2: current index
* 3: transition table (prefix "ABCDE" means at state A, if read a B, move C, write D, go to state E)
* 4: reject state
* 5: accept state
*
* Example use: a turing machine that recognizes the languages of words of the form: 0^jELI
*
* One way to write a machine for this is with the following rules:
* at A, if 0, move R, write _, goto A
* at A, if E, move R, write _, goto B
* at B, if L, move R, write _, goto C
* at C, if I, move R, write _, goto +
* A test case:
* java TuringMachineWithNoSemicolons 00000ELI A 0 A0R_AAER_BBLR_CCIR_+ - +
*
* This machine will not halt on most inputs.
*/
public static void main(String[] args) {
while(!args[1].equals(args[4]) && !args[1].equals(args[5])){
if((args[1] + args[0].charAt(Integer.parseInt(args[2]))).equals(args[3].substring(0,2))){
// Write
if(( args[0] = args[0].substring(0, Integer.parseInt(args[2])) +
args[3].charAt(3) +
args[0].substring(Integer.parseInt(args[2])+1, args[0].length())
) == ""){}
if(args[3].charAt(2) == 'L'){
if(( args[2] = (Integer.parseInt(args[2])-1) + "" ) != ""){}
}else{
if(( args[2] = (Integer.parseInt(args[2])+1) + "" ) != ""){}
}
// Update current state
if(( args[1] = args[3].charAt(4) + "" ) != ""){}
}else{
// Cycle transition table
if((args[3] = args[3].substring(5, args[3].length()) + args[3].substring(0,5)) != ""){}
}
}
if(args[1].equals(args[4])){
if(System.out.printf("REJECT: " + args[0]) == null){}
}else if(System.out.printf("ACCEPT: " + args[0]) == null){}
}
} | /**
* @author Eli Fox-Epstein
*/
public class TuringMachineWithNoSemicolons {
/* ARGS:
* 0: tape
* 1: state
* 2: current index
* 3: transition table (prefix "ABCDE" means at state A, if read a B, move C, write D, go to state E)
* 4: reject state
* 5: accept state
*
* Example use: a turing machine that recognizes the languages of words of the form: 0^jELI
*
* One way to write a machine for this is with the following rules:
* at A, if 0, move R, write _, goto A
* at A, if E, move R, write _, goto B
* at B, if L, move R, write _, goto C
* at C, if I, move R, write _, goto +
* A test case:
* java TuringMachineWithNoSemicolons 00000ELI A 0 A0R_AAER_BBLR_CCIR_+ - +
*
* This machine will not halt on most inputs.
* TODO: use stream() api with lambda expressions.
*/
public static void main(String[] args) {
while(!args[1].equals(args[4]) && !args[1].equals(args[5])){
if((args[1] + args[0].charAt(Integer.parseInt(args[2]))).equals(args[3].substring(0,2))){
// Write
if(( args[0] = args[0].substring(0, Integer.parseInt(args[2])) +
args[3].charAt(3) +
args[0].substring(Integer.parseInt(args[2])+1, args[0].length())
) == ""){}
if(args[3].charAt(2) == 'L'){
if(( args[2] = (Integer.parseInt(args[2])-1) + "" ) != ""){}
}else{
if(( args[2] = (Integer.parseInt(args[2])+1) + "" ) != ""){}
}
// Update current state
if(( args[1] = args[3].charAt(4) + "" ) != ""){}
}else{
// Cycle transition table
if((args[3] = args[3].substring(5, args[3].length()) + args[3].substring(0,5)) != ""){}
}
}
if(args[1].equals(args[4])){
if(System.out.printf("REJECT: " + args[0]) == null){}
}else if(System.out.printf("ACCEPT: " + args[0]) == null){}
}
}
|
jimhourihan/gto | 1 | plugins/python/src/gto/gtoWriter.cpp | //
// Copyright (c) 2009, Tweak Software
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above
// copyright notice, this list of conditions and the following
// disclaimer.
//
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials
// provided with the distribution.
//
// * Neither the name of the Tweak Software nor the names of its
// contributors may be used to endorse or promote products
// derived from this software without specific prior written
// permission.
//
// THIS SOFTWARE IS PROVIDED BY Tweak Software ''AS IS'' AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL Tweak Software BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
// OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
// BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
#include <sstream>
#include "gtoWriter.h"
namespace PyGto {
using namespace std;
// *****************************************************************************
// We start with a few utility functions...
// *****************************************************************************
// *****************************************************************************
// Properly deallocate the instance-specific stuff-holder object
void gtoWriter_PyObject_dealloc( PyObject *self )
{
assert( self != NULL );
gtoWriter_PyObject *gwSelf = (gtoWriter_PyObject *)self;
delete gwSelf->m_writer;
delete gwSelf->m_propertyNames;
PyObject_DEL( self );
}
// *****************************************************************************
// Flatten a tuple or list into a C array of any type using the converter
// supplied. 'start' is used internally for recursive purposes. Returns
// the number of items in the C array.
template<typename T> int flatten( PyObject *object,
T *data,
int maxItems,
const char *expectedTypeStr,
T (*converter)(PyObject *),
bool start = true )
{
static int pos;
if( start )
{
pos = 0;
}
if( pos > maxItems )
{
return pos;
}
// If we come across a class instance, do we know what to do with it?
if( PyInstance_Check( object ) )
{
string classname( PyTypeName( object ) );
// If we know what it is, convert it to something useful
if( classname == "mat3" || classname == "mat4" )
{
// mat3 and mat4 convert easily to a list
PyObject *tmp = PyObject_GetAttrString( object, "mlist" );
Py_INCREF( tmp );
object = tmp;
}
else if( classname == "vec3" || classname == "vec4" )
{
// vec3 and vec4 have no handy .toList() method, so we have
// to do it the 'hard way'...
PyObject *tmp;
classname == "vec3" ? tmp = PyTuple_New(3) : tmp = PyTuple_New(4);
PyObject *x = PyObject_GetAttrString( object, "x" );
Py_INCREF( x );
PyObject *y = PyObject_GetAttrString( object, "y" );
Py_INCREF( y );
PyObject *z = PyObject_GetAttrString( object, "z" );
Py_INCREF( z );
PyTuple_SetItem( tmp, 0, x );
PyTuple_SetItem( tmp, 1, y );
PyTuple_SetItem( tmp, 2, z );
if( classname == "vec4" )
{
PyObject *w = PyObject_GetAttrString( object, "w" );
Py_INCREF( w );
PyTuple_SetItem( tmp, 3, w );
}
object = tmp;
}
else if( classname == "quat" )
{
// quat has no handy .toList() method either...
PyObject *tmp = PyTuple_New(4);
PyObject *w = PyObject_GetAttrString( object, "w" );
Py_INCREF( w );
PyObject *x = PyObject_GetAttrString( object, "x" );
Py_INCREF( x );
PyObject *y = PyObject_GetAttrString( object, "y" );
Py_INCREF( y );
PyObject *z = PyObject_GetAttrString( object, "z" );
Py_INCREF( z );
PyTuple_SetItem( tmp, 0, w );
PyTuple_SetItem( tmp, 1, x );
PyTuple_SetItem( tmp, 2, y );
PyTuple_SetItem( tmp, 3, z );
object = tmp;
}
else
{
// Otherwise, barf on it
PyErr_Format( gtoError(), "Can't handle '%s' class data directly."
" Convert it to a tuple or list first.",
classname.c_str() );
return -1;
}
}
// Put atoms directly into the buffer, and recurse on more complex types
for( int i = 0; i < PySequence_Size( object ); ++i )
{
PyObject *item = PySequence_GetItem( object, i );
if( PyTuple_Check( item )
|| PyList_Check( item )
|| PyInstance_Check( item ) )
{
flatten( item, data, maxItems, expectedTypeStr, converter, false );
}
else
{
// Add the atom to the buffer and move on
data[pos] = converter( item );
if( PyErr_Occurred() )
{
if( ! PyErr_ExceptionMatches( PyExc_TypeError ) )
{
// This is something other than a type error, so
// this will cause a Python traceback later...
return -1;
}
// Data of a type not handled by the converter
PyErr_Format( gtoError(), "Expected data of type '%s', but "
"got '%s'", expectedTypeStr,
PyTypeName( item ) );
return -1;
}
pos++;
if( pos > maxItems )
{
return pos;
}
}
}
return pos;
}
// *****************************************************************************
// The next several functions implement the methods on the Python gto.Writer
// class.
// *****************************************************************************
// *****************************************************************************
// gto.Writer class constructor. Does nothing, but is required anyway
PyObject *gtoWriter_init( PyObject *_self, PyObject *args )
{
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.open( filename )
PyObject *gtoWriter_open( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *filename;
Gto::Writer::FileType filemode = Gto::Writer::CompressedGTO;
if( ! PyArg_ParseTuple( args, "Os|i:gtoWriter_open", &self, &filename,
&filemode ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Create a new python writer object and add it to this instance's
// dictionary
gtoWriter_PyObject *writer = PyObject_NEW( gtoWriter_PyObject,
>oWriter_PyObjectType );
writer->m_writer = new Gto::Writer();
writer->m_propCount = 0;
writer->m_beginDataCalled = false;
writer->m_objectDef = false;
writer->m_componentDef = false;
writer->m_propertyNames = new vector<string>;
PyDict_SetItemString( self->in_dict, "__writerEngine", (PyObject *)writer );
// Ask the writer to open the given file
if( ! writer->m_writer->open( filename, filemode ) )
{
PyErr_Format( gtoError(), "Unable to open specified file: %s",
filename );
return NULL;
}
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.close()
PyObject *gtoWriter_close( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_close", &self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Close the file
writer->m_writer->close();
// Remove the writer from the class dictionary
PyDict_DelItemString( self->in_dict, "__writerEngine" );
Py_DECREF( writer );
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.beginObject( name, protocol, protocolVersion )
PyObject *gtoWriter_beginObject( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *name;
char *protocol;
unsigned int protocolVersion;
if( ! PyArg_ParseTuple( args, "Ossi:gtoWriter_beginObject",
&self, &name, &protocol, &protocolVersion ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == true )
{
PyErr_SetString( gtoError(), "Can't nest object declarations" );
return NULL;
}
if( writer->m_beginDataCalled == true )
{
PyErr_SetString( gtoError(), "Once beginData is called, no new "
"objects can be declared" );
return NULL;
}
// Make it so
writer->m_writer->beginObject( name, protocol, protocolVersion );
writer->m_objectDef = true;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.endObject()
PyObject *gtoWriter_endObject( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_endObject",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == false )
{
PyErr_SetString( gtoError(), "endObject called before beginObject" );
return NULL;
}
// Make it so
writer->m_writer->endObject();
writer->m_objectDef = false;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.beginComponent( name, interp, flags )
PyObject *gtoWriter_beginComponent( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *name;
char *interp = "";
int flags = 0;
// Try GTOv2 prototype first...
if( ! PyArg_ParseTuple( args, "Os|i:gtoWriter_beginComponent",
&self, &name, &flags ) )
{
PyErr_Clear();
// If that doesn't work, try the GTOv3 prototype
if( ! PyArg_ParseTuple( args, "Oss|i:gtoWriter_beginComponent",
&self, &name, &interp, &flags ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == false )
{
PyErr_SetString( gtoError(), "Components can only exist inside object "
"blocks" );
return NULL;
}
if( writer->m_componentDef == true )
{
PyErr_SetString( gtoError(), "Can't nest component declarations" );
return NULL;
}
// Make it so
writer->m_writer->beginComponent( name, interp, flags );
writer->m_componentDef = true;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.endComponent()
PyObject *gtoWriter_endComponent( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_endComponent",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_componentDef == false )
{
PyErr_SetString( gtoError(), "endComponent called before "
"beginComponent" );
return NULL;
}
// Make it so
writer->m_writer->endComponent();
writer->m_componentDef = false;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.property( name, type, numElements, width, interp )
PyObject *gtoWriter_property( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *name;
int type;
int numElements;
int width = 1;
char *interp = "";
if( ! PyArg_ParseTuple( args, "Osii|is:gtoWriter_property",
&self, &name, &type, &numElements,
&width, &interp ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == false || writer->m_componentDef == false )
{
PyErr_SetString( gtoError(), "Properties can only exist inside "
"object/component blocks" );
return NULL;
}
// Store name for later dumbassness checking in propertyData()
writer->m_propertyNames->push_back( name );
// Make it so
writer->m_writer->property( name,
(Gto::DataType)type,
numElements,
width,
interp );
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.intern( string | tuple | list )
PyObject *gtoWriter_intern( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
PyObject *data;
if( ! PyArg_ParseTuple( args, "OO:gtoWriter_intern",
&self, &data ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Handle a single string
if( PyString_Check( data ) )
{
char *str = PyString_AsString( data );
writer->m_writer->intern( str );
}
// Handle a bunch of strings all at once
else if( PySequence_Check( data ) )
{
for( int i = 0; i < PySequence_Size( data ); ++i )
{
PyObject *pstr = PySequence_GetItem( data, i );
if( ! PyString_Check( pstr ) )
{
PyErr_SetString( gtoError(), "Non-string in sequence" );
return NULL;
}
char *str = PyString_AsString( pstr );
writer->m_writer->intern( str );
}
}
// We can't handle what we were given
else
{
PyErr_SetString( gtoError(), "intern requires a string or a "
"sequence of strings" );
return NULL;
}
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.lookup( string )
PyObject *gtoWriter_lookup( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *str;
if( ! PyArg_ParseTuple( args, "Os:gtoWriter_lookup",
&self, &str ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_beginDataCalled == false )
{
PyErr_SetString( gtoError(), "lookup() cannot be used until "
"beginData() is called" );
return NULL;
}
// Make it so
PyObject *strId_PyObj = PyInt_FromLong( writer->m_writer->lookup( str ) );
Py_INCREF( strId_PyObj );
return strId_PyObj;
}
// *****************************************************************************
// implements gto.Writer.beginData()
PyObject *gtoWriter_beginData( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_beginData",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_writer->properties().size() == 0 )
{
PyErr_SetString( gtoError(), "There are no properties to write" );
return NULL;
}
// Make it so
writer->m_writer->beginData();
writer->m_beginDataCalled = true;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.endData()
PyObject *gtoWriter_endData( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_endData",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_beginDataCalled == false )
{
PyErr_SetString( gtoError(), "endData called before beginData" );
return NULL;
}
// Make it so
writer->m_writer->endData();
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.propertyData( data )
PyObject *gtoWriter_propertyData( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
PyObject *rawdata;
if( ! PyArg_ParseTuple( args, "OO:gtoWriter_propertyData",
&self, &rawdata ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( ! writer->m_beginDataCalled )
{
PyErr_SetString( gtoError(), "propertyData called before beginData" );
return NULL;
}
// If we're handed a single value, tuple-ize it for the code below
if( PyInt_Check( rawdata )
|| PyFloat_Check( rawdata )
|| PyString_Check( rawdata ) )
{
PyObject *tmp = PyTuple_New( 1 );
PyTuple_SetItem( tmp, 0, rawdata );
Py_DECREF( rawdata );
rawdata = tmp;
}
// Get a handle to the property definition for the current property
// and do some sanity checking
Gto::PropertyHeader prop;
prop = writer->m_writer->properties()[writer->m_propCount];
if( writer->m_propCount >= writer->m_writer->properties().size() )
{
PyErr_SetString( gtoError(), "Undeclared data." );
return NULL;
}
const char *currentPropName = (*writer->m_propertyNames)[writer->m_propCount].c_str();
// Determine how many elements we have in the data
int dataSize = prop.size * prop.width;
// Write that data!
if( prop.type == Gto::Int )
{
int *data = new int[dataSize];
int numItems = flatten( rawdata, data, dataSize, "int", PyInt_AsInt );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, prop.width,
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Float )
{
float *data = new float[dataSize];
int numItems = flatten( rawdata, data, dataSize, "float",
PyFloat_AsFloat );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, prop.width,
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Double )
{
double *data = new double[dataSize];
int numItems = flatten( rawdata, data, dataSize, "double",
PyFloat_AsDouble );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, prop.width,
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Short )
{
unsigned short *data = new unsigned short[dataSize];
int numItems = flatten( rawdata, data, dataSize, "short",
PyInt_AsShort );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, prop.width,
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Byte )
{
unsigned char *data = new unsigned char[dataSize];
int numItems = flatten( rawdata, data, dataSize, "byte",
PyInt_AsByte );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, prop.width,
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::String )
{
char **strings = new char *[dataSize];
int numItems = flatten( rawdata, strings, dataSize, "string",
PyString_AsString );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, prop.width,
numItems );
return NULL;
}
int *data = new int[dataSize];
for( int i = 0; i < numItems; ++i )
{
data[i] = writer->m_writer->lookup( strings[i] );
if( data[i] == -1 )
{
PyErr_Format( gtoError(),
"'%s' needs to be \"interned\" before it can "
"be used as data in property #%d",
strings[i], writer->m_propCount );
return NULL;
}
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] strings;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
PyErr_Format( gtoError(), "Undefined property type: %d in property '%s'",
prop.type, currentPropName );
return NULL;
}
}; // End namespace PyGto
| //
// Copyright (c) 2009, Tweak Software
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above
// copyright notice, this list of conditions and the following
// disclaimer.
//
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following
// disclaimer in the documentation and/or other materials
// provided with the distribution.
//
// * Neither the name of the Tweak Software nor the names of its
// contributors may be used to endorse or promote products
// derived from this software without specific prior written
// permission.
//
// THIS SOFTWARE IS PROVIDED BY Tweak Software ''AS IS'' AND ANY EXPRESS
// OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL Tweak Software BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
// OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
// BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
// USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
// DAMAGE.
//
#include <sstream>
#include "gtoWriter.h"
namespace PyGto {
using namespace std;
// *****************************************************************************
// We start with a few utility functions...
// *****************************************************************************
// *****************************************************************************
// Properly deallocate the instance-specific stuff-holder object
void gtoWriter_PyObject_dealloc( PyObject *self )
{
assert( self != NULL );
gtoWriter_PyObject *gwSelf = (gtoWriter_PyObject *)self;
delete gwSelf->m_writer;
delete gwSelf->m_propertyNames;
PyObject_DEL( self );
}
// *****************************************************************************
// Flatten a tuple or list into a C array of any type using the converter
// supplied. 'start' is used internally for recursive purposes. Returns
// the number of items in the C array.
template<typename T> int flatten( PyObject *object,
T *data,
int maxItems,
const char *expectedTypeStr,
T (*converter)(PyObject *),
bool start = true )
{
static int pos;
if( start )
{
pos = 0;
}
if( pos > maxItems )
{
return pos;
}
// If we come across a class instance, do we know what to do with it?
if( PyInstance_Check( object ) )
{
string classname( PyTypeName( object ) );
// If we know what it is, convert it to something useful
if( classname == "mat3" || classname == "mat4" )
{
// mat3 and mat4 convert easily to a list
PyObject *tmp = PyObject_GetAttrString( object, "mlist" );
Py_INCREF( tmp );
object = tmp;
}
else if( classname == "vec3" || classname == "vec4" )
{
// vec3 and vec4 have no handy .toList() method, so we have
// to do it the 'hard way'...
PyObject *tmp;
classname == "vec3" ? tmp = PyTuple_New(3) : tmp = PyTuple_New(4);
PyObject *x = PyObject_GetAttrString( object, "x" );
Py_INCREF( x );
PyObject *y = PyObject_GetAttrString( object, "y" );
Py_INCREF( y );
PyObject *z = PyObject_GetAttrString( object, "z" );
Py_INCREF( z );
PyTuple_SetItem( tmp, 0, x );
PyTuple_SetItem( tmp, 1, y );
PyTuple_SetItem( tmp, 2, z );
if( classname == "vec4" )
{
PyObject *w = PyObject_GetAttrString( object, "w" );
Py_INCREF( w );
PyTuple_SetItem( tmp, 3, w );
}
object = tmp;
}
else if( classname == "quat" )
{
// quat has no handy .toList() method either...
PyObject *tmp = PyTuple_New(4);
PyObject *w = PyObject_GetAttrString( object, "w" );
Py_INCREF( w );
PyObject *x = PyObject_GetAttrString( object, "x" );
Py_INCREF( x );
PyObject *y = PyObject_GetAttrString( object, "y" );
Py_INCREF( y );
PyObject *z = PyObject_GetAttrString( object, "z" );
Py_INCREF( z );
PyTuple_SetItem( tmp, 0, w );
PyTuple_SetItem( tmp, 1, x );
PyTuple_SetItem( tmp, 2, y );
PyTuple_SetItem( tmp, 3, z );
object = tmp;
}
else
{
// Otherwise, barf on it
PyErr_Format( gtoError(), "Can't handle '%s' class data directly."
" Convert it to a tuple or list first.",
classname.c_str() );
return -1;
}
}
// Put atoms directly into the buffer, and recurse on more complex types
for( int i = 0; i < PySequence_Size( object ); ++i )
{
PyObject *item = PySequence_GetItem( object, i );
if( PyTuple_Check( item )
|| PyList_Check( item )
|| PyInstance_Check( item ) )
{
flatten( item, data, maxItems, expectedTypeStr, converter, false );
}
else
{
// Add the atom to the buffer and move on
data[pos] = converter( item );
if( PyErr_Occurred() )
{
if( ! PyErr_ExceptionMatches( PyExc_TypeError ) )
{
// This is something other than a type error, so
// this will cause a Python traceback later...
return -1;
}
// Data of a type not handled by the converter
PyErr_Format( gtoError(), "Expected data of type '%s', but "
"got '%s'", expectedTypeStr,
PyTypeName( item ) );
return -1;
}
pos++;
if( pos > maxItems )
{
return pos;
}
}
}
return pos;
}
// *****************************************************************************
// The next several functions implement the methods on the Python gto.Writer
// class.
// *****************************************************************************
// *****************************************************************************
// gto.Writer class constructor. Does nothing, but is required anyway
PyObject *gtoWriter_init( PyObject *_self, PyObject *args )
{
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.open( filename )
PyObject *gtoWriter_open( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *filename;
Gto::Writer::FileType filemode = Gto::Writer::CompressedGTO;
if( ! PyArg_ParseTuple( args, "Os|i:gtoWriter_open", &self, &filename,
&filemode ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Create a new python writer object and add it to this instance's
// dictionary
gtoWriter_PyObject *writer = PyObject_NEW( gtoWriter_PyObject,
>oWriter_PyObjectType );
writer->m_writer = new Gto::Writer();
writer->m_propCount = 0;
writer->m_beginDataCalled = false;
writer->m_objectDef = false;
writer->m_componentDef = false;
writer->m_propertyNames = new vector<string>;
PyDict_SetItemString( self->in_dict, "__writerEngine", (PyObject *)writer );
// Ask the writer to open the given file
if( ! writer->m_writer->open( filename, filemode ) )
{
PyErr_Format( gtoError(), "Unable to open specified file: %s",
filename );
return NULL;
}
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.close()
PyObject *gtoWriter_close( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_close", &self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Close the file
writer->m_writer->close();
// Remove the writer from the class dictionary
PyDict_DelItemString( self->in_dict, "__writerEngine" );
Py_DECREF( writer );
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.beginObject( name, protocol, protocolVersion )
PyObject *gtoWriter_beginObject( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *name;
char *protocol;
unsigned int protocolVersion;
if( ! PyArg_ParseTuple( args, "Ossi:gtoWriter_beginObject",
&self, &name, &protocol, &protocolVersion ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == true )
{
PyErr_SetString( gtoError(), "Can't nest object declarations" );
return NULL;
}
if( writer->m_beginDataCalled == true )
{
PyErr_SetString( gtoError(), "Once beginData is called, no new "
"objects can be declared" );
return NULL;
}
// Make it so
writer->m_writer->beginObject( name, protocol, protocolVersion );
writer->m_objectDef = true;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.endObject()
PyObject *gtoWriter_endObject( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_endObject",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == false )
{
PyErr_SetString( gtoError(), "endObject called before beginObject" );
return NULL;
}
// Make it so
writer->m_writer->endObject();
writer->m_objectDef = false;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.beginComponent( name, interp, flags )
PyObject *gtoWriter_beginComponent( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *name;
char *interp = "";
int flags = 0;
// Try GTOv2 prototype first...
if( ! PyArg_ParseTuple( args, "Os|i:gtoWriter_beginComponent",
&self, &name, &flags ) )
{
PyErr_Clear();
// If that doesn't work, try the GTOv3 prototype
if( ! PyArg_ParseTuple( args, "Oss|i:gtoWriter_beginComponent",
&self, &name, &interp, &flags ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == false )
{
PyErr_SetString( gtoError(), "Components can only exist inside object "
"blocks" );
return NULL;
}
if( writer->m_componentDef == true )
{
PyErr_SetString( gtoError(), "Can't nest component declarations" );
return NULL;
}
// Make it so
writer->m_writer->beginComponent( name, interp, flags );
writer->m_componentDef = true;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.endComponent()
PyObject *gtoWriter_endComponent( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_endComponent",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_componentDef == false )
{
PyErr_SetString( gtoError(), "endComponent called before "
"beginComponent" );
return NULL;
}
// Make it so
writer->m_writer->endComponent();
writer->m_componentDef = false;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// Implements gto.Writer.property( name, type, numElements, width, interp )
PyObject *gtoWriter_property( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *name;
int type;
int numElements;
int width = 1;
char *interp = "";
if( ! PyArg_ParseTuple( args, "Osii|is:gtoWriter_property",
&self, &name, &type, &numElements,
&width, &interp ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_objectDef == false || writer->m_componentDef == false )
{
PyErr_SetString( gtoError(), "Properties can only exist inside "
"object/component blocks" );
return NULL;
}
// Store name for later dumbassness checking in propertyData()
writer->m_propertyNames->push_back( name );
// Make it so
writer->m_writer->property( name,
(Gto::DataType)type,
numElements,
width,
interp );
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.intern( string | tuple | list )
PyObject *gtoWriter_intern( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
PyObject *data;
if( ! PyArg_ParseTuple( args, "OO:gtoWriter_intern",
&self, &data ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Handle a single string
if( PyString_Check( data ) )
{
char *str = PyString_AsString( data );
writer->m_writer->intern( str );
}
// Handle a bunch of strings all at once
else if( PySequence_Check( data ) )
{
for( int i = 0; i < PySequence_Size( data ); ++i )
{
PyObject *pstr = PySequence_GetItem( data, i );
if( PyString_Check( pstr ) )
{
char *str = PyString_AsString( pstr );
writer->m_writer->intern( str );
}
else if( PySequence_Check( pstr ) )
{
for( int j = 0; j < PySequence_Size( pstr ); ++j )
{
PyObject *ppstr = PySequence_GetItem( pstr, j );
if( ! PyString_Check( ppstr ) )
{
PyErr_SetString( gtoError(),
"Non-string in sub-sequence" );
return NULL;
}
char *str = PyString_AsString( ppstr );
writer->m_writer->intern( str );
}
}
else
{
PyErr_SetString( gtoError(),
"Non-string or sequence in sequence" );
return NULL;
}
}
}
// We can't handle what we were given
else
{
PyErr_SetString( gtoError(), "intern requires a string or a "
"sequence of strings" );
return NULL;
}
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.lookup( string )
PyObject *gtoWriter_lookup( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
char *str;
if( ! PyArg_ParseTuple( args, "Os:gtoWriter_lookup",
&self, &str ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_beginDataCalled == false )
{
PyErr_SetString( gtoError(), "lookup() cannot be used until "
"beginData() is called" );
return NULL;
}
// Make it so
PyObject *strId_PyObj = PyInt_FromLong( writer->m_writer->lookup( str ) );
Py_INCREF( strId_PyObj );
return strId_PyObj;
}
// *****************************************************************************
// implements gto.Writer.beginData()
PyObject *gtoWriter_beginData( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_beginData",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_writer->properties().size() == 0 )
{
PyErr_SetString( gtoError(), "There are no properties to write" );
return NULL;
}
// Make it so
writer->m_writer->beginData();
writer->m_beginDataCalled = true;
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.endData()
PyObject *gtoWriter_endData( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
if( ! PyArg_ParseTuple( args, "O:gtoWriter_endData",
&self ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( writer->m_beginDataCalled == false )
{
PyErr_SetString( gtoError(), "endData called before beginData" );
return NULL;
}
// Make it so
writer->m_writer->endData();
Py_INCREF( Py_None );
return Py_None;
}
// *****************************************************************************
// implements gto.Writer.propertyData( data )
PyObject *gtoWriter_propertyData( PyObject *_self, PyObject *args )
{
PyInstanceObject *self;
PyObject *rawdata;
if( ! PyArg_ParseTuple( args, "OO:gtoWriter_propertyData",
&self, &rawdata ) )
{
// Invalid parameters, let Python do a stack trace
return NULL;
}
// Get a handle to our Gto::Writer instance
gtoWriter_PyObject *writer =
(gtoWriter_PyObject *)PyDict_GetItemString( self->in_dict,
"__writerEngine" );
if( writer == NULL )
{
PyErr_SetString( gtoError(), "no file is open." );
return NULL;
}
assert( writer->m_writer != NULL );
// Check for dumbassness
if( ! writer->m_beginDataCalled )
{
PyErr_SetString( gtoError(), "propertyData called before beginData" );
return NULL;
}
// If we're handed a single value, tuple-ize it for the code below
if( PyInt_Check( rawdata )
|| PyFloat_Check( rawdata )
|| PyString_Check( rawdata ) )
{
PyObject *tmp = PyTuple_New( 1 );
PyTuple_SetItem( tmp, 0, rawdata );
Py_DECREF( rawdata );
rawdata = tmp;
}
// Get a handle to the property definition for the current property
// and do some sanity checking
Gto::PropertyHeader prop;
prop = writer->m_writer->properties()[writer->m_propCount];
if( writer->m_propCount >= writer->m_writer->properties().size() )
{
PyErr_SetString( gtoError(), "Undeclared data." );
return NULL;
}
const char *currentPropName = (*writer->m_propertyNames)[writer->m_propCount].c_str();
// Determine how many elements we have in the data
int dataSize = prop.size * elementSize(prop.dims);
// Write that data!
if( prop.type == Gto::Int )
{
int *data = new int[dataSize];
int numItems = flatten( rawdata, data, dataSize, "int", PyInt_AsInt );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, int(elementSize(prop.dims)),
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Float )
{
float *data = new float[dataSize];
int numItems = flatten( rawdata, data, dataSize, "float",
PyFloat_AsFloat );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, int(elementSize(prop.dims)),
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Double )
{
double *data = new double[dataSize];
int numItems = flatten( rawdata, data, dataSize, "double",
PyFloat_AsDouble );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, int(elementSize(prop.dims)),
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Short )
{
unsigned short *data = new unsigned short[dataSize];
int numItems = flatten( rawdata, data, dataSize, "short",
PyInt_AsShort );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, int(elementSize(prop.dims)),
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::Byte )
{
unsigned char *data = new unsigned char[dataSize];
int numItems = flatten( rawdata, data, dataSize, "byte",
PyInt_AsByte );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, int(elementSize(prop.dims)),
numItems );
return NULL;
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
if( prop.type == Gto::String )
{
char **strings = new char *[dataSize];
int numItems = flatten( rawdata, strings, dataSize, "string",
PyString_AsString );
if( PyErr_Occurred() )
{
return NULL;
}
if( numItems != dataSize )
{
PyErr_Format( gtoError(), "Property '%s' was declared as having %d"
" x %d values, but %d values were given for writing",
currentPropName, prop.size, int(elementSize(prop.dims)),
numItems );
return NULL;
}
int *data = new int[dataSize];
for( int i = 0; i < numItems; ++i )
{
data[i] = writer->m_writer->lookup( strings[i] );
if( data[i] == -1 )
{
PyErr_Format( gtoError(),
"'%s' needs to be \"interned\" before it can "
"be used as data in property #%d",
strings[i], writer->m_propCount );
return NULL;
}
}
writer->m_writer->propertyData( data );
writer->m_propCount++;
delete [] strings;
delete [] data;
Py_INCREF( Py_None );
return Py_None;
}
PyErr_Format( gtoError(), "Undefined property type: %d in property '%s'",
prop.type, currentPropName );
return NULL;
}
}; // End namespace PyGto
|
openscriptures/HebrewLexicon | 6 | sinri/HebrewStrongDictionary.html | <!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Hebrew Strong Dictionary</title>
<script src="//cdn.bootcss.com/jquery/3.2.0/jquery.min.js"></script>
<script type="text/javascript">
var dict={};
var dict_mapping={};
$(document).ready(()=>{
$.ajax({
url:'./json/StrongHebrewDictionary.json',
method:'get',
dataType:'json'
}).done((book)=>{
dict=book.dict;
dict_mapping=book.mapping;
}).fail(()=>{
alert('ajax failed');
})
});
function query_hebrew(){
let heb=$("#heb").val();
display_strong(dict_mapping[heb]);
}
function query_strong(){
let strong_number=$("#strong_number").val();
display_strong("H"+strong_number);
}
function display_strong(number){
let result="Not found.";
if(dict[number]){
let item=dict[number];
console.log(number,item);
result="";
result+="<h3>#"+number+" "+item.w.w+"</h3>";
result+="<p><span class='part_tag'>pos</span>"+item.w.pos+"</p>";
result+="<p><span class='part_tag'>pron</span>"+item.w.pron+"</p>";
result+="<p><span class='part_tag'>src</span>"+item.w.src+"</p>";
result+="<p><span class='part_tag'>xlit</span>"+item.w.xlit+"</p>";
result+="<p><span class='part_tag'>source</span>"+item.source+"</p>";
result+="<p><span class='part_tag'>meaning</span>"+item.meaning+"</p>";
result+="<p><span class='part_tag'>usage</span>"+item.usage+"</p>";
if(item.note){
result+="<p><span class='part_tag'>note</span>"+item.note+"</p>";
}
}
$("#resule_box").html(result);
}
</script>
<style type="text/css">
h3 {
}
p {
}
span.part_tag{
color: blue;
margin: auto 5px;
}
def {
color: green;
}
w {
color: red;
}
#query_box {
border-bottom: 1px solid gray;
margin: 10px;
}
#resule_box{
margin: 10px;
border-bottom: 1px solid gray;
min-height: 200px;
}
#footer {
text-align: center;
}
</style>
</head>
<body>
<h1>Hebrew Strong Dictionary</h1>
<div id="query_box">
<p>
Hebrew:
<input type="text" id="heb">
<button onclick="query_hebrew()">Query</button>
</p>
<p>
Strong Number:
<input type="text" id="strong_number">
<button onclick="query_strong()">Query</button>
</p>
</div>
<div id="resule_box">
</div>
<div id="footer">
Copyright 2017 Sinri Edogawa. Dictionary data powered by <a href="https://github.com/openscriptures/HebrewLexicon">Project HebrewLexicon</a>.
</div>
</body>
</html>
<!-- http://localhost/leqee/Lab/SinriStrongDict/HebrewStrongDictionary.html --> | <!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Hebrew Strong Dictionary</title>
<script src="./js/jquery.min.js"></script>
<script src="./json/StrongHebrewDictionary.json"></script>
<script type="text/javascript">
function query_hebrew(){
let heb=$("#heb").val();
display_strong(dict_mapping[heb]);
}
function query_strong(){
let strong_number=$("#strong_number").val();
display_strong("H"+strong_number);
}
function display_strong(number){
let result="Not found.";
if(dict[number]){
let item=dict[number];
console.log(number,item);
result="";
result+="<h3>#"+number+" "+item.w.w+"</h3>";
result+="<p><span class='part_tag'>pos</span>"+item.w.pos+"</p>";
result+="<p><span class='part_tag'>pron</span>"+item.w.pron+"</p>";
result+="<p><span class='part_tag'>src</span>"+item.w.src+"</p>";
result+="<p><span class='part_tag'>xlit</span>"+item.w.xlit+"</p>";
result+="<p><span class='part_tag'>source</span>"+item.source+"</p>";
result+="<p><span class='part_tag'>meaning</span>"+item.meaning+"</p>";
result+="<p><span class='part_tag'>usage</span>"+item.usage+"</p>";
if(item.note){
result+="<p><span class='part_tag'>note</span>"+item.note+"</p>";
}
}
$("#resule_box").html(result);
}
</script>
<style type="text/css">
h3 {
}
p {
}
span.part_tag{
color: blue;
margin: auto 5px;
}
def {
color: green;
}
w {
color: red;
}
#query_box {
border-bottom: 1px solid gray;
margin: 10px;
}
#resule_box{
margin: 10px;
border-bottom: 1px solid gray;
min-height: 200px;
}
#footer {
text-align: center;
}
</style>
</head>
<body>
<h1>Hebrew Strong Dictionary</h1>
<div id="query_box">
<p>
Hebrew:
<input type="text" id="heb">
<button onclick="query_hebrew()">Query</button>
</p>
<p>
Strong Number:
<input type="text" id="strong_number">
<button onclick="query_strong()">Query</button>
</p>
</div>
<div id="resule_box">
</div>
<div id="footer">
Copyright 2017 Sinri Edogawa. Dictionary data powered by <a href="https://github.com/openscriptures/HebrewLexicon">Project HebrewLexicon</a>.
</div>
</body>
</html>
<!-- http://localhost/leqee/Lab/SinriStrongDict/HebrewStrongDictionary.html -->
|
xkp/XKP | 27 | third_party/ve/Excess.CompilerTask/ExcessCompilerTask.cs | using System;
using System.IO;
using System.Collections.Generic;
using System.Globalization;
using System.Reflection;
using Microsoft.Build.Utilities;
using Microsoft.Build.Framework;
using Microsoft.Build.BuildEngine;
using ExcessCompiler;
using System.Xml.Linq;
namespace Excess.CompilerTasks
{
/////////////////////////////////////////////////////////////////////////////
// My MSBuild Task
public class ExcessCompilerTask : Task
{
#region Constructors
/// <summary>
/// Constructor. This is the constructor that will be used
/// when the task run.
/// </summary>
public ExcessCompilerTask()
{
}
#endregion
#region Public Properties and related Fields
private string[] sourceFiles;
/// <summary>
/// List of Python source files that should be compiled into the assembly
/// </summary>
[Required()]
public string[] SourceFiles
{
get { return sourceFiles; }
set { sourceFiles = value; }
}
private string outputAssembly;
/// <summary>
/// Output Assembly (including extension)
/// </summary>
[Required()]
public string OutputAssembly
{
get { return outputAssembly; }
set { outputAssembly = value; }
}
private ITaskItem[] referencedAssemblies = new ITaskItem[0];
/// <summary>
/// List of dependent assemblies
/// </summary>
public ITaskItem[] ReferencedAssemblies
{
get { return referencedAssemblies; }
set
{
if (value != null)
{
referencedAssemblies = value;
}
else
{
referencedAssemblies = new ITaskItem[0];
}
}
}
private ITaskItem[] resourceFiles = new ITaskItem[0];
/// <summary>
/// List of resource files
/// </summary>
public ITaskItem[] ResourceFiles
{
get { return resourceFiles; }
set
{
if (value != null)
{
resourceFiles = value;
}
else
{
resourceFiles = new ITaskItem[0];
}
}
}
private string mainFile;
/// <summary>
/// For applications, which file is the entry point
/// </summary>
[Required()]
public string MainFile
{
get { return mainFile; }
set { mainFile = value; }
}
private string targetKind;
/// <summary>
/// Target type (exe, winexe, library)
/// These will be mapped to System.Reflection.Emit.PEFileKinds
/// </summary>
public string TargetKind
{
get { return targetKind; }
set { targetKind = value.ToLower(CultureInfo.InvariantCulture); }
}
private bool debugSymbols = true;
/// <summary>
/// Generate debug information
/// </summary>
public bool DebugSymbols
{
get { return debugSymbols; }
set { debugSymbols = value; }
}
private string projectPath = null;
/// <summary>
/// This should be set to $(MSBuildProjectDirectory)
/// </summary>
public string ProjectPath
{
get { return projectPath; }
set { projectPath = value; }
}
private bool useExperimentalCompiler;
/// <summary>
/// This property is only needed because Iron Python does not officially support building real .Net assemblies.
/// For WAP scenarios, we need to support real assemblies and as such we use an alternate approach to build those assemblies.
/// </summary>
public bool UseExperimentalCompiler
{
get { return useExperimentalCompiler; }
set { useExperimentalCompiler = value; }
}
#endregion
/// <summary>
/// Main entry point for the task
/// </summary>
/// <returns></returns>
public override bool Execute()
{
string filePath = Path.Combine(projectPath, MainFile);
XAttribute version = XElement.Load(filePath).Attribute("version");
bool old_version = version != null && version.Value == "0.9.4";
if (old_version)
{
//call xss.exe
return true;
}
ExcessModelService service = ExcessModelService.getInstance();
List<ExcessErrorInfo> errors = new List<ExcessErrorInfo>();
bool success = service.Model.buildProject(filePath, errors);
if (errors.Count == 0)
{
Engine engine = new Engine();
engine.DefaultToolsVersion = "4.0";
// Instantiate a new FileLogger to generate build log
myLogger logger = new myLogger(Log);
// Set the logfile parameter to indicate the log destination
logger.Parameters = @"logfile=C:\dev\XKP_BIN\build.log";
// Register the logger with the engine
engine.RegisterLogger(logger);
// Build a project file
string appName = service.Model.getAppName(filePath);
string slnPath = Path.Combine(projectPath, @"bin\debug\" + appName + ".sln");
try
{
success = engine.BuildProjectFile(slnPath);
}
catch (Exception e)
{
success = false;
}
//Unregister all loggers to close the log file
engine.UnregisterAllLoggers();
if (success)
Console.WriteLine("Build succeeded.");
else
Console.WriteLine(@"Build failed. View C:\temp\build.log for details");
return true;
}
//foreach (ExcessErrorInfo error in errors)
//{
// Log.LogError("", "", "", error.File, error.BeginLine, error.BeginColumn, error.BeginLine, error.BeginColumn + 1, error.desc);
//}
return success;
}
/// <summary>
/// Initialize compiler options based on task parameters
/// </summary>
/// <returns>false if failed</returns>
private bool InitializeCompiler()
{
return true;
}
}
internal class myLogger : Logger
{
public myLogger(TaskLoggingHelper log)
{
log_ = log;
}
public override void Initialize(Microsoft.Build.Framework.IEventSource eventSource)
{
////Register for the ProjectStarted, TargetStarted, and ProjectFinished events
//eventSource. ProjectStarted += new ProjectStartedEventHandler(eventSource_ProjectStarted);
//eventSource.TargetStarted += new TargetStartedEventHandler(eventSource_TargetStarted);
eventSource.ErrorRaised += new BuildErrorEventHandler(ErrorHandler);
}
private void ErrorHandler(object sender, BuildErrorEventArgs e)
{
//td: !! send the errors back to xs
log_.LogError(e.Subcategory, e.Code, e.HelpKeyword, e.File, e.LineNumber, e.ColumnNumber, e.EndLineNumber, e.EndColumnNumber, e.Message);
}
private TaskLoggingHelper log_;
}
} | using System;
using System.Diagnostics;
using System.IO;
using System.Collections.Generic;
using System.Globalization;
using System.Reflection;
using Microsoft.Build.Utilities;
using Microsoft.Build.Framework;
using Microsoft.Build.BuildEngine;
using ExcessCompiler;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Excess.CompilerTasks
{
/////////////////////////////////////////////////////////////////////////////
// My MSBuild Task
public class Log
{
public string text { get; set; }
public string type { get; set; }
}
public class ExcessCompilerTask : Task
{
#region Constructors
/// <summary>
/// Constructor. This is the constructor that will be used
/// when the task run.
/// </summary>
public ExcessCompilerTask()
{
}
#endregion
#region Public Properties and related Fields
private string[] sourceFiles;
/// <summary>
/// List of Python source files that should be compiled into the assembly
/// </summary>
[Required()]
public string[] SourceFiles
{
get { return sourceFiles; }
set { sourceFiles = value; }
}
private string outputAssembly;
/// <summary>
/// Output Assembly (including extension)
/// </summary>
[Required()]
public string OutputAssembly
{
get { return outputAssembly; }
set { outputAssembly = value; }
}
private ITaskItem[] referencedAssemblies = new ITaskItem[0];
/// <summary>
/// List of dependent assemblies
/// </summary>
public ITaskItem[] ReferencedAssemblies
{
get { return referencedAssemblies; }
set
{
if (value != null)
{
referencedAssemblies = value;
}
else
{
referencedAssemblies = new ITaskItem[0];
}
}
}
private ITaskItem[] resourceFiles = new ITaskItem[0];
/// <summary>
/// List of resource files
/// </summary>
public ITaskItem[] ResourceFiles
{
get { return resourceFiles; }
set
{
if (value != null)
{
resourceFiles = value;
}
else
{
resourceFiles = new ITaskItem[0];
}
}
}
private string mainFile;
/// <summary>
/// For applications, which file is the entry point
/// </summary>
[Required()]
public string MainFile
{
get { return mainFile; }
set { mainFile = value; }
}
private string targetKind;
/// <summary>
/// Target type (exe, winexe, library)
/// These will be mapped to System.Reflection.Emit.PEFileKinds
/// </summary>
public string TargetKind
{
get { return targetKind; }
set { targetKind = value.ToLower(CultureInfo.InvariantCulture); }
}
private bool debugSymbols = true;
/// <summary>
/// Generate debug information
/// </summary>
public bool DebugSymbols
{
get { return debugSymbols; }
set { debugSymbols = value; }
}
private string projectPath = null;
/// <summary>
/// This should be set to $(MSBuildProjectDirectory)
/// </summary>
public string ProjectPath
{
get { return projectPath; }
set { projectPath = value; }
}
private bool useExperimentalCompiler;
/// <summary>
/// This property is only needed because Iron Python does not officially support building real .Net assemblies.
/// For WAP scenarios, we need to support real assemblies and as such we use an alternate approach to build those assemblies.
/// </summary>
public bool UseExperimentalCompiler
{
get { return useExperimentalCompiler; }
set { useExperimentalCompiler = value; }
}
#endregion
/// <summary>
/// Main entry point for the task
/// </summary>
/// <returns></returns>
public override bool Execute()
{
string filePath = Path.Combine(projectPath, MainFile);
XAttribute version = XElement.Load(filePath).Attribute("version");
bool old_version = version != null && version.Value == "0.9.4";
if (old_version)
{
//call xss.exe
Process proc = new Process();
proc.StartInfo = new ProcessStartInfo("xss.exe");
proc.StartInfo.Arguments = filePath + " json";
proc.StartInfo.RedirectStandardOutput = true;
proc.StartInfo.UseShellExecute = false;
bool success_ = proc.Start();
string output_string = proc.StandardOutput.ReadToEnd();
List<Log> logs = JsonConvert.DeserializeObject<List<Log>>(output_string);
foreach (Log l in logs)
{
if (l.type == "error")
{
Log.LogError(l.text);
success_ = false;
}
if (l.type == "msg")
Log.LogMessage(l.text);
if (l.type == "log")
Log.LogMessage(l.text);
}
return success_;
}
ExcessModelService service = ExcessModelService.getInstance();
List<ExcessErrorInfo> errors = new List<ExcessErrorInfo>();
bool success = service.Model.buildProject(filePath, errors);
if (errors.Count == 0)
{
Engine engine = new Engine();
engine.DefaultToolsVersion = "4.0";
// Instantiate a new FileLogger to generate build log
myLogger logger = new myLogger(Log);
// Set the logfile parameter to indicate the log destination
logger.Parameters = @"logfile=C:\dev\XKP_BIN\build.log";
// Register the logger with the engine
engine.RegisterLogger(logger);
// Build a project file
string appName = service.Model.getAppName(filePath);
string slnPath = Path.Combine(projectPath, @"bin\debug\" + appName + ".sln");
try
{
success = engine.BuildProjectFile(slnPath);
}
catch (Exception e)
{
success = false;
}
//Unregister all loggers to close the log file
engine.UnregisterAllLoggers();
if (success)
Console.WriteLine("Build succeeded.");
else
Console.WriteLine(@"Build failed. View C:\temp\build.log for details");
return true;
}
//foreach (ExcessErrorInfo error in errors)
//{
// Log.LogError("", "", "", error.File, error.BeginLine, error.BeginColumn, error.BeginLine, error.BeginColumn + 1, error.desc);
//}
return success;
}
/// <summary>
/// Initialize compiler options based on task parameters
/// </summary>
/// <returns>false if failed</returns>
private bool InitializeCompiler()
{
return true;
}
}
internal class myLogger : Logger
{
public myLogger(TaskLoggingHelper log)
{
log_ = log;
}
public override void Initialize(Microsoft.Build.Framework.IEventSource eventSource)
{
////Register for the ProjectStarted, TargetStarted, and ProjectFinished events
//eventSource. ProjectStarted += new ProjectStartedEventHandler(eventSource_ProjectStarted);
//eventSource.TargetStarted += new TargetStartedEventHandler(eventSource_TargetStarted);
eventSource.ErrorRaised += new BuildErrorEventHandler(ErrorHandler);
}
private void ErrorHandler(object sender, BuildErrorEventArgs e)
{
//td: !! send the errors back to xs
log_.LogError(e.Subcategory, e.Code, e.HelpKeyword, e.File, e.LineNumber, e.ColumnNumber, e.EndLineNumber, e.EndColumnNumber, e.Message);
}
private TaskLoggingHelper log_;
}
} |
MSch/devise-twitter | 4 | lib/devise/twitter/version.rb | module Devise
module Twitter
VERSION = "0.1.2.pre"
end
end
| module Devise
module Twitter
VERSION = "0.1.2"
end
end
|
simula67/Coding | 1 | c/startha.c | #include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <signal.h>
int main()
{
signal(SIGCHLD, SIG_IGN);
extern char **environ;
printf("Starting artha...\n");
pid_t artha_fork;
if( (artha_fork=fork()) < 0 ) {
fprintf(stderr,"Failed to fork\n");
exit(1);
}
if(artha_fork>0) {
printf("Successfull fork...\n");
exit(0);
}
char *cmd_line[]={"/usr/bin/artha",NULL};
execve("/usr/bin/artha",cmd_line,environ);
fprintf(stderr,"Failed to execute\n");
return 1;
}
| #include <stdio.h>
#include <unistd.h>
#include <stdlib.h>
#include <signal.h>
int main()
{
signal(SIGCHLD, SIG_IGN); // Reparent to init
extern char **environ;
printf("Starting artha...\n");
pid_t artha_fork;
if( (artha_fork=fork()) < 0 ) {
fprintf(stderr,"Failed to fork\n");
exit(1);
}
if(artha_fork>0) {
printf("Successfull fork...\n");
exit(0);
}
char *cmd_line[]={"/usr/bin/artha",NULL};
execve("/usr/bin/artha",cmd_line,environ);
fprintf(stderr,"Failed to execute\n");
return 1;
}
|
kevinj/archive.tar | 1 | tests/index.html | <html>
<head>
<script src="../lib/Archive/Tar.js"> </script>
<script src="../lib/Archive/Tar/File.js"> </script>
<script src="lib/JSAN.js"> </script>
<pre><script>
try {
new JSAN('lib').use("Data.Dump");
new JSAN('lib').use("Test.More");
new JSAN('../lib').use("Archive.Tar");
new JSAN('../lib').use("Archive.Tar.File");
var tar = new Archive.Tar;
tar.read("testfile.tar.gz");
plan({ tests: 10 });
var files = [ { name: "file.txt", mode: 420,
uid: 30000, gid: 100,
size: 20, mtime: new Date("Thu, 01 Mar 2007 22:09:02 GMT"),
chksum: 5109, typeflag: 0,
linkname: "", magic: 0,
version: 0, uname: "kevinj",
gname: "users", devmajor: 0,
devminor: 0, prefix: ""
},
{
name: "file2.txt", mode: 420,
uid: 30000, gid: 100,
size: 20, mtime: new Date("Thu, 01 Mar 2007 22:26:40 GMT"),
chksum: 5146, typeflag: 0,
linkname: "", magic: 0,
version: 0, uname: "kevinj",
gname: "users", devmajor: 0,
devminor: 0, prefix: ""
}
];
var hfields;
for (var i in files[0]) {
if (files.[0].hasOwnProperty(i)) {
hfields.push(i);
}
}
isDeeply( tar.listFiles(),
["file.txt", "file2.txt"],
"listFiles(name)"
);
isDeeply( tar.listFiles("name"),
["file.txt", "file2.txt"],
"listFiles(name)"
);
isDeeply( tar.listFiles("version","name"),
[{name:'file.txt', version: 0},
{name:'file2.txt', version: 0}
],
"listFiles(version, name)"
);
isDeeply( tar.listFiles("name","mode","uid","gid","size","mtime",
"chksum","typeflag","linkname","magic",
"version","uname","gname","devmajor",
"devminor","prefix"
), files, "All fields");
var files = tar.getFiles();
var headers = ['name','mode','uid','gid','size','mtime','chksum','type','linkname',
'magic','version','uname','gname','devminor','devmajor','prefix'
];
for (var i=0; i<headers.length; i++) {
}
is(files[0].name(), "file.txt", "Archive.Tar.File.name()");
is(files[0].mode(), "file.txt", "Archive.Tar.File.mode()");
is(files[0].uid(), "file.txt", "Archive.Tar.File.uid()");
is(files[0].gid(), "file.txt", "Archive.Tar.File.gid()");
is(files[0].size(), "file.txt", "Archive.Tar.File.size()");
is(files[0].mtime(), "file.txt", "Archive.Tar.File.mtime()");
is(files[0].chksum(), "file.txt", "Archive.Tar.File.chksum()");
is(files[0].type(), "file.txt", "Archive.Tar.File.type()");
is(files[0].linkname(), "file.txt", "Archive.Tar.File.linkname()");
is(files[0].linkname(), "file.txt", "Archive.Tar.File.linkname()");
ok(1);
ok(1);
ok(1);
ok(1);
ok(1);
} catch (e) { alert(e) }
</script></pre>
</head>
<body>
</body>
</html>
| <html>
<head>
<script src="../lib/Archive/Tar.js"> </script>
<script src="../lib/Archive/Tar/File.js"> </script>
<script src="lib/JSAN.js"> </script>
<pre><script>
try {
new JSAN('lib').use("Data.Dump");
new JSAN('lib').use("Test.More");
new JSAN('../lib').use("Archive.Tar");
new JSAN('../lib').use("Archive.Tar.File");
var tar = new Archive.Tar;
tar.read("testfile.tar.gz");
plan({ tests: 10 });
var files = [ { name: "file.txt", mode: 420,
uid: 30000, gid: 100,
size: 20, mtime: new Date("Thu, 01 Mar 2007 22:09:02 GMT"),
chksum: 5109, typeflag: 0,
linkname: "", magic: 0,
version: 0, uname: "kevinj",
gname: "users", devmajor: 0,
devminor: 0, prefix: ""
},
{
name: "file2.txt", mode: 420,
uid: 30000, gid: 100,
size: 20, mtime: new Date("Thu, 01 Mar 2007 22:26:40 GMT"),
chksum: 5146, typeflag: 0,
linkname: "", magic: 0,
version: 0, uname: "kevinj",
gname: "users", devmajor: 0,
devminor: 0, prefix: ""
}
];
var hfields;
for (var i in files[0]) {
if (files.[0].hasOwnProperty(i)) {
hfields.push(i);
}
}
isDeeply( tar.listFiles(),
["file.txt", "file2.txt"],
"listFiles(name)"
);
isDeeply( tar.listFiles("name"),
["file.txt", "file2.txt"],
"listFiles(name)"
);
isDeeply( tar.listFiles("version","name"),
[{name:'file.txt', version: 0},
{name:'file2.txt', version: 0}
],
"listFiles(version, name)"
);
isDeeply( tar.listFiles("name","mode","uid","gid","size","mtime",
"chksum","typeflag","linkname","magic",
"version","uname","gname","devmajor",
"devminor","prefix"
), files, "All fields");
var files = tar.getFiles();
var headers = ['name','mode','uid','gid','size','mtime','chksum','type','linkname',
'magic','version','uname','gname','devminor','devmajor','prefix'
];
for (var i=0; i<headers.length; i++) {
}
is(files[0].name(), "file.txt", "Archive.Tar.File.name()");
is(files[0].mode(), "file.txt", "Archive.Tar.File.mode()");
is(files[0].uid(), "file.txt", "Archive.Tar.File.uid()");
is(files[0].gid(), "file.txt", "Archive.Tar.File.gid()");
is(files[0].size(), "file.txt", "Archive.Tar.File.size()");
is(files[0].mtime(), "file.txt", "Archive.Tar.File.mtime()");
is(files[0].chksum(), "file.txt", "Archive.Tar.File.chksum()");
is(files[0].type(), "file.txt", "Archive.Tar.File.type()");
is(files[0].linkname(), "file.txt", "Archive.Tar.File.linkname()");
is(files[0].linkname(), "file.txt", "Archive.Tar.File.linkname()");
ok(1);
ok(1);
ok(1);
ok(1);
ok(1);
} catch (e) { alert(e) }
</script></pre>
</head>
<body>
</body>
</html>
|
moztw/gfx.tw | 17 | application/libraries/Auth/OpenID/MySQLStore.php | <?php
/**
* A MySQL store.
*
* @package OpenID
*/
/**
* Require the base class file.
*/
require_once "Auth/OpenID/SQLStore.php";
/**
* An SQL store that uses MySQL as its backend.
*
* @package OpenID
*/
class Auth_OpenID_MySQLStore extends Auth_OpenID_SQLStore {
/**
* @access private
*/
function setSQL()
{
$this->sql['nonce_table'] =
"CREATE TABLE %s (\n".
" server_url VARCHAR(2047) NOT NULL,\n".
" timestamp INTEGER NOT NULL,\n".
" salt CHAR(40) NOT NULL,\n".
" UNIQUE (server_url(255), timestamp, salt)\n".
") ENGINE=InnoDB";
$this->sql['assoc_table'] =
"CREATE TABLE %s (\n".
" server_url BLOB NOT NULL,\n".
" handle VARCHAR(255) NOT NULL,\n".
" secret BLOB NOT NULL,\n".
" issued INTEGER NOT NULL,\n".
" lifetime INTEGER NOT NULL,\n".
" assoc_type VARCHAR(64) NOT NULL,\n".
" PRIMARY KEY (server_url(255), handle)\n".
") ENGINE=InnoDB";
$this->sql['set_assoc'] =
"REPLACE INTO %s (server_url, handle, secret, issued,\n".
" lifetime, assoc_type) VALUES (?, ?, !, ?, ?, ?)";
$this->sql['get_assocs'] =
"SELECT handle, secret, issued, lifetime, assoc_type FROM %s ".
"WHERE server_url = ?";
$this->sql['get_assoc'] =
"SELECT handle, secret, issued, lifetime, assoc_type FROM %s ".
"WHERE server_url = ? AND handle = ?";
$this->sql['remove_assoc'] =
"DELETE FROM %s WHERE server_url = ? AND handle = ?";
$this->sql['add_nonce'] =
"INSERT INTO %s (server_url, timestamp, salt) VALUES (?, ?, ?)";
$this->sql['clean_nonce'] =
"DELETE FROM %s WHERE timestamp < ?";
$this->sql['clean_assoc'] =
"DELETE FROM %s WHERE issued + lifetime < ?";
}
/**
* @access private
*/
function blobEncode($blob)
{
return "0x" . bin2hex($blob);
}
}
?> | <?php
/**
* A MySQL store.
*
* @package OpenID
*/
/**
* Require the base class file.
*/
require_once "Auth/OpenID/SQLStore.php";
/**
* An SQL store that uses MySQL as its backend.
*
* @package OpenID
*/
class Auth_OpenID_MySQLStore extends Auth_OpenID_SQLStore {
/**
* @access private
*/
function setSQL()
{
$this->sql['nonce_table'] =
"CREATE TABLE %s (\n".
" server_url VARCHAR(2047) NOT NULL,\n".
" timestamp INTEGER NOT NULL,\n".
" salt CHAR(40) NOT NULL,\n".
" UNIQUE (server_url(255), timestamp, salt)\n".
") ENGINE=InnoDB";
$this->sql['assoc_table'] =
"CREATE TABLE %s (\n".
" server_url BLOB NOT NULL,\n".
" handle VARCHAR(255) NOT NULL,\n".
" secret BLOB NOT NULL,\n".
" issued INTEGER NOT NULL,\n".
" lifetime INTEGER NOT NULL,\n".
" assoc_type VARCHAR(64) NOT NULL,\n".
" PRIMARY KEY (server_url(255), handle)\n".
") ENGINE=InnoDB";
$this->sql['set_assoc'] =
"REPLACE INTO %s (server_url, handle, secret, issued,\n".
" lifetime, assoc_type) VALUES (?, ?, !, ?, ?, ?)";
$this->sql['get_assocs'] =
"SELECT handle, secret, issued, lifetime, assoc_type FROM %s ".
"WHERE server_url = ?";
$this->sql['get_assoc'] =
"SELECT handle, secret, issued, lifetime, assoc_type FROM %s ".
"WHERE server_url = ? AND handle = ?";
$this->sql['remove_assoc'] =
"DELETE FROM %s WHERE server_url = ? AND handle = ?";
$this->sql['add_nonce'] =
"INSERT INTO %s (server_url, timestamp, salt) VALUES (?, ?, ?)";
$this->sql['clean_nonce'] =
"DELETE FROM %s WHERE timestamp < ?";
$this->sql['clean_assoc'] =
"DELETE FROM %s WHERE issued + lifetime < ?";
}
/**
* @access private
*/
function blobEncode($blob)
{
return "0x" . bin2hex($blob);
}
}
|
dandean/Ajax.JSONRequest | 7 | jsonp.js | /* JSON-P implementation for Prototype.js somewhat by Dan Dean (http://www.dandean.com)
*
* *HEAVILY* based on Tobie Langel's version: http://gist.github.com/145466.
* Might as well just call this an iteration.
*
* This version introduces:
* - Partial integration with Ajax.Responders (Thanks to @sr3d for the kick in this direction)
* - Compatibility with Prototype 1.7 (Thanks to @soung3 for the bug report)
* - Will not break if page lacks a <head> element
*
* See examples in README for usage
*
* VERSION 1.1.1
*
* new Ajax.JSONRequest(url, options);
* - url (String): JSON-P endpoint url.
* - options (Object): Configuration options for the request.
*/
Ajax.JSONRequest = Class.create(Ajax.Base, (function() {
var id = 0, head = document.getElementsByTagName('head')[0] || document.body;
return {
initialize: function($super, url, options) {
$super(options);
this.options.url = url;
this.options.callbackParamName = this.options.callbackParamName || 'callback';
this.options.timeout = this.options.timeout || 10; // Default timeout: 10 seconds
this.options.invokeImmediately = (!Object.isUndefined(this.options.invokeImmediately)) ? this.options.invokeImmediately : true ;
if (!Object.isUndefined(this.options.parameters) && Object.isString(this.options.parameters)) {
this.options.parameters = this.options.parameters.toQueryParams();
}
if (this.options.invokeImmediately) {
this.request();
}
},
/**
* Ajax.JSONRequest#_cleanup() -> undefined
* Cleans up after the request
**/
_cleanup: function() {
if (this.timeout) {
clearTimeout(this.timeout);
this.timeout = null;
}
if (this.transport && Object.isElement(this.transport)) {
this.transport.remove();
this.transport = null;
}
},
/**
* Ajax.JSONRequest#request() -> undefined
* Invokes the JSON-P request lifecycle
**/
request: function() {
// Define local vars
var response = new Ajax.JSONResponse(this);
var key = this.options.callbackParamName,
name = '_prototypeJSONPCallback_' + (id++),
complete = function() {
if (Object.isFunction(this.options.onComplete)) {
this.options.onComplete.call(this, response);
}
Ajax.Responders.dispatch('onComplete', this, response);
}.bind(this);
// Add callback as a parameter and build request URL
this.options.parameters[key] = name;
var url = this.options.url + ((this.options.url.include('?') ? '&' : '?') + Object.toQueryString(this.options.parameters));
// Define callback function
window[name] = function(json) {
this._cleanup(); // Garbage collection
window[name] = undefined;
response.status = 200;
response.statusText = "OK";
response.setResponseContent(json);
if (Object.isFunction(this.options.onSuccess)) {
this.options.onSuccess.call(this, response);
}
Ajax.Responders.dispatch('onSuccess', this, response);
complete();
}.bind(this);
this.transport = new Element('script', { type: 'text/javascript', src: url });
if (Object.isFunction(this.options.onCreate)) {
this.options.onCreate.call(this, response);
}
Ajax.Responders.dispatch('onCreate', this);
head.appendChild(this.transport);
this.timeout = setTimeout(function() {
this._cleanup();
window[name] = Prototype.emptyFunction;
if (Object.isFunction(this.options.onFailure)) {
response.status = 504;
response.statusText = "Gateway Timeout";
this.options.onFailure.call(this, response);
}
complete();
}.bind(this), this.options.timeout * 1000);
},
toString: function() { return "[object Ajax.JSONRequest]"; }
};
})());
Ajax.JSONResponse = Class.create({
initialize: function(request) {
this.request = request;
},
request: undefined,
status: 0,
statusText: '',
responseJSON: undefined,
responseText: undefined,
setResponseContent: function(json) {
this.responseJSON = json;
this.responseText = Object.toJSON(json);
},
getTransport: function() {
if (this.request) return this.request.transport;
},
toString: function() { return "[object Ajax.JSONResponse]"; }
}); | /* JSON-P implementation for Prototype.js somewhat by Dan Dean (http://www.dandean.com)
*
* *HEAVILY* based on Tobie Langel's version: http://gist.github.com/145466.
* Might as well just call this an iteration.
*
* This version introduces:
* - Support for predefined callbacks (Necessary for OAuth signed requests, by @rboyce)
* - Partial integration with Ajax.Responders (Thanks to @sr3d for the kick in this direction)
* - Compatibility with Prototype 1.7 (Thanks to @soung3 for the bug report)
* - Will not break if page lacks a <head> element
*
* See examples in README for usage
*
* VERSION 1.1.2
*
* new Ajax.JSONRequest(url, options);
* - url (String): JSON-P endpoint url.
* - options (Object): Configuration options for the request.
*/
Ajax.JSONRequest = Class.create(Ajax.Base, (function() {
var id = 0, head = document.getElementsByTagName('head')[0] || document.body;
return {
initialize: function($super, url, options) {
$super(options);
this.options.url = url;
this.options.callbackParamName = this.options.callbackParamName || 'callback';
this.options.timeout = this.options.timeout || 10; // Default timeout: 10 seconds
this.options.invokeImmediately = (!Object.isUndefined(this.options.invokeImmediately)) ? this.options.invokeImmediately : true ;
if (!Object.isUndefined(this.options.parameters) && Object.isString(this.options.parameters)) {
this.options.parameters = this.options.parameters.toQueryParams();
}
if (this.options.invokeImmediately) {
this.request();
}
},
/**
* Ajax.JSONRequest#_cleanup() -> undefined
* Cleans up after the request
**/
_cleanup: function() {
if (this.timeout) {
clearTimeout(this.timeout);
this.timeout = null;
}
if (this.transport && Object.isElement(this.transport)) {
this.transport.remove();
this.transport = null;
}
},
/**
* Ajax.JSONRequest#request() -> undefined
* Invokes the JSON-P request lifecycle
**/
request: function() {
// Define local vars
var response = new Ajax.JSONResponse(this);
var key = this.options.callbackParamName,
name = '_prototypeJSONPCallback_' + (id++),
complete = function() {
if (Object.isFunction(this.options.onComplete)) {
this.options.onComplete.call(this, response);
}
Ajax.Responders.dispatch('onComplete', this, response);
}.bind(this);
// If the callback parameter is already defined, use that
if (this.options.parameters[key] !== undefined) {
name = this.options.parameters[key];
}
// Otherwise, add callback as a parameter
else {
this.options.parameters[key] = name;
}
// Build request URL
this.options.parameters[key] = name;
var url = this.options.url + ((this.options.url.include('?') ? '&' : '?') + Object.toQueryString(this.options.parameters));
// Define callback function
window[name] = function(json) {
this._cleanup(); // Garbage collection
window[name] = undefined;
response.status = 200;
response.statusText = "OK";
response.setResponseContent(json);
if (Object.isFunction(this.options.onSuccess)) {
this.options.onSuccess.call(this, response);
}
Ajax.Responders.dispatch('onSuccess', this, response);
complete();
}.bind(this);
this.transport = new Element('script', { type: 'text/javascript', src: url });
if (Object.isFunction(this.options.onCreate)) {
this.options.onCreate.call(this, response);
}
Ajax.Responders.dispatch('onCreate', this);
head.appendChild(this.transport);
this.timeout = setTimeout(function() {
this._cleanup();
window[name] = Prototype.emptyFunction;
if (Object.isFunction(this.options.onFailure)) {
response.status = 504;
response.statusText = "Gateway Timeout";
this.options.onFailure.call(this, response);
}
complete();
}.bind(this), this.options.timeout * 1000);
},
toString: function() { return "[object Ajax.JSONRequest]"; }
};
})());
Ajax.JSONResponse = Class.create({
initialize: function(request) {
this.request = request;
},
request: undefined,
status: 0,
statusText: '',
responseJSON: undefined,
responseText: undefined,
setResponseContent: function(json) {
this.responseJSON = json;
this.responseText = Object.toJSON(json);
},
getTransport: function() {
if (this.request) return this.request.transport;
},
toString: function() { return "[object Ajax.JSONResponse]"; }
}); |
ledermann/rails-settings | 105 | lib/rails-settings/setting_object.rb | module RailsSettings
class SettingObject < ActiveRecord::Base
self.table_name = 'settings'
belongs_to :target, :polymorphic => true
validates_presence_of :var, :target_type
validate do
errors.add(:value, "Invalid setting value") unless value.is_a? Hash
unless _target_class.default_settings[var.to_sym]
errors.add(:var, "#{var} is not defined!")
end
end
serialize :value, Hash
if RailsSettings.can_protect_attributes?
# attr_protected can not be used here because it touches the database which is not connected yet.
# So allow no attributes and override <tt>#sanitize_for_mass_assignment</tt>
attr_accessible
end
REGEX_SETTER = /\A([a-z]\w+)=\Z/i
REGEX_GETTER = /\A([a-z]\w+)\Z/i
def respond_to?(method_name, include_priv=false)
super || method_name.to_s =~ REGEX_SETTER || _setting?(method_name)
end
def method_missing(method_name, *args, &block)
if block_given?
super
else
if attribute_names.include?(method_name.to_s.sub('=',''))
super
elsif method_name.to_s =~ REGEX_SETTER && args.size == 1
_set_value($1, args.first)
elsif method_name.to_s =~ REGEX_GETTER && args.size == 0
_get_value($1)
else
super
end
end
end
protected
if RailsSettings.can_protect_attributes?
# Simulate attr_protected by removing all regular attributes
def sanitize_for_mass_assignment(attributes, role = nil)
attributes.except('id', 'var', 'value', 'target_id', 'target_type', 'created_at', 'updated_at')
end
end
private
def _get_value(name)
if value[name].nil?
_target_class.default_settings[var.to_sym][name]
else
value[name]
end
end
def _set_value(name, v)
if value[name] != v
value_will_change!
if v.nil?
value.delete(name)
else
value[name] = v
end
end
end
def _target_class
target_type.constantize
end
def _setting?(method_name)
_target_class.default_settings[var.to_sym].keys.include?(method_name.to_s)
end
end
end
| module RailsSettings
class SettingObject < ActiveRecord::Base
self.table_name = 'settings'
belongs_to :target, :polymorphic => true
validates_presence_of :var, :target_type
validate do
errors.add(:value, "Invalid setting value") unless value.is_a? Hash
unless _target_class.default_settings[var.to_sym]
errors.add(:var, "#{var} is not defined!")
end
end
serialize :value, Hash
if RailsSettings.can_protect_attributes?
# attr_protected can not be used here because it touches the database which is not connected yet.
# So allow no attributes and override <tt>#sanitize_for_mass_assignment</tt>
attr_accessible
end
REGEX_SETTER = /\A([a-z]\w+)=\Z/i
REGEX_GETTER = /\A([a-z]\w+)\Z/i
def respond_to?(method_name, include_priv=false)
super || method_name.to_s =~ REGEX_SETTER || _setting?(method_name)
end
def method_missing(method_name, *args, &block)
if block_given?
super
else
if attribute_names.include?(method_name.to_s.sub('=',''))
super
elsif method_name.to_s =~ REGEX_SETTER && args.size == 1
_set_value($1, args.first)
elsif method_name.to_s =~ REGEX_GETTER && args.size == 0
_get_value($1)
else
super
end
end
end
protected
if RailsSettings.can_protect_attributes?
# Simulate attr_protected by removing all regular attributes
def sanitize_for_mass_assignment(attributes, role = nil)
attributes.except('id', 'var', 'value', 'target_id', 'target_type', 'created_at', 'updated_at')
end
end
private
def _get_value(name)
if value[name].nil?
if _target_class.default_settings[var.to_sym][name].respond_to?(:call)
_target_class.default_settings[var.to_sym][name].call(target)
else
_target_class.default_settings[var.to_sym][name]
end
else
value[name]
end
end
def _set_value(name, v)
if value[name] != v
value_will_change!
if v.nil?
value.delete(name)
else
value[name] = v
end
end
end
def _target_class
target_type.constantize
end
def _setting?(method_name)
_target_class.default_settings[var.to_sym].keys.include?(method_name.to_s)
end
end
end
|
mitsuhiko/classy | 16 | tests/core.js | test('Classy exists', function() {
ok(typeof(Class) == 'function',
'Class is a function defined globally');
});
// $noConflict should remove the global binding and hand the constructor
// back to the caller; the test restores the global afterwards.
test('$noConflict unsets Class from window', function() {
  var saved = Class;
  var returned = Class.$noConflict();
  equals(typeof(Class), 'undefined',
         '$noConflict unsets Class from window');
  same(returned, saved,
       'the returned Class is the same as the original');
  // restore the global so the remaining tests keep working
  window.Class = saved;
});
// $super inside an overriding method must invoke the parent
// implementation and splice its result into the child's.
test('$super calls parent method', function() {
  var Greeter = Class.$extend({
    greeting: function() { return 'Armin!'; }
  });
  var Spaniard = Greeter.$extend({
    greeting: function() { return 'Hola, ' + this.$super(); }
  });
  var Englishman = Greeter.$extend({
    greeting: function() { return 'Hello, ' + this.$super(); }
  });
  var hola = Spaniard().greeting();
  var hello = Englishman().greeting();
  same(hola, 'Hola, Armin!',
       'Spanish greeting generated.');
  same(hello, 'Hello, Armin!',
       'English greeting generated.');
});
// Every class, including freshly derived ones, must expose the very
// same $extend hook as the base class.
test('$extend exists and works', function() {
  same(typeof(Class.$extend), 'function',
       'basic class has $extend function');
  var Derived = Class.$extend({});
  same(typeof(Derived.$extend), 'function',
       'subclasses also receive $extend');
  same(Derived.$extend, Class.$extend,
       'base class and subclass have same $extend function');
});
// Construction with and without the `new` keyword must yield
// equivalent instances.
test('classes can be used with or without `new`.', function() {
  var withNew = new Animal('pig');
  var withoutNew = Animal('pig');
  same(withNew, withoutNew,
       'Animal instances are the same when made with or without `new`');
});
// __init__ must receive the constructor's arguments verbatim.
test('__init__ is called with correct arguments', function() {
  var Instrument = Class.$extend({
    __init__: function(volume) {
      this.volume = volume;
    }
  });
  // Bug fix: declare with `var` -- the originals were implicit globals
  // that leaked out of the test and could bleed into other tests.
  var flute = Instrument(20);
  var cymbal = Instrument(100);
  equal(flute.volume, 20);
  equal(cymbal.volume, 100);
});
// Subclass instances may be customized individually without mutating
// the base class.
// Bug fix: test name typo ("inheritence" -> "inheritance") and the cat
// variables are now declared with `var` instead of leaking as globals.
test('basic classical inheritance works', function() {
  var garfield = HouseCat({funny: true});
  var heathcliff = HouseCat({funny: false});
  equal(garfield.funny, true,
        'attribute was set on instance');
  equal(heathcliff.funny, false,
        'attribute was set on instance');
  equal(typeof(HouseCat().funny), 'undefined',
        'base HouseCat is not affected by subclass mutations');
});
// instanceof must hold across the whole inheritance chain and be false
// for unrelated sibling subclasses.
test('instanceof works', function() {
  var lion = Lion();
  ok(lion instanceof Lion,
     'instanceof works on class instances');
  ok(lion instanceof Cat,
     'instanceof works on subclass instances');
  ok(lion instanceof Animal,
     'instanceof works on deep subclass instances');
  // Bug fix: the assertion message contained the garbled word
  // "isinsinstance".
  ok(!(lion instanceof HouseCat),
     'instanceof is false when expected');
});
// Objects listed in __include__ must contribute both plain attributes
// and methods, with the receiving instance bound as `this`.
test('mixins work', function() {
  // Bug fix: FatMixin, FatCat and garfield were implicit globals;
  // declare them with `var` so they stay local to this test.
  var FatMixin = {
    'is_fat': true,
    'describe': function() {
      return this.name + ' is fat!';
    }
  };
  var FatCat = Cat.$extend({
    '__include__': [FatMixin]
  });
  var garfield = FatCat({name:'Garfield'});
  ok(garfield.is_fat,
     'mixin attribute is defined');
  // Bug fix: message read "corrent `this." (typo plus unclosed backtick).
  equal(garfield.describe(), 'Garfield is fat!',
        'mixin has access to correct `this`.');
});
// End-to-end exercise of the sample hierarchy: default health, the
// effect of being eaten by a parasite vs. a lion, and dying.
test('exercise test methods', function() {
  var bug = Parasite();
  var lion = Lion();
  var cat = HouseCat();
  ok(!(cat.scary),
     'Cat instances are not scary.');
  ok(lion.scary,
     'Lion instances are scary.');
  equal(cat.health, 100,
        'default health is 100');
  bug.eat(cat);
  equal(cat.health, 95,
        'tick removes 5 health');
  ok(!(cat.dead()),
     'still not dead');
  lion.eat(cat);
  ok(cat.dead(),
     'garfield loses a life');
});
// Whether invoked plainly or with `new`, the constructor must run
// __init__ exactly once per creation.
test('non-new creation calls __init__ just once', function() {
  var calls = [];
  var Probe = Class.$extend({
    __init__ : function() {
      calls.push(true);
    }
  });
  Probe();
  new Probe();
  equal(calls.length, 2);
});
// __classvars__ entries live on the class object itself; an instance
// attribute of the same name shadows them without touching the class.
test('class attributes work', function() {
  var Subject = Class.$extend({
    __classvars__ : {
      foo: 23,
      bar: 'test'
    },
    __init__ : function() {
      this.foo = 42;
    }
  });
  equal(Subject.foo, 23, 'Test.foo is 23');
  equal(Subject.bar, 'test', 'Test.bar is "test"');
  equal(Subject().foo, 42, 'Test().foo is 42');
});
// $withData patches raw attributes onto an instance without ever
// running the constructor.
test('patching in prototypes', function() {
  var initCalls = [];
  var Test = Class.$extend({
    __init__ : function() {
      initCalls.push(42);
    },
    getFoo : function() {
      return this.foo;
    },
    toString : function() {
      return this.foo + ' ' + this.bar;
    }
  });
  var obj = Test.$withData({'foo': 23, 'bar': 42});
  equal(obj.foo, 23, 'Test.foo is 23');
  equal(obj.bar, 42, 'Test.bar is 42');
  equal(obj.getFoo(), obj.foo, 'getFoo() returns foo');
  // IE bug we cannot support
  if (!navigator.userAgent.match(/MSIE/))
    equal(obj.toString(), '23 42', 'Test.toString() is "23 42"');
  equal(initCalls.length, 0, 'constructor was never called');
});
// Instances expose their constructor (and its class variables)
// through the $class attribute.
test('$class gives class access', function() {
  var Holder = Class.$extend({
    __classvars__ : {'classattr': 42}
  });
  var instance = Holder();
  equal(instance.$class.classattr, 42, 'classattr is 42');
});
// Class variables must be inherited down the chain, with each level
// able to override individual entries without disturbing the rest.
test('class variable inheritance', function() {
  var Test = Class.$extend({
    __classvars__ : {
      foo: 23,
      bar: 'test'
    }
  });
  var SubTest = Test.$extend({
    __classvars__ : {
      bar: 'subtest'
    }
  });
  var SubSubTest = SubTest.$extend({
    __classvars__ : {
      foo: 999
    }
  });
  ok(SubTest.foo, 'SubTest also has a foo');
  equal(SubTest.foo, Test.foo, 'SubTest.foo is Test.foo');
  equal(SubTest.bar, 'subtest', 'SubTest.bar has been overridden');
  // Bug fix: the message said "Test.bar" but the assertion actually
  // compares against SubTest.bar (which is 'subtest', not 'test').
  equal(SubSubTest.bar, SubTest.bar, 'SubSubTest.bar is SubTest.bar');
  equal(SubSubTest.foo, 999, 'SubSubTest.foo has been overridden');
});
| test('Classy exists', function() {
ok(typeof(Class) == 'function',
'Class is a function defined globally');
});
test('$noConflict unsets Class from window', function() {
var original = Class;
var Classy = Class.$noConflict();
equals(typeof(Class), 'undefined',
'$noConflict unsets Class from window');
same(Classy, original,
'the returned Class is the same as the original');
// cleanup
window.Class = original;
});
test('$super calls parent method', function() {
var Greeter = Class.$extend({
greeting: function() { return 'Armin!'; }
});
var Spaniard = Greeter.$extend({
greeting: function() { return 'Hola, ' + this.$super(); }
});
var Englishman = Greeter.$extend({
greeting: function() { return 'Hello, ' + this.$super(); }
});
same((Spaniard().greeting()), 'Hola, Armin!',
'Spanish greeting generated.');
same((Englishman().greeting()), 'Hello, Armin!',
'English greeting generated.');
});
test('$extend exists and works', function() {
same(typeof(Class.$extend), 'function',
'basic class has $extend function');
var SubClass = Class.$extend({});
same(typeof(SubClass.$extend), 'function',
'subclasses also receive $extend');
same(SubClass.$extend, Class.$extend,
'base class and subclass have same $extend function');
});
test('classes can be used with or without `new`.', function() {
var pig1 = new Animal('pig');
var pig2 = Animal('pig');
same(pig1, pig2,
'Animal instances are the same when made with or without `new`');
});
test('__init__ is called with correct arguments', function() {
var Instrument = Class.$extend({
__init__: function(volume) {
this.volume = volume;
}
});
flute = Instrument(20);
cymbal = Instrument(100);
equal(flute.volume, 20);
equal(cymbal.volume, 100);
});
test('basic classical inheritence works', function() {
garfield = HouseCat({funny: true});
heathcliff = HouseCat({funny: false});
equal(garfield.funny, true,
'attribute was set on instance');
equal(heathcliff.funny, false,
'attribute was set on instance');
equal(typeof(HouseCat().funny), 'undefined',
'base HouseCat is not affected by subclass mutations');
});
test('inheritance of getters and setters works', function() {
cat = Cat();
parasite = Parasite();
lion = Lion();
equal(cat.furryness, 20,
'getter was inherted from base')
equal(parasite.furryness, 0,
'getters can override base')
equal(lion.furryness, 40,
'getters can use $super to get base getter')
});
test('instanceof works', function() {
var lion = Lion();
ok(lion instanceof Lion,
'instanceof works on class instances');
ok(lion instanceof Cat,
'instanceof works on subclass instances');
ok(lion instanceof Animal,
'instanceof works on deep subclass instances');
ok(!(lion instanceof HouseCat),
'isinsinstance is false when expected');
});
test('mixins work', function() {
FatMixin = {
'is_fat': true,
'describe': function() {
return this.name + ' is fat!';
}
};
FatCat = Cat.$extend({
'__include__': [FatMixin]
});
garfield = FatCat({name:'Garfield'});
ok(garfield.is_fat,
'mixin attribute is defined');
equal(garfield.describe(), 'Garfield is fat!',
'mixin has access to corrent `this.');
});
test('exercise test methods', function() {
var tick = Parasite();
var leo = Lion();
var garfield = HouseCat();
ok(!(garfield.scary),
'Cat instances are not scary.');
ok(leo.scary,
'Lion instances are scary.');
equal(garfield.health, 100,
'default health is 100');
tick.eat(garfield);
equal(garfield.health, 95,
'tick removes 5 health');
ok(!(garfield.dead()),
'still not dead');
leo.eat(garfield);
ok(garfield.dead(),
'garfield loses a life');
});
test('non-new creation calls __init__ just once', function() {
var catched = [];
var Test = Class.$extend({
__init__ : function() {
catched.push(true);
}
});
Test();
new Test();
equal(catched.length, 2);
});
test('class attributes work', function() {
var Test = Class.$extend({
__classvars__ : {
foo: 23,
bar: 'test'
},
__init__ : function() {
this.foo = 42;
}
});
equal(Test.foo, 23, 'Test.foo is 23');
equal(Test.bar, 'test', 'Test.bar is "test"');
equal(Test().foo, 42, 'Test().foo is 42');
});
test('patching in prototypes', function() {
var called = [];
var Test = Class.$extend({
__init__ : function() {
called.push(42);
},
getFoo : function() {
return this.foo;
},
toString : function() {
return this.foo + ' ' + this.bar;
}
});
var data = {'foo': 23, 'bar': 42};
var obj = Test.$withData(data);
equal(obj.foo, 23, 'Test.foo is 23');
equal(obj.bar, 42, 'Test.bar is 42');
equal(obj.getFoo(), obj.foo, 'getFoo() returns foo');
// IE bug we cannot support
if (!navigator.userAgent.match(/MSIE/))
equal(obj.toString(), '23 42', 'Test.toString() is "23 42"');
equal(called.length, 0, 'constructor was never called');
});
test('$class gives class access', function() {
var Test = Class.$extend({
__classvars__ : {'classattr': 42}
});
equal(Test().$class.classattr, 42, 'classattr is 42');
});
test('class variable inheritance', function() {
var Test = Class.$extend({
__classvars__ : {
foo: 23,
bar: 'test'
}
});
var SubTest = Test.$extend({
__classvars__ : {
bar: 'subtest'
}
});
var SubSubTest = SubTest.$extend({
__classvars__ : {
foo: 999
}
});
ok(SubTest.foo, 'SubTest also has a foo');
equal(SubTest.foo, Test.foo, 'SubTest.foo is Test.foo');
equal(SubTest.bar, 'subtest', 'SubTest.bar has been overridden');
equal(SubSubTest.bar, SubTest.bar, 'SubSubTest.bar is Test.bar');
equal(SubSubTest.foo, 999, 'SubSubTest.foo has been overridden');
});
|
sassanp/pynetinfo | 4 | iface.c | /*
Pynetinfo - A python module for controlling linux network interfaces
Copyright (C) 2010 Sassan Panahinejad (sassan@sassan.me.uk)
www.sassan.me.uk
pypi.python.org/pypi/pynetinfo/
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <Python.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <linux/sockios.h>
#include <net/if.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <net/route.h>
#include "netinfo.h"
#define IF_COUNT 64
PyObject *netinfo_list_active_devs(PyObject *self, PyObject *args)
{
int ret, fd;
struct ifreq *ifr, *ifend;
struct ifreq ifs[IF_COUNT];
struct ifconf ifc;
int i = 1;
PyObject *tuple = PyTuple_New(0);
fd = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
ifc.ifc_len = sizeof(ifs);
ifc.ifc_req = ifs;
ret = ioctl(fd, SIOCGIFCONF, &ifc);
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
ifend = ifs + (ifc.ifc_len / sizeof(struct ifreq));
for (ifr = ifc.ifc_req; ifr < ifend; ifr++)
{
// printf("dev: %s\n", ifr->ifr_name);
_PyTuple_Resize(&tuple, i);
PyTuple_SET_ITEM(tuple, i++-1, Py_BuildValue("s", ifr->ifr_name));
}
return tuple;
}
PyObject *netinfo_list_devs(PyObject *self, PyObject *args)
{
FILE *devlist = fopen("/proc/net/dev", "r");
char buffer[256], *c, *end;
int i = 1;
PyObject *tuple = PyTuple_New(0);
while (fgets(buffer, 256, devlist)) {
end = strchr(buffer, ':');
if (!end)
continue;
*end = '\0';
for (c = buffer; *c == ' '; c++) ;
// printf("dev: %s\n", c);
_PyTuple_Resize(&tuple, i);
PyTuple_SET_ITEM(tuple, i++-1, Py_BuildValue("s", c));
}
return tuple;
}
PyObject *netinfo_get_addr(PyObject *self, PyObject *args, int cmd)
{
int ret, fd;
struct ifreq ifreq;
char *dev;
struct sockaddr_in *sin;
char hwaddr[18];
fd = socket(AF_INET, SOCK_DGRAM, 0); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
ret = PyArg_ParseTuple(args, "s", &dev); /* parse argument */
if (!ret)
return NULL;
memset(&ifreq, 0, sizeof(struct ifreq));
strncpy(ifreq.ifr_name, dev, IFNAMSIZ-1);
ifreq.ifr_addr.sa_family = AF_INET;
ret = ioctl(fd, cmd, &ifreq, sizeof(struct ifreq));
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
switch (cmd) {
case SIOCGIFADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_addr);
return Py_BuildValue("s", inet_ntoa(sin->sin_addr));
case SIOCGIFNETMASK:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_netmask);
return Py_BuildValue("s", inet_ntoa(sin->sin_addr));
case SIOCGIFBRDADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_broadaddr);
return Py_BuildValue("s", inet_ntoa(sin->sin_addr));
case SIOCGIFHWADDR:
snprintf(hwaddr, 18, "%02X:%02X:%02X:%02X:%02X:%02X",
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[0],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[1],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[2],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[3],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[4],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[5]);
return Py_BuildValue("s", hwaddr);
}
return NULL;
}
PyObject *netinfo_get_ip(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFADDR);
}
PyObject *netinfo_get_netmask(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFNETMASK);
}
PyObject *netinfo_get_broadcast(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFBRDADDR);
}
PyObject *netinfo_get_hwaddr(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFHWADDR);
}
PyObject *netinfo_set_state(PyObject *self, PyObject *args)
{
int ret, fd, state = 0;
struct ifreq ifreq;
char *dev;
ret = PyArg_ParseTuple(args, "si", &dev, &state); /* parse argument */
if (!ret)
return NULL;
// ret = PyArg_ParseTuple(args, "i", &state); /* parse argument */
// if (!ret)
// return NULL;
fd = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
memset(&ifreq, 0, sizeof(struct ifreq));
strncpy(ifreq.ifr_name, dev, IFNAMSIZ-1);
ret = ioctl(fd, SIOCGIFFLAGS, &ifreq);
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
if (state)
ifreq.ifr_flags |= IFF_UP;
else
ifreq.ifr_flags &= ~IFF_UP;
ret = ioctl(fd, SIOCSIFFLAGS, &ifreq);
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
return Py_None;
}
PyObject *netinfo_set_addr(PyObject *self, PyObject *args, int cmd)
{
int ret, fd;
struct ifreq ifreq;
char *dev, *addr;
struct sockaddr_in *sin;
fd = socket(AF_INET, SOCK_DGRAM, 0); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
ret = PyArg_ParseTuple(args, "ss", &dev, &addr); /* parse argument */
if (!ret)
return NULL;
memset(&ifreq, 0, sizeof(struct ifreq));
strncpy(ifreq.ifr_name, dev, IFNAMSIZ-1);
ifreq.ifr_addr.sa_family = AF_INET;
switch (cmd) {
case SIOCSIFADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_addr);
inet_aton(addr, &sin->sin_addr);
break;
case SIOCSIFNETMASK:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_netmask);
inet_aton(addr, &sin->sin_addr);
break;
case SIOCSIFBRDADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_broadaddr);
inet_aton(addr, &sin->sin_addr);
break;
}
ret = ioctl(fd, cmd, &ifreq, sizeof(struct ifreq));
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
return Py_None;
}
PyObject *netinfo_set_ip(PyObject *self, PyObject *args)
{
return netinfo_set_addr(self, args, SIOCSIFADDR);
}
PyObject *netinfo_set_netmask(PyObject *self, PyObject *args)
{
return netinfo_set_addr(self, args, SIOCSIFNETMASK);
}
PyObject *netinfo_set_broadcast(PyObject *self, PyObject *args)
{
return netinfo_set_addr(self, args, SIOCSIFBRDADDR);
}
| /*
Pynetinfo - A python module for controlling linux network interfaces
Copyright (C) 2010 Sassan Panahinejad (sassan@sassan.me.uk)
www.sassan.me.uk
pypi.python.org/pypi/pynetinfo/
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*/
#include <Python.h>
#include <stdio.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <netinet/ip.h>
#include <linux/sockios.h>
#include <net/if.h>
#include <arpa/inet.h>
#include <netinet/in.h>
#include <net/route.h>
#include "netinfo.h"
#define IF_COUNT 64
PyObject *netinfo_list_active_devs(PyObject *self, PyObject *args)
{
int ret, fd;
struct ifreq *ifr, *ifend;
struct ifreq ifs[IF_COUNT];
struct ifconf ifc;
int i = 1;
PyObject *tuple = PyTuple_New(0);
fd = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
ifc.ifc_len = sizeof(ifs);
ifc.ifc_req = ifs;
ret = ioctl(fd, SIOCGIFCONF, &ifc);
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
close(fd);
return NULL;
}
ifend = ifs + (ifc.ifc_len / sizeof(struct ifreq));
for (ifr = ifc.ifc_req; ifr < ifend; ifr++)
{
// printf("dev: %s\n", ifr->ifr_name);
_PyTuple_Resize(&tuple, i);
PyTuple_SET_ITEM(tuple, i++-1, Py_BuildValue("s", ifr->ifr_name));
}
close(fd);
return tuple;
}
PyObject *netinfo_list_devs(PyObject *self, PyObject *args)
{
FILE *devlist = fopen("/proc/net/dev", "r");
char buffer[256], *c, *end;
int i = 1;
PyObject *tuple = PyTuple_New(0);
while (fgets(buffer, 256, devlist)) {
end = strchr(buffer, ':');
if (!end)
continue;
*end = '\0';
for (c = buffer; *c == ' '; c++) ;
// printf("dev: %s\n", c);
_PyTuple_Resize(&tuple, i);
PyTuple_SET_ITEM(tuple, i++-1, Py_BuildValue("s", c));
}
return tuple;
}
PyObject *netinfo_get_addr(PyObject *self, PyObject *args, int cmd)
{
int ret, fd;
struct ifreq ifreq;
char *dev;
struct sockaddr_in *sin;
char hwaddr[18];
PyObject *rval;
fd = socket(AF_INET, SOCK_DGRAM, 0); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
ret = PyArg_ParseTuple(args, "s", &dev); /* parse argument */
if (!ret) {
close(fd);
return NULL;
}
memset(&ifreq, 0, sizeof(struct ifreq));
strncpy(ifreq.ifr_name, dev, IFNAMSIZ-1);
ifreq.ifr_addr.sa_family = AF_INET;
ret = ioctl(fd, cmd, &ifreq, sizeof(struct ifreq));
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
close(fd);
return NULL;
}
switch (cmd) {
case SIOCGIFADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_addr);
rval = Py_BuildValue("s", inet_ntoa(sin->sin_addr));
break;
case SIOCGIFNETMASK:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_netmask);
rval = Py_BuildValue("s", inet_ntoa(sin->sin_addr));
break;
case SIOCGIFBRDADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_broadaddr);
rval = Py_BuildValue("s", inet_ntoa(sin->sin_addr));
break;
case SIOCGIFHWADDR:
snprintf(hwaddr, 18, "%02X:%02X:%02X:%02X:%02X:%02X",
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[0],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[1],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[2],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[3],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[4],
(unsigned char)ifreq.ifr_ifru.ifru_hwaddr.sa_data[5]);
rval = Py_BuildValue("s", hwaddr);
break;
default:
rval = NULL;
break;
}
close(fd);
return rval;
}
PyObject *netinfo_get_ip(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFADDR);
}
PyObject *netinfo_get_netmask(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFNETMASK);
}
PyObject *netinfo_get_broadcast(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFBRDADDR);
}
PyObject *netinfo_get_hwaddr(PyObject *self, PyObject *args)
{
return netinfo_get_addr(self, args, SIOCGIFHWADDR);
}
PyObject *netinfo_set_state(PyObject *self, PyObject *args)
{
int ret, fd, state = 0;
struct ifreq ifreq;
char *dev;
ret = PyArg_ParseTuple(args, "si", &dev, &state); /* parse argument */
if (!ret)
return NULL;
// ret = PyArg_ParseTuple(args, "i", &state); /* parse argument */
// if (!ret)
// return NULL;
fd = socket(AF_INET, SOCK_DGRAM, IPPROTO_IP); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
memset(&ifreq, 0, sizeof(struct ifreq));
strncpy(ifreq.ifr_name, dev, IFNAMSIZ-1);
ret = ioctl(fd, SIOCGIFFLAGS, &ifreq);
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
close(fd);
return NULL;
}
if (state)
ifreq.ifr_flags |= IFF_UP;
else
ifreq.ifr_flags &= ~IFF_UP;
ret = ioctl(fd, SIOCSIFFLAGS, &ifreq);
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
close(fd);
return NULL;
}
close(fd);
return Py_None;
}
PyObject *netinfo_set_addr(PyObject *self, PyObject *args, int cmd)
{
int ret, fd;
struct ifreq ifreq;
char *dev, *addr;
struct sockaddr_in *sin;
fd = socket(AF_INET, SOCK_DGRAM, 0); /* open a socket to examine */
if (fd < 0) {
PyErr_SetFromErrno(PyExc_Exception);
return NULL;
}
ret = PyArg_ParseTuple(args, "ss", &dev, &addr); /* parse argument */
if (!ret)
return NULL;
memset(&ifreq, 0, sizeof(struct ifreq));
strncpy(ifreq.ifr_name, dev, IFNAMSIZ-1);
ifreq.ifr_addr.sa_family = AF_INET;
switch (cmd) {
case SIOCSIFADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_addr);
inet_aton(addr, &sin->sin_addr);
break;
case SIOCSIFNETMASK:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_netmask);
inet_aton(addr, &sin->sin_addr);
break;
case SIOCSIFBRDADDR:
sin = (struct sockaddr_in *)&(ifreq.ifr_ifru.ifru_broadaddr);
inet_aton(addr, &sin->sin_addr);
break;
}
ret = ioctl(fd, cmd, &ifreq, sizeof(struct ifreq));
if (ret < 0) {
PyErr_SetFromErrno(PyExc_Exception);
close(fd);
return NULL;
}
close(fd);
return Py_None;
}
PyObject *netinfo_set_ip(PyObject *self, PyObject *args)
{
return netinfo_set_addr(self, args, SIOCSIFADDR);
}
PyObject *netinfo_set_netmask(PyObject *self, PyObject *args)
{
return netinfo_set_addr(self, args, SIOCSIFNETMASK);
}
PyObject *netinfo_set_broadcast(PyObject *self, PyObject *args)
{
return netinfo_set_addr(self, args, SIOCSIFBRDADDR);
}
|
jtauber-archive/django-email-confirmation | 9 | emailconfirmation/models.py | from datetime import datetime, timedelta
from random import random
from django.conf import settings
from django.db import models, IntegrityError
from django.template.loader import render_to_string
from django.core.urlresolvers import reverse, NoReverseMatch
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.utils.hashcompat import sha_constructor
from django.utils.translation import gettext_lazy as _
from emailconfirmation.signals import email_confirmed
from emailconfirmation.utils import get_send_mail
send_mail = get_send_mail()
# this code based in-part on django-registration
class EmailAddressManager(models.Manager):
def add_email(self, user, email):
try:
email_address = self.create(user=user, email=email)
EmailConfirmation.objects.send_confirmation(email_address)
return email_address
except IntegrityError:
return None
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except EmailAddress.DoesNotExist:
return None
def get_users_for(self, email):
"""
returns a list of users with the given email.
"""
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in EmailAddress.objects.filter(
verified=True, email=email)]
class EmailAddress(models.Model):
user = models.ForeignKey(User)
email = models.EmailField()
verified = models.BooleanField(default=False)
primary = models.BooleanField(default=False)
objects = EmailAddressManager()
def set_as_primary(self, conditional=False):
old_primary = EmailAddress.objects.get_primary(self.user)
if old_primary:
if conditional:
return False
old_primary.primary = False
old_primary.save()
self.primary = True
self.save()
self.user.email = self.email
self.user.save()
return True
def __unicode__(self):
return u"%s (%s)" % (self.email, self.user)
class Meta:
verbose_name = _("e-mail address")
verbose_name_plural = _("e-mail addresses")
unique_together = (
("user", "email"),
)
class EmailConfirmationManager(models.Manager):
def confirm_email(self, confirmation_key):
try:
confirmation = self.get(confirmation_key=confirmation_key)
except self.model.DoesNotExist:
return None
if not confirmation.key_expired():
email_address = confirmation.email_address
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
email_confirmed.send(sender=self.model, email_address=email_address)
return email_address
def send_confirmation(self, email_address):
salt = sha_constructor(str(random())).hexdigest()[:5]
confirmation_key = sha_constructor(salt + email_address.email).hexdigest()
current_site = Site.objects.get_current()
# check for the url with the dotted view path
try:
path = reverse("emailconfirmation.views.confirm_email",
args=[confirmation_key])
except NoReverseMatch:
# or get path with named urlconf instead
path = reverse(
"emailconfirmation_confirm_email", args=[confirmation_key])
activate_url = u"http://%s%s" % (unicode(current_site.domain), path)
context = {
"user": email_address.user,
"activate_url": activate_url,
"current_site": current_site,
"confirmation_key": confirmation_key,
}
subject = render_to_string(
"emailconfirmation/email_confirmation_subject.txt", context)
# remove superfluous line breaks
subject = "".join(subject.splitlines())
message = render_to_string(
"emailconfirmation/email_confirmation_message.txt", context)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
[email_address.email], priority="high")
return self.create(
email_address=email_address,
sent=datetime.now(),
confirmation_key=confirmation_key)
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
class EmailConfirmation(models.Model):
email_address = models.ForeignKey(EmailAddress)
sent = models.DateTimeField()
confirmation_key = models.CharField(max_length=40)
objects = EmailConfirmationManager()
def key_expired(self):
expiration_date = self.sent + timedelta(
days=settings.EMAIL_CONFIRMATION_DAYS)
return expiration_date <= datetime.now()
key_expired.boolean = True
def __unicode__(self):
return u"confirmation for %s" % self.email_address
class Meta:
verbose_name = _("e-mail confirmation")
verbose_name_plural = _("e-mail confirmations")
| from datetime import datetime, timedelta
from random import random
from django.conf import settings
from django.db import models, IntegrityError
from django.template.loader import render_to_string
from django.core.urlresolvers import reverse, NoReverseMatch
from django.contrib.sites.models import Site
from django.contrib.auth.models import User
from django.utils.hashcompat import sha_constructor
from django.utils.translation import gettext_lazy as _
from emailconfirmation.signals import email_confirmed
from emailconfirmation.utils import get_send_mail
send_mail = get_send_mail()
# this code based in-part on django-registration
class EmailAddressManager(models.Manager):
def add_email(self, user, email):
try:
email_address = self.create(user=user, email=email)
EmailConfirmation.objects.send_confirmation(email_address)
return email_address
except IntegrityError:
return None
def get_primary(self, user):
try:
return self.get(user=user, primary=True)
except EmailAddress.DoesNotExist:
return None
def get_users_for(self, email):
"""
returns a list of users with the given email.
"""
# this is a list rather than a generator because we probably want to
# do a len() on it right away
return [address.user for address in EmailAddress.objects.filter(
verified=True, email=email)]
class EmailAddress(models.Model):
user = models.ForeignKey(User)
email = models.EmailField()
verified = models.BooleanField(default=False)
primary = models.BooleanField(default=False)
objects = EmailAddressManager()
def set_as_primary(self, conditional=False):
old_primary = EmailAddress.objects.get_primary(self.user)
if old_primary:
if conditional:
return False
old_primary.primary = False
old_primary.save()
self.primary = True
self.save()
self.user.email = self.email
self.user.save()
return True
def __unicode__(self):
return u"%s (%s)" % (self.email, self.user)
class Meta:
verbose_name = _("e-mail address")
verbose_name_plural = _("e-mail addresses")
unique_together = (
("user", "email"),
)
class EmailConfirmationManager(models.Manager):
def confirm_email(self, confirmation_key):
try:
confirmation = self.get(confirmation_key=confirmation_key)
except self.model.DoesNotExist:
return None
if not confirmation.key_expired():
email_address = confirmation.email_address
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
email_confirmed.send(sender=self.model, email_address=email_address)
return email_address
def send_confirmation(self, email_address):
salt = sha_constructor(str(random())).hexdigest()[:5]
confirmation_key = sha_constructor(salt + email_address.email).hexdigest()
current_site = Site.objects.get_current()
# check for the url with the dotted view path
try:
path = reverse("emailconfirmation.views.confirm_email",
args=[confirmation_key])
except NoReverseMatch:
# or get path with named urlconf instead
path = reverse(
"emailconfirmation_confirm_email", args=[confirmation_key])
activate_url = u"http://%s%s" % (unicode(current_site.domain), path)
context = {
"user": email_address.user,
"activate_url": activate_url,
"current_site": current_site,
"confirmation_key": confirmation_key,
}
subject = render_to_string(
"emailconfirmation/email_confirmation_subject.txt", context)
# remove superfluous line breaks
subject = "".join(subject.splitlines())
message = render_to_string(
"emailconfirmation/email_confirmation_message.txt", context)
send_mail(subject, message, settings.DEFAULT_FROM_EMAIL,
[email_address.email], priority="high")
return self.create(
email_address=email_address,
confirmation_key=confirmation_key)
def delete_expired_confirmations(self):
for confirmation in self.all():
if confirmation.key_expired():
confirmation.delete()
class EmailConfirmation(models.Model):
email_address = models.ForeignKey(EmailAddress)
sent = models.DateTimeField(auto_now_add=True)
confirmation_key = models.CharField(max_length=40)
objects = EmailConfirmationManager()
def key_expired(self):
expiration_date = self.sent + timedelta(
days=settings.EMAIL_CONFIRMATION_DAYS)
return expiration_date <= datetime.now()
key_expired.boolean = True
def __unicode__(self):
return u"confirmation for %s" % self.email_address
class Meta:
verbose_name = _("e-mail confirmation")
verbose_name_plural = _("e-mail confirmations")
|
rjbs/WWW-AdventCalendar | 4 | share/templates/style.css | %# vim:set ft=mason:
<%args>
%color
</%args>
body {
color: <% $color{bodyFG} %>;
background: <% $color{bodyBG} %>;
font-family: verdana, tahoma, sans-serif;
}
#contentwrapper {
width: 75%;
min-width: 800px;
margin-left: auto;
margin-right: auto;
margin-top: 10px auto;
margin-bottom: 10px;
background: <% $color{blotterBG} %>;
border: 1px solid <% $color{blotterBorder} %>;
-moz-border-radius: 10px;
-webkit-border-radius: 10px;
}
h1 {
margin-top: 10px;
margin-bottom: 10px;
font-weight: bold;
font-size: 1.6em;
text-align: center;
color: <% $color{headerFG} %>;
}
#header h1 a {
color: <% $color{titleFG} %>;
}
h1.title {
text-align: left;
margin-bottom: 0;
}
h2 {
text-align: center;
color: <% $color{headerFG} %>;
}
.subtitle {
text-align: right;
border-top: 1px black solid;
}
.feed {
float: right;
color: <% $color{feedLinkFG} %>;
padding-right: 1em;
vertical-align: middle;
font-weight: bold;
}
dt {
margin: 0.5em 0 0.5em 0;
}
#content, #tagline {
font-size: 0.9em;
padding: 8px 20px;
margin: 10px;
-moz-border-radius: 10px;
-webkit-border-radius: 10px;
}
#content {
border: 1px solid <% $color{contentBorder} %>;
background: <% $color{contentBG} %>;
}
#tagline {
background: <% $color{taglineBG} %>;
border: 1px solid <% $color{taglineBorder} %>;
color: <% $color{taglineFG} %>;
}
.pod h1, .pod h2 {
text-align: left;
color: <% $color{headerFG} %>;
}
a {
color: <% $color{linkFG} %>;
text-decoration: none;
}
a:not(.article) {
padding-left: .2em;
margin-left: -.2em;
padding-right: .2em;
margin-right: -.2em;
}
#header a:hover,
.pod a:hover
{
color: <% $color{linkHoverFG} %>;
background: <% $color{linkHoverBG} %>;
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
}
.pod ul li {
margin: .25em 0;
}
.pod ul li p {
display: inline;
}
.calendar {
margin-left: auto;
margin-right: auto;
margin-bottom: 1em;
}
.calendar th {
padding: 5px;
border: 1px solid <% $color{calendarHeaderCellBorder} %>;
background-color: <% $color{calendarHeaderCellBG} %>;
}
.calendar td {
text-align: center;
width: 9em;
height: 5em;
padding: 0;
}
.calendar td.day {
font-size: xx-large;
height: 2.5em;
background: <% $color{calendarIgnoredDayBG} %>;
}
.calendar td.day.advent {
background: <% $color{calendarPastDayBG} %>;
}
.calendar td.day.advent a {
display: block;
color: <% $color{calendarPastDayFG} %>;
height: 2.5em;
padding: 0;
margin: 0;
vertical-align: middle;
line-height: 2.5em;
}
.calendar td.day.advent a:hover {
background: <% $color{calendarPastDayHoverBG} %>;
color: <% $color{calendarPastDayHoverFG} %>;
}
.calendar td.day.advent.today {
background: <% $color{calendarTodayBG} %>;
}
.calendar td.day.advent.today a {
color: <% $color{calendarTodayFG} %>;
}
.calendar td.day.advent.today a:hover {
background: <% $color{calendarTodayHoverBG} %>;
color: <% $color{calendarTodayFG} %>;
}
.calendar td.day.advent.future {
background: <% $color{calendarFutureDayBG} %>;
color: <% $color{calendarFutureDayFG} %>;
}
.calendar td.day.advent.missing {
color: <% $color{calendarMissingDayFG} %>;
background-color: <% $color{calendarMissingDayBG} %>;
}
a img {
border: 0;
}
blockquote {
border-left: thick <% $color{quoteBorder} %> solid;
padding-left: 1em;
margin-left: 1em;
margin-right: 1em;
}
h2#See-Also {
padding-top: 0.4em;
border-top: 1px solid <% $color{sectionBorder} %>;
}
#author {
border-top: 1px solid <% $color{sectionBorder} %>;
padding-top: 0.4em;
text-align: center;
}
#pager {
border-top: 1px solid <% $color{sectionBorder} %>;
list-style-type: none;
margin-left: 0;
padding: 0.4em;
position: relative;
width: 100%;
}
#pager li {
color: <% $color{linkDisabledFG} %>;
}
#pager .previous {
left: 0.4em;
position: absolute;
width: 33%;
}
#pager .next {
position: absolute;
right: 0.4em;
text-align: right;
width: 33%;
}
/* verbatim text and code listings */
pre {
line-height: 120%;
padding-top: 1em;
padding-bottom: 1em;
background-color: <% $color{codeBG} %>;
color: <% $color{codeFG} %>;
font-family: monospace;
width: 100%;
}
.code-listing {
line-height: 120%;
background-color: <% $color{codeBG} %>;
color: <% $color{codeFG} %>;
font-family: monospace;
white-space: pre;
border-collapse: collapse;
width: 100%;
}
.code-listing td {
padding: 0;
margin: 0
}
.code-listing .line-numbers {
background-color: <% $color{codeNumbersBG} %>;
color: <% $color{codeNumbersFG} %>;
border-right: 2px <% $color{codeNumbersBorder} %> solid;
width: 3.5em;
text-align: right;
}
.code-listing .code {
padding-left: 1em;
}
/* PPI HTML Style */
.code-listing .keyword { color: #89f; }
.code-listing .symbol { color: #0cc; }
.code-listing .operator { color: #fff; }
.code-listing .structure { color: #bf0; }
.code-listing .word { color: #dd8; }
.code-listing .comment { color: #0f0; }
.code-listing .pod { color: #0f0; }
.code-listing .match { color: #ff0; }
.code-listing .readline { color: #caa; }
.code-listing .single,
.code-listing .double { color: #0cf; }
/* Vim Syntax Style */
.code-listing .synComment { color: #0f0; }
.code-listing .synConstant { color: #0ff; }
.code-listing .synIdentifier { color: #89f; }
.code-listing .synStatement { color: #0aa; }
.code-listing .synPreProc { color: #fff; }
.code-listing .synType { color: #0aa; }
.code-listing .synSpecial { color: #fff; }
.code-listing .synUnderlined { color: #0a0; }
.code-listing .synIgnore { color: #aaa; }
.code-listing .synError { color: #f00; }
.code-listing .synTodo { color: #aa0; }
/* BEGIN CHRISTMAS: Move to "extra CSS" */
.calendar td.day.advent.missing,
.calendar td.day.missing#dec-25,
.calendar td.day.missing#dec-26 {
background: #000;
color: #f00;
}
.calendar td#dec-25 {
background: #ffd700;
font-weight: bold;
}
/* END CHRISTMAS: Move to "extra CSS" */
| %# vim:set ft=mason:
<%args>
%color
</%args>
body {
color: <% $color{bodyFG} %>;
background: <% $color{bodyBG} %>;
font-family: verdana, tahoma, sans-serif;
}
#contentwrapper {
width: 75%;
min-width: 800px;
margin-left: auto;
margin-right: auto;
margin-top: 10px auto;
margin-bottom: 10px;
background: <% $color{blotterBG} %>;
border: 1px solid <% $color{blotterBorder} %>;
-moz-border-radius: 10px;
-webkit-border-radius: 10px;
}
h1 {
margin-top: 10px;
margin-bottom: 10px;
font-weight: bold;
font-size: 1.6em;
text-align: center;
color: <% $color{headerFG} %>;
}
#header h1 a {
color: <% $color{titleFG} %>;
}
h1.title {
text-align: left;
margin-bottom: 0;
}
h2 {
text-align: center;
color: <% $color{headerFG} %>;
}
.subtitle {
text-align: right;
border-top: 1px black solid;
}
.feed {
float: right;
color: <% $color{feedLinkFG} %>;
padding-right: 1em;
vertical-align: middle;
font-weight: bold;
}
dt {
margin: 0.5em 0 0.5em 0;
}
#content, #tagline, #yearline {
font-size: 0.9em;
padding: 8px 20px;
margin: 10px;
-moz-border-radius: 10px;
-webkit-border-radius: 10px;
}
#content {
border: 1px solid <% $color{contentBorder} %>;
background: <% $color{contentBG} %>;
}
#tagline, #yearline {
background: <% $color{taglineBG} %>;
border: 1px solid <% $color{taglineBorder} %>;
color: <% $color{taglineFG} %>;
}
.pod h1, .pod h2 {
text-align: left;
color: <% $color{headerFG} %>;
}
a {
color: <% $color{linkFG} %>;
text-decoration: none;
}
a:not(.article) {
padding-left: .2em;
margin-left: -.2em;
padding-right: .2em;
margin-right: -.2em;
}
#header a:hover,
.pod a:hover
{
color: <% $color{linkHoverFG} %>;
background: <% $color{linkHoverBG} %>;
-moz-border-radius: 5px;
-webkit-border-radius: 5px;
}
.pod ul li {
margin: .25em 0;
}
.pod ul li p {
display: inline;
}
.calendar {
margin-left: auto;
margin-right: auto;
margin-bottom: 1em;
}
.calendar th {
padding: 5px;
border: 1px solid <% $color{calendarHeaderCellBorder} %>;
background-color: <% $color{calendarHeaderCellBG} %>;
}
.calendar td {
text-align: center;
width: 9em;
height: 5em;
padding: 0;
}
.calendar td.day {
font-size: xx-large;
height: 2.5em;
background: <% $color{calendarIgnoredDayBG} %>;
}
.calendar td.day.advent {
background: <% $color{calendarPastDayBG} %>;
}
.calendar td.day.advent a {
display: block;
color: <% $color{calendarPastDayFG} %>;
height: 2.5em;
padding: 0;
margin: 0;
vertical-align: middle;
line-height: 2.5em;
}
.calendar td.day.advent a:hover {
background: <% $color{calendarPastDayHoverBG} %>;
color: <% $color{calendarPastDayHoverFG} %>;
}
.calendar td.day.advent.today {
background: <% $color{calendarTodayBG} %>;
}
.calendar td.day.advent.today a {
color: <% $color{calendarTodayFG} %>;
}
.calendar td.day.advent.today a:hover {
background: <% $color{calendarTodayHoverBG} %>;
color: <% $color{calendarTodayFG} %>;
}
.calendar td.day.advent.future {
background: <% $color{calendarFutureDayBG} %>;
color: <% $color{calendarFutureDayFG} %>;
}
.calendar td.day.advent.missing {
color: <% $color{calendarMissingDayFG} %>;
background-color: <% $color{calendarMissingDayBG} %>;
}
a img {
border: 0;
}
blockquote {
border-left: thick <% $color{quoteBorder} %> solid;
padding-left: 1em;
margin-left: 1em;
margin-right: 1em;
}
h2#See-Also {
padding-top: 0.4em;
border-top: 1px solid <% $color{sectionBorder} %>;
}
#author {
border-top: 1px solid <% $color{sectionBorder} %>;
padding-top: 0.4em;
text-align: center;
}
#pager {
border-top: 1px solid <% $color{sectionBorder} %>;
list-style-type: none;
margin-left: 0;
padding: 0.4em;
position: relative;
width: 100%;
}
#pager li {
color: <% $color{linkDisabledFG} %>;
}
#pager .previous {
left: 0.4em;
position: absolute;
width: 33%;
}
#pager .next {
position: absolute;
right: 0.4em;
text-align: right;
width: 33%;
}
/* verbatim text and code listings */
pre {
line-height: 120%;
padding-top: 1em;
padding-bottom: 1em;
background-color: <% $color{codeBG} %>;
color: <% $color{codeFG} %>;
font-family: monospace;
width: 100%;
}
.code-listing {
line-height: 120%;
background-color: <% $color{codeBG} %>;
color: <% $color{codeFG} %>;
font-family: monospace;
white-space: pre;
border-collapse: collapse;
width: 100%;
}
.code-listing td {
padding: 0;
margin: 0
}
.code-listing .line-numbers {
background-color: <% $color{codeNumbersBG} %>;
color: <% $color{codeNumbersFG} %>;
border-right: 2px <% $color{codeNumbersBorder} %> solid;
width: 3.5em;
text-align: right;
}
.code-listing .code {
padding-left: 1em;
}
/* PPI HTML Style */
.code-listing .keyword { color: #89f; }
.code-listing .symbol { color: #0cc; }
.code-listing .operator { color: #fff; }
.code-listing .structure { color: #bf0; }
.code-listing .word { color: #dd8; }
.code-listing .comment { color: #0f0; }
.code-listing .pod { color: #0f0; }
.code-listing .match { color: #ff0; }
.code-listing .readline { color: #caa; }
.code-listing .single,
.code-listing .double { color: #0cf; }
/* Vim Syntax Style */
.code-listing .synComment { color: #0f0; }
.code-listing .synConstant { color: #0ff; }
.code-listing .synIdentifier { color: #89f; }
.code-listing .synStatement { color: #0aa; }
.code-listing .synPreProc { color: #fff; }
.code-listing .synType { color: #0aa; }
.code-listing .synSpecial { color: #fff; }
.code-listing .synUnderlined { color: #0a0; }
.code-listing .synIgnore { color: #aaa; }
.code-listing .synError { color: #f00; }
.code-listing .synTodo { color: #aa0; }
/* BEGIN CHRISTMAS: Move to "extra CSS" */
.calendar td.day.advent.missing,
.calendar td.day.missing#dec-25,
.calendar td.day.missing#dec-26 {
background: #000;
color: #f00;
}
.calendar td#dec-25 {
background: #ffd700;
font-weight: bold;
}
/* END CHRISTMAS: Move to "extra CSS" */
|
jakubkulhan/couchdb-php | 1 | lib/CouchDBFsockConnector.php | <?php
require_once dirname(__FILE__) . '/CouchDBConnector.php';
/**
* Connector using fsockopen() function
*/
class CouchDBFsockConnector implements CouchDBConnector
{
/**
* End of line
*/
const EOL = "\r\n";
/**
* User agent
*/
private $ua = 'CouchDBFsockConnector';
/**
* @var string Host
*/
private $host = 'localhost';
/**
* @var int Port
*/
private $port = 5984;
/**
* @var string Username
*/
private $user = NULL;
/**
* @var string Password
*/
private $pass = NULL;
/**
* @var string Base path
*/
private $path = '/';
/**
* Create new instance
* @pram string
* @return CouchDBFsockConnector
*/
public static function open($url)
{
return new self($url);
}
/**
* Initialize state
* @param string
*/
private function __constuct($url)
{
foreach (parse_url($url) as $k => $v) $this->$k = $v;
$this->path = rtrim($this->path, '/') . '/';
}
/**
* Create socket
* @param int
* @param string
* @return resource
*/
protected function createSocket(&$errno, &$errstr)
{
return @fsockopen($this->host, $this->port, $errno, $errstr);
}
/**
* Create request
* @param string
* @param string
* @param string
* @param array
* @return string
*/
protected function createRequest(&$request, $method, $path, $query = array())
{
$path = ltrim($path, '/');
if (preg_match('~^.+/((_desing/.+/|_temp)_view|_all_docs)~', $path)) {
foreach (array('key', 'startkey', 'endkey', 'limit', 'descending',
'skip', 'group', 'group_level', 'reduce', 'include_docs') as $k)
{
if (!isset($query[$k])) continue;
$query[$k] = json_encode($query[$k]);
}
}
$request = $method .
' ' .
$this->path . $path .
(empty($query) ? '' : '?' . http_build_query($query, NULL, '&')) .
' ' .
'HTTP/1.1' . self::EOL;
}
/**
* Add essential headers
* @param string
*/
protected function addEssentialHeaders(&$request)
{
$request .= 'Host: ' . $this->host . self::EOL;
$request .= 'User-Agent: ' . $this->ua . self::EOL;
$request .= 'Accept: */*' . self::EOL;
$request .= 'Connection: close' . self::EOL;
}
/**
* Add additional headers
* @param string
* @param array
*/
protected function addAdditionalHeaders(&$request, array $headers = array())
{
foreach ($headers as $header => $value)
$request .= $header . ': ' . $value . self::EOL;
}
/**
* Authorization headers
* @param string
*/
protected function addAuthorizationHeaders(&$request)
{
if ($this->user !== NULL)
$request .= 'Authorization: Basic' .
base64_encode($this->user . ':' . $this->pass) .
self::EOL;
}
/**
* Add content
* @param string
* @param string
* @param bool
*/
protected function addContent(&$request, $body = NULL, $raw = FALSE)
{
if ($body !== NULL) {
$json = FALSE;
if (!$raw) {
$json = TRUE;
$body = json_encode($body);
}
$request .= 'Content-Length: ' . strlen($body) . self::EOL;
if ($json) $request .= 'Content-Type: application/json' . self::EOL;
}
$request .= self::EOL;
$request .= $body;
}
/**
* Send request
* @param resource
* @param string
*/
protected function sendRequest($socket, $request)
{
fwrite($socket, $request);
}
/**
* Get response
* @param resource
*/
protected function getResponse($socket)
{
// get data
$response = '';
while (!feof($socket)) $response .= fread($socket, 4096);
// get headers
list($headers, $body) = explode(self::EOL . self::EOL, $response, 2);
$real_headers = array();
$return = NULL;
foreach (explode(self::EOL, $headers) as $header) {
if ($return === NULL) {
$return = $header;
continue;
}
list($k, $v) = explode(':', $header, 2);
$real_headers[strtolower($k)] = trim($v);
}
// chunked?
if (isset($real_headers['transfer-encoding']) &&
$real_headers['transfer-encoding'] === 'chunked')
{
$real_body = '';
$tail = $body;
do {
list($head, $tail) = explode(self::EOL, $tail, 2);
$size = hexdec($head);
if ($size > 0) {
$real_body .= substr($tail, 0, $size);
$tail = substr($tail, $size + strlen(self::EOL));
}
} while ($size);
} else $real_body = $body;
return array($real_headers, $return, $real_body);
}
/**
* Send request to database
* @param string
* @param string
* @param array
* @param mixed
* @param bool whether body should be sent as raw
* @param array additional headers
* @return array
*/
public function request($method, $path, array $query = array(), $body = NULL,
$raw_body = FALSE, $headers = array())
{
if (!($s = $this->createSocket($errno, $errstr))) return (object) array(
'error' => 'fsockopen',
'errstr' => $errstr,
'errno' => $errno
);
$this->createRequest($request, $method, $path, $query);
$this->addEssentialHeaders($request);
$this->addAdditionalHeaders($request, $headers);
$this->addAuthorizationHeaders($request);
$this->addContent($request, $body, $raw_body);
$this->sendRequest($s, $request);
list($headers, $return, $body) = $this->getResponse($s);
fclose($s);
if ($raw_body) return (object) array(
'data' => $body,
'content_type' => isset($headers['content-type']) ? $header['content-type'] : NULL
);
return json_decode($body);
}
}
| <?php
require_once dirname(__FILE__) . '/CouchDBConnector.php';
/**
* Connector using fsockopen() function
*/
class CouchDBFsockConnector implements CouchDBConnector
{
/**
* End of line
*/
const EOL = "\r\n";
/**
* User agent
*/
private $ua = 'CouchDBFsockConnector';
/**
* @var string Host
*/
private $host = 'localhost';
/**
* @var int Port
*/
private $port = 5984;
/**
* @var string Username
*/
private $user = NULL;
/**
* @var string Password
*/
private $pass = NULL;
/**
* @var string Base path
*/
private $path = '/';
/**
* Create new instance
* @pram string
* @return CouchDBFsockConnector
*/
public static function open($url)
{
return new self($url);
}
/**
* Initialize state
* @param string
*/
private function __constuct($url)
{
foreach (parse_url($url) as $k => $v) $this->$k = $v;
$this->path = rtrim($this->path, '/') . '/';
}
/**
* Create socket
* @param int
* @param string
* @return resource
*/
protected function createSocket(&$errno, &$errstr)
{
return @fsockopen($this->host, $this->port, $errno, $errstr);
}
/**
* Create request
* @param string
* @param string
* @param string
* @param array
* @return string
*/
protected function createRequest(&$request, $method, $path, $query = array())
{
$path = ltrim($path, '/');
if (preg_match('~^.+/((_desing/.+/|_temp)_view|_all_docs)~', $path)) {
foreach (array('key', 'startkey', 'endkey', 'limit', 'descending',
'skip', 'group', 'group_level', 'reduce', 'include_docs') as $k)
{
if (!isset($query[$k])) continue;
$query[$k] = json_encode($query[$k]);
}
}
$request = $method .
' ' .
$this->path . $path .
(empty($query) ? '' : '?' . http_build_query($query, NULL, '&')) .
' ' .
'HTTP/1.1' . self::EOL;
}
/**
* Add essential headers
* @param string
*/
protected function addEssentialHeaders(&$request)
{
$request .= 'Host: ' . $this->host . self::EOL;
$request .= 'User-Agent: ' . $this->ua . self::EOL;
$request .= 'Accept: */*' . self::EOL;
$request .= 'Connection: close' . self::EOL;
}
/**
* Add additional headers
* @param string
* @param array
*/
protected function addAdditionalHeaders(&$request, array $headers = array())
{
foreach ($headers as $header => $value)
$request .= $header . ': ' . $value . self::EOL;
}
/**
* Authorization headers
* @param string
*/
protected function addAuthorizationHeaders(&$request)
{
if ($this->user !== NULL)
$request .= 'Authorization: Basic ' .
base64_encode($this->user . ':' . $this->pass) .
self::EOL;
}
/**
* Add content
* @param string
* @param string
* @param bool
*/
protected function addContent(&$request, $body = NULL, $raw = FALSE)
{
if ($body !== NULL) {
$json = FALSE;
if (!$raw) {
$json = TRUE;
$body = json_encode($body);
}
$request .= 'Content-Length: ' . strlen($body) . self::EOL;
if ($json) $request .= 'Content-Type: application/json' . self::EOL;
}
$request .= self::EOL;
$request .= $body;
}
/**
* Send request
* @param resource
* @param string
*/
protected function sendRequest($socket, $request)
{
fwrite($socket, $request);
}
/**
* Get response
* @param resource
*/
protected function getResponse($socket)
{
// get data
$response = '';
while (!feof($socket)) $response .= fread($socket, 4096);
// get headers
list($headers, $body) = explode(self::EOL . self::EOL, $response, 2);
$real_headers = array();
$return = NULL;
foreach (explode(self::EOL, $headers) as $header) {
if ($return === NULL) {
$return = $header;
continue;
}
list($k, $v) = explode(':', $header, 2);
$real_headers[strtolower($k)] = trim($v);
}
// chunked?
if (isset($real_headers['transfer-encoding']) &&
$real_headers['transfer-encoding'] === 'chunked')
{
$real_body = '';
$tail = $body;
do {
list($head, $tail) = explode(self::EOL, $tail, 2);
$size = hexdec($head);
if ($size > 0) {
$real_body .= substr($tail, 0, $size);
$tail = substr($tail, $size + strlen(self::EOL));
}
} while ($size);
} else $real_body = $body;
return array($real_headers, $return, $real_body);
}
/**
* Send request to database
* @param string
* @param string
* @param array
* @param mixed
* @param bool whether body should be sent as raw
* @param array additional headers
* @return array
*/
public function request($method, $path, array $query = array(), $body = NULL,
$raw_body = FALSE, $headers = array())
{
if (!($s = $this->createSocket($errno, $errstr))) return (object) array(
'error' => 'fsockopen',
'errstr' => $errstr,
'errno' => $errno
);
$this->createRequest($request, $method, $path, $query);
$this->addEssentialHeaders($request);
$this->addAdditionalHeaders($request, $headers);
$this->addAuthorizationHeaders($request);
$this->addContent($request, $body, $raw_body);
$this->sendRequest($s, $request);
list($headers, $return, $body) = $this->getResponse($s);
fclose($s);
if ($raw_body) return (object) array(
'data' => $body,
'content_type' => isset($headers['content-type']) ? $header['content-type'] : NULL
);
return json_decode($body);
}
}
|
andregoncalves/twitter-nodejs-websocket | 5 | index.html | <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html>
<head>
<script src='http://ajax.googleapis.com/ajax/libs/jquery/1.3.2/jquery.min.js'></script>
<script>
$(document).ready(function(){
if(!("WebSocket" in window)) {
alert("Sorry, the build of your browser does not support WebSockets. Please use latest Chrome or Webkit nightly");
return;
}
ws = new WebSocket("ws://localhost:8080/");
ws.onmessage = function(evt) {
data = eval("(" + evt.data + ")");
var p = $("<div class='tweet' style='display:none'><div class='content'><a class='main-screenname' href='http://www.twitter.com/" + data.user.screen_name + "/status/" + data.id + "' target='_blank'>" + data.user.screen_name + "</a> " + data.text + "</div></div>");
if($('#tweets div.tweet').size() > 15) {
$('#tweets div.tweet:last').slideDown(100, function() {
$(this).remove();
});
}
$('#tweets').prepend(p);
p.slideDown(140);
};
ws.onclose = function() {
alert("socket closed");
};
ws.onopen = function() {
//alert("connected...");
};
});
</script>
</head>
<body>
<div id="tweets">
</div>
</body>
</html>
| <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN"
"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd">
<html>
<head>
<script src='http://ajax.googleapis.com/ajax/libs/jquery/1.3.2/jquery.min.js'></script>
<script>
$(document).ready(function(){
if(!("WebSocket" in window)) {
alert("Sorry, the build of your browser does not support WebSockets. Please use latest Chrome or Webkit nightly");
return;
}
ws = new WebSocket("ws://localhost:8080/");
ws.onmessage = function(evt) {
data = eval("(" + evt.data + ")");
var p = $("<div class='tweet' style='display:none'><div class='content'><a class='main-screenname' href='http://www.twitter.com/" + data.user.screen_name + "/status/" + data.id + "' target='_blank'>" + data.user.screen_name + "</a> " + data.text + "</div></div>");
if($('#tweets div.tweet').size() > 15) {
$('#tweets div.tweet:last').slideDown(100, function() {
$(this).remove();
});
}
$('#tweets').prepend(p);
p.slideDown(140);
};
ws.onclose = function() {
alert("socket closed");
};
ws.onopen = function() {
//alert("connected...");
};
});
</script>
</head>
<body>
<div id="tweets">
</div>
</body>
</html>
|
ileitch/hijack | 16 | lib/hijack/gdb.rb | # Based on gdb.rb by Jamis Buck, thanks Jamis!
module Hijack
class GDB
def initialize(pid)
@pid = pid
@verbose = Hijack.options[:debug]
@exec_path = File.join(RbConfig::CONFIG['bindir'], RbConfig::CONFIG['RUBY_INSTALL_NAME'] + RbConfig::CONFIG['EXEEXT'])
attach_outside_gc
end
def eval(cmd)
call("(void)rb_eval_string(#{cmd.strip.gsub(/"/, '\"').inspect})")
end
def quit
return unless @gdb
detach
exec('quit')
@backtrace = nil
@gdb.close
@gdb = nil
end
protected
def previous_frame_inner_to_this_frame?
backtrace.last =~ /previous frame inner to this frame/i
end
def attach_outside_gc
@gdb = IO.popen("gdb -q #{@exec_path} #{@pid} 2>&1", 'r+')
wait
ensure_attached_to_ruby_process
attached = false
3.times do |i|
attach unless i == 0
if previous_frame_inner_to_this_frame? || during_gc?
detach
sleep 0.1
else
attached = true
break
end
end
unless attached
puts
puts "=> Tried 3 times to attach to #{@pid} whilst GC wasn't running but failed."
puts "=> This means either the process calls GC.start frequently or GC runs are slow - try hijacking again."
exit 1
end
break_on_safe_stack_unwind
end
def break_on_safe_stack_unwind
safe = false
backtrace.each do |line|
# vm_call_method == 1.9, rb_call == 1.8
if line =~ /(vm_call_method|rb_call)/
frame = line.match(/^\#([\d]+)/)[1]
safe = true
exec("frame #{frame}")
exec("break")
exec("continue")
exec("delete 1")
break
end
end
if !safe
puts "=> WARNING: Did not detect a safe frame on which to set a breakpoint, hijack may fail."
end
end
def during_gc?
!!(call("(int)rb_during_gc()").first =~ /\$[\d]+ = 1/)
end
def detach
exec("detach")
end
def attach
exec("attach #{@pid}")
end
def ensure_attached_to_ruby_process
unless backtrace.any? {|line| line =~ /(rb|ruby)_/}
puts "\n=> #{@pid} doesn't appear to be a Ruby process!"
detach
exit 1
end
end
def backtrace
exec('bt')
end
def continue
exec('continue')
end
def call(cmd)
exec("call #{cmd}")
end
def exec(str)
puts str if @verbose
@gdb.puts(str)
wait
end
def wait
lines = []
line = ''
while result = IO.select([@gdb])
next if result.empty?
c = @gdb.read(1)
break if c.nil?
STDOUT.write(c) if @verbose
line << c
break if line == "(gdb) " || line == " >"
if line[-1] == ?\n
lines << line
line = ""
end
end
lines
end
end
end
| # Based on gdb.rb by Jamis Buck, thanks Jamis!
module Hijack
class GDB
def initialize(pid)
@pid = pid
@verbose = Hijack.options[:debug]
@exec_path = File.join(RbConfig::CONFIG['bindir'], RbConfig::CONFIG['RUBY_INSTALL_NAME'] + RbConfig::CONFIG['EXEEXT'])
attach_outside_gc
end
def eval(cmd)
evaled_cmd = cmd.strip.gsub(/"/, '\"').inspect
call("(void)rb_eval_string(#{evaled_cmd})")
end
def quit
return unless @gdb
detach
exec('quit')
@backtrace = nil
@gdb.close
@gdb = nil
end
protected
def previous_frame_inner_to_this_frame?
backtrace.last =~ /previous frame inner to this frame/i
end
def gdb_path
# Check for gdb
if File.exists?(`which gdb`.strip)
`which gdb`.strip
elsif File.exists?(`which ggdb`.strip)
`which ggdb`.strip
else
raise "Cannot find suitable gdb!"
end
end
def attach_outside_gc
@gdb = IO.popen("#{gdb_path} -q #{@exec_path} #{@pid} 2>&1", 'r+')
wait
ensure_attached_to_ruby_process
attached = false
3.times do |i|
attach unless i == 0
if previous_frame_inner_to_this_frame? || during_gc?
detach
sleep 0.1
else
attached = true
break
end
end
unless attached
puts
puts "=> Tried 3 times to attach to #{@pid} whilst GC wasn't running but failed."
puts "=> This means either the process calls GC.start frequently or GC runs are slow - try hijacking again."
exit 1
end
break_on_safe_stack_unwind
end
def break_on_safe_stack_unwind
safe = false
backtrace.each do |line|
# vm_call_method == 1.9, rb_call == 1.8
if line =~ /(vm_call_method|rb_call)/
frame = line.match(/^\#([\d]+)/)[1]
safe = true
exec("frame #{frame}")
exec("break")
exec("continue")
exec("delete 1")
break
end
end
if !safe
puts "=> WARNING: Did not detect a safe frame on which to set a breakpoint, hijack may fail."
end
end
def during_gc?
!!(call("(int)rb_during_gc()").first =~ /\$[\d]+ = 1/)
end
def detach
exec("detach")
end
def attach
exec("attach #{@pid}")
end
def ensure_attached_to_ruby_process
unless backtrace.any? {|line| line =~ /(rb|ruby)_/}
puts "\n=> #{@pid} doesn't appear to be a Ruby process!"
detach
exit 1
end
end
def backtrace
exec('bt')
end
def continue
exec('continue')
end
def call(cmd)
exec("call #{cmd}")
end
def exec(str)
puts str if @verbose
@gdb.puts(str)
wait
end
def wait
lines = []
line = ''
while result = IO.select([@gdb])
next if result.empty?
c = @gdb.read(1)
break if c.nil?
STDOUT.write(c) if @verbose
line << c
break if line == "(gdb) " || line == " >"
if line[-1] == ?\n
lines << line
line = ""
end
end
lines
end
end
end
|
winston/google_visualr_app | 2 | app/controllers/examples/interactive_controller.rb | class Examples::InteractiveController < ApplicationController
layout "default"
# http://code.google.com/apis/chart/interactive/docs/gallery/annotatedtimeline.html#Example
def annotated_time_line
data_table = GoogleVisualr::DataTable.new
data_table.new_column('date' , 'Date')
data_table.new_column('number', 'Sold Pencils')
data_table.new_column('string', 'title1')
data_table.new_column('string', 'text1' )
data_table.new_column('number', 'Sold Pens' )
data_table.new_column('string', 'title2')
data_table.new_column('string', 'text2' )
data_table.add_rows( [
[ Date.parse("2008-2-1"), 30000, '', '', 40645, '', ''],
[ Date.parse("2008-2-2"), 14045, '', '', 20374, '', ''],
[ Date.parse("2008-2-3"), 55022, '', '', 50766, '', ''],
[ Date.parse("2008-2-4"), 75284, '', '', 14334, 'Out of Stock','Ran out of stock on pens at 4pm'],
[ Date.parse("2008-2-5"), 41476, 'Bought Pens','Bought 200k pens', 66467, '', ''],
[ Date.parse("2008-2-6"), 33322, '', '', 39463, '', '']
] )
opts = { :displayAnnotations => true }
@chart = GoogleVisualr::Interactive::AnnotatedTimeLine.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/areachart.html#Example
def area_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string', 'Year')
data_table.new_column('number', 'Sales')
data_table.new_column('number', 'Expenses')
data_table.add_rows( [
['2004', 1000, 400],
['2005', 1170, 460],
['2006', 660, 1120],
['2007', 1030, 540]
])
opts = { width: 400, height: 240, title: 'Company Performance', hAxis: {title: 'Year', titleTextStyle: {color: '#FF0000'}} }
@chart = GoogleVisualr::Interactive::AreaChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/barchart.html#Example
def bar_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string', 'Year')
data_table.new_column('number', 'Sales')
data_table.new_column('number', 'Expenses')
data_table.add_rows(4)
data_table.set_cell(0, 0, '2004')
data_table.set_cell(0, 1, 1000)
data_table.set_cell(0, 2, 400)
data_table.set_cell(1, 0, '2005')
data_table.set_cell(1, 1, 1170)
data_table.set_cell(1, 2, 460)
data_table.set_cell(2, 0, '2006')
data_table.set_cell(2, 1, 660)
data_table.set_cell(2, 2, 1120)
data_table.set_cell(3, 0, '2007')
data_table.set_cell(3, 1, 1030)
data_table.set_cell(3, 2, 540)
opts = { :width => 400, :height => 240, :title => 'Company Performance', vAxis: {title: 'Year', titleTextStyle: {color: 'red'}} }
@chart = GoogleVisualr::Interactive::BarChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/bubblechart.html
def bubble_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string', 'ID')
data_table.new_column('number', 'Life Expectancy')
data_table.new_column('number', 'Fertility Rate')
data_table.new_column('string', 'Region')
data_table.new_column('number', 'Population')
data_table.add_rows( [
['CAN', 80.66, 1.67, 'North America', 33739900],
['DEU', 79.84, 1.36, 'Europe', 81902307],
['DNK', 78.6, 1.84, 'Europe', 5523095],
['EGY', 72.73, 2.78, 'Middle East', 79716203],
['GBR', 80.05, 2, 'Europe', 61801570],
['IRN', 72.49, 1.7, 'Middle East', 73137148],
['IRQ', 68.09, 4.77, 'Middle East', 31090763],
['ISR', 81.55, 2.96, 'Middle East', 7485600],
['RUS', 68.6, 1.54, 'Europe', 141850000],
['USA', 78.09, 2.05, 'North America', 307007000]
])
opts = {
:width => 800, :height => 500,
:title => 'Correlation between life expectancy, fertility rate and population of some world countries (2010)',
:hAxis => { :title => 'Life Expectancy' },
:vAxis => { :title => 'Fertility Rate' },
:bubble => { :textStyle => { :fontSize => 11 } }
}
@chart = GoogleVisualr::Interactive::BubbleChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/candlestickchart.html
def candlestick_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string', 'day')
data_table.new_column('number', 'min')
data_table.new_column('number', 'opening')
data_table.new_column('number', 'closing')
data_table.new_column('number', 'max')
data_table.add_rows( [
['Mon',20,28,38,45],
['Tue',31,38,55,66],
['Wed',50,55,77,80],
['Thu',50,77,66,77],
['Fri',15,66,22,68]
] )
opts = { :width => 400, :height => 240, :legend => 'none' }
@chart = GoogleVisualr::Interactive::CandlestickChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/columnchart.html#Example
def column_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string', 'Year')
data_table.new_column('number', 'Sales')
data_table.new_column('number', 'Expenses')
data_table.add_rows(4)
data_table.set_cell(0, 0, '2004')
data_table.set_cell(0, 1, 1000)
data_table.set_cell(0, 2, 400)
data_table.set_cell(1, 0, '2005')
data_table.set_cell(1, 1, 1170)
data_table.set_cell(1, 2, 460)
data_table.set_cell(2, 0, '2006')
data_table.set_cell(2, 1, 660)
data_table.set_cell(2, 2, 1120)
data_table.set_cell(3, 0, '2007')
data_table.set_cell(3, 1, 1030)
data_table.set_cell(3, 2, 540)
opts = { :width => 400, :height => 240, :title => 'Company Performance', :hAxis => { :title => 'Year', :titleTextStyle => {:color => 'red'} } }
@chart = GoogleVisualr::Interactive::ColumnChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/combochart.html
def combo_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string', 'month' )
data_table.new_column('number', 'Bolivia' )
data_table.new_column('number', 'Ecuador' )
data_table.new_column('number', 'Madagascar' )
data_table.new_column('number', 'Papua Guinea')
data_table.new_column('number', 'Rwanda' )
data_table.new_column('number', 'Avarage' )
data_table.add_rows( [
['2004/05', 165, 938 , 522, 998 , 450, 614.6],
['2005/06', 135, 1120 , 599, 1268 , 288, 682 ],
['2006/07', 157, 1167 , 587, 807 , 397, 623 ],
['2007/08', 139, 1110 , 615, 968 , 215, 609.4],
['2008/09', 136, 691 , 629, 1026 , 366, 569.6]
] )
opts = { :width => 700, :height => 400, :title => 'Monthly Coffee Production by Country', :vAxis => {:title => 'Cups'}, :hAxis => {:title => 'Month'}, :seriesType => 'bars', :series => {'5' => {:type => 'line'}} }
@chart = GoogleVisualr::Interactive::ComboChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/gauge.html#Example
def gauge
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string' , 'Label')
data_table.new_column('number' , 'Value')
data_table.add_rows(3)
data_table.set_cell(0, 0, 'Memory' )
data_table.set_cell(0, 1, 80)
data_table.set_cell(1, 0, 'CPU' )
data_table.set_cell(1, 1, 55)
data_table.set_cell(2, 0, 'Network')
data_table.set_cell(2, 1, 68)
opts = { :width => 400, :height => 120, :redFrom => 90, :redTo => 100, :yellowFrom => 75, :yellowTo => 90, :minorTicks => 5 }
@chart = GoogleVisualr::Interactive::Gauge.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/geochart.html#Example
def geo_chart
  # GeoChart example: popularity by country rendered on a world map.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Country')
  data_table.new_column('number', 'Popularity')
  popularity = [['Germany', 200], ['United States', 300], ['Brazil', 400],
                ['Canada', 500], ['France', 600], ['RU', 700]]
  data_table.add_rows(popularity.size)
  popularity.each_with_index do |(country, value), row|
    data_table.set_cell(row, 0, country)
    data_table.set_cell(row, 1, value)
  end
  @chart = GoogleVisualr::Interactive::GeoChart.new(data_table, { width: 500, height: 300 })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/geomap.html#Example
def geo_map
  # GeoMap examples: one map shaded by region, one with city markers.
  # Both tables share the same two-column shape, so build them with one helper.
  build_table = lambda do |rows|
    table = GoogleVisualr::DataTable.new
    table.new_column('string', 'Country')
    table.new_column('number', 'Popularity')
    table.add_rows(rows.size)
    rows.each_with_index do |(place, value), row|
      table.set_cell(row, 0, place)
      table.set_cell(row, 1, value)
    end
    table
  end
  # Regions Example
  regions_table = build_table.call([['Germany', 200], ['United States', 300], ['Brazil', 400],
                                    ['Canada', 500], ['France', 600], ['RU', 700]])
  @chart_regions = GoogleVisualr::Interactive::GeoMap.new(regions_table, { dataMode: 'regions' })
  # Markers Example
  markers_table = build_table.call([['New York', 200], ['Boston', 300], ['Miami', 400],
                                    ['Chicago', 500], ['Los Angeles', 600], ['Houston', 700]])
  marker_opts = { dataMode: 'markers', region: 'US', colors: ['0xFF8747', '0xFFB581', '0xc06000'] }
  @chart_markers = GoogleVisualr::Interactive::GeoMap.new(markers_table, marker_opts)
end
# https://developers.google.com/chart/interactive/docs/gallery/histogram#Example1
def histogram
  # Histogram example: distribution of dinosaur body lengths in meters.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Dinosaur')
  data_table.new_column('number', 'Length')
  lengths = [
    ['Acrocanthosaurus (top-spined lizard)', 12.2],
    ['Albertosaurus (Alberta lizard)', 9.1],
    ['Allosaurus (other lizard)', 12.2],
    ['Apatosaurus (deceptive lizard)', 22.9],
    ['Archaeopteryx (ancient wing)', 0.9],
    ['Argentinosaurus (Argentina lizard)', 36.6],
    ['Baryonyx (heavy claws)', 9.1],
    ['Brachiosaurus (arm lizard)', 30.5],
    ['Ceratosaurus (horned lizard)', 6.1],
    ['Coelophysis (hollow form)', 2.7],
    ['Compsognathus (elegant jaw)', 0.9],
    ['Deinonychus (terrible claw)', 2.7],
    ['Diplodocus (double beam)', 27.1],
    ['Dromicelomimus (emu mimic)', 3.4],
    ['Gallimimus (fowl mimic)', 5.5],
    ['Mamenchisaurus (Mamenchi lizard)', 21.0],
    ['Megalosaurus (big lizard)', 7.9],
    ['Microvenator (small hunter)', 1.2],
    ['Ornithomimus (bird mimic)', 4.6],
    ['Oviraptor (egg robber)', 1.5],
    ['Plateosaurus (flat lizard)', 7.9],
    ['Sauronithoides (narrow-clawed lizard)', 2.0],
    ['Seismosaurus (tremor lizard)', 45.7],
    ['Spinosaurus (spiny lizard)', 12.2],
    ['Supersaurus (super lizard)', 30.5],
    ['Tyrannosaurus (tyrant lizard)', 15.2],
    ['Ultrasaurus (ultra lizard)', 30.5],
    ['Velociraptor (swift robber)', 1.8]
  ]
  data_table.add_rows(lengths)
  opts = { width: 700, height: 400, title: 'Lengths of dinosaurs, in meters', legend: { position: 'none' } }
  @chart = GoogleVisualr::Interactive::Histogram.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/intensitymap.html
def intensity_map
  # IntensityMap example: population and land area for five large countries.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', '', 'Country')
  data_table.new_column('number', 'Population (mil)', 'a')
  data_table.new_column('number', 'Area (km2)', 'b')
  countries = [
    ['CN', 1324, 9640821],
    ['IN', 1133, 3287263],
    ['US', 304, 9629091],
    ['ID', 232, 1904569],
    ['BR', 187, 8514877]
  ]
  data_table.add_rows(countries.size)
  countries.each_with_index do |(code, population, area), row|
    data_table.set_cell(row, 0, code)
    data_table.set_cell(row, 1, population)
    data_table.set_cell(row, 2, area)
  end
  @chart = GoogleVisualr::Interactive::IntensityMap.new(data_table, {})
end
# http://code.google.com/apis/chart/interactive/docs/gallery/linechart.html#Example
def line_chart
  # LineChart example: yearly sales vs expenses.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Year')
  data_table.new_column('number', 'Sales')
  data_table.new_column('number', 'Expenses')
  performance = [['2004', 1000, 400], ['2005', 1170, 460], ['2006', 860, 580], ['2007', 1030, 540]]
  data_table.add_rows(performance.size)
  performance.each_with_index do |(year, sales, expenses), row|
    data_table.set_cell(row, 0, year)
    data_table.set_cell(row, 1, sales)
    data_table.set_cell(row, 2, expenses)
  end
  opts = { width: 400, height: 240, title: 'Company Performance', legend: 'bottom' }
  @chart = GoogleVisualr::Interactive::LineChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/map.html
def map
  # Map example: four named pins at lat/lon coordinates.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('number', 'Lat')
  data_table.new_column('number', 'Lon')
  data_table.new_column('string', 'Name')
  places = [
    [37.4232, -122.0853, 'Work'],
    [37.4289, -122.1697, 'University'],
    [37.6153, -122.3900, 'Airport'],
    [37.4422, -122.1731, 'Shopping']
  ]
  data_table.add_rows(places.size)
  places.each_with_index do |(lat, lon, name), row|
    data_table.set_cell(row, 0, lat)
    data_table.set_cell(row, 1, lon)
    data_table.set_cell(row, 2, name)
  end
  @chart = GoogleVisualr::Interactive::Map.new(data_table, { showTip: true })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/motionchart.html#Example
def motion_chart
  # MotionChart example: fruit sales across two time periods.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Fruit')
  data_table.new_column('date', 'Date')
  data_table.new_column('number', 'Sales')
  data_table.new_column('number', 'Expenses')
  data_table.new_column('string', 'Location')
  data_table.add_rows([
    ['Apples', Date.parse("1988-01-01"), 1000, 300, 'East'],
    ['Oranges', Date.parse("1988-01-01"), 1150, 200, 'West'],
    ['Bananas', Date.parse("1988-01-01"), 300, 250, 'West'],
    ['Apples', Date.parse("1989-07-01"), 1200, 400, 'East'],
    ['Oranges', Date.parse("1989-07-01"), 750, 150, 'West'],
    ['Bananas', Date.parse("1989-07-01"), 788, 617, 'West']
  ])
  @chart = GoogleVisualr::Interactive::MotionChart.new(data_table, { width: 600, height: 300 })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/orgchart.html#Example
def org_chart
  # OrgChart example: reporting hierarchy with HTML-formatted node labels
  # (hence allowHtml below).
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Name')
  data_table.new_column('string', 'Manager')
  data_table.new_column('string', 'ToolTip')
  data_table.add_rows([
    [{ v: 'Mike', f: 'Mike<div style="color:red; font-style:italic">President</div>' }, '', 'The President'],
    [{ v: 'Jim', f: 'Jim<div style="color:red; font-style:italic">Vice President<div>' }, 'Mike', 'VP'],
    ['Alice', 'Mike', ''],
    ['Bob', 'Jim', 'Bob Sponge'],
    ['Carol', 'Bob', '']
  ])
  @chart = GoogleVisualr::Interactive::OrgChart.new(data_table, { allowHtml: true })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/piechart.html#Example
def pie_chart
  # PieChart example: how the hours of a day are spent.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Task')
  data_table.new_column('number', 'Hours per Day')
  activities = [['Work', 11], ['Eat', 2], ['Commute', 2], ['Watch TV', 2], ['Sleep', 7]]
  data_table.add_rows(activities.size)
  activities.each_with_index do |(task, hours), row|
    data_table.set_cell(row, 0, task)
    data_table.set_cell(row, 1, hours)
  end
  opts = { width: 400, height: 240, title: 'My Daily Activities', is3D: true }
  @chart = GoogleVisualr::Interactive::PieChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/scatterchart.html#Example
def scatter_chart
  # ScatterChart example: age plotted against weight.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('number', 'Age')
  data_table.new_column('number', 'Weight')
  samples = [[8, 12], [4, 5.5], [11, 14], [4, 4.5], [3, 3.5], [6.5, 7]]
  data_table.add_rows(samples.size)
  samples.each_with_index do |(age, weight), row|
    data_table.set_cell(row, 0, age)
    data_table.set_cell(row, 1, weight)
  end
  opts = {
    width: 400, height: 240, title: 'Age vs. Weight comparison',
    hAxis: { title: 'Age', minValue: 0, maxValue: 15 },
    vAxis: { title: 'Weight', minValue: 0, maxValue: 15 },
    legend: 'none'
  }
  @chart = GoogleVisualr::Interactive::ScatterChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/steppedareachart.html#Example
def stepped_area_chart
  # SteppedAreaChart example: critic ratings for four film adaptations.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Director (Year)')
  data_table.new_column('number', 'Rotten Tomatoes')
  data_table.new_column('number', 'IMDB')
  data_table.add_rows([
    ['Alfred Hitchcock (1935)', 8.4, 7.9],
    ['Ralph Thomas (1959)', 6.9, 6.5],
    ['Don Sharp (1978)', 6.5, 6.4],
    ['James Hawes (2008)', 4.4, 6.2]
  ])
  opts = { width: 800, height: 500, title: "The decline of 'The 39 Steps'", vAxis: { title: 'Accumulated Rating' }, isStacked: true }
  @chart = GoogleVisualr::Interactive::SteppedAreaChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/table.html#Example
def table
  # Table example: employee salaries; salary cells carry both a raw value (v)
  # and a formatted display string (f).
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Name')
  data_table.new_column('number', 'Salary')
  data_table.new_column('boolean', 'Full Time Employee')
  employees = [
    ['Mike', { v: 10000, f: '$10,000' }, true],
    ['Jim', { v: 8000, f: '$8,000' }, false],
    ['Alice', { v: 12500, f: '$12,500' }, true],
    ['Bob', { v: 7000, f: '$7,000' }, true]
  ]
  data_table.add_rows(employees.size)
  employees.each_with_index do |(name, salary, full_time), row|
    data_table.set_cell(row, 0, name)
    data_table.set_cell(row, 1, salary)
    data_table.set_cell(row, 2, full_time)
  end
  @chart = GoogleVisualr::Interactive::Table.new(data_table, { width: 600, showRowNumber: true })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/treemap.html#Example
def tree_map
  # TreeMap example: market trade volume (cell size) and increase/decrease
  # (cell color) in a Global -> continent -> country hierarchy.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Region')
  data_table.new_column('string', 'Parent')
  data_table.new_column('number', 'Market trade volume (size)')
  data_table.new_column('number', 'Market increase/decrease (color)')
  data_table.add_rows([
    ['Global', nil, 0, 0],
    ['America', 'Global', 0, 0],
    ['Europe', 'Global', 0, 0],
    ['Asia', 'Global', 0, 0],
    ['Australia', 'Global', 0, 0],
    ['Africa', 'Global', 0, 0],
    ['Brazil', 'America', 11, 10],
    ['USA', 'America', 52, 31],
    ['Mexico', 'America', 24, 12],
    ['Canada', 'America', 16, -23],
    ['France', 'Europe', 42, -11],
    ['Germany', 'Europe', 31, -2],
    ['Sweden', 'Europe', 22, -13],
    ['Italy', 'Europe', 17, 4],
    ['UK', 'Europe', 21, -5],
    ['China', 'Asia', 36, 4],
    ['Japan', 'Asia', 20, -12],
    ['India', 'Asia', 40, 63],
    ['Laos', 'Asia', 4, 34],
    ['Mongolia', 'Asia', 1, -5],
    ['Israel', 'Asia', 12, 24],
    ['Iran', 'Asia', 18, 13],
    ['Pakistan', 'Asia', 11, -52],
    ['Egypt', 'Africa', 21, 0],
    ['S. Africa', 'Africa', 30, 43],
    ['Sudan', 'Africa', 12, 2],
    ['Congo', 'Africa', 10, 12],
    ['Zair', 'Africa', 8, 10]
  ])
  opts = { width: 600, height: 600, minColor: '#f00', midColor: '#ddd', maxColor: '#0d0', headerHeight: 15, fontColor: 'black', showScale: true }
  @chart = GoogleVisualr::Interactive::TreeMap.new(data_table, opts)
end
end
| class Examples::InteractiveController < ApplicationController
layout "default"
# http://code.google.com/apis/chart/interactive/docs/gallery/annotatedtimeline.html#Example
def annotated_time_line
  # AnnotatedTimeLine example: pencil and pen sales with event annotations
  # (title/text column pairs follow each numeric series).
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('date', 'Date')
  data_table.new_column('number', 'Sold Pencils')
  data_table.new_column('string', 'title1')
  data_table.new_column('string', 'text1')
  data_table.new_column('number', 'Sold Pens')
  data_table.new_column('string', 'title2')
  data_table.new_column('string', 'text2')
  data_table.add_rows([
    [Date.parse("2008-2-1"), 30000, '', '', 40645, '', ''],
    [Date.parse("2008-2-2"), 14045, '', '', 20374, '', ''],
    [Date.parse("2008-2-3"), 55022, '', '', 50766, '', ''],
    [Date.parse("2008-2-4"), 75284, '', '', 14334, 'Out of Stock', 'Ran out of stock on pens at 4pm'],
    [Date.parse("2008-2-5"), 41476, 'Bought Pens', 'Bought 200k pens', 66467, '', ''],
    [Date.parse("2008-2-6"), 33322, '', '', 39463, '', '']
  ])
  @chart = GoogleVisualr::Interactive::AnnotatedTimeLine.new(data_table, { displayAnnotations: true })
end
# https://developers.google.com/chart/interactive/docs/gallery/annotationchart#example
def annotation_chart
  # AnnotationChart example: two fictional missions with annotated events.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('date', 'Date')
  data_table.new_column('number', 'Kepler-22b mission')
  data_table.new_column('string', 'Kepler title')
  data_table.new_column('string', 'Kepler text')
  data_table.new_column('number', 'Gliese 163 mission')
  data_table.new_column('string', 'Gliese title')
  data_table.new_column('string', 'Gliese text')
  data_table.add_rows([
    [Date.parse("2314-2-15"), 12400, nil, nil, 10645, nil, nil],
    [Date.parse("2314-2-16"), 24045, 'Lalibertines', 'First encounter', 12374, nil, nil],
    [Date.parse("2314-2-17"), 35022, 'Lalibertines', 'They are very tall', 15766, 'Gallantors', 'First Encounter'],
    [Date.parse("2314-2-18"), 12284, 'Lalibertines', 'Attack on our crew!', 34334, 'Gallantors', 'Statement of shared principles'],
    [Date.parse("2314-2-19"), 8476, 'Lalibertines', 'Heavy casualties', 66467, 'Gallantors', 'Mysteries revealed'],
    [Date.parse("2314-2-20"), 0, 'Lalibertines', 'All crew lost', 79463, 'Gallantors', 'Omniscience achieved']
  ])
  @chart = GoogleVisualr::Interactive::AnnotationChart.new(data_table, { displayAnnotations: true })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/areachart.html#Example
def area_chart
  # AreaChart example: yearly sales vs expenses.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Year')
  data_table.new_column('number', 'Sales')
  data_table.new_column('number', 'Expenses')
  data_table.add_rows([
    ['2004', 1000, 400],
    ['2005', 1170, 460],
    ['2006', 660, 1120],
    ['2007', 1030, 540]
  ])
  opts = { width: 400, height: 240, title: 'Company Performance', hAxis: { title: 'Year', titleTextStyle: { color: '#FF0000' } } }
  @chart = GoogleVisualr::Interactive::AreaChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/barchart.html#Example
def bar_chart
  # BarChart example: yearly sales vs expenses as horizontal bars.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Year')
  data_table.new_column('number', 'Sales')
  data_table.new_column('number', 'Expenses')
  performance = [['2004', 1000, 400], ['2005', 1170, 460], ['2006', 660, 1120], ['2007', 1030, 540]]
  data_table.add_rows(performance.size)
  performance.each_with_index do |(year, sales, expenses), row|
    data_table.set_cell(row, 0, year)
    data_table.set_cell(row, 1, sales)
    data_table.set_cell(row, 2, expenses)
  end
  opts = { width: 400, height: 240, title: 'Company Performance', vAxis: { title: 'Year', titleTextStyle: { color: 'red' } } }
  @chart = GoogleVisualr::Interactive::BarChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/bubblechart.html#Example
def bubble_chart
  # BubbleChart example: life expectancy vs fertility rate; bubble size is
  # population, bubble color is region.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'ID')
  data_table.new_column('number', 'Life Expectancy')
  data_table.new_column('number', 'Fertility Rate')
  data_table.new_column('string', 'Region')
  data_table.new_column('number', 'Population')
  data_table.add_rows([
    ['CAN', 80.66, 1.67, 'North America', 33739900],
    ['DEU', 79.84, 1.36, 'Europe', 81902307],
    ['DNK', 78.6, 1.84, 'Europe', 5523095],
    ['EGY', 72.73, 2.78, 'Middle East', 79716203],
    ['GBR', 80.05, 2, 'Europe', 61801570],
    ['IRN', 72.49, 1.7, 'Middle East', 73137148],
    ['IRQ', 68.09, 4.77, 'Middle East', 31090763],
    ['ISR', 81.55, 2.96, 'Middle East', 7485600],
    ['RUS', 68.6, 1.54, 'Europe', 141850000],
    ['USA', 78.09, 2.05, 'North America', 307007000]
  ])
  opts = {
    width: 800, height: 500,
    title: 'Correlation between life expectancy, fertility rate and population of some world countries (2010)',
    hAxis: { title: 'Life Expectancy' },
    vAxis: { title: 'Fertility Rate' },
    bubble: { textStyle: { fontSize: 11 } }
  }
  @chart = GoogleVisualr::Interactive::BubbleChart.new(data_table, opts)
end
# https://developers.google.com/chart/interactive/docs/gallery/calendar#a-simple-example
def calendar
  # Calendar example: Red Sox attendance by date.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('date', 'Date')
  data_table.new_column('number', 'Won/Loss')
  data_table.add_rows([
    [Date.parse("2012-3-13"), 37032],
    [Date.parse("2012-3-14"), 38024],
    [Date.parse("2012-3-15"), 38024],
    [Date.parse("2012-3-16"), 38108],
    [Date.parse("2012-3-17"), 38229],
    [Date.parse("2013-9-4"), 38177],
    [Date.parse("2013-9-5"), 38705],
    [Date.parse("2013-9-12"), 38210],
    [Date.parse("2013-9-13"), 38029],
    [Date.parse("2013-9-19"), 38823],
    [Date.parse("2013-9-23"), 38345],
    [Date.parse("2013-9-24"), 38436],
    [Date.parse("2013-9-30"), 38447]
  ])
  opts = { title: "Red Sox Attendance", width: 800, height: 300, calendar: { cellSize: 13.5 } }
  @chart = GoogleVisualr::Interactive::Calendar.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/candlestickchart.html
def candlestick_chart
  # CandlestickChart example: daily min/opening/closing/max values.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'day')
  data_table.new_column('number', 'min')
  data_table.new_column('number', 'opening')
  data_table.new_column('number', 'closing')
  data_table.new_column('number', 'max')
  data_table.add_rows([
    ['Mon', 20, 28, 38, 45],
    ['Tue', 31, 38, 55, 66],
    ['Wed', 50, 55, 77, 80],
    ['Thu', 50, 77, 66, 77],
    ['Fri', 15, 66, 22, 68]
  ])
  @chart = GoogleVisualr::Interactive::CandlestickChart.new(data_table, { width: 400, height: 240, legend: 'none' })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/columnchart.html#Example
def column_chart
  # ColumnChart example: yearly sales vs expenses as vertical columns.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Year')
  data_table.new_column('number', 'Sales')
  data_table.new_column('number', 'Expenses')
  performance = [['2004', 1000, 400], ['2005', 1170, 460], ['2006', 660, 1120], ['2007', 1030, 540]]
  data_table.add_rows(performance.size)
  performance.each_with_index do |(year, sales, expenses), row|
    data_table.set_cell(row, 0, year)
    data_table.set_cell(row, 1, sales)
    data_table.set_cell(row, 2, expenses)
  end
  opts = { width: 400, height: 240, title: 'Company Performance', hAxis: { title: 'Year', titleTextStyle: { color: 'red' } } }
  @chart = GoogleVisualr::Interactive::ColumnChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/combochart.html
def combo_chart
  # ComboChart example: monthly coffee production per country as bars, with
  # the cross-country average overlaid as a line (the :series option targets
  # column index '5', the Average series, regardless of its label).
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'month')
  data_table.new_column('number', 'Bolivia')
  data_table.new_column('number', 'Ecuador')
  data_table.new_column('number', 'Madagascar')
  data_table.new_column('number', 'Papua Guinea')
  data_table.new_column('number', 'Rwanda')
  # Fixed typo in the user-facing column label: was 'Avarage'.
  data_table.new_column('number', 'Average')
  data_table.add_rows(
    [
      ['2004/05', 165, 938, 522, 998, 450, 614.6],
      ['2005/06', 135, 1120, 599, 1268, 288, 682],
      ['2006/07', 157, 1167, 587, 807, 397, 623],
      ['2007/08', 139, 1110, 615, 968, 215, 609.4],
      ['2008/09', 136, 691, 629, 1026, 366, 569.6]
    ]
  )
  opts = { :width => 700, :height => 400, :title => 'Monthly Coffee Production by Country', :vAxis => { :title => 'Cups' }, :hAxis => { :title => 'Month' }, :seriesType => 'bars', :series => { '5' => { :type => 'line' } } }
  @chart = GoogleVisualr::Interactive::ComboChart.new(data_table, opts)
end
# https://developers.google.com/chart/interactive/docs/gallery/ganttchart#grouping-resources
def gantt_chart
  # GanttChart example: paper-writing tasks with dependencies, grouped by
  # resource.
  #
  # NOTE: the original used a nested `def days_to_milli` here; in Ruby that
  # (re)defines a public instance method on the controller class every time
  # this action runs. A lambda keeps the helper local to this action.
  days_to_milli = ->(days) { days * 24 * 60 * 60 * 1000 }
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Task ID')
  data_table.new_column('string', 'Task Name')
  data_table.new_column('string', 'Resource')
  data_table.new_column('date', 'Start Date')
  data_table.new_column('date', 'End Date')
  data_table.new_column('number', 'Duration')
  data_table.new_column('number', 'Percent Complete')
  data_table.new_column('string', 'Dependencies')
  data_table.add_rows(
    [
      ['Research', 'Find sources', nil, Date.parse("2015-1-1"), Date.parse("2015-1-5"), nil, 100, nil],
      ['Write', 'Write paper', 'write', nil, Date.parse("2015-1-9"), days_to_milli.call(3), 25, 'Research, Outline'],
      ['Cite', 'Create bibliography', 'write', nil, Date.parse("2015-1-7"), days_to_milli.call(1), 20, 'Research'],
      ['Complete', 'Hand in paper', 'complete', nil, Date.parse("2015-1-10"), days_to_milli.call(1), 0, 'Cite, Write'],
      ['Outline', 'Outline paper', 'write', nil, Date.parse("2015-1-6"), days_to_milli.call(1), 100, 'Research']
    ]
  )
  opts = { version: "1.1", height: 275 }
  @chart = GoogleVisualr::Interactive::GanttChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/gauge.html#Example
def gauge
  # Gauge example: three dials (Memory, CPU, Network) with red/yellow bands.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Label')
  data_table.new_column('number', 'Value')
  readings = [['Memory', 80], ['CPU', 55], ['Network', 68]]
  data_table.add_rows(readings.size)
  readings.each_with_index do |(label, value), row|
    data_table.set_cell(row, 0, label)
    data_table.set_cell(row, 1, value)
  end
  opts = { width: 400, height: 120, redFrom: 90, redTo: 100, yellowFrom: 75, yellowTo: 90, minorTicks: 5 }
  @chart = GoogleVisualr::Interactive::Gauge.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/geochart.html#Example
def geo_chart
  # GeoChart example: popularity by country rendered on a world map.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Country')
  data_table.new_column('number', 'Popularity')
  popularity = [['Germany', 200], ['United States', 300], ['Brazil', 400],
                ['Canada', 500], ['France', 600], ['RU', 700]]
  data_table.add_rows(popularity.size)
  popularity.each_with_index do |(country, value), row|
    data_table.set_cell(row, 0, country)
    data_table.set_cell(row, 1, value)
  end
  @chart = GoogleVisualr::Interactive::GeoChart.new(data_table, { width: 500, height: 300 })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/geomap.html#Example
def geo_map
  # GeoMap examples: one map shaded by region, one with city markers.
  # Both tables share the same two-column shape, so build them with one helper.
  build_table = lambda do |rows|
    table = GoogleVisualr::DataTable.new
    table.new_column('string', 'Country')
    table.new_column('number', 'Popularity')
    table.add_rows(rows.size)
    rows.each_with_index do |(place, value), row|
      table.set_cell(row, 0, place)
      table.set_cell(row, 1, value)
    end
    table
  end
  # Regions Example
  regions_table = build_table.call([['Germany', 200], ['United States', 300], ['Brazil', 400],
                                    ['Canada', 500], ['France', 600], ['RU', 700]])
  @chart_regions = GoogleVisualr::Interactive::GeoMap.new(regions_table, { dataMode: 'regions' })
  # Markers Example
  markers_table = build_table.call([['New York', 200], ['Boston', 300], ['Miami', 400],
                                    ['Chicago', 500], ['Los Angeles', 600], ['Houston', 700]])
  marker_opts = { dataMode: 'markers', region: 'US', colors: ['0xFF8747', '0xFFB581', '0xc06000'] }
  @chart_markers = GoogleVisualr::Interactive::GeoMap.new(markers_table, marker_opts)
end
# https://developers.google.com/chart/interactive/docs/gallery/histogram#Example1
def histogram
  # Histogram example: distribution of dinosaur body lengths in meters.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Dinosaur')
  data_table.new_column('number', 'Length')
  lengths = [
    ['Acrocanthosaurus (top-spined lizard)', 12.2],
    ['Albertosaurus (Alberta lizard)', 9.1],
    ['Allosaurus (other lizard)', 12.2],
    ['Apatosaurus (deceptive lizard)', 22.9],
    ['Archaeopteryx (ancient wing)', 0.9],
    ['Argentinosaurus (Argentina lizard)', 36.6],
    ['Baryonyx (heavy claws)', 9.1],
    ['Brachiosaurus (arm lizard)', 30.5],
    ['Ceratosaurus (horned lizard)', 6.1],
    ['Coelophysis (hollow form)', 2.7],
    ['Compsognathus (elegant jaw)', 0.9],
    ['Deinonychus (terrible claw)', 2.7],
    ['Diplodocus (double beam)', 27.1],
    ['Dromicelomimus (emu mimic)', 3.4],
    ['Gallimimus (fowl mimic)', 5.5],
    ['Mamenchisaurus (Mamenchi lizard)', 21.0],
    ['Megalosaurus (big lizard)', 7.9],
    ['Microvenator (small hunter)', 1.2],
    ['Ornithomimus (bird mimic)', 4.6],
    ['Oviraptor (egg robber)', 1.5],
    ['Plateosaurus (flat lizard)', 7.9],
    ['Sauronithoides (narrow-clawed lizard)', 2.0],
    ['Seismosaurus (tremor lizard)', 45.7],
    ['Spinosaurus (spiny lizard)', 12.2],
    ['Supersaurus (super lizard)', 30.5],
    ['Tyrannosaurus (tyrant lizard)', 15.2],
    ['Ultrasaurus (ultra lizard)', 30.5],
    ['Velociraptor (swift robber)', 1.8]
  ]
  data_table.add_rows(lengths)
  opts = { width: 700, height: 400, title: 'Lengths of dinosaurs, in meters', legend: { position: 'none' } }
  @chart = GoogleVisualr::Interactive::Histogram.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/intensitymap.html
def intensity_map
  # IntensityMap example: population and land area for five large countries.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', '', 'Country')
  data_table.new_column('number', 'Population (mil)', 'a')
  data_table.new_column('number', 'Area (km2)', 'b')
  countries = [
    ['CN', 1324, 9640821],
    ['IN', 1133, 3287263],
    ['US', 304, 9629091],
    ['ID', 232, 1904569],
    ['BR', 187, 8514877]
  ]
  data_table.add_rows(countries.size)
  countries.each_with_index do |(code, population, area), row|
    data_table.set_cell(row, 0, code)
    data_table.set_cell(row, 1, population)
    data_table.set_cell(row, 2, area)
  end
  @chart = GoogleVisualr::Interactive::IntensityMap.new(data_table, {})
end
# http://code.google.com/apis/chart/interactive/docs/gallery/linechart.html#Example
def line_chart
  # LineChart example: yearly sales vs expenses.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Year')
  data_table.new_column('number', 'Sales')
  data_table.new_column('number', 'Expenses')
  performance = [['2004', 1000, 400], ['2005', 1170, 460], ['2006', 860, 580], ['2007', 1030, 540]]
  data_table.add_rows(performance.size)
  performance.each_with_index do |(year, sales, expenses), row|
    data_table.set_cell(row, 0, year)
    data_table.set_cell(row, 1, sales)
    data_table.set_cell(row, 2, expenses)
  end
  opts = { width: 400, height: 240, title: 'Company Performance', legend: 'bottom' }
  @chart = GoogleVisualr::Interactive::LineChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/map.html
def map
  # Map example: four named pins at lat/lon coordinates.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('number', 'Lat')
  data_table.new_column('number', 'Lon')
  data_table.new_column('string', 'Name')
  places = [
    [37.4232, -122.0853, 'Work'],
    [37.4289, -122.1697, 'University'],
    [37.6153, -122.3900, 'Airport'],
    [37.4422, -122.1731, 'Shopping']
  ]
  data_table.add_rows(places.size)
  places.each_with_index do |(lat, lon, name), row|
    data_table.set_cell(row, 0, lat)
    data_table.set_cell(row, 1, lon)
    data_table.set_cell(row, 2, name)
  end
  @chart = GoogleVisualr::Interactive::Map.new(data_table, { showTip: true })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/motionchart.html#Example
def motion_chart
  # MotionChart example: fruit sales across two time periods.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Fruit')
  data_table.new_column('date', 'Date')
  data_table.new_column('number', 'Sales')
  data_table.new_column('number', 'Expenses')
  data_table.new_column('string', 'Location')
  data_table.add_rows([
    ['Apples', Date.parse("1988-01-01"), 1000, 300, 'East'],
    ['Oranges', Date.parse("1988-01-01"), 1150, 200, 'West'],
    ['Bananas', Date.parse("1988-01-01"), 300, 250, 'West'],
    ['Apples', Date.parse("1989-07-01"), 1200, 400, 'East'],
    ['Oranges', Date.parse("1989-07-01"), 750, 150, 'West'],
    ['Bananas', Date.parse("1989-07-01"), 788, 617, 'West']
  ])
  @chart = GoogleVisualr::Interactive::MotionChart.new(data_table, { width: 600, height: 300 })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/orgchart.html#Example
def org_chart
  # OrgChart example: reporting hierarchy with HTML-formatted node labels
  # (hence allowHtml below).
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Name')
  data_table.new_column('string', 'Manager')
  data_table.new_column('string', 'ToolTip')
  data_table.add_rows([
    [{ v: 'Mike', f: 'Mike<div style="color:red; font-style:italic">President</div>' }, '', 'The President'],
    [{ v: 'Jim', f: 'Jim<div style="color:red; font-style:italic">Vice President<div>' }, 'Mike', 'VP'],
    ['Alice', 'Mike', ''],
    ['Bob', 'Jim', 'Bob Sponge'],
    ['Carol', 'Bob', '']
  ])
  @chart = GoogleVisualr::Interactive::OrgChart.new(data_table, { allowHtml: true })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/piechart.html#Example
def pie_chart
  # PieChart example: how the hours of a day are spent.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'Task')
  data_table.new_column('number', 'Hours per Day')
  activities = [['Work', 11], ['Eat', 2], ['Commute', 2], ['Watch TV', 2], ['Sleep', 7]]
  data_table.add_rows(activities.size)
  activities.each_with_index do |(task, hours), row|
    data_table.set_cell(row, 0, task)
    data_table.set_cell(row, 1, hours)
  end
  opts = { width: 400, height: 240, title: 'My Daily Activities', is3D: true }
  @chart = GoogleVisualr::Interactive::PieChart.new(data_table, opts)
end
# https://developers.google.com/chart/interactive/docs/gallery/sankey#a-simple-example
def sankey
  # Sankey example: weighted flows from sources A/B to targets X/Y/Z.
  data_table = GoogleVisualr::DataTable.new
  data_table.new_column('string', 'From')
  data_table.new_column('string', 'To')
  data_table.new_column('number', 'Weight')
  data_table.add_rows([
    ['A', 'X', 5],
    ['A', 'Y', 7],
    ['A', 'Z', 6],
    ['B', 'X', 2],
    ['B', 'Y', 9],
    ['B', 'Z', 4]
  ])
  @chart = GoogleVisualr::Interactive::Sankey.new(data_table, { width: 600 })
end
# http://code.google.com/apis/chart/interactive/docs/gallery/scatterchart.html#Example
def scatter_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('number', 'Age')
data_table.new_column('number', 'Weight')
data_table.add_rows(6)
data_table.set_cell( 0, 0, 8 )
data_table.set_cell( 0, 1, 12 )
data_table.set_cell( 1, 0, 4 )
data_table.set_cell( 1, 1, 5.5)
data_table.set_cell( 2, 0, 11 )
data_table.set_cell( 2, 1, 14 )
data_table.set_cell( 3, 0, 4 )
data_table.set_cell( 3, 1, 4.5)
data_table.set_cell( 4, 0, 3 )
data_table.set_cell( 4, 1, 3.5)
data_table.set_cell( 5, 0, 6.5)
data_table.set_cell( 5, 1, 7 )
opts = {
:width => 400, :height => 240, :title => 'Age vs. Weight comparison',
:hAxis => { :title => 'Age', :minValue => 0, :maxValue => 15 },
:vAxis => { :title => 'Weight', :minValue => 0, :maxValue => 15 },
:legend => 'none'
}
@chart = GoogleVisualr::Interactive::ScatterChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/steppedareachart.html#Example
def stepped_area_chart
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string' , 'Director (Year)')
data_table.new_column('number' , 'Rotten Tomatoes')
data_table.new_column('number' , 'IMDB')
data_table.add_rows(
[
['Alfred Hitchcock (1935)', 8.4, 7.9],
['Ralph Thomas (1959)', 6.9, 6.5],
['Don Sharp (1978)', 6.5, 6.4],
['James Hawes (2008)', 4.4, 6.2]
]
)
opts = { :width => 800, :height => 500, :title => "The decline of 'The 39 Steps'", :vAxis => { :title => 'Accumulated Rating' }, :isStacked => true }
@chart = GoogleVisualr::Interactive::SteppedAreaChart.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/table.html#Example
def table
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string' , 'Name')
data_table.new_column('number' , 'Salary')
data_table.new_column('boolean' , 'Full Time Employee')
data_table.add_rows(4)
data_table.set_cell(0, 0, 'Mike' )
data_table.set_cell(0, 1, {:v => 10000, :f => '$10,000'})
data_table.set_cell(0, 2, true )
data_table.set_cell(1, 0, 'Jim' )
data_table.set_cell(1, 1, {:v => 8000 , :f => '$8,000' })
data_table.set_cell(1, 2, false )
data_table.set_cell(2, 0, 'Alice' )
data_table.set_cell(2, 1, {:v => 12500, :f => '$12,500'})
data_table.set_cell(2, 2, true )
data_table.set_cell(3, 0, 'Bob' )
data_table.set_cell(3, 1, {:v => 7000 , :f => '$7,000' })
data_table.set_cell(3, 2, true )
opts = { :width => 600, :showRowNumber => true }
@chart = GoogleVisualr::Interactive::Table.new(data_table, opts)
end
# http://code.google.com/apis/chart/interactive/docs/gallery/treemap.html#Example
def tree_map
data_table = GoogleVisualr::DataTable.new
data_table.new_column('string', 'Region')
data_table.new_column('string', 'Parent')
data_table.new_column('number', 'Market trade volume (size)')
data_table.new_column('number', 'Market increase/decrease (color)')
data_table.add_rows(
[
["Global" , nil , 0 , 0 ],
["America" , "Global" , 0 , 0 ],
["Europe" , "Global" , 0 , 0 ],
["Asia" , "Global" , 0 , 0 ],
["Australia", "Global" , 0 , 0 ],
["Africa" , "Global" , 0 , 0 ],
["Brazil" , "America" , 11, 10 ],
["USA" , "America" , 52, 31 ],
["Mexico" , "America" , 24, 12 ],
["Canada" , "America" , 16, -23 ],
["France" , "Europe" , 42, -11 ],
["Germany" , "Europe" , 31, -2 ],
["Sweden" , "Europe" , 22, -13 ],
["Italy" , "Europe" , 17, 4 ],
["UK" , "Europe" , 21, -5 ],
["China" , "Asia" , 36, 4 ],
["Japan" , "Asia" , 20, -12 ],
["India" , "Asia" , 40, 63 ],
["Laos" , "Asia" , 4 , 34 ],
["Mongolia" , "Asia" , 1 , -5 ],
["Israel" , "Asia" , 12, 24 ],
["Iran" , "Asia" , 18, 13 ],
["Pakistan" , "Asia" , 11, -52 ],
["Egypt" , "Africa" , 21, 0 ],
["S. Africa", "Africa" , 30, 43 ],
["Sudan" , "Africa" , 12, 2 ],
["Congo" , "Africa" , 10, 12 ],
["Zair" , "Africa" , 8 , 10 ]
]
)
opts = { :width => 600, :height => 600, :minColor => '#f00', :midColor => '#ddd', :maxColor => '#0d0', :headerHeight => 15, :fontColor => 'black', :showScale => true }
@chart = GoogleVisualr::Interactive::TreeMap.new(data_table, opts)
end
# https://developers.google.com/chart/interactive/docs/gallery/wordtree#a-simple-example
def word_tree
data_table = GoogleVisualr::DataTable.new
data_table.new_column("string", "Phrases")
data_table.add_rows(
[
['cats are better than dogs'],
['cats eat kibble'],
['cats are better than hamsters'],
['cats are awesome'],
['cats are people too'],
['cats eat mice'],
['cats meowing'],
['cats in the cradle'],
['cats eat mice'],
['cats in the cradle lyrics'],
['cats eat kibble'],
['cats for adoption'],
['cats are family'],
['cats eat mice'],
['cats are better than kittens'],
['cats are evil'],
['cats are weird'],
['cats eat mice']
]
)
opts = { wordtree: { format: 'implicit', word: 'cats' } }
@chart = GoogleVisualr::Interactive::WordTree.new(data_table, opts)
end
end
|
derkork/intellij-leiningen-plugin | 39 | src/de/janthomae/leiningenplugin/run/LeiningenRunConfiguration.java | package de.janthomae.leiningenplugin.run;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.Executor;
import com.intellij.execution.configurations.*;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.JDOMExternalizable;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.xmlb.XmlSerializer;
import de.janthomae.leiningenplugin.LeiningenConstants;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
/**
* @author <a href="janthomae@janthomae.de">Jan Thomä</a>
* @version $Id:$
*/
public class LeiningenRunConfiguration extends RunConfigurationBase
implements LocatableConfiguration, ModuleRunProfile {
private LeiningenRunnerParameters myRunnerParams = new LeiningenRunnerParameters(new ArrayList<String>(), "");
public LeiningenRunConfiguration(Project project, ConfigurationFactory factory, String name) {
super(project, factory, name);
}
public boolean isGeneratedName() {
return false;
}
public String suggestedName() {
return null;
}
public SettingsEditor<? extends RunConfiguration> getConfigurationEditor() {
return new LeiningenRunConfigurationSettings(getProject());
}
public JDOMExternalizable createRunnerSettings(ConfigurationInfoProvider configurationInfoProvider) {
return null;
}
public SettingsEditor<JDOMExternalizable> getRunnerSettingsEditor(ProgramRunner programRunner) {
return null;
}
@NotNull
public Module[] getModules() {
return Module.EMPTY_ARRAY;
}
public RunProfileState getState(@NotNull Executor executor, @NotNull ExecutionEnvironment executionEnvironment)
throws ExecutionException {
LeiningenCommandLineState state =
new LeiningenCommandLineState(LeiningenRunnerSettings.getInstance(), myRunnerParams,
executionEnvironment);
state.setConsoleBuilder(TextConsoleBuilderFactory.getInstance().createBuilder(getProject()));
return state;
}
public void checkConfiguration() throws RuntimeConfigurationException {
String wd = myRunnerParams.getWorkingDirectory();
if (wd.isEmpty()) {
throw new RuntimeConfigurationError("You need to specify a working directory.");
}
VirtualFile vf = LocalFileSystem.getInstance().findFileByPath(wd);
if (vf != null && vf.exists()) {
VirtualFile vf2 = vf.findChild(LeiningenConstants.PROJECT_CLJ);
if (vf2 == null || !vf2.isValid()) {
throw new RuntimeConfigurationError(
"There is no Leiningen project file in the selected working directory.");
}
} else {
throw new RuntimeConfigurationError("The selected working directory does not exist.");
}
if (myRunnerParams.getGoals().isEmpty()) {
throw new RuntimeConfigurationError("You need to specify at least one goal.");
}
}
public void setRunnerParams(@NotNull LeiningenRunnerParameters myRunnerParams) {
this.myRunnerParams = myRunnerParams;
}
@NotNull
public LeiningenRunnerParameters getRunnerParams() {
return myRunnerParams;
}
@Override
public void readExternal(Element element) throws InvalidDataException {
super.readExternal(element);
final Element child = element.getChild("LeiningenRunnerParameters");
if (child != null) {
myRunnerParams = XmlSerializer.deserialize(child, LeiningenRunnerParameters.class);
}
}
@Override
public void writeExternal(Element element) throws WriteExternalException {
super.writeExternal(element);
element.addContent(XmlSerializer.serialize(myRunnerParams));
}
}
| package de.janthomae.leiningenplugin.run;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.Executor;
import com.intellij.execution.configurations.*;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.options.SettingsEditor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.InvalidDataException;
import com.intellij.openapi.util.WriteExternalException;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.xmlb.XmlSerializer;
import de.janthomae.leiningenplugin.LeiningenConstants;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import java.util.ArrayList;
/**
* @author <a href="janthomae@janthomae.de">Jan Thomä</a>
* @version $Id:$
*/
public class LeiningenRunConfiguration extends RunConfigurationBase
implements LocatableConfiguration, ModuleRunProfile {
private LeiningenRunnerParameters myRunnerParams = new LeiningenRunnerParameters(new ArrayList<String>(), "");
public LeiningenRunConfiguration(Project project, ConfigurationFactory factory, String name) {
super(project, factory, name);
}
public boolean isGeneratedName() {
return false;
}
public String suggestedName() {
return null;
}
public SettingsEditor<? extends RunConfiguration> getConfigurationEditor() {
return new LeiningenRunConfigurationSettings(getProject());
}
@NotNull
public Module[] getModules() {
return Module.EMPTY_ARRAY;
}
public RunProfileState getState(@NotNull Executor executor, @NotNull ExecutionEnvironment executionEnvironment)
throws ExecutionException {
LeiningenCommandLineState state =
new LeiningenCommandLineState(LeiningenRunnerSettings.getInstance(), myRunnerParams,
executionEnvironment);
state.setConsoleBuilder(TextConsoleBuilderFactory.getInstance().createBuilder(getProject()));
return state;
}
public void checkConfiguration() throws RuntimeConfigurationException {
String wd = myRunnerParams.getWorkingDirectory();
if (wd.isEmpty()) {
throw new RuntimeConfigurationError("You need to specify a working directory.");
}
VirtualFile vf = LocalFileSystem.getInstance().findFileByPath(wd);
if (vf != null && vf.exists()) {
VirtualFile vf2 = vf.findChild(LeiningenConstants.PROJECT_CLJ);
if (vf2 == null || !vf2.isValid()) {
throw new RuntimeConfigurationError(
"There is no Leiningen project file in the selected working directory.");
}
} else {
throw new RuntimeConfigurationError("The selected working directory does not exist.");
}
if (myRunnerParams.getGoals().isEmpty()) {
throw new RuntimeConfigurationError("You need to specify at least one goal.");
}
}
public void setRunnerParams(@NotNull LeiningenRunnerParameters myRunnerParams) {
this.myRunnerParams = myRunnerParams;
}
@NotNull
public LeiningenRunnerParameters getRunnerParams() {
return myRunnerParams;
}
@Override
public void readExternal(Element element) throws InvalidDataException {
super.readExternal(element);
final Element child = element.getChild("LeiningenRunnerParameters");
if (child != null) {
myRunnerParams = XmlSerializer.deserialize(child, LeiningenRunnerParameters.class);
}
}
@Override
public void writeExternal(Element element) throws WriteExternalException {
super.writeExternal(element);
element.addContent(XmlSerializer.serialize(myRunnerParams));
}
}
|
paulopmx/Flexigrid | 136 | js/flexigrid.js | /*
* Flexigrid for jQuery - v1.1
*
* Copyright (c) 2008 Paulo P. Marinas (code.google.com/p/flexigrid/)
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
*/
(function ($) {
/*
* jQuery 1.9 support. browser object has been removed in 1.9
*/
var browser = $.browser
if (!browser) {
function uaMatch( ua ) {
ua = ua.toLowerCase();
var match = /(chrome)[ \/]([\w.]+)/.exec( ua ) ||
/(webkit)[ \/]([\w.]+)/.exec( ua ) ||
/(opera)(?:.*version|)[ \/]([\w.]+)/.exec( ua ) ||
/(msie) ([\w.]+)/.exec( ua ) ||
ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec( ua ) ||
[];
return {
browser: match[ 1 ] || "",
version: match[ 2 ] || "0"
};
};
var matched = uaMatch( navigator.userAgent );
browser = {};
if ( matched.browser ) {
browser[ matched.browser ] = true;
browser.version = matched.version;
}
// Chrome is Webkit, but Webkit is also Safari.
if ( browser.chrome ) {
browser.webkit = true;
} else if ( browser.webkit ) {
browser.safari = true;
}
}
/*!
* START code from jQuery UI
*
* Copyright 2011, AUTHORS.txt (http://jqueryui.com/about)
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* http://docs.jquery.com/UI
*/
if(typeof $.support.selectstart != 'function') {
$.support.selectstart = "onselectstart" in document.createElement("div");
}
if(typeof $.fn.disableSelection != 'function') {
$.fn.disableSelection = function() {
return this.bind( ( $.support.selectstart ? "selectstart" : "mousedown" ) +
".ui-disableSelection", function( event ) {
event.preventDefault();
});
};
}
/* END code from jQuery UI */
$.addFlex = function (t, p) {
if (t.grid) return false; //return if already exist
p = $.extend({ //apply default properties
height: 200, //default height
width: 'auto', //auto width
striped: true, //apply odd even stripes
novstripe: false,
minwidth: 30, //min width of columns
minheight: 80, //min height of columns
resizable: true, //allow table resizing
url: false, //URL if using data from AJAX
method: 'POST', //data sending method
dataType: 'xml', //type of data for AJAX, either xml or json
errormsg: 'Connection Error',
usepager: false,
nowrap: true,
page: 1, //current page
total: 1, //total pages
useRp: true, //use the results per page select box
rp: 15, //results per page
rpOptions: [10, 15, 20, 30, 50], //allowed per-page values
title: false,
idProperty: 'id',
pagestat: 'Displaying {from} to {to} of {total} items',
pagetext: 'Page',
outof: 'of',
findtext: 'Find',
params: [], //allow optional parameters to be passed around
procmsg: 'Processing, please wait ...',
query: '',
qtype: '',
nomsg: 'No items',
minColToggle: 1, //minimum allowed column to be hidden
showToggleBtn: true, //show or hide column toggle popup
hideOnSubmit: true,
autoload: true,
blockOpacity: 0.5,
preProcess: false,
addTitleToCell: false, // add a title attr to cells with truncated contents
dblClickResize: false, //auto resize column by double clicking
onDragCol: false,
onToggleCol: false,
onChangeSort: false,
onDoubleClick: false,
onSuccess: false,
onError: false,
onSubmit: false, //using a custom populate function
__mw: { //extendable middleware function holding object
datacol: function(p, col, val) { //middleware for formatting data columns
var _col = (typeof p.datacol[col] == 'function') ? p.datacol[col](val) : val; //format column using function
if(typeof p.datacol['*'] == 'function') { //if wildcard function exists
return p.datacol['*'](_col); //run wildcard function
} else {
return _col; //return column without wildcard
}
}
},
getGridClass: function(g) { //get the grid class, always returns g
return g;
},
datacol: {}, //datacol middleware object 'colkey': function(colval) {}
colResize: true, //from: http://stackoverflow.com/a/10615589
colMove: true
}, p);
$(t).show() //show if hidden
.attr({
cellPadding: 0,
cellSpacing: 0,
border: 0
}) //remove padding and spacing
.removeAttr('width'); //remove width properties
//create grid class
var g = {
hset: {},
rePosDrag: function () {
var cdleft = 0 - this.hDiv.scrollLeft;
if (this.hDiv.scrollLeft > 0) cdleft -= Math.floor(p.cgwidth / 2);
$(g.cDrag).css({
top: g.hDiv.offsetTop + 1
});
var cdpad = this.cdpad;
var cdcounter=0;
$('div', g.cDrag).hide();
$('thead tr:first th:visible', this.hDiv).each(function () {
var n = $('thead tr:first th:visible', g.hDiv).index(this);
var cdpos = parseInt($('div', this).width());
if (cdleft == 0) cdleft -= Math.floor(p.cgwidth / 2);
cdpos = cdpos + cdleft + cdpad;
if (isNaN(cdpos)) {
cdpos = 0;
}
$('div:eq(' + n + ')', g.cDrag).css({
'left': (!(browser.mozilla) ? cdpos - cdcounter : cdpos) + 'px'
}).show();
cdleft = cdpos;
cdcounter++;
});
},
fixHeight: function (newH) {
newH = false;
if (!newH) newH = $(g.bDiv).height();
var hdHeight = $(this.hDiv).height();
$('div', this.cDrag).each(
function () {
$(this).height(newH + hdHeight);
}
);
var nd = parseInt($(g.nDiv).height(), 10);
if (nd > newH) $(g.nDiv).height(newH).width(200);
else $(g.nDiv).height('auto').width('auto');
$(g.block).css({
height: newH,
marginBottom: (newH * -1)
});
var hrH = g.bDiv.offsetTop + newH;
if (p.height != 'auto' && p.resizable) hrH = g.vDiv.offsetTop;
$(g.rDiv).css({
height: hrH
});
},
dragStart: function (dragtype, e, obj) { //default drag function start
if (dragtype == 'colresize' && p.colResize === true) {//column resize
$(g.nDiv).hide();
$(g.nBtn).hide();
var n = $('div', this.cDrag).index(obj);
var ow = $('th:visible div:eq(' + n + ')', this.hDiv).width();
$(obj).addClass('dragging').siblings().hide();
$(obj).prev().addClass('dragging').show();
this.colresize = {
startX: e.pageX,
ol: parseInt(obj.style.left, 10),
ow: ow,
n: n
};
$('body').css('cursor', 'col-resize');
} else if (dragtype == 'vresize') {//table resize
var hgo = false;
$('body').css('cursor', 'row-resize');
if (obj) {
hgo = true;
$('body').css('cursor', 'col-resize');
}
this.vresize = {
h: p.height,
sy: e.pageY,
w: p.width,
sx: e.pageX,
hgo: hgo
};
} else if (dragtype == 'colMove') {//column header drag
$(e.target).disableSelection(); //disable selecting the column header
if((p.colMove === true)) {
$(g.nDiv).hide();
$(g.nBtn).hide();
this.hset = $(this.hDiv).offset();
this.hset.right = this.hset.left + $('table', this.hDiv).width();
this.hset.bottom = this.hset.top + $('table', this.hDiv).height();
this.dcol = obj;
this.dcoln = $('th', this.hDiv).index(obj);
this.colCopy = document.createElement("div");
this.colCopy.className = "colCopy";
this.colCopy.innerHTML = obj.innerHTML;
if (browser.msie) {
this.colCopy.className = "colCopy ie";
}
$(this.colCopy).css({
position: 'absolute',
'float': 'left',
display: 'none',
textAlign: obj.align
});
$('body').append(this.colCopy);
$(this.cDrag).hide();
}
}
$('body').noSelect();
},
dragMove: function (e) {
if (this.colresize) {//column resize
var n = this.colresize.n;
var diff = e.pageX - this.colresize.startX;
var nleft = this.colresize.ol + diff;
var nw = this.colresize.ow + diff;
if (nw > p.minwidth) {
$('div:eq(' + n + ')', this.cDrag).css('left', nleft);
this.colresize.nw = nw;
}
} else if (this.vresize) {//table resize
var v = this.vresize;
var y = e.pageY;
var diff = y - v.sy;
if (!p.defwidth) p.defwidth = p.width;
if (p.width != 'auto' && !p.nohresize && v.hgo) {
var x = e.pageX;
var xdiff = x - v.sx;
var newW = v.w + xdiff;
if (newW > p.defwidth) {
this.gDiv.style.width = newW + 'px';
p.width = newW;
}
}
var newH = v.h + diff;
if ((newH > p.minheight || p.height < p.minheight) && !v.hgo) {
this.bDiv.style.height = newH + 'px';
p.height = newH;
this.fixHeight(newH);
}
v = null;
} else if (this.colCopy) {
$(this.dcol).addClass('thMove').removeClass('thOver');
if (e.pageX > this.hset.right || e.pageX < this.hset.left || e.pageY > this.hset.bottom || e.pageY < this.hset.top) {
//this.dragEnd();
$('body').css('cursor', 'move');
} else {
$('body').css('cursor', 'pointer');
}
$(this.colCopy).css({
top: e.pageY + 10,
left: e.pageX + 20,
display: 'block'
});
}
},
dragEnd: function () {
if (this.colresize) {
var n = this.colresize.n;
var nw = this.colresize.nw;
$('th:visible div:eq(' + n + ')', this.hDiv).css('width', nw);
$('tr', this.bDiv).each(
function () {
var $tdDiv = $('td:visible div:eq(' + n + ')', this);
$tdDiv.css('width', nw);
g.addTitleToCell($tdDiv);
}
);
this.hDiv.scrollLeft = this.bDiv.scrollLeft;
$('div:eq(' + n + ')', this.cDrag).siblings().show();
$('.dragging', this.cDrag).removeClass('dragging');
this.rePosDrag();
this.fixHeight();
this.colresize = false;
if ($.cookies) {
var name = p.colModel[n].name; // Store the widths in the cookies
$.cookie('flexiwidths/'+name, nw);
}
} else if (this.vresize) {
this.vresize = false;
} else if (this.colCopy) {
$(this.colCopy).remove();
if (this.dcolt !== null) {
if (this.dcoln > this.dcolt) $('th:eq(' + this.dcolt + ')', this.hDiv).before(this.dcol);
else $('th:eq(' + this.dcolt + ')', this.hDiv).after(this.dcol);
this.switchCol(this.dcoln, this.dcolt);
$(this.cdropleft).remove();
$(this.cdropright).remove();
this.rePosDrag();
if (p.onDragCol) {
p.onDragCol(this.dcoln, this.dcolt);
}
}
this.dcol = null;
this.hset = null;
this.dcoln = null;
this.dcolt = null;
this.colCopy = null;
$('.thMove', this.hDiv).removeClass('thMove');
$(this.cDrag).show();
}
$('body').css('cursor', 'default');
$('body').noSelect(false);
},
toggleCol: function (cid, visible) {
var ncol = $("th[axis='col" + cid + "']", this.hDiv)[0];
var n = $('thead th', g.hDiv).index(ncol);
var cb = $('input[value=' + cid + ']', g.nDiv)[0];
if (visible == null) {
visible = ncol.hidden;
}
if ($('input:checked', g.nDiv).length < p.minColToggle && !visible) {
return false;
}
if (visible) {
ncol.hidden = false;
$(ncol).show();
cb.checked = true;
} else {
ncol.hidden = true;
$(ncol).hide();
cb.checked = false;
}
$('tbody tr', t).each(
function () {
if (visible) {
$('td:eq(' + n + ')', this).show();
} else {
$('td:eq(' + n + ')', this).hide();
}
}
);
this.rePosDrag();
if (p.onToggleCol) {
p.onToggleCol(cid, visible);
}
return visible;
},
switchCol: function (cdrag, cdrop) { //switch columns
$('tbody tr', t).each(
function () {
if (cdrag > cdrop) $('td:eq(' + cdrop + ')', this).before($('td:eq(' + cdrag + ')', this));
else $('td:eq(' + cdrop + ')', this).after($('td:eq(' + cdrag + ')', this));
}
);
//switch order in nDiv
if (cdrag > cdrop) {
$('tr:eq(' + cdrop + ')', this.nDiv).before($('tr:eq(' + cdrag + ')', this.nDiv));
} else {
$('tr:eq(' + cdrop + ')', this.nDiv).after($('tr:eq(' + cdrag + ')', this.nDiv));
}
if (browser.msie && browser.version < 7.0) {
$('tr:eq(' + cdrop + ') input', this.nDiv)[0].checked = true;
}
this.hDiv.scrollLeft = this.bDiv.scrollLeft;
},
scroll: function () {
this.hDiv.scrollLeft = this.bDiv.scrollLeft;
this.rePosDrag();
},
addData: function (data) { //parse data
if (p.dataType == 'json') {
data = $.extend({rows: [], page: 0, total: 0}, data);
}
if (p.preProcess) {
data = p.preProcess(data);
}
$('.pReload', this.pDiv).removeClass('loading');
this.loading = false;
if (!data) {
$('.pPageStat', this.pDiv).html(p.errormsg);
if (p.onSuccess) p.onSuccess(this);
return false;
}
if (p.dataType == 'xml') {
p.total = +$('rows total', data).text();
} else {
p.total = data.total;
}
if (p.total === 0) {
$('tr, a, td, div', t).unbind();
$(t).empty();
p.pages = 1;
p.page = 1;
this.buildpager();
$('.pPageStat', this.pDiv).html(p.nomsg);
if (p.onSuccess) p.onSuccess(this);
return false;
}
p.pages = Math.ceil(p.total / p.rp);
if (p.dataType == 'xml') {
p.page = +$('rows page', data).text();
} else {
p.page = data.page;
}
this.buildpager();
//build new body
var tbody = document.createElement('tbody');
if (p.dataType == 'json') {
$.each(data.rows, function (i, row) {
var tr = document.createElement('tr');
var jtr = $(tr);
if (row.name) tr.name = row.name;
if (row.color) {
jtr.css('background',row.color);
} else {
if (i % 2 && p.striped) tr.className = 'erow';
}
if (row[p.idProperty]) {
tr.id = 'row' + row[p.idProperty];
jtr.attr('data-id', row[p.idProperty]);
}
$('thead tr:first th', g.hDiv).each( //add cell
function () {
var td = document.createElement('td');
var idx = $(this).attr('axis').substr(3);
td.align = this.align;
// If each row is the object itself (no 'cell' key)
if (typeof row.cell == 'undefined') {
td.innerHTML = row[p.colModel[idx].name];
} else {
// If the json elements aren't named (which is typical), use numeric order
var iHTML = '';
if (typeof row.cell[idx] != "undefined") {
iHTML = (row.cell[idx] !== null) ? row.cell[idx] : ''; //null-check for Opera-browser
} else {
iHTML = row.cell[p.colModel[idx].name];
}
td.innerHTML = p.__mw.datacol(p, $(this).attr('abbr'), iHTML); //use middleware datacol to format cols
}
// If the content has a <BGCOLOR=nnnnnn> option, decode it.
var offs = td.innerHTML.indexOf( '<BGCOLOR=' );
if( offs >0 ) {
$(td).css('background', text.substr(offs+7,7) );
}
$(td).attr('abbr', $(this).attr('abbr'));
$(tr).append(td);
td = null;
}
);
if ($('thead', this.gDiv).length < 1) {//handle if grid has no headers
for (idx = 0; idx < row.cell.length; idx++) {
var td = document.createElement('td');
// If the json elements aren't named (which is typical), use numeric order
if (typeof row.cell[idx] != "undefined") {
td.innerHTML = (row.cell[idx] != null) ? row.cell[idx] : '';//null-check for Opera-browser
} else {
td.innerHTML = row.cell[p.colModel[idx].name];
}
$(tr).append(td);
td = null;
}
}
$(tbody).append(tr);
tr = null;
});
} else if (p.dataType == 'xml') {
var i = 1;
$("rows row", data).each(function () {
i++;
var tr = document.createElement('tr');
if ($(this).attr('name')) tr.name = $(this).attr('name');
if ($(this).attr('color')) {
$(tr).css('background',$(this).attr('id'));
} else {
if (i % 2 && p.striped) tr.className = 'erow';
}
var nid = $(this).attr('id');
if (nid) {
tr.id = 'row' + nid;
}
nid = null;
var robj = this;
$('thead tr:first th', g.hDiv).each(function () {
var td = document.createElement('td');
var idx = $(this).attr('axis').substr(3);
td.align = this.align;
var text = $("cell:eq(" + idx + ")", robj).text();
var offs = text.indexOf( '<BGCOLOR=' );
if( offs >0 ) {
$(td).css('background', text.substr(offs+7,7) );
}
td.innerHTML = p.__mw.datacol(p, $(this).attr('abbr'), text); //use middleware datacol to format cols
$(td).attr('abbr', $(this).attr('abbr'));
$(tr).append(td);
td = null;
});
if ($('thead', this.gDiv).length < 1) {//handle if grid has no headers
$('cell', this).each(function () {
var td = document.createElement('td');
td.innerHTML = $(this).text();
$(tr).append(td);
td = null;
});
}
$(tbody).append(tr);
tr = null;
robj = null;
});
}
$('tr', t).unbind();
$(t).empty();
$(t).append(tbody);
this.addCellProp();
this.addRowProp();
this.rePosDrag();
tbody = null;
data = null;
i = null;
if (p.onSuccess) {
p.onSuccess(this);
}
if (p.hideOnSubmit) {
$(g.block).remove();
}
this.hDiv.scrollLeft = this.bDiv.scrollLeft;
if (browser.opera) {
$(t).css('visibility', 'visible');
}
},
changeSort: function (th) { //change sortorder
if (this.loading) {
return true;
}
$(g.nDiv).hide();
$(g.nBtn).hide();
if (p.sortname == $(th).attr('abbr')) {
if (p.sortorder == 'asc') {
p.sortorder = 'desc';
} else {
p.sortorder = 'asc';
}
}
$(th).addClass('sorted').siblings().removeClass('sorted');
$('.sdesc', this.hDiv).removeClass('sdesc');
$('.sasc', this.hDiv).removeClass('sasc');
$('div', th).addClass('s' + p.sortorder);
p.sortname = $(th).attr('abbr');
if (p.onChangeSort) {
p.onChangeSort(p.sortname, p.sortorder);
} else {
this.populate();
}
},
buildpager: function () { //rebuild pager based on new properties
$('.pcontrol input', this.pDiv).val(p.page);
$('.pcontrol span', this.pDiv).html(p.pages);
var r1 = (p.page - 1) * p.rp + 1;
var r2 = r1 + p.rp - 1;
if (p.total < r2) {
r2 = p.total;
}
var stat = p.pagestat;
stat = stat.replace(/{from}/, r1);
stat = stat.replace(/{to}/, r2);
stat = stat.replace(/{total}/, p.total);
$('.pPageStat', this.pDiv).html(stat);
},
populate: function () { //get latest data
if (this.loading) {
return true;
}
if (p.onSubmit) {
var gh = p.onSubmit();
if (!gh) {
return false;
}
}
this.loading = true;
if (!p.url) {
return false;
}
$('.pPageStat', this.pDiv).html(p.procmsg);
$('.pReload', this.pDiv).addClass('loading');
$(g.block).css({
top: g.bDiv.offsetTop
});
if (p.hideOnSubmit) {
$(this.gDiv).prepend(g.block);
}
if (browser.opera) {
$(t).css('visibility', 'hidden');
}
if (!p.newp) {
p.newp = 1;
}
if (p.page > p.pages) {
p.page = p.pages;
}
var param = [{
name: 'page',
value: p.newp
}, {
name: 'rp',
value: p.rp
}, {
name: 'sortname',
value: p.sortname
}, {
name: 'sortorder',
value: p.sortorder
}, {
name: 'query',
value: p.query
}, {
name: 'qtype',
value: p.qtype
}];
if (p.params.length) {
for (var pi = 0; pi < p.params.length; pi++) {
param[param.length] = p.params[pi];
}
}
$.ajax({
type: p.method,
url: p.url,
data: param,
dataType: p.dataType,
success: function (data) {
g.addData(data);
},
error: function (XMLHttpRequest, textStatus, errorThrown) {
try {
if (p.onError) p.onError(XMLHttpRequest, textStatus, errorThrown);
} catch (e) {}
}
});
},
doSearch: function () {
p.query = $('input[name=q]', g.sDiv).val();
p.qtype = $('select[name=qtype]', g.sDiv).val();
p.newp = 1;
this.populate();
},
changePage: function (ctype) { //change page
if (this.loading) {
return true;
}
switch (ctype) {
case 'first':
p.newp = 1;
break;
case 'prev':
if (p.page > 1) {
p.newp = parseInt(p.page, 10) - 1;
}
break;
case 'next':
if (p.page < p.pages) {
p.newp = parseInt(p.page, 10) + 1;
}
break;
case 'last':
p.newp = p.pages;
break;
case 'input':
var nv = parseInt($('.pcontrol input', this.pDiv).val(), 10);
if (isNaN(nv)) {
nv = 1;
}
if (nv < 1) {
nv = 1;
} else if (nv > p.pages) {
nv = p.pages;
}
$('.pcontrol input', this.pDiv).val(nv);
p.newp = nv;
break;
}
if (p.newp == p.page) {
return false;
}
if (p.onChangePage) {
p.onChangePage(p.newp);
} else {
this.populate();
}
},
addCellProp: function () {
$('tbody tr td', g.bDiv).each(function () {
var tdDiv = document.createElement('div');
var n = $('td', $(this).parent()).index(this);
var pth = $('th:eq(' + n + ')', g.hDiv).get(0);
if (pth != null) {
if (p.sortname == $(pth).attr('abbr') && p.sortname) {
this.className = 'sorted';
}
$(tdDiv).css({
textAlign: pth.align,
width: $('div:first', pth)[0].style.width
});
if (pth.hidden) {
$(this).css('display', 'none');
}
}
if (p.nowrap == false) {
$(tdDiv).css('white-space', 'normal');
}
if (this.innerHTML == '') {
this.innerHTML = ' ';
}
tdDiv.innerHTML = this.innerHTML;
var prnt = $(this).parent()[0];
var pid = false;
if (prnt.id) {
pid = prnt.id.substr(3);
}
if (pth != null) {
if (pth.process) pth.process(tdDiv, pid);
}
$(this).empty().append(tdDiv).removeAttr('width'); //wrap content
g.addTitleToCell(tdDiv);
});
},
getCellDim: function (obj) {// get cell prop for editable event
var ht = parseInt($(obj).height(), 10);
var pht = parseInt($(obj).parent().height(), 10);
var wt = parseInt(obj.style.width, 10);
var pwt = parseInt($(obj).parent().width(), 10);
var top = obj.offsetParent.offsetTop;
var left = obj.offsetParent.offsetLeft;
var pdl = parseInt($(obj).css('paddingLeft'), 10);
var pdt = parseInt($(obj).css('paddingTop'), 10);
return {
ht: ht,
wt: wt,
top: top,
left: left,
pdl: pdl,
pdt: pdt,
pht: pht,
pwt: pwt
};
},
// Wire selection behaviour onto every body row: click toggles selection,
// shift/ctrl/meta enable multi-select, double-click fires the user callback,
// and shift-hover extends a range selection.
addRowProp: function () {
    $('tbody tr', g.bDiv).on('click', function (e) {
        var obj = (e.target || e.srcElement);
        // Don't hijack clicks on links or form controls inside the row.
        if (obj.href || obj.type) return true;
        if (e.ctrlKey || e.metaKey) {
            // mousedown already took care of this case
            return;
        }
        $(this).toggleClass('trSelected');
        if (p.singleSelect && ! g.multisel) {
            $(this).siblings().removeClass('trSelected');
        }
    }).on('mousedown', function (e) {
        if (e.shiftKey) {
            $(this).toggleClass('trSelected');
            g.multisel = true;
            this.focus();
            // Suppress text selection while range-selecting.
            $(g.gDiv).noSelect();
        }
        if (e.ctrlKey || e.metaKey) {
            $(this).toggleClass('trSelected');
            g.multisel = true;
            this.focus();
        }
    }).on('mouseup', function (e) {
        if (g.multisel && ! (e.ctrlKey || e.metaKey)) {
            g.multisel = false;
            $(g.gDiv).noSelect(false);
        }
    }).on('dblclick', function () {
        if (p.onDoubleClick) {
            p.onDoubleClick(this, g, p);
        }
    }).hover(function (e) {
        // Dragging over rows with shift held extends the selection.
        if (g.multisel && e.shiftKey) {
            $(this).toggleClass('trSelected');
        }
    }, function () {});
    // IE6 lacks :hover on <tr>; emulate it.
    if (browser.msie && browser.version < 7.0) {
        $(this).hover(function () {
            $(this).addClass('trOver');
        }, function () {
            $(this).removeClass('trOver');
        });
    }
},
combo_flag: true,
combo_resetIndex: function(selObj)
{
if(this.combo_flag) {
selObj.selectedIndex = 0;
}
this.combo_flag = true;
},
// Run the action bound to the currently selected combo option, then reset
// the combo so the same action can be picked again.
combo_doSelectAction: function(selObj)
{
    // SECURITY NOTE(review): the option value is executed as JavaScript via
    // eval(). Values come from p.combobuttons (grid configuration set by the
    // page author), so this is as trusted as the page itself — but never
    // route user-supplied strings into combobuttons' onpress.
    eval( selObj.options[selObj.selectedIndex].value );
    selObj.selectedIndex = 0;
    this.combo_flag = false;
},
//Add title attribute to div if cell contents is truncated
addTitleToCell: function(tdDiv) {
if(p.addTitleToCell) {
var $span = $('<span />').css('display', 'none'),
$div = (tdDiv instanceof jQuery) ? tdDiv : $(tdDiv),
div_w = $div.outerWidth(),
span_w = 0;
$('body').children(':first').before($span);
$span.html($div.html());
$span.css('font-size', '' + $div.css('font-size'));
$span.css('padding-left', '' + $div.css('padding-left'));
span_w = $span.innerWidth();
$span.remove();
if(span_w > div_w) {
$div.attr('title', $div.text());
} else {
$div.removeAttr('title');
}
}
},
// Double-click auto-fit: size column n to the widest of its header text and
// every visible cell's text, measured via a temporary off-screen span, then
// finish through the normal drag-resize path.
autoResizeColumn: function (obj) {
    if(!p.dblClickResize) {
        return;
    }
    var n = $('div', this.cDrag).index(obj),
        $th = $('th:visible div:eq(' + n + ')', this.hDiv),
        ol = parseInt(obj.style.left, 10),
        ow = $th.width(),
        nw = 0,
        nl = 0,
        $span = $('<span />');
    $('body').children(':first').before($span);
    // Start with the header's natural width.
    $span.html($th.html());
    $span.css('font-size', '' + $th.css('font-size'));
    $span.css('padding-left', '' + $th.css('padding-left'));
    $span.css('padding-right', '' + $th.css('padding-right'));
    nw = $span.width();
    // Widen to the largest cell in the column.
    $('tr', this.bDiv).each(function () {
        var $tdDiv = $('td:visible div:eq(' + n + ')', this),
            spanW = 0;
        $span.html($tdDiv.html());
        $span.css('font-size', '' + $tdDiv.css('font-size'));
        $span.css('padding-left', '' + $tdDiv.css('padding-left'));
        $span.css('padding-right', '' + $tdDiv.css('padding-right'));
        spanW = $span.width();
        nw = (spanW > nw) ? spanW : nw;
    });
    $span.remove();
    // Respect the configured minimum, move the drag handle accordingly,
    // and let dragEnd() apply the new width everywhere.
    nw = (p.minWidth > nw) ? p.minWidth : nw;
    nl = ol + (nw - ow);
    $('div:eq(' + n + ')', this.cDrag).css('left', nl);
    this.colresize = {
        nw: nw,
        n: n
    };
    g.dragEnd();
},
pager: 0
};
g = p.getGridClass(g); //get the grid class
if (p.colModel) { //create model if any
thead = document.createElement('thead');
var tr = document.createElement('tr');
for (var i = 0; i < p.colModel.length; i++) {
var cm = p.colModel[i];
var th = document.createElement('th');
$(th).attr('axis', 'col' + i);
if( cm ) { // only use cm if its defined
if ($.cookies) {
var cookie_width = 'flexiwidths/'+cm.name; // Re-Store the widths in the cookies
if( $.cookie(cookie_width) != undefined ) {
cm.width = $.cookie(cookie_width);
}
}
if( cm.display != undefined ) {
th.innerHTML = cm.display;
}
if (cm.name && cm.sortable) {
$(th).attr('abbr', cm.name);
}
if (cm.align) {
th.align = cm.align;
}
if (cm.width) {
$(th).attr('width', cm.width);
}
if ($(cm).attr('hide')) {
th.hidden = true;
}
if (cm.process) {
th.process = cm.process;
}
} else {
th.innerHTML = "";
$(th).attr('width',30);
}
$(tr).append(th);
}
$(thead).append(tr);
$(t).prepend(thead);
} // end if p.colmodel
//init divs
g.gDiv = document.createElement('div'); //create global container
g.mDiv = document.createElement('div'); //create title container
g.hDiv = document.createElement('div'); //create header container
g.bDiv = document.createElement('div'); //create body container
g.vDiv = document.createElement('div'); //create grip
g.rDiv = document.createElement('div'); //create horizontal resizer
g.cDrag = document.createElement('div'); //create column drag
g.block = document.createElement('div'); //creat blocker
g.nDiv = document.createElement('div'); //create column show/hide popup
g.nBtn = document.createElement('div'); //create column show/hide button
g.iDiv = document.createElement('div'); //create editable layer
g.tDiv = document.createElement('div'); //create toolbar
g.sDiv = document.createElement('div');
g.pDiv = document.createElement('div'); //create pager container
if(p.colResize === false) { //don't display column drag if we are not using it
$(g.cDrag).css('display', 'none');
}
if (!p.usepager) {
g.pDiv.style.display = 'none';
}
g.hTable = document.createElement('table');
g.gDiv.className = 'flexigrid';
if (p.width != 'auto') {
g.gDiv.style.width = p.width + (isNaN(p.width) ? '' : 'px');
}
//add conditional classes
if (browser.msie) {
$(g.gDiv).addClass('ie');
}
if (p.novstripe) {
$(g.gDiv).addClass('novstripe');
}
$(t).before(g.gDiv);
$(g.gDiv).append(t);
//set toolbar
if (p.buttons) {
g.tDiv.className = 'tDiv';
var tDiv2 = document.createElement('div');
tDiv2.className = 'tDiv2';
for (var i = 0; i < p.buttons.length; i++) {
var btn = p.buttons[i];
if (!btn.separator) {
var btnDiv = document.createElement('div');
btnDiv.className = 'fbutton';
btnDiv.innerHTML = ("<div><span>") + (btn.hidename ? " " : btn.name) + ("</span></div>");
if (btn.bclass) $('span', btnDiv).addClass(btn.bclass).css({
paddingLeft: 20
});
if (btn.bimage) // if bimage defined, use its string as an image url for this buttons style (RS)
$('span',btnDiv).css( 'background', 'url('+btn.bimage+') no-repeat center left' );
$('span',btnDiv).css( 'paddingLeft', 20 );
if (btn.tooltip) // add title if exists (RS)
$('span',btnDiv)[0].title = btn.tooltip;
btnDiv.onpress = btn.onpress;
btnDiv.name = btn.name;
if (btn.id) {
btnDiv.id = btn.id;
}
if (btn.onpress) {
$(btnDiv).click(function () {
this.onpress(this.id || this.name, g.gDiv);
});
}
$(tDiv2).append(btnDiv);
if (browser.msie && browser.version < 7.0) {
$(btnDiv).hover(function () {
$(this).addClass('fbOver');
}, function () {
$(this).removeClass('fbOver');
});
}
} else {
$(tDiv2).append("<div class='btnseparator'></div>");
}
}
$(g.tDiv).append(tDiv2);
$(g.tDiv).append("<div style='clear:both'></div>");
$(g.gDiv).prepend(g.tDiv);
}
g.hDiv.className = 'hDiv';
// Define a combo button set with custom action'ed calls when clicked.
// NOTE(review): g.tDiv2 is never assigned anywhere, so $(g.tDiv2) is
// $(undefined) — an empty but *truthy* jQuery object. The condition
// therefore effectively tests only p.combobuttons. Likewise the append at
// the bottom uses the global '.tDiv2' selector rather than this grid's
// toolbar; with multiple grids on a page the combo lands in every toolbar.
if( p.combobuttons && $(g.tDiv2) )
{
    var btnDiv = document.createElement('div');
    btnDiv.className = 'fbutton';
    var tSelect = document.createElement('select');
    // change fires the selected action; click re-arms/resets the index.
    $(tSelect).change( function () { g.combo_doSelectAction( tSelect ) } );
    $(tSelect).click( function () { g.combo_resetIndex( tSelect) } );
    tSelect.className = 'cselect';
    $(btnDiv).append(tSelect);
    for (i=0;i<p.combobuttons.length;i++)
    {
        var btn = p.combobuttons[i];
        if (!btn.separator)
        {
            var btnOpt = document.createElement('option');
            btnOpt.innerHTML = btn.name;
            if (btn.bclass)
                $(btnOpt)
                    .addClass(btn.bclass)
                    .css({paddingLeft:20})
                ;
            if (btn.bimage) // if bimage defined, use its string as an image url for this buttons style (RS)
                $(btnOpt).css( 'background', 'url('+btn.bimage+') no-repeat center left' );
            $(btnOpt).css( 'paddingLeft', 20 );
            if (btn.tooltip) // add title if exists (RS)
                $(btnOpt)[0].title = btn.tooltip;
            if (btn.onpress)
            {
                // String stored here is later eval()'d by combo_doSelectAction.
                btnOpt.value = btn.onpress;
            }
            $(tSelect).append(btnOpt);
        }
    }
    $('.tDiv2').append(btnDiv);
}
$(t).before(g.hDiv);
g.hTable.cellPadding = 0;
g.hTable.cellSpacing = 0;
$(g.hDiv).append('<div class="hDivBox"></div>');
$('div', g.hDiv).append(g.hTable);
var thead = $("thead:first", t).get(0);
if (thead) $(g.hTable).append(thead);
thead = null;
if (!p.colmodel) var ci = 0;
$('thead tr:first th', g.hDiv).each(function () {
var thdiv = document.createElement('div');
if ($(this).attr('abbr')) {
$(this).click(function (e) {
if (!$(this).hasClass('thOver')) return false;
var obj = (e.target || e.srcElement);
if (obj.href || obj.type) return true;
g.changeSort(this);
});
if ($(this).attr('abbr') == p.sortname) {
this.className = 'sorted';
thdiv.className = 's' + p.sortorder;
}
}
if (this.hidden) {
$(this).hide();
}
if (!p.colmodel) {
$(this).attr('axis', 'col' + ci++);
}
// if there isn't a default width, then the column headers don't match
// i'm sure there is a better way, but this at least stops it failing
if (this.width == '') {
this.width = 100;
}
$(thdiv).css({
textAlign: this.align,
width: this.width + 'px'
});
thdiv.innerHTML = this.innerHTML;
$(this).empty().append(thdiv).removeAttr('width').mousedown(function (e) {
g.dragStart('colMove', e, this);
}).hover(function () {
if (!g.colresize && !$(this).hasClass('thMove') && !g.colCopy) {
$(this).addClass('thOver');
}
if ($(this).attr('abbr') != p.sortname && !g.colCopy && !g.colresize && $(this).attr('abbr')) {
$('div', this).addClass('s' + p.sortorder);
} else if ($(this).attr('abbr') == p.sortname && !g.colCopy && !g.colresize && $(this).attr('abbr')) {
var no = (p.sortorder == 'asc') ? 'desc' : 'asc';
$('div', this).removeClass('s' + p.sortorder).addClass('s' + no);
}
if (g.colCopy) {
var n = $('th', g.hDiv).index(this);
if (n == g.dcoln) {
return false;
}
if (n < g.dcoln) {
$(this).append(g.cdropleft);
} else {
$(this).append(g.cdropright);
}
g.dcolt = n;
} else if (!g.colresize) {
var nv = $('th:visible', g.hDiv).index(this);
var onl = parseInt($('div:eq(' + nv + ')', g.cDrag).css('left'), 10);
var nw = jQuery(g.nBtn).outerWidth();
var nl = onl - nw + Math.floor(p.cgwidth / 2);
$(g.nDiv).hide();
$(g.nBtn).hide();
$(g.nBtn).css({
'left': nl,
top: g.hDiv.offsetTop
}).show();
var ndw = parseInt($(g.nDiv).width(), 10);
$(g.nDiv).css({
top: g.bDiv.offsetTop
});
if ((nl + ndw) > $(g.gDiv).width()) {
$(g.nDiv).css('left', onl - ndw + 1);
} else {
$(g.nDiv).css('left', nl);
}
if ($(this).hasClass('sorted')) {
$(g.nBtn).addClass('srtd');
} else {
$(g.nBtn).removeClass('srtd');
}
}
}, function () {
$(this).removeClass('thOver');
if ($(this).attr('abbr') != p.sortname) {
$('div', this).removeClass('s' + p.sortorder);
} else if ($(this).attr('abbr') == p.sortname) {
var no = (p.sortorder == 'asc') ? 'desc' : 'asc';
$('div', this).addClass('s' + p.sortorder).removeClass('s' + no);
}
if (g.colCopy) {
$(g.cdropleft).remove();
$(g.cdropright).remove();
g.dcolt = null;
}
}); //wrap content
});
//set bDiv
g.bDiv.className = 'bDiv';
$(t).before(g.bDiv);
$(g.bDiv).css({
height: (p.height == 'auto') ? 'auto' : p.height + "px"
}).scroll(function (e) {
g.scroll()
}).append(t);
if (p.height == 'auto') {
$('table', g.bDiv).addClass('autoht');
}
//add td & row properties
g.addCellProp();
g.addRowProp();
//set cDrag only if we are using it
if (p.colResize === true) {
var cdcol = $('thead tr:first th:first', g.hDiv).get(0);
if(cdcol !== null) {
g.cDrag.className = 'cDrag';
g.cdpad = 0;
g.cdpad += (isNaN(parseInt($('div', cdcol).css('borderLeftWidth'), 10)) ? 0 : parseInt($('div', cdcol).css('borderLeftWidth'), 10));
g.cdpad += (isNaN(parseInt($('div', cdcol).css('borderRightWidth'), 10)) ? 0 : parseInt($('div', cdcol).css('borderRightWidth'), 10));
g.cdpad += (isNaN(parseInt($('div', cdcol).css('paddingLeft'), 10)) ? 0 : parseInt($('div', cdcol).css('paddingLeft'), 10));
g.cdpad += (isNaN(parseInt($('div', cdcol).css('paddingRight'), 10)) ? 0 : parseInt($('div', cdcol).css('paddingRight'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('borderLeftWidth'), 10)) ? 0 : parseInt($(cdcol).css('borderLeftWidth'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('borderRightWidth'), 10)) ? 0 : parseInt($(cdcol).css('borderRightWidth'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('paddingLeft'), 10)) ? 0 : parseInt($(cdcol).css('paddingLeft'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('paddingRight'), 10)) ? 0 : parseInt($(cdcol).css('paddingRight'), 10));
$(g.bDiv).before(g.cDrag);
var cdheight = $(g.bDiv).height();
var hdheight = $(g.hDiv).height();
$(g.cDrag).css({
top: -hdheight + 'px'
});
$('thead tr:first th', g.hDiv).each(function() {
var cgDiv = document.createElement('div');
$(g.cDrag).append(cgDiv);
if (!p.cgwidth) {
p.cgwidth = $(cgDiv).width();
}
$(cgDiv).css({
height: cdheight + hdheight
}).mousedown(function(e) {
g.dragStart('colresize', e, this);
}).dblclick(function(e) {
g.autoResizeColumn(this);
});
if (browser.msie && browser.version < 7.0) {
g.fixHeight($(g.gDiv).height());
$(cgDiv).hover(function() {
g.fixHeight();
$(this).addClass('dragging');
}, function() {
if(!g.colresize) {
$(this).removeClass('dragging');
}
});
}
});
}
}
//add strip
if (p.striped) {
$('tbody tr:odd', g.bDiv).addClass('erow');
}
if (p.resizable && p.height != 'auto') {
g.vDiv.className = 'vGrip';
$(g.vDiv).mousedown(function (e) {
g.dragStart('vresize', e);
}).html('<span></span>');
$(g.bDiv).after(g.vDiv);
}
if (p.resizable && p.width != 'auto' && !p.nohresize) {
g.rDiv.className = 'hGrip';
$(g.rDiv).mousedown(function (e) {
g.dragStart('vresize', e, true);
}).html('<span></span>').css('height', $(g.gDiv).height());
if (browser.msie && browser.version < 7.0) {
$(g.rDiv).hover(function () {
$(this).addClass('hgOver');
}, function () {
$(this).removeClass('hgOver');
});
}
$(g.gDiv).append(g.rDiv);
}
// add pager
if (p.usepager) {
g.pDiv.className = 'pDiv';
g.pDiv.innerHTML = '<div class="pDiv2"></div>';
$(g.bDiv).after(g.pDiv);
var html = ' <div class="pGroup"> <div class="pFirst pButton"><span></span></div><div class="pPrev pButton"><span></span></div> </div> <div class="btnseparator"></div> <div class="pGroup"><span class="pcontrol">' + p.pagetext + ' <input type="text" size="4" value="1" /> ' + p.outof + ' <span> 1 </span></span></div> <div class="btnseparator"></div> <div class="pGroup"> <div class="pNext pButton"><span></span></div><div class="pLast pButton"><span></span></div> </div> <div class="btnseparator"></div> <div class="pGroup"> <div class="pReload pButton"><span></span></div> </div> <div class="btnseparator"></div> <div class="pGroup"><span class="pPageStat"></span></div>';
$('div', g.pDiv).html(html);
$('.pReload', g.pDiv).click(function () {
g.populate();
});
$('.pFirst', g.pDiv).click(function () {
g.changePage('first');
});
$('.pPrev', g.pDiv).click(function () {
g.changePage('prev');
});
$('.pNext', g.pDiv).click(function () {
g.changePage('next');
});
$('.pLast', g.pDiv).click(function () {
g.changePage('last');
});
$('.pcontrol input', g.pDiv).keydown(function (e) {
if (e.keyCode == 13) {
g.changePage('input');
}
});
if (browser.msie && browser.version < 7) $('.pButton', g.pDiv).hover(function () {
$(this).addClass('pBtnOver');
}, function () {
$(this).removeClass('pBtnOver');
});
if (p.useRp) {
var opt = '',
sel = '';
for (var nx = 0; nx < p.rpOptions.length; nx++) {
if (p.rp == p.rpOptions[nx]) sel = 'selected="selected"';
else sel = '';
opt += "<option value='" + p.rpOptions[nx] + "' " + sel + " >" + p.rpOptions[nx] + " </option>";
}
$('.pDiv2', g.pDiv).prepend("<div class='pGroup'><select name='rp'>" + opt + "</select></div> <div class='btnseparator'></div>");
$('select', g.pDiv).change(function () {
if (p.onRpChange) {
p.onRpChange(+this.value);
} else {
p.newp = 1;
p.rp = +this.value;
g.populate();
}
});
}
//add search button
if (p.searchitems) {
$('.pDiv2', g.pDiv).prepend("<div class='pGroup'> <div class='pSearch pButton'><span></span></div> </div> <div class='btnseparator'></div>");
$('.pSearch', g.pDiv).click(function () {
$(g.sDiv).slideToggle('fast', function () {
$('.sDiv:visible input:first', g.gDiv).trigger('focus');
});
});
//add search box
g.sDiv.className = 'sDiv';
var sitems = p.searchitems;
var sopt = '', sel = '';
for (var s = 0; s < sitems.length; s++) {
if (p.qtype === '' && sitems[s].isdefault === true) {
p.qtype = sitems[s].name;
sel = 'selected="selected"';
} else {
sel = '';
}
sopt += "<option value='" + sitems[s].name + "' " + sel + " >" + sitems[s].display + " </option>";
}
if (p.qtype === '') {
p.qtype = sitems[0].name;
}
$(g.sDiv).append("<div class='sDiv2'>" + p.findtext +
" <input type='text' value='" + p.query +"' size='30' name='q' class='qsbox' /> "+
" <select name='qtype'>" + sopt + "</select></div>");
//Split into separate selectors because of bug in jQuery 1.3.2
$('input[name=q]', g.sDiv).keydown(function (e) {
if (e.keyCode == 13) {
g.doSearch();
}
});
$('select[name=qtype]', g.sDiv).keydown(function (e) {
if (e.keyCode == 13) {
g.doSearch();
}
});
$('input[value=Clear]', g.sDiv).click(function () {
$('input[name=q]', g.sDiv).val('');
p.query = '';
g.doSearch();
});
$(g.bDiv).after(g.sDiv);
}
}
$(g.pDiv, g.sDiv).append("<div style='clear:both'></div>");
// add title
if (p.title) {
g.mDiv.className = 'mDiv';
g.mDiv.innerHTML = '<div class="ftitle">' + p.title + '</div>';
$(g.gDiv).prepend(g.mDiv);
if (p.showTableToggleBtn) {
$(g.mDiv).append('<div class="ptogtitle" title="Minimize/Maximize Table"><span></span></div>');
$('div.ptogtitle', g.mDiv).click(function () {
$(g.gDiv).toggleClass('hideBody');
$(this).toggleClass('vsble');
});
}
}
//setup cdrops
g.cdropleft = document.createElement('span');
g.cdropleft.className = 'cdropleft';
g.cdropright = document.createElement('span');
g.cdropright.className = 'cdropright';
//add block
g.block.className = 'gBlock';
var gh = $(g.bDiv).height();
var gtop = g.bDiv.offsetTop;
$(g.block).css({
width: g.bDiv.style.width,
height: gh,
background: 'white',
position: 'relative',
marginBottom: (gh * -1),
zIndex: 1,
top: gtop,
left: '0px'
});
$(g.block).fadeTo(0, p.blockOpacity);
// add column control
if ($('th', g.hDiv).length) {
g.nDiv.className = 'nDiv';
g.nDiv.innerHTML = "<table cellpadding='0' cellspacing='0'><tbody></tbody></table>";
$(g.nDiv).css({
marginBottom: (gh * -1),
display: 'none',
top: gtop
}).noSelect();
var cn = 0;
$('th div', g.hDiv).each(function () {
var kcol = $("th[axis='col" + cn + "']", g.hDiv)[0];
var chk = 'checked="checked"';
if (kcol.style.display == 'none') {
chk = '';
}
$('tbody', g.nDiv).append('<tr><td class="ndcol1"><input type="checkbox" ' + chk + ' class="togCol" value="' + cn + '" /></td><td class="ndcol2">' + this.innerHTML + '</td></tr>');
cn++;
});
if (browser.msie && browser.version < 7.0) $('tr', g.nDiv).hover(function () {
$(this).addClass('ndcolover');
}, function () {
$(this).removeClass('ndcolover');
});
$('td.ndcol2', g.nDiv).click(function () {
if ($('input:checked', g.nDiv).length <= p.minColToggle && $(this).prev().find('input')[0].checked) return false;
return g.toggleCol($(this).prev().find('input').val());
});
$('input.togCol', g.nDiv).click(function () {
if ($('input:checked', g.nDiv).length < p.minColToggle && this.checked === false) return false;
$(this).parent().next().trigger('click');
});
$(g.gDiv).prepend(g.nDiv);
$(g.nBtn).addClass('nBtn')
.html('<div></div>')
.attr('title', 'Hide/Show Columns')
.click(function () {
$(g.nDiv).toggle();
return true;
}
);
if (p.showToggleBtn) {
$(g.gDiv).prepend(g.nBtn);
}
}
// add date edit layer
$(g.iDiv).addClass('iDiv').css({
display: 'none'
});
$(g.bDiv).append(g.iDiv);
// add flexigrid events
$(g.bDiv).hover(function () {
$(g.nDiv).hide();
$(g.nBtn).hide();
}, function () {
if (g.multisel) {
g.multisel = false;
}
});
$(g.gDiv).hover(function () {}, function () {
$(g.nDiv).hide();
$(g.nBtn).hide();
});
//add document events
$(document).mousemove(function (e) {
g.dragMove(e);
}).mouseup(function (e) {
g.dragEnd();
}).hover(function () {}, function () {
g.dragEnd();
});
//browser adjustments
if (browser.msie && browser.version < 7.0) {
$('.hDiv,.bDiv,.mDiv,.pDiv,.vGrip,.tDiv, .sDiv', g.gDiv).css({
width: '100%'
});
$(g.gDiv).addClass('ie6');
if (p.width != 'auto') {
$(g.gDiv).addClass('ie6fullwidthbug');
}
}
g.rePosDrag();
g.fixHeight();
//make grid functions accessible
t.p = p;
t.grid = g;
// load data
if (p.url && p.autoload) {
g.populate();
}
return t;
};
var docloaded = false;
$(document).ready(function () {
docloaded = true;
});
$.fn.flexigrid = function (p) { // turn each matched table into a flexigrid
    return this.each(function () {
        if (docloaded) {
            $.addFlex(this, p);
        } else {
            // DOM not ready yet: hide the table and defer construction.
            var el = this;
            $(this).hide();
            $(document).ready(function () {
                $.addFlex(el, p);
            });
        }
    });
}; //end flexigrid
$.fn.flexReload = function (p) { // repopulate every matched grid that was built from a URL
    return this.each(function () {
        if (this.grid && this.p.url) {
            this.grid.populate();
        }
    });
}; //end flexReload
$.fn.flexOptions = function (p) { // merge new option values into each grid's settings
    return this.each(function () {
        if (this.grid) {
            $.extend(this.p, p);
        }
    });
}; //end flexOptions
$.fn.flexToggleCol = function (cid, visible) { // show/hide column `cid` on each matched grid
    return this.each(function () {
        if (this.grid) {
            this.grid.toggleCol(cid, visible);
        }
    });
}; //end flexToggleCol
$.fn.flexAddData = function (data) { // feed a prepared data payload straight into each grid
    return this.each(function () {
        if (this.grid) {
            this.grid.addData(data);
        }
    });
};
$.fn.noSelect = function (p) { //no select plugin by me :-)
    // p omitted or null => prevent text selection; p === false => restore it.
    // BUG FIX: this previously read `(p === null) ? true : p`, so the common
    // no-argument call used throughout this file (e.g. $('body').noSelect())
    // produced prevent === undefined (falsy) and ran the *restore* branch,
    // i.e. selection was never suppressed during drags. `p == null` matches
    // both null and undefined, as in upstream flexigrid.
    var prevent = (p == null) ? true : p;
    if (prevent) {
        return this.each(function () {
            if (browser.msie || browser.safari) $(this).bind('selectstart', function () {
                return false;
            });
            else if (browser.mozilla) {
                $(this).css('MozUserSelect', 'none');
                $('body').trigger('focus');
            } else if (browser.opera) $(this).bind('mousedown', function () {
                return false;
            });
            else $(this).attr('unselectable', 'on');
        });
    } else {
        return this.each(function () {
            if (browser.msie || browser.safari) $(this).unbind('selectstart');
            else if (browser.mozilla) $(this).css('MozUserSelect', 'inherit');
            else if (browser.opera) $(this).unbind('mousedown');
            else $(this).removeAttr('unselectable', 'on');
        });
    }
}; //end noSelect
$.fn.flexSearch = function (p) { // run the current search on each grid configured with searchitems
    return this.each(function () {
        if (this.grid && this.p.searchitems) {
            this.grid.doSearch();
        }
    });
}; //end flexSearch
$.fn.selectedRows = function (p) { // Returns the selected rows as an array, taken and adapted from http://stackoverflow.com/questions/11868404/flexigrid-get-selected-row-columns-values
    var arReturn = [];
    var arRow = [];
    // FIX: jQuery removed the `.selector` property in 3.0, so
    // $(this.selector + ' .trSelected') breaks there. Searching the matched
    // set's descendants directly is equivalent and version-safe.
    var selector = this.find('.trSelected');
    $(selector).each(function (i, row) {
        arRow = [];
        var idr = $(row).data('id');
        $.each(row.cells, function (c, cell) {
            var col = cell.abbr;
            var val = cell.firstChild.innerHTML;
            if (val == ' ') val = ''; // Trim the content
            var idx = cell.cellIndex;
            arRow.push({
                Column: col, // Column identifier
                Value: val, // Column value
                CellIndex: idx, // Cell index
                RowIdentifier: idr // Identifier of this row element
            });
        });
        arReturn.push(arRow);
    });
    return arReturn;
};
})(jQuery);
| /*
* Flexigrid for jQuery - v1.1
*
* Copyright (c) 2008 Paulo P. Marinas (code.google.com/p/flexigrid/)
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
*/
(function ($) {
/*
* jQuery 1.9 support. browser object has been removed in 1.9
*/
var browser = $.browser
if (!browser) {
// Classify a user-agent string into { browser, version }, mirroring the
// jQuery.uaMatch helper that shipped before jQuery 1.9 removed $.browser.
function uaMatch(ua) {
    ua = ua.toLowerCase();
    // Engine patterns probed in priority order (chrome before webkit, etc.).
    var patterns = [
        /(chrome)[ \/]([\w.]+)/,
        /(webkit)[ \/]([\w.]+)/,
        /(opera)(?:.*version|)[ \/]([\w.]+)/,
        /(msie) ([\w.]+)/
    ];
    var match = null;
    for (var i = 0; i < patterns.length && !match; i++) {
        match = patterns[i].exec(ua);
    }
    // Generic Mozilla only counts when the UA is not an IE "compatible" string.
    if (!match && ua.indexOf("compatible") < 0) {
        match = /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua);
    }
    match = match || [];
    return {
        browser: match[1] || "",
        version: match[2] || "0"
    };
}
var matched = uaMatch( navigator.userAgent );
browser = {};
if ( matched.browser ) {
browser[ matched.browser ] = true;
browser.version = matched.version;
}
// Chrome is Webkit, but Webkit is also Safari.
if ( browser.chrome ) {
browser.webkit = true;
} else if ( browser.webkit ) {
browser.safari = true;
}
}
/*!
* START code from jQuery UI
*
* Copyright 2011, AUTHORS.txt (http://jqueryui.com/about)
* Dual licensed under the MIT or GPL Version 2 licenses.
* http://jquery.org/license
*
* http://docs.jquery.com/UI
*/
// Feature-detect the proprietary onselectstart event (IE/WebKit).
// NOTE(review): the guard tests `!= 'function'` although the value being set
// is a boolean — presumably it only means "not already defined by jQuery UI";
// kept as-is to avoid re-running the shim when the real UI plugin is loaded.
if(typeof $.support.selectstart != 'function') {
    $.support.selectstart = "onselectstart" in document.createElement("div");
}
// Provide $.fn.disableSelection only when jQuery UI hasn't already done so:
// cancel either selectstart (where supported) or mousedown to block text
// selection, namespaced so it can be unbound as a group.
if(typeof $.fn.disableSelection != 'function') {
    $.fn.disableSelection = function() {
        return this.bind( ( $.support.selectstart ? "selectstart" : "mousedown" ) +
            ".ui-disableSelection", function( event ) {
                event.preventDefault();
            });
    };
}
/* END code from jQuery UI */
$.addFlex = function (t, p) {
if (t.grid) return false; //return if already exist
p = $.extend({ //apply default properties
height: 200, //default height
width: 'auto', //auto width
striped: true, //apply odd even stripes
novstripe: false,
minwidth: 30, //min width of columns
minheight: 80, //min height of columns
resizable: true, //allow table resizing
url: false, //URL if using data from AJAX
method: 'POST', //data sending method
dataType: 'xml', //type of data for AJAX, either xml or json
errormsg: 'Connection Error',
usepager: false,
nowrap: true,
page: 1, //current page
total: 1, //total pages
useRp: true, //use the results per page select box
rp: 15, //results per page
rpOptions: [10, 15, 20, 30, 50], //allowed per-page values
title: false,
idProperty: 'id',
pagestat: 'Displaying {from} to {to} of {total} items',
pagetext: 'Page',
outof: 'of',
findtext: 'Find',
params: [], //allow optional parameters to be passed around
procmsg: 'Processing, please wait ...',
query: '',
qtype: '',
nomsg: 'No items',
minColToggle: 1, //minimum allowed column to be hidden
showToggleBtn: true, //show or hide column toggle popup
hideOnSubmit: true,
autoload: true,
blockOpacity: 0.5,
preProcess: false,
addTitleToCell: false, // add a title attr to cells with truncated contents
dblClickResize: false, //auto resize column by double clicking
onDragCol: false,
onToggleCol: false,
onChangeSort: false,
onDoubleClick: false,
onSuccess: false,
onError: false,
onSubmit: false, //using a custom populate function
__mw: { //extendable middleware function holding object
datacol: function(p, col, val) { //middleware for formatting data columns
var _col = (typeof p.datacol[col] == 'function') ? p.datacol[col](val) : val; //format column using function
if(typeof p.datacol['*'] == 'function') { //if wildcard function exists
return p.datacol['*'](_col); //run wildcard function
} else {
return _col; //return column without wildcard
}
}
},
getGridClass: function(g) { //get the grid class, always returns g
return g;
},
datacol: {}, //datacol middleware object 'colkey': function(colval) {}
colResize: true, //from: http://stackoverflow.com/a/10615589
colMove: true
}, p);
$(t).show() //show if hidden
.attr({
cellPadding: 0,
cellSpacing: 0,
border: 0
}) //remove padding and spacing
.removeAttr('width'); //remove width properties
//create grid class
var g = {
hset: {},
// Re-align the column-resize drag handles (cDrag children) with the current
// header layout, accounting for horizontal scroll and per-column padding.
rePosDrag: function () {
    var cdleft = 0 - this.hDiv.scrollLeft;
    if (this.hDiv.scrollLeft > 0) cdleft -= Math.floor(p.cgwidth / 2);
    $(g.cDrag).css({
        top: g.hDiv.offsetTop + 1
    });
    var cdpad = this.cdpad; // accumulated border+padding of the first header cell
    var cdcounter=0;
    $('div', g.cDrag).hide();
    $('thead tr:first th:visible', this.hDiv).each(function () {
        var n = $('thead tr:first th:visible', g.hDiv).index(this);
        var cdpos = parseInt($('div', this).width());
        if (cdleft == 0) cdleft -= Math.floor(p.cgwidth / 2);
        // Each handle sits at the right edge of its column.
        cdpos = cdpos + cdleft + cdpad;
        if (isNaN(cdpos)) {
            cdpos = 0;
        }
        $('div:eq(' + n + ')', g.cDrag).css({
            // Non-Gecko engines need a 1px-per-column correction here.
            'left': (!(browser.mozilla) ? cdpos - cdcounter : cdpos) + 'px'
        }).show();
        cdleft = cdpos;
        cdcounter++;
    });
},
// Resize the height-dependent chrome (drag handles, column popup, blocker
// overlay, horizontal grip) to match the body's current height.
// NOTE(review): `newH = false;` deliberately discards the caller-supplied
// height, so the body's measured height is always used instead. This odd
// line exists in upstream flexigrid as well; honoring the parameter would
// change layout behaviour, so it is documented rather than removed.
fixHeight: function (newH) {
    newH = false;
    if (!newH) newH = $(g.bDiv).height();
    var hdHeight = $(this.hDiv).height();
    // Drag handles span header + body.
    $('div', this.cDrag).each(
        function () {
            $(this).height(newH + hdHeight);
        }
    );
    // Clamp the column show/hide popup to the body height.
    var nd = parseInt($(g.nDiv).height(), 10);
    if (nd > newH) $(g.nDiv).height(newH).width(200);
    else $(g.nDiv).height('auto').width('auto');
    $(g.block).css({
        height: newH,
        marginBottom: (newH * -1)
    });
    var hrH = g.bDiv.offsetTop + newH;
    if (p.height != 'auto' && p.resizable) hrH = g.vDiv.offsetTop;
    $(g.rDiv).css({
        height: hrH
    });
},
// Begin one of the three drag interactions and stash its state on `this`:
// 'colresize' (column width), 'vresize' (table height/width grip), or
// 'colMove' (column reordering via a floating header copy).
dragStart: function (dragtype, e, obj) { //default drag function start
    if (dragtype == 'colresize' && p.colResize === true) {//column resize
        $(g.nDiv).hide();
        $(g.nBtn).hide();
        var n = $('div', this.cDrag).index(obj);
        var ow = $('th:visible div:eq(' + n + ')', this.hDiv).width();
        $(obj).addClass('dragging').siblings().hide();
        $(obj).prev().addClass('dragging').show();
        // Remember start position and original width for dragMove/dragEnd.
        this.colresize = {
            startX: e.pageX,
            ol: parseInt(obj.style.left, 10),
            ow: ow,
            n: n
        };
        $('body').css('cursor', 'col-resize');
    } else if (dragtype == 'vresize') {//table resize
        // obj truthy => horizontal grip (width), otherwise vertical (height).
        var hgo = false;
        $('body').css('cursor', 'row-resize');
        if (obj) {
            hgo = true;
            $('body').css('cursor', 'col-resize');
        }
        this.vresize = {
            h: p.height,
            sy: e.pageY,
            w: p.width,
            sx: e.pageX,
            hgo: hgo
        };
    } else if (dragtype == 'colMove') {//column header drag
        $(e.target).disableSelection(); //disable selecting the column header
        if((p.colMove === true)) {
            $(g.nDiv).hide();
            $(g.nBtn).hide();
            // Bounding box of the header table, used by dragMove to decide
            // whether the pointer is over a valid drop target.
            this.hset = $(this.hDiv).offset();
            this.hset.right = this.hset.left + $('table', this.hDiv).width();
            this.hset.bottom = this.hset.top + $('table', this.hDiv).height();
            this.dcol = obj;
            this.dcoln = $('th', this.hDiv).index(obj);
            // Floating copy of the header cell that follows the cursor.
            this.colCopy = document.createElement("div");
            this.colCopy.className = "colCopy";
            this.colCopy.innerHTML = obj.innerHTML;
            if (browser.msie) {
                this.colCopy.className = "colCopy ie";
            }
            $(this.colCopy).css({
                position: 'absolute',
                'float': 'left',
                display: 'none',
                textAlign: obj.align
            });
            $('body').append(this.colCopy);
            $(this.cDrag).hide();
        }
    }
    $('body').noSelect();
},
// Mouse-move handler shared by all three drag modes; dispatches on whichever
// state object dragStart() left on `this`.
dragMove: function (e) {
    if (this.colresize) {//column resize
        var n = this.colresize.n;
        var diff = e.pageX - this.colresize.startX;
        var nleft = this.colresize.ol + diff;
        var nw = this.colresize.ow + diff;
        // Only track widths above the configured minimum.
        if (nw > p.minwidth) {
            $('div:eq(' + n + ')', this.cDrag).css('left', nleft);
            this.colresize.nw = nw;
        }
    } else if (this.vresize) {//table resize
        var v = this.vresize;
        var y = e.pageY;
        var diff = y - v.sy;
        if (!p.defwidth) p.defwidth = p.width;
        // Horizontal grip: grow width, never below the original default.
        if (p.width != 'auto' && !p.nohresize && v.hgo) {
            var x = e.pageX;
            var xdiff = x - v.sx;
            var newW = v.w + xdiff;
            if (newW > p.defwidth) {
                this.gDiv.style.width = newW + 'px';
                p.width = newW;
            }
        }
        // Vertical grip: grow/shrink height within the minimum.
        var newH = v.h + diff;
        if ((newH > p.minheight || p.height < p.minheight) && !v.hgo) {
            this.bDiv.style.height = newH + 'px';
            p.height = newH;
            this.fixHeight(newH);
        }
        v = null;
    } else if (this.colCopy) {
        // Column move: float the header copy next to the cursor and show
        // move/pointer cursor depending on whether we are over the header.
        $(this.dcol).addClass('thMove').removeClass('thOver');
        if (e.pageX > this.hset.right || e.pageX < this.hset.left || e.pageY > this.hset.bottom || e.pageY < this.hset.top) {
            //this.dragEnd();
            $('body').css('cursor', 'move');
        } else {
            $('body').css('cursor', 'pointer');
        }
        $(this.colCopy).css({
            top: e.pageY + 10,
            left: e.pageX + 20,
            display: 'block'
        });
    }
},
// Finish whichever drag is in progress: commit the new column width, end a
// table resize, or drop a moved column into its target slot. Always restores
// the cursor and re-enables text selection.
dragEnd: function () {
    if (this.colresize) {
        var n = this.colresize.n;
        var nw = this.colresize.nw;
        // Apply the final width to the header and every body cell.
        $('th:visible div:eq(' + n + ')', this.hDiv).css('width', nw);
        $('tr', this.bDiv).each(
            function () {
                var $tdDiv = $('td:visible div:eq(' + n + ')', this);
                $tdDiv.css('width', nw);
                g.addTitleToCell($tdDiv);
            }
        );
        this.hDiv.scrollLeft = this.bDiv.scrollLeft;
        $('div:eq(' + n + ')', this.cDrag).siblings().show();
        $('.dragging', this.cDrag).removeClass('dragging');
        this.rePosDrag();
        this.fixHeight();
        this.colresize = false;
        // Persist the width when the cookie plugin is available.
        if ($.cookies) {
            var name = p.colModel[n].name; // Store the widths in the cookies
            $.cookie('flexiwidths/'+name, nw);
        }
    } else if (this.vresize) {
        this.vresize = false;
    } else if (this.colCopy) {
        $(this.colCopy).remove();
        if (this.dcolt !== null) {
            // Reorder the header cell, then the matching body cells.
            if (this.dcoln > this.dcolt) $('th:eq(' + this.dcolt + ')', this.hDiv).before(this.dcol);
            else $('th:eq(' + this.dcolt + ')', this.hDiv).after(this.dcol);
            this.switchCol(this.dcoln, this.dcolt);
            $(this.cdropleft).remove();
            $(this.cdropright).remove();
            this.rePosDrag();
            if (p.onDragCol) {
                p.onDragCol(this.dcoln, this.dcolt);
            }
        }
        this.dcol = null;
        this.hset = null;
        this.dcoln = null;
        this.dcolt = null;
        this.colCopy = null;
        $('.thMove', this.hDiv).removeClass('thMove');
        $(this.cDrag).show();
    }
    $('body').css('cursor', 'default');
    $('body').noSelect(false);
},
// Show or hide column `cid`. When `visible` is null/undefined the column's
// current hidden state is toggled. Refuses to hide below p.minColToggle
// visible columns. Returns the resulting visibility, or false when refused.
toggleCol: function (cid, visible) {
var ncol = $("th[axis='col" + cid + "']", this.hDiv)[0];
var n = $('thead th', g.hDiv).index(ncol);
// Matching checkbox in the column show/hide popup (nDiv).
var cb = $('input[value=' + cid + ']', g.nDiv)[0];
if (visible == null) {
visible = ncol.hidden;
}
// Guard: keep at least p.minColToggle columns visible.
if ($('input:checked', g.nDiv).length < p.minColToggle && !visible) {
return false;
}
if (visible) {
ncol.hidden = false;
$(ncol).show();
cb.checked = true;
} else {
ncol.hidden = true;
$(ncol).hide();
cb.checked = false;
}
// Mirror the header change on every body row's cell at index n.
$('tbody tr', t).each(
function () {
if (visible) {
$('td:eq(' + n + ')', this).show();
} else {
$('td:eq(' + n + ')', this).hide();
}
}
);
this.rePosDrag();
if (p.onToggleCol) {
p.onToggleCol(cid, visible);
}
return visible;
},
// Move column `cdrag` to position `cdrop` in every body row and in the
// column show/hide popup (the header <th> itself is moved by dragEnd).
switchCol: function (cdrag, cdrop) { //switch columns
$('tbody tr', t).each(
function () {
if (cdrag > cdrop) $('td:eq(' + cdrop + ')', this).before($('td:eq(' + cdrag + ')', this));
else $('td:eq(' + cdrop + ')', this).after($('td:eq(' + cdrag + ')', this));
}
);
//switch order in nDiv
if (cdrag > cdrop) {
$('tr:eq(' + cdrop + ')', this.nDiv).before($('tr:eq(' + cdrag + ')', this.nDiv));
} else {
$('tr:eq(' + cdrop + ')', this.nDiv).after($('tr:eq(' + cdrag + ')', this.nDiv));
}
// IE6 workaround: re-check the checkbox that ends up at the drop slot.
if (browser.msie && browser.version < 7.0) {
$('tr:eq(' + cdrop + ') input', this.nDiv)[0].checked = true;
}
// Keep header scroll aligned with the body.
this.hDiv.scrollLeft = this.bDiv.scrollLeft;
},
scroll: function () {
this.hDiv.scrollLeft = this.bDiv.scrollLeft;
this.rePosDrag();
},
addData: function (data) { //parse data
if (p.dataType == 'json') {
data = $.extend({rows: [], page: 0, total: 0}, data);
}
if (p.preProcess) {
data = p.preProcess(data);
}
$('.pReload', this.pDiv).removeClass('loading');
this.loading = false;
if (!data) {
$('.pPageStat', this.pDiv).html(p.errormsg);
if (p.onSuccess) p.onSuccess(this);
return false;
}
if (p.dataType == 'xml') {
p.total = +$('rows total', data).text();
} else {
p.total = data.total;
}
if (p.total === 0) {
$('tr, a, td, div', t).unbind();
$(t).empty();
p.pages = 1;
p.page = 1;
this.buildpager();
$('.pPageStat', this.pDiv).html(p.nomsg);
if (p.onSuccess) p.onSuccess(this);
return false;
}
p.pages = Math.ceil(p.total / p.rp);
if (p.dataType == 'xml') {
p.page = +$('rows page', data).text();
} else {
p.page = data.page;
}
this.buildpager();
//build new body
var tbody = document.createElement('tbody');
if (p.dataType == 'json') {
$.each(data.rows, function (i, row) {
var tr = document.createElement('tr');
var jtr = $(tr);
if (row.name) tr.name = row.name;
if (row.color) {
jtr.css('background',row.color);
} else {
if (i % 2 && p.striped) tr.className = 'erow';
}
if (row[p.idProperty]) {
tr.id = 'row' + row[p.idProperty];
jtr.attr('data-id', row[p.idProperty]);
}
$('thead tr:first th', g.hDiv).each( //add cell
function () {
var td = document.createElement('td');
var idx = $(this).attr('axis').substr(3);
td.align = this.align;
// If each row is the object itself (no 'cell' key)
if (typeof row.cell == 'undefined') {
td.innerHTML = row[p.colModel[idx].name];
} else {
// If the json elements aren't named (which is typical), use numeric order
var iHTML = '';
if (typeof row.cell[idx] != "undefined") {
iHTML = (row.cell[idx] !== null) ? row.cell[idx] : ''; //null-check for Opera-browser
} else {
iHTML = row.cell[p.colModel[idx].name];
}
td.innerHTML = p.__mw.datacol(p, $(this).attr('abbr'), iHTML); //use middleware datacol to format cols
}
// If the content has a <BGCOLOR=nnnnnn> option, decode it.
var offs = td.innerHTML.indexOf( '<BGCOLOR=' );
if( offs >0 ) {
$(td).css('background', text.substr(offs+7,7) );
}
$(td).attr('abbr', $(this).attr('abbr'));
$(tr).append(td);
td = null;
}
);
if ($('thead', this.gDiv).length < 1) {//handle if grid has no headers
for (idx = 0; idx < row.cell.length; idx++) {
var td = document.createElement('td');
// If the json elements aren't named (which is typical), use numeric order
if (typeof row.cell[idx] != "undefined") {
td.innerHTML = (row.cell[idx] != null) ? row.cell[idx] : '';//null-check for Opera-browser
} else {
td.innerHTML = row.cell[p.colModel[idx].name];
}
$(tr).append(td);
td = null;
}
}
$(tbody).append(tr);
tr = null;
});
} else if (p.dataType == 'xml') {
var i = 1;
$("rows row", data).each(function () {
i++;
var tr = document.createElement('tr');
if ($(this).attr('name')) tr.name = $(this).attr('name');
if ($(this).attr('color')) {
$(tr).css('background',$(this).attr('id'));
} else {
if (i % 2 && p.striped) tr.className = 'erow';
}
var nid = $(this).attr('id');
if (nid) {
tr.id = 'row' + nid;
}
nid = null;
var robj = this;
$('thead tr:first th', g.hDiv).each(function () {
var td = document.createElement('td');
var idx = $(this).attr('axis').substr(3);
td.align = this.align;
var text = $("cell:eq(" + idx + ")", robj).text();
var offs = text.indexOf( '<BGCOLOR=' );
if( offs >0 ) {
$(td).css('background', text.substr(offs+7,7) );
}
td.innerHTML = p.__mw.datacol(p, $(this).attr('abbr'), text); //use middleware datacol to format cols
$(td).attr('abbr', $(this).attr('abbr'));
$(tr).append(td);
td = null;
});
if ($('thead', this.gDiv).length < 1) {//handle if grid has no headers
$('cell', this).each(function () {
var td = document.createElement('td');
td.innerHTML = $(this).text();
$(tr).append(td);
td = null;
});
}
$(tbody).append(tr);
tr = null;
robj = null;
});
}
$('tr', t).unbind();
$(t).empty();
$(t).append(tbody);
this.addCellProp();
this.addRowProp();
this.rePosDrag();
tbody = null;
data = null;
i = null;
if (p.onSuccess) {
p.onSuccess(this);
}
if (p.hideOnSubmit) {
$(g.block).remove();
}
this.hDiv.scrollLeft = this.bDiv.scrollLeft;
if (browser.opera) {
$(t).css('visibility', 'visible');
}
},
// Sort by the column whose header cell is `th`. Clicking the already
// sorted column flips asc/desc; clicking a different column keeps the
// current p.sortorder direction (it is only toggled on a repeat click).
changeSort: function (th) { //change sortorder
if (this.loading) {
return true;
}
$(g.nDiv).hide();
$(g.nBtn).hide();
if (p.sortname == $(th).attr('abbr')) {
if (p.sortorder == 'asc') {
p.sortorder = 'desc';
} else {
p.sortorder = 'asc';
}
}
// Move the 'sorted' marker and direction arrow to this header.
$(th).addClass('sorted').siblings().removeClass('sorted');
$('.sdesc', this.hDiv).removeClass('sdesc');
$('.sasc', this.hDiv).removeClass('sasc');
$('div', th).addClass('s' + p.sortorder);
p.sortname = $(th).attr('abbr');
// Either delegate to the caller's sort handler or reload from the server.
if (p.onChangeSort) {
p.onChangeSort(p.sortname, p.sortorder);
} else {
this.populate();
}
},
buildpager: function () { //rebuild pager based on new properties
$('.pcontrol input', this.pDiv).val(p.page);
$('.pcontrol span', this.pDiv).html(p.pages);
var r1 = p.total == 0 ? 0 : (p.page - 1) * p.rp + 1;
var r2 = r1 + p.rp - 1;
if (p.total < r2) {
r2 = p.total;
}
var stat = p.pagestat;
stat = stat.replace(/{from}/, r1);
stat = stat.replace(/{to}/, r2);
stat = stat.replace(/{total}/, p.total);
$('.pPageStat', this.pDiv).html(stat);
},
// Fetch the current page from p.url via AJAX and hand the response to
// addData(). Re-entrancy is guarded by this.loading, which addData()
// clears on completion.
populate: function () { //get latest data
if (this.loading) {
return true;
}
// Let the caller veto / prepare the request (e.g. sync form params).
if (p.onSubmit) {
var gh = p.onSubmit();
if (!gh) {
return false;
}
}
this.loading = true;
// NOTE(review): when p.url is empty this returns with this.loading still
// true, so later populate() calls become no-ops — confirm intended.
if (!p.url) {
return false;
}
$('.pPageStat', this.pDiv).html(p.procmsg);
$('.pReload', this.pDiv).addClass('loading');
$(g.block).css({
top: g.bDiv.offsetTop
});
if (p.hideOnSubmit) {
// Overlay the grid body while loading.
$(this.gDiv).prepend(g.block);
}
if (browser.opera) {
// Opera flicker workaround; addData() restores visibility.
$(t).css('visibility', 'hidden');
}
if (!p.newp) {
p.newp = 1;
}
if (p.page > p.pages) {
p.page = p.pages;
}
// Standard request parameters; p.params entries are appended verbatim.
var param = [{
name: 'page',
value: p.newp
}, {
name: 'rp',
value: p.rp
}, {
name: 'sortname',
value: p.sortname
}, {
name: 'sortorder',
value: p.sortorder
}, {
name: 'query',
value: p.query
}, {
name: 'qtype',
value: p.qtype
}];
if (p.params.length) {
for (var pi = 0; pi < p.params.length; pi++) {
param[param.length] = p.params[pi];
}
}
$.ajax({
type: p.method,
url: p.url,
data: param,
dataType: p.dataType,
success: function (data) {
g.addData(data);
},
error: function (XMLHttpRequest, textStatus, errorThrown) {
// Errors are delegated to p.onError; its own failures are swallowed.
try {
if (p.onError) p.onError(XMLHttpRequest, textStatus, errorThrown);
} catch (e) {}
}
});
},
doSearch: function () {
p.query = $('input[name=q]', g.sDiv).val();
p.qtype = $('select[name=qtype]', g.sDiv).val();
p.newp = 1;
this.populate();
},
changePage: function (ctype) { //change page
if (this.loading) {
return true;
}
switch (ctype) {
case 'first':
p.newp = 1;
break;
case 'prev':
if (p.page > 1) {
p.newp = parseInt(p.page, 10) - 1;
}
break;
case 'next':
if (p.page < p.pages) {
p.newp = parseInt(p.page, 10) + 1;
}
break;
case 'last':
p.newp = p.pages;
break;
case 'input':
var nv = parseInt($('.pcontrol input', this.pDiv).val(), 10);
if (isNaN(nv)) {
nv = 1;
}
if (nv < 1) {
nv = 1;
} else if (nv > p.pages) {
nv = p.pages;
}
$('.pcontrol input', this.pDiv).val(nv);
p.newp = nv;
break;
}
if (p.newp == p.page) {
return false;
}
if (p.onChangePage) {
p.onChangePage(p.newp);
} else {
this.populate();
}
},
// Wrap every body cell's content in an inner <div> sized/aligned to match
// its column header, hide cells of hidden columns, and run any per-column
// process hook. Called after each body rebuild.
addCellProp: function () {
$('tbody tr td', g.bDiv).each(function () {
var tdDiv = document.createElement('div');
var n = $('td', $(this).parent()).index(this);
var pth = $('th:eq(' + n + ')', g.hDiv).get(0);
if (pth != null) {
if (p.sortname == $(pth).attr('abbr') && p.sortname) {
this.className = 'sorted';
}
// Inherit alignment and width from the matching header cell.
$(tdDiv).css({
textAlign: pth.align,
width: $('div:first', pth)[0].style.width
});
if (pth.hidden) {
$(this).css('display', 'none');
}
}
if (p.nowrap == false) {
$(tdDiv).css('white-space', 'normal');
}
// Keep empty cells from collapsing.
if (this.innerHTML == '') {
this.innerHTML = ' ';
}
tdDiv.innerHTML = this.innerHTML;
// pid: the row id with its 'row' prefix stripped, or false when absent.
var prnt = $(this).parent()[0];
var pid = false;
if (prnt.id) {
pid = prnt.id.substr(3);
}
if (pth != null) {
// Column-level post-processing hook (set from colModel's process).
if (pth.process) pth.process(tdDiv, pid);
}
$(this).empty().append(tdDiv).removeAttr('width'); //wrap content
g.addTitleToCell(tdDiv);
});
},
getCellDim: function (obj) {// get cell prop for editable event
var ht = parseInt($(obj).height(), 10);
var pht = parseInt($(obj).parent().height(), 10);
var wt = parseInt(obj.style.width, 10);
var pwt = parseInt($(obj).parent().width(), 10);
var top = obj.offsetParent.offsetTop;
var left = obj.offsetParent.offsetLeft;
var pdl = parseInt($(obj).css('paddingLeft'), 10);
var pdt = parseInt($(obj).css('paddingTop'), 10);
return {
ht: ht,
wt: wt,
top: top,
left: left,
pdl: pdl,
pdt: pdt,
pht: pht,
pwt: pwt
};
},
// Wire selection behaviour onto every body row: click toggles selection
// (exclusive when p.singleSelect), shift/ctrl/meta enable multi-select,
// dblclick fires p.onDoubleClick, and shift-hover extends the selection.
addRowProp: function () {
$('tbody tr', g.bDiv).on('click', function (e) {
var obj = (e.target || e.srcElement);
// Let links and form controls inside cells behave normally.
if (obj.href || obj.type) return true;
if (e.ctrlKey || e.metaKey) {
// mousedown already took care of this case
return;
}
$(this).toggleClass('trSelected');
if (p.singleSelect && ! g.multisel) {
$(this).siblings().removeClass('trSelected');
}
}).on('mousedown', function (e) {
if (e.shiftKey) {
$(this).toggleClass('trSelected');
g.multisel = true;
this.focus();
// Suppress native text selection for the duration of the drag-select.
$(g.gDiv).noSelect();
}
if (e.ctrlKey || e.metaKey) {
$(this).toggleClass('trSelected');
g.multisel = true;
this.focus();
}
}).on('mouseup', function (e) {
// Leave multi-select mode once the modifier keys are released.
if (g.multisel && ! (e.ctrlKey || e.metaKey)) {
g.multisel = false;
$(g.gDiv).noSelect(false);
}
}).on('dblclick', function () {
if (p.onDoubleClick) {
p.onDoubleClick(this, g, p);
}
}).hover(function (e) {
// Shift-hover sweeps the selection across rows.
if (g.multisel && e.shiftKey) {
$(this).toggleClass('trSelected');
}
}, function () {});
// IE6 has no :hover on <tr>; emulate the hover class.
if (browser.msie && browser.version < 7.0) {
$(this).hover(function () {
$(this).addClass('trOver');
}, function () {
$(this).removeClass('trOver');
});
}
},
combo_flag: true,
combo_resetIndex: function(selObj)
{
if(this.combo_flag) {
selObj.selectedIndex = 0;
}
this.combo_flag = true;
},
// Run the JavaScript snippet stored as the selected option's value, then
// snap the combo back to its placeholder and suppress the follow-up
// click-reset (see combo_resetIndex).
// SECURITY: eval() executes whatever string was configured as the
// option's onpress value — combobutton definitions must never be built
// from untrusted input.
combo_doSelectAction: function(selObj)
{
eval( selObj.options[selObj.selectedIndex].value );
selObj.selectedIndex = 0;
this.combo_flag = false;
},
//Add title attribute to div if cell contents is truncated
// Measures the cell text off-screen in a throwaway <span> (same font-size
// and left padding); when the natural width exceeds the cell's width the
// full text is exposed as a tooltip via the title attribute.
// No-op unless p.addTitleToCell is enabled.
addTitleToCell: function(tdDiv) {
if(p.addTitleToCell) {
var $span = $('<span />').css('display', 'none'),
$div = (tdDiv instanceof jQuery) ? tdDiv : $(tdDiv),
div_w = $div.outerWidth(),
span_w = 0;
// Insert before the first body child so the span inherits page context.
$('body').children(':first').before($span);
$span.html($div.html());
$span.css('font-size', '' + $div.css('font-size'));
$span.css('padding-left', '' + $div.css('padding-left'));
span_w = $span.innerWidth();
$span.remove();
if(span_w > div_w) {
$div.attr('title', $div.text());
} else {
$div.removeAttr('title');
}
}
},
// Double-click auto-fit for the column whose drag handle is `obj`:
// measure the widest content (header plus every body cell) in an
// off-screen <span>, clamp to p.minWidth, move the drag handle to the
// matching position, and reuse dragEnd() to apply the width.
// No-op unless p.dblClickResize is enabled.
autoResizeColumn: function (obj) {
if(!p.dblClickResize) {
return;
}
var n = $('div', this.cDrag).index(obj),
$th = $('th:visible div:eq(' + n + ')', this.hDiv),
ol = parseInt(obj.style.left, 10),
ow = $th.width(),
nw = 0,
nl = 0,
$span = $('<span />');
$('body').children(':first').before($span);
// Start with the header's natural width...
$span.html($th.html());
$span.css('font-size', '' + $th.css('font-size'));
$span.css('padding-left', '' + $th.css('padding-left'));
$span.css('padding-right', '' + $th.css('padding-right'));
nw = $span.width();
// ...then widen to the largest body cell in this column.
$('tr', this.bDiv).each(function () {
var $tdDiv = $('td:visible div:eq(' + n + ')', this),
spanW = 0;
$span.html($tdDiv.html());
$span.css('font-size', '' + $tdDiv.css('font-size'));
$span.css('padding-left', '' + $tdDiv.css('padding-left'));
$span.css('padding-right', '' + $tdDiv.css('padding-right'));
spanW = $span.width();
nw = (spanW > nw) ? spanW : nw;
});
$span.remove();
nw = (p.minWidth > nw) ? p.minWidth : nw;
nl = ol + (nw - ow);
$('div:eq(' + n + ')', this.cDrag).css('left', nl);
// Hand off to the normal resize-finish path.
this.colresize = {
nw: nw,
n: n
};
g.dragEnd();
},
pager: 0
};
// --- Grid construction: build <thead> from p.colModel (honouring widths
// persisted in cookies), then create the container <div>s and wrap the
// original table element t.
g = p.getGridClass(g); //get the grid class
if (p.colModel) { //create model if any
thead = document.createElement('thead');
var tr = document.createElement('tr');
for (var i = 0; i < p.colModel.length; i++) {
var cm = p.colModel[i];
var th = document.createElement('th');
$(th).attr('axis', 'col' + i);
if( cm ) { // only use cm if its defined
if ($.cookies) {
var cookie_width = 'flexiwidths/'+cm.name; // Re-Store the widths in the cookies
if( $.cookie(cookie_width) != undefined ) {
cm.width = $.cookie(cookie_width);
}
}
if( cm.display != undefined ) {
th.innerHTML = cm.display;
}
// Sortable columns advertise their field name via the abbr attribute.
if (cm.name && cm.sortable) {
$(th).attr('abbr', cm.name);
}
if (cm.align) {
th.align = cm.align;
}
if (cm.width) {
$(th).attr('width', cm.width);
}
if ($(cm).attr('hide')) {
th.hidden = true;
}
if (cm.process) {
th.process = cm.process;
}
} else {
// Undefined colModel entry: render an empty 30px header cell.
th.innerHTML = "";
$(th).attr('width',30);
}
$(tr).append(th);
}
$(thead).append(tr);
$(t).prepend(thead);
} // end if p.colmodel
//init divs
g.gDiv = document.createElement('div'); //create global container
g.mDiv = document.createElement('div'); //create title container
g.hDiv = document.createElement('div'); //create header container
g.bDiv = document.createElement('div'); //create body container
g.vDiv = document.createElement('div'); //create grip
g.rDiv = document.createElement('div'); //create horizontal resizer
g.cDrag = document.createElement('div'); //create column drag
g.block = document.createElement('div'); //creat blocker
g.nDiv = document.createElement('div'); //create column show/hide popup
g.nBtn = document.createElement('div'); //create column show/hide button
g.iDiv = document.createElement('div'); //create editable layer
g.tDiv = document.createElement('div'); //create toolbar
g.sDiv = document.createElement('div');
g.pDiv = document.createElement('div'); //create pager container
if(p.colResize === false) { //don't display column drag if we are not using it
$(g.cDrag).css('display', 'none');
}
if (!p.usepager) {
g.pDiv.style.display = 'none';
}
g.hTable = document.createElement('table');
g.gDiv.className = 'flexigrid';
// Numeric widths get a px suffix; string widths (e.g. '50%') pass through.
if (p.width != 'auto') {
g.gDiv.style.width = p.width + (isNaN(p.width) ? '' : 'px');
}
//add conditional classes
if (browser.msie) {
$(g.gDiv).addClass('ie');
}
if (p.novstripe) {
$(g.gDiv).addClass('novstripe');
}
$(t).before(g.gDiv);
$(g.gDiv).append(t);
// --- Toolbar buttons (p.buttons), optional combo-button <select>
// (p.combobuttons), and header table scaffolding.
//set toolbar
if (p.buttons) {
g.tDiv.className = 'tDiv';
var tDiv2 = document.createElement('div');
tDiv2.className = 'tDiv2';
for (var i = 0; i < p.buttons.length; i++) {
var btn = p.buttons[i];
if (!btn.separator) {
var btnDiv = document.createElement('div');
btnDiv.className = 'fbutton';
btnDiv.innerHTML = ("<div><span>") + (btn.hidename ? " " : btn.name) + ("</span></div>");
if (btn.bclass) $('span', btnDiv).addClass(btn.bclass).css({
paddingLeft: 20
});
if (btn.bimage) // if bimage defined, use its string as an image url for this buttons style (RS)
$('span',btnDiv).css( 'background', 'url('+btn.bimage+') no-repeat center left' );
$('span',btnDiv).css( 'paddingLeft', 20 );
if (btn.tooltip) // add title if exists (RS)
$('span',btnDiv)[0].title = btn.tooltip;
btnDiv.onpress = btn.onpress;
btnDiv.name = btn.name;
if (btn.id) {
btnDiv.id = btn.id;
}
if (btn.onpress) {
$(btnDiv).click(function () {
this.onpress(this.id || this.name, g.gDiv);
});
}
$(tDiv2).append(btnDiv);
if (browser.msie && browser.version < 7.0) {
$(btnDiv).hover(function () {
$(this).addClass('fbOver');
}, function () {
$(this).removeClass('fbOver');
});
}
} else {
$(tDiv2).append("<div class='btnseparator'></div>");
}
}
$(g.tDiv).append(tDiv2);
$(g.tDiv).append("<div style='clear:both'></div>");
$(g.gDiv).prepend(g.tDiv);
}
g.hDiv.className = 'hDiv';
// Define a combo button set with custom action'ed calls when clicked.
// NOTE(review): $(g.tDiv2) is always truthy (jQuery() returns an object
// even for undefined input, and g.tDiv2 is never assigned — the local was
// tDiv2), so this condition effectively tests only p.combobuttons.
if( p.combobuttons && $(g.tDiv2) )
{
var btnDiv = document.createElement('div');
btnDiv.className = 'fbutton';
var tSelect = document.createElement('select');
$(tSelect).change( function () { g.combo_doSelectAction( tSelect ) } );
$(tSelect).click( function () { g.combo_resetIndex( tSelect) } );
tSelect.className = 'cselect';
$(btnDiv).append(tSelect);
for (i=0;i<p.combobuttons.length;i++)
{
var btn = p.combobuttons[i];
if (!btn.separator)
{
var btnOpt = document.createElement('option');
btnOpt.innerHTML = btn.name;
if (btn.bclass)
$(btnOpt)
.addClass(btn.bclass)
.css({paddingLeft:20})
;
if (btn.bimage) // if bimage defined, use its string as an image url for this buttons style (RS)
$(btnOpt).css( 'background', 'url('+btn.bimage+') no-repeat center left' );
$(btnOpt).css( 'paddingLeft', 20 );
if (btn.tooltip) // add title if exists (RS)
$(btnOpt)[0].title = btn.tooltip;
if (btn.onpress)
{
// The option's value is a code string later eval'd by combo_doSelectAction.
btnOpt.value = btn.onpress;
}
$(tSelect).append(btnOpt);
}
}
// NOTE(review): appends to every '.tDiv2' in the document, not just this
// grid's toolbar — confirm single-grid-per-page assumption.
$('.tDiv2').append(btnDiv);
}
// Move the table's <thead> into the fixed header table (hTable).
$(t).before(g.hDiv);
g.hTable.cellPadding = 0;
g.hTable.cellSpacing = 0;
$(g.hDiv).append('<div class="hDivBox"></div>');
$('div', g.hDiv).append(g.hTable);
var thead = $("thead:first", t).get(0);
if (thead) $(g.hTable).append(thead);
thead = null;
// --- Per-header-cell setup: sort click handling, content wrapping in an
// inner <div>, drag-to-move, and hover behaviour (sort-direction preview,
// column-drop markers, and positioning of the show/hide button/popup).
// NOTE(review): 'p.colmodel' (lower-case m) never matches the 'colModel'
// option used above, so these branches always run — confirm intended.
if (!p.colmodel) var ci = 0;
$('thead tr:first th', g.hDiv).each(function () {
var thdiv = document.createElement('div');
if ($(this).attr('abbr')) {
// Sortable column: clicking the header (when hover-armed) changes sort.
$(this).click(function (e) {
if (!$(this).hasClass('thOver')) return false;
var obj = (e.target || e.srcElement);
if (obj.href || obj.type) return true;
g.changeSort(this);
});
if ($(this).attr('abbr') == p.sortname) {
this.className = 'sorted';
thdiv.className = 's' + p.sortorder;
}
}
if (this.hidden) {
$(this).hide();
}
if (!p.colmodel) {
$(this).attr('axis', 'col' + ci++);
}
// if there isn't a default width, then the column headers don't match
// i'm sure there is a better way, but this at least stops it failing
if (this.width == '') {
this.width = 100;
}
$(thdiv).css({
textAlign: this.align,
width: this.width + 'px'
});
thdiv.innerHTML = this.innerHTML;
$(this).empty().append(thdiv).removeAttr('width').mousedown(function (e) {
g.dragStart('colMove', e, this);
}).hover(function () {
if (!g.colresize && !$(this).hasClass('thMove') && !g.colCopy) {
$(this).addClass('thOver');
}
// Preview the sort direction that a click would apply.
if ($(this).attr('abbr') != p.sortname && !g.colCopy && !g.colresize && $(this).attr('abbr')) {
$('div', this).addClass('s' + p.sortorder);
} else if ($(this).attr('abbr') == p.sortname && !g.colCopy && !g.colresize && $(this).attr('abbr')) {
var no = (p.sortorder == 'asc') ? 'desc' : 'asc';
$('div', this).removeClass('s' + p.sortorder).addClass('s' + no);
}
if (g.colCopy) {
// A column is being dragged: show the drop marker on this header and
// remember it as the current drop target (dcolt).
var n = $('th', g.hDiv).index(this);
if (n == g.dcoln) {
return false;
}
if (n < g.dcoln) {
$(this).append(g.cdropleft);
} else {
$(this).append(g.cdropright);
}
g.dcolt = n;
} else if (!g.colresize) {
// Idle hover: position the column show/hide button next to this header,
// keeping the popup inside the grid's width.
var nv = $('th:visible', g.hDiv).index(this);
var onl = parseInt($('div:eq(' + nv + ')', g.cDrag).css('left'), 10);
var nw = jQuery(g.nBtn).outerWidth();
var nl = onl - nw + Math.floor(p.cgwidth / 2);
$(g.nDiv).hide();
$(g.nBtn).hide();
$(g.nBtn).css({
'left': nl,
top: g.hDiv.offsetTop
}).show();
var ndw = parseInt($(g.nDiv).width(), 10);
$(g.nDiv).css({
top: g.bDiv.offsetTop
});
if ((nl + ndw) > $(g.gDiv).width()) {
$(g.nDiv).css('left', onl - ndw + 1);
} else {
$(g.nDiv).css('left', nl);
}
if ($(this).hasClass('sorted')) {
$(g.nBtn).addClass('srtd');
} else {
$(g.nBtn).removeClass('srtd');
}
}
}, function () {
// Hover out: undo the preview classes and clear any drop markers.
$(this).removeClass('thOver');
if ($(this).attr('abbr') != p.sortname) {
$('div', this).removeClass('s' + p.sortorder);
} else if ($(this).attr('abbr') == p.sortname) {
var no = (p.sortorder == 'asc') ? 'desc' : 'asc';
$('div', this).addClass('s' + p.sortorder).removeClass('s' + no);
}
if (g.colCopy) {
$(g.cdropleft).remove();
$(g.cdropright).remove();
g.dcolt = null;
}
}); //wrap content
});
// --- Body container, column-drag handles (when p.colResize), row
// striping, and the vertical/horizontal resize grips.
//set bDiv
g.bDiv.className = 'bDiv';
$(t).before(g.bDiv);
$(g.bDiv).css({
height: (p.height == 'auto') ? 'auto' : p.height + "px"
}).scroll(function (e) {
g.scroll()
}).append(t);
if (p.height == 'auto') {
$('table', g.bDiv).addClass('autoht');
}
//add td & row properties
g.addCellProp();
g.addRowProp();
//set cDrag only if we are using it
if (p.colResize === true) {
var cdcol = $('thead tr:first th:first', g.hDiv).get(0);
// NOTE(review): jQuery .get(0) yields undefined (not null) for an empty
// set, so this !== null guard is always true — confirm intended.
if(cdcol !== null) {
g.cDrag.className = 'cDrag';
// cdpad: total horizontal border+padding of the first header cell and
// its inner div, used to offset the drag handles.
g.cdpad = 0;
g.cdpad += (isNaN(parseInt($('div', cdcol).css('borderLeftWidth'), 10)) ? 0 : parseInt($('div', cdcol).css('borderLeftWidth'), 10));
g.cdpad += (isNaN(parseInt($('div', cdcol).css('borderRightWidth'), 10)) ? 0 : parseInt($('div', cdcol).css('borderRightWidth'), 10));
g.cdpad += (isNaN(parseInt($('div', cdcol).css('paddingLeft'), 10)) ? 0 : parseInt($('div', cdcol).css('paddingLeft'), 10));
g.cdpad += (isNaN(parseInt($('div', cdcol).css('paddingRight'), 10)) ? 0 : parseInt($('div', cdcol).css('paddingRight'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('borderLeftWidth'), 10)) ? 0 : parseInt($(cdcol).css('borderLeftWidth'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('borderRightWidth'), 10)) ? 0 : parseInt($(cdcol).css('borderRightWidth'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('paddingLeft'), 10)) ? 0 : parseInt($(cdcol).css('paddingLeft'), 10));
g.cdpad += (isNaN(parseInt($(cdcol).css('paddingRight'), 10)) ? 0 : parseInt($(cdcol).css('paddingRight'), 10));
$(g.bDiv).before(g.cDrag);
var cdheight = $(g.bDiv).height();
var hdheight = $(g.hDiv).height();
$(g.cDrag).css({
top: -hdheight + 'px'
});
// One drag handle per header cell; mousedown starts a resize and
// dblclick auto-fits the column.
$('thead tr:first th', g.hDiv).each(function() {
var cgDiv = document.createElement('div');
$(g.cDrag).append(cgDiv);
if (!p.cgwidth) {
p.cgwidth = $(cgDiv).width();
}
$(cgDiv).css({
height: cdheight + hdheight
}).mousedown(function(e) {
g.dragStart('colresize', e, this);
}).dblclick(function(e) {
g.autoResizeColumn(this);
});
if (browser.msie && browser.version < 7.0) {
g.fixHeight($(g.gDiv).height());
$(cgDiv).hover(function() {
g.fixHeight();
$(this).addClass('dragging');
}, function() {
if(!g.colresize) {
$(this).removeClass('dragging');
}
});
}
});
}
}
//add strip
if (p.striped) {
$('tbody tr:odd', g.bDiv).addClass('erow');
}
if (p.resizable && p.height != 'auto') {
g.vDiv.className = 'vGrip';
$(g.vDiv).mousedown(function (e) {
g.dragStart('vresize', e);
}).html('<span></span>');
$(g.bDiv).after(g.vDiv);
}
if (p.resizable && p.width != 'auto' && !p.nohresize) {
g.rDiv.className = 'hGrip';
$(g.rDiv).mousedown(function (e) {
// Third argument flags a horizontal (width) resize.
g.dragStart('vresize', e, true);
}).html('<span></span>').css('height', $(g.gDiv).height());
if (browser.msie && browser.version < 7.0) {
$(g.rDiv).hover(function () {
$(this).addClass('hgOver');
}, function () {
$(this).removeClass('hgOver');
});
}
$(g.gDiv).append(g.rDiv);
}
// --- Pager bar (navigation buttons, page input, reload, rows-per-page
// select) and the collapsible search box (p.searchitems).
// add pager
if (p.usepager) {
g.pDiv.className = 'pDiv';
g.pDiv.innerHTML = '<div class="pDiv2"></div>';
$(g.bDiv).after(g.pDiv);
var html = ' <div class="pGroup"> <div class="pFirst pButton"><span></span></div><div class="pPrev pButton"><span></span></div> </div> <div class="btnseparator"></div> <div class="pGroup"><span class="pcontrol">' + p.pagetext + ' <input type="text" size="4" value="1" /> ' + p.outof + ' <span> 1 </span></span></div> <div class="btnseparator"></div> <div class="pGroup"> <div class="pNext pButton"><span></span></div><div class="pLast pButton"><span></span></div> </div> <div class="btnseparator"></div> <div class="pGroup"> <div class="pReload pButton"><span></span></div> </div> <div class="btnseparator"></div> <div class="pGroup"><span class="pPageStat"></span></div>';
$('div', g.pDiv).html(html);
$('.pReload', g.pDiv).click(function () {
g.populate();
});
$('.pFirst', g.pDiv).click(function () {
g.changePage('first');
});
$('.pPrev', g.pDiv).click(function () {
g.changePage('prev');
});
$('.pNext', g.pDiv).click(function () {
g.changePage('next');
});
$('.pLast', g.pDiv).click(function () {
g.changePage('last');
});
// Enter in the page box jumps to the typed page.
$('.pcontrol input', g.pDiv).keydown(function (e) {
if (e.keyCode == 13) {
g.changePage('input');
}
});
if (browser.msie && browser.version < 7) $('.pButton', g.pDiv).hover(function () {
$(this).addClass('pBtnOver');
}, function () {
$(this).removeClass('pBtnOver');
});
if (p.useRp) {
// Rows-per-page <select>, pre-selecting the current p.rp.
var opt = '',
sel = '';
for (var nx = 0; nx < p.rpOptions.length; nx++) {
if (p.rp == p.rpOptions[nx]) sel = 'selected="selected"';
else sel = '';
opt += "<option value='" + p.rpOptions[nx] + "' " + sel + " >" + p.rpOptions[nx] + " </option>";
}
$('.pDiv2', g.pDiv).prepend("<div class='pGroup'><select name='rp'>" + opt + "</select></div> <div class='btnseparator'></div>");
$('select', g.pDiv).change(function () {
if (p.onRpChange) {
p.onRpChange(+this.value);
} else {
p.newp = 1;
p.rp = +this.value;
g.populate();
}
});
}
//add search button
if (p.searchitems) {
$('.pDiv2', g.pDiv).prepend("<div class='pGroup'> <div class='pSearch pButton'><span></span></div> </div> <div class='btnseparator'></div>");
$('.pSearch', g.pDiv).click(function () {
$(g.sDiv).slideToggle('fast', function () {
$('.sDiv:visible input:first', g.gDiv).trigger('focus');
});
});
//add search box
g.sDiv.className = 'sDiv';
var sitems = p.searchitems;
// Build the searchable-field options; the entry flagged isdefault (or
// the first entry) becomes the initial p.qtype.
var sopt = '', sel = '';
for (var s = 0; s < sitems.length; s++) {
if (p.qtype === '' && sitems[s].isdefault === true) {
p.qtype = sitems[s].name;
sel = 'selected="selected"';
} else {
sel = '';
}
sopt += "<option value='" + sitems[s].name + "' " + sel + " >" + sitems[s].display + " </option>";
}
if (p.qtype === '') {
p.qtype = sitems[0].name;
}
$(g.sDiv).append("<div class='sDiv2'>" + p.findtext +
" <input type='text' value='" + p.query +"' size='30' name='q' class='qsbox' /> "+
" <select name='qtype'>" + sopt + "</select></div>");
//Split into separate selectors because of bug in jQuery 1.3.2
$('input[name=q]', g.sDiv).keydown(function (e) {
if (e.keyCode == 13) {
g.doSearch();
}
});
$('select[name=qtype]', g.sDiv).keydown(function (e) {
if (e.keyCode == 13) {
g.doSearch();
}
});
$('input[value=Clear]', g.sDiv).click(function () {
$('input[name=q]', g.sDiv).val('');
p.query = '';
g.doSearch();
});
$(g.bDiv).after(g.sDiv);
}
}
// --- Final assembly: title bar, loading blocker, column show/hide popup,
// inline-edit layer, grid/document event wiring, IE6 tweaks, and optional
// initial data load.
// NOTE(review): jQuery ignores the second argument when the first is an
// element, so only g.pDiv receives this clear <div> — confirm whether
// g.sDiv was also intended.
$(g.pDiv, g.sDiv).append("<div style='clear:both'></div>");
// add title
if (p.title) {
g.mDiv.className = 'mDiv';
g.mDiv.innerHTML = '<div class="ftitle">' + p.title + '</div>';
$(g.gDiv).prepend(g.mDiv);
if (p.showTableToggleBtn) {
$(g.mDiv).append('<div class="ptogtitle" title="Minimize/Maximize Table"><span></span></div>');
$('div.ptogtitle', g.mDiv).click(function () {
$(g.gDiv).toggleClass('hideBody');
$(this).toggleClass('vsble');
});
}
}
//setup cdrops
g.cdropleft = document.createElement('span');
g.cdropleft.className = 'cdropleft';
g.cdropright = document.createElement('span');
g.cdropright.className = 'cdropright';
//add block
// Semi-transparent overlay shown over the body while loading
// (see populate()/addData() with p.hideOnSubmit).
g.block.className = 'gBlock';
var gh = $(g.bDiv).height();
var gtop = g.bDiv.offsetTop;
$(g.block).css({
width: g.bDiv.style.width,
height: gh,
background: 'white',
position: 'relative',
marginBottom: (gh * -1),
zIndex: 1,
top: gtop,
left: '0px'
});
$(g.block).fadeTo(0, p.blockOpacity);
// add column control
if ($('th', g.hDiv).length) {
g.nDiv.className = 'nDiv';
g.nDiv.innerHTML = "<table cellpadding='0' cellspacing='0'><tbody></tbody></table>";
$(g.nDiv).css({
marginBottom: (gh * -1),
display: 'none',
top: gtop
}).noSelect();
// One checkbox row per column, pre-checked unless the column is hidden.
var cn = 0;
$('th div', g.hDiv).each(function () {
var kcol = $("th[axis='col" + cn + "']", g.hDiv)[0];
var chk = 'checked="checked"';
if (kcol.style.display == 'none') {
chk = '';
}
$('tbody', g.nDiv).append('<tr><td class="ndcol1"><input type="checkbox" ' + chk + ' class="togCol" value="' + cn + '" /></td><td class="ndcol2">' + this.innerHTML + '</td></tr>');
cn++;
});
if (browser.msie && browser.version < 7.0) $('tr', g.nDiv).hover(function () {
$(this).addClass('ndcolover');
}, function () {
$(this).removeClass('ndcolover');
});
$('td.ndcol2', g.nDiv).click(function () {
// Refuse to hide below the minimum number of visible columns.
if ($('input:checked', g.nDiv).length <= p.minColToggle && $(this).prev().find('input')[0].checked) return false;
return g.toggleCol($(this).prev().find('input').val());
});
$('input.togCol', g.nDiv).click(function () {
if ($('input:checked', g.nDiv).length < p.minColToggle && this.checked === false) return false;
$(this).parent().next().trigger('click');
});
$(g.gDiv).prepend(g.nDiv);
$(g.nBtn).addClass('nBtn')
.html('<div></div>')
.attr('title', 'Hide/Show Columns')
.click(function () {
$(g.nDiv).toggle();
return true;
}
);
if (p.showToggleBtn) {
$(g.gDiv).prepend(g.nBtn);
}
}
// add date edit layer
$(g.iDiv).addClass('iDiv').css({
display: 'none'
});
$(g.bDiv).append(g.iDiv);
// add flexigrid events
$(g.bDiv).hover(function () {
$(g.nDiv).hide();
$(g.nBtn).hide();
}, function () {
if (g.multisel) {
g.multisel = false;
}
});
$(g.gDiv).hover(function () {}, function () {
$(g.nDiv).hide();
$(g.nBtn).hide();
});
//add document events
// Drags are tracked at the document level so they survive leaving the grid.
$(document).mousemove(function (e) {
g.dragMove(e);
}).mouseup(function (e) {
g.dragEnd();
}).hover(function () {}, function () {
g.dragEnd();
});
//browser adjustments
if (browser.msie && browser.version < 7.0) {
$('.hDiv,.bDiv,.mDiv,.pDiv,.vGrip,.tDiv, .sDiv', g.gDiv).css({
width: '100%'
});
$(g.gDiv).addClass('ie6');
if (p.width != 'auto') {
$(g.gDiv).addClass('ie6fullwidthbug');
}
}
g.rePosDrag();
g.fixHeight();
//make grid functions accessible
t.p = p;
t.grid = g;
// load data
if (p.url && p.autoload) {
g.populate();
}
return t;
};
// Tracks whether DOM ready has fired; $.fn.flexigrid defers construction
// until then so the table can be measured against a laid-out document.
var docloaded = false;
$(document).ready(function () {
docloaded = true;
});
// jQuery entry point: turn each matched table into a flexigrid. If the
// document is not ready yet, hide the table and defer construction to a
// ready handler so measurements happen against a laid-out DOM.
$.fn.flexigrid = function (p) {
    return this.each(function () {
        if (docloaded) {
            $.addFlex(this, p);
        } else {
            $(this).hide();
            var el = this;
            $(document).ready(function () {
                $.addFlex(el, p);
            });
        }
    });
}; //end flexigrid
// Repopulate every grid in the matched set that was built with a data url.
$.fn.flexReload = function (p) { // function to reload grid
    return this.each(function () {
        var reloadable = this.grid && this.p.url;
        if (reloadable) {
            this.grid.populate();
        }
    });
}; //end flexReload
// Merge new option values into each initialized grid's stored properties.
$.fn.flexOptions = function (p) { //function to update general options
    return this.each(function () {
        if (!this.grid) {
            return;
        }
        $.extend(this.p, p);
    });
}; //end flexOptions
// Show or hide column `cid` on every initialized grid in the matched set.
$.fn.flexToggleCol = function (cid, visible) {
    return this.each(function () {
        if (!this.grid) {
            return;
        }
        this.grid.toggleCol(cid, visible);
    });
}; //end flexToggleCol
// Feed a prefetched data payload straight into each grid's renderer,
// bypassing the AJAX round trip.
$.fn.flexAddData = function (data) {
    return this.each(function () {
        if (!this.grid) {
            return;
        }
        this.grid.addData(data);
    });
};
// Enable/disable native text selection on the matched elements (used while
// dragging columns). Call with no argument or a truthy value to prevent
// selection; noSelect(false) restores it.
$.fn.noSelect = function (p) { //no select plugin by me :-)
    // BUGFIX: the original tested `p === null`, so the common no-argument
    // call (where p is undefined) fell through to the restore branch and
    // re-ENABLED selection. `p == null` matches both null and undefined.
    var prevent = (p == null) ? true : p;
    if (prevent) {
        return this.each(function () {
            if (browser.msie || browser.safari) $(this).bind('selectstart', function () {
                return false;
            });
            else if (browser.mozilla) {
                $(this).css('MozUserSelect', 'none');
                $('body').trigger('focus');
            } else if (browser.opera) $(this).bind('mousedown', function () {
                return false;
            });
            else $(this).attr('unselectable', 'on');
        });
    } else {
        return this.each(function () {
            if (browser.msie || browser.safari) $(this).unbind('selectstart');
            else if (browser.mozilla) $(this).css('MozUserSelect', 'inherit');
            else if (browser.opera) $(this).unbind('mousedown');
            // removeAttr takes a single attribute name; the stray second
            // argument in the original was ignored.
            else $(this).removeAttr('unselectable');
        });
    }
}; //end noSelect
// Trigger a search on every grid in the set that has search items configured.
$.fn.flexSearch = function (p) { // function to search grid
    return this.each(function () {
        if (this.grid && this.p.searchitems) {
            this.grid.doSearch();
        }
    });
}; //end flexSearch
// Return the selected rows as an array of arrays; each inner array holds
// one object per cell: {Column, Value, CellIndex, RowIdentifier}.
// Adapted from http://stackoverflow.com/questions/11868404
$.fn.selectedRows = function (p) {
    var arReturn = [];
    // BUGFIX/compat: the original rebuilt the query via `this.selector`,
    // which is wrong for sets not built from a selector string and was
    // removed entirely in jQuery 3. Searching the matched elements'
    // descendants directly is equivalent.
    this.find('.trSelected').each(function (i, row) {
        var arRow = [];
        var idr = $(row).data('id');
        $.each(row.cells, function (c, cell) {
            var col = cell.abbr;
            var val = cell.firstChild.innerHTML;
            if (val == ' ') val = ''; // Trim the content
            var idx = cell.cellIndex;
            arRow.push({
                Column: col, // Column identifier
                Value: val, // Column value
                CellIndex: idx, // Cell index
                RowIdentifier: idr // Identifier of this row element
            });
        });
        arReturn.push(arRow);
    });
    return arReturn;
};
})(jQuery);
|
mariano/robot | 1 | config/sql/robot.sql | CREATE TABLE `robot_task_actions`(
`id` CHAR(36) NOT NULL,
`action` VARCHAR(255) NOT NULL,
`weight` INT NOT NULL default 0,
`created` DATETIME NOT NULL,
`modified` DATETIME NOT NULL,
PRIMARY KEY(`id`)
);
-- Queue of scheduled executions. Each task points at the action to run and
-- tracks its lifecycle timestamps.
CREATE TABLE `robot_tasks`(
`id` CHAR(36) NOT NULL, -- UUID surrogate key
`robot_task_action_id` CHAR(36) NOT NULL, -- FK to robot_task_actions.id (constraint added below)
`status` VARCHAR(255) NOT NULL default 'pending', -- lifecycle state; new tasks start as 'pending'
`parameters` BLOB default NULL, -- presumably serialized task arguments; confirm format with application code
`scheduled` DATETIME NOT NULL, -- when the task is due to run
`started` DATETIME default NULL, -- set when execution begins
`finished` DATETIME default NULL, -- set when execution completes
`created` DATETIME NOT NULL,
`modified` DATETIME NOT NULL,
PRIMARY KEY(`id`)
);
-- Action names must be unique.
ALTER TABLE `robot_task_actions`
ADD UNIQUE KEY `action`(`action`);
-- Lookup index plus referential integrity for the task -> action relationship.
ALTER TABLE `robot_tasks`
ADD KEY `robot_task_action_id`(`robot_task_action_id`),
ADD CONSTRAINT `robot_tasks__robot_task_actions` FOREIGN KEY(`robot_task_action_id`) REFERENCES `robot_task_actions`(`id`);
| CREATE TABLE `robot_task_actions`(
`id` CHAR(36) NOT NULL,
`action` VARCHAR(255) NOT NULL,
`weight` INT NOT NULL default 0,
`created` DATETIME NOT NULL,
`modified` DATETIME NOT NULL,
PRIMARY KEY(`id`)
);
-- Queue of scheduled executions. Each task points at the action to run and
-- tracks its lifecycle timestamps.
CREATE TABLE `robot_tasks`(
`id` CHAR(36) NOT NULL, -- UUID surrogate key
`robot_task_action_id` CHAR(36) NOT NULL, -- FK to robot_task_actions.id (constraint added below)
`action` VARCHAR(255) NOT NULL, -- NOTE(review): mirrors robot_task_actions.action — presumably denormalised to avoid a join; confirm
`weight` INT NOT NULL, -- NOTE(review): mirrors robot_task_actions.weight but without its `default 0`; confirm this is intentional
`status` VARCHAR(255) NOT NULL default 'pending', -- lifecycle state; new tasks start as 'pending'
`parameters` BLOB default NULL, -- presumably serialized task arguments; confirm format with application code
`scheduled` DATETIME NOT NULL, -- when the task is due to run
`started` DATETIME default NULL, -- set when execution begins
`finished` DATETIME default NULL, -- set when execution completes
`created` DATETIME NOT NULL,
`modified` DATETIME NOT NULL,
PRIMARY KEY(`id`)
);
-- Action names must be unique.
ALTER TABLE `robot_task_actions`
ADD UNIQUE KEY `action`(`action`);
ALTER TABLE `robot_tasks`
-- Lookup index plus referential integrity for the task -> action relationship.
ADD KEY `robot_task_action_id`(`robot_task_action_id`),
ADD CONSTRAINT `robot_tasks__robot_task_actions` FOREIGN KEY(`robot_task_action_id`) REFERENCES `robot_task_actions`(`id`),
-- Composite index — presumably supports selecting pending tasks ordered by
-- weight and due time; confirm against the worker's query.
ADD INDEX `status__weight__scheduled` (`status`, `weight`, `scheduled`);
|
pmarti/python-messaging | 9 | messaging/test/test_sms.py | # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import unittest
from messaging.sms import SmsSubmit, SmsDeliver
from messaging.utils import (timedelta_to_relative_validity as to_relative,
datetime_to_absolute_validity as to_absolute,
FixedOffset)
class TestEncodingFunctions(unittest.TestCase):
    """Tests for the validity-period conversion helpers in messaging.utils."""

    def test_converting_timedelta_to_validity(self):
        # Relative validity encoding. The asserts exercise the three ranges
        # visible in the fixture: 5-minute steps up to 12h (0-143),
        # 30-minute steps to 24h (144-167), then whole days (168+).
        # Presumably the GSM 03.40 TP-VP relative format — confirm in utils.
        self.assertEqual(to_relative(timedelta(minutes=5)), 0)
        self.assertEqual(to_relative(timedelta(minutes=6)), 0)
        self.assertEqual(to_relative(timedelta(minutes=10)), 1)
        self.assertEqual(to_relative(timedelta(hours=12)), 143)
        self.assertEqual(to_relative(timedelta(hours=13)), 145)
        self.assertEqual(to_relative(timedelta(hours=24)), 167)
        self.assertEqual(to_relative(timedelta(days=2)), 168)
        self.assertEqual(to_relative(timedelta(days=30)), 196)

    def test_converting_datetime_to_validity(self):
        # Expected septets use the nibble-swapped BCD layout described at:
        # http://www.dreamfabric.com/sms/scts.html
        # 12. Feb 1999 05:57:30 GMT+3
        when = datetime(1999, 2, 12, 5, 57, 30, 0,
                        FixedOffset(3 * 60, "GMT+3"))
        expected = [0x99, 0x20, 0x21, 0x50, 0x75, 0x03, 0x21]
        self.assertEqual(to_absolute(when, "GMT+3"), expected)

        # Naive datetime treated as UTC: timezone octet is 0x00.
        when = datetime(1999, 2, 12, 5, 57, 30, 0)
        expected = [0x99, 0x20, 0x21, 0x50, 0x75, 0x03, 0x0]
        self.assertEqual(to_absolute(when, "UTC"), expected)

        # Negative offset: sign bit set in the timezone octet (0x29 for -3h).
        when = datetime(1999, 2, 12, 5, 57, 30, 0,
                        FixedOffset(-3 * 60, "GMT-3"))
        expected = [0x99, 0x20, 0x21, 0x50, 0x75, 0x03, 0x29]
        self.assertEqual(to_absolute(when, "GMT-3"), expected)
class TestSmsSubmit(unittest.TestCase):
    """Encoding tests: SmsSubmit instances serialised to SMS-SUBMIT PDUs.

    Each test pins sms.ref to 0x0 so the generated PDU is deterministic.
    Expected strings were cross-checked with external tools (pduspy.exe,
    http://www.rednaxela.net/pdu.php, umts-tools) where noted.
    """

    def test_encoding_validity(self):
        # no validity
        number = "+34616585119"
        text = "hola"
        expected = "0001000B914316565811F9000004E8373B0C"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

        # absolute validity (first octet 0x19 in the expected PDU)
        number = "+34616585119"
        text = "hola"
        expected = "0019000B914316565811F900000170520251930004E8373B0C"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.validity = datetime(2010, 7, 25, 20, 15, 39)
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

        # relative validity (first octet 0x11; VP octet 0xAA = 4 days)
        number = "+34616585119"
        text = "hola"
        expected = "0011000B914316565811F90000AA04E8373B0C"
        expected_len = 18
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.validity = timedelta(days=4)
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)
        self.assertEqual(pdu.length, expected_len)

    def test_encoding_csca(self):
        # Setting an SMSC address prepends it to the PDU (leading 07...).
        number = "+34616585119"
        text = "hola"
        csca = "+34646456456"
        expected = "07914346466554F601000B914316565811F9000004E8373B0C"
        expected_len = 17
        sms = SmsSubmit(number, text)
        sms.csca = csca
        sms.ref = 0x0
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)
        self.assertEqual(pdu.length, expected_len)
        # Single-part message: count 1, sequence 1.
        self.assertEqual(pdu.cnt, 1)
        self.assertEqual(pdu.seq, 1)

    def test_encoding_class(self):
        # Message class 0-3 only changes the DCS octet (10/11/12/13 below).
        number = "+34654123456"
        text = "hey yo"
        expected_0 = "0001000B914356143254F6001006E8721E947F03"
        expected_1 = "0001000B914356143254F6001106E8721E947F03"
        expected_2 = "0001000B914356143254F6001206E8721E947F03"
        expected_3 = "0001000B914356143254F6001306E8721E947F03"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.klass = 0
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected_0)
        sms.klass = 1
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected_1)
        sms.klass = 2
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected_2)
        sms.klass = 3
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected_3)

    def test_encoding_request_status(self):
        # Requesting a status report flips the first octet 0x01 -> 0x21.
        # tested with pduspy.exe and http://www.rednaxela.net/pdu.php
        number = "+34654123456"
        text = "hey yo"
        expected = "0021000B914356143254F6000006E8721E947F03"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.request_status = True
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

    def test_encoding_message_with_latin1_chars(self):
        # Characters in the GSM default alphabet stay 7-bit encoded.
        # tested with pduspy.exe
        number = "+34654123456"
        text = u"Hölä"
        expected = "0011000B914356143254F60000AA04483E7B0F"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.validity = timedelta(days=4)
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

        # tested with pduspy.exe
        number = "+34654123456"
        text = u"BÄRÇA äñ@"
        expected = "0001000B914356143254F6000009C2AD341104EDFB00"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

    def test_encoding_8bit_message(self):
        # Forcing fmt=0x04 produces an 8-bit data PDU (DCS octet 04).
        number = "01000000000"
        csca = "+44000000000"
        text = "Hi there..."
        expected = "07914400000000F001000B811000000000F000040B48692074686572652E2E2E"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.csca = csca
        sms.fmt = 0x04  # 8 bits
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

    def test_encoding_ucs2_message(self):
        # Non-GSM-alphabet text should auto-select UCS2 (DCS octet 08).
        number = "+34616585119"
        text = u'あ叶葉'
        csca = '+34646456456'
        expected = "07914346466554F601000B914316565811F9000806304253F68449"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.csca = csca
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

        text = u"Русский"
        number = "655345678"
        expected = "001100098156355476F80008AA0E0420044304410441043A04380439"
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.validity = timedelta(days=4)
        pdu = sms.to_pdu()[0]
        self.assertEqual(pdu.pdu, expected)

    def test_encoding_multipart_7bit(self):
        # Long 7-bit text splits into three concatenated parts; rand_id=136
        # fixes the concat reference (0x88 in the UDH) for reproducibility.
        # text encoded with umts-tools
        text = "Or walk with Kings - nor lose the common touch, if neither foes nor loving friends can hurt you, If all men count with you, but none too much; If you can fill the unforgiving minute With sixty seconds' worth of distance run, Yours is the Earth and everything thats in it, And - which is more - you will be a Man, my son"
        number = "655345678"
        expected = [
            "005100098156355476F80000AAA00500038803019E72D03DCC5E83EE693A1AB44CBBCF73500BE47ECB41ECF7BC0CA2A3CBA0F1BBDD7EBB41F4777D8C6681D26690BB9CA6A3CB7290F95D9E83DC6F3988FDB6A7DD6790599E2EBBC973D038EC06A1EB723A28FFAEB340493328CC6683DA653768FCAEBBE9A07B9A8E06E5DF7516485CA783DC6F7719447FBF41EDFA18BD0325CDA0FCBB0E1A87DD",
            "005100098156355476F80000AAA005000388030240E6349B0DA2A3CBA0BADBFC969FD3F6B4FB0C6AA7DD757A19744DD3D1A0791A4FCF83E6E5F1DB4D9E9F40F7B79C8E06BDCD20727A4E0FBBC76590BCEE6681B2EFBA7C0E4ACF41747419540CCBE96850D84D0695ED65799E8E4EBBCF203A3A4C9F83D26E509ACE0205DD64500B7447A7C768507A0E6ABFE565500B947FD741F7349B0D129741",
            "005100098156355476F80000AA14050003880303C2A066D8CD02B5F3A0F9DB0D",
        ]
        sms = SmsSubmit(number, text)
        sms.ref = 0x0
        sms.rand_id = 136
        sms.validity = timedelta(days=4)
        ret = sms.to_pdu()
        cnt = len(ret)
        for i, pdu in enumerate(ret):
            self.assertEqual(pdu.pdu, expected[i])
            self.assertEqual(pdu.seq, i + 1)
            self.assertEqual(pdu.cnt, cnt)

    def test_encoding_bad_number_raises_error(self):
        self.assertRaises(ValueError, SmsSubmit, "032BADNUMBER", "text")

    def test_encoding_bad_csca_raises_error(self):
        sms = SmsSubmit("54342342", "text")
        self.assertRaises(ValueError, setattr, sms, 'csca', "1badcsca")
class TestSubmitPduCounts(unittest.TestCase):
    """Boundary tests for how many PDUs a message splits into.

    The fixtures exercise the limits visible in the asserts: 160 GSM chars
    per single part / 153 per concatenated part; the euro sign costs two
    septets (GSM extension-table escape); UCS2 allows 70 per single part /
    67 per concatenated part.
    """

    DEST = "+3530000000"
    GSM_CHAR = "x"           # 1 septet in the GSM default alphabet
    EGSM_CHAR = u"€"         # 2 septets (escape + char)
    UNICODE_CHAR = u"ő"      # forces UCS2 encoding

    def test_gsm_1(self):
        # 160 septets: the single-part maximum.
        sms = SmsSubmit(self.DEST, self.GSM_CHAR * 160)
        self.assertEqual(len(sms.to_pdu()), 1)

    def test_gsm_2(self):
        # One char over the limit forces concatenation.
        sms = SmsSubmit(self.DEST, self.GSM_CHAR * 161)
        self.assertEqual(len(sms.to_pdu()), 2)

    def test_gsm_3(self):
        # Concatenated parts carry 153 septets each (UDH overhead).
        sms = SmsSubmit(self.DEST, self.GSM_CHAR * 153 * 2)
        self.assertEqual(len(sms.to_pdu()), 2)

    def test_gsm_4(self):
        sms = SmsSubmit(self.DEST,
                        self.GSM_CHAR * 153 * 2 + self.GSM_CHAR)
        self.assertEqual(len(sms.to_pdu()), 3)

    def test_gsm_5(self):
        sms = SmsSubmit(self.DEST, self.GSM_CHAR * 153 * 3)
        self.assertEqual(len(sms.to_pdu()), 3)

    def test_gsm_6(self):
        sms = SmsSubmit(self.DEST,
                        self.GSM_CHAR * 153 * 3 + self.GSM_CHAR)
        self.assertEqual(len(sms.to_pdu()), 4)

    def test_egsm_1(self):
        # 80 euro signs = 160 septets: still one part.
        sms = SmsSubmit(self.DEST, self.EGSM_CHAR * 80)
        self.assertEqual(len(sms.to_pdu()), 1)

    def test_egsm_2(self):
        # 79 * 2 + 1 = 159 septets: still one part.
        sms = SmsSubmit(self.DEST,
                        self.EGSM_CHAR * 79 + self.GSM_CHAR)
        self.assertEqual(len(sms.to_pdu()), 1)

    def test_egsm_3(self):
        sms = SmsSubmit(self.DEST, self.EGSM_CHAR * 153)  # 306 septets
        self.assertEqual(len(sms.to_pdu()), 3)

    def test_egsm_4(self):
        sms = SmsSubmit(self.DEST,
                        self.EGSM_CHAR * 229 + self.GSM_CHAR)  # 459 septets
        self.assertEqual(len(sms.to_pdu()), 4)

    def test_unicode_1(self):
        # UCS2: 70 chars fit in one part.
        sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 70)
        self.assertEqual(len(sms.to_pdu()), 1)

    def test_unicode_2(self):
        sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 70 + self.GSM_CHAR)
        self.assertEqual(len(sms.to_pdu()), 2)

    def test_unicode_3(self):
        # Concatenated UCS2 parts carry 67 chars each.
        sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 2)
        self.assertEqual(len(sms.to_pdu()), 2)

    def test_unicode_4(self):
        sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 2 + self.GSM_CHAR)
        self.assertEqual(len(sms.to_pdu()), 3)

    def test_unicode_5(self):
        sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 3)
        self.assertEqual(len(sms.to_pdu()), 3)

    def test_unicode_6(self):
        sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 3 + self.GSM_CHAR)
        self.assertEqual(len(sms.to_pdu()), 4)
class TestSmsDeliver(unittest.TestCase):
    """Decoding tests: raw SMS-DELIVER PDU strings parsed by SmsDeliver."""

    def test_decoding_7bit_pdu(self):
        # GSM 7-bit default-alphabet payload.
        pdu = "07911326040000F0040B911346610089F60000208062917314080CC8F71D14969741F977FD07"
        text = "How are you?"
        csca = "+31624000000"
        number = "+31641600986"
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.text, text)
        self.assertEqual(sms.csca, csca)
        self.assertEqual(sms.number, number)

    def test_decoding_ucs2_pdu(self):
        # UCS2 payload (DCS 08 in the PDU).
        pdu = "07914306073011F0040B914316709807F2000880604290224080084E2D5174901A8BAF"
        text = u"中兴通讯"
        csca = "+34607003110"
        number = "+34610789702"
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.text, text)
        self.assertEqual(sms.csca, csca)
        self.assertEqual(sms.number, number)

    def test_decoding_7bit_pdu_data(self):
        # Same PDU as test_decoding_7bit_pdu, read through the .data dict.
        pdu = "07911326040000F0040B911346610089F60000208062917314080CC8F71D14969741F977FD07"
        text = "How are you?"
        csca = "+31624000000"
        number = "+31641600986"
        data = SmsDeliver(pdu).data
        self.assertEqual(data['text'], text)
        self.assertEqual(data['csca'], csca)
        self.assertEqual(data['number'], number)
        self.assertEqual(data['pid'], 0)
        self.assertEqual(data['fmt'], 0)
        self.assertEqual(data['date'], datetime(2002, 8, 26, 19, 37, 41))

    def test_decoding_datetime_gmtplusone(self):
        # Decoded dates are normalised to UTC (17:41 local -> 16:41).
        pdu = "0791447758100650040C914497716247010000909010711423400A2050EC468B81C4733A"
        text = " 1741 bst"
        number = "+447917267410"
        date = datetime(2009, 9, 1, 16, 41, 32)
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.text, text)
        self.assertEqual(sms.number, number)
        self.assertEqual(sms.date, date)

    def test_decoding_datetime_gmtminusthree(self):
        pdu = "0791553001000001040491578800000190115101112979CF340B342F9FEBE536E83D0791C3E4F71C440E83E6F53068FE66A7C7697A781C7EBB4050F99BFE1EBFD96F1D48068BC16030182E66ABD560B41988FC06D1D3F03768FA66A7C7697A781C7E83CCEF34282C2ECBE96F50B90D8AC55EB0DC4B068BC140B1994E16D3D1622E"
        date = datetime(2010, 9, 11, 18, 10, 11)  # 11/09/10 15:10 GMT-3.00
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.date, date)

    def test_decoding_number_alphanumeric(self):
        # Alphanumeric sender addresses (7-bit packed) of odd/even length.
        # Odd length test
        pdu = "07919471060040340409D0C6A733390400009060920173018093CC74595C96838C4F6772085AD6DDE4320B444E9741D4B03C6D7EC3E9E9B71B9474D3CB727799DEA286CFE5B9991DA6CBC3F432E85E9793CBA0F09A9EB6A7CB72BA0B9474D3CB727799DE72D6E9FABAFB0CBAA7E56490BA4CD7D34170F91BE4ACD3F575F7794E0F9F4161F1B92C2F8FD1EE32DD054AA2E520E3D3991C82A8E5701B"
        number = "FONIC"
        text = "Lieber FONIC Kunde, die Tarifoption Internet-Tagesflatrate wurde aktiviert. Internet-Nutzung wird jetzt pro Nutzungstag abgerechnet. Ihr FONIC Team"
        csca = "+491760000443"
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.text, text)
        self.assertEqual(sms.csca, csca)
        self.assertEqual(sms.number, number)

        # Even length test
        pdu = "07919333852804000412D0F7FBDD454FB75D693A0000903002801153402BCD301E9F0605D9E971191483C140412A35690D52832063D2F9040599A058EE05A3BD6430580E"
        number = "www.tim.it"
        text = 'Maxxi Alice 100 ATTIVATA FINO AL 19/04/2009'
        csca = '+393358824000'
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.text, text)
        self.assertEqual(sms.csca, csca)
        self.assertEqual(sms.number, number)

    def test_decode_sms_confirmation(self):
        # Status-report style PDUs surface number == "SR-UNKNOWN".
        pdu = "07914306073011F006270B913426565711F7012081111345400120811174054043"
        csca = "+34607003110"
        date = datetime(2010, 2, 18, 11, 31, 54)
        number = "SR-UNKNOWN"
        # XXX: the number should be +344626575117, is the prefix flipped ?
        text = "+43626575117|10/02/18 11:31:54|"
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.text, text)
        self.assertEqual(sms.csca, csca)
        self.assertEqual(sms.number, number)
        self.assertEqual(sms.date, date)

        # weird sms confirmation
        pdu = "07914306073011F001000B914306565711F9000007F0B2FC0DCABF01"
        csca = "+34607003110"
        number = "SR-UNKNOWN"
        sms = SmsDeliver(pdu)
        self.assertEqual(sms.csca, csca)
        self.assertEqual(sms.number, number)

    def test_decode_weird_multipart_german_pdu(self):
        # Two-part concatenated message; UDH concat metadata must agree
        # across parts (same ref, increasing seq, cnt == number of parts).
        pdus = [
            "07919471227210244405852122F039F101506271217180A005000319020198E9B2B82C0759DFE4B0F9ED2EB7967537B9CC02B5D37450122D2FCB41EE303DFD7687D96537881A96A7CD6F383DFD7683F46134BBEC064DD36550DA0D22A7CBF3721BE42CD3F5A0198B56036DCA20B8FC0D6A0A4170767D0EAAE540433A082E7F83A6E5F93CFD76BB40D7B2DB0D9AA6CB2072BA3C2F83926EF31BE44E8FD17450BB8C9683CA",
            "07919471227210244405852122F039F1015062712181804F050003190202E4E8309B5E7683DAFC319A5E76B340F73D9A5D7683A6E93268FD9ED3CB6EF67B0E5AD172B19B2C2693C9602E90355D6683A6F0B007946E8382F5393BEC26BB00",
        ]
        texts = [
            u"Lieber Vodafone-Kunde, mit Ihrer nationalen Tarifoption zahlen Sie in diesem Netz 3,45 € pro MB plus 59 Ct pro Session. Wenn Sie diese Info nicht mehr e",
            u"rhalten möchten, wählen Sie kostenlos +4917212220. Viel Spaß im Ausland.",
        ]
        for i, sms in enumerate(map(SmsDeliver, pdus)):
            self.assertEqual(sms.text, texts[i])
            self.assertEqual(sms.udh.concat.cnt, len(pdus))
            self.assertEqual(sms.udh.concat.seq, i + 1)
            self.assertEqual(sms.udh.concat.ref, 25)

    def test_decoding_odd_length_pdu_strict_raises_valueerror(self):
        # Truncated PDU must be rejected in (default) strict mode.
        # same pdu as in test_decoding_number_alpha1 minus last char
        pdu = "07919471060040340409D0C6A733390400009060920173018093CC74595C96838C4F6772085AD6DDE4320B444E9741D4B03C6D7EC3E9E9B71B9474D3CB727799DEA286CFE5B9991DA6CBC3F432E85E9793CBA0F09A9EB6A7CB72BA0B9474D3CB727799DE72D6E9FABAFB0CBAA7E56490BA4CD7D34170F91BE4ACD3F575F7794E0F9F4161F1B92C2F8FD1EE32DD054AA2E520E3D3991C82A8E5701"
        self.assertRaises(ValueError, SmsDeliver, pdu)

    def test_decoding_odd_length_pdu_no_strict(self):
        # With strict=False the truncated tail is tolerated and the text is
        # decoded up to the last complete character.
        # same pdu as in test_decoding_number_alpha1 minus last char
        pdu = "07919471060040340409D0C6A733390400009060920173018093CC74595C96838C4F6772085AD6DDE4320B444E9741D4B03C6D7EC3E9E9B71B9474D3CB727799DEA286CFE5B9991DA6CBC3F432E85E9793CBA0F09A9EB6A7CB72BA0B9474D3CB727799DE72D6E9FABAFB0CBAA7E56490BA4CD7D34170F91BE4ACD3F575F7794E0F9F4161F1B92C2F8FD1EE32DD054AA2E520E3D3991C82A8E5701"
        text = "Lieber FONIC Kunde, die Tarifoption Internet-Tagesflatrate wurde aktiviert. Internet-Nutzung wird jetzt pro Nutzungstag abgerechnet. Ihr FONIC Tea"
        sms = SmsDeliver(pdu, strict=False)
        self.assertEqual(sms.text, text)
| # -*- coding: utf-8 -*-
from datetime import datetime, timedelta
import unittest
from messaging.sms import SmsSubmit, SmsDeliver
from messaging.utils import (timedelta_to_relative_validity as to_relative,
datetime_to_absolute_validity as to_absolute,
FixedOffset)
class TestEncodingFunctions(unittest.TestCase):
def test_converting_timedelta_to_validity(self):
self.assertEqual(to_relative(timedelta(minutes=5)), 0)
self.assertEqual(to_relative(timedelta(minutes=6)), 0)
self.assertEqual(to_relative(timedelta(minutes=10)), 1)
self.assertEqual(to_relative(timedelta(hours=12)), 143)
self.assertEqual(to_relative(timedelta(hours=13)), 145)
self.assertEqual(to_relative(timedelta(hours=24)), 167)
self.assertEqual(to_relative(timedelta(days=2)), 168)
self.assertEqual(to_relative(timedelta(days=30)), 196)
def test_converting_datetime_to_validity(self):
# http://www.dreamfabric.com/sms/scts.html
# 12. Feb 1999 05:57:30 GMT+3
when = datetime(1999, 2, 12, 5, 57, 30, 0,
FixedOffset(3 * 60, "GMT+3"))
expected = [0x99, 0x20, 0x21, 0x50, 0x75, 0x03, 0x21]
self.assertEqual(to_absolute(when, "GMT+3"), expected)
when = datetime(1999, 2, 12, 5, 57, 30, 0)
expected = [0x99, 0x20, 0x21, 0x50, 0x75, 0x03, 0x0]
self.assertEqual(to_absolute(when, "UTC"), expected)
when = datetime(1999, 2, 12, 5, 57, 30, 0,
FixedOffset(-3 * 60, "GMT-3"))
expected = [0x99, 0x20, 0x21, 0x50, 0x75, 0x03, 0x29]
self.assertEqual(to_absolute(when, "GMT-3"), expected)
class TestSmsSubmit(unittest.TestCase):
def test_encoding_validity(self):
# no validity
number = "+34616585119"
text = "hola"
expected = "0001000B914316565811F9000004E8373B0C"
sms = SmsSubmit(number, text)
sms.ref = 0x0
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
# absolute validity
number = "+34616585119"
text = "hola"
expected = "0019000B914316565811F900000170520251930004E8373B0C"
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.validity = datetime(2010, 7, 25, 20, 15, 39)
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
# relative validity
number = "+34616585119"
text = "hola"
expected = "0011000B914316565811F90000AA04E8373B0C"
expected_len = 18
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.validity = timedelta(days=4)
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
self.assertEqual(pdu.length, expected_len)
def test_encoding_csca(self):
number = "+34616585119"
text = "hola"
csca = "+34646456456"
expected = "07914346466554F601000B914316565811F9000004E8373B0C"
expected_len = 17
sms = SmsSubmit(number, text)
sms.csca = csca
sms.ref = 0x0
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
self.assertEqual(pdu.length, expected_len)
self.assertEqual(pdu.cnt, 1)
self.assertEqual(pdu.seq, 1)
def test_encoding_class(self):
number = "+34654123456"
text = "hey yo"
expected_0 = "0001000B914356143254F6001006E8721E947F03"
expected_1 = "0001000B914356143254F6001106E8721E947F03"
expected_2 = "0001000B914356143254F6001206E8721E947F03"
expected_3 = "0001000B914356143254F6001306E8721E947F03"
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.klass = 0
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected_0)
sms.klass = 1
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected_1)
sms.klass = 2
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected_2)
sms.klass = 3
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected_3)
def test_encoding_request_status(self):
# tested with pduspy.exe and http://www.rednaxela.net/pdu.php
number = "+34654123456"
text = "hey yo"
expected = "0021000B914356143254F6000006E8721E947F03"
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.request_status = True
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
def test_encoding_message_with_latin1_chars(self):
# tested with pduspy.exe
number = "+34654123456"
text = u"Hölä"
expected = "0011000B914356143254F60000AA04483E7B0F"
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.validity = timedelta(days=4)
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
# tested with pduspy.exe
number = "+34654123456"
text = u"BÄRÇA äñ@"
expected = "0001000B914356143254F6000009C2AD341104EDFB00"
sms = SmsSubmit(number, text)
sms.ref = 0x0
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
def test_encoding_8bit_message(self):
number = "01000000000"
csca = "+44000000000"
text = "Hi there..."
expected = "07914400000000F001000B811000000000F000040B48692074686572652E2E2E"
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.csca = csca
sms.fmt = 0x04 # 8 bits
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
def test_encoding_ucs2_message(self):
number = "+34616585119"
text = u'あ叶葉'
csca = '+34646456456'
expected = "07914346466554F601000B914316565811F9000806304253F68449"
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.csca = csca
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
text = u"Русский"
number = "655345678"
expected = "001100098156355476F80008AA0E0420044304410441043A04380439"
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.validity = timedelta(days=4)
pdu = sms.to_pdu()[0]
self.assertEqual(pdu.pdu, expected)
def test_encoding_multipart_7bit(self):
# text encoded with umts-tools
text = "Or walk with Kings - nor lose the common touch, if neither foes nor loving friends can hurt you, If all men count with you, but none too much; If you can fill the unforgiving minute With sixty seconds' worth of distance run, Yours is the Earth and everything thats in it, And - which is more - you will be a Man, my son"
number = "655345678"
expected = [
"005100098156355476F80000AAA00500038803019E72D03DCC5E83EE693A1AB44CBBCF73500BE47ECB41ECF7BC0CA2A3CBA0F1BBDD7EBB41F4777D8C6681D26690BB9CA6A3CB7290F95D9E83DC6F3988FDB6A7DD6790599E2EBBC973D038EC06A1EB723A28FFAEB340493328CC6683DA653768FCAEBBE9A07B9A8E06E5DF7516485CA783DC6F7719447FBF41EDFA18BD0325CDA0FCBB0E1A87DD",
"005100098156355476F80000AAA005000388030240E6349B0DA2A3CBA0BADBFC969FD3F6B4FB0C6AA7DD757A19744DD3D1A0791A4FCF83E6E5F1DB4D9E9F40F7B79C8E06BDCD20727A4E0FBBC76590BCEE6681B2EFBA7C0E4ACF41747419540CCBE96850D84D0695ED65799E8E4EBBCF203A3A4C9F83D26E509ACE0205DD64500B7447A7C768507A0E6ABFE565500B947FD741F7349B0D129741",
"005100098156355476F80000AA14050003880303C2A066D8CD02B5F3A0F9DB0D",
]
sms = SmsSubmit(number, text)
sms.ref = 0x0
sms.rand_id = 136
sms.validity = timedelta(days=4)
ret = sms.to_pdu()
cnt = len(ret)
for i, pdu in enumerate(ret):
self.assertEqual(pdu.pdu, expected[i])
self.assertEqual(pdu.seq, i + 1)
self.assertEqual(pdu.cnt, cnt)
def test_encoding_bad_number_raises_error(self):
self.assertRaises(ValueError, SmsSubmit, "032BADNUMBER", "text")
def test_encoding_bad_csca_raises_error(self):
sms = SmsSubmit("54342342", "text")
self.assertRaises(ValueError, setattr, sms, 'csca', "1badcsca")
class TestSubmitPduCounts(unittest.TestCase):
DEST = "+3530000000"
GSM_CHAR = "x"
EGSM_CHAR = u"€"
UNICODE_CHAR = u"ő"
def test_gsm_1(self):
sms = SmsSubmit(self.DEST, self.GSM_CHAR * 160)
self.assertEqual(len(sms.to_pdu()), 1)
def test_gsm_2(self):
sms = SmsSubmit(self.DEST, self.GSM_CHAR * 161)
self.assertEqual(len(sms.to_pdu()), 2)
def test_gsm_3(self):
sms = SmsSubmit(self.DEST, self.GSM_CHAR * 153 * 2)
self.assertEqual(len(sms.to_pdu()), 2)
def test_gsm_4(self):
sms = SmsSubmit(self.DEST,
self.GSM_CHAR * 153 * 2 + self.GSM_CHAR)
self.assertEqual(len(sms.to_pdu()), 3)
def test_gsm_5(self):
sms = SmsSubmit(self.DEST, self.GSM_CHAR * 153 * 3)
self.assertEqual(len(sms.to_pdu()), 3)
def test_gsm_6(self):
sms = SmsSubmit(self.DEST,
self.GSM_CHAR * 153 * 3 + self.GSM_CHAR)
self.assertEqual(len(sms.to_pdu()), 4)
def test_egsm_1(self):
sms = SmsSubmit(self.DEST, self.EGSM_CHAR * 80)
self.assertEqual(len(sms.to_pdu()), 1)
def test_egsm_2(self):
sms = SmsSubmit(self.DEST,
self.EGSM_CHAR * 79 + self.GSM_CHAR)
self.assertEqual(len(sms.to_pdu()), 1)
def test_egsm_3(self):
sms = SmsSubmit(self.DEST, self.EGSM_CHAR * 153) # 306 septets
self.assertEqual(len(sms.to_pdu()), 3)
def test_egsm_4(self):
sms = SmsSubmit(self.DEST,
self.EGSM_CHAR * 229 + self.GSM_CHAR) # 459 septets
self.assertEqual(len(sms.to_pdu()), 4)
def test_unicode_1(self):
sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 70)
self.assertEqual(len(sms.to_pdu()), 1)
def test_unicode_2(self):
sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 70 + self.GSM_CHAR)
self.assertEqual(len(sms.to_pdu()), 2)
def test_unicode_3(self):
sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 2)
self.assertEqual(len(sms.to_pdu()), 2)
def test_unicode_4(self):
sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 2 + self.GSM_CHAR)
self.assertEqual(len(sms.to_pdu()), 3)
def test_unicode_5(self):
sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 3)
self.assertEqual(len(sms.to_pdu()), 3)
def test_unicode_6(self):
sms = SmsSubmit(self.DEST, self.UNICODE_CHAR * 67 * 3 + self.GSM_CHAR)
self.assertEqual(len(sms.to_pdu()), 4)
class TestSmsDeliver(unittest.TestCase):
def test_decoding_7bit_pdu(self):
pdu = "07911326040000F0040B911346610089F60000208062917314080CC8F71D14969741F977FD07"
text = "How are you?"
csca = "+31624000000"
number = "+31641600986"
sms = SmsDeliver(pdu)
self.assertEqual(sms.text, text)
self.assertEqual(sms.csca, csca)
self.assertEqual(sms.number, number)
def test_decoding_ucs2_pdu(self):
pdu = "07914306073011F0040B914316709807F2000880604290224080084E2D5174901A8BAF"
text = u"中兴通讯"
csca = "+34607003110"
number = "+34610789702"
sms = SmsDeliver(pdu)
self.assertEqual(sms.text, text)
self.assertEqual(sms.csca, csca)
self.assertEqual(sms.number, number)
def test_decoding_7bit_pdu_data(self):
pdu = "07911326040000F0040B911346610089F60000208062917314080CC8F71D14969741F977FD07"
text = "How are you?"
csca = "+31624000000"
number = "+31641600986"
data = SmsDeliver(pdu).data
self.assertEqual(data['text'], text)
self.assertEqual(data['csca'], csca)
self.assertEqual(data['number'], number)
self.assertEqual(data['pid'], 0)
self.assertEqual(data['fmt'], 0)
self.assertEqual(data['date'], datetime(2002, 8, 26, 19, 37, 41))
def test_decoding_datetime_gmtplusone(self):
pdu = "0791447758100650040C914497716247010000909010711423400A2050EC468B81C4733A"
text = " 1741 bst"
number = "+447917267410"
date = datetime(2009, 9, 1, 16, 41, 32)
sms = SmsDeliver(pdu)
self.assertEqual(sms.text, text)
self.assertEqual(sms.number, number)
self.assertEqual(sms.date, date)
def test_decoding_datetime_gmtminusthree(self):
pdu = "0791553001000001040491578800000190115101112979CF340B342F9FEBE536E83D0791C3E4F71C440E83E6F53068FE66A7C7697A781C7EBB4050F99BFE1EBFD96F1D48068BC16030182E66ABD560B41988FC06D1D3F03768FA66A7C7697A781C7E83CCEF34282C2ECBE96F50B90D8AC55EB0DC4B068BC140B1994E16D3D1622E"
date = datetime(2010, 9, 11, 18, 10, 11) # 11/09/10 15:10 GMT-3.00
sms = SmsDeliver(pdu)
self.assertEqual(sms.date, date)
def test_decoding_number_alphanumeric(self):
# Odd length test
pdu = "07919471060040340409D0C6A733390400009060920173018093CC74595C96838C4F6772085AD6DDE4320B444E9741D4B03C6D7EC3E9E9B71B9474D3CB727799DEA286CFE5B9991DA6CBC3F432E85E9793CBA0F09A9EB6A7CB72BA0B9474D3CB727799DE72D6E9FABAFB0CBAA7E56490BA4CD7D34170F91BE4ACD3F575F7794E0F9F4161F1B92C2F8FD1EE32DD054AA2E520E3D3991C82A8E5701B"
number = "FONIC"
text = "Lieber FONIC Kunde, die Tarifoption Internet-Tagesflatrate wurde aktiviert. Internet-Nutzung wird jetzt pro Nutzungstag abgerechnet. Ihr FONIC Team"
csca = "+491760000443"
sms = SmsDeliver(pdu)
self.assertEqual(sms.text, text)
self.assertEqual(sms.csca, csca)
self.assertEqual(sms.number, number)
# Even length test
pdu = "07919333852804000412D0F7FBDD454FB75D693A0000903002801153402BCD301E9F0605D9E971191483C140412A35690D52832063D2F9040599A058EE05A3BD6430580E"
number = "www.tim.it"
text = 'Maxxi Alice 100 ATTIVATA FINO AL 19/04/2009'
csca = '+393358824000'
sms = SmsDeliver(pdu)
self.assertEqual(sms.text, text)
self.assertEqual(sms.csca, csca)
self.assertEqual(sms.number, number)
def test_decode_sms_confirmation(self):
pdu = "07914306073011F006270B913426565711F7012081111345400120811174054043"
csca = "+34607003110"
date = datetime(2010, 2, 18, 11, 31, 54)
number = "SR-UNKNOWN"
# XXX: the number should be +344626575117, is the prefix flipped ?
text = "+43626575117|10/02/18 11:31:54|"
sms = SmsDeliver(pdu)
self.assertEqual(sms.text, text)
self.assertEqual(sms.csca, csca)
self.assertEqual(sms.number, number)
self.assertEqual(sms.date, date)
# weird sms confirmation
pdu = "07914306073011F001000B914306565711F9000007F0B2FC0DCABF01"
csca = "+34607003110"
number = "SR-UNKNOWN"
sms = SmsDeliver(pdu)
self.assertEqual(sms.csca, csca)
self.assertEqual(sms.number, number)
def test_decode_weird_multipart_german_pdu(self):
pdus = [
"07919471227210244405852122F039F101506271217180A005000319020198E9B2B82C0759DFE4B0F9ED2EB7967537B9CC02B5D37450122D2FCB41EE303DFD7687D96537881A96A7CD6F383DFD7683F46134BBEC064DD36550DA0D22A7CBF3721BE42CD3F5A0198B56036DCA20B8FC0D6A0A4170767D0EAAE540433A082E7F83A6E5F93CFD76BB40D7B2DB0D9AA6CB2072BA3C2F83926EF31BE44E8FD17450BB8C9683CA",
"07919471227210244405852122F039F1015062712181804F050003190202E4E8309B5E7683DAFC319A5E76B340F73D9A5D7683A6E93268FD9ED3CB6EF67B0E5AD172B19B2C2693C9602E90355D6683A6F0B007946E8382F5393BEC26BB00",
]
texts = [
u"Lieber Vodafone-Kunde, mit Ihrer nationalen Tarifoption zahlen Sie in diesem Netz 3,45 € pro MB plus 59 Ct pro Session. Wenn Sie diese Info nicht mehr e",
u"rhalten möchten, wählen Sie kostenlos +4917212220. Viel Spaß im Ausland.",
]
for i, sms in enumerate(map(SmsDeliver, pdus)):
self.assertEqual(sms.text, texts[i])
self.assertEqual(sms.udh.concat.cnt, len(pdus))
self.assertEqual(sms.udh.concat.seq, i + 1)
self.assertEqual(sms.udh.concat.ref, 25)
def test_decoding_odd_length_pdu_strict_raises_valueerror(self):
# same pdu as in test_decoding_number_alpha1 minus last char
pdu = "07919471060040340409D0C6A733390400009060920173018093CC74595C96838C4F6772085AD6DDE4320B444E9741D4B03C6D7EC3E9E9B71B9474D3CB727799DEA286CFE5B9991DA6CBC3F432E85E9793CBA0F09A9EB6A7CB72BA0B9474D3CB727799DE72D6E9FABAFB0CBAA7E56490BA4CD7D34170F91BE4ACD3F575F7794E0F9F4161F1B92C2F8FD1EE32DD054AA2E520E3D3991C82A8E5701"
self.assertRaises(ValueError, SmsDeliver, pdu)
def test_decoding_odd_length_pdu_no_strict(self):
# same pdu as in test_decoding_number_alpha1 minus last char
pdu = "07919471060040340409D0C6A733390400009060920173018093CC74595C96838C4F6772085AD6DDE4320B444E9741D4B03C6D7EC3E9E9B71B9474D3CB727799DEA286CFE5B9991DA6CBC3F432E85E9793CBA0F09A9EB6A7CB72BA0B9474D3CB727799DE72D6E9FABAFB0CBAA7E56490BA4CD7D34170F91BE4ACD3F575F7794E0F9F4161F1B92C2F8FD1EE32DD054AA2E520E3D3991C82A8E5701"
text = "Lieber FONIC Kunde, die Tarifoption Internet-Tagesflatrate wurde aktiviert. Internet-Nutzung wird jetzt pro Nutzungstag abgerechnet. Ihr FONIC Tea"
sms = SmsDeliver(pdu, strict=False)
self.assertEqual(sms.text, text)
def test_decoding_delivery_status_report(self):
pdu = "0791538375000075061805810531F1019082416500400190824165004000"
sr = {
'status': 0,
'scts': datetime(2010, 9, 28, 14, 56),
'dt': datetime(2010, 9, 28, 14, 56),
'recipient': '50131'
}
sms = SmsDeliver(pdu)
self.assertEqual(sms.csca, "+353857000057")
data = sms.data
self.assertEqual(data['ref'], 24)
self.assertEqual(sms.sr, sr)
def test_decoding_delivery_status_report_without_smsc_address(self):
pdu = "00060505810531F1010150610000400101506100004000"
sr = {
'status': 0,
'scts': datetime(2010, 10, 5, 16, 0),
'dt': datetime(2010, 10, 5, 16, 0),
'recipient': '50131'
}
sms = SmsDeliver(pdu)
self.assertEqual(sms.csca, None)
data = sms.data
self.assertEqual(data['ref'], 5)
self.assertEqual(sms.sr, sr)
|
ericpaulbishop/redmine_git_hosting | 180 | lib/git_hosting/patches/git_adapter_patch.rb | module GitHosting
module Patches
module GitAdapterPatch
def self.included(base)
base.class_eval do
unloadable
end
begin
base.send(:alias_method_chain, :scm_cmd, :sudo)
rescue Exception =>e
end
base.extend(ClassMethods)
base.class_eval do
class << self
alias_method_chain :sq_bin, :sudo
begin
alias_method_chain :client_command, :sudo
rescue Exception =>e
end
end
end
end
module ClassMethods
def sq_bin_with_sudo
return Redmine::Scm::Adapters::GitAdapter::shell_quote(GitHosting::git_exec())
end
def client_command_with_sudo
return GitHosting::git_exec()
end
end
def scm_cmd_with_sudo(*args, &block)
args.each do |a|
a.gsub!(/^\.\-\w_\:]/, '')
end
max_cache_time = (Setting.plugin_redmine_git_hosting['gitCacheMaxTime']).to_i # in seconds, default = 60
max_cache_elements = (Setting.plugin_redmine_git_hosting['gitCacheMaxElements']).to_i # default = 100
max_cache_size = (Setting.plugin_redmine_git_hosting['gitCacheMaxSize']).to_i*1024*1024 # In MB, default = 16MB, converted to bytes
repo_path = root_url || url
full_args = [GitHosting::git_exec(), '--git-dir', repo_path]
if self.class.client_version_above?([1, 7, 2])
full_args << '-c' << 'core.quotepath=false'
full_args << '-c' << 'log.decorate=no'
end
full_args += args
cmd_str=full_args.map { |e| shell_quote e.to_s }.join(' ')
out=nil
retio = nil
cached=GitCache.find_by_command(cmd_str)
if cached != nil
cur_time = ActiveRecord::Base.default_timezone == :utc ? Time.now.utc : Time.now
if cur_time.to_i - cached.created_at.to_i < max_cache_time || max_cache_time < 0
out = cached.command_output == nil ? "" : cached.command_output
#File.open("/tmp/command_output.txt", "a") { |f| f.write("COMMAND:#{cmd_str}\n#{out}\n") }
else
GitCache.destroy(cached.id)
end
end
if out == nil
shellout(cmd_str) do |io|
out = io.read(max_cache_size + 1)
end
out = out == nil ? "" : out
if $? && $?.exitstatus != 0
raise Redmine::Scm::Adapters::GitAdapter::ScmCommandAborted, "git exited with non-zero status: #{$?.exitstatus}"
elsif out.length <= max_cache_size
proj_id=repo_path.gsub(/\.git$/, "").gsub(/^.*\//, "")
gitc = GitCache.create( :command=>cmd_str, :command_output=>out, :proj_identifier=>proj_id )
gitc.save
if GitCache.count > max_cache_elements && max_cache_elements >= 0
oldest = GitCache.find(:last, :order => "created_at DESC")
GitCache.destroy(oldest.id)
end
#File.open("/tmp/non_cached.txt", "a") { |f| f.write("COMMAND:#{cmd_str}\n#{out}\n") }
else
retio = shellout(cmd_str, &block)
if $? && $?.exitstatus != 0
raise Redmine::Scm::Adapters::GitAdapter::ScmCommandAborted, "git exited with non-zero status: #{$?.exitstatus}"
end
end
end
if retio == nil
retio = StringIO.new(string=out)
if block_given?
block.call(retio)
end
end
retio
end
end
end
end
| module GitHosting
module Patches
module GitAdapterPatch
def self.included(base)
base.class_eval do
unloadable
end
begin
base.send(:alias_method_chain, :scm_cmd, :sudo)
rescue Exception =>e
end
base.extend(ClassMethods)
base.class_eval do
class << self
alias_method_chain :sq_bin, :sudo
begin
alias_method_chain :client_command, :sudo
rescue Exception =>e
end
end
end
end
module ClassMethods
def sq_bin_with_sudo
return Redmine::Scm::Adapters::GitAdapter::shell_quote(GitHosting::git_exec())
end
def client_command_with_sudo
return GitHosting::git_exec()
end
end
def scm_cmd_with_sudo(*args, &block)
args.each do |a|
a.gsub!(/^\.\-\w_\:\]/, '')
end
max_cache_time = (Setting.plugin_redmine_git_hosting['gitCacheMaxTime']).to_i # in seconds, default = 60
max_cache_elements = (Setting.plugin_redmine_git_hosting['gitCacheMaxElements']).to_i # default = 100
max_cache_size = (Setting.plugin_redmine_git_hosting['gitCacheMaxSize']).to_i*1024*1024 # In MB, default = 16MB, converted to bytes
repo_path = root_url || url
full_args = [GitHosting::git_exec(), '--git-dir', repo_path]
if self.class.client_version_above?([1, 7, 2])
full_args << '-c' << 'core.quotepath=false'
full_args << '-c' << 'log.decorate=no'
end
full_args += args
cmd_str=full_args.map { |e| shell_quote e.to_s }.join(' ')
out=nil
retio = nil
cached=GitCache.find_by_command(cmd_str)
if cached != nil
cur_time = ActiveRecord::Base.default_timezone == :utc ? Time.now.utc : Time.now
if cur_time.to_i - cached.created_at.to_i < max_cache_time || max_cache_time < 0
out = cached.command_output == nil ? "" : cached.command_output
#File.open("/tmp/command_output.txt", "a") { |f| f.write("COMMAND:#{cmd_str}\n#{out}\n") }
else
GitCache.destroy(cached.id)
end
end
if out == nil
shellout(cmd_str) do |io|
out = io.read(max_cache_size + 1)
end
out = out == nil ? "" : out
if $? && $?.exitstatus != 0
raise Redmine::Scm::Adapters::GitAdapter::ScmCommandAborted, "git exited with non-zero status: #{$?.exitstatus}"
elsif out.length <= max_cache_size
proj_id=repo_path.gsub(/\.git$/, "").gsub(/^.*\//, "")
gitc = GitCache.create( :command=>cmd_str, :command_output=>out, :proj_identifier=>proj_id )
gitc.save
if GitCache.count > max_cache_elements && max_cache_elements >= 0
oldest = GitCache.find(:last, :order => "created_at DESC")
GitCache.destroy(oldest.id)
end
#File.open("/tmp/non_cached.txt", "a") { |f| f.write("COMMAND:#{cmd_str}\n#{out}\n") }
else
retio = shellout(cmd_str, &block)
if $? && $?.exitstatus != 0
raise Redmine::Scm::Adapters::GitAdapter::ScmCommandAborted, "git exited with non-zero status: #{$?.exitstatus}"
end
end
end
if retio == nil
retio = StringIO.new(string=out)
if block_given?
block.call(retio)
end
end
retio
end
end
end
end
|
rakudo/rakudo | 5,651 | src/vm/moar/runner/main.c | #include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <uv.h>
#include <moar.h>
#ifdef _WIN32
# include <sys/types.h>
# include <sys/stat.h>
# include <process.h>
# include <shlwapi.h>
# include <io.h>
# if defined(_MSC_VER)
# define strtoll _strtoi64
# endif
#else
# include <sys/stat.h>
# include <libgen.h>
# include <unistd.h>
# include "signal.h"
#endif
#define STRINGIFY1(x) #x
#define STRINGIFY(x) STRINGIFY1(x)
/* flags need to be sorted alphabetically */
enum {
NOT_A_FLAG = -2,
UNKNOWN_FLAG = -1,
FLAG_SUSPEND,
FLAG_FULL_CLEANUP,
FLAG_TRACING,
OPT_DEBUGPORT,
OPT_RAKUDO_HOME
};
static const char *const FLAGS[] = {
"--debug-suspend",
"--full-cleanup",
"--tracing",
};
static int cmp_flag(const void *key, const void *value) {
return strcmp(key, *(char **)value);
}
static int starts_with(const char *str, const char *want) {
size_t str_len = strlen(str);
size_t want_len = strlen(want);
return str_len < want_len
? 0
: strncmp(str, want, want_len) == 0;
}
static int parse_flag(const char *arg)
{
const char *const *found;
if (!arg || arg[0] != '-')
return NOT_A_FLAG;
found = bsearch(arg, FLAGS, sizeof FLAGS / sizeof *FLAGS, sizeof *FLAGS, cmp_flag);
if (found)
return (int)(found - FLAGS);
else if (starts_with(arg, "--debug-port="))
return OPT_DEBUGPORT;
else if (starts_with(arg, "--rakudo-home="))
return OPT_RAKUDO_HOME;
else
return UNKNOWN_FLAG;
}
int file_exists(const char *path) {
#ifdef _WIN32
int res;
struct _stat sb;
const int len = MultiByteToWideChar(CP_UTF8, 0, path, -1, NULL, 0);
wchar_t * const wpath = (wchar_t *)malloc(len * sizeof(wchar_t));
MultiByteToWideChar(CP_UTF8, 0, path, -1, (LPWSTR)wpath, len);
res = _wstat(wpath, &sb);
free(wpath);
return res == 0;
#else
struct stat *sb = malloc(sizeof(struct stat));
int res = stat(path, sb) == 0;
free(sb);
return res;
#endif
}
void platformify_path(char *path) {
#ifdef _WIN32
int i;
for (i = 0; path[i]; i++) {
if (path[i] == '/') {
path[i] = '\\';
}
}
#endif
}
int retrieve_home(
char **out_home,
const char *rel_home,
const size_t rel_home_size,
const char *env_var,
char *exec_dir_path,
size_t exec_dir_path_size,
const char *check_file,
const size_t check_file_size,
char *static_home,
char *options_home
) {
char *check_file_path;
size_t home_size;
int ret;
char *env_home = getenv(env_var);
if (options_home) {
*out_home = options_home;
home_size = strlen(*out_home);
}
else if (env_home) {
home_size = strlen(env_home);
*out_home = (char*)malloc(home_size + 1);
strcpy(*out_home, env_home);
#ifdef _WIN32
if (*(*out_home + home_size - 1) == '\\') {
#else
if (*(*out_home + home_size - 1) == '/') {
#endif
*(*out_home + home_size - 1) = '\0';
home_size--;
}
}
else if (static_home) {
*out_home = static_home;
home_size = strlen(*out_home);
}
else {
home_size = exec_dir_path_size + rel_home_size;
*out_home = (char*)malloc(home_size + 1);
strncpy(*out_home, exec_dir_path, home_size);
strncat(*out_home, rel_home, rel_home_size);
platformify_path(*out_home + exec_dir_path_size);
}
check_file_path = (char*)malloc(home_size + check_file_size + 1);
strncpy(check_file_path, *out_home, home_size + check_file_size);
strncat(check_file_path, check_file, check_file_size);
ret = file_exists(check_file_path);
free(check_file_path);
return ret;
}
#if defined(_WIN32) && defined(SUBSYSTEM_WINDOWS)
int set_std_handle_to_nul(FILE *file, int fd, BOOL read, int std_handle_type) {
/* Found on https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/get-osfhandle?view=vs-2019:
"When stdin, stdout, and stderr aren't associated with a stream (for example, in a Windows
application without a console window), the file descriptor values for these streams are
returned from _fileno as the special value -2. Similarly, if you use a 0, 1, or 2 as the
file descriptor parameter instead of the result of a call to _fileno, _get_osfhandle also
returns the special value -2 when the file descriptor is not associated with a stream, and
does not set errno. However, this is not a valid file handle value, and subsequent calls
that attempt to use it are likely to fail."
See https://jdebp.eu/FGA/redirecting-standard-io.html
https://stackoverflow.com/a/50358201 (Especially the comments of Eryk Sun)
*/
FILE *stream;
HANDLE new_handle;
if (_fileno(file) != -2 || _get_osfhandle(fd) != -2)
// The handles are initialized. Don't touch!
return 1;
/* FD 1 is in an error state (_get_osfhandle(1) == -2). Close it. The FD number is up for grabs
after this call. */
if (_close(fd) != 0)
return 0;
/* FILE *stdout is in an error state (_fileno(stdout) == -2). Reopen it to a "NUL:" file. This
will take the next free FD number. So it's important to call this sequentially for FD 0, 1
and 2. */
if (freopen_s(&stream, "NUL:", read ? "r" : "w", file) != 0)
return 0;
/* Set the underlying Windows handle as the STD handler. */
new_handle = (HANDLE)_get_osfhandle(fd);
if (!SetStdHandle(std_handle_type, new_handle))
return 0;
return 1;
}
#endif
#if defined(_WIN32) && defined(SUBSYSTEM_WINDOWS)
int wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPWSTR lpCmdLine, INT nCmdShow) {
int argc;
LPWSTR *wargv = CommandLineToArgvW(GetCommandLineW(), &argc);
char **argv = MVM_UnicodeToUTF8_argv(argc, wargv);
LocalFree(wargv);
#elif defined(_WIN32) && !defined(SUBSYSTEM_WINDOWS)
int wmain(int argc, wchar_t *wargv[]) {
char **argv = MVM_UnicodeToUTF8_argv(argc, wargv);
#else
int main(int argc, char *argv[]) {
#endif
MVMInstance *instance;
char *exec_path;
size_t exec_path_size;
char *exec_dir_path_temp;
char *exec_dir_path;
size_t exec_dir_path_size;
char *nqp_home;
size_t nqp_home_size;
char *static_nqp_home = 0;
const char nqp_rel_path[14] = "/../share/nqp";
const size_t nqp_rel_path_size = 13;
const char nqp_check_path[28] = "/lib/NQPCORE.setting.moarvm";
const size_t nqp_check_path_size = 27;
char *rakudo_home;
size_t rakudo_home_size;
char *static_rakudo_home = 0;
char *option_rakudo_home = 0;
const char perl6_rel_path[16] = "/../share/perl6";
const size_t perl6_rel_path_size = 15;
const char perl6_check_path[22] = "/runtime/perl6.moarvm";
const size_t perl6_check_path_size = 21;
char *lib_path[3];
char *perl6_file;
int full_cleanup = 0;
int argi = 1;
int flag;
int new_argc = 0;
MVMuint32 debugserverport = 0;
int start_suspended = 0;
#if defined(__APPLE__) || defined(__Darwin__)
if (getenv("RAKUDO_DYLD_LIBRARY_PATH")) {
setenv("DYLD_LIBRARY_PATH", getenv("RAKUDO_DYLD_LIBRARY_PATH"), 0);
}
#endif
#if defined(_WIN32) && defined(SUBSYSTEM_WINDOWS)
/* When using the 'windows' subsystem the standard IO handles are not
connected. This causes a program abort when accessing the handles. To
prevent these aborts, we redirect the handles to NUL in this case.
*/
/* Set our own handles. */
if (!set_std_handle_to_nul(stdin, 0, 1, STD_INPUT_HANDLE)) return EXIT_FAILURE;
if (!set_std_handle_to_nul(stdout, 1, 0, STD_OUTPUT_HANDLE)) return EXIT_FAILURE;
if (!set_std_handle_to_nul(stderr, 2, 0, STD_ERROR_HANDLE)) return EXIT_FAILURE;
/* MoarVM - as a DLL, and the way it's compiled (/MT) has it's own CRT and thus it's own CRT STD handles.
So MoarVM also needs to fix up its CRT STD handles.
See: https://docs.microsoft.com/de-de/cpp/c-runtime-library/potential-errors-passing-crt-objects-across-dll-boundaries
https://docs.microsoft.com/en-us/cpp/c-runtime-library/crt-library-features
*/
if (!MVM_set_std_handles_to_nul()) return EXIT_FAILURE;
#endif
/* Retrieve the executable directory path. */
#ifdef STATIC_EXEC_PATH
exec_path = STRINGIFY(STATIC_EXEC_PATH);
exec_path_size = strlen(exec_path);
#else
int res;
exec_path_size = 4096;
exec_path = (char*)malloc(exec_path_size);
res = MVM_exepath(exec_path, &exec_path_size);
while (res < 0 && exec_path_size < 4096*8) {
exec_path_size *= 2;
exec_path = (char*)realloc(exec_path, exec_path_size);
res = MVM_exepath(exec_path, &exec_path_size);
}
if (res < 0) {
fprintf(stderr, "ERROR: Could not retrieve executable path.\n");
return EXIT_FAILURE;
}
#endif
/* Filter out VM arguments from the command line. */
for (; (flag = parse_flag(argv[argi])) != NOT_A_FLAG; ++argi) {
switch (flag) {
case FLAG_FULL_CLEANUP:
full_cleanup = 1;
continue;
#if MVM_TRACING
case FLAG_TRACING:
MVM_interp_enable_tracing();
continue;
#endif
case FLAG_SUSPEND:
start_suspended = 1;
continue;
case OPT_DEBUGPORT: {
MVMint64 port;
char *portstr = argv[argi] + strlen("--debug-port=");
char *endptr;
port = strtoll(portstr, &endptr, 10);
if (*endptr != '\0') {
fprintf(stderr, "ERROR: Invalid characters in debug port flag: %s\n", portstr);
return EXIT_FAILURE;
}
if (port <= 1024 || 65535 < port) {
fprintf(stderr, "ERROR: debug server port out of range. We only accept ports above 1024 and below 65535. (got: %"PRIi64")\n", port);
return EXIT_FAILURE;
}
debugserverport = (MVMuint32)port;
break;
}
case OPT_RAKUDO_HOME:
option_rakudo_home = argv[argi] + strlen("--rakudo-home=");
break;
default:
argv[new_argc++] = argv[argi];
}
}
/* Move over the remaining arguments. */
for (; argv[argi]; ++argi) {
argv[new_argc++] = argv[argi];
}
#ifdef HAVE_TELEMEH
unsigned int interval_id = 0;
char telemeh_inited = 0;
if (getenv("MVM_TELEMETRY_LOG")) {
char path[256];
FILE *fp;
snprintf(path, 255, "%s.%d", getenv("MVM_TELEMETRY_LOG"),
#ifdef _WIN32
_getpid()
#else
getpid()
#endif
);
fp = fopen(path, "w");
if (fp) {
MVM_telemetry_init(fp);
telemeh_inited = 1;
interval_id = MVM_telemetry_interval_start(0, "moarvm startup");
}
}
#endif
/* The +1 is the trailing \0 terminating the string. */
exec_dir_path_temp = (char*)malloc(exec_path_size + 1);
memcpy(exec_dir_path_temp, exec_path, exec_path_size + 1);
#ifdef _WIN32
PathRemoveFileSpecA(exec_dir_path_temp);
exec_dir_path_size = strlen(exec_dir_path_temp);
exec_dir_path = (char*)malloc(exec_dir_path_size + 1);
memcpy(exec_dir_path, exec_dir_path_temp, exec_dir_path_size + 1);
#else
exec_dir_path = dirname(exec_dir_path_temp);
exec_dir_path_size = strlen(exec_dir_path);
#endif
/* Retrieve RAKUDO_HOME and NQP_HOME. */
#ifdef STATIC_NQP_HOME
static_nqp_home = STRINGIFY(STATIC_NQP_HOME);
#endif
if (!retrieve_home(&nqp_home, nqp_rel_path, nqp_rel_path_size, "NQP_HOME",
exec_dir_path, exec_dir_path_size, nqp_check_path,
nqp_check_path_size, static_nqp_home, 0)) {
fprintf(stderr, "ERROR: NQP_HOME is invalid: %s\n", nqp_home);
return EXIT_FAILURE;
}
nqp_home_size = strlen(nqp_home);
#ifdef STATIC_RAKUDO_HOME
static_rakudo_home = STRINGIFY(STATIC_RAKUDO_HOME);
#endif
/* XXX Isn't it time to move RAKUDO_HOME in front of PERL6_HOME?? */
if (getenv("PERL6_HOME")) {
if (!retrieve_home(&rakudo_home, perl6_rel_path, perl6_rel_path_size,
"PERL6_HOME", exec_dir_path, exec_dir_path_size,
perl6_check_path, perl6_check_path_size, static_rakudo_home,
option_rakudo_home)) {
fprintf(stderr, "ERROR: PERL6_HOME is invalid: %s\n", rakudo_home);
return EXIT_FAILURE;
}
}
else {
if (!retrieve_home(&rakudo_home, perl6_rel_path, perl6_rel_path_size,
"RAKUDO_HOME", exec_dir_path, exec_dir_path_size,
perl6_check_path, perl6_check_path_size, static_rakudo_home,
option_rakudo_home)) {
fprintf(stderr, "ERROR: RAKUDO_HOME is invalid: %s\n", rakudo_home);
return EXIT_FAILURE;
}
}
rakudo_home_size = strlen(rakudo_home);
/* Put together the lib paths and perl6_file path. */
lib_path[0] = (char*)malloc(nqp_home_size + 50);
lib_path[1] = (char*)malloc(rakudo_home_size + 50);
lib_path[2] = (char*)malloc(rakudo_home_size + 50);
perl6_file = (char*)malloc(rakudo_home_size + 50);
memcpy(lib_path[0], nqp_home, nqp_home_size);
memcpy(lib_path[1], rakudo_home, rakudo_home_size);
memcpy(lib_path[2], rakudo_home, rakudo_home_size);
memcpy(perl6_file, rakudo_home, rakudo_home_size);
#ifdef _WIN32
strcpy(lib_path[0] + nqp_home_size, "\\lib");
strcpy(lib_path[1] + rakudo_home_size, "\\lib");
strcpy(lib_path[2] + rakudo_home_size, "\\runtime");
#ifdef MOAR_RAKUDO_RUNNER_DEBUG
strcpy(perl6_file + rakudo_home_size, "\\runtime\\perl6-debug.moarvm");
#else
strcpy(perl6_file + rakudo_home_size, "\\runtime\\perl6.moarvm");
#endif
#else
strcpy(lib_path[0] + nqp_home_size, "/lib");
strcpy(lib_path[1] + rakudo_home_size, "/lib");
strcpy(lib_path[2] + rakudo_home_size, "/runtime");
#ifdef MOAR_RAKUDO_RUNNER_DEBUG
strcpy(perl6_file + rakudo_home_size, "/runtime/perl6-debug.moarvm");
#else
strcpy(perl6_file + rakudo_home_size, "/runtime/perl6.moarvm");
#endif
#endif
/* Start up the VM. */
instance = MVM_vm_create_instance();
MVM_vm_set_clargs(instance, new_argc, argv);
MVM_vm_set_prog_name(instance, perl6_file);
MVM_vm_set_exec_name(instance, exec_path);
MVM_vm_set_lib_path(instance, 3, (const char **)lib_path);
/* Ignore SIGPIPE by default, since we error-check reads/writes. This does
* not prevent users from setting up their own signal handler for SIGPIPE,
* which will take precedence over this ignore. */
#ifndef _WIN32
signal(SIGPIPE, SIG_IGN);
#endif
if (debugserverport > 0) {
MVM_debugserver_init(instance->main_thread, debugserverport);
if (start_suspended) {
instance->main_thread->gc_status = MVMGCStatus_INTERRUPT | MVMSuspendState_SUSPEND_REQUEST;
}
}
instance->full_cleanup = full_cleanup;
MVM_vm_run_file(instance, perl6_file);
#ifdef HAVE_TELEMEH
if (getenv("MVM_TELEMETRY_LOG") && telemeh_inited) {
MVM_telemetry_interval_stop(0, interval_id, "moarvm teardown");
MVM_telemetry_finish();
}
#endif
free(lib_path[0]);
free(lib_path[1]);
free(lib_path[2]);
free(perl6_file);
#ifndef STATIC_EXEC_PATH
free(exec_path);
#endif
#if defined(_WIN32) && !defined(STATIC_NQP_HOME)
/* dirname's return value is either on the stack or is the same pointer
* that was passed to it depending on the version of libc used, which leads
* to double frees. */
free(exec_dir_path);
#endif
free(exec_dir_path_temp);
#ifndef STATIC_RAKUDO_HOME
free(rakudo_home);
#else
if (getenv("PERL6_HOME") || getenv("RAKUDO_HOME"))
free(rakudo_home);
#endif
#ifndef STATIC_NQP_HOME
free(nqp_home);
#endif
if (full_cleanup) {
MVM_vm_destroy_instance(instance);
return EXIT_SUCCESS;
}
else {
MVM_vm_exit(instance);
}
}
| #include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <uv.h>
#include <moar.h>
#ifdef _WIN32
# include <sys/types.h>
# include <sys/stat.h>
# include <process.h>
# include <shlwapi.h>
# include <io.h>
# if defined(_MSC_VER)
# define strtoll _strtoi64
# endif
#else
# include <sys/stat.h>
# include <libgen.h>
# include <unistd.h>
# include "signal.h"
#endif
#define STRINGIFY1(x) #x
#define STRINGIFY(x) STRINGIFY1(x)
/* flags need to be sorted alphabetically */
enum {
NOT_A_FLAG = -2,
UNKNOWN_FLAG = -1,
FLAG_SUSPEND,
FLAG_FULL_CLEANUP,
FLAG_TRACING,
OPT_DEBUGPORT,
OPT_RAKUDO_HOME,
OPT_PTY_SPAWN_HELPER
};
static const char *const FLAGS[] = {
"--debug-suspend",
"--full-cleanup",
"--tracing",
};
static int cmp_flag(const void *key, const void *value) {
return strcmp(key, *(char **)value);
}
static int starts_with(const char *str, const char *want) {
size_t str_len = strlen(str);
size_t want_len = strlen(want);
return str_len < want_len
? 0
: strncmp(str, want, want_len) == 0;
}
static int parse_flag(const char *arg)
{
const char *const *found;
if (!arg || arg[0] != '-')
return NOT_A_FLAG;
found = bsearch(arg, FLAGS, sizeof FLAGS / sizeof *FLAGS, sizeof *FLAGS, cmp_flag);
if (found)
return (int)(found - FLAGS);
else if (starts_with(arg, "--debug-port="))
return OPT_DEBUGPORT;
else if (starts_with(arg, "--rakudo-home="))
return OPT_RAKUDO_HOME;
else if (starts_with(arg, "--pty-spawn-helper="))
return OPT_PTY_SPAWN_HELPER;
else
return UNKNOWN_FLAG;
}
int file_exists(const char *path) {
#ifdef _WIN32
int res;
struct _stat sb;
const int len = MultiByteToWideChar(CP_UTF8, 0, path, -1, NULL, 0);
wchar_t * const wpath = (wchar_t *)malloc(len * sizeof(wchar_t));
MultiByteToWideChar(CP_UTF8, 0, path, -1, (LPWSTR)wpath, len);
res = _wstat(wpath, &sb);
free(wpath);
return res == 0;
#else
struct stat *sb = malloc(sizeof(struct stat));
int res = stat(path, sb) == 0;
free(sb);
return res;
#endif
}
void platformify_path(char *path) {
#ifdef _WIN32
int i;
for (i = 0; path[i]; i++) {
if (path[i] == '/') {
path[i] = '\\';
}
}
#endif
}
int retrieve_home(
char **out_home,
const char *rel_home,
const size_t rel_home_size,
const char *env_var,
char *exec_dir_path,
size_t exec_dir_path_size,
const char *check_file,
const size_t check_file_size,
char *static_home,
char *options_home
) {
char *check_file_path;
size_t home_size;
int ret;
char *env_home = getenv(env_var);
if (options_home) {
*out_home = options_home;
home_size = strlen(*out_home);
}
else if (env_home) {
home_size = strlen(env_home);
*out_home = (char*)malloc(home_size + 1);
strcpy(*out_home, env_home);
#ifdef _WIN32
if (*(*out_home + home_size - 1) == '\\') {
#else
if (*(*out_home + home_size - 1) == '/') {
#endif
*(*out_home + home_size - 1) = '\0';
home_size--;
}
}
else if (static_home) {
*out_home = static_home;
home_size = strlen(*out_home);
}
else {
home_size = exec_dir_path_size + rel_home_size;
*out_home = (char*)malloc(home_size + 1);
strncpy(*out_home, exec_dir_path, home_size);
strncat(*out_home, rel_home, rel_home_size);
platformify_path(*out_home + exec_dir_path_size);
}
check_file_path = (char*)malloc(home_size + check_file_size + 1);
strncpy(check_file_path, *out_home, home_size + check_file_size);
strncat(check_file_path, check_file, check_file_size);
ret = file_exists(check_file_path);
free(check_file_path);
return ret;
}
#if defined(_WIN32) && defined(SUBSYSTEM_WINDOWS)
int set_std_handle_to_nul(FILE *file, int fd, BOOL read, int std_handle_type) {
/* Found on https://docs.microsoft.com/en-us/cpp/c-runtime-library/reference/get-osfhandle?view=vs-2019:
"When stdin, stdout, and stderr aren't associated with a stream (for example, in a Windows
application without a console window), the file descriptor values for these streams are
returned from _fileno as the special value -2. Similarly, if you use a 0, 1, or 2 as the
file descriptor parameter instead of the result of a call to _fileno, _get_osfhandle also
returns the special value -2 when the file descriptor is not associated with a stream, and
does not set errno. However, this is not a valid file handle value, and subsequent calls
that attempt to use it are likely to fail."
See https://jdebp.eu/FGA/redirecting-standard-io.html
https://stackoverflow.com/a/50358201 (Especially the comments of Eryk Sun)
*/
FILE *stream;
HANDLE new_handle;
if (_fileno(file) != -2 || _get_osfhandle(fd) != -2)
// The handles are initialized. Don't touch!
return 1;
/* FD 1 is in an error state (_get_osfhandle(1) == -2). Close it. The FD number is up for grabs
after this call. */
if (_close(fd) != 0)
return 0;
/* FILE *stdout is in an error state (_fileno(stdout) == -2). Reopen it to a "NUL:" file. This
will take the next free FD number. So it's important to call this sequentially for FD 0, 1
and 2. */
if (freopen_s(&stream, "NUL:", read ? "r" : "w", file) != 0)
return 0;
/* Set the underlying Windows handle as the STD handler. */
new_handle = (HANDLE)_get_osfhandle(fd);
if (!SetStdHandle(std_handle_type, new_handle))
return 0;
return 1;
}
#endif
#if defined(_WIN32) && defined(SUBSYSTEM_WINDOWS)
int wWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPWSTR lpCmdLine, INT nCmdShow) {
int argc;
LPWSTR *wargv = CommandLineToArgvW(GetCommandLineW(), &argc);
char **argv = MVM_UnicodeToUTF8_argv(argc, wargv);
LocalFree(wargv);
#elif defined(_WIN32) && !defined(SUBSYSTEM_WINDOWS)
int wmain(int argc, wchar_t *wargv[]) {
char **argv = MVM_UnicodeToUTF8_argv(argc, wargv);
#else
int main(int argc, char *argv[]) {
#endif
MVMInstance *instance;
char *exec_path;
size_t exec_path_size;
char *exec_dir_path_temp;
char *exec_dir_path;
size_t exec_dir_path_size;
char *nqp_home;
size_t nqp_home_size;
char *static_nqp_home = 0;
const char nqp_rel_path[14] = "/../share/nqp";
const size_t nqp_rel_path_size = 13;
const char nqp_check_path[28] = "/lib/NQPCORE.setting.moarvm";
const size_t nqp_check_path_size = 27;
char *rakudo_home;
size_t rakudo_home_size;
char *static_rakudo_home = 0;
char *option_rakudo_home = 0;
const char perl6_rel_path[16] = "/../share/perl6";
const size_t perl6_rel_path_size = 15;
const char perl6_check_path[22] = "/runtime/perl6.moarvm";
const size_t perl6_check_path_size = 21;
char *lib_path[3];
char *perl6_file;
int full_cleanup = 0;
int argi = 1;
int flag;
int new_argc = 0;
MVMuint32 debugserverport = 0;
int start_suspended = 0;
#if defined(__APPLE__) || defined(__Darwin__)
if (getenv("RAKUDO_DYLD_LIBRARY_PATH")) {
setenv("DYLD_LIBRARY_PATH", getenv("RAKUDO_DYLD_LIBRARY_PATH"), 0);
}
#endif
#if defined(_WIN32) && defined(SUBSYSTEM_WINDOWS)
/* When using the 'windows' subsystem the standard IO handles are not
connected. This causes a program abort when accessing the handles. To
prevent these aborts, we redirect the handles to NUL in this case.
*/
/* Set our own handles. */
if (!set_std_handle_to_nul(stdin, 0, 1, STD_INPUT_HANDLE)) return EXIT_FAILURE;
if (!set_std_handle_to_nul(stdout, 1, 0, STD_OUTPUT_HANDLE)) return EXIT_FAILURE;
if (!set_std_handle_to_nul(stderr, 2, 0, STD_ERROR_HANDLE)) return EXIT_FAILURE;
/* MoarVM - as a DLL, and the way it's compiled (/MT) has it's own CRT and thus it's own CRT STD handles.
So MoarVM also needs to fix up its CRT STD handles.
See: https://docs.microsoft.com/de-de/cpp/c-runtime-library/potential-errors-passing-crt-objects-across-dll-boundaries
https://docs.microsoft.com/en-us/cpp/c-runtime-library/crt-library-features
*/
if (!MVM_set_std_handles_to_nul()) return EXIT_FAILURE;
#endif
/* Retrieve the executable directory path. */
#ifdef STATIC_EXEC_PATH
exec_path = STRINGIFY(STATIC_EXEC_PATH);
exec_path_size = strlen(exec_path);
#else
int res;
exec_path_size = 4096;
exec_path = (char*)malloc(exec_path_size);
res = MVM_exepath(exec_path, &exec_path_size);
while (res < 0 && exec_path_size < 4096*8) {
exec_path_size *= 2;
exec_path = (char*)realloc(exec_path, exec_path_size);
res = MVM_exepath(exec_path, &exec_path_size);
}
if (res < 0) {
fprintf(stderr, "ERROR: Could not retrieve executable path.\n");
return EXIT_FAILURE;
}
#endif
/* Filter out VM arguments from the command line. */
for (; (flag = parse_flag(argv[argi])) != NOT_A_FLAG; ++argi) {
switch (flag) {
case FLAG_FULL_CLEANUP:
full_cleanup = 1;
continue;
#if MVM_TRACING
case FLAG_TRACING:
MVM_interp_enable_tracing();
continue;
#endif
case FLAG_SUSPEND:
start_suspended = 1;
continue;
case OPT_DEBUGPORT: {
MVMint64 port;
char *portstr = argv[argi] + strlen("--debug-port=");
char *endptr;
port = strtoll(portstr, &endptr, 10);
if (*endptr != '\0') {
fprintf(stderr, "ERROR: Invalid characters in debug port flag: %s\n", portstr);
return EXIT_FAILURE;
}
if (port <= 1024 || 65535 < port) {
fprintf(stderr, "ERROR: debug server port out of range. We only accept ports above 1024 and below 65535. (got: %"PRIi64")\n", port);
return EXIT_FAILURE;
}
debugserverport = (MVMuint32)port;
break;
}
case OPT_RAKUDO_HOME:
option_rakudo_home = argv[argi] + strlen("--rakudo-home=");
break;
case OPT_PTY_SPAWN_HELPER: {
char *prog = argv[argi] + strlen("--pty-spawn-helper=");
char **args = calloc(argc - argi + 1, sizeof(char *));
args[0] = prog;
args[argc - argi] = 0;
for (int argj = 1; argi + argj < argc; argj++)
args[argj] = argv[argi + argj];
// Will not return if all goes well.
MVM_proc_pty_spawn(prog, args);
return EXIT_FAILURE;
}
default:
argv[new_argc++] = argv[argi];
}
}
/* Move over the remaining arguments. */
for (; argv[argi]; ++argi) {
argv[new_argc++] = argv[argi];
}
#ifdef HAVE_TELEMEH
unsigned int interval_id = 0;
char telemeh_inited = 0;
if (getenv("MVM_TELEMETRY_LOG")) {
char path[256];
FILE *fp;
snprintf(path, 255, "%s.%d", getenv("MVM_TELEMETRY_LOG"),
#ifdef _WIN32
_getpid()
#else
getpid()
#endif
);
fp = fopen(path, "w");
if (fp) {
MVM_telemetry_init(fp);
telemeh_inited = 1;
interval_id = MVM_telemetry_interval_start(0, "moarvm startup");
}
}
#endif
/* The +1 is the trailing \0 terminating the string. */
exec_dir_path_temp = (char*)malloc(exec_path_size + 1);
memcpy(exec_dir_path_temp, exec_path, exec_path_size + 1);
#ifdef _WIN32
PathRemoveFileSpecA(exec_dir_path_temp);
exec_dir_path_size = strlen(exec_dir_path_temp);
exec_dir_path = (char*)malloc(exec_dir_path_size + 1);
memcpy(exec_dir_path, exec_dir_path_temp, exec_dir_path_size + 1);
#else
exec_dir_path = dirname(exec_dir_path_temp);
exec_dir_path_size = strlen(exec_dir_path);
#endif
/* Retrieve RAKUDO_HOME and NQP_HOME. */
#ifdef STATIC_NQP_HOME
static_nqp_home = STRINGIFY(STATIC_NQP_HOME);
#endif
if (!retrieve_home(&nqp_home, nqp_rel_path, nqp_rel_path_size, "NQP_HOME",
exec_dir_path, exec_dir_path_size, nqp_check_path,
nqp_check_path_size, static_nqp_home, 0)) {
fprintf(stderr, "ERROR: NQP_HOME is invalid: %s\n", nqp_home);
return EXIT_FAILURE;
}
nqp_home_size = strlen(nqp_home);
#ifdef STATIC_RAKUDO_HOME
static_rakudo_home = STRINGIFY(STATIC_RAKUDO_HOME);
#endif
/* XXX Isn't it time to move RAKUDO_HOME in front of PERL6_HOME?? */
if (getenv("PERL6_HOME")) {
if (!retrieve_home(&rakudo_home, perl6_rel_path, perl6_rel_path_size,
"PERL6_HOME", exec_dir_path, exec_dir_path_size,
perl6_check_path, perl6_check_path_size, static_rakudo_home,
option_rakudo_home)) {
fprintf(stderr, "ERROR: PERL6_HOME is invalid: %s\n", rakudo_home);
return EXIT_FAILURE;
}
}
else {
if (!retrieve_home(&rakudo_home, perl6_rel_path, perl6_rel_path_size,
"RAKUDO_HOME", exec_dir_path, exec_dir_path_size,
perl6_check_path, perl6_check_path_size, static_rakudo_home,
option_rakudo_home)) {
fprintf(stderr, "ERROR: RAKUDO_HOME is invalid: %s\n", rakudo_home);
return EXIT_FAILURE;
}
}
rakudo_home_size = strlen(rakudo_home);
/* Put together the lib paths and perl6_file path. */
lib_path[0] = (char*)malloc(nqp_home_size + 50);
lib_path[1] = (char*)malloc(rakudo_home_size + 50);
lib_path[2] = (char*)malloc(rakudo_home_size + 50);
perl6_file = (char*)malloc(rakudo_home_size + 50);
memcpy(lib_path[0], nqp_home, nqp_home_size);
memcpy(lib_path[1], rakudo_home, rakudo_home_size);
memcpy(lib_path[2], rakudo_home, rakudo_home_size);
memcpy(perl6_file, rakudo_home, rakudo_home_size);
#ifdef _WIN32
strcpy(lib_path[0] + nqp_home_size, "\\lib");
strcpy(lib_path[1] + rakudo_home_size, "\\lib");
strcpy(lib_path[2] + rakudo_home_size, "\\runtime");
#ifdef MOAR_RAKUDO_RUNNER_DEBUG
strcpy(perl6_file + rakudo_home_size, "\\runtime\\perl6-debug.moarvm");
#else
strcpy(perl6_file + rakudo_home_size, "\\runtime\\perl6.moarvm");
#endif
#else
strcpy(lib_path[0] + nqp_home_size, "/lib");
strcpy(lib_path[1] + rakudo_home_size, "/lib");
strcpy(lib_path[2] + rakudo_home_size, "/runtime");
#ifdef MOAR_RAKUDO_RUNNER_DEBUG
strcpy(perl6_file + rakudo_home_size, "/runtime/perl6-debug.moarvm");
#else
strcpy(perl6_file + rakudo_home_size, "/runtime/perl6.moarvm");
#endif
#endif
/* Start up the VM. */
instance = MVM_vm_create_instance();
MVM_vm_set_clargs(instance, new_argc, argv);
MVM_vm_set_prog_name(instance, perl6_file);
MVM_vm_set_exec_name(instance, exec_path);
MVM_vm_set_lib_path(instance, 3, (const char **)lib_path);
/* Ignore SIGPIPE by default, since we error-check reads/writes. This does
* not prevent users from setting up their own signal handler for SIGPIPE,
* which will take precedence over this ignore. */
#ifndef _WIN32
signal(SIGPIPE, SIG_IGN);
#endif
if (debugserverport > 0) {
MVM_debugserver_init(instance->main_thread, debugserverport);
if (start_suspended) {
instance->main_thread->gc_status = MVMGCStatus_INTERRUPT | MVMSuspendState_SUSPEND_REQUEST;
}
}
instance->full_cleanup = full_cleanup;
MVM_vm_run_file(instance, perl6_file);
#ifdef HAVE_TELEMEH
if (getenv("MVM_TELEMETRY_LOG") && telemeh_inited) {
MVM_telemetry_interval_stop(0, interval_id, "moarvm teardown");
MVM_telemetry_finish();
}
#endif
free(lib_path[0]);
free(lib_path[1]);
free(lib_path[2]);
free(perl6_file);
#ifndef STATIC_EXEC_PATH
free(exec_path);
#endif
#if defined(_WIN32) && !defined(STATIC_NQP_HOME)
/* dirname's return value is either on the stack or is the same pointer
* that was passed to it depending on the version of libc used, which leads
* to double frees. */
free(exec_dir_path);
#endif
free(exec_dir_path_temp);
#ifndef STATIC_RAKUDO_HOME
free(rakudo_home);
#else
if (getenv("PERL6_HOME") || getenv("RAKUDO_HOME"))
free(rakudo_home);
#endif
#ifndef STATIC_NQP_HOME
free(nqp_home);
#endif
if (full_cleanup) {
MVM_vm_destroy_instance(instance);
return EXIT_SUCCESS;
}
else {
MVM_vm_exit(instance);
}
}
|
dduan/django-sophie | 1 | sophie/admin.py | from django.contrib import admin
from sophie.models import Entry, Category, Blog
def delete_selected_action(modeladmin, request, queryset):
    """Replacement for admin's built-in 'delete selected objects' action.

    The stock action performs a bulk queryset delete, which bypasses each
    object's delete() override; deleting the items one by one keeps that
    per-object behaviour.
    """
    for item in queryset:
        item.delete()
delete_selected_action.short_description = "Delete Selected Items"
def make_entry_live_action(modeladmin, request, queryset):
    # Bulk status change: publish every selected entry in a single UPDATE.
    queryset.update( status = Entry.LIVE_STATUS )
make_entry_live_action.short_description = "Make Selected Entries Live"
def make_entry_hidden_action(modeladmin, request, queryset):
    # Bulk status change: hide every selected entry in a single UPDATE.
    queryset.update( status = Entry.HIDDEN_STATUS )
make_entry_hidden_action.short_description = "Make Selected Entries Hidden"
def make_entry_draft_action(modeladmin, request, queryset):
    # Bulk status change: demote every selected entry to draft in one UPDATE.
    queryset.update( status = Entry.DRAFT_STATUS )
make_entry_draft_action.short_description = "Make Selected Entries Drafts"
class EntryAdmin(admin.ModelAdmin):
    # Admin configuration for blog entries.
    save_on_top = True
    # Replace the stock bulk-delete with the per-object version and add
    # one-click status transitions.
    actions = (
        delete_selected_action,
        make_entry_live_action,
        make_entry_hidden_action,
        make_entry_draft_action,
    )
    prepopulated_fields = { 'slug': ('title',) }
    radio_fields = { 'status': admin.HORIZONTAL }
    fieldsets = (
        ('Entry Content', {
            'fields': (
                ('category', 'author', 'markup'),
                ('title', 'slug'),
                'body',
                'teaser',
                'pub_date',
            )
        }),
        ('Entry Settings', {
            'fields': (
                'status',
                'allow_comment',
            ),
        }),
    )
    list_display = (
        'title',
        'display_category',
        'display_author',
        'status',
        'pub_date'
    )
    list_editable = ('status',)
    list_filter = ('status', 'blog')
    def display_category(self, obj):
        """Changelist column: title of the entry's category."""
        return obj.category.title
    display_category.short_description = "Category"
    def display_author(self, obj):
        """Changelist column: full name of the entry's author."""
        return obj.author.get_full_name()
    display_author.short_description = "Author"
    def get_actions(self, request):
        """Drop Django's default 'delete_selected' so only our replacement shows."""
        actions = super(EntryAdmin, self).get_actions(request)
        del actions['delete_selected']
        return actions
class CategoryAdmin(admin.ModelAdmin):
    # Admin configuration for entry categories.
    save_on_top = True
    # entry_count cannot be edited in the admin (display-only).
    readonly_fields = ('entry_count',)
    prepopulated_fields = {'slug': ('title',)}
    fieldsets = (
        (None, {
            'fields': (
                'blog',
                'title',
                'slug',
                'description',
                'shown',
                'entry_count',
            ),
        }),
    )
    list_display = (
        'title',
        'description',
        'entry_count',
        'display_blog',
        'shown'
    )
    list_filter = ('blog', 'shown')
    list_editable = ('shown', )
    def display_blog(self, obj):
        """Changelist column: title of the blog this category belongs to."""
        return obj.blog.title
    display_blog.short_description = "Blog"
class BlogAdmin(admin.ModelAdmin):
    """Admin configuration for Blog: basic info up front, the remaining
    setting groups collapsed by default."""
    fieldsets = (
        # typo fix: this user-visible section title read "Basic Infomation"
        ('Basic Information', {
            'fields': (
                ('title', 'slug'),
                'description',
            ),
        }),
        ('Entry Settings', {
            'fields': (
                'full_entry_in_page',
                'highlight_code',
                'page_length',
            ),
            'classes': ('collapse',)
        }),
        ('Feed Settings', {
            'fields': (
                'full_entry_in_feed',
                'feed_length',
                'feed_service'
            ),
            'classes': ('collapse',)
        }),
        ('Services', {
            'fields': (
                'g_analytics_tracking_id',
                'disqus_shortname',
            ),
            'classes': ('collapse',)
        }),
    )
    prepopulated_fields = {'slug': ('title',)}
    list_display = ('title', 'description')
# Hook the model admin configurations into the default admin site.
admin.site.register(Entry, EntryAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Blog, BlogAdmin)
| from django.contrib import admin
from django import forms
from sophie.models import Entry, Category, Blog
def delete_selected_action(modeladmin, request, queryset):
    """Replacement for admin's built-in 'delete selected objects' action.

    The stock action performs a bulk queryset delete, which bypasses each
    object's delete() override; deleting the items one by one keeps that
    per-object behaviour.
    """
    for item in queryset:
        item.delete()
delete_selected_action.short_description = "Delete Selected Items"
def make_entry_live_action(modeladmin, request, queryset):
    # Bulk status change: publish every selected entry in a single UPDATE.
    queryset.update( status = Entry.LIVE_STATUS )
make_entry_live_action.short_description = "Make Selected Entries Live"
def make_entry_draft_action(modeladmin, request, queryset):
    # Bulk status change: demote every selected entry to draft in one UPDATE.
    queryset.update( status = Entry.DRAFT_STATUS )
make_entry_draft_action.short_description = "Make Selected Entries Drafts"
class EntryAdminForm(forms.ModelForm):
    """Entry form with enlarged text areas for comfortable writing."""
    class Meta:
        model = Entry
    def __init__(self, *args, **kwargs):
        """Swap in bigger Textarea widgets for the body and teaser fields."""
        super(EntryAdminForm, self).__init__(*args, **kwargs)
        for field_name, num_rows in (('body', 20), ('teaser', 5)):
            widget = forms.widgets.Textarea(attrs={'rows': num_rows, 'cols': 110})
            self.fields[field_name].widget = widget
class EntryAdmin(admin.ModelAdmin):
    # Admin configuration for blog entries.
    save_on_top = True
    # Replace the stock bulk-delete with the per-object version and add
    # one-click status transitions.
    actions = (
        delete_selected_action,
        make_entry_live_action,
        make_entry_draft_action,
    )
    prepopulated_fields = { 'slug': ('title',) }
    radio_fields = { 'status': admin.HORIZONTAL }
    fieldsets = (
        ('Entry Content', {
            'fields': (
                ('category', 'author', 'markup'),
                ('title', 'slug'),
                'body',
                'teaser',
                'pub_date',
            )
        }),
        ('Entry Settings', {
            'fields': (
                'status',
                'allow_comment',
            ),
        }),
    )
    list_display = (
        'title',
        'display_category',
        'display_author',
        'status',
        'pub_date'
    )
    list_editable = ('status',)
    list_filter = ('status', 'blog')
    # Custom form: its __init__ enlarges the body/teaser text areas.
    form = EntryAdminForm
    def display_category(self, obj):
        """Changelist column: title of the entry's category."""
        return obj.category.title
    display_category.short_description = "Category"
    def display_author(self, obj):
        """Changelist column: full name of the entry's author."""
        return obj.author.get_full_name()
    display_author.short_description = "Author"
    def get_actions(self, request):
        """Drop Django's default 'delete_selected' so only our replacement shows."""
        actions = super(EntryAdmin, self).get_actions(request)
        del actions['delete_selected']
        return actions
class CategoryAdmin(admin.ModelAdmin):
    # Admin configuration for entry categories.
    save_on_top = True
    # entry_count cannot be edited in the admin (display-only).
    readonly_fields = ('entry_count',)
    prepopulated_fields = {'slug': ('title',)}
    fieldsets = (
        (None, {
            'fields': (
                'blog',
                'title',
                'slug',
                'description',
                'shown',
                'entry_count',
            ),
        }),
    )
    list_display = (
        'title',
        'description',
        'entry_count',
        'display_blog',
        'shown'
    )
    list_filter = ('blog', 'shown')
    list_editable = ('shown', )
    def display_blog(self, obj):
        """Changelist column: title of the blog this category belongs to."""
        return obj.blog.title
    display_blog.short_description = "Blog"
class BlogAdmin(admin.ModelAdmin):
    """Admin configuration for Blog: basic info up front, the remaining
    setting groups collapsed by default."""
    fieldsets = (
        # typo fix: this user-visible section title read "Basic Infomation"
        ('Basic Information', {
            'fields': (
                ('title', 'slug'),
                'description',
            ),
        }),
        ('Entry Settings', {
            'fields': (
                'full_entry_in_page',
                'highlight_code',
                'page_length',
            ),
            'classes': ('collapse',)
        }),
        ('Feed Settings', {
            'fields': (
                'full_entry_in_feed',
                'feed_length',
                'feed_service'
            ),
            'classes': ('collapse',)
        }),
        ('Services', {
            'fields': (
                'g_analytics_tracking_id',
                'disqus_shortname',
            ),
            'classes': ('collapse',)
        }),
    )
    prepopulated_fields = {'slug': ('title',)}
    list_display = ('title', 'description')
# Hook the model admin configurations into the default admin site.
admin.site.register(Entry, EntryAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Blog, BlogAdmin)
|
goerz/convert_encoding.py | 3 | convert_encoding.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convert text files between arbitrary encodings and line endings.
Usage:
convert_encoding.py [options] file1 file2 ...
Options are:
-f enc, --from=enc the input encoding
(default: locale.getpreferredencoding())
Set this to 'guess' if the encoding is unknown.
-t enc, --to=enc the output encoding
(default: locale.getpreferredencoding())
-e code, --eol=code the end-of-line mode (see below)
-o file, --out=file where to write the converted data. Conversion is
done in-place if this is omitted
-n, --nocodec ignore input and output encoding, just process eol
-r, --recursive go into directories recursively
-l, --followlinks follow symbolic links
-d, --dotfiles in combination with -r: process files that begin
with a dot, if they are found in a directory reached
by recursion. You still have to specify dotfiles
explicitly in the initial filelist.
-q, --quiet don't display warnings or status messages
For valid encoding names, see
https://docs.python.org/2.4/lib/standard-encodings.html
Accepted values for the eol code are: 'unix', 'linux', 'dos', 'win', 'mac'.
If you do not specify an end-of-line, it will be set to your system's default,
regardless of the original end-of-line.
Any '#' in the out-filename will be replaced with the full name of the input
file.
Note that guessing the input encoding is not foolproof in any way. Always
provide an explicit input encoding if you can.
The program supports full unix style globbing on all operation systems,
independently of your shell's capabilities. Also, environment variables
and '~' will be expanded following the usual conventions.
The file may also be used as a module from python.
"""
"""
Copyright (C) 2007 Michael Goerz
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import codecs
import getopt
import os
import os.path
import locale
import shutil
def main():
    """Command line program for converting encodings and end-of-lines """
    #
    # command line parsing / initialization
    global warn  # may be rebound to no_warn when --quiet is given
    try:
        opts, files = getopt.getopt(sys.argv[1:], "hf:t:e:o:rlqnd",
                                    ["help", "from=", "to=","eol=",
                                     "recursive", "followlinks",
                                     "quiet", "nocodec", "out=",
                                     "dotfiles"])
    except getopt.GetoptError, details:
        warn(details)
        exit(2)
    locale.setlocale(locale.LC_ALL, '')
    # defaults: system preferred encoding and the native line separator
    from_enc = locale.getpreferredencoding()
    to_enc = locale.getpreferredencoding()
    eol = os.linesep
    recursive = False
    followlinks = False
    dotfiles = False
    guess = False
    nocodec = False
    outfilename = ""
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        if o in ("-f", "--from"):
            # 'guess' is a magic value: detect the input encoding per file
            if a == 'guess':
                guess = True
            else:
                from_enc = a
        if o in ("-t", "--to"):
            to_enc = a
        if o in ("-o", "--out"):
            outfilename = os.path.normcase(a)
        if o in ("-r", "--recursive"):
            recursive = True
        if o in ("-d", "--dotfiles"):
            dotfiles = True
        if o in ("-q", "--quiet"):
            warn = no_warn
        if o in ("-n", "--nocodec"):
            nocodec = True
        if o in ("-e", "--eol"):
            eolmode = a.lower()
            os_eols = {'unix':"\n",
                       'linux':"\n",
                       'dos':"\r\n",
                       'win':"\r\n",
                       'mac':"\r"}
            try:
                eol = os_eols[eolmode]
            except KeyError:
                warn("'%s' is not a valid name for a line ending." % eolmode)
                warn("Use 'unix', 'linux', 'dos', 'win', or 'mac'.")
                warn("Converting to your default line ending")
        # NOTE(review): the next test duplicates the --followlinks test just
        # below it, so -l also forces recursion -- looks like a copy-paste of
        # the -r branch; confirm this is intended.
        if o in ("-l", "--followlinks"):
            recursive = True
        if o in ("-l", "--followlinks"):
            followlinks = True
    #
    # put together what we actually have to do
    if nocodec:
        rec_function = lambda infilename: convert_eol(infilename, eol, \
                                          outfilename.replace('#', infilename))
    else:
        if guess:
            warn("WARNING: guessing the input encoding is dangerous. "
                 + "Make sure to check the results.\n")
            rec_function = lambda infilename: \
                          convert_encoding(
                              infilename,
                              guess_file_encoding(infilename, from_enc),
                              to_enc,
                              eol,
                              outfilename.replace('#', infilename))
        else:
            rec_function = lambda infilename: \
                          convert_encoding(
                              infilename,
                              from_enc,
                              to_enc,
                              eol,
                              outfilename.replace('#', infilename))
    #
    # Run through all the files
    file_recursor(rec_function, files, recursive, followlinks, dotfiles)
def warn(msg, nl=True):
    """Write msg to stderr, followed by a newline unless nl is False."""
    suffix = "\n" if nl else ""
    sys.stderr.write(str(msg) + suffix)
def no_warn(msg, nl=True):
    """Silently discard msg; drop-in replacement for warn() in quiet mode."""
    return None
def file_recursor(function=None, file_list=(), recurse=True,
                  followlinks=True, dotfiles=False):
    """ Call function(filename) for each file in file_list.

        If recurse is True, go into directories recursively. If followlinks
        is True, follow symbolic links. If dotfiles is True, also process
        files and dirs beginning with a dot when they are found in a
        directory reached by recursion. Dotfiles still have to be given
        explicitly in the initial file_list.

        Every name in file_list is expanded as a glob, with '~' and
        environment variables resolved.
    """
    from glob import glob
    for name in file_list:
        name = os.path.expanduser(name)
        name = os.path.expandvars(name)
        name = os.path.normcase(name)
        matches = glob(name)  # evaluate the glob once, not three times
        if len(matches) > 1:
            # the name was a pattern: recurse on its expansion
            file_recursor(function, matches, recurse, followlinks, dotfiles)
            continue
        elif len(matches) == 1:
            name = matches[0]
        if os.path.islink(name):
            if not followlinks:
                warn("'%s' is a symlink, but following links is not activated" \
                     % name)
                continue
            else:
                # NOTE(review): os.readlink may return a path relative to the
                # link's own directory; this assumes the target resolves
                # as-is -- confirm.
                name = os.readlink(name)
        if os.path.isfile(name):
            function(name)
        elif os.path.isdir(name):
            if name != '..':
                if recurse:
                    # fix: propagate dotfiles so deeper directory levels keep
                    # processing dotfiles too (the flag was silently dropped
                    # below the first level before)
                    file_recursor(function, glob(os.path.join(name, '*')),
                                  recurse, followlinks, dotfiles)
                    if dotfiles:
                        file_recursor(function, glob(os.path.join(name, '.*')),
                                      recurse, followlinks, dotfiles)
                else:
                    warn("'%s' is a directory, but recursive handling is not activated" % name)
        else:
            warn("Can't process '%s'. Not found.\n" % name)
            continue
def convert_encoding(infilename, from_enc, to_enc, eol=os.linesep,
outfilename=""):
""" Convert a text file from_enc to_enc
If eol is given, it must be an appropriate string to terminate a line,
i.e. "\\n", "\\r\\n", "\\r". It defaults to the standard line ending
for your OS (os.linesep)
If outfilename is given, the results will be written to that file, if
not, the conversion is done in-place
from_enc and to_enc are strings containing a name for any
encoding known to Python. See
http://docs.python.org/lib/standard-encodings.html
"""
def error_cleanup():
if hasattr(infile, 'close'):
infile.close()
if hasattr(outfile, 'close'):
outfile.close()
if os.path.isfile(outfilename) and os.path.isfile(infilename):
os.remove(outfilename)
warn("Processing %s ... " % infilename, nl=False)
if os.path.isfile(infilename):
#
# choose temp file
tempfilename = infilename + "." + to_enc
while os.path.isfile(tempfilename):
tempfilename = tempfilename + "x"
#
# open original file (infile) and tempfile (outfile)
infile = outfile = None
try:
infile = codecs.open(infilename, "rb", from_enc)
except Exception, details:
warn("Error opening %s: %s" % (infilename, details));
error_cleanup()
return None
try:
outfile = codecs.open(tempfilename, "wb", to_enc)
except Exception, details:
warn("Error opening %s: %s" % (tempfilename, details))
error_cleanup()
return None
#
# go through infile, convert, and write to outfile
try:
for line in infile:
try:
line = line.replace("\r\n", "\n") # win
line = line.replace("\r", "\n") # mac
line = line.replace("\n", eol)
outfile.write(line)
except Exception, details:
raise Exception, "Error writing to %s: %s" \
% (tempfilename, details);
except Exception, details:
warn("Error in I/O: %s" % details)
error_cleanup()
else:
#
# Finish up: overwrite original file with tempfile
try:
infile.close()
outfile.close()
shutil.copystat(infilename, tempfilename)
overwrite = False
if outfilename == "":
outfilename = infilename
overwrite = True
rename_file(tempfilename, outfilename, overwrite)
warn("%s was successfully converted from %s to %s" \
% (infilename, from_enc, to_enc))
warn("") # finish a block
except Exception, details:
warn("Renaming %s to %s FAILED. File was not converted: %s" \
% (tempfilename, infilename, details))
error_cleanup()
warn("") # finish a block
else:
warn("File '%s' does not exist\n" % file)
def convert_eol(infilename, eol=os.linesep, outfilename=""):
""" Convert just the end of line of a textfile
If eol is given, it must be an appropriate string to terminate a line,
i.e. "\\n", "\\r\\n", "\\r". It defaults to the standard line ending
for your os (os.linesep)
If outfilename is given, the results will be written to that file, if
not, the conversion is done in-place
The encoding of the file is left untouched
"""
def error_cleanup():
if hasattr(infile, 'close'):
infile.close()
if hasattr(outfile, 'close'):
outfile.close()
if os.path.isfile(tempfilename) and os.path.isfile(infilename):
os.remove(tempfilename)
warn("Processing %s ... " % infilename, nl=False)
if os.path.isfile(infilename):
#
# choose temp file
tempfilename = infilename + "." + "eol"
while os.path.isfile(tempfilename):
tempfilename = tempfilename + "x"
#
# open original file (infile) and tempfile (outfile)
infile = outfile = None
try:
infile = open(infilename, "rb")
except Exception, details:
warn("Error opening %s: %s" % (infilename, details));
error_cleanup()
return None
try:
outfile = open(tempfilename, "wb")
except Exception, details:
warn("Error opening %s: %s" % (tempfilename, details))
error_cleanup()
return None
#
# go through infile, convert, and write to outfile
try:
for line in infile:
try:
if "\x00\r" in line or "\x00\n" in line:
eol = eol.replace("\r", "\x00\r")
eol = eol.replace("\n", "\x00\n")
line = line.replace("\r\n", "\n") # win
line = line.replace("\x00\r\x00\n", "\n") # utf16 win
line = line.replace("\r", "\n") # mac
line = line.replace("\x00\r", "\n") # utf16 mac
line = line.replace("\x00\n", "\n") # utf16 unix
line = line.replace("\n", eol)
outfile.write(line)
except Exception, details:
raise Exception, "Error writing to %s: %s" \
% (tempfilename, details);
except Exception, details:
warn("Error in I/O: %s" % details)
error_cleanup()
else:
#
# Finish up: overwrite original file with tempfile
try:
infile.close()
outfile.close()
shutil.copystat(infilename, tempfilename)
overwrite = False
if outfilename == "":
outfilename = infilename
overwrite = True
rename_file(tempfilename, outfilename, overwrite)
warn("Successfully converted eol for %s" % infilename)
warn("") # finish a block
except Exception, details:
warn("Renaming %s to %s FAILED. File was not converted: %s" \
% (tempfilename, infilename, details))
error_cleanup()
warn("") # finish a block
else:
warn("File '%s' does not exist\n" % file)
def guess_encoding(data):
    """ Given a byte string, attempt to decode it.
    Taken from:
    http://www.pyzine.com/Issue008/Section_Articles/article_Encodings.html#guessing-the-encoding
    Tries the standard 'UTF8' and 'latin-1' encodings,
    Plus several gathered from locale information.
    The calling program *must* first call
        locale.setlocale(locale.LC_ALL, '')
    If successful it returns
        (decoded_unicode, successful_encoding)
    If unsuccessful it raises a ``UnicodeError``
    """
    successful_encoding = None
    # we make 'utf-8' the first encoding
    encodings = ['utf-8']
    #
    # next we add anything we can learn from the locale
    try:
        encodings.append(locale.nl_langinfo(locale.CODESET))
    except AttributeError:
        # nl_langinfo is not available on all platforms
        pass
    try:
        encodings.append(locale.getlocale()[1])
    except (AttributeError, IndexError):
        pass
    try:
        encodings.append(locale.getdefaultlocale()[1])
    except (AttributeError, IndexError):
        pass
    #
    # we try try the rest now
    # NOTE(review): order matters -- 'latin-1' accepts any byte sequence,
    # so the codecs listed after it are effectively unreachable; confirm
    # the ordering is intended.
    encodings = encodings + ['latin-1', 'mbcs', 'big5', 'euc_jp', 'euc_kr',
                             'gb2312', 'gbk', 'gb18030', 'hz', 'iso2022_jp',
                             'koi8_u', 'ptcp154', 'shift_jis' ]
    for enc in encodings:
        # some of the locale calls
        # may have returned None
        if not enc:
            continue
        try:
            decoded = unicode(data, enc)
            successful_encoding = enc
        except (UnicodeError, LookupError):
            # decoding failed or codec unknown: try the next candidate
            pass
        else:
            break
    if not successful_encoding:
        raise UnicodeError(
            'Unable to decode input data. Tried the following encodings: %s.'
            % ', '.join([repr(enc) for enc in encodings if enc]))
    else:
        return (decoded, successful_encoding)
def guess_file_encoding(filename, default):
    """ Guess the encoding of a text file
    If the function is able to guess an encoding for filename, return that
    encoding, otherwise return the default.
    Note that guessing an encoding is not fool-proof, this might return the
    wrong encoding.
    Adapted from:
    http://www.pyzine.com/Issue008/Section_Articles/article_Encodings.html#guessing-the-encoding
    """
    # read the raw bytes of the whole file; on any failure fall back to
    # the caller-supplied default
    try:
        f = open(filename, "rb")
        the_text = f.read()
        f.close()
    except Exception, details:
        warn("Error while trying to guess the encoding of file %s: %s" \
             % (filename, details))
        return default
    # NOTE(review): UTF-32 BOMs are not listed and would be misdetected as
    # UTF-16 -- confirm whether UTF-32 input needs to be supported.
    bomdict = { codecs.BOM_UTF8 : 'UTF8',
                codecs.BOM_UTF16_BE : 'UTF-16BE',
                codecs.BOM_UTF16_LE : 'UTF-16LE' }
    # check if there is Unicode signature
    for bom, encoding in bomdict.items():
        if the_text.startswith(bom):
            # strip the BOM so it is not part of the decoded content
            the_text = the_text[len(bom):]
            break
    else:
        bom = None
        encoding = None
    if encoding is None: # there was no BOM
        try:
            unicode_text, encoding = guess_encoding(the_text)
        except UnicodeError:
            warn("Can't work out the encoding of file '%s'." % filename)
            warn("Assuming the default encoding: %s" % default)
            return default
    warn("Guessed encoding for file '%s': %s" % (filename, encoding))
    return encoding
def rename_file(file1, file2, overwrite=False):
""" Rename file1 to file2, ask for directions if file2 already exists """
if os.path.isfile(file1):
if os.path.isfile(file2):
while not overwrite:
answer = raw_input("%s already exists. Do you want to overwrite? Yes [No] Abort: " \
% file2).lower()
if answer == 'yes':
overwrite = True
elif answer == 'abort':
return None
else:
answer = raw_input("Enter a new filename: ")
if answer != '': file2 = os.path.normcase(answer)
if not os.path.isfile(file2):
overwrite = True
if file2 != file1: os.remove(file2)
try:
os.rename(file1, file2)
except Exception, details:
warn( "Failed to rename %s to %s: %s" % (file1, file2, details) )
def usage():
    """ Print usage for main program """
    # __doc__ holds the usage text from the module docstring at the top
    # of this file.
    print __doc__
if __name__ == "__main__":
    main()
| #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Convert text files between arbitrary encodings and line endings.
Usage:
convert_encoding.py [options] file1 file2 ...
Options are:
-f enc, --from=enc the input encoding
(default: locale.getpreferredencoding())
Set this to 'guess' if the encoding is unknown.
-t enc, --to=enc the output encoding
(default: locale.getpreferredencoding())
-e code, --eol=code the end-of-line mode (see below)
-o file, --out=file where to write the converted data. Conversion is
done in-place if this is omitted
-n, --nocodec ignore input and output encoding, just process eol
-r, --recursive go into directories recursively
-l, --followlinks follow symbolic links
-d, --dotfiles in combination with -r: process files that begin
with a dot, if they are found in a directory reached
by recursion. You still have to specify dotfiles
explicitly in the initial filelist.
-q, --quiet don't display warnings or status messages
For valid encoding names, see
https://docs.python.org/2.4/lib/standard-encodings.html
Accepted values for the eol code are: 'unix', 'linux', 'dos', 'win', 'mac'.
If you do not specify an end-of-line, it will be set to your system's default,
regardless of the original end-of-line.
Any '#' in the out-filename will be replaced with the full name of the input
file.
Note that guessing the input encoding is not foolproof in any way. Always
provide an explicit input encoding if you can.
The program supports full unix style globbing on all operation systems,
independently of your shell's capabilities. Also, environment variables
and '~' will be expanded following the usual conventions.
The file may also be used as a module from python.
"""
"""
Copyright (C) 2007 Michael Goerz
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys
import codecs
import getopt
import os
import os.path
import locale
import shutil
def main():
    """Command line program for converting encodings and end-of-lines """
    #
    # command line parsing / initialization
    global warn  # may be rebound to no_warn when --quiet is given
    try:
        opts, files = getopt.getopt(sys.argv[1:], "hf:t:e:o:rlqnd",
                                    ["help", "from=", "to=","eol=",
                                     "recursive", "followlinks",
                                     "quiet", "nocodec", "out=",
                                     "dotfiles"])
    except getopt.GetoptError, details:
        warn(details)
        exit(2)
    locale.setlocale(locale.LC_ALL, '')
    # defaults: system preferred encoding and the native line separator
    from_enc = locale.getpreferredencoding()
    to_enc = locale.getpreferredencoding()
    eol = os.linesep
    recursive = False
    followlinks = False
    dotfiles = False
    guess = False
    nocodec = False
    outfilename = ""
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        if o in ("-f", "--from"):
            # 'guess' is a magic value: detect the input encoding per file
            if a == 'guess':
                guess = True
            else:
                from_enc = a
        if o in ("-t", "--to"):
            to_enc = a
        if o in ("-o", "--out"):
            outfilename = os.path.normcase(a)
        if o in ("-r", "--recursive"):
            recursive = True
        if o in ("-d", "--dotfiles"):
            dotfiles = True
        if o in ("-q", "--quiet"):
            warn = no_warn
        if o in ("-n", "--nocodec"):
            nocodec = True
        if o in ("-e", "--eol"):
            eolmode = a.lower()
            os_eols = {'unix':"\n",
                       'linux':"\n",
                       'dos':"\r\n",
                       'win':"\r\n",
                       'mac':"\r"}
            try:
                eol = os_eols[eolmode]
            except KeyError:
                warn("'%s' is not a valid name for a line ending." % eolmode)
                warn("Use 'unix', 'linux', 'dos', 'win', or 'mac'.")
                warn("Converting to your default line ending")
        # NOTE(review): the next test duplicates the --followlinks test just
        # below it, so -l also forces recursion -- looks like a copy-paste of
        # the -r branch; confirm this is intended.
        if o in ("-l", "--followlinks"):
            recursive = True
        if o in ("-l", "--followlinks"):
            followlinks = True
    #
    # put together what we actually have to do
    if nocodec:
        rec_function = lambda infilename: convert_eol(infilename, eol, \
                                          outfilename.replace('#', infilename))
    else:
        if guess:
            warn("WARNING: guessing the input encoding is dangerous. "
                 + "Make sure to check the results.\n")
            rec_function = lambda infilename: \
                          convert_encoding(
                              infilename,
                              guess_file_encoding(infilename, from_enc),
                              to_enc,
                              eol,
                              outfilename.replace('#', infilename))
        else:
            rec_function = lambda infilename: \
                          convert_encoding(
                              infilename,
                              from_enc,
                              to_enc,
                              eol,
                              outfilename.replace('#', infilename))
    #
    # Run through all the files
    file_recursor(rec_function, files, recursive, followlinks, dotfiles)
def warn(msg, nl=True):
    """Write msg to stderr, followed by a newline unless nl is False."""
    suffix = "\n" if nl else ""
    sys.stderr.write(str(msg) + suffix)
def no_warn(msg, nl=True):
    """Silently discard msg; drop-in replacement for warn() in quiet mode."""
    return None
def file_recursor(function=None, file_list=(), recurse=True,
                  followlinks=True, dotfiles=False):
    """ Call function(filename) for each file in file_list.

        If recurse is True, go into directories recursively. If followlinks
        is True, follow symbolic links. If dotfiles is True, also process
        files and dirs beginning with a dot when they are found in a
        directory reached by recursion. Dotfiles still have to be given
        explicitly in the initial file_list.

        Every name in file_list is expanded as a glob, with '~' and
        environment variables resolved.
    """
    from glob import glob
    for name in file_list:
        name = os.path.expanduser(name)
        name = os.path.expandvars(name)
        name = os.path.normcase(name)
        matches = glob(name)  # evaluate the glob once, not three times
        if len(matches) > 1:
            # the name was a pattern: recurse on its expansion
            file_recursor(function, matches, recurse, followlinks, dotfiles)
            continue
        elif len(matches) == 1:
            name = matches[0]
        if os.path.islink(name):
            if not followlinks:
                warn("'%s' is a symlink, but following links is not activated" \
                     % name)
                continue
            else:
                # NOTE(review): os.readlink may return a path relative to the
                # link's own directory; this assumes the target resolves
                # as-is -- confirm.
                name = os.readlink(name)
        if os.path.isfile(name):
            function(name)
        elif os.path.isdir(name):
            if name != '..':
                if recurse:
                    # fix: propagate dotfiles so deeper directory levels keep
                    # processing dotfiles too (the flag was silently dropped
                    # below the first level before)
                    file_recursor(function, glob(os.path.join(name, '*')),
                                  recurse, followlinks, dotfiles)
                    if dotfiles:
                        file_recursor(function, glob(os.path.join(name, '.*')),
                                      recurse, followlinks, dotfiles)
                else:
                    warn("'%s' is a directory, but recursive handling is not activated" % name)
        else:
            warn("Can't process '%s'. Not found.\n" % name)
            continue
def convert_encoding(infilename, from_enc, to_enc, eol=os.linesep,
outfilename=""):
""" Convert a text file from_enc to_enc
If eol is given, it must be an appropriate string to terminate a line,
i.e. "\\n", "\\r\\n", "\\r". It defaults to the standard line ending
for your OS (os.linesep)
If outfilename is given, the results will be written to that file, if
not, the conversion is done in-place
from_enc and to_enc are strings containing a name for any
encoding known to Python. See
http://docs.python.org/lib/standard-encodings.html
"""
def error_cleanup():
if hasattr(infile, 'close'):
infile.close()
if hasattr(outfile, 'close'):
outfile.close()
if os.path.isfile(outfilename) and os.path.isfile(infilename):
os.remove(outfilename)
warn("Processing %s ... " % infilename, nl=False)
if os.path.isfile(infilename):
#
# choose temp file
tempfilename = infilename + "." + to_enc
while os.path.isfile(tempfilename):
tempfilename = tempfilename + "x"
#
# open original file (infile) and tempfile (outfile)
infile = outfile = None
try:
infile = codecs.open(infilename, "rb", from_enc)
except Exception, details:
warn("Error opening %s: %s" % (infilename, details));
error_cleanup()
return None
try:
outfile = codecs.open(tempfilename, "wb", to_enc)
except Exception, details:
warn("Error opening %s: %s" % (tempfilename, details))
error_cleanup()
return None
#
# go through infile, convert, and write to outfile
try:
for line in infile:
try:
line = line.replace("\r\n", "\n") # win
line = line.replace("\r", "\n") # mac
line = line.replace("\n", eol)
outfile.write(line)
except Exception, details:
raise Exception, "Error writing to %s: %s" \
% (tempfilename, details);
except Exception, details:
warn("Error in I/O: %s" % details)
error_cleanup()
else:
#
# Finish up: overwrite original file with tempfile
try:
infile.close()
outfile.close()
shutil.copystat(infilename, tempfilename)
overwrite = False
if outfilename == "":
outfilename = infilename
overwrite = True
rename_file(tempfilename, outfilename, overwrite)
warn("%s was successfully converted from %s to %s" \
% (infilename, from_enc, to_enc))
warn("") # finish a block
except Exception, details:
warn("Renaming %s to %s FAILED. File was not converted: %s" \
% (tempfilename, infilename, details))
error_cleanup()
warn("") # finish a block
else:
warn("File '%s' does not exist\n" % file)
def convert_eol(infilename, eol=os.linesep, outfilename=""):
    """Convert just the end-of-line characters of a text file.

    If eol is given, it must be an appropriate string to terminate a line,
    i.e. "\\n", "\\r\\n", "\\r". It defaults to the standard line ending
    for your os (os.linesep).
    If outfilename is given, the results will be written to that file, if
    not, the conversion is done in-place.
    The encoding of the file is left untouched.
    """
    def error_cleanup():
        # Close whatever got opened and drop the partial temp file, but only
        # while the original input file is still in place.
        if hasattr(infile, 'close'):
            infile.close()
        if hasattr(outfile, 'close'):
            outfile.close()
        if os.path.isfile(tempfilename) and os.path.isfile(infilename):
            os.remove(tempfilename)

    warn("Processing %s ... " % infilename, nl=False)
    if not os.path.isfile(infilename):
        # Bug fix: the original interpolated the builtin `file` here instead
        # of the name of the missing input file.
        warn("File '%s' does not exist\n" % infilename)
        return None
    # choose a temp file name that does not clash with an existing file
    tempfilename = infilename + ".eol"
    while os.path.isfile(tempfilename):
        tempfilename = tempfilename + "x"
    # open original file (infile) and tempfile (outfile); binary mode keeps
    # the byte stream (and therefore the encoding) untouched
    infile = outfile = None
    try:
        infile = open(infilename, "rb")
    except Exception as details:
        warn("Error opening %s: %s" % (infilename, details))
        error_cleanup()
        return None
    try:
        outfile = open(tempfilename, "wb")
    except Exception as details:
        warn("Error opening %s: %s" % (tempfilename, details))
        error_cleanup()
        return None
    # go through infile, normalise every line ending to "\n", then expand
    # "\n" to the requested eol while writing to outfile
    try:
        for line in infile:
            try:
                if "\x00\r" in line or "\x00\n" in line:
                    # UTF-16 input: the eol marker itself needs NUL padding
                    eol = eol.replace("\r", "\x00\r")
                    eol = eol.replace("\n", "\x00\n")
                line = line.replace("\r\n", "\n")          # win
                line = line.replace("\x00\r\x00\n", "\n")  # utf16 win
                line = line.replace("\r", "\n")            # mac
                line = line.replace("\x00\r", "\n")        # utf16 mac
                line = line.replace("\x00\n", "\n")        # utf16 unix
                line = line.replace("\n", eol)
                outfile.write(line)
            except Exception as details:
                raise Exception("Error writing to %s: %s"
                                % (tempfilename, details))
    except Exception as details:
        warn("Error in I/O: %s" % details)
        error_cleanup()
    else:
        # Finish up: overwrite original file with tempfile
        try:
            infile.close()
            outfile.close()
            shutil.copystat(infilename, tempfilename)
            overwrite = False
            if outfilename == "":
                outfilename = infilename
                overwrite = True
            rename_file(tempfilename, outfilename, overwrite)
            warn("Successfully converted eol for %s" % infilename)
            warn("")  # finish a block
        except Exception as details:
            warn("Renaming %s to %s FAILED. File was not converted: %s"
                 % (tempfilename, infilename, details))
            error_cleanup()
            warn("")  # finish a block
def guess_encoding(data):
    """Attempt to decode a byte string, returning (decoded_unicode, encoding).

    Candidate encodings are tried in order: 'utf-8' first, then anything the
    current locale advertises, then a fixed list of common encodings.
    The calling program *must* first call
        locale.setlocale(locale.LC_ALL, '')
    Raises ``UnicodeError`` if no candidate can decode the data.
    Taken from:
    http://www.pyzine.com/Issue008/Section_Articles/article_Encodings.html#guessing-the-encoding
    """
    # 'utf-8' always goes first
    candidates = ['utf-8']
    # next, anything we can learn from the locale; each of these calls can
    # fail (or return None) depending on the platform
    try:
        candidates.append(locale.nl_langinfo(locale.CODESET))
    except AttributeError:
        pass
    try:
        candidates.append(locale.getlocale()[1])
    except (AttributeError, IndexError):
        pass
    try:
        candidates.append(locale.getdefaultlocale()[1])
    except (AttributeError, IndexError):
        pass
    # finally a fixed list of widely used encodings
    candidates += ['latin-1', 'mbcs', 'big5', 'euc_jp', 'euc_kr',
                   'gb2312', 'gbk', 'gb18030', 'hz', 'iso2022_jp',
                   'koi8_u', 'ptcp154', 'shift_jis']
    for candidate in candidates:
        # some of the locale calls may have returned None
        if not candidate:
            continue
        try:
            return (unicode(data, candidate), candidate)
        except (UnicodeError, LookupError):
            pass
    raise UnicodeError(
        'Unable to decode input data. Tried the following encodings: %s.'
        % ', '.join([repr(c) for c in candidates if c]))
def guess_file_encoding(filename, default):
    """Guess the encoding of a text file.

    If the function is able to guess an encoding for filename, return that
    encoding, otherwise return the default.
    Note that guessing an encoding is not fool-proof, this might return the
    wrong encoding.
    Adapted from:
    http://www.pyzine.com/Issue008/Section_Articles/article_Encodings.html#guessing-the-encoding
    """
    try:
        f = open(filename, "rb")
        try:
            the_text = f.read()
        finally:
            # Bug fix: the original leaked the file handle when read() raised.
            f.close()
    except Exception as details:
        warn("Error while trying to guess the encoding of file %s: %s"
             % (filename, details))
        return default
    bomdict = {codecs.BOM_UTF8: 'UTF8',
               codecs.BOM_UTF16_BE: 'UTF-16BE',
               codecs.BOM_UTF16_LE: 'UTF-16LE'}
    # a Unicode signature (BOM), when present, decides the encoding outright
    for bom, encoding in bomdict.items():
        if the_text.startswith(bom):
            the_text = the_text[len(bom):]
            break
    else:
        # the for-loop finished without break: no BOM found
        bom = None
        encoding = None
    if encoding is None:  # there was no BOM, fall back to trial decoding
        try:
            unicode_text, encoding = guess_encoding(the_text)
        except UnicodeError:
            warn("Can't work out the encoding of file '%s'." % filename)
            warn("Assuming the default encoding: %s" % default)
            return default
    warn("Guessed encoding for file '%s': %s" % (filename, encoding))
    return encoding
def rename_file(file1, file2, overwrite=False):
    """Rename file1 to file2, asking for directions if file2 already exists.

    Interactively offers to overwrite, pick a new target name, or abort.
    Pass overwrite=True to replace an existing file2 without prompting.
    Returns None without renaming when file1 does not exist or the user aborts.
    """
    if not os.path.isfile(file1):
        return None
    if os.path.isfile(file2):
        while not overwrite:
            answer = raw_input("%s already exists. Do you want to overwrite? Yes [No] Abort: "
                               % file2).lower()
            if answer == 'yes':
                overwrite = True
            elif answer == 'abort':
                return None
            else:
                answer = raw_input("Enter a new filename: ")
                if answer != '':
                    file2 = os.path.normcase(answer)
                if not os.path.isfile(file2):
                    overwrite = True
        # Bug fix: the user may have chosen a fresh, non-existent name in the
        # loop above, in which case there is nothing to remove any more —
        # the unconditional os.remove(file2) used to raise OSError then.
        if file2 != file1 and os.path.isfile(file2):
            os.remove(file2)
    try:
        os.rename(file1, file2)
    except Exception as details:
        warn("Failed to rename %s to %s: %s" % (file1, file2, details))
def usage():
    """Print the module docstring as the usage text for the main program."""
    print(__doc__)
# Script entry point: run main() only when executed directly, not on import.
if __name__ == "__main__":
    main()
|
jm/stump | 1 | lib/stump/metaid.rb | # thanks _why
# http://whytheluckystiff.net/articles/seeingMetaclassesClearly.html
class Object
# The hidden singleton lurks behind everyone
def metaclass; class << self; self; end; end
def meta_eval &blk; metaclass.instance_eval &blk; end
# Adds methods to a metaclass
def meta_def name, &blk
meta_eval { define_method name, &blk }
end
# Defines an instance method within a class
def class_def name, &blk
class_eval { define_method name, &blk }
end
end | # thanks _why
# http://whytheluckystiff.net/articles/seeingMetaclassesClearly.html
class Object
# The hidden singleton lurks behind everyone
def metaclass; class << self; self; end; end
def meta_eval &blk; metaclass.instance_eval &blk; end
# Adds methods to a metaclass
def meta_def name, &blk
meta_eval {
define_method(name) {|*args, &block|
blk.call(*args, &block)
}
}
end
# Defines an instance method within a class
def class_def name, &blk
class_eval { define_method name, &blk }
end
end |
braintree/braintree_python | 138 | braintree/subscription.py | from decimal import Decimal
from braintree.util.http import Http
import braintree
import warnings
from braintree.add_on import AddOn
from braintree.descriptor import Descriptor
from braintree.discount import Discount
from braintree.exceptions.not_found_error import NotFoundError
from braintree.resource_collection import ResourceCollection
from braintree.subscription_status_event import SubscriptionStatusEvent
from braintree.successful_result import SuccessfulResult
from braintree.error_result import ErrorResult
from braintree.transaction import Transaction
from braintree.resource import Resource
from braintree.configuration import Configuration
class Subscription(Resource):
    """
    A class representing a Subscription.
    An example of creating a subscription with all available fields::
        result = braintree.Subscription.create({
            "id": "my_subscription_id",
            "merchant_account_id": "merchant_account_one",
            "payment_method_token": "my_payment_token",
            "plan_id": "some_plan_id",
            "price": "29.95",
            "trial_duration": 1,
            "trial_duration_unit": braintree.Subscription.TrialDurationUnit.Month,
            "trial_period": True
        })
    For more information on Subscriptions, see https://developers.braintreepayments.com/reference/request/subscription/create/python
    """
    # NEXT_MAJOR_VERSION this can be an enum! they were added as of python 3.4 and we support 3.5+
    class TrialDurationUnit(object):
        """
        Constants representing trial duration units. Available types are:
        * braintree.Subscription.TrialDurationUnit.Day
        * braintree.Subscription.TrialDurationUnit.Month
        """
        Day = "day"
        Month = "month"
    # NEXT_MAJOR_VERSION this can be an enum! they were added as of python 3.4 and we support 3.5+
    class Source(object):
        Api = "api"
        ControlPanel = "control_panel"
        Recurring = "recurring"
    # NEXT_MAJOR_VERSION this can be an enum! they were added as of python 3.4 and we support 3.5+
    class Status(object):
        """
        Constants representing subscription statuses. Available statuses are:
        * braintree.Subscription.Status.Active
        * braintree.Subscription.Status.Canceled
        * braintree.Subscription.Status.Expired
        * braintree.Subscription.Status.PastDue
        * braintree.Subscription.Status.Pending
        """
        Active = "Active"
        Canceled = "Canceled"
        Expired = "Expired"
        PastDue = "Past Due"
        Pending = "Pending"
    @staticmethod
    def create(params=None):
        """
        Create a Subscription
        Token and Plan are required::
            result = braintree.Subscription.create({
                "payment_method_token": "my_payment_token",
                "plan_id": "some_plan_id",
            })
        """
        if params is None:
            params = {}
        return Configuration.gateway().subscription.create(params)
    @staticmethod
    def create_signature():
        return [
            "billing_day_of_month",
            "first_billing_date",
            "id",
            "merchant_account_id",
            "never_expires",
            "number_of_billing_cycles",
            "payment_method_nonce",
            "payment_method_token",
            "plan_id",
            "price",
            "trial_duration",
            "trial_duration_unit",
            "trial_period",
            {
                "descriptor": ["name", "phone", "url"]
            },
            {
                "options": [
                    "do_not_inherit_add_ons_or_discounts",
                    "start_immediately",
                    {
                        "paypal": ["description"]
                    }
                ]
            }
        ] + Subscription._add_ons_discounts_signature()
    @staticmethod
    def find(subscription_id):
        """
        Find a subscription given a subscription_id. This does not return a result
        object. This will raise a :class:`NotFoundError <braintree.exceptions.not_found_error.NotFoundError>`
        if the provided subscription_id is not found. ::
            subscription = braintree.Subscription.find("my_subscription_id")
        """
        return Configuration.gateway().subscription.find(subscription_id)
    @staticmethod
    def retry_charge(subscription_id, amount=None, submit_for_settlement=False):
        return Configuration.gateway().subscription.retry_charge(subscription_id, amount, submit_for_settlement)
    @staticmethod
    def update(subscription_id, params=None):
        """
        Update an existing subscription
        By subscription_id. The params are similar to create::
            result = braintree.Subscription.update("my_subscription_id", {
                "price": "9.99",
            })
        """
        if params is None:
            params = {}
        return Configuration.gateway().subscription.update(subscription_id, params)
    @staticmethod
    def cancel(subscription_id):
        """
        Cancel a subscription
        By subscription_id::
            result = braintree.Subscription.cancel("my_subscription_id")
        """
        return Configuration.gateway().subscription.cancel(subscription_id)
    @staticmethod
    def search(*query):
        """
        Allows searching on subscriptions. There are two types of fields that are searchable: text and
        multiple value fields. Searchable text fields are:
        - plan_id
        - days_past_due
        Searchable multiple value fields are:
        - status
        For text fields, you can search using the following operators: ==, !=, starts_with, ends_with
        and contains. For multiple value fields, you can search using the in_list operator. An example::
            braintree.Subscription.search([
                braintree.SubscriptionSearch.plan_id.starts_with("abc"),
                braintree.SubscriptionSearch.days_past_due == "30",
                braintree.SubscriptionSearch.status.in_list([braintree.Subscription.Status.PastDue])
            ])
        """
        return Configuration.gateway().subscription.search(*query)
    @staticmethod
    def update_signature():
        return [
            "id",
            "merchant_account_id",
            "never_expires",
            "number_of_billing_cycles",
            "payment_method_nonce",
            "payment_method_token",
            "plan_id",
            "price",
            {
                "descriptor": [ "name", "phone", "url" ]
            },
            {
                "options": [
                    "prorate_charges",
                    "replace_all_add_ons_and_discounts",
                    "revert_subscription_on_proration_failure",
                    {
                        "paypal": [ "description" ]
                    }
                ]
            }
        ] + Subscription._add_ons_discounts_signature()
    @staticmethod
    def _add_ons_discounts_signature():
        return [
            {
                "add_ons": [{
                    "add": ["amount", "inherited_from_id", "never_expires", "number_of_billing_cycles", "quantity"],
                    "remove": ["__any_key__"],
                    "update": ["amount", "existing_id", "never_expires", "number_of_billing_cycles", "quantity"]
                }],
                "discounts": [{
                    "add": ["amount", "inherited_from_id", "never_expires", "number_of_billing_cycles", "quantity"],
                    "remove": ["__any_key__"],
                    "update": ["amount", "existing_id", "never_expires", "number_of_billing_cycles", "quantity"]
                }]
            }
        ]
    def __init__(self, gateway, attributes):
        # Wrap raw gateway attributes: money fields become Decimal and nested
        # structures are converted to their typed resource classes.
        Resource.__init__(self, gateway, attributes)
        if "price" in attributes:
            self.price = Decimal(self.price)
        if "balance" in attributes:
            self.balance = Decimal(self.balance)
        if "next_billing_period_amount" in attributes:
            self.next_billing_period_amount = Decimal(self.next_billing_period_amount)
        if "add_ons" in attributes:
            self.add_ons = [AddOn(gateway, add_on) for add_on in self.add_ons]
        if "descriptor" in attributes:
            self.descriptor = Descriptor(gateway, attributes.pop("descriptor"))
        if "description" in attributes:
            self.description = attributes["description"]
        if "discounts" in attributes:
            self.discounts = [Discount(gateway, discount) for discount in self.discounts]
        if "status_history" in attributes:
            self.status_history = [SubscriptionStatusEvent(gateway, status_event) for status_event in self.status_history]
        if "transactions" in attributes:
            self.transactions = [Transaction(gateway, transaction) for transaction in self.transactions]
| from decimal import Decimal
from braintree.util.http import Http
import braintree
import warnings
from braintree.add_on import AddOn
from braintree.descriptor import Descriptor
from braintree.discount import Discount
from braintree.exceptions.not_found_error import NotFoundError
from braintree.resource_collection import ResourceCollection
from braintree.subscription_status_event import SubscriptionStatusEvent
from braintree.successful_result import SuccessfulResult
from braintree.error_result import ErrorResult
from braintree.transaction import Transaction
from braintree.resource import Resource
from braintree.configuration import Configuration
class Subscription(Resource):
"""
A class representing a Subscription.
An example of creating a subscription with all available fields::
result = braintree.Subscription.create({
"id": "my_subscription_id",
"merchant_account_id": "merchant_account_one",
"payment_method_token": "my_payment_token",
"plan_id": "some_plan_id",
"price": "29.95",
"trial_duration": 1,
"trial_duration_unit": braintree.Subscription.TrialDurationUnit.Month,
"trial_period": True
})
For more information on Subscriptions, see https://developers.braintreepayments.com/reference/request/subscription/create/python
"""
# NEXT_MAJOR_VERSION this can be an enum! they were added as of python 3.4 and we support 3.5+
class TrialDurationUnit(object):
"""
Constants representing trial duration units. Available types are:
* braintree.Subscription.TrialDurationUnit.Day
* braintree.Subscription.TrialDurationUnit.Month
"""
Day = "day"
Month = "month"
# NEXT_MAJOR_VERSION this can be an enum! they were added as of python 3.4 and we support 3.5+
class Source(object):
Api = "api"
ControlPanel = "control_panel"
Recurring = "recurring"
# NEXT_MAJOR_VERSION this can be an enum! they were added as of python 3.4 and we support 3.5+
class Status(object):
"""
Constants representing subscription statuses. Available statuses are:
* braintree.Subscription.Status.Active
* braintree.Subscription.Status.Canceled
* braintree.Subscription.Status.Expired
* braintree.Subscription.Status.PastDue
* braintree.Subscription.Status.Pending
"""
Active = "Active"
Canceled = "Canceled"
Expired = "Expired"
PastDue = "Past Due"
Pending = "Pending"
@staticmethod
def create(params=None):
"""
Create a Subscription
Token and Plan are required:::
result = braintree.Subscription.create({
"payment_method_token": "my_payment_token",
"plan_id": "some_plan_id",
})
"""
if params is None:
params = {}
return Configuration.gateway().subscription.create(params)
@staticmethod
def create_signature():
return [
"billing_day_of_month",
"first_billing_date",
"id",
"merchant_account_id",
"never_expires",
"number_of_billing_cycles",
"payment_method_nonce",
"payment_method_token",
"plan_id",
"price",
"trial_duration",
"trial_duration_unit",
"trial_period",
{
"descriptor": ["name", "phone", "url"]
},
{
"options": [
"do_not_inherit_add_ons_or_discounts",
"start_immediately",
{
"paypal": ["description"]
}
]
}
] + Subscription._add_ons_discounts_signature()
@staticmethod
def find(subscription_id):
"""
Find a subscription given a subscription_id. This does not return a result
object. This will raise a :class:`NotFoundError <braintree.exceptions.not_found_error.NotFoundError>`
if the provided subscription_id is not found. ::
subscription = braintree.Subscription.find("my_subscription_id")
"""
return Configuration.gateway().subscription.find(subscription_id)
@staticmethod
def retry_charge(subscription_id, amount=None, submit_for_settlement=False):
return Configuration.gateway().subscription.retry_charge(subscription_id, amount, submit_for_settlement)
@staticmethod
def update(subscription_id, params=None):
"""
Update an existing subscription
By subscription_id. The params are similar to create::
result = braintree.Subscription.update("my_subscription_id", {
"price": "9.99",
})
"""
if params is None:
params = {}
return Configuration.gateway().subscription.update(subscription_id, params)
@staticmethod
def cancel(subscription_id):
"""
Cancel a subscription
By subscription_id::
result = braintree.Subscription.cancel("my_subscription_id")
"""
return Configuration.gateway().subscription.cancel(subscription_id)
@staticmethod
def search(*query):
"""
Allows searching on subscriptions. There are two types of fields that are searchable: text and
multiple value fields. Searchable text fields are:
- plan_id
- days_past_due
Searchable multiple value fields are:
- status
For text fields, you can search using the following operators: ==, !=, starts_with, ends_with
and contains. For multiple value fields, you can search using the in_list operator. An example::
braintree.Subscription.search([
braintree.SubscriptionSearch.plan_id.starts_with("abc"),
braintree.SubscriptionSearch.days_past_due == "30",
braintree.SubscriptionSearch.status.in_list([braintree.Subscription.Status.PastDue])
])
"""
return Configuration.gateway().subscription.search(*query)
@staticmethod
def update_signature():
return [
"id",
"merchant_account_id",
"never_expires",
"number_of_billing_cycles",
"payment_method_nonce",
"payment_method_token",
"plan_id",
"price",
{
"descriptor": [ "name", "phone", "url" ]
},
{
"options": [
"prorate_charges",
"replace_all_add_ons_and_discounts",
"revert_subscription_on_proration_failure",
{
"paypal": [ "description" ]
}
]
}
] + Subscription._add_ons_discounts_signature()
@staticmethod
def _add_ons_discounts_signature():
return [
{
"add_ons": [{
"add": ["amount", "inherited_from_id", "never_expires", "number_of_billing_cycles", "quantity"],
"remove": ["__any_key__"],
"update": ["amount", "existing_id", "never_expires", "number_of_billing_cycles", "quantity"]
}],
"discounts": [{
"add": ["amount", "inherited_from_id", "never_expires", "number_of_billing_cycles", "quantity"],
"remove": ["__any_key__"],
"update": ["amount", "existing_id", "never_expires", "number_of_billing_cycles", "quantity"]
}]
}
]
def __init__(self, gateway, attributes):
Resource.__init__(self, gateway, attributes)
if "price" in attributes:
self.price = Decimal(self.price)
if "balance" in attributes:
self.balance = Decimal(self.balance)
if "next_billing_period_amount" in attributes:
self.next_billing_period_amount = Decimal(self.next_billing_period_amount)
if "add_ons" in attributes:
self.add_ons = [AddOn(gateway, add_on) for add_on in self.add_ons]
if "descriptor" in attributes:
self.descriptor = Descriptor(gateway, attributes.pop("descriptor"))
if "description" in attributes:
self.description = attributes["description"]
if "discounts" in attributes:
self.discounts = [Discount(gateway, discount) for discount in self.discounts]
if "status_history" in attributes:
self.status_history = [SubscriptionStatusEvent(gateway, status_event) for status_event in self.status_history]
if "transactions" in attributes:
self.transactions = [Transaction(gateway, transaction) for transaction in self.transactions]
|
mpeters/smolder | 10 | htdocs/js/smolder.js | /*
Behaviour
    This class is inspired by Ben Nolan's behaviour.js script
(http://bennolan.com/behaviour/) but uses Prototype's
$$() and Element.getElementsBySelector() functions since
they support more CSS syntax and are already loaded with
prototype.
*/
var Behaviour = {
    // registry of CSS selector -> behaviour function(element); filled via register()
    rules : $H({}),
    // Merge a new set of selector -> behaviour rules into the registry.
    register : function(new_rules) {
        Behaviour.rules = Behaviour.rules.merge(new_rules);
    },
    // Apply every registered behaviour to the elements matching its selector.
    // When `el` is given, only search within that element (used after AJAX
    // updates); otherwise search the whole document.
    apply : function(el) {
        //console.log('START BEHAVIOUR APPLICATION');
        Behaviour.rules.each(function(pair) {
            var rule = pair.key;
            //var start_time = new Date().valueOf();
            //console.log('applying: ' + rule);
            var behaviour = pair.value;
            // if we have an element, use Element.select()
            // else use $$() to find the targets
            var targets;
            if( el ) {
                targets = $(el).select(rule);
            } else {
                targets = $$(rule);
            }
            // if we got anything back then apply the behaviour
            //console.log('   found ' + targets.size() + ' elements');
            if( targets.size() > 0 ) {
                targets.each(function(target) { behaviour(target) });
            }
            //var end_time = new Date().valueOf();
            //console.log('   took: ' + (end_time - start_time) + 'ms');
        });
    }
};
var Smolder = {};

// Smolder.load(target, json)
// Run everything that needs to happen after new content is loaded into the
// page: refresh the navigation (when the server asked for it), re-apply the
// registered behaviours to the new DOM, run queued Smolder.onload()
// callbacks, and show any server-sent messages.
Smolder.load = function(target, json) {
    if(! json) json = {};

    // update the navigation if we need to
    if( json.update_nav ) Smolder.update_nav();

    // apply our registered behaviours
    Behaviour.apply(target);

    // run any code from Smolder.onload().
    // Bug fix: the original drained the queue with pop(), which executed the
    // callbacks in reverse (LIFO) order even though Smolder.onload()
    // documents first-in-first-out execution; shift() preserves the
    // registration order.
    while (Smolder.onload_code.length > 0) {
        var code = Smolder.onload_code.shift();
        if( code ) code();
    }

    // show any messages we got
    Smolder.show_messages(json);
};
/*
    Smolder.onload()
    Add some code that will get executed after the DOM is loaded
    (but without having to wait on images, etc to load).
    Multiple calls will not overwrite previous calls and all code
    given will be executed in the order given.
*/
// queue of pending callbacks; drained (and emptied) by Smolder.load()
Smolder.onload_code = [];
Smolder.onload = function(code) {
    Smolder.onload_code.push(code);
};
/*
    Smolder.Cookie.get(name)
    Returns the value of a specific cookie, or null when it is not set.
*/
Smolder.Cookie = {};
Smolder.Cookie.get = function(name) {
    var cookie = document.cookie;
    if ( cookie.length == 0 ) {
        return null;
    }
    var start = cookie.indexOf( name + '=' );
    if ( start == -1 ) {
        // the cookie does not exist
        return null;
    }
    // NOTE(review): indexOf() would also match `name` as the suffix of a
    // longer cookie name — confirm that is acceptable here.
    start += name.length + 1; // skip past the '='
    var end = cookie.indexOf( ';', start );
    if ( end == -1 ) {
        end = cookie.length;
    }
    // unescape() is legacy but kept for behaviour parity
    return unescape( cookie.substring( start, end ) );
};
/*
    Smolder.Cookie.set(name, value)
    Sets a cookie to a particular value.
    NOTE(review): no path/expires attributes are set, so this creates a
    session cookie scoped to the current path — confirm that is intended.
*/
Smolder.Cookie.set = function(name, value) {
    document.cookie = name + '=' + value;
};
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// Refresh the site navigation by reloading the 'nav' element from the server.
Smolder.update_nav = function(){
    Smolder.Ajax.update({
        url : '/app/public/nav',
        target : 'nav'
    });
}
///////////////////////////////////////////////////////////////////////////////////////////////////
// FUNCTION: Smolder.Ajax.request
// Perform an AJAX request (without updating any DOM target).
// Takes the following named args:
//  url       : the full url of the request (required)
//  params    : an object of query params to send along (optional)
//  indicator : the id of the image to show while the request runs (optional;
//              contrary to the old comment there is NO default)
//  onComplete / onFailure / onException : optional callbacks; onComplete
//              receives the original args augmented with .request and .json
//
//  Smolder.Ajax.request({
//      url        : '/app/some_mod/something',
//      params     : { foo: 1, bar: 2 },
//      indicator  : 'add_indicator',
//      onComplete : function(args) {
//          // do something
//      }
//  });
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.Ajax = {};
Smolder.Ajax.request = function(args) {
    var url          = args.url;
    var params       = args.params || {};
    var indicator    = args.indicator;
    // default every callback to a no-op (the original had stray ";;" here)
    var on_complete  = args.onComplete  || Prototype.emptyFunction;
    var on_failure   = args.onFailure   || Prototype.emptyFunction;
    var on_exception = args.onException || Prototype.emptyFunction;

    // tell the user that we're doing something
    Smolder.show_indicator(indicator);

    // add the ajax=1 flag to the existing query params
    params.ajax = 1;

    new Ajax.Request(
        url,
        {
            parameters  : params,
            asynchronous: true,
            evalScripts : true,
            onComplete  : function(request, json) {
                // json is guaranteed an object from here on
                if(! json ) json = {};
                Smolder.show_messages(json);
                // hide the indicator
                Smolder.hide_indicator(indicator);
                // hand the caller the augmented args
                args.request = request;
                args.json    = json;
                on_complete(args);
            },
            onException: function(request, exception) {
                on_exception();
                alert("ERROR FROM AJAX REQUEST:\n" + exception);
            },
            onFailure: function(request) {
                on_failure();
                Smolder.show_error();
            }
        }
    );
};
///////////////////////////////////////////////////////////////////////////////////////////////////
// FUNCTION: Smolder.Ajax.update
// Perform an AJAX request and replace the contents of a target element
// with the response. Takes the following named args:
//  url       : the full url of the request (required)
//  params    : an object of query params to send along (optional)
//  target    : the id of the element receiving the contents (optional, defaults to 'content')
//  indicator : the id of the image to show while the request runs (optional)
//  onComplete / onFailure / onException : optional callbacks; onComplete
//              receives the original args augmented with .request and .json
//
//  Smolder.Ajax.update({
//      url        : '/app/some_mod/something',
//      params     : { foo: 1, bar: 2 },
//      target     : 'div_id',
//      indicator  : 'add_indicator',
//      onComplete : function(args) {
//          // do something
//      }
//  });
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.Ajax.update = function(args) {
    var url          = args.url;
    var params       = args.params || {};
    var target       = args.target;
    var indicator    = args.indicator;
    var on_complete  = args.onComplete || Prototype.emptyFunction;
    var on_failure   = args.onFailure  || Prototype.emptyFunction;
    // Bug fix: this used to read args.onFailure, so a caller-supplied
    // onException was silently ignored (cf. Smolder.Ajax.request which
    // reads args.onException correctly).
    var on_exception = args.onException || Prototype.emptyFunction;

    // tell the user that we're doing something
    Smolder.show_indicator(indicator);

    // add the ajax=1 flag to the existing query params
    params.ajax = 1;

    // the default target
    if( target == null || target == '' )
        target = 'content';

    new Ajax.Updater(
        { success : target },
        url,
        {
            parameters  : params,
            asynchronous: true,
            evalScripts : true,
            onComplete  : function(request, json) {
                Smolder.load(target, json);
                // hide the indicator
                Smolder.hide_indicator(indicator);
                // hand the caller the augmented args
                args.request = request;
                args.json    = json || {};
                on_complete(args);
            },
            onException: function(request, exception) {
                on_exception();
                alert("ERROR FROM AJAX REQUEST:\n" + exception);
            },
            onFailure: function(request) {
                on_failure();
                Smolder.show_error();
            }
        }
    );
};
///////////////////////////////////////////////////////////////////////////////////////////////////
// FUNCTION: Smolder.Ajax.form_update
// Serialize and submit a form via Smolder.Ajax.update().
// Takes the following named args:
//  form : the form object (required)
// All other arguments are passed through to the underlying
// Smolder.Ajax.update() call.
//
//  Smolder.Ajax.form_update({
//      form   : formObj,
//      target : 'div_id'
//  });
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.Ajax.form_update = function(args) {
    var form    = args.form;
    args.url    = form.action;
    args.params = Form.serialize(form, true);
    if(! args.onComplete ) args.onComplete = Prototype.emptyFunction;

    // disable all of this form's currently-enabled inputs (prevents a
    // double submit) and remember which ones we disabled
    var form_disabled_inputs = Smolder.disable_form(form);

    var oldOnComplete = args.onComplete;
    var reset_things = function() {
        // reset which popup forms are open
        Smolder.PopupForm.shown_popup_id = '';
        // re-enable the inputs we disabled above
        Smolder.reenable_form(form, form_disabled_inputs);
    };
    // NOTE(review): Smolder.Ajax.update() invokes onComplete with a single
    // args object, so "request" actually receives that object and "json" is
    // undefined; oldOnComplete still sees the expected args as its first
    // argument — confirm before renaming these parameters.
    args.onComplete = function(request, json) {
        oldOnComplete(request, json);
        reset_things();
    };
    args.onFailure   = reset_things;
    args.onException = reset_things;

    // now submit this normally
    Smolder.Ajax.update(args);
};
// Disable every currently-enabled input of the form and return the list of
// their names (a Hash is used so duplicate names are only recorded once).
Smolder.disable_form = function(form) {
    var newly_disabled = $H();
    $A(form.elements).each(function(el, idx) {
        if (el.disabled) return;
        newly_disabled.set(el.name, true);
        el.disabled = true;
    });
    return newly_disabled.keys();
};
// Re-enable the form inputs whose names were recorded by Smolder.disable_form.
Smolder.reenable_form = function(form, inputs) {
    if (!form || inputs.length == 0) return;
    $A(inputs).each(function(name, idx) {
        if (!name || !form.elements[name]) return;
        // a name can map to several inputs (radio groups etc.)
        $A(form.elements[name]).each(function(el) {
            el.disabled = false;
        });
    });
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// Display the canned AJAX failure message (markup taken from the hidden
// 'ajax_error_container' element) in an alert-style notification.
Smolder.show_error = function() {
    var markup = $('ajax_error_container').innerHTML;
    // NOTE(review): autoHide is the *string* 'false' — verify the Notify
    // library expects a string here rather than a boolean.
    new Notify.Alert(
        markup,
        {
            messagecolor : '#FFFFFF',
            autoHide     : 'false'
        }
    );
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// Smolder.PopupForm: manages the single currently-visible popup form.
// Only one popup may be open at a time; its element id is tracked in
// shown_popup_id ('' when none is open).
Smolder.PopupForm = {
    shown_popup_id: '',
    // Open popup_id (closing any other open popup first); toggling the popup
    // that is already open closes it instead.  Always returns false so that
    // inline onclick handlers cancel the default link/button action.
    toggle: function(popup_id) {
        // first turn off any other forms showing of this type
        var old_popup_id = Smolder.PopupForm.shown_popup_id;
        if( old_popup_id != '' && $(old_popup_id) != null ) {
            Smolder.PopupForm.hide();
        }
        if( old_popup_id == popup_id ) {
            Smolder.PopupForm.shown_popup_id = '';
        } else {
            new Effect.SlideDown(popup_id, { duration: .1 });
            Smolder.PopupForm.shown_popup_id = popup_id;
        }
        return false;
    },
    // Open popup_id only if it is not already the visible popup.
    show: function(popup_id) {
        if( Smolder.PopupForm.shown_popup_id != popup_id ) {
            Smolder.PopupForm.toggle(popup_id);
        }
    },
    // Slide the current popup closed and clear the tracking id.
    hide: function() {
        new Effect.SlideUp( Smolder.PopupForm.shown_popup_id, { duration: .1 } );
        Smolder.PopupForm.shown_popup_id = '';
    }
};
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// Rebuild the smoke-graph image URL from the graph options form and point
// the 'graph_image' element at it; the server renders the graph itself.
// NOTE(review): escape() is legacy and only handles Latin-1; consider
// encodeURIComponent() — confirm the server side first.
Smolder.changeSmokeGraph = function(form) {
    var type = form.elements['type'].value;
    var url = form.action + "/" + escape(type) + "?change=1&";
    // add each field that we want to see to the URL
    var fields = new Array('total', 'pass', 'fail', 'skip', 'todo', 'duration');
    fields.each(
        function(value, index) {
            if( form.elements[value].checked ) {
                url = url + escape(value) + '=1&';
            }
        }
    );
    // add any extra args that we might want to search by
    var extraArgs = ['start', 'stop', 'tag', 'platform', 'architecture'];
    for(var i = 0; i < extraArgs.length; i++) {
        var arg = extraArgs[i];
        if( form.elements[arg] != null && form.elements[arg].value != '') {
            url = url + arg + '=' + escape(form.elements[arg].value) + '&';
        }
    }
    $('graph_image').src = url;
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// Submit the (in)validate form for a smoke report and refresh that report's
// display block via AJAX.
Smolder.toggleSmokeValid = function(form) {
    // derive the smoke report id from the trigger's element id
    // TODO - replace with one regex
    var trigger = form.id.replace(/_trigger$/, '');
    var smokeId = trigger.replace(/^(in)?valid_form_/, '');
    var divId = "smoke_test_" + smokeId;

    // we are currently not showing any other forms.
    // Bug fix: the tracking property is "shown_popup_id"; the original set a
    // stray "shown_form_id" property that nothing reads.
    Smolder.PopupForm.shown_popup_id = '';

    Smolder.Ajax.form_update({
        form      : form,
        target    : divId,
        indicator : trigger + "_indicator"
    });
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.newSmokeReportWindow = function(url) {
    // Open the report details in a separate named popup window.
    // (fixed: the window feature is spelled 'resizable', not 'resizeable')
    window.open(
        url,
        'report_details',
        'resizable=yes,width=850,height=600,scrollbars=yes'
    );
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.makeFormAjaxable = function(element) {
    // Ajax-ify a form: submitting it updates the div named by a
    // "for_<id>" class on the form, showing the indicator image named
    // by a "show_<id>" class. See the 'ajaxable' rules in myrules.
    var forMatch = element.className.match(/(^|\s)for_([^\s]+)($|\s)/);
    var target = forMatch ? forMatch[2] : undefined;

    var showMatch = element.className.match(/(^|\s)show_([^\s]+)($|\s)/);
    var indicatorId = showMatch ? showMatch[2] : undefined;

    element.onsubmit = function(event) {
        Smolder.Ajax.form_update({
            form : element,
            target : target,
            indicator : indicatorId
        });
        return false;   // suppress the normal (non-ajax) submission
    };
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.makeLinkAjaxable = function(element) {
    // Ajax-ify a link: clicking it loads element.href into the div named
    // by a "for_<id>" class, showing the indicator image named by a
    // "show_<id>" class. See the 'ajaxable' rules in myrules.
    var forMatch = element.className.match(/(^|\s)for_([^\s]+)($|\s)/);
    var target = forMatch ? forMatch[2] : undefined;

    var showMatch = element.className.match(/(^|\s)show_([^\s]+)($|\s)/);
    var indicatorId = showMatch ? showMatch[2] : undefined;

    element.onclick = function(event) {
        Smolder.Ajax.update({
            url : element.href,
            target : target,
            indicator : indicatorId
        });
        return false;   // suppress the normal navigation
    };
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.show_indicator = function(indicator) {
    // Show the busy-indicator image; silently ignore a missing id.
    var el = $(indicator);
    if( el ) {
        Element.show(el);
    }
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.hide_indicator = function(indicator) {
    // Hide the busy-indicator image; silently ignore a missing id.
    var el = $(indicator);
    if( el ) {
        Element.hide(el);
    }
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.highlight = function(el) {
    // Fade the element's background from white to — and leave it at —
    // pale yellow (used on focus of .hl inputs).
    var transition = {
        'startcolor' : '#ffffff',
        'endcolor' : '#ffff99',
        'restorecolor': '#ffff99'
    };
    new Effect.Highlight(el, transition);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.unHighlight = function(el) {
    // Fade the element's background from pale yellow back to white
    // (used on blur of .hl inputs).
    var transition = {
        'startcolor' : '#ffff99',
        'endcolor' : '#ffffff',
        'restorecolor': '#ffffff'
    };
    new Effect.Highlight(el, transition);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.flash = function(el) {
    // Briefly pulse the element yellow, ending on white.
    // The transition is identical to Smolder.unHighlight, so delegate.
    Smolder.unHighlight(el);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.show_messages = function(json) {
    // Display every message carried in an ajax JSON payload, if any.
    if( json ) {
        var queued = json.messages || [];
        queued.each(function(msg) {
            Smolder.show_message(msg.type, msg.msg);
        });
    }
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// Running counter so every message div gets a unique id.
Smolder.__message_count = 0;
Smolder.message_template = new Template('<div class="#{type}" id="message_#{count}">#{text}</div>');
// Insert a message div ('type' doubles as its CSS class) at the top of
// #message_container; it fades out after 7 seconds or when clicked.
Smolder.show_message = function(type, text) {
    Smolder.__message_count++;
    // insert it at the top of the messages
    $('message_container').insert({
        top: Smolder.message_template.evaluate({
            type  : type,
            count : Smolder.__message_count,
            text  : text
        })
    });
    // fade it out after 7 seconds, or onclick
    var el = $('message_' + Smolder.__message_count);
    var fade = function() { new Effect.Fade(el, { duration: .4 } ); };
    el.onclick = fade;
    setTimeout(fade, 7000);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.setup_tooltip = function(trigger, target) {
    // Clicking the trigger toggles the tooltip target with an 'appear'
    // effect. (fixed: Effect.toggle is a plain utility function, not a
    // constructor, so it should not be invoked with 'new')
    trigger.onclick = function() {
        Effect.toggle(target, 'appear', { duration: .4 });
    };
}
// CRUD abstraction for Smolder
// A page-wide registry of every CRUD widget, keyed by the id of its
// container div.
Smolder.__known_CRUDS = { };
Smolder.CRUD = Class.create();
/* Class methods */
Smolder.CRUD.find = function(id) {
    return Smolder.__known_CRUDS[id];
};
Smolder.CRUD.remember = function(crud) {
    Smolder.__known_CRUDS[crud.div.id] = crud;
};
Smolder.CRUD.forget = function(crud) {
    // mark the slot false rather than deleting it, so find() stays cheap
    Smolder.__known_CRUDS[crud.div.id] = false;
};
/* Object methods */
Object.extend(Smolder.CRUD.prototype, {
    // Wire up one CRUD widget living in the div with the given id.
    // 'url' is the base application url for this resource; the list view
    // is fetched from url + '/list'.
    initialize: function(id, url) {
        this.div = $(id);
        this.url = url;
        this.list_url = url + '/list';
        // initialize these if we don't already have a crud
        this.add_shown = false;
        // find the containers, triggers and indicator that won't change
        this.list_container = this.div.select('.list_container')[0];
        this.add_container = this.div.select('.add_container')[0];
        this.indicator = this.div.select('.indicator')[0].id;
        this.add_trigger = this.div.select('.add_trigger')[0];
        // add the handlers for the triggers
        this.add_trigger.onclick = function() {
            this.toggle_add();
            // prevent submission of the link
            return false;
        }.bindAsEventListener(this);
        // find our triggers that might change (edit and delete)
        this.refresh();
        // the fact that we've created this CRUD
        Smolder.CRUD.remember(this);
    },
    // Re-scan the list container for edit/delete links and (re)attach
    // their click handlers; must be re-run after every list refresh.
    refresh: function() {
        this.edit_triggers = $(this.list_container).select('.edit_trigger');
        this.delete_triggers = $(this.list_container).select('.delete_trigger');
        this.edit_triggers.each(
            function(trigger) {
                trigger.onclick = function() {
                    this.show_edit(trigger);
                    // prevent submission of the link
                    return false;
                }.bindAsEventListener(this);
            }.bindAsEventListener(this)
        );
        this.delete_triggers.each(
            function(trigger) {
                trigger.onclick = function() {
                    this.show_delete(trigger);
                    // prevent submission of the link
                    return false;
                }.bindAsEventListener(this);
            }.bindAsEventListener(this)
        );
    },
    // Open the add panel if closed, close it if open.
    toggle_add: function() {
        if( this.add_shown ) {
            this.hide_add();
        } else {
            this.show_add();
        }
    },
    hide_add: function() {
        new Effect.SlideUp(this.add_container);
        this.add_shown = false;
    },
    // Fetch the add form from the server into the add container and
    // slide it open.
    show_add: function() {
        Smolder.Ajax.update({
            url        : this.add_trigger.href,
            target     : this.add_container.id,
            indicator  : this.indicator,
            onComplete : function(args) {
                if( !this.add_shown ) {
                    new Effect.SlideDown(this.add_container)
                }
                this.add_shown = true;
                // make sure we submit the add changes correctly
                this._handle_form_submit('add_form');
            }.bindAsEventListener(this)
        });
    },
    // Route the named form's (add_form/edit_form) submission through
    // submit_change() instead of a normal page post.
    _handle_form_submit: function(name) {
        var form = $(this.add_container).select('.' + name)[0];
        if( form ) {
            form.onsubmit = function() {
                this.submit_change(form);
                return false;
            }.bindAsEventListener(this);
        }
    },
    // Fetch the edit form for the item named by the trigger's
    // "for_item_<id>" class into the add container.
    show_edit: function(trigger) {
        var matches = trigger.className.match(/(^|\s)for_item_(\d+)($|\s)/);
        var itemId = matches[2];
        if( itemId == null )
            return;
        Smolder.Ajax.update({
            url        : trigger.href,
            target     : this.add_container.id,
            indicator  : this.indicator,
            onComplete : function() {
                if( !this.add_shown ) {
                    Effect.SlideDown(this.add_container);
                }
                this.add_shown = true;
                // setup the 'cancel' button
                var cancel = $(this.add_container).select('.edit_cancel')[0];
                cancel.onclick = function() { this.hide_add(); }.bindAsEventListener(this);
                // make sure we submit the add changes correctly
                this._handle_form_submit('edit_form');
            }.bindAsEventListener(this)
        });
    },
    // Pop up the pre-rendered delete confirmation form for the item.
    show_delete: function(trigger) {
        var matches = trigger.className.match(/(^|\s)for_item_(\d+)($|\s)/);
        var itemId = matches[2];
        // set the onsubmit handler for the form in this popup
        var form = $('delete_form_' + itemId);
        form.onsubmit = function() {
            Smolder.Ajax.update({
                url       : form.action,
                // NOTE(review): this.list_container_id is never assigned
                // (initialize sets this.list_container, the element), so
                // Smolder.Ajax.update falls back to its default 'content'
                // target here — confirm that is intended.
                target    : this.list_container_id,
                indicator : 'delete_indicator_' + itemId
            });
            return false;
        }.bindAsEventListener(this);
        // show the popup form
        var popup = 'delete_' + itemId;
        Smolder.PopupForm.toggle(popup);
    },
    // Submit the add/edit form via ajax; on success collapse the panel
    // and refresh the list.
    submit_change: function(form) {
        // find the add_indicator
        var indicator = $(this.add_container).select('.add_indicator')[0].id;
        Smolder.Ajax.form_update({
            form       : form,
            target     : this.add_container.id,
            indicator  : indicator,
            onComplete : function(args) {
                if(! args.json ) args.json = {};
                // if the submission changed the list
                if( args.json.list_changed ) {
                    this.add_shown = false;
                    Element.hide(this.add_container);
                    this.update_list();
                }
                // since the add/edit forms may exist still
                // (ie, their submission was incorrect so it reappears with error msgs)
                // we need to make sure they're submitted correctly the 2nd time
                this._handle_form_submit('add_form');
                this._handle_form_submit('edit_form');
            }.bindAsEventListener(this)
        });
    },
    // Reload the list view from the server.
    update_list: function () {
        Smolder.Ajax.update({
            url       : this.list_url,
            indicator : this.indicator,
            // NOTE(review): list_container_id is never assigned (see
            // show_delete above) — this falls back to the 'content' target.
            target    : this.list_container_id
        });
    }
});
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// These are our JS behaviors that are applied externally to HTML elements just like CSS
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// Behaviour rules: each key is a CSS selector, each value a function run
// against every matching element after page load and after every ajax
// update (see Behaviour.apply / Smolder.load).
var myrules = {
    'a.calendar_trigger' : function(element) {
        // now find the id's of the calendar div and input based on the id of the trigger
        // triggers are named $inputId_calendar_trigger, and calendar divs are named
        // $inputId_calendar
        var inputId = element.id.replace(/_calendar_trigger/, '');
        var input = $(inputId);
        Calendar.setup({
            // NOTE(review): inputField is given input.name — presumably
            // the input's name matches its id; confirm against the markup.
            inputField  : input.name,
            ifFormat    : "%m/%d/%Y",
            button      : element.id,
            weekNumbers : false,
            showOthers  : true,
            align       : 'CL',
            cache       : true
        });
    },
    'a.smoke_reports_nav' : function(element) {
        // set the limit and then do an ajax request for the form
        element.onclick = function() {
            // paging offset comes from an "offset_<n>" class on the link
            var offset = 0;
            var matches = element.className.match(/(^|\s)offset_([^\s]+)($|\s)/);
            if( matches != null )
                offset = matches[2];
            $('smoke_reports').elements['offset'].value = offset;
            Smolder.Ajax.form_update({
                form      : $('smoke_reports'),
                indicator : 'paging_indicator'
            });
            return false;
        };
    },
    'form.change_smoke_graph' : function(element) {
        element.onsubmit = function() {
            Smolder.changeSmokeGraph(element);
            return false;
        }
    },
    'a.popup_form' : function(element) {
        // the popup div's id is the trigger's id minus "_trigger"
        var popupId = element.id.replace(/_trigger$/, '');
        element.onclick = function() {
            Smolder.PopupForm.toggle(popupId);
            return false;
        };
    },
    'input.cancel_popup' : function(element) {
        element.onclick = function() {
            Smolder.PopupForm.hide();
        };
    },
    'form.toggle_smoke_valid' : function(element) {
        element.onsubmit = function() {
            Smolder.toggleSmokeValid(element);
            return false;
        }
    },
    '#platform_auto_complete' : function(element) {
        new Ajax.Autocompleter(
            'platform',
            'platform_auto_complete',
            '/app/projects/platform_options'
        );
    },
    '#architecture_auto_complete' : function(element) {
        new Ajax.Autocompleter(
            'architecture',
            'architecture_auto_complete',
            '/app/projects/architecture_options'
        );
    },
    'input.auto_submit' : function(element) {
        element.onchange = function(event) {
            element.form.onsubmit();
            return false;
        }
    },
    'select.auto_submit' : function(element) {
        element.onchange = function(event) {
            element.form.onsubmit();
            return false;
        }
    },
    // for ajaxable forms and anchors the target div for updating is determined
    // as follows:
    // Specify the target div's id by adding a 'for_$id' class to the element's
    // class list:
    // <a class="ajaxable for_some_div" ... >
    // This will make the target a div named "some_div"
    // If no target is specified, then it will default to "content"
    'a.ajaxable' : function(element) {
        Smolder.makeLinkAjaxable(element);
    },
    'form.ajaxable' : function(element) {
        Smolder.makeFormAjaxable(element);
    },
    'div.crud' : function(element) {
        // "for_<resource>" class names the app module backing this CRUD
        var matches = element.className.match(/(^|\s)for_(\w+)($|\s)/);
        var url = "/app/" + matches[2];
        new Smolder.CRUD(element.id, url);
    },
    'form.resetpw_form': function(element) {
        Smolder.makeFormAjaxable(element);
        // extend the onsubmit handler to turn off the popup
        var popupId = element.id.replace(/_form$/, '');
        var oldOnSubmit = element.onsubmit;
        element.onsubmit = function() {
            oldOnSubmit();
            Smolder.PopupForm.toggle(popupId);
            return false;
        };
    },
    // on the preferences form, the project selector should update
    // the preferences form with the selected projects preferences
    // from the server
    '#project_preference_selector': function(element) {
        var form = element.form;
        // if we start off looking at the default options
        if( element.value == form.elements['default_pref_id'].value ) {
            Element.show('dev_prefs_sync_button');
            // if we want to show some info - Element.show('default_pref_info');
        }
        element.onchange = function() {
            // if we are the default preference, then show the stuff
            // that needs to be shown
            if( element.value == form.elements['default_pref_id'].value ) {
                Element.show('dev_prefs_sync_button');
                // if we want to show some info - Element.show('default_pref_info');
            } else {
                Element.hide('dev_prefs_sync_button');
                // if we want to show some info - Element.hide('default_pref_info');
            }
            // get the preference details from the server
            Smolder.show_indicator('pref_indicator');
            new Ajax.Request(
                '/app/developer_prefs/get_pref_details',
                {
                    parameters: Form.serialize(form),
                    asynchronous: true,
                    onComplete: function(response, json) {
                        // for every value in our JSON response, set that
                        // same element in the form
                        $A(['email_type', 'email_freq', 'email_limit']).each(
                            function(name) {
                                var elm = form.elements[name];
                                elm.value = json[name];
                                Smolder.flash(elm);
                            }
                        );
                        Smolder.hide_indicator('pref_indicator');
                    },
                    onFailure: function() { Smolder.show_error() }
                }
            );
        };
    },
    // submit the preference form to sync the other preferences
    '#dev_prefs_sync_button': function(element) {
        element.onclick = function() {
            var form = $('update_pref');
            form.elements['sync'].value = 1;
            Smolder.Ajax.form_update({
                form   : form,
                target : 'developer_prefs'
            });
        };
    },
    // highlight selected text, textarea and select inputs
    'input.hl': function(element) {
        element.onfocus = function() { Smolder.highlight(element); };
        element.onblur = function() { Smolder.unHighlight(element); };
    },
    'textarea.hl': function(element) {
        element.onfocus = function() { Smolder.highlight(element); };
        element.onblur = function() { Smolder.unHighlight(element); };
    },
    'select.hl': function(element) {
        element.onfocus = function() { Smolder.highlight(element); };
        element.onblur = function() { Smolder.unHighlight(element); };
    },
    // setup tooltips
    '.tooltip_trigger': function(element) {
        var matches = element.className.match(/(^|\s)for_([^\s]+)($|\s)/);
        if( matches ) {
            var target = matches[2];
            if( target ) {
                Smolder.setup_tooltip(element, $(target));
                Smolder.setup_tooltip($(target), $(target));
            }
        }
    },
    // TAP Matrix triggers for more test file details
    '.tap a.details_trigger' : function(el) {
        // get the id of the target div
        var matches = el.id.match(/^for_(.*)$/);
        var target = matches[1];
        // get the id of the indicator image
        matches = el.className.match(/(^|\s)show_(\S*)($|\s)/);
        var indicator = matches[2];
        el.onclick = function() {
            if( Element.visible(target) ) {
                $(target + '_tap_stream').hide();
                Effect.BlindUp(target, { duration: .1 });
            } else {
                $(indicator).style.visibility = 'visible';
                Smolder.Ajax.update({
                    url : el.href,
                    target : target,
                    indicator : 'none',
                    onComplete : function() {
                        window.setTimeout(function() { $(indicator).style.visibility = 'hidden'}, 200);
                        Effect.BlindDown(
                            target,
                            // reapply any dynamic bits
                            {
                                afterFinish : function() {
                                    $(target + '_tap_stream').show();
                                    $$('.tooltip_trigger').each(function(el) {
                                        var diag = $(el).select('.tooltip')[0];
                                        Smolder.setup_tooltip(el, diag);
                                    });
                                },
                                duration : .1
                            }
                        );
                    }
                });
            }
            return false;
        };
    },
    '.tap div.diag': function(el) {
        Smolder.setup_tooltip(el, el);
    },
    // checkbox that hides passed tests and re-stripes the visible rows
    '#toggle_tests_trigger' : function(el) {
        el.onchange = function() {
            var count = 0;
            $$('.tap tbody').each(function(row) {
                if( el.checked ) {
                    if( row.hasClassName('passed') ) {
                        row.hide();
                    } else {
                        if( count % 2 == 1 ) {
                            if(row.hasClassName('even')) {
                                row.removeClassName('even');
                                row.addClassName('odd');
                            }
                        } else {
                            if(row.hasClassName('odd')) {
                                row.removeClassName('odd');
                                row.addClassName('even');
                            }
                        }
                        count++;
                    }
                } else {
                    if( row.hasClassName('passed') ) {
                        row.show();
                    }
                    if( count % 2 == 1 ) {
                        if(row.hasClassName('even')) {
                            row.removeClassName('even');
                            row.addClassName('odd');
                        }
                    } else {
                        if(row.hasClassName('odd')) {
                            row.removeClassName('odd');
                            row.addClassName('even');
                        }
                    }
                    count++;
                }
            });
        };
    },
    '.tap a.show_all' : function(el) {
        Event.observe(el, 'click', function() {
            // an anonymous function that we use as a callback after each
            // panel is opened so that it opens the next one
            var show_details = function(index) {
                var el = $('testfile_details_' + index);
                // apply the behaviours if we're the last one
                if( ! el ) {
                    Behaviour.apply();
                    return;
                }
                // we only need to fetch it if we're not already visible
                if( Element.visible(el) ) {
                    show_details(++index);
                } else {
                    // NOTE(review): 'matches' leaks as an implicit global,
                    // and this 'var index' shadows the parameter with the
                    // *string* capture — ++index later relies on numeric
                    // coercion. Works, but worth tidying.
                    matches = el.id.match(/^testfile_details_(\d+)$/);
                    var index = matches[1];
                    var indicator = $('indicator_' + index);
                    var div = $('testfile_details_' + index);
                    indicator.style.visibility = 'visible';
                    Smolder.Ajax.update({
                        url : $('for_testfile_details_' + index).href,
                        target : div,
                        indicator : 'none',
                        onComplete : function() {
                            Element.show(div);
                            indicator.style.visibility = 'hidden';
                            show_details(++index);
                        }
                    });
                }
            };
            show_details(0);
        });
    }
};
Behaviour.register(myrules);
/*
Behaviour
Class is inspired from Ben Nolan's behaviour.js script
(http://bennolan.com/behaviour/) but uses Prototype's
$$() and Element.getElementsBySelector() functions since
they support more CSS syntax and are already loaded with
prototype.
*/
var Behaviour = {
    // Registered rules: CSS selector -> handler applied to each match.
    rules : $H({}),

    // Merge a new set of selector/handler pairs into the rule table.
    register : function(new_rules) {
        Behaviour.rules = Behaviour.rules.merge(new_rules);
    },

    // Run every registered rule, scoped to the subtree rooted at 'el'
    // when one is given, otherwise against the whole document.
    apply : function(el) {
        Behaviour.rules.each(function(pair) {
            var selector = pair.key;
            var handler = pair.value;
            var found = el ? $(el).select(selector) : $$(selector);
            // only invoke the handler when something actually matched
            if( found.size() > 0 ) {
                found.each(function(node) { handler(node) });
            }
        });
    }
};
var Smolder = {};

// Post-processing hook run after every ajax content update: refreshes
// the nav when asked, re-applies behaviours to the new content, drains
// the Smolder.onload() queue and shows any server messages.
Smolder.load = function(target, json) {
    json = json || {};
    // update the navigation if we need to
    if( json.update_nav ) Smolder.update_nav();

    // re-attach our registered behaviours to the fresh content
    Behaviour.apply(target);

    // run the deferred Smolder.onload() callbacks queued so far
    var pending = Smolder.onload_code.length;
    for(var i = 0; i < pending; i++) {
        var code = Smolder.onload_code.pop();
        if( code ) code();
    }

    // show any messages we got
    Smolder.show_messages(json);
};
/*
    Smolder.onload()
    Queue a piece of code to run after the DOM is updated by the next
    Smolder.load() (without waiting on images, etc). Multiple calls
    accumulate; everything queued is executed in order.
*/
Smolder.onload_code = [];
Smolder.onload = function(code) {
    Smolder.onload_code.push(code);
};
/*
    Smolder.Cookie.get(name)
    Returns the value of a specific cookie, or null when absent.
*/
Smolder.Cookie = {};
Smolder.Cookie.get = function(name) {
    var value = null;
    var cookie = document.cookie;
    if ( cookie.length > 0 ) {
        // locate "name=" in the raw cookie string
        // NOTE(review): indexOf can also match a *suffix* of a longer
        // cookie name (e.g. 'xname='); verify no such names are in use.
        var start = cookie.indexOf( name + '=' );
        if ( start != -1 ) {
            start += name.length + 1; // skip past the '='
            var end = cookie.indexOf( ';', start );
            if ( end == -1 ) end = cookie.length;
            value = unescape( cookie.substring( start, end ) );
        }
    }
    return value;
};
/*
    Smolder.Cookie.set(name, value)
    Sets a cookie to a particular value.
*/
Smolder.Cookie.set = function(name, value) {
    // session cookie: no path/expiry/domain attributes are set
    document.cookie = name + '=' + value;
};
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.update_nav = function(){
    // Reload the navigation panel from the server into the #nav div.
    Smolder.Ajax.update({
        target : 'nav',
        url : '/app/public/nav'
    });
}
///////////////////////////////////////////////////////////////////////////////////////////////////
// FUNCTION: Smolder.Ajax.request
// takes the following named args
// url : the full url of the request (required)
// parmas : an object of query params to send along
// indicator : the id of the image to use as an indicator (optional defaults to 'indicator')
// onComplete: a call back function to be executed after the normal processing (optional)
// Receives as arguments, the same args passed into Smolder.Ajax.request
//
// Smolder.Ajax.request({
// url : '/app/some_mod/something',
// params : { foo: 1, bar: 2 },
// indicator : 'add_indicator',
// onComplete : function(args) {
// // do something
// }
// });
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.Ajax = {};

// Smolder.Ajax.request(args)
//   url       : full url of the request (required)
//   params    : object of extra query params
//   indicator : id of the image shown while the request is in flight
//   onComplete / onFailure / onException : optional callbacks;
//     onComplete receives the original args augmented with
//     .request (the Ajax.Request) and .json (the response headers' JSON)
Smolder.Ajax.request = function(args) {
    var url          = args.url;
    var params       = args.params || {};
    var indicator    = args.indicator;
    // (cleaned up: stray double semicolons removed)
    var on_complete  = args.onComplete  || Prototype.emptyFunction;
    var on_failure   = args.onFailure   || Prototype.emptyFunction;
    var on_exception = args.onException || Prototype.emptyFunction;

    // tell the user that we're doing something
    Smolder.show_indicator(indicator);

    // add the ajax=1 flag to the existing query params so the server
    // knows this is an ajax request
    params.ajax = 1;

    new Ajax.Request(
        url,
        {
            parameters : params,
            asynchronous: true,
            evalScripts : true,
            onComplete : function(request, json) {
                if(! json ) json = {};
                Smolder.show_messages(json);
                // hide the indicator
                Smolder.hide_indicator(indicator);
                // hand everything back to the caller
                args.request = request;
                args.json = json || {};
                on_complete(args);
            },
            onException: function(request, exception) {
                on_exception();
                alert("ERROR FROM AJAX REQUEST:\n" + exception);
            },
            onFailure: function(request) {
                on_failure();
                Smolder.show_error();
            }
        }
    );
};
///////////////////////////////////////////////////////////////////////////////////////////////////
// FUNCTION: Smolder.Ajax.update
// takes the following named args
// url : the full url of the request (required)
// parmas : an object of query params to send along
// target : the id of the element receiving the contents (optional defaults to 'content')
// indicator : the id of the image to use as an indicator (optional defaults to 'indicator')
// onComplete: a call back function to be executed after the normal processing (optional)
// Receives as arguments, the same args passed into Smolder.Ajax.update
//
// Smolder.Ajax.update({
// url : '/app/some_mod/something',
// params : { foo: 1, bar: 2 },
// target : 'div_id',
// indicator : 'add_indicator',
// onComplete : function(args) {
// // do something
// }
// });
///////////////////////////////////////////////////////////////////////////////////////////////////
// Smolder.Ajax.update(args)
//   Like Smolder.Ajax.request() but replaces the contents of the
//   'target' element (default: 'content') with the response, then runs
//   Smolder.load() over the new content.
Smolder.Ajax.update = function(args) {
    var url          = args.url;
    var params       = args.params || {};
    var target       = args.target;
    var indicator    = args.indicator;
    var on_complete  = args.onComplete || Prototype.emptyFunction;
    var on_failure   = args.onFailure  || Prototype.emptyFunction;
    // fixed: this previously read args.onFailure (copy-paste bug), so a
    // caller-supplied onException handler was silently ignored
    var on_exception = args.onException || Prototype.emptyFunction;

    // tell the user that we're doing something
    Smolder.show_indicator(indicator);

    // add the ajax=1 flag to the existing query params
    params.ajax = 1;

    // the default target
    if( target == null || target == '' )
        target = 'content';

    new Ajax.Updater(
        { success : target },
        url,
        {
            parameters : params,
            asynchronous: true,
            evalScripts : true,
            onComplete : function(request, json) {
                // re-apply behaviours etc. to the freshly inserted content
                Smolder.load(target, json);
                // hide the indicator
                Smolder.hide_indicator(indicator);
                // hand everything back to the caller
                args.request = request;
                args.json = json || {};
                on_complete(args);
            },
            onException: function(request, exception) {
                on_exception();
                alert("ERROR FROM AJAX REQUEST:\n" + exception);
            },
            onFailure: function(request) {
                on_failure();
                Smolder.show_error();
            }
        }
    );
};
///////////////////////////////////////////////////////////////////////////////////////////////////
// FUNCTION: Smolder.Ajax.form_update
// takes the following named args
// form : the form object (required)
//
// All other arguments are passed to the underlying Smolder.Ajax.update() call
//
// Smolder.Ajax.form_update({
// form : formObj,
// target : 'div_id'
// });
///////////////////////////////////////////////////////////////////////////////////////////////////
// Smolder.Ajax.form_update(args)
//   args.form is serialized and submitted to form.action via
//   Smolder.Ajax.update(); all other args pass straight through.
//   Inputs are disabled for the duration to prevent double submission.
Smolder.Ajax.form_update = function(args) {
    var form = args.form;
    args.url = form.action;
    args.params = Form.serialize(form, true);
    if(! args.onComplete ) args.onComplete = Prototype.emptyFunction;

    // disable every currently-enabled input, remembering which ones we
    // touched so only those get re-enabled afterwards
    var disabled_inputs = Smolder.disable_form(form);

    var caller_on_complete = args.onComplete;
    var restore = function() {
        // no popup form is considered open any more
        Smolder.PopupForm.shown_popup_id = '';
        // re-enable the inputs we disabled ourselves
        Smolder.reenable_form(form, disabled_inputs);
    };
    args.onComplete = function(request, json) {
        caller_on_complete(request, json);
        restore();
    };
    args.onFailure = restore;
    args.onException = restore;

    // now submit this normally
    Smolder.Ajax.update(args);
};
Smolder.disable_form = function(form) {
    // Disable every input that isn't already disabled and return the
    // names of the ones we changed (for Smolder.reenable_form).
    var touched = $H();
    $A(form.elements).each(
        function(input, i) {
            if( !input.disabled ) {
                touched.set(input.name, true);
                input.disabled = true;
            }
        }
    );
    return touched.keys();
};
Smolder.reenable_form = function(form, inputs) {
    // Re-enable the named inputs (as returned by Smolder.disable_form).
    if( form && inputs.length > 0 ) {
        $A(inputs).each(
            function(name, i) {
                if( name && form.elements[name] ) {
                    // form.elements[name] may be a single input or a
                    // collection (e.g. radio groups) — handle both
                    $A(form.elements[name]).each(function(input) {
                        input.disabled = false;
                    });
                }
            }
        );
    }
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.show_error = function() {
    // Pop up the canned ajax error message; its markup lives in the
    // hidden #ajax_error_container element on the page.
    var markup = $('ajax_error_container').innerHTML;
    new Notify.Alert(
        markup,
        {
            messagecolor : '#FFFFFF',
            autoHide : 'false'
        }
    );
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.PopupForm = {
    // id of the popup currently open ('' when none)
    shown_popup_id: '',

    // Open the popup if it's closed, close it if it's open; any other
    // popup that is already open gets closed first. Always returns
    // false so it can be used directly as an onclick handler.
    toggle: function(popup_id) {
        var previous = Smolder.PopupForm.shown_popup_id;
        if( previous != '' && $(previous) != null ) {
            Smolder.PopupForm.hide();
        }
        if( previous == popup_id ) {
            // toggling the one that was open: leave everything closed
            Smolder.PopupForm.shown_popup_id = '';
        } else {
            new Effect.SlideDown(popup_id, { duration: .1 });
            Smolder.PopupForm.shown_popup_id = popup_id;
        }
        return false;
    },
    show: function(popup_id) {
        // no-op when this popup is already the visible one
        if( Smolder.PopupForm.shown_popup_id != popup_id ) {
            Smolder.PopupForm.toggle(popup_id);
        }
    },
    hide: function() {
        new Effect.SlideUp( Smolder.PopupForm.shown_popup_id, { duration: .1 } );
        Smolder.PopupForm.shown_popup_id = '';
    }
};
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.changeSmokeGraph = function(form) {
    // Rebuild the smoke-graph image URL from the form's current settings
    // and point the #graph_image <img> at it.
    var type = form.elements['type'].value;
    // encodeURIComponent (instead of the deprecated escape()) correctly
    // encodes '+', '/' and non-ASCII characters in query values
    var url = form.action + "/" + encodeURIComponent(type) + "?change=1&";

    // add each data series that we want to see to the URL
    var fields = new Array('total', 'pass', 'fail', 'skip', 'todo', 'duration');
    fields.each(
        function(value, index) {
            if( form.elements[value].checked ) {
                url = url + encodeURIComponent(value) + '=1&';
            }
        }
    );

    // add any extra args that we might want to search by
    var extraArgs = ['start', 'stop', 'tag', 'platform', 'architecture'];
    for(var i = 0; i < extraArgs.length; i++) {
        var arg = extraArgs[i];
        if( form.elements[arg] != null && form.elements[arg].value != '') {
            url = url + arg + '=' + encodeURIComponent(form.elements[arg].value) + '&';
        }
    }
    $('graph_image').src = url;
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.toggleSmokeValid = function(form) {
    // Derive the smoke report id from the form's id, which looks like
    // "(in)valid_form_<id>_trigger", and refresh that report's div.
    // TODO - replace with one regex
    var trigger = form.id.replace(/_trigger$/, '');
    var smokeId = trigger.replace(/^(in)?valid_form_/, '');
    var divId = "smoke_test_" + smokeId;

    // we are currently not showing any other forms
    // (fixed: this previously assigned to 'shown_form_id', a property
    // nothing reads — PopupForm tracks its state in 'shown_popup_id')
    Smolder.PopupForm.shown_popup_id = '';

    Smolder.Ajax.form_update({
        form : form,
        target : divId,
        indicator : trigger + "_indicator"
    });
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.newSmokeReportWindow = function(url) {
    // Open the report details in a separate named popup window.
    // (fixed: the window feature is spelled 'resizable', not 'resizeable')
    window.open(
        url,
        'report_details',
        'resizable=yes,width=850,height=600,scrollbars=yes'
    );
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.makeFormAjaxable = function(element) {
    // Ajax-ify a form: submitting it updates the div named by a
    // "for_<id>" class on the form, showing the indicator image named
    // by a "show_<id>" class. See the 'ajaxable' rules in myrules.
    var forMatch = element.className.match(/(^|\s)for_([^\s]+)($|\s)/);
    var target = forMatch ? forMatch[2] : undefined;

    var showMatch = element.className.match(/(^|\s)show_([^\s]+)($|\s)/);
    var indicatorId = showMatch ? showMatch[2] : undefined;

    element.onsubmit = function(event) {
        Smolder.Ajax.form_update({
            form : element,
            target : target,
            indicator : indicatorId
        });
        return false;   // suppress the normal (non-ajax) submission
    };
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.makeLinkAjaxable = function(element) {
    // Ajax-ify a link: clicking it loads element.href into the div named
    // by a "for_<id>" class, showing the indicator image named by a
    // "show_<id>" class. See the 'ajaxable' rules in myrules.
    var forMatch = element.className.match(/(^|\s)for_([^\s]+)($|\s)/);
    var target = forMatch ? forMatch[2] : undefined;

    var showMatch = element.className.match(/(^|\s)show_([^\s]+)($|\s)/);
    var indicatorId = showMatch ? showMatch[2] : undefined;

    element.onclick = function(event) {
        Smolder.Ajax.update({
            url : element.href,
            target : target,
            indicator : indicatorId
        });
        return false;   // suppress the normal navigation
    };
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.show_indicator = function(indicator) {
    // Show the busy-indicator image; silently ignore a missing id.
    var el = $(indicator);
    if( el ) {
        Element.show(el);
    }
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.hide_indicator = function(indicator) {
    // Hide the busy-indicator image; silently ignore a missing id.
    var el = $(indicator);
    if( el ) {
        Element.hide(el);
    }
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.highlight = function(el) {
new Effect.Highlight(
el,
{
'startcolor' : '#ffffff',
'endcolor' : '#ffff99',
'restorecolor': '#ffff99'
}
);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.unHighlight = function(el) {
new Effect.Highlight(
el,
{
'startcolor' : '#ffff99',
'endcolor' : '#ffffff',
'restorecolor': '#ffffff'
}
);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.flash = function(el) {
new Effect.Highlight(
el,
{
'startcolor' : '#ffff99',
'endcolor' : '#ffffff',
'restorecolor': '#ffffff'
}
);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.show_messages = function(json) {
if( json ) {
var msgs = json.messages || [];
msgs.each( function(msg) { Smolder.show_message(msg.type, msg.msg) } );
}
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.__message_count = 0;
Smolder.message_template = new Template('<div class="#{type}" id="message_#{count}">#{text}</div>');
Smolder.show_message = function(type, text) {
Smolder.__message_count++;
// insert it at the top of the messages
$('message_container').insert({
top: Smolder.message_template.evaluate({
type : type,
count : Smolder.__message_count,
text : text
})
});
// fade it out after 10 secs, or onclick
var el = $('message_' + Smolder.__message_count);
var fade = function() { new Effect.Fade(el, { duration: .4 } ); };
el.onclick = fade;
setTimeout(fade, 7000);
}
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
Smolder.setup_tooltip = function(trigger, target) {
trigger.onclick = function() {
new Effect.toggle(target, 'appear', { duration: .4 });
};
}
// CRUD abstraction for Smolder
Smolder.__known_CRUDS = { };
Smolder.CRUD = Class.create();
/* Class methods */
Smolder.CRUD.find = function(id) { return Smolder.__known_CRUDS[id] };
Smolder.CRUD.remember = function(crud) { Smolder.__known_CRUDS[crud.div.id] = crud; };
Smolder.CRUD.forget = function(crud) { Smolder.__known_CRUDS[crud.div.id] = false; };
/* Object methods */
Object.extend(Smolder.CRUD.prototype, {
initialize: function(id, url) {
this.div = $(id);
this.url = url;
this.list_url = url + '/list';
// initialize these if we don't already have a crud
this.add_shown = false;
// find the containers, triggers and indicator that won't change
this.list_container = this.div.select('.list_container')[0];
this.add_container = this.div.select('.add_container')[0];
this.indicator = this.div.select('.indicator')[0].id;
this.add_trigger = this.div.select('.add_trigger')[0];
// add the handlers for the triggers
this.add_trigger.onclick = function() {
this.toggle_add();
// prevent submission of the link
return false;
}.bindAsEventListener(this);
// find our triggers that might change (edit and delete)
this.refresh();
// the fact that we've created this CRUD
Smolder.CRUD.remember(this);
},
refresh: function() {
this.edit_triggers = $(this.list_container).select('.edit_trigger');
this.delete_triggers = $(this.list_container).select('.delete_trigger');
this.edit_triggers.each(
function(trigger) {
trigger.onclick = function() {
this.show_edit(trigger);
// prevent submission of the link
return false;
}.bindAsEventListener(this);
}.bindAsEventListener(this)
);
this.delete_triggers.each(
function(trigger) {
trigger.onclick = function() {
this.show_delete(trigger);
// prevent submission of the link
return false;
}.bindAsEventListener(this);
}.bindAsEventListener(this)
);
},
toggle_add: function() {
if( this.add_shown ) {
this.hide_add();
} else {
this.show_add();
}
},
hide_add: function() {
new Effect.SlideUp(this.add_container);
this.add_shown = false;
},
show_add: function() {
Smolder.Ajax.update({
url : this.add_trigger.href,
target : this.add_container.id,
indicator : this.indicator,
onComplete : function(args) {
if( !this.add_shown ) {
new Effect.SlideDown(this.add_container)
}
this.add_shown = true;
// make sure we submit the add changes correctly
this._handle_form_submit('add_form');
}.bindAsEventListener(this)
});
},
_handle_form_submit: function(name) {
var form = $(this.add_container).select('.' + name)[0];
if( form ) {
form.onsubmit = function() {
this.submit_change(form);
return false;
}.bindAsEventListener(this);
}
},
show_edit: function(trigger) {
var matches = trigger.className.match(/(^|\s)for_item_(\d+)($|\s)/);
var itemId = matches[2];
if( itemId == null )
return;
Smolder.Ajax.update({
url : trigger.href,
target : this.add_container.id,
indicator : this.indicator,
onComplete : function() {
if( !this.add_shown ) {
Effect.SlideDown(this.add_container);
}
this.add_shown = true;
// setup the 'cancel' button
var cancel = $(this.add_container).select('.edit_cancel')[0];
cancel.onclick = function() { this.hide_add(); }.bindAsEventListener(this);
// make sure we submit the add changes correctly
this._handle_form_submit('edit_form');
}.bindAsEventListener(this)
});
},
show_delete: function(trigger) {
var matches = trigger.className.match(/(^|\s)for_item_(\d+)($|\s)/);
var itemId = matches[2];
// set the onsubmit handler for the form in this popup
var form = $('delete_form_' + itemId);
form.onsubmit = function() {
Smolder.Ajax.update({
url : form.action,
target : this.list_container_id,
indicator : 'delete_indicator_' + itemId
});
return false;
}.bindAsEventListener(this);
// show the popup form
var popup = 'delete_' + itemId;
Smolder.PopupForm.toggle(popup);
},
submit_change: function(form) {
// find the add_inidicator
var indicator = $(this.add_container).select('.add_indicator')[0].id;
Smolder.Ajax.form_update({
form : form,
target : this.add_container.id,
indicator : indicator,
onComplete : function(args) {
if(! args.json ) args.json = {};
// if the submission changed the list
if( args.json.list_changed ) {
this.add_shown = false;
Element.hide(this.add_container);
this.update_list();
}
// since the add/edit forms may exist still
// (ie, their submission was incorrect so it reappears with error msgs)
// we need to make sure they're submitted correctly the 2nd time
this._handle_form_submit('add_form');
this._handle_form_submit('edit_form');
}.bindAsEventListener(this)
});
},
update_list: function () {
Smolder.Ajax.update({
url : this.list_url,
indicator : this.indicator,
target : this.list_container_id
});
}
});
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
// These are our JS behaviors that are applied externally to HTML elements just like CSS
///////////////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////////////
var myrules = {
'a.calendar_trigger' : function(element) {
// now find the id's of the calendar div and input based on the id of the trigger
// triggers are named $inputId_calendar_trigger, and calendar divs are named
// $inputId_calendar
var inputId = element.id.replace(/_calendar_trigger/, '');
var input = $(inputId);
Calendar.setup({
inputField : input.name,
ifFormat : "%m/%d/%Y",
button : element.id,
weekNumbers : false,
showOthers : true,
align : 'CL',
cache : true
});
},
'a.smoke_reports_nav' : function(element) {
// set the limit and then do an ajax request for the form
element.onclick = function() {
var offset = 0;
var matches = element.className.match(/(^|\s)offset_([^\s]+)($|\s)/);
if( matches != null )
offset = matches[2];
$('smoke_reports').elements['offset'].value = offset;
Smolder.Ajax.form_update({
form : $('smoke_reports'),
indicator : 'paging_indicator'
});
return false;
};
},
'form.change_smoke_graph' : function(element) {
element.onsubmit = function() {
Smolder.changeSmokeGraph(element);
return false;
}
},
'a.popup_form' : function(element) {
var popupId = element.id.replace(/_trigger$/, '');
element.onclick = function() {
Smolder.PopupForm.toggle(popupId);
return false;
};
},
'input.cancel_popup' : function(element) {
element.onclick = function() {
Smolder.PopupForm.hide();
};
},
'form.toggle_smoke_valid' : function(element) {
element.onsubmit = function() {
Smolder.toggleSmokeValid(element);
return false;
}
},
'#platform_auto_complete' : function(element) {
new Ajax.Autocompleter(
'platform',
'platform_auto_complete',
'/app/projects/platform_options'
);
},
'#architecture_auto_complete' : function(element) {
new Ajax.Autocompleter(
'architecture',
'architecture_auto_complete',
'/app/projects/architecture_options'
);
},
'input.auto_submit' : function(element) {
element.onchange = function(event) {
element.form.onsubmit();
return false;
}
},
'select.auto_submit' : function(element) {
element.onchange = function(event) {
element.form.onsubmit();
return false;
}
},
// for ajaxable forms and anchors the taget div for updating is determined
// as follows:
// Specify a the target div's id by adding a 'for_$id' class to the elements
// class list:
// <a class="ajaxable for_some_div" ... >
// This will make the target a div named "some_div"
// If no target is specified, then it will default to "content"
'a.ajaxable' : function(element) {
Smolder.makeLinkAjaxable(element);
},
'form.ajaxable' : function(element) {
Smolder.makeFormAjaxable(element);
},
'div.crud' : function(element) {
var matches = element.className.match(/(^|\s)for_(\w+)($|\s)/);
var url = "/app/" + matches[2];
new Smolder.CRUD(element.id, url);
},
'form.resetpw_form': function(element) {
Smolder.makeFormAjaxable(element);
// extend the onsubmit handler to turn off the popup
var popupId = element.id.replace(/_form$/, '');
var oldOnSubmit = element.onsubmit;
element.onsubmit = function() {
oldOnSubmit();
Smolder.PopupForm.toggle(popupId);
return false;
};
},
// on the preferences form, the project selector should update
// the preferences form with the selected projects preferences
// from the server
'#project_preference_selector': function(element) {
var form = element.form;
// if we start off looking at the default options
if( element.value == form.elements['default_pref_id'].value ) {
Element.show('dev_prefs_sync_button');
// if we want to show some info - Element.show('default_pref_info');
}
element.onchange = function() {
// if we are the default preference, then show the stuff
// that needs to be shown
if( element.value == form.elements['default_pref_id'].value ) {
Element.show('dev_prefs_sync_button');
// if we want to show some info - Element.show('default_pref_info');
} else {
Element.hide('dev_prefs_sync_button');
// if we want to show some info - Element.hide('default_pref_info');
}
// get the preference details from the server
Smolder.show_indicator('pref_indicator');
new Ajax.Request(
'/app/developer_prefs/get_pref_details',
{
parameters: Form.serialize(form),
asynchronous: true,
onComplete: function(response, json) {
// for every value in our JSON response, set that
// same element in the form
$A(['email_type', 'email_freq', 'email_limit']).each(
function(name) {
var elm = form.elements[name];
elm.value = json[name];
Smolder.flash(elm);
}
);
Smolder.hide_indicator('pref_indicator');
},
onFailure: function() { Smolder.show_error() }
}
);
};
},
// submit the preference form to sync the other preferences
'#dev_prefs_sync_button': function(element) {
element.onclick = function() {
var form = $('update_pref');
form.elements['sync'].value = 1;
Smolder.Ajax.form_update({
form : form,
target : 'developer_prefs'
});
};
},
// hightlight selected text, textarea and select inputs
'input.hl': function(element) {
element.onfocus = function() { Smolder.highlight(element); };
element.onblur = function() { Smolder.unHighlight(element); };
},
'textarea.hl': function(element) {
element.onfocus = function() { Smolder.highlight(element); };
element.onblur = function() { Smolder.unHighlight(element); };
},
'select.hl': function(element) {
element.onfocus = function() { Smolder.highlight(element); };
element.onblur = function() { Smolder.unHighlight(element); };
},
// setup tooltips
'.tooltip_trigger': function(element) {
var matches = element.className.match(/(^|\s)for_([^\s]+)($|\s)/);
if( matches ) {
var target = matches[2];
if( target ) {
Smolder.setup_tooltip(element, $(target));
Smolder.setup_tooltip($(target), $(target));
}
}
},
// TAP Matrix triggers for more test file details
'.tap a.details_trigger' : function(el) {
// get the id of the target div
var matches = el.id.match(/^for_(.*)$/);
var target = matches[1];
// get the id of the indicator image
matches = el.className.match(/(^|\s)show_(\S*)($|\s)/);
var indicator = matches[2];
el.onclick = function() {
if( Element.visible(target) ) {
$(target + '_tap_stream').hide();
Effect.BlindUp(target, { duration: .1 });
} else {
$(indicator).style.visibility = 'visible';
Smolder.Ajax.update({
url : el.href,
target : target,
indicator : 'none',
onComplete : function() {
window.setTimeout(function() { $(indicator).style.visibility = 'hidden'}, 200);
Effect.BlindDown(
target,
// reapply any dynamic bits
{
afterFinish : function() {
$(target + '_tap_stream').show();
$$('.tooltip_trigger').each(function(el) {
var diag = $(el).select('.tooltip')[0];
Smolder.setup_tooltip(el, diag);
});
},
duration : .1
}
);
}
});
}
return false;
};
},
'.tap div.diag': function(el) {
Smolder.setup_tooltip(el, el);
},
'#toggle_tests_trigger' : function(el) {
el.onchange = function() {
var count = 0;
$$('.tap tbody').each(function(row) {
if( el.checked ) {
if( row.hasClassName('passed') ) {
row.hide();
} else {
if( count % 2 == 1 ) {
if(row.hasClassName('even')) {
row.removeClassName('even');
row.addClassName('odd');
}
} else {
if(row.hasClassName('odd')) {
row.removeClassName('odd');
row.addClassName('even');
}
}
count++;
}
} else {
if( row.hasClassName('passed') ) {
row.show();
}
if( count % 2 == 1 ) {
if(row.hasClassName('even')) {
row.removeClassName('even');
row.addClassName('odd');
}
} else {
if(row.hasClassName('odd')) {
row.removeClassName('odd');
row.addClassName('even');
}
}
count++;
}
});
};
},
'.tap a.show_all' : function(el) {
Event.observe(el, 'click', function() {
// an anonymous function that we use as a callback after each
// panel is opened so that it opens the next one
var show_details = function(index) {
var el = $('testfile_details_' + index);
// apply the behaviours if we're the last one
if( ! el ) {
Behaviour.apply();
return;
}
// we only need to fetch it if we're not already visible
if( Element.visible(el) ) {
show_details(++index);
} else {
matches = el.id.match(/^testfile_details_(\d+)$/);
var index = matches[1];
var indicator = $('indicator_' + index);
var div = $('testfile_details_' + index);
indicator.style.visibility = 'visible';
Smolder.Ajax.update({
url : $('for_testfile_details_' + index).href,
target : div,
indicator : 'none',
onComplete : function() {
Element.show(div);
indicator.style.visibility = 'hidden';
show_details(++index);
}
});
}
};
show_details(0);
});
}
};
Behaviour.register(myrules);
|
jlecour/geokit-rails | 2 | lib/geokit-rails/ip_geocode_lookup.rb | require 'yaml'
module Geokit
# Contains a class method geocode_ip_address which can be used to enable automatic geocoding
# for request IP addresses. The geocoded information is stored in a cookie and in the
# session to minimize web service calls. The point of the helper is to enable location-based
# websites to have a best-guess for new visitors.
module IpGeocodeLookup
# Mix below class methods into ActionController.
def self.included(base) # :nodoc:
base.extend ClassMethods
end
# Class method to mix into active record.
module ClassMethods # :nodoc:
def geocode_ip_address(filter_options = {})
before_filter :store_ip_location, filter_options
end
end
private
# Places the IP address' geocode location into the session if it
# can be found. Otherwise, looks for a geo location cookie and
# uses that value. The last resort is to call the web service to
# get the value.
def store_ip_location
session[:geo_location] ||= retrieve_location_from_cookie_or_service
cookies[:geo_location] = { :value => session[:geo_location].to_yaml, :expires => 30.days.from_now } if session[:geo_location]
end
# Uses the stored location value from the cookie if it exists. If
# no cookie exists, calls out to the web service to get the location.
def retrieve_location_from_cookie_or_service
return YAML.load(cookies[:geo_location]) if cookies[:geo_location]
location = Geocoders::MultiGeocoder.geocode(get_ip_address)
return location.success ? location : nil
end
# Returns the real ip address, though this could be the localhost ip
# address. No special handling here anymore.
def get_ip_address
request.remote_ip
end
end
end
| require 'yaml'
require 'active_support/concern'
module Geokit
# Contains a class method geocode_ip_address which can be used to enable automatic geocoding
# for request IP addresses. The geocoded information is stored in a cookie and in the
# session to minimize web service calls. The point of the helper is to enable location-based
# websites to have a best-guess for new visitors.
module IpGeocodeLookup
extend ActiveSupport::Concern
# Class method to mix into active record.
module ClassMethods # :nodoc:
def geocode_ip_address(filter_options = {})
before_filter :store_ip_location, filter_options
end
end
private
# Places the IP address' geocode location into the session if it
# can be found. Otherwise, looks for a geo location cookie and
# uses that value. The last resort is to call the web service to
# get the value.
def store_ip_location
session[:geo_location] ||= retrieve_location_from_cookie_or_service
cookies[:geo_location] = { :value => session[:geo_location].to_yaml, :expires => 30.days.from_now } if session[:geo_location]
end
# Uses the stored location value from the cookie if it exists. If
# no cookie exists, calls out to the web service to get the location.
def retrieve_location_from_cookie_or_service
return YAML.load(cookies[:geo_location]) if cookies[:geo_location]
location = Geocoders::MultiGeocoder.geocode(get_ip_address)
return location.success ? location : nil
end
# Returns the real ip address, though this could be the localhost ip
# address. No special handling here anymore.
def get_ip_address
request.remote_ip
end
end
end
|
kneath/hemingway | 2 | functions.php | <?php
$themecolors = array(
'bg' => '000000',
'text' => 'bfbfbf',
'link' => 'ffffff',
'border' => '000000'
);
// this varies but the single page content width seems to be 607px max
$content_width = 600;
class Hemingway
{
var $raw_blocks;
var $available_blocks;
var $style;
var $version;
function add_available_block($block_name, $block_ref)
{
$blocks = $this->available_blocks;
if (!$blocks[$block_ref]){
$blocks[$block_ref] = $block_name;
update_option('hem_available_blocks', $blocks);
wp_cache_flush();
}
}
function get_available_blocks()
// This function returns an array of available blocks
// in the format of $arr[block_ref] = block_name
{
$this->available_blocks = get_option('hem_available_blocks');
return $this->available_blocks;
}
function get_block_contents($block_place)
// Returns an array of block_refs in specififed block
{
if (!$this->raw_blocks){
$this->raw_blocks = get_option('hem_blocks');
}
return $this->raw_blocks[$block_place];
}
function add_block_to_place($block_place, $block_ref)
{
$block_contents = $this->get_block_contents($block_place);
if (in_array($block_ref, $block_contents))
return true;
$block_contents[] = $block_ref;
$this->raw_blocks[$block_place] = $block_contents;
update_option('hem_blocks', $this->raw_blocks);
wp_cache_flush(); // I was having caching issues
return true;
}
function remove_block_in_place($block_place, $block_ref)
{
$block_contents = $this->get_block_contents($block_place);
if (!in_array($block_ref, $block_contents))
return true;
$key = array_search($block_ref, $block_contents);
unset($block_contents[$key]);
$this->raw_blocks[$block_place] = $block_contents;
update_option('hem_blocks', $this->raw_blocks);
wp_cache_flush(); // I was having caching issues
return true;
}
// Templating functions
function get_block_output($block_place)
{
$blocks = $this->get_block_contents($block_place);
foreach($blocks as $key => $block ){
include (TEMPLATEPATH . '/blocks/' . $block . '.php');
}
}
function get_style(){
$this->style = get_option('hem_style');
}
}
$hemingway = new Hemingway();
$hemingway->get_available_blocks();
$hemingway->get_style();
$hemingway->version = "0.13";
// Options
$default_blocks = Array(
'recent_entries' => 'Recent Entries',
'about_page' => 'About Page',
'category_listing' => 'Category Listing',
'blogroll' => 'Blogroll',
'pages' => 'Pages',
'monthly_archives' => 'Monthly Archives'
);
$default_block_locations = Array(
'block_1' => Array('about_page'),
'block_2' => Array('recent_entries'),
'block_3' => Array('category_listing'),
'block_4' => Array(),
'block_5' => Array(),
'block_6' => Array()
);
if (!get_option('hem_version') || get_option('hem_version') < $hemingway->version){
// Hemingway isn't installed, so we'll need to add options
if (!get_option('hem_version') )
add_option('hem_version', $hemingway->version, 'Hemingway Version installed');
else
update_option('hem_version', $hemingway->version);
if (!get_option('hem_available_blocks') )
add_option('hem_available_blocks', $default_blocks, 'A list of available blocks for Hemingway');
if (!get_option('hem_blocks') )
add_option('hem_blocks', $default_block_locations, 'An array of blocks and their contents');
if (!get_option('hem_style') )
add_option('hem_style', '', 'Location of custom style sheet');
}
// Ajax Stuff
if ($_GET['hem_action'] == 'add_block'){
$block_ref = $_GET['block_ref'];
$block_place = $_GET['block_place'];
$block_name = $hemingway->available_blocks[$block_ref];
$hemingway->add_block_to_place($block_place, $block_ref);
ob_end_clean(); // Kill preceding output
$output = '<ul>';
foreach($hemingway->get_block_contents($block_place) as $key => $block_ref){
$block_name = $hemingway->available_blocks[$block_ref];
$output .= '<li>' . $block_name . ' (<a href="#" onclick="remove_block(\'' . $block_place . '\', \'' . $block_ref . '\');">remove</a>)</li>';
}
$output .= '</ul>';
echo $output;
exit(); // Kill any more output
}
if ($_GET['hem_action'] == 'remove_block'){
$block_ref = $_GET['block_ref'];
$block_place = $_GET['block_place'];
$hemingway->remove_block_in_place($block_place, $block_ref);
ob_end_clean(); // Kill preceding output
$output = '<ul>';
foreach($hemingway->get_block_contents($block_place) as $key => $block_ref){
$block_name = $hemingway->available_blocks[$block_ref];
$output .= '<li>' . $block_name . ' (<a href="#" onclick="remove_block(\'' . $block_place . '\', \'' . $block_ref . '\');">remove</a>)</li>';
}
$output .= '</ul>';
echo $output;
exit(); // Kill any more output
}
if ($_POST['custom_styles']){
update_option('hem_style', $_POST['custom_styles']);
wp_cache_flush();
$message = 'Styles updated!';
}
if ($_POST['block_ref']){
$hemingway->add_available_block($_POST['display_name'], $_POST['block_ref']);
$hemingway->get_available_blocks();
$message = 'Block added!';
}
// Stuff
add_action ('admin_menu', 'hemingway_menu');
$hem_loc = '../themes/' . basename(dirname($file));
function hemingway_scripts() {
$dir = get_bloginfo('template_directory');
wp_enqueue_script('prototype');
wp_enqueue_script('dragdrop', $dir . '/admin/js/dragdrop.js', false, 1);
wp_enqueue_script('effects', $dir . '/admin/js/effects.js', false, 1);
}
function hemingway_menu() {
$page = add_submenu_page('themes.php', 'Hemingway Options', 'Hemingway Options', 5, $hem_loc . 'functions.php', 'menu');
add_action('load-' . $page, 'hemingway_scripts');
}
function menu() {
global $hem_loc, $hemingway, $message;
?>
<!--
Okay, so I don't honestly know how legit this is, but I want a more intuitive interface
so I'm going to import scriptaculous. There's a good chance this is going to mess stuff up
for some people :)
-->
<script type="text/javascript">
function remove_block(block_place, block_ref){
url = 'themes.php?page=functions.php&hem_action=remove_block&block_place=' + block_place + '&block_ref=' + block_ref;
new Ajax.Updater(block_place, url,
{
evalScripts:true, asynchronous:true
}
)
}
</script>
<style>
.block{
width:200px;
height:200px;
border:1px solid #CCC;
float:left;
margin:20px 1em 20px 0;
padding:10px;
display:inline;
}
.block ul{
padding:0;
margin:0;
}
.block ul li{
margin:0 0 5px 0;
list-style-type:none;
}
.block-active{
border:1px solid #333;
background:#F2F8FF;
}
#addables li{
list-style-type:none;
margin:1em 1em 1em 0;
background:#EAEAEA;
border:1px solid #DDD;
padding:3px;
width:215px;
float:left;
cursor:move;
}
ul#addables{
margin:0;
padding:0;
width:720px;
position:relative;
}
</style>
<? if($message) : ?>
<div id="message" class="updated fade"><p><?=$message?></p></div>
<? endif; ?>
<div class="wrap" style="position:relative;">
<h2><?php _e('Hemingway Options'); ?></h2>
<h3>Color Options</h3>
<p>Choose a primary color for your site:</p>
<form name="dofollow" action="" method="post">
<input type="hidden" name="page_options" value="'dofollow_timeout'" />
<p><label><input name="custom_styles" type="radio" value="none" <?php if ($hemingway->style == 'none') echo 'checked="checked"'; ?> />
Black</label></p>
<p><label><input name="custom_styles" type="radio" value="white.css" <?php if ($hemingway->style == 'white.css') echo 'checked="checked"'; ?> /> White</label></p>
<input type="submit" value="Update Color »" />
</form>
<h3>Hemingway's Bottombar™</h3>
<p>Drag and drop the different blocks into their place below. After you drag the block to the area, it will update with the new contents automatically.</p>
<ul id="addables">
<? foreach($hemingway->available_blocks as $ref => $name) : ?>
<li id="<?= $ref ?>" class="blocks"><?= $name ?></li>
<script type="text/javascript">new Draggable('<?= $ref ?>', {revert:true})</script>
<? endforeach; ?>
</ul>
<div class="clear"></div>
<div class="block" id="block_1">
<ul>
<?
foreach($hemingway->get_block_contents('block_1') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?= $block_name ?> (<a href="#" onclick="remove_block('block_1', '<?=$block_ref?>');">remove</a>)</li>
<? endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_1', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_1', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_1&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_2">
<ul>
<?
foreach($hemingway->get_block_contents('block_2') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?= $block_name ?> (<a href="#" onclick="remove_block('block_2', '<?=$block_ref?>');">remove</a>)</li>
<? endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_2', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_2', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_2&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_3">
<ul>
<?
foreach($hemingway->get_block_contents('block_3') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?= $block_name ?> (<a href="#" onclick="remove_block('block_3', '<?=$block_ref?>');">remove</a>)</li>
<? endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_3', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_3', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_3&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<!-- Maybe later...
<div class="clear"></div>
<div class="block" id="block_4">
Block 4
<ul>
<?
foreach($hemingway->get_block_contents('block_4') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?= $block_name ?> (<a href="#" onclick="remove_block('block_4', '<?=$block_ref?>');">remove</a>)</li>
<? endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_4', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_4', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_4&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_5">
Block 5
<ul>
<?
foreach($hemingway->get_block_contents('block_5') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?= $block_name ?> (<a href="#" onclick="remove_block('block_5', '<?=$block_ref?>');">remove</a>)</li>
<? endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_5', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_5', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_5&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_6">
Block 6
<ul>
<?
foreach($hemingway->get_block_contents('block_6') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?= $block_name ?> (<a href="#" onclick="remove_block('block_6', '<?=$block_ref?>');">remove</a>)</li>
<? endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_6', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_6', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_6&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
-->
<div class="clear"></div>
<?php
$blocks_dir = @ dir(ABSPATH . '/wp-content/themes/' . get_template() . '/blocks');
if ($blocks_dir) {
while(($file = $blocks_dir->read()) !== false) {
if (!preg_match('|^\.+$|', $file) && preg_match('|\.php$|', $file))
$blocks_files[] = $file;
}
}
if ($blocks_dir || $blocks_files) {
foreach($blocks_files as $blocks_file) {
$block_ref = preg_replace('/\.php/', '', $blocks_file);
if (!array_key_exists($block_ref, $hemingway->available_blocks)){
?>
<h3>You have uninstalled blocks!</h3>
<p>Give the block <strong><?=$block_ref ?></strong> a display name (such as "About Page")</p>
<form action="" name="dofollow" method="post">
<input type="hidden" name="block_ref" value="<?=$block_ref?>" />
<?=$block_ref ?> : <input type="text" name="display_name" />
<input type="submit" value="Save" />
</form>
<?
}
}
}
?>
</div>
<?php
}
?>
| <?php
$themecolors = array(
'bg' => '000000',
'text' => 'bfbfbf',
'link' => 'ffffff',
'border' => '000000'
);
// this varies but the single page content width seems to be 607px max
$content_width = 600;
class Hemingway
{
var $raw_blocks;
var $available_blocks;
var $style;
var $version;
function add_available_block($block_name, $block_ref)
{
$blocks = $this->available_blocks;
if (!$blocks[$block_ref]){
$blocks[$block_ref] = $block_name;
update_option('hem_available_blocks', $blocks);
wp_cache_flush();
}
}
function get_available_blocks()
// This function returns an array of available blocks
// in the format of $arr[block_ref] = block_name
{
$this->available_blocks = get_option('hem_available_blocks');
return $this->available_blocks;
}
function get_block_contents($block_place)
// Returns an array of block_refs in specififed block
{
if (!$this->raw_blocks){
$this->raw_blocks = get_option('hem_blocks');
}
return $this->raw_blocks[$block_place];
}
function add_block_to_place($block_place, $block_ref)
{
$block_contents = $this->get_block_contents($block_place);
if (in_array($block_ref, $block_contents))
return true;
$block_contents[] = $block_ref;
$this->raw_blocks[$block_place] = $block_contents;
update_option('hem_blocks', $this->raw_blocks);
wp_cache_flush(); // I was having caching issues
return true;
}
function remove_block_in_place($block_place, $block_ref)
{
$block_contents = $this->get_block_contents($block_place);
if (!in_array($block_ref, $block_contents))
return true;
$key = array_search($block_ref, $block_contents);
unset($block_contents[$key]);
$this->raw_blocks[$block_place] = $block_contents;
update_option('hem_blocks', $this->raw_blocks);
wp_cache_flush(); // I was having caching issues
return true;
}
// Templating functions
function get_block_output($block_place)
{
$blocks = $this->get_block_contents($block_place);
foreach($blocks as $key => $block ){
include (TEMPLATEPATH . '/blocks/' . $block . '.php');
}
}
function get_style(){
$this->style = get_option('hem_style');
}
}
$hemingway = new Hemingway();
$hemingway->get_available_blocks();
$hemingway->get_style();
$hemingway->version = "0.13";
// Options
$default_blocks = Array(
'recent_entries' => 'Recent Entries',
'about_page' => 'About Page',
'category_listing' => 'Category Listing',
'blogroll' => 'Blogroll',
'pages' => 'Pages',
'monthly_archives' => 'Monthly Archives'
);
$default_block_locations = Array(
'block_1' => Array('about_page'),
'block_2' => Array('recent_entries'),
'block_3' => Array('category_listing'),
'block_4' => Array(),
'block_5' => Array(),
'block_6' => Array()
);
if (!get_option('hem_version') || get_option('hem_version') < $hemingway->version){
// Hemingway isn't installed, so we'll need to add options
if (!get_option('hem_version') )
add_option('hem_version', $hemingway->version, 'Hemingway Version installed');
else
update_option('hem_version', $hemingway->version);
if (!get_option('hem_available_blocks') )
add_option('hem_available_blocks', $default_blocks, 'A list of available blocks for Hemingway');
if (!get_option('hem_blocks') )
add_option('hem_blocks', $default_block_locations, 'An array of blocks and their contents');
if (!get_option('hem_style') )
add_option('hem_style', '', 'Location of custom style sheet');
}
// Ajax Stuff
if ($_GET['hem_action'] == 'add_block'){
$block_ref = $_GET['block_ref'];
$block_place = $_GET['block_place'];
$block_name = $hemingway->available_blocks[$block_ref];
$hemingway->add_block_to_place($block_place, $block_ref);
ob_end_clean(); // Kill preceding output
$output = '<ul>';
foreach($hemingway->get_block_contents($block_place) as $key => $block_ref){
$block_name = $hemingway->available_blocks[$block_ref];
$output .= '<li>' . $block_name . ' (<a href="#" onclick="remove_block(\'' . $block_place . '\', \'' . $block_ref . '\');">remove</a>)</li>';
}
$output .= '</ul>';
echo $output;
exit(); // Kill any more output
}
if ($_GET['hem_action'] == 'remove_block'){
$block_ref = $_GET['block_ref'];
$block_place = $_GET['block_place'];
$hemingway->remove_block_in_place($block_place, $block_ref);
ob_end_clean(); // Kill preceding output
$output = '<ul>';
foreach($hemingway->get_block_contents($block_place) as $key => $block_ref){
$block_name = $hemingway->available_blocks[$block_ref];
$output .= '<li>' . $block_name . ' (<a href="#" onclick="remove_block(\'' . $block_place . '\', \'' . $block_ref . '\');">remove</a>)</li>';
}
$output .= '</ul>';
echo $output;
exit(); // Kill any more output
}
if ($_POST['custom_styles']){
update_option('hem_style', $_POST['custom_styles']);
wp_cache_flush();
$message = 'Styles updated!';
}
if ($_POST['block_ref']){
$hemingway->add_available_block($_POST['display_name'], $_POST['block_ref']);
$hemingway->get_available_blocks();
$message = 'Block added!';
}
// Stuff
add_action ('admin_menu', 'hemingway_menu');
$hem_loc = '../themes/' . basename(dirname($file));
function hemingway_scripts() {
$dir = get_bloginfo('template_directory');
wp_enqueue_script('prototype');
wp_enqueue_script('dragdrop', $dir . '/admin/js/dragdrop.js', false, 1);
wp_enqueue_script('effects', $dir . '/admin/js/effects.js', false, 1);
}
function hemingway_menu() {
$page = add_submenu_page('themes.php', 'Hemingway Options', 'Hemingway Options', 5, $hem_loc . 'functions.php', 'menu');
add_action('load-' . $page, 'hemingway_scripts');
}
function menu() {
global $hem_loc, $hemingway, $message;
?>
<!--
Okay, so I don't honestly know how legit this is, but I want a more intuitive interface
so I'm going to import scriptaculous. There's a good chance this is going to mess stuff up
for some people :)
-->
<script type="text/javascript">
function remove_block(block_place, block_ref){
url = 'themes.php?page=functions.php&hem_action=remove_block&block_place=' + block_place + '&block_ref=' + block_ref;
new Ajax.Updater(block_place, url,
{
evalScripts:true, asynchronous:true
}
)
}
</script>
<style>
.block{
width:200px;
height:200px;
border:1px solid #CCC;
float:left;
margin:20px 1em 20px 0;
padding:10px;
display:inline;
}
.block ul{
padding:0;
margin:0;
}
.block ul li{
margin:0 0 5px 0;
list-style-type:none;
}
.block-active{
border:1px solid #333;
background:#F2F8FF;
}
#addables li{
list-style-type:none;
margin:1em 1em 1em 0;
background:#EAEAEA;
border:1px solid #DDD;
padding:3px;
width:215px;
float:left;
cursor:move;
}
ul#addables{
margin:0;
padding:0;
width:720px;
position:relative;
}
</style>
<?php if($message) : ?>
<div id="message" class="updated fade"><p><?php echo $message ?></p></div>
<?php endif; ?>
<div class="wrap" style="position:relative;">
<h2><?php _e('Hemingway Options'); ?></h2>
<h3>Color Options</h3>
<p>Choose a primary color for your site:</p>
<form name="dofollow" action="" method="post">
<input type="hidden" name="page_options" value="'dofollow_timeout'" />
<p><label><input name="custom_styles" type="radio" value="none" <?php if ($hemingway->style == 'none') echo 'checked="checked"'; ?> />
Black</label></p>
<p><label><input name="custom_styles" type="radio" value="white.css" <?php if ($hemingway->style == 'white.css') echo 'checked="checked"'; ?> /> White</label></p>
<input type="submit" value="Update Color »" />
</form>
<h3>Hemingway's Bottombar™</h3>
<p>Drag and drop the different blocks into their place below. After you drag the block to the area, it will update with the new contents automatically.</p>
<ul id="addables">
<?php foreach($hemingway->available_blocks as $ref => $name) : ?>
<li id="<?php echo $ref ?>" class="blocks"><?php echo $name ?></li>
<script type="text/javascript">new Draggable('<?php echo $ref ?>', {revert:true})</script>
<?php endforeach; ?>
</ul>
<div class="clear"></div>
<div class="block" id="block_1">
<ul>
<?php
foreach($hemingway->get_block_contents('block_1') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?php echo $block_name ?> (<a href="#" onclick="remove_block('block_1', '<?php echo $block_ref ?>');">remove</a>)</li>
<?php endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_1', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_1', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_1&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_2">
<ul>
<?php
foreach($hemingway->get_block_contents('block_2') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?php echo $block_name ?> (<a href="#" onclick="remove_block('block_2', '<?php echo $block_ref ?>');">remove</a>)</li>
<?php endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_2', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_2', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_2&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_3">
<ul>
<?php
foreach($hemingway->get_block_contents('block_3') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?php echo $block_name ?> (<a href="#" onclick="remove_block('block_3', '<?php echo $block_ref ?>');">remove</a>)</li>
<?php endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_3', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_3', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_3&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<!-- Maybe later...
<div class="clear"></div>
<div class="block" id="block_4">
Block 4
<ul>
<?php
foreach($hemingway->get_block_contents('block_4') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?php echo $block_name ?> (<a href="#" onclick="remove_block('block_4', '<?php echo $block_ref ?>');">remove</a>)</li>
<?php endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_4', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_4', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_4&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_5">
Block 5
<ul>
<?php
foreach($hemingway->get_block_contents('block_5') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?php echo $block_name ?> (<a href="#" onclick="remove_block('block_5', '<?php echo $block_ref ?>');">remove</a>)</li>
<?php endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_5', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_5', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_5&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
<div class="block" id="block_6">
Block 6
<ul>
<?php
foreach($hemingway->get_block_contents('block_6') as $key => $block_ref) :
$block_name = $hemingway->available_blocks[$block_ref];
?>
<li><?php echo $block_name ?> (<a href="#" onclick="remove_block('block_6', '<?php echo $block_ref ?>');">remove</a>)</li>
<?php endforeach; ?>
</ul>
</div>
<script type="text/javascript">
Droppables.add(
'block_6', {
accept:'blocks',
onDrop:function(element){
new Ajax.Updater('block_6', 'themes.php?page=functions.php&hem_action=add_block&block_place=block_6&block_ref=' + element.id,
{
evalScripts:true, asynchronous:true
}
)
},
hoverclass:'block-active'
}
)
</script>
-->
<div class="clear"></div>
<?php
$blocks_dir = @ dir(ABSPATH . '/wp-content/themes/' . get_template() . '/blocks');
if ($blocks_dir) {
while(($file = $blocks_dir->read()) !== false) {
if (!preg_match('|^\.+$|', $file) && preg_match('|\.php$|', $file))
$blocks_files[] = $file;
}
}
if ($blocks_dir || $blocks_files) {
foreach($blocks_files as $blocks_file) {
$block_ref = preg_replace('/\.php/', '', $blocks_file);
if (!array_key_exists($block_ref, $hemingway->available_blocks)){
?>
<h3>You have uninstalled blocks!</h3>
<p>Give the block <strong><?php echo $block_ref ?></strong> a display name (such as "About Page")</p>
<form action="" name="dofollow" method="post">
<input type="hidden" name="block_ref" value="<?php echo $block_ref ?>" />
<?php echo $block_ref ?> : <input type="text" name="display_name" />
<input type="submit" value="Save" />
</form>
<?php
}
}
}
?>
</div>
<?php
}
?>
|
bcherry/twitter-text-js | 1 | twitter-text.js | if (!window.twttr) {
window.twttr = {};
}
(function() {
twttr.txt = {};
twttr.txt.regexen = {};
var HTML_ENTITIES = {
'&': '&',
'>': '>',
'<': '<',
'"': '"',
"'": ' '
};
// HTML escaping
twttr.txt.htmlEscape = function(text) {
return text && text.replace(/[&"'><]/g, function(character) {
return HTML_ENTITIES[character];
});
};
// Builds a RegExp
function regexSupplant(regex, flags) {
flags = flags || "";
if (typeof regex !== "string") {
if (regex.global && flags.indexOf("g") < 0) {
flags += "g";
}
if (regex.ignoreCase && flags.indexOf("i") < 0) {
flags += "i";
}
if (regex.multiline && flags.indexOf("m") < 0) {
flags += "m";
}
regex = regex.source;
}
return new RegExp(regex.replace(/#\{(\w+)\}/g, function(match, name) {
var newRegex = twttr.txt.regexen[name] || "";
if (typeof newRegex !== "string") {
newRegex = newRegex.source;
}
return newRegex;
}), flags);
}
// simple string interpolation
function stringSupplant(str, values) {
return str.replace(/#\{(\w+)\}/g, function(match, name) {
return values[name] || "";
});
}
// Space is more than %20, U+3000 for example is the full-width space used with Kanji. Provide a short-hand
// to access both the list of characters and a pattern suitible for use with String#split
// Taken from: ActiveSupport::Multibyte::Handlers::UTF8Handler::UNICODE_WHITESPACE
var fromCode = String.fromCharCode;
var UNICODE_SPACES = [
fromCode(0x0020), // White_Space # Zs SPACE
fromCode(0x0085), // White_Space # Cc <control-0085>
fromCode(0x00A0), // White_Space # Zs NO-BREAK SPACE
fromCode(0x1680), // White_Space # Zs OGHAM SPACE MARK
fromCode(0x180E), // White_Space # Zs MONGOLIAN VOWEL SEPARATOR
fromCode(0x2028), // White_Space # Zl LINE SEPARATOR
fromCode(0x2029), // White_Space # Zp PARAGRAPH SEPARATOR
fromCode(0x202F), // White_Space # Zs NARROW NO-BREAK SPACE
fromCode(0x205F), // White_Space # Zs MEDIUM MATHEMATICAL SPACE
fromCode(0x3000) // White_Space # Zs IDEOGRAPHIC SPACE
];
for (var i = 0x009; i <= 0x000D; i++) { // White_Space # Cc [5] <control-0009>..<control-000D>
UNICODE_SPACES.push(String.fromCharCode(i));
}
for (var i = 0x2000; i <= 0x200A; i++) { // White_Space # Zs [11] EN QUAD..HAIR SPACE
UNICODE_SPACES.push(String.fromCharCode(i));
}
twttr.txt.regexen.spaces = regexSupplant("[" + UNICODE_SPACES.join("") + "]");
twttr.txt.regexen.punct = /\!'#%&'\(\)*\+,\\\-\.\/:;<=>\?@\[\]\^_{|}~/;
twttr.txt.regexen.atSigns = /[@@]/;
twttr.txt.regexen.extractMentions = regexSupplant(/(^|[^a-zA-Z0-9_])#{atSigns}([a-zA-Z0-9_]{1,20})(?=(.|$))/g);
twttr.txt.regexen.extractReply = regexSupplant(/^(?:#{spaces})*#{atSigns}([a-zA-Z0-9_]{1,20})/);
twttr.txt.regexen.listName = /[a-zA-Z][a-zA-Z0-9_\-\u0080-\u00ff]{0,24}/;
// Latin accented characters (subtracted 0xD7 from the range, it's a confusable multiplication sign. Looks like "x")
twttr.txt.regexen.latinAccentChars = regexSupplant("ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ\\303\\277");
twttr.txt.regexen.latenAccents = regexSupplant(/[#{latinAccentChars}]+/);
twttr.txt.regexen.endScreenNameMatch = regexSupplant(/^(?:#{atSigns}|[#{latinAccentChars}]|:\/\/)/);
// Characters considered valid in a hashtag but not at the beginning, where only a-z and 0-9 are valid.
twttr.txt.regexen.hashtagCharacters = regexSupplant(/[a-z0-9_#{latinAccentChars}]/i);
twttr.txt.regexen.autoLinkHashtags = regexSupplant(/(^|[^0-9A-Z&\/\?]+)(#|#)([0-9A-Z_]*[A-Z_]+#{hashtagCharacters}*)/gi);
twttr.txt.regexen.autoLinkUsernamesOrLists = /(^|[^a-zA-Z0-9_]|RT:?)([@@]+)([a-zA-Z0-9_]{1,20})(\/[a-zA-Z][a-zA-Z0-9_\-]{0,24})?/g;
twttr.txt.regexen.autoLinkEmoticon = /(8\-\#|8\-E|\+\-\(|\`\@|\`O|\<\|:~\(|\}:o\{|:\-\[|\>o\<|X\-\/|\[:-\]\-I\-|\/\/\/\/Ö\\\\\\\\|\(\|:\|\/\)|∑:\*\)|\( \| \))/g;
// URL related hash regex collection
twttr.txt.regexen.validPrecedingChars = regexSupplant(/(?:[^-\/"':!=A-Za-z0-9_@@]|^|\:)/);
twttr.txt.regexen.validDomain = regexSupplant(/(?:[^#{punct}\s][\.-](?=[^#{punct}\s])|[^#{punct}\s]){1,}\.[a-z]{2,}(?::[0-9]+)?/i);
// For protocol-less URLs, we'll accept them if they end in one of a handful of likely TLDs
twttr.txt.regexen.probableTld = /^(.*?)((?:[a-z0-9_\.\-]+)\.(?:com|net|org|gov|edu))$/i;
twttr.txt.regexen.www = /www\./i;
twttr.txt.regexen.validGeneralUrlPathChars = /[a-z0-9!\*';:=\+\$\/%#\[\]\-_,~]/i;
// Allow URL paths to contain balanced parens
// 1. Used in Wikipedia URLs like /Primer_(film)
// 2. Used in IIS sessions like /S(dfd346)/
twttr.txt.regexen.wikipediaDisambiguation = regexSupplant(/(?:\(#{validGeneralUrlPathChars}+\))/i);
// Allow @ in a url, but only in the middle. Catch things like http://example.com/@user
twttr.txt.regexen.validUrlPathChars = regexSupplant(/(?:#{wikipediaDisambiguation}|@#{validGeneralUrlPathChars}+\/|[\.\,]?#{validGeneralUrlPathChars})/i);
// Valid end-of-path chracters (so /foo. does not gobble the period).
// 1. Allow =&# for empty URL parameters and other URL-join artifacts
twttr.txt.regexen.validUrlPathEndingChars = /[a-z0-9=#\/]/i;
twttr.txt.regexen.validUrlQueryChars = /[a-z0-9!\*'\(\);:&=\+\$\/%#\[\]\-_\.,~]/i;
twttr.txt.regexen.validUrlQueryEndingChars = /[a-z0-9_&=#]/i;
twttr.txt.regexen.validUrl = regexSupplant(
'(' + // $1 total match
'(#{validPrecedingChars})' + // $2 Preceeding chracter
'(' + // $3 URL
'((?:https?:\\/\\/|www\\.)?)' + // $4 Protocol or beginning
'(#{validDomain})' + // $5 Domain(s) and optional post number
'(' + // $6 URL Path
'\\/#{validUrlPathChars}*' +
'#{validUrlPathEndingChars}?' +
')?' +
'(\\?#{validUrlQueryChars}*#{validUrlQueryEndingChars})?' + // $7 Query String
')' +
')'
, "gi");
// Default CSS class for auto-linked URLs
var DEFAULT_URL_CLASS = "tweet-url";
// Default CSS class for auto-linked lists (along with the url class)
var DEFAULT_LIST_CLASS = "list-slug";
// Default CSS class for auto-linked usernames (along with the url class)
var DEFAULT_USERNAME_CLASS = "username";
// Default CSS class for auto-linked hashtags (along with the url class)
var DEFAULT_HASHTAG_CLASS = "hashtag";
// HTML attribute for robot nofollow behavior (default)
var HTML_ATTR_NO_FOLLOW = " rel=\"nofollow\"";
// Simple object cloning function for simple objects
function clone(o) {
var r = {};
for (var k in o) {
if (o.hasOwnProperty(k)) {
r[k] = o[k];
}
}
return r;
}
twttr.txt.autoLink = function(text, options) {
options = clone(options || {});
return twttr.txt.autoLinkUsernamesOrLists(
twttr.txt.autoLinkUrlsCustom(
twttr.txt.autoLinkHashtags(text, options),
options),
options);
};
twttr.txt.autoLinkUsernamesOrLists = function(text, options) {
options = clone(options || {});
options.urlClass = options.urlClass || DEFAULT_URL_CLASS;
options.listClass = options.listClass || DEFAULT_LIST_CLASS;
options.usernameClass = options.usernameClass || DEFAULT_USERNAME_CLASS;
options.usernameUrlBase = options.usernameUrlBase || "http://twitter.com/";
options.listUrlBase = options.listUrlBase || "http://twitter.com/";
if (!options.suppressNoFollow) {
var extraHtml = HTML_ATTR_NO_FOLLOW;
}
var newText = "",
splitText = twttr.txt.splitTags(text);
for (var index = 0; index < splitText.length; index++) {
var chunk = splitText[index];
if (index !== 0) {
newText += ((index % 2 === 0) ? ">" : "<");
}
if (index % 4 !== 0) {
newText += chunk;
} else {
newText += chunk.replace(twttr.txt.regexen.autoLinkUsernamesOrLists, function(match, before, at, user, slashListname, offset, chunk) {
var after = chunk.slice(offset + match.length);
var d = {
before: before,
at: at,
user: twttr.txt.htmlEscape(user),
slashListname: twttr.txt.htmlEscape(slashListname),
extraHtml: extraHtml,
chunk: twttr.txt.htmlEscape(chunk)
};
for (var k in options) {
if (options.hasOwnProperty(k)) {
d[k] = options[k];
}
}
if (slashListname && !options.suppressLists) {
// the link is a list
var list = d.chunk = stringSupplant("#{user}#{slashListname}", d);
d.list = twttr.txt.htmlEscape(list.toLowerCase());
return stringSupplant("#{before}#{at}<a class=\"#{urlClass} #{listClass}\" href=\"#{listUrlBase}#{list}\"#{extraHtml}>#{chunk}</a>", d);
} else {
if (after && after.match(twttr.txt.regexen.endScreenNameMatch)) {
// Followed by something that means we don't autolink
return match;
} else {
// this is a screen name
d.chunk = twttr.txt.htmlEscape(user);
d.dataScreenName = !options.suppressDataScreenName ? stringSupplant("data-screen-name=\"#{chunk}\" ", d) : "";
return stringSupplant("#{before}#{at}<a class=\"#{urlClass} #{usernameClass}\" #{dataScreenName}href=\"#{usernameUrlBase}#{chunk}\"#{extraHtml}>#{chunk}</a>", d);
}
}
});
}
}
return newText;
};
twttr.txt.autoLinkHashtags = function(text, options) {
options = clone(options || {});
options.urlClass = options.urlClass || DEFAULT_URL_CLASS;
options.hashtagClass = options.hashtagClass || DEFAULT_HASHTAG_CLASS;
options.hashtagUrlBase = options.hashtagUrlBase || "http://twitter.com/search?q=%23";
if (!options.suppressNoFollow) {
var extraHtml = HTML_ATTR_NO_FOLLOW;
}
return text.replace(twttr.txt.regexen.autoLinkHashtags, function(match, before, hash, text) {
var d = {
before: before,
hash: twttr.txt.htmlEscape(hash),
text: twttr.txt.htmlEscape(text),
extraHtml: extraHtml
};
for (var k in options) {
if (options.hasOwnProperty(k)) {
d[k] = options[k];
}
}
return stringSupplant("#{before}<a href=\"#{hashtagUrlBase}#{text}\" title=\"##{text}\" class=\"#{urlClass} #{hashtagClass}\"#{extraHtml}>#{hash}#{text}</a>", d);
});
};
twttr.txt.autoLinkUrlsCustom = function(text, options) {
options = clone(options || {});
if (!options.suppressNoFollow) {
options.rel = "nofollow";
}
if (options.urlClass) {
options["class"] = options.urlClass;
delete options.urlClass;
}
delete options.suppressNoFollow;
delete options.suppressDataScreenName;
return text.replace(twttr.txt.regexen.validUrl, function(match, all, before, url, protocol, domain, path, queryString) {
var tldComponents;
if (protocol) {
var htmlAttrs = "";
for (var k in options) {
htmlAttrs += stringSupplant(" #{k}=\"#{v}\" ", {k: k, v: options[k].toString().replace(/"/, """).replace(/</, "<").replace(/>/, ">")});
}
options.htmlAttrs || "";
var fullUrl = ((!protocol || protocol.match(twttr.txt.regexen.www)) ? stringSupplant("http://#{url}", {url: url}) : url);
var d = {
before: before,
fullUrl: twttr.txt.htmlEscape(fullUrl),
htmlAttrs: htmlAttrs,
url: twttr.txt.htmlEscape(url)
};
return stringSupplant("#{before}<a href=\"#{fullUrl}\"#{htmlAttrs}>#{url}</a>", d);
} else if (tldComponents = all.match(twttr.txt.regexen.probableTld)) {
var tldBefore = tldComponents[1];
var tldUrl = tldComponents[2];
var htmlAttrs = "";
for (var k in options) {
htmlAttrs += stringSupplant(" #{k}=\"#{v}\" ", {
k: k,
v: twttr.txt.htmlEscape(options[k].toString())
});
}
options.htmlAttrs || "";
var fullUrl = stringSupplant("http://#{url}", {
url: tldUrl
});
var prefix = (tldBefore == before ? before : stringSupplant("#{before}#{tldBefore}", {
before: before,
tldBefore: tldBefore
}));
var d = {
before: prefix,
fullUrl: twttr.txt.htmlEscape(fullUrl),
htmlAttrs: htmlAttrs,
url: twttr.txt.htmlEscape(tldUrl)
};
return stringSupplant("#{before}<a href=\"#{fullUrl}\"#{htmlAttrs}>#{url}</a>", d);
} else {
return all;
}
});
};
twttr.txt.extractMentions = function(text) {
var screenNamesOnly = [],
screenNamesWithIndices = twttr.txt.extractMentionsWithIndices(text);
for (var i = 0; i < screenNamesWithIndices.length; i++) {
var screenName = screenNamesWithIndices[i].screenName;
screenNamesOnly.push(screenName);
}
return screenNamesOnly;
};
twttr.txt.extractMentionsWithIndices = function(text) {
if (!text) {
return [];
}
var possibleScreenNames = [],
position = 0;
text.replace(twttr.txt.regexen.extractMentions, function(match, before, screenName, after) {
if (!after.match(twttr.txt.regexen.endScreenNameMatch)) {
var startPosition = text.indexOf(screenName, position) - 1;
position = startPosition + screenName.length + 1;
possibleScreenNames.push({
screenName: screenName,
indices: [startPosition, position]
});
}
});
return possibleScreenNames;
};
twttr.txt.extractReplies = function(text) {
if (!text) {
return null;
}
var possibleScreenName = text.match(twttr.txt.regexen.extractReply);
if (!possibleScreenName) {
return null;
}
return possibleScreenName[1];
};
twttr.txt.extractUrls = function(text) {
var urlsOnly = [],
urlsWithIndices = twttr.txt.extractUrlsWithIndices(text);
for (var i = 0; i < urlsWithIndices.length; i++) {
urlsOnly.push(urlsWithIndices[i].url);
}
return urlsOnly;
};
twttr.txt.extractUrlsWithIndices = function(text) {
if (!text) {
return [];
}
var urls = [],
position = 0;
text.replace(twttr.txt.regexen.validUrl, function(match, all, before, url, protocol, domain, path, query) {
var tldComponents;
if (protocol) {
var startPosition = text.indexOf(url, position),
position = startPosition + url.length;
urls.push({
url: ((!protocol || protocol.match(twttr.txt.regexen.www)) ? stringSupplant("http://#{url}", {
url: url
}) : url),
indices: [startPosition, position]
});
} else if (tldComponents = all.match(twttr.txt.regexen.probableTld)) {
var tldUrl = tldComponents[2];
var startPosition = text.indexOf(tldUrl, position),
position = startPosition + tldUrl.length;
urls.push({
url: stringSupplant("http://#{tldUrl}", {
tldUrl: tldUrl
}),
indices: [startPosition, position]
});
}
});
return urls;
};
twttr.txt.extractHashtags = function(text) {
var hashtagsOnly = [],
hashtagsWithIndices = twttr.txt.extractHashtagsWithIndices(text);
for (var i = 0; i < hashtagsWithIndices.length; i++) {
hashtagsOnly.push(hashtagsWithIndices[i].hashtag);
}
return hashtagsOnly;
};
twttr.txt.extractHashtagsWithIndices = function(text) {
if (!text) {
return [];
}
var tags = [],
position = 0;
text.replace(twttr.txt.regexen.autoLinkHashtags, function(match, before, hash, hashText) {
var startPosition = text.indexOf(hash + hashText, position);
position = startPosition + hashText.length + 1;
tags.push({
hashtag: hashText,
indices: [startPosition, position]
});
});
return tags;
};
// this essentially does text.split(/<|>/)
// except that won't work in IE, where empty strings are ommitted
// so "<>".split(/<|>/) => [] in IE, but is ["", "", ""] in all others
// but "<<".split("<") => ["", "", ""]
twttr.txt.splitTags = function(text) {
var firstSplits = text.split("<"),
secondSplits,
allSplits = [],
split;
for (var i = 0; i < firstSplits.length; i += 1) {
split = firstSplits[i];
if (!split) {
allSplits.push("");
} else {
secondSplits = split.split(">");
for (var j = 0; j < secondSplits.length; j += 1) {
allSplits.push(secondSplits[j]);
}
}
}
return allSplits;
};
twttr.txt.hitHighlight = function(text, hits, options) {
var defaultHighlightTag = "em";
hits = hits || [];
options = options || {};
if (hits.length === 0) {
return text;
}
var tagName = options.tag || defaultHighlightTag,
tags = ["<" + tagName + ">", "</" + tagName + ">"],
chunks = twttr.txt.splitTags(text),
split,
i,
j,
result = "",
chunkIndex = 0,
chunk = chunks[0],
prevChunksLen = 0,
chunkCursor = 0,
startInChunk = false,
chunkChars = chunk,
flatHits = [],
index,
hit,
tag,
placed,
hitSpot;
for (i = 0; i < hits.length; i += 1) {
for (j = 0; j < hits[i].length; j += 1) {
flatHits.push(hits[i][j]);
}
}
for (index = 0; index < flatHits.length; index += 1) {
hit = flatHits[index];
tag = tags[index % 2];
placed = false;
while (chunk != null && hit >= prevChunksLen + chunk.length) {
result += chunkChars.slice(chunkCursor);
if (startInChunk && hit === prevChunksLen + chunkChars.length) {
result += tag;
placed = true;
}
if (chunks[chunkIndex + 1]) {
result += "<" + chunks[chunkIndex + 1] + ">";
}
prevChunksLen += chunkChars.length;
chunkCursor = 0;
chunkIndex += 2;
chunk = chunks[chunkIndex];
chunkChars = chunk;
startInChunk = false;
}
if (!placed && chunk != null) {
hitSpot = hit - prevChunksLen;
result += chunkChars.slice(chunkCursor, hitSpot) + tag;
chunkCursor = hitSpot;
if (index % 2 === 0) {
startInChunk = true;
} else {
startInChunk = false;
}
}
}
if (chunk != null) {
if (chunkCursor < chunkChars.length) {
result += chunkChars.slice(chunkCursor);
}
for (index = chunkIndex + 1; index < chunks.length; index += 1) {
result += (index % 2 === 0 ? chunks[index] : "<" + chunks[index] + ">");
}
}
return result;
};
}()); | if (!window.twttr) {
window.twttr = {};
}
(function() {
twttr.txt = {};
twttr.txt.regexen = {};
var HTML_ENTITIES = {
'&': '&',
'>': '>',
'<': '<',
'"': '"',
"'": ' '
};
// HTML escaping
twttr.txt.htmlEscape = function(text) {
return text && text.replace(/[&"'><]/g, function(character) {
return HTML_ENTITIES[character];
});
};
// Builds a RegExp
function regexSupplant(regex, flags) {
flags = flags || "";
if (typeof regex !== "string") {
if (regex.global && flags.indexOf("g") < 0) {
flags += "g";
}
if (regex.ignoreCase && flags.indexOf("i") < 0) {
flags += "i";
}
if (regex.multiline && flags.indexOf("m") < 0) {
flags += "m";
}
regex = regex.source;
}
return new RegExp(regex.replace(/#\{(\w+)\}/g, function(match, name) {
var newRegex = twttr.txt.regexen[name] || "";
if (typeof newRegex !== "string") {
newRegex = newRegex.source;
}
return newRegex;
}), flags);
}
// simple string interpolation
function stringSupplant(str, values) {
return str.replace(/#\{(\w+)\}/g, function(match, name) {
return values[name] || "";
});
}
// Space is more than %20, U+3000 for example is the full-width space used with Kanji. Provide a short-hand
// to access both the list of characters and a pattern suitible for use with String#split
// Taken from: ActiveSupport::Multibyte::Handlers::UTF8Handler::UNICODE_WHITESPACE
var fromCode = String.fromCharCode;
var UNICODE_SPACES = [
fromCode(0x0020), // White_Space # Zs SPACE
fromCode(0x0085), // White_Space # Cc <control-0085>
fromCode(0x00A0), // White_Space # Zs NO-BREAK SPACE
fromCode(0x1680), // White_Space # Zs OGHAM SPACE MARK
fromCode(0x180E), // White_Space # Zs MONGOLIAN VOWEL SEPARATOR
fromCode(0x2028), // White_Space # Zl LINE SEPARATOR
fromCode(0x2029), // White_Space # Zp PARAGRAPH SEPARATOR
fromCode(0x202F), // White_Space # Zs NARROW NO-BREAK SPACE
fromCode(0x205F), // White_Space # Zs MEDIUM MATHEMATICAL SPACE
fromCode(0x3000) // White_Space # Zs IDEOGRAPHIC SPACE
];
for (var i = 0x009; i <= 0x000D; i++) { // White_Space # Cc [5] <control-0009>..<control-000D>
UNICODE_SPACES.push(String.fromCharCode(i));
}
for (var i = 0x2000; i <= 0x200A; i++) { // White_Space # Zs [11] EN QUAD..HAIR SPACE
UNICODE_SPACES.push(String.fromCharCode(i));
}
// Core regex fragments; regexSupplant interpolates #{name} references to
// previously defined fragments into a composed RegExp.
twttr.txt.regexen.spaces = regexSupplant("[" + UNICODE_SPACES.join("") + "]");
twttr.txt.regexen.punct = /\!'#%&'\(\)*\+,\\\-\.\/:;<=>\?@\[\]\^_{|}~/;
// NOTE(review): the class below appears as two identical @ characters; the
// original likely paired ASCII @ with fullwidth U+FF20 and was normalized by
// a re-encoding pass — confirm against upstream twitter-text.js.
twttr.txt.regexen.atSigns = /[@@]/;
twttr.txt.regexen.extractMentions = regexSupplant(/(^|[^a-zA-Z0-9_])#{atSigns}([a-zA-Z0-9_]{1,20})(?=(.|$))/g);
twttr.txt.regexen.extractReply = regexSupplant(/^(?:#{spaces})*#{atSigns}([a-zA-Z0-9_]{1,20})/);
twttr.txt.regexen.listName = /[a-zA-Z][a-zA-Z0-9_\-\u0080-\u00ff]{0,24}/;
// Latin accented characters (subtracted 0xD7 from the range, it's a confusable multiplication sign. Looks like "x")
twttr.txt.regexen.latinAccentChars = regexSupplant("ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþ\\303\\277");
// NOTE(review): "latenAccents" looks like a typo for "latinAccents"; left
// unchanged because other code may reference it by this name.
twttr.txt.regexen.latenAccents = regexSupplant(/[#{latinAccentChars}]+/);
twttr.txt.regexen.endScreenNameMatch = regexSupplant(/^(?:#{atSigns}|[#{latinAccentChars}]|:\/\/)/);
// Characters considered valid in a hashtag but not at the beginning, where only a-z and 0-9 are valid.
twttr.txt.regexen.hashtagCharacters = regexSupplant(/[a-z0-9_#{latinAccentChars}]/i);
twttr.txt.regexen.autoLinkHashtags = regexSupplant(/(^|[^0-9A-Z&\/\?]+)(#|#)([0-9A-Z_]*[A-Z_]+#{hashtagCharacters}*)/gi);
twttr.txt.regexen.autoLinkUsernamesOrLists = /(^|[^a-zA-Z0-9_]|RT:?)([@@]+)([a-zA-Z0-9_]{1,20})(\/[a-zA-Z][a-zA-Z0-9_\-]{0,24})?/g;
twttr.txt.regexen.autoLinkEmoticon = /(8\-\#|8\-E|\+\-\(|\`\@|\`O|\<\|:~\(|\}:o\{|:\-\[|\>o\<|X\-\/|\[:-\]\-I\-|\/\/\/\/Ö\\\\\\\\|\(\|:\|\/\)|∑:\*\)|\( \| \))/g;
// URL related hash regex collection
twttr.txt.regexen.validPrecedingChars = regexSupplant(/(?:[^-\/"':!=A-Za-z0-9_@@]|^|\:)/);
twttr.txt.regexen.validDomain = regexSupplant(/(?:[^#{punct}\s][\.-](?=[^#{punct}\s])|[^#{punct}\s]){1,}\.[a-z]{2,}(?::[0-9]+)?/i);
// For protocol-less URLs, we'll accept them if they end in one of a handful of likely TLDs
twttr.txt.regexen.probableTld = /^(.*?)((?:[a-z0-9_\.\-]+)\.(?:com|net|org|gov|edu))$/i;
twttr.txt.regexen.www = /www\./i;
twttr.txt.regexen.validGeneralUrlPathChars = /[a-z0-9!\*';:=\+\$\/%#\[\]\-_,~]/i;
// Allow URL paths to contain balanced parens
// 1. Used in Wikipedia URLs like /Primer_(film)
// 2. Used in IIS sessions like /S(dfd346)/
twttr.txt.regexen.wikipediaDisambiguation = regexSupplant(/(?:\(#{validGeneralUrlPathChars}+\))/i);
// Allow @ in a url, but only in the middle. Catch things like http://example.com/@user
twttr.txt.regexen.validUrlPathChars = regexSupplant(/(?:#{wikipediaDisambiguation}|@#{validGeneralUrlPathChars}+\/|[\.\,]?#{validGeneralUrlPathChars})/i);
// Valid end-of-path characters (so /foo. does not gobble the period).
// 1. Allow =&# for empty URL parameters and other URL-join artifacts
twttr.txt.regexen.validUrlPathEndingChars = /[a-z0-9=#\/]/i;
twttr.txt.regexen.validUrlQueryChars = /[a-z0-9!\*'\(\);:&=\+\$\/%#\[\]\-_\.,~]/i;
twttr.txt.regexen.validUrlQueryEndingChars = /[a-z0-9_&=#\/]/i;
// Composite URL matcher. Capture groups are documented inline; the callbacks
// in autoLinkUrlsCustom / extractUrlsWithIndices rely on this exact group
// numbering, so do not reorder the groups.
twttr.txt.regexen.validUrl = regexSupplant(
'(' + // $1 total match
'(#{validPrecedingChars})' + // $2 Preceding character
'(' + // $3 URL
'((?:https?:\\/\\/|www\\.)?)' + // $4 Protocol or beginning
'(#{validDomain})' + // $5 Domain(s) and optional port number
'(' + // $6 URL Path
'\\/#{validUrlPathChars}*' +
'#{validUrlPathEndingChars}?' +
')?' +
'(\\?#{validUrlQueryChars}*#{validUrlQueryEndingChars})?' + // $7 Query String
')' +
')'
, "gi");
// Defaults used by the autoLink* functions below; each can be overridden
// per-call via the corresponding entry in `options`.
// Default CSS class for auto-linked URLs
var DEFAULT_URL_CLASS = "tweet-url";
// Default CSS class for auto-linked lists (along with the url class)
var DEFAULT_LIST_CLASS = "list-slug";
// Default CSS class for auto-linked usernames (along with the url class)
var DEFAULT_USERNAME_CLASS = "username";
// Default CSS class for auto-linked hashtags (along with the url class)
var DEFAULT_HASHTAG_CLASS = "hashtag";
// HTML attribute for robot nofollow behavior (default)
var HTML_ATTR_NO_FOLLOW = " rel=\"nofollow\"";
// Shallow-copy the own enumerable properties of a plain object into a new
// object. Values are copied by reference; nested objects are shared.
function clone(o) {
  var duplicate = {};
  var key;
  for (key in o) {
    if (o.hasOwnProperty(key)) {
      duplicate[key] = o[key];
    }
  }
  return duplicate;
}
// Auto-link hashtags, then URLs, then usernames/lists — in that order —
// passing one cloned options object through every stage.
twttr.txt.autoLink = function(text, options) {
  var opts = clone(options || {});
  var linked = twttr.txt.autoLinkHashtags(text, opts);
  linked = twttr.txt.autoLinkUrlsCustom(linked, opts);
  return twttr.txt.autoLinkUsernamesOrLists(linked, opts);
};
// Auto-link @username and @username/list references in `text`.
// The text is partitioned on < and > via splitTags; only chunks whose index
// is ≡ 0 (mod 4) are linked — presumably to skip tag bodies and the text
// immediately inside an element (TODO confirm against splitTags' output).
twttr.txt.autoLinkUsernamesOrLists = function(text, options) {
  options = clone(options || {});
  options.urlClass = options.urlClass || DEFAULT_URL_CLASS;
  options.listClass = options.listClass || DEFAULT_LIST_CLASS;
  options.usernameClass = options.usernameClass || DEFAULT_USERNAME_CLASS;
  options.usernameUrlBase = options.usernameUrlBase || "http://twitter.com/";
  options.listUrlBase = options.listUrlBase || "http://twitter.com/";
  if (!options.suppressNoFollow) {
    // `var` hoists to function scope: extraHtml is visible in the replace
    // callback below, and is simply `undefined` when nofollow is suppressed.
    var extraHtml = HTML_ATTR_NO_FOLLOW;
  }
  var newText = "",
      splitText = twttr.txt.splitTags(text);
  for (var index = 0; index < splitText.length; index++) {
    var chunk = splitText[index];
    if (index !== 0) {
      // Re-insert the bracket splitTags consumed: odd indices follow a "<",
      // even indices follow a ">".
      newText += ((index % 2 === 0) ? ">" : "<");
    }
    if (index % 4 !== 0) {
      // Not a linkable chunk; pass through untouched.
      newText += chunk;
    } else {
      newText += chunk.replace(twttr.txt.regexen.autoLinkUsernamesOrLists, function(match, before, at, user, slashListname, offset, chunk) {
        // Text after the match decides whether to suppress the link.
        var after = chunk.slice(offset + match.length);
        var d = {
          before: before,
          at: at,
          user: twttr.txt.htmlEscape(user),
          slashListname: twttr.txt.htmlEscape(slashListname),
          extraHtml: extraHtml,
          chunk: twttr.txt.htmlEscape(chunk)
        };
        // Caller options override template defaults.
        for (var k in options) {
          if (options.hasOwnProperty(k)) {
            d[k] = options[k];
          }
        }
        if (slashListname && !options.suppressLists) {
          // the link is a list
          var list = d.chunk = stringSupplant("#{user}#{slashListname}", d);
          d.list = twttr.txt.htmlEscape(list.toLowerCase());
          return stringSupplant("#{before}#{at}<a class=\"#{urlClass} #{listClass}\" href=\"#{listUrlBase}#{list}\"#{extraHtml}>#{chunk}</a>", d);
        } else {
          if (after && after.match(twttr.txt.regexen.endScreenNameMatch)) {
            // Followed by something that means we don't autolink
            return match;
          } else {
            // this is a screen name
            d.chunk = twttr.txt.htmlEscape(user);
            d.dataScreenName = !options.suppressDataScreenName ? stringSupplant("data-screen-name=\"#{chunk}\" ", d) : "";
            return stringSupplant("#{before}#{at}<a class=\"#{urlClass} #{usernameClass}\" #{dataScreenName}href=\"#{usernameUrlBase}#{chunk}\"#{extraHtml}>#{chunk}</a>", d);
          }
        }
      });
    }
  }
  return newText;
};
// Auto-link #hashtags in `text` as <a> anchors pointing at hashtagUrlBase.
// Caller-supplied options override the defaults inside the template data.
twttr.txt.autoLinkHashtags = function(text, options) {
  options = clone(options || {});
  options.urlClass = options.urlClass || DEFAULT_URL_CLASS;
  options.hashtagClass = options.hashtagClass || DEFAULT_HASHTAG_CLASS;
  options.hashtagUrlBase = options.hashtagUrlBase || "http://twitter.com/search?q=%23";
  if (!options.suppressNoFollow) {
    // Hoisted to function scope; `undefined` (renders as nothing) when
    // nofollow is suppressed.
    var extraHtml = HTML_ATTR_NO_FOLLOW;
  }
  return text.replace(twttr.txt.regexen.autoLinkHashtags, function(match, before, hash, text) {
    var d = {
      before: before,
      hash: twttr.txt.htmlEscape(hash),
      text: twttr.txt.htmlEscape(text),
      extraHtml: extraHtml
    };
    // Options override the defaults in the template data.
    for (var k in options) {
      if (options.hasOwnProperty(k)) {
        d[k] = options[k];
      }
    }
    return stringSupplant("#{before}<a href=\"#{hashtagUrlBase}#{text}\" title=\"##{text}\" class=\"#{urlClass} #{hashtagClass}\"#{extraHtml}>#{hash}#{text}</a>", d);
  });
};
// Auto-link URLs in `text`. After the control-flag keys are stripped below,
// every remaining entry in `options` is emitted as an HTML attribute on the
// generated anchors.
twttr.txt.autoLinkUrlsCustom = function(text, options) {
  options = clone(options || {});
  if (!options.suppressNoFollow) {
    options.rel = "nofollow";
  }
  if (options.urlClass) {
    // "class" is a reserved word, hence the bracket notation.
    options["class"] = options.urlClass;
    delete options.urlClass;
  }
  // Control flags, not HTML attributes — drop before the attribute loops.
  delete options.suppressNoFollow;
  delete options.suppressDataScreenName;
  return text.replace(twttr.txt.regexen.validUrl, function(match, all, before, url, protocol, domain, path, queryString) {
    var tldComponents;
    if (protocol) {
      var htmlAttrs = "";
      for (var k in options) {
        // FIX: these replacements had been corrupted to raw characters (the
        // quote case was a syntax error); restored HTML-entity escaping and
        // made it global (/g) so every occurrence is escaped, not just the first.
        htmlAttrs += stringSupplant(" #{k}=\"#{v}\" ", {k: k, v: options[k].toString().replace(/"/g, "&quot;").replace(/</g, "&lt;").replace(/>/g, "&gt;")});
      }
      options.htmlAttrs || ""; // NOTE(review): no-op expression kept for parity — probably meant `htmlAttrs = options.htmlAttrs || ""`; confirm intent
      // www.-style matches get an explicit http:// prefix in the href.
      var fullUrl = ((!protocol || protocol.match(twttr.txt.regexen.www)) ? stringSupplant("http://#{url}", {url: url}) : url);
      var d = {
        before: before,
        fullUrl: twttr.txt.htmlEscape(fullUrl),
        htmlAttrs: htmlAttrs,
        url: twttr.txt.htmlEscape(url)
      };
      return stringSupplant("#{before}<a href=\"#{fullUrl}\"#{htmlAttrs}>#{url}</a>", d);
    } else if (tldComponents = all.match(twttr.txt.regexen.probableTld)) {
      // Protocol-less match: only link when it ends in a likely TLD.
      var tldBefore = tldComponents[1];
      var tldUrl = tldComponents[2];
      var htmlAttrs = "";
      for (var k in options) {
        htmlAttrs += stringSupplant(" #{k}=\"#{v}\" ", {
          k: k,
          v: twttr.txt.htmlEscape(options[k].toString())
        });
      }
      options.htmlAttrs || ""; // NOTE(review): no-op, see above
      var fullUrl = stringSupplant("http://#{url}", {
        url: tldUrl
      });
      // Avoid duplicating `before` when the TLD prefix already equals it.
      var prefix = (tldBefore == before ? before : stringSupplant("#{before}#{tldBefore}", {
        before: before,
        tldBefore: tldBefore
      }));
      var d = {
        before: prefix,
        fullUrl: twttr.txt.htmlEscape(fullUrl),
        htmlAttrs: htmlAttrs,
        url: twttr.txt.htmlEscape(tldUrl)
      };
      return stringSupplant("#{before}<a href=\"#{fullUrl}\"#{htmlAttrs}>#{url}</a>", d);
    } else {
      return all;
    }
  });
};
// Return just the screen-name strings mentioned in `text`.
twttr.txt.extractMentions = function(text) {
  var mentions = twttr.txt.extractMentionsWithIndices(text);
  var names = [];
  for (var n = 0; n < mentions.length; n++) {
    names.push(mentions[n].screenName);
  }
  return names;
};
// Extract @mentions from `text` as {screenName, indices: [start, end]}.
twttr.txt.extractMentionsWithIndices = function(text) {
  if (!text) {
    return [];
  }
  var possibleScreenNames = [],
      position = 0;
  // replace() is used purely to iterate matches; its return value is discarded.
  text.replace(twttr.txt.regexen.extractMentions, function(match, before, screenName, after) {
    if (!after.match(twttr.txt.regexen.endScreenNameMatch)) {
      // -1 widens the start index to include the @ preceding the name.
      var startPosition = text.indexOf(screenName, position) - 1;
      // Advance the search cursor so repeated names get distinct indices.
      position = startPosition + screenName.length + 1;
      possibleScreenNames.push({
        screenName: screenName,
        indices: [startPosition, position]
      });
    }
  });
  return possibleScreenNames;
};
// If `text` begins with an @reply, return the screen name; otherwise null.
twttr.txt.extractReplies = function(text) {
  if (!text) {
    return null;
  }
  var replyMatch = text.match(twttr.txt.regexen.extractReply);
  return replyMatch ? replyMatch[1] : null;
};
// Return just the URL strings found in `text`.
twttr.txt.extractUrls = function(text) {
  var withIndices = twttr.txt.extractUrlsWithIndices(text);
  var bareUrls = [];
  for (var n = 0; n < withIndices.length; n++) {
    bareUrls.push(withIndices[n].url);
  }
  return bareUrls;
};
// Extract URLs from `text` as {url, indices: [start, end]}.
twttr.txt.extractUrlsWithIndices = function(text) {
  if (!text) {
    return [];
  }
  var urls = [],
      position = 0;
  // replace() is used purely to iterate matches; its result is discarded.
  text.replace(twttr.txt.regexen.validUrl, function(match, all, before, url, protocol, domain, path, query) {
    var tldComponents;
    if (protocol) {
      // FIX: `position` was (re)declared with `var` inside this callback, so
      // the hoisted local shadowed the outer cursor; indexOf then searched
      // from `undefined` (treated as 0), giving every duplicate URL the first
      // occurrence's indices. Assign the outer `position` instead — matching
      // extractMentionsWithIndices / extractHashtagsWithIndices.
      var startPosition = text.indexOf(url, position);
      position = startPosition + url.length;
      urls.push({
        url: ((!protocol || protocol.match(twttr.txt.regexen.www)) ? stringSupplant("http://#{url}", {
          url: url
        }) : url),
        indices: [startPosition, position]
      });
    } else if (tldComponents = all.match(twttr.txt.regexen.probableTld)) {
      // Protocol-less match: only record when it ends in a likely TLD.
      var tldUrl = tldComponents[2];
      var startPosition = text.indexOf(tldUrl, position);
      position = startPosition + tldUrl.length;
      urls.push({
        url: stringSupplant("http://#{tldUrl}", {
          tldUrl: tldUrl
        }),
        indices: [startPosition, position]
      });
    }
  });
  return urls;
};
// Return just the hashtag strings (without #) found in `text`.
twttr.txt.extractHashtags = function(text) {
  var withIndices = twttr.txt.extractHashtagsWithIndices(text);
  var bareTags = [];
  for (var n = 0; n < withIndices.length; n++) {
    bareTags.push(withIndices[n].hashtag);
  }
  return bareTags;
};
// Extract #hashtags from `text` as {hashtag, indices: [start, end]}.
// The stored hashtag excludes the leading #; the indices include it.
twttr.txt.extractHashtagsWithIndices = function(text) {
  if (!text) {
    return [];
  }
  var tags = [],
      position = 0;
  // replace() is used purely to iterate matches; its result is discarded.
  text.replace(twttr.txt.regexen.autoLinkHashtags, function(match, before, hash, hashText) {
    // Search from `position` so repeated hashtags get distinct indices.
    var startPosition = text.indexOf(hash + hashText, position);
    position = startPosition + hashText.length + 1;
    tags.push({
      hashtag: hashText,
      indices: [startPosition, position]
    });
  });
  return tags;
};
// This essentially does text.split(/<|>/), except that won't work in IE,
// where empty strings are omitted:
//   "<>".split(/<|>/) => [] in IE, but ["", "", ""] everywhere else,
// whereas "<<".split("<") => ["", "", ""] consistently. So we split on "<"
// first, then split each piece on ">", preserving empty strings.
twttr.txt.splitTags = function(text) {
  var pieces = [];
  var ltParts = text.split("<");
  for (var a = 0; a < ltParts.length; a += 1) {
    var part = ltParts[a];
    if (!part) {
      // Preserve the empty string a regex split would have produced.
      pieces.push("");
    } else {
      var gtParts = part.split(">");
      for (var b = 0; b < gtParts.length; b += 1) {
        pieces.push(gtParts[b]);
      }
    }
  }
  return pieces;
};
// Wrap the character ranges in `hits` (an array of [start, end] pairs, with
// offsets counted over the tag-free text) in <em>…</em> (or options.tag),
// while leaving any HTML tags already present in `text` intact.
// The walk below keeps a cursor over the chunks produced by splitTags:
// even chunk indices are text, odd ones are tag bodies.
twttr.txt.hitHighlight = function(text, hits, options) {
  var defaultHighlightTag = "em";
  hits = hits || [];
  options = options || {};
  if (hits.length === 0) {
    return text;
  }
  var tagName = options.tag || defaultHighlightTag,
      tags = ["<" + tagName + ">", "</" + tagName + ">"], // open/close pair, alternated below
      chunks = twttr.txt.splitTags(text),
      split,
      i,
      j,
      result = "",
      chunkIndex = 0,          // index of the current TEXT chunk (steps by 2)
      chunk = chunks[0],
      prevChunksLen = 0,       // total tag-free characters emitted before `chunk`
      chunkCursor = 0,         // how far into `chunk` we have emitted
      startInChunk = false,    // an opening tag was placed inside this chunk
      chunkChars = chunk,
      flatHits = [],
      index,
      hit,
      tag,
      placed,
      hitSpot;
  // Flatten [start, end] pairs into one alternating offset list; even
  // positions take the opening tag, odd positions the closing tag.
  for (i = 0; i < hits.length; i += 1) {
    for (j = 0; j < hits[i].length; j += 1) {
      flatHits.push(hits[i][j]);
    }
  }
  for (index = 0; index < flatHits.length; index += 1) {
    hit = flatHits[index];
    tag = tags[index % 2];
    placed = false;
    // Advance past whole chunks until the one containing `hit`, re-emitting
    // each skipped text chunk and the tag chunk that follows it.
    while (chunk != null && hit >= prevChunksLen + chunk.length) {
      result += chunkChars.slice(chunkCursor);
      if (startInChunk && hit === prevChunksLen + chunkChars.length) {
        // Hit ends exactly at the chunk boundary: close here.
        result += tag;
        placed = true;
      }
      if (chunks[chunkIndex + 1]) {
        // Re-emit the intervening HTML tag verbatim.
        result += "<" + chunks[chunkIndex + 1] + ">";
      }
      prevChunksLen += chunkChars.length;
      chunkCursor = 0;
      chunkIndex += 2;
      chunk = chunks[chunkIndex];
      chunkChars = chunk;
      startInChunk = false;
    }
    if (!placed && chunk != null) {
      // Emit up to the hit offset inside the current chunk, then the tag.
      hitSpot = hit - prevChunksLen;
      result += chunkChars.slice(chunkCursor, hitSpot) + tag;
      chunkCursor = hitSpot;
      if (index % 2 === 0) {
        startInChunk = true;
      } else {
        startInChunk = false;
      }
    }
  }
  // Flush the remainder of the current chunk and all untouched chunks.
  if (chunk != null) {
    if (chunkCursor < chunkChars.length) {
      result += chunkChars.slice(chunkCursor);
    }
    for (index = chunkIndex + 1; index < chunks.length; index += 1) {
      result += (index % 2 === 0 ? chunks[index] : "<" + chunks[index] + ">");
    }
  }
  return result;
};
}()); |
marektihkan/CC.NET-Kaizen-theme | 18 | Source/Kaizen/css/base.styles.css | /* Base
----------------------------------*/
BODY { margin: 0px; padding: 0px; background-color: #FFF; font-family: helvetica; font-size: 12px; }
/* Firefox alignment hack */
BODY { text-align: -moz-center; }
TABLE { width: 100%; background: #FFF url("../images/menu_links_bg.png"); }
THEAD, .strong { font-weight: bold; }
.data, .label { overflow: hidden; }
BR { display: none; }
A { text-decoration: none; cursor: hand; cursor: pointer; }
.dialog, .hidden { display: none; }
.clear { clear: both; }
.invisible { margin: 0px; padding: 0px; }
.show-line-breaks BR { display: block; }
/* Columns
----------------------------------*/
.even-columns-2 { width: 50%; }
.even-columns-3 { width: 33%; }
.even-columns-4 { width: 25%; }
.even-columns-5 { width: 20%; }
/* Icons
----------------------------------*/
.icon { display: block; text-indent: -99999px; overflow: hidden; background-repeat: no-repeat; background-image: url('../images/icons.png'); }
.icon-rss { width: 30px; height: 30px; background-position: 0px 0px; }
.icon-refresh { width: 30px; height: 30px; background-position: -30px 0px; }
.icon-reload { width: 30px; height: 30px; background-position: -30px 0px; }
.icon-large-logout { width: 30px; height: 30px; background-position: -60px 0px; }
.icon-show-sections { width: 24px; height: 24px; background-position: 0px -30px; }
.icon-hide-sections { width: 24px; height: 24px; background-position: -24px -30px; }
.icon-show-errors { width: 24px; height: 24px; background-position: -48px -30px; }
.icon-change-password { width: 19px; height: 19px; background-position: 0px -54px; }
.icon-logout { width: 19px; height: 19px; background-position: -19px -54px; }
.icon-login { width: 19px; height: 19px; background-position: -38px -54px; }
.icon-large-up-arrow { width: 13px; height: 30px; background-position: 0px -73px; }
.icon-large-down-arrow { width: 13px; height: 30px; background-position: -13px -73px; }
.icon-small-up-arrow { width: 7px; height: 10px; background-position: 0px -103px; }
.icon-small-down-arrow { width: 7px; height: 10px; background-position: -10px -103px; }
/* Forms
----------------------------------*/
FORM { display: inline; margin: 0px; padding: 0px; border: none; vertical-align: baseline; }
INPUT { padding: 3px; border: none; background-color: #3b3a3a; color: #ededed; }
INPUT[type="submit"], INPUT[type="button"] { font-weight: bold; cursor: hand; cursor: pointer; }
INPUT[type="hidden"] { display: none; }
INPUT.invalid { background-color: #ce2a1b; }
/* Messaging
----------------------------------*/
.message { position: absolute; top: 0px; left: 0px; width: 100%; height: 40px; margin: 0px; color: #ededed; font-weight: bold; font-size: 20px; text-align: center; cursor: hand; cursor: pointer; }
.message SPAN { display: inline-block; margin-top: 5px; }
.message A { color: #FFFFFF; text-decoration: underline; }
.message INPUT[type="submit"], .message INPUT[type="button"] { margin: 6px; float: right; border: 1px solid #ededed; background-color: transparent; color: #ededed; font-weight: bold; cursor: hand; cursor: pointer; }
/* Dialogs
----------------------------------*/
.dialog OL { text-align: left; }
/* Selectbox
----------------------------------*/
.selection { float: left; width: 100%; padding: 0px; background-color: #3b3a3a; border-top: 1px dashed #FFF; }
.selection-data { width: 100%; list-style-type: none; }
.selection-data LI { display: inline-block; width: 33%; float: left; }
.selection-data A, .selection-data SPAN { color: #FFF; font-size: 14px; font-weight: bold; text-decoration: none; }
.selection-link-hide { clear: both; float: right; margin: 3px; }
/* Alignment
----------------------------------*/
.left { padding-left: 0.5em; text-align: left; }
.right { padding-right: 0.5em; text-align: right; }
.center { text-align: center; }
/* Background
----------------------------------*/
.exception { background-color: #ce2a1b !important; }
.success, #content UL LI.build-passed-link { background-color: #4e800c !important; }
.failed, #content UL LI.build-failed-link { background-color: #ce2a1b !important; }
.warning { background-color: #f0b440 !important; }
.success-light { background-color: #639f27 !important; }
.failed-light { background-color: #c94b3e !important; }
.warning-light { background-color: #f1c775 !important; }
/* Text
----------------------------------*/
.success-text { color: #4e800c !important; }
.failed-text { color: #ce2a1b !important; }
.warning-text { color: #f0b440 !important; }
.failed-underline { border-bottom: dotted 2px #ce2a1b; }
/* Layout
----------------------------------*/
#header, #main, #footer { width: 1000px; margin: 0 auto; padding: 0px; text-align: left; }
#header { margin-top: 10px !important; height: 65px; border-bottom: solid 2px #3b3a3a; vertical-align: top; }
#menu { float: left; width: 200px; margin: 20px 10px 20px 0px; padding: 0px; }
#content { float: right; clear: right; width: 790px; margin-top: 20px; }
#footer { margin-bottom: 20px !important; border-top: solid 2px #3b3a3a; }
/* Header
----------------------------------*/
.header-logo { float: left; color: #3b3a3a; }
.heading { float: left; font: 30px times new roman; letter-spacing: 2px; }
.subheading { float: left; clear: both; margin-top: -7px; margin-left: 30px; color: #3b3a3a; font: 12px trebuchet ms; }
.header-login, .header-navigation { display: block; float: right; }
.header-login { padding: 5px; margin-top: -10px; clear: right; background-color: #f0b440; }
.header-login A { float: left; margin: 2px; }
.header-navigation { clear: both; }
.header-navigation UL { display: block; margin: 0px; list-style-type: none; }
.route { float: left; padding-left: 13px; background: url("../images/separator.png") no-repeat 0px 3px; color: #919EA9; font-weight: bold; }
.route A { color: #3b3a3a; text-decoration: none; }
.route-dashboard { padding-left: 0px !important; background-image: none !important; }
/* Menu
----------------------------------*/
#menu H2 { display: block; width: 100%; height: 35px; margin: 0px 0px 2px 0px; padding: 0px; background-color: #3b3a3a; color: #ededed; font: normal 15px trebuchet ms; vertical-align: middle; cursor: pointer; cursor: hand; }
#menu H2 SPAN { display: inline-block; margin: 6px 10px 6px 10px; }
#menu UL { display: block; margin: 0px; padding: 0px; list-style-type: none; }
#menu .section UL { width: 100%; }
#menu .section LI { display: block; width: 100%; height: 30px; margin: 0px 0px 1px 0px; padding: 0px; border-top: solid 1px #c9c5c5; border-bottom: solid 1px #c9c5c5; background-image: url("../images/menu_links_bg.png"); font-size: 12px; vertical-align: middle; }
#menu .section LI SPAN { display: block; margin: 7px; }
#menu A { color: #615e5c; font-weight: bold; }
#menu .build-passed-link { background: url("../images/build_passed_link_bg.png") !important; }
#menu .build-failed-link { background: url("../images/build_failed_link_bg.png") !important; }
.build-views { display: block; height: 25px; margin: 0px 0px 3px 0px; }
.build-views UL { float: right; height: 100%; margin-bottom: 3px !important; }
.build-views LI { float: left; width: 24px; height: 24px; margin: 1px; cursor: pointer; cursor: hand; }
.build-views LI SPAN { display: none; }
.colors { float: left !important; margin-left: -1px !important; }
#searchbox { width: 130px; margin: -4px 0px 0px 2px; background-color: #3b3a3a; color: #ededed; font: normal 15px trebuchet ms; vertical-align: middle; border-bottom: 1px solid #c9c5c5; }
/* Content
----------------------------------*/
#content .section { margin: 0px; margin-bottom: 5px !important; }
#content .section-content { width: 770px; margin: 0px; padding: 10px; background: #EEE; color: #3b3a3a !important; font-size: 12px; overflow: hidden; }
H1.title, #content .build { display: block; width: 100%; height: 60px; margin: 0px; padding: 0px; background-color: #3b3a3a; font-size: 24px; vertical-align: middle; }
H1.title, H1.title A, #content .build A { color: #ededed; font-family: trebuchet ms; cursor: pointer; cursor: hand; }
H1.title SPAN, #content .build SPAN { float: left; margin-left: 10px !important; margin-top: 15px; }
H1.title SPAN, H1.title A, #content .build SPAN, #content .build A { display: inline-block; }
.title-data { float: right !important; margin-right: 10px !important; }
.title-data .icon { margin: 0.1em; background-color: #FFF; }
.title-data .icon SPAN, .title-data .icon INPUT { display: none; }
.title-data SPAN { margin: 0px !important; }
.button-rss, .title-data .icon-large-down-arrow { background-color: Transparent !important; }
#content .build { margin-bottom: 4px !important; }
.log, PRE { overflow: auto; white-space: pre-wrap; }
.section-content A { color: #000000; text-decoration: underline; }
.report-link { color: #FFFFFF !important; text-decoration: none !important; }
/* Footer
----------------------------------*/
.version { float: left; color: #3b3a3a; font-size: 10px; }
.authors { float: right; margin-top: 2px; }
.authors A { display: block; float: right; margin-left: 5px; }
.authors A.thought-works { width: 129px; height: 30px; background-image: url('../images/thoughtworks.png'); }
.authors A.saiku { width: 34px; height: 25px; background-image: url('../images/saiku.png'); }
.authors SPAN { display: none; }
/* ------------------------------------- */
/* Extensions
/* ------------------------------------- */
/* jQuery XPH Tooltip
----------------------------------*/
.tooltip, .tooltip-fixed { color: #3b3a3a; border: 1px solid #3b3a3a; background: #ffffee; text-align: left; }
.tooltip-fixed { width: 400px; }
.tooltip .title { display: none; }
.tooltip .text, .tooltip .url { padding: 5px; font-size: x-small; }
.tooltip .url { font-weight: bold; }
.tooltip .shortcut { font-style: italic; font-weight: bold; }
.tooltip P, .tooltip-fixed P { margin: 2px; padding: 0px; }
.tooltip-fixed .text { font-size: small; } | /* Base
----------------------------------*/
BODY { margin: 0px; padding: 0px; background-color: #FFF; font-family: verdana; font-size: 11px; }
/* Firefox alignment hack */
BODY { text-align: -moz-center; }
TABLE { width: 100%; background: #FFF url("../images/menu_links_bg.png"); }
THEAD, .strong { font-weight: bold; }
.data, .label { overflow: hidden; }
/*BR { display: none; } */
A { text-decoration: none; cursor: hand; cursor: pointer; }
.dialog, .hidden { display: none; }
.clear { clear: both; }
.invisible { margin: 0px; padding: 0px; }
.show-line-breaks BR { display: block; }
/* Columns
----------------------------------*/
.even-columns-2 { width: 50%; }
.even-columns-3 { width: 33%; }
.even-columns-4 { width: 25%; }
.even-columns-5 { width: 20%; }
/* Icons
----------------------------------*/
.icon { display: block; text-indent: -99999px; overflow: hidden; background-repeat: no-repeat; background-image: url('../images/icons.png'); }
.icon-rss { width: 30px; height: 30px; background-position: 0px 0px; }
.icon-refresh { width: 30px; height: 30px; background-position: -30px 0px; }
.icon-reload { width: 30px; height: 30px; background-position: -30px 0px; }
.icon-large-logout { width: 30px; height: 30px; background-position: -60px 0px; }
.icon-show-sections { width: 24px; height: 24px; background-position: 0px -30px; }
.icon-hide-sections { width: 24px; height: 24px; background-position: -24px -30px; }
.icon-show-errors { width: 24px; height: 24px; background-position: -48px -30px; }
.icon-change-password { width: 19px; height: 19px; background-position: 0px -54px; }
.icon-logout { width: 19px; height: 19px; background-position: -19px -54px; }
.icon-login { width: 19px; height: 19px; background-position: -38px -54px; }
.icon-large-up-arrow { width: 13px; height: 30px; background-position: 0px -73px; }
.icon-large-down-arrow { width: 13px; height: 30px; background-position: -13px -73px; }
.icon-small-up-arrow { width: 7px; height: 10px; background-position: 0px -103px; }
.icon-small-down-arrow { width: 7px; height: 10px; background-position: -10px -103px; }
/* Forms
----------------------------------*/
FORM { display: inline; margin: 0px; padding: 0px; border: none; vertical-align: baseline; }
INPUT { padding: 3px; border: none; background-color: #3b3a3a; color: #ededed; }
INPUT[type="submit"], INPUT[type="button"] { font-weight: bold; cursor: hand; cursor: pointer; }
INPUT[type="hidden"] { display: none; }
INPUT.invalid { background-color: #ce2a1b; }
/* Messaging
----------------------------------*/
.message { position: absolute; top: 0px; left: 0px; width: 100%; height: 40px; margin: 0px; color: #ededed; font-weight: bold; font-size: 20px; text-align: center; cursor: hand; cursor: pointer; }
.message SPAN { display: inline-block; margin-top: 5px; }
.message A { color: #FFFFFF; text-decoration: underline; }
.message INPUT[type="submit"], .message INPUT[type="button"] { margin: 6px; float: right; border: 1px solid #ededed; background-color: transparent; color: #ededed; font-weight: bold; cursor: hand; cursor: pointer; }
/* Dialogs
----------------------------------*/
.dialog OL { text-align: left; }
/* Selectbox
----------------------------------*/
.selection { float: left; width: 100%; padding: 0px; background-color: #3b3a3a; border-top: 1px dashed #FFF; }
.selection-data { width: 100%; list-style-type: none; }
.selection-data LI { display: inline-block; width: 33%; float: left; }
.selection-data A, .selection-data SPAN { color: #FFF; font-size: 14px; font-weight: bold; text-decoration: none; }
.selection-link-hide { clear: both; float: right; margin: 3px; }
/* Alignment
----------------------------------*/
.left { padding-left: 0.5em; text-align: left; }
.right { padding-right: 0.5em; text-align: right; }
.center { text-align: center; }
/* Background
----------------------------------*/
.exception { background-color: #ce2a1b !important; }
.success, #content UL LI.build-passed-link { background-color: #4e800c !important; }
.failed, #content UL LI.build-failed-link { background-color: #ce2a1b !important; }
.warning { background-color: #f0b440 !important; }
.success-light { background-color: #639f27 !important; }
.failed-light { background-color: #c94b3e !important; }
.warning-light { background-color: #f1c775 !important; }
/* Text
----------------------------------*/
.success-text { color: #4e800c !important; }
.failed-text { color: #ce2a1b !important; }
.warning-text { color: #B77F12 !important; }
.failed-underline { border-bottom: dotted 2px #ce2a1b; }
/* Layout
----------------------------------*/
#header, #main, #footer { width: 1000px; margin: 0 auto; padding: 0px; text-align: left; }
#header { margin-top: 10px !important; height: 65px; border-bottom: solid 2px #3b3a3a; vertical-align: top; }
#menu { float: left; width: 200px; margin: 20px 10px 20px 0px; padding: 0px; }
#content { float: right; clear: right; width: 790px; margin-top: 20px; }
#footer { margin-bottom: 20px !important; border-top: solid 2px #3b3a3a; }
/* Header
----------------------------------*/
.header-logo { float: left; color: #3b3a3a; }
.heading { float: left; font: 30px times new roman; letter-spacing: 2px; }
.subheading { float: left; clear: both; margin-top: -7px; margin-left: 30px; color: #3b3a3a; font: 12px trebuchet ms; }
.header-login, .header-navigation { display: block; float: right; }
.header-login { padding: 5px; margin-top: -10px; clear: right; background-color: #f0b440; }
.header-login A { float: left; margin: 2px; }
.header-navigation { clear: both; }
.header-navigation UL { display: block; margin: 0px; list-style-type: none; }
.route { float: left; padding-left: 13px; background: url("../images/separator.png") no-repeat 0px 3px; color: #919EA9; font-weight: bold; }
.route A { color: #3b3a3a; text-decoration: none; }
.route-dashboard { padding-left: 0px !important; background-image: none !important; }
/* Menu
----------------------------------*/
#menu H2 { display: block; width: 100%; height: 35px; margin: 0px 0px 2px 0px; padding: 0px; background-color: #3b3a3a; color: #ededed; font: normal 15px trebuchet ms; vertical-align: middle; cursor: pointer; cursor: hand; }
#menu H2 SPAN { display: inline-block; margin: 6px 10px 6px 10px; }
#menu UL { display: block; margin: 0px; padding: 0px; list-style-type: none; }
#menu .section UL { width: 100%; }
#menu .section LI { display: block; width: 100%; height: 30px; margin: 0px 0px 1px 0px; padding: 0px; border-top: solid 1px #c9c5c5; border-bottom: solid 1px #c9c5c5; background-image: url("../images/menu_links_bg.png"); font-size: 12px; vertical-align: middle; }
#menu .section LI SPAN { display: block; margin: 7px; }
#menu A { color: #615e5c; font-weight: bold; }
#menu .build-passed-link { background: url("../images/build_passed_link_bg.png") !important; }
#menu .build-failed-link { background: url("../images/build_failed_link_bg.png") !important; }
.build-views { display: block; height: 25px; margin: 0px 0px 3px 0px; }
.build-views UL { float: right; height: 100%; margin-bottom: 3px !important; }
.build-views LI { float: left; width: 24px; height: 24px; margin: 1px; cursor: pointer; cursor: hand; }
.build-views LI SPAN { display: none; }
.colors { float: left !important; margin-left: -1px !important; }
#searchbox { width: 130px; margin: -4px 0px 0px 2px; background-color: #3b3a3a; color: #ededed; font: normal 15px trebuchet ms; vertical-align: middle; border-bottom: 1px solid #c9c5c5; }
/* Content
----------------------------------*/
#content .section { margin: 0px; margin-bottom: 5px !important; }
#content .section-content { width: 770px; margin: 0px; padding: 10px; background: #EEE; color: #3b3a3a !important; font-size: 12px; overflow: hidden; }
H1.title, #content .build { display: block; width: 100%; height: 60px; margin: 0px; padding: 0px; background-color: #3b3a3a; font-size: 24px; vertical-align: middle; }
H1.title, H1.title A, #content .build A { color: #ededed; font-family: trebuchet ms; cursor: pointer; cursor: hand; }
H1.title SPAN, #content .build SPAN { float: left; margin-left: 10px !important; margin-top: 15px; }
H1.title SPAN, H1.title A, #content .build SPAN, #content .build A { display: inline-block; }
.title-data { float: right !important; margin-right: 10px !important; }
.title-data .icon { margin: 0.1em; background-color: #FFF; }
.title-data .icon SPAN, .title-data .icon INPUT { display: none; }
.title-data SPAN { margin: 0px !important; }
.button-rss, .title-data .icon-large-down-arrow { background-color: Transparent !important; }
#content .build { margin-bottom: 4px !important; }
.log, PRE { overflow: auto; white-space: pre-wrap; }
.section-content A { color: #000000; text-decoration: underline; }
.report-link { color: #FFFFFF !important; text-decoration: none !important; }
/* Footer
----------------------------------*/
.version { float: left; color: #3b3a3a; font-size: 10px; }
.authors { float: right; margin-top: 2px; }
.authors A { display: block; float: right; margin-left: 5px; }
.authors A.thought-works { width: 129px; height: 30px; background-image: url('../images/thoughtworks.png'); }
.authors A.saiku { width: 34px; height: 25px; background-image: url('../images/saiku.png'); }
.authors SPAN { display: none; }
/* ------------------------------------- */
/* Extensions
/* ------------------------------------- */
/* jQuery XPH Tooltip
----------------------------------*/
.tooltip, .tooltip-fixed { color: #3b3a3a; border: 1px solid #3b3a3a; background: #ffffee; text-align: left; }
.tooltip-fixed { width: 400px; }
.tooltip .title { display: none; }
.tooltip .text, .tooltip .url { padding: 5px; font-size: x-small; }
.tooltip .url { font-weight: bold; }
.tooltip .shortcut { font-style: italic; font-weight: bold; }
.tooltip P, .tooltip-fixed P { margin: 2px; padding: 0px; }
.tooltip-fixed .text { font-size: small; } |
cheald/murmur-manager | 3 | manage-ice.rb | #!/bin/env ruby
BASE = File.expand_path(File.dirname(__FILE__))
require File.join(BASE, "interfaces", "ice.rb")
require File.join(BASE, 'helpers')
require 'yaml'
class UnknownCommandException < Exception; end
# Run a management command against one virtual murmur server.
#
# meta    - Meta proxy used to look the server up
# id      - numeric id of the virtual server
# command - sub-command name (nil or "" falls through to "config")
# args    - remaining command-line words
def server_command(meta, id, command = nil, *args)
  server = meta.get_server(id)
  if command == "set"
    key, *rest = args
    value = rest.join(" ")
    server[key] = value
    puts "Set #{key} = #{value}"
  elsif command == "start"
    server.start
  elsif command == "stop"
    server.stop
  elsif command == "restart"
    server.restart!
  elsif command == "destroy"
    server.destroy!
  elsif command == "supw"
    password = args.shift
    raise "Cannot set a blank superuser password" if password.nil? || password == ""
    server.setSuperuserPassword(password)
  elsif command.nil? || command == "" || command == "config"
    # Default action: dump the configuration, first line of each value only.
    print_block do
      server.config.each do |key, value|
        line key, value.split("\n").first
      end
    end
  else
    raise UnknownCommandException
  end
end
# Run a meta-level command (one that is not tied to a single virtual server).
#
# meta    - Meta proxy
# command - "list" or "new"
# args    - remaining command-line words (optional port for "new")
def meta_command(meta, command = nil, *args)
  if command == "list"
    # Tabulate every registered virtual server.
    print_block do
      line "Server ID", "Name", "Running"
      line "---------", "----", "-------"
      meta.list_servers.each do |srv|
        line srv.id, srv.config["registername"], srv.isRunning
      end
    end
  elsif command == "new"
    port = args.first
    # A non-numeric port argument means "let the server pick".
    port = nil if port && port.to_i == 0
    server = meta.new_server(port)
    puts "New server: ID #{server.id} added"
  else
    raise UnknownCommandException
  end
end
begin
  # Load Ice connection options for the Meta proxy.
  # Fixes: File.read avoids the IO handle leaked by `open(...).read`, and
  # YAML parsing of an empty file yields a false value, so fall back to an
  # empty hash instead of passing `false` to the constructor.
  opts = YAML::load(File.read('options.yml')) || {}
  meta = Murmur::Ice::Meta.new opts
  # For a Glacier2 connection:
  # meta = Murmur::Ice::Meta.new "host.com", 4063, "user", "pass"

  # A numeric first argument addresses one virtual server; anything else is
  # a meta-level command.
  if (ARGV[0] || 0).to_i != 0 then
    server_command(meta, *ARGV)
  else
    meta_command(meta, *ARGV)
  end
rescue UnknownCommandException
  help
end
| #!/bin/env ruby
BASE = File.expand_path(File.dirname(__FILE__))
require File.join(BASE, "interfaces", "ice.rb")
require File.join(BASE, 'helpers')
require 'yaml'
class UnknownCommandException < Exception; end
# Run a management command against one virtual murmur server.
#
# meta    - Meta proxy used to look the server up
# id      - numeric id of the virtual server
# command - sub-command name (nil or "" falls through to "config")
# args    - remaining command-line words
def server_command(meta, id, command = nil, *args)
  server = meta.get_server(id)
  if command == "set"
    key, *rest = args
    value = rest.join(" ")
    server[key] = value
    puts "Set #{key} = #{value}"
  elsif command == "start"
    server.start
  elsif command == "stop"
    server.stop
  elsif command == "restart"
    server.restart!
  elsif command == "destroy"
    server.destroy!
  elsif command == "supw"
    password = args.shift
    raise "Cannot set a blank superuser password" if password.nil? || password == ""
    server.setSuperuserPassword(password)
  elsif command.nil? || command == "" || command == "config"
    # Default action: dump the configuration, first line of each value only.
    print_block do
      server.config.each do |key, value|
        line key, value.split("\n").first
      end
    end
  else
    raise UnknownCommandException
  end
end
# Run a meta-level command (one that is not tied to a single virtual server).
#
# meta    - Meta proxy
# command - "list" or "new"
# args    - remaining command-line words (optional port for "new")
def meta_command(meta, command = nil, *args)
  if command == "list"
    # Tabulate every registered virtual server.
    print_block do
      line "Server ID", "Name", "Running"
      line "---------", "----", "-------"
      meta.list_servers.each do |srv|
        line srv.id, srv.config["registername"], srv.isRunning
      end
    end
  elsif command == "new"
    port = args.first
    # A non-numeric port argument means "let the server pick".
    port = nil if port && port.to_i == 0
    server = meta.new_server(port)
    puts "New server: ID #{server.id} added"
  else
    raise UnknownCommandException
  end
end
begin
  # Load Ice connection options for the Meta proxy.
  # Fix: File.read avoids the IO handle leaked by `open(...).read`; the
  # `|| {}` fallback covers YAML parsing an empty file to a false value.
  opts = YAML::load(File.read('options.yml')) || {}
  meta = Murmur::Ice::Meta.new opts
  # For a Glacier2 connection:
  # meta = Murmur::Ice::Meta.new "host.com", 4063, "user", "pass"

  # A numeric first argument addresses one virtual server; anything else is
  # a meta-level command.
  if (ARGV[0] || 0).to_i != 0 then
    server_command(meta, *ARGV)
  else
    meta_command(meta, *ARGV)
  end
rescue UnknownCommandException
  help
end
|
acspike/FretFind2D | 12 | src/fretfind.html | <html xmlns="http://www.w3.org/1999/xhtml">
<head>
<!--
Copyright (C) 2004, 2005, 2010 Aaron C Spike
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-->
<title>FretFind2D</title>
<meta name="keywords" lang="en" content="FretFind, stringed instrument, luthier, fret, guitar, fret placement calculator" />
<meta name="description" lang="en" content="FretFind2d models fretboards as a system of line segments on a two dimensional plane to design fretboards with multiple scale lengths and microtonal scales." />
<link rel="stylesheet" type="text/css" href="fretfind.css" />
<script type="text/javascript" src="libs/jquery-1.4.2.min.js"><!-- --></script>
<script type="text/javascript" src="libs/jquery.ba-bbq.min.js"><!-- --></script>
<script type="text/javascript" src="libs/raphael-min.js"><!-- --></script>
<script type="text/javascript" src="libs/FileSaver.min.js"><!-- --></script>
<script type="text/javascript" src="libs/base64.js"><!-- --></script>
<script type="text/javascript" src="libs/sprintf.js"><!-- --></script>
<script type="text/javascript" src="libs/jspdf.js"><!-- --></script>
<script type="text/javascript" src="fretfind.js"><!-- --></script>
<script type="text/javascript">
//<![CDATA[
// Collect the numeric value of every per-string length input under #<id>.
var getLengths = function(id) {
    return $('#' + id + ' > input').map(function(_, field) {
        return parseFloat(field.value);
    }).get();
};
// Rebuild the per-string scale-length inputs so they match the current
// string count.  Existing values are preserved when `lengths` is omitted;
// missing entries default to 25 + 0.5 per string.
var setLengths = function(length_id, string_count_id, change_callback, lengths) {
    var count = ff.getInt(string_count_id);
    if (typeof lengths === 'undefined') {
        lengths = getLengths(length_id);
    }
    var rows = [];
    for (var n = 0; n < count; n++) {
        rows.push('string ' + (n + 1) + ': <input type="text" value="' + (lengths[n] || 25 + (n * .5)) + '" /><br />');
    }
    $('#' + length_id).html(rows.join(''));
    $('#' + length_id + ' > input').change(change_callback);
};
// Collect the numeric gauge of every per-string input under #<id>.
var getGauges = function(id) {
    return $('#' + id + ' > input').map(function(_, field) {
        return parseFloat(field.value);
    }).get();
};
// Rebuild the per-string gauge inputs so they match the current string
// count.  Existing values are preserved when `gauges` is omitted; missing
// entries default to 0.0.
var setGauges = function(gauge_id, string_count_id, change_callback, gauges) {
    var count = ff.getInt(string_count_id);
    if (typeof gauges === 'undefined') {
        gauges = getGauges(gauge_id);
    }
    var rows = [];
    for (var n = 0; n < count; n++) {
        rows.push('string ' + (n + 1) + ': <input type="text" value="' + (gauges[n] || 0.0) + '" /><br />');
    }
    $('#' + gauge_id).html(rows.join(''));
    $('#' + gauge_id + ' > input').change(change_callback);
};
// computes string offsets along the nut itself, assuming that width does not include the overhang (it's already accounted for)
// if "proportional" is not selected, gauges will all be 0 and this will simply return
// the offsets along the nut.
// Return the distance of each string (except the last) from the first
// string, measured along the (possibly slanted) nut or bridge segment.
//   strings       - number of strings
//   gauges        - per-string diameters (only used in 'proportional' mode)
//   actual_length - length of the nut/bridge segment
//   perp_width    - its perpendicular (delta-x) string width
//   spacingMode   - 'equal' | 'proportional'
var computeOffsets = function(strings, gauges, actual_length, perp_width, spacingMode) {
    var offsets = [0];
    // In 'equal' mode string thickness is ignored, so treat all gauges as 0.
    const corrected_gauges = spacingMode === 'proportional' ? gauges : new Array(strings).fill(0);
    const working_area = perp_width - corrected_gauges.reduce((tot, cur) => tot + cur);
    const perp_gap = working_area / (strings - 1);
    for (var i = 1; i < strings - 1; i++) {
        // Fixed: these two were assigned without var/const, leaking
        // implicit globals (a ReferenceError in strict mode).
        const half_adjacent_strings = (corrected_gauges[i - 1] + corrected_gauges[i]) / 2.0;
        const next_space = perp_gap + half_adjacent_strings;
        // Scale the perpendicular spacing onto the slanted segment length.
        offsets.push(offsets[i - 1] + next_space * actual_length / perp_width);
    }
    return offsets;
};
// output a guitar (scale, tuning and strings (with fretboard edges))
// based upon form values
// Assemble the full guitar model from the worksheet form: the scale
// (equal-tempered or Scala), the tuning, one ff.Segment per string, and the
// two fretboard edges.  The returned object is consumed by ff.fretGuitar /
// ff.drawGuitar and by every download generator.
var getGuitar = function() {
    //get form values
    var lengthMode = ff.getAlt('length');
    var spacingMode = ff.getAlt('spacing');
    var scaleLength = ff.getFlt('len');
    var scaleLengthF = ff.getFlt('lenF');
    var scaleLengthL = ff.getFlt('lenL');
    var perp = ff.getFlt('pDist');
    var nutWidth = ff.getFlt('nutWidth');
    var bridgeWidth = ff.getFlt('bridgeWidth');
    var strings = ff.getInt('numStrings');
    var units = $("input:checked[name='units']").val();
    var gauges = getGauges('igauges');
    var lengths = getLengths('ilengths');
    if (lengthMode === 'individual') {
        // Individual mode: outer strings take their lengths from the
        // per-string list and use the dedicated perpendicular-distance field.
        scaleLengthF = lengths[0];
        scaleLengthL = lengths[lengths.length - 1];
        perp = ff.getFlt('ipDist');
    }
    var frets = ff.getInt('numFrets');
    var tuning = ff.getTuning('tuning');
    // Overhang at {nut, bridge} x {first, last} string, per input mode.
    var oNF;
    var oNL;
    var oBF;
    var oBL;
    switch (ff.getAlt('overhang')) {
        case 'equal':
            oNF = oNL = oBF = oBL = ff.getFlt('oE');
            break;
        case 'nutbridge':
            oNF = oNL = ff.getFlt('oN');
            oBF = oBL = ff.getFlt('oB');
            break;
        case 'firstlast':
            oNF = oBF = ff.getFlt('oF');
            oBL = oNL = ff.getFlt('oL');
            break;
        case 'all':
            oNF = ff.getFlt('oNF');
            oBF = ff.getFlt('oBF');
            oNL = ff.getFlt('oNL');
            oBL = ff.getFlt('oBL');
            break;
    }
    var scale;
    if (ff.getAlt('scale') === 'et') {
        var tones = ff.getFlt('root');
        scale = ff.etScale(tones, 2);
    } else {
        var scala = ff.getStr('scl');
        scale = ff.scalaScale(scala);
    }
    //choose an x value for the center line
    var nutHalf = nutWidth / 2;
    var bridgeHalf = bridgeWidth / 2;
    var nutCandidateCenter = (nutHalf) + oNL;
    var bridgeCandidateCenter = (bridgeHalf) + oBL;
    var xcenter = bridgeCandidateCenter >= nutCandidateCenter ? bridgeCandidateCenter : nutCandidateCenter;
    // (Fixed: dropped four unused locals fbnxf/fbbxf/fbnxl/fbbxl; the edge
    // segments fbf/fbl are built below from the nut/bridge segments.)
    //find x values for first and last strings
    var snxf = xcenter + nutHalf;
    var sbxf = xcenter + bridgeHalf;
    var snxl = xcenter - nutHalf;
    var sbxl = xcenter - bridgeHalf;
    //find the slope of the strings
    var fdeltax = sbxf - snxf;
    var ldeltax = sbxl - snxl;
    var fdeltay;
    var ldeltay;
    if (lengthMode === 'single') {
        fdeltay = ldeltay = scaleLength;
    } else {
        // Scale length is measured along the string; recover its y extent.
        fdeltay = Math.sqrt((scaleLengthF * scaleLengthF) - (fdeltax * fdeltax));
        ldeltay = Math.sqrt((scaleLengthL * scaleLengthL) - (ldeltax * ldeltax));
    }
    //temporarily place first and last strings
    var first = new ff.Segment(new ff.Point(snxf, 0), new ff.Point(sbxf, fdeltay));
    var last = new ff.Segment(new ff.Point(snxl, 0), new ff.Point(sbxl, ldeltay));
    var perp_y = 0;
    if (lengthMode === 'multiple' || lengthMode === 'individual') {
        //translate so that perpendicular distance lines up
        var fperp = perp * fdeltay;
        var lperp = perp * ldeltay;
        if (fdeltay <= ldeltay) {
            first.translate(0, (lperp - fperp));
            perp_y = lperp;
        } else {
            last.translate(0, (fperp - lperp));
            perp_y = fperp;
        }
    }
    var nut = new ff.Segment(first.end1.copy(), last.end1.copy());
    var bridge = new ff.Segment(first.end2.copy(), last.end2.copy());
    if (lengthMode === 'multiple' || lengthMode === 'individual') {
        //overhang measurements are now converted from delta x to along line lengths
        oNF = (oNF * nut.length()) / nutWidth;
        oNL = (oNL * nut.length()) / nutWidth;
        oBF = (oBF * bridge.length()) / bridgeWidth;
        oBL = (oBL * bridge.length()) / bridgeWidth;
    }
    //place fretboard edges
    var fbf = new ff.Segment(nut.pointAt(-oNF), bridge.pointAt(-oBF));
    var fbl = new ff.Segment(nut.pointAt(nut.length() + oNL), bridge.pointAt(bridge.length() + oBL));
    //normalize values into the first quadrant via translate
    if (fbf.end1.y < 0 || fbl.end1.y < 0) {
        var move = fbf.end1.y <= fbl.end1.y ? -fbf.end1.y : -fbl.end1.y;
        first.translate(0, move);
        last.translate(0, move);
        nut.translate(0, move);
        bridge.translate(0, move);
        fbf.translate(0, move);
        fbl.translate(0, move);
        perp_y += move;
    }
    var nutOffsets = computeOffsets(strings, gauges, nut.length(), nutWidth, spacingMode);
    var bridgeOffsets = computeOffsets(strings, gauges, bridge.length(), bridgeWidth, spacingMode);
    // Inner strings are interpolated between the already-placed outer two.
    var lines = [first];
    for (var i = 1; i <= (strings - 2); i++) {
        var n = nut.pointAt(nutOffsets[i]);
        var b = bridge.pointAt(bridgeOffsets[i]);
        if (lengthMode === 'individual') {
            // Re-place this string so it gets its own scale length while
            // keeping the chosen perpendicular-distance alignment.
            var l = lengths[i];
            var nx = n.x;
            var bx = b.x;
            var deltax = Math.abs(nx - bx);
            var deltay = Math.sqrt((l * l) - (deltax * deltax));
            n = new ff.Point(nx, 0);
            b = new ff.Point(bx, deltay);
            var perpy = perp * deltay;
            var perpy_diff = perp_y - perpy;
            n.translate(0, perpy_diff);
            b.translate(0, perpy_diff);
        }
        lines.push(new ff.Segment(n, b));
    }
    lines.push(last);
    return {
        scale: scale,
        tuning: tuning,
        strings: lines,
        edge1: fbf,
        edge2: fbl,
        center: xcenter,
        fret_count: frets,
        units: units
    };
};
// Serialize the current worksheet state into a bookmarkable URL
// (current page address plus a query-style fragment).
var getLink = function() {
    var params = {};
    // Plain inputs/textareas are keyed by their element id.
    $('#worksheet').find('input[id], textarea[id]').each(function(i, field) {
        params[$(field).attr('id')] = $(field).val();
    });
    // Array-valued and alternative-mode state uses short dedicated keys.
    params.t = ff.getTuning('tuning');
    params.il = getLengths('ilengths');
    params.ig = getGauges('igauges');
    params.u = $("input:checked[name='units']").val();
    params.sl = ff.getAlt('length');
    params.ns = ff.getAlt('spacing');
    params.scale = ff.getAlt('scale');
    params.o = ff.getAlt('overhang');
    var base = window.location.toString().replace(/#.*/, '');
    return base + '#' + $.param(params);
};
//surface for drawing fretboard
var paper;
// Guard flag: stays false while updateFormFromHash() is still populating
// the form at startup, so the change handlers do not redraw prematurely.
var processChanges = false;
// Recompute the guitar and refresh the diagram, the data tables and the
// bookmark link.  Wired as the change handler on every worksheet input.
var onChange = function() {
if (processChanges) {
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
ff.drawGuitar(paper, guitar);
$('#tables').html(ff.getTable(guitar));
$('#bookmark').attr('href', getLink());
}
};
// Restore worksheet state from the URL fragment (the inverse of getLink()).
var updateFormFromHash = function() {
var params = $.deparam.fragment();
// Plain inputs/textareas are keyed by their element id.
$('#worksheet').find('input[id], textarea[id]').each(function(index,item){
var id = $(item).attr('id');
if (params.hasOwnProperty(id)){
$(item).val(params[id]);
}
});
// Array-valued state goes through its setters, which rebuild the inputs.
setLengths('ilengths','numStrings', onChange, (params['il'] || []));
setGauges('igauges', 'numStrings', onChange, (params['ig'] || []));
ff.setTuning('tuning','numStrings', onChange, (params['t'] || []));
// Re-select the units and the alternative-mode tabs by simulating clicks.
$("input[name='units']").filter("[value='"+(params['u']||'in')+"']").click();
$('#'+params.sl).click();
$('#'+params.ns).click();
$('#'+params.scale).click();
$('#'+params.o).click();
};
// Page bootstrap: wire up form handlers, restore state from the URL hash,
// perform the first draw, then attach the download buttons.
$(document).ready(function() {
ff.initHelp('worksheet');
ff.initAlternatives('worksheet', onChange);
// Changing the string count rebuilds all per-string input groups.
$('#numStrings').change(function(){
ff.setTuning('tuning', 'numStrings', onChange);
setLengths('ilengths','numStrings', onChange);
setGauges('igauges', 'numStrings', onChange);
});
$('#worksheet').find('input, textarea').change(onChange);
ff.setTuning('tuning', 'numStrings', onChange);
setLengths('ilengths','numStrings', onChange);
setGauges('igauges', 'numStrings', onChange);
paper = Raphael('diagram',200,800);
updateFormFromHash();
// Enable redraws only after the form is fully populated, then draw once.
processChanges = true;
onChange();
// Feature-detect the Blob constructor; the download buttons need it.
var isFileSaverSupported = false;
try {
isFileSaverSupported = !!new Blob;
} catch (e) {}
var dl = $('#downloads');
if (isFileSaverSupported){
// Each button regenerates the guitar and hands a fresh Blob to saveAs().
dl.append('<dt>DXF</dt><dd><button id="download_dxf">Download</button></dd>');
$('#download_dxf').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getDXF(guitar)], {type: "image/vnd.dxf"});
saveAs(blob, "fretboard.dxf");
});
dl.append('<dt>PDF (multi-page)</dt>' +
'<dd>' +
'<label><input type="radio" name="pdfm_pagesize" value="letter" checked="checked"/>letter</label>' +
'<label><input type="radio" name="pdfm_pagesize" value="a4" />A4</label>' +
'</dd>' +
'<dd><button id="download_pdfm">Download</button></dd>');
$('#download_pdfm').click(function(){
var guitar = getGuitar();
var pagesize = $("input:checked[name='pdfm_pagesize']").val();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getPDFMultipage(guitar, pagesize)], {type: "application/pdf"});
saveAs(blob, "fretboard.pdf");
});
dl.append('<dt>PDF (single page)</dt><dd><button id="download_pdf">Download</button></dd>');
$('#download_pdf').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getPDF(guitar)], {type: "application/pdf"});
saveAs(blob, "fretboard.pdf");
});
dl.append('<dt>SVG</dt><dd><button id="download_svg">Download</button></dd>');
$('#download_svg').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getSVG(guitar)], {type: "image/svg+xml"});
saveAs(blob, "fretboard.svg");
});
dl.append('<dt>CSV</dt><dd><button id="download_csv">Download</button></dd>');
$('#download_csv').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getCSV(guitar)], {type: "text/csv"});
saveAs(blob, "fretboard.csv");
});
dl.append('<dt>HTML</dt><dd><button id="download_html">Download</button></dd>');
$('#download_html').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getHTML(guitar)], {type: "text/html"});
saveAs(blob, "fretboard.html");
});
dl.append('<dt>TAB</dt><dd><button id="download_tab">Download</button></dd>');
$('#download_tab').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getTAB(guitar)], {type: "text/tab-separated-values"});
saveAs(blob, "fretboard.tab");
});
} else {
dl.append('<dd><b>Buttons missing?<br/>Downloads require a modern browser that supports <a href="https://developer.mozilla.org/en-US/docs/Web/API/Blob/Blob#Browser_compatibility">the Blob constructor API</a>.</b></dd>');
}
});
//]]>
</script>
</head>
<body>
<h1>FretFind2D</h1>
<p>
FretFind2D is a two dimensional fretboard design tool.
FretFind2D doesn't just calculate fret spacing.
It models the entire fretboard, strings and frets,
as a system of line segments on a two dimensional plane.
Because of this approach, it can design fretboards for instruments with
multiple scale lengths and non-parallel frets
as well as fretboards for instruments that play just or meantone scales.
</p>
<div id="fretfind">
<dl id="worksheet">
<dd>units</dd>
<dd>
<label><input type="radio" name="units" value="in" checked="checked"/>inches</label>
<label><input type="radio" name="units" value="cm" />centimeters</label>
<label><input type="radio" name="units" value="mm" />millimeters</label>
</dd>
<dt>scale length</dt>
<dd>
<dl id="length" class="alternative">
<dt id="single">single</dt>
<dd>
<dl>
<dt>fundamental scale length</dt>
<dd><input type="text" id="len" value="25" /></dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
The fundamental scale length is the length of a line drawn from
the middle of the nut to the middle of the bridge.
For single scale length instruments that line is the perpendicular
bisector of both the nut and the bridge.
I call this length "fundamental" because on a standard instrument
with a narrow nut and a wide bridge the outer strings
actually have a slightly longer scale length.
</dd>
</dl>
</dd>
<dt id="multiple">multiple</dt>
<dd>
<dl>
<dt>first string scale length</dt>
<dd><input type="text" id="lenF" value="25" /></dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
Enter the actual scale length of the first (traditional high E) string.
</dd>
<dt>last string scale length</dt>
<dd><input type="text" id="lenL" value="28" /></dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
Enter the actual scale length of the last (traditional low E) string.
</dd>
<dt>perpendicular fret distance</dt>
<dd><input type="text" id="pDist" value="0.5" /></dd>
<dd class="help">
<p>
The perpendicular fret distance
is the ratio of distances along the first and last
string that fall on a line perpendicular to the midline of the neck.
This is used to control the angle of the nut, frets and bridge.
</p>
<p>
Traditionally this property of non-parallel-ly fretted
fretboards is measured by assigning a "perpendicular fret".
"Perpendicular distance" avoids two problems with the "perpendicular fret" method.
First, it is possible that no fret falls into this perpendicular position.
With "perpendicular distance" we avoid fractional frets.
Second, it is possible and even likely with non-equal temperament fretboards that
as a fret crosses the fretboard it will fall at different ratios along the strings.
With "perpendicular distance" we avoid complex calculations
and have more predictable results.
</p>
<p>
A value of 0 results in a perpendicular nut.
A value of 1 results in a perpendicular bridge.
The default 0.5 results in a perpendicular octave fret.
To calculate an appropriate value for any fret,
simply divide the distance of the fret from the nut by the total length of the string.
In twelve tone equal temperament the values look like this:
</p>
<pre>
Fret P.D. Fret P.D.
1 0.05613 13 0.52806
2 0.10910 14 0.55455
3 0.15910 15 0.57955
4 0.20630 16 0.60315
5 0.25085 17 0.62542
6 0.29289 18 0.64645
7 0.33258 19 0.66629
8 0.37004 20 0.68502
9 0.40540 21 0.70270
10 0.43877 22 0.71938
11 0.47027 23 0.73513
12 0.50000 24 0.75000
</pre>
</dd>
</dl>
</dd>
<dt id="individual">individual</dt>
<dd>
<div class="experimental">Danger: Experimental!!!</div>
<dl>
<dt>string scale lengths:</dt>
<dd id="ilengths">
</dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
Enter the actual scale length of each string.
</dd>
<dt>perpendicular fret distance</dt>
<dd><input type="text" id="ipDist" value="0.5" /></dd>
<dd class="help">
<p>
The perpendicular fret distance
is the ratio of distances along the first and last
string that fall on a line perpendicular to the midline of the neck.
This is used to control the angle of the nut, frets and bridge.
</p>
<p>
Traditionally this property of non-parallel-ly fretted
fretboards is measured by assigning a "perpendicular fret".
"Perpendicular distance" avoids two problems with the "perpendicular fret" method.
First, it is possible that no fret falls into this perpendicular position.
With "perpendicular distance" we avoid fractional frets.
Second, it is possible and even likely with non-equal temperament fretboards that
as a fret crosses the fretboard it will fall at different ratios along the strings.
With "perpendicular distance" we avoid complex calculations
and have more predictable results.
</p>
<p>
A value of 0 results in a perpendicular nut.
A value of 1 results in a perpendicular bridge.
The default 0.5 results in a perpendicular octave fret.
To calculate an appropriate value for any fret,
simply divide the distance of the fret from the nut by the total length of the string.
In twelve tone equal temperament the values look like this:
</p>
<pre>
Fret P.D. Fret P.D.
1 0.05613 13 0.52806
2 0.10910 14 0.55455
3 0.15910 15 0.57955
4 0.20630 16 0.60315
5 0.25085 17 0.62542
6 0.29289 18 0.64645
7 0.33258 19 0.66629
8 0.37004 20 0.68502
9 0.40540 21 0.70270
10 0.43877 22 0.71938
11 0.47027 23 0.73513
12 0.50000 24 0.75000
</pre>
</dd>
</dl>
</dd>
</dl>
</dd>
<dt>string width at the nut</dt>
<dd><input type="text" id="nutWidth" value="1.375" /></dd>
<dd class="help">
The string width at the nut is the distance along the nut from the center
of the first string to the center of the last string.
I'm using delta x distance
(distance measured along a line drawn perpendicular to the neck's midline)
because I think that is what
you would feel as the width if you were playing an instrument with multiple scale lengths.
It also makes the calculation easier.
</dd>
<dt>string width at the bridge</dt>
<dd><input type="text" id="bridgeWidth" value="2.125" /></dd>
<dd class="help">
The string width at the bridge is the distance along the bridge from the center
of the first string to the center of the last string.
I'm using delta x distance
(distance measured along a line drawn perpendicular to the neck's midline)
because I think that is what
you would feel as the width if you were playing an instrument with multiple scale lengths.
It also makes the calculation easier.
</dd>
<dt>string spacing</dt>
<dd>
<dl id="spacing" class="alternative">
<dt id="equal">equal</dt>
<dd></dd>
<dt id="proportional">proportional</dt>
<dd>
<dl>
<dt>string gauges:</dt>
<dd id="igauges">
</dd>
</dl>
</dd>
</dl>
</dd>
<dd class="help">
<dl>
<dt>Equal:</dt>
<dd>Space out the strings evenly from center to center without regard for the thickness of the strings.</dd>
<dt>Proportional:</dt>
<dd>The spacing accounts for the diameter of the strings so that the empty space between each pair of strings is the same.</dd>
</dl>
<p>
Note that the outer two strings are still assumed to be centered on their coordinates,
i.e. if you enter a nut width of 2" then the outer edges of your outer two strings will
be wider than that by half of the sum of their gauges.
</p>
<p>
Enter the thickness of each string, with String 1 being the thinnest/highest.
For example, a standard set of electric guitar strings, in inches, would be 0.010, 0.013, 0.017, 0.026, 0.036, 0.046.
If you are using metric, please convert your string gauges to metric as well.
</p>
</dd>
<dt>fretboard overhang</dt>
<dd>
<dl id="overhang" class="alternative">
<dt id="equal">equal</dt>
<dd><input type="text" id="oE" value="0.09375" /></dd>
<dt id="nutbridge">nut & bridge</dt>
<dd>
<table>
<tr><td>nut</td><td>
<input type="text" id="oN" value="0.09375" />
</td></tr>
<tr><td>bridge</td><td>
<input type="text" id="oB" value="0.09375" />
</td></tr>
</table>
</dd>
<dt id="firstlast">first & last</dt>
<dd>
<table>
<tr><td>last</td><td>first</td></tr>
<tr><td>
<input type="text" id="oL" value="0.09375" />
</td><td>
<input type="text" id="oF" value="0.09375" />
</td></tr>
</table>
</dd>
<dt id="all">all</dt>
<dd>
<table>
<tr><td> </td><td>last</td><td>first</td></tr>
<tr><td>nut</td><td>
<input type="text" id="oNL" value="0.09375" />
</td><td>
<input type="text" id="oNF" value="0.09375" />
</td></tr>
<tr><td>bridge</td><td>
<input type="text" id="oBL" value="0.09375" />
</td><td>
<input type="text" id="oBF" value="0.09375" />
</td></tr>
</table>
</dd>
</dl>
</dd>
<dd class="help">
The fretboard overhang is the distance from the center of outer strings to edge of nut or bridge.
For fretboards with multiple scale lengths this is calculated as delta x distance,
distance measured along a line drawn perpendicular to the neck's midline.
There are four input modes for overhang.
<dl>
<dt>Equal:</dt><dd>you enter a single value and the overhang will be constant.</dd>
<dt>Nut & Bridge:</dt><dd>allows you to specify one overhang at the nut and another overhang at the bridge.</dd>
<dt>First & Last:</dt><dd>allows you to specify one overhang for the first string and another for the last string.</dd>
<dt>All:</dt><dd>you specify an overhang for all four locations separately.</dd>
</dl>
(Please note, in FretFind the first string is shown on the far right
where the high E string would be on a typical right-handed guitar.
The last string is on the far left, where the low E would be found.)
</dd>
<dt>calculation method</dt>
<dd>
<dl id="scale" class="alternative">
<dt id="et">equal (root 2)</dt>
<dd><input type="text" id="root" value="12" /></dd>
<dt id="scala">just (scala)</dt>
<dd>
<textarea rows="17" id="scl">
! 12tet.scl
!
12 tone equal temperament
12
!
100.0
200.
300.
400.
500.
600.
700.
800.
900.
1000.
1100.
2/1
</textarea>
</dd>
</dl>
</dd>
<dd class="help">
The calculation method determines how FretFind calculates fret placement.
There are two input modes.
<dl>
<dt>Equal:</dt><dd>uses the X<sup>th</sup> root of two, a standard
method for calculating equal temperaments. You enter the number of tones per octave.</dd>
<dt>Scala:</dt><dd>uses a Scala SCL file which allows you to specify
each scale step exactly in either ratios or cents.
If you are interested in creating your own scale, please read this description of the
<a href="http://www.huygens-fokker.org/scala/scl_format.html">Scala scale file format</a>.
Otherwise try a scale from the Scala scale archive, found at the very bottom of the
<a href="http://www.huygens-fokker.org/scala/downloads.html">Scala download page</a>.
You can learn more about Scala at the
<a href="http://www.huygens-fokker.org/scala/">Scala home page</a>.</dd>
</dl>
</dd>
<dt>number of frets</dt>
<dd><input type="text" id="numFrets" value="24" /></dd>
<dd class="help">
This is the number of frets you would like FretFind to calculate.
The number of frets must be an integer.
</dd>
<dt>number of strings</dt>
<dd><input type="text" id="numStrings" value="6" /></dd>
<dd class="help">
The number of strings must be an integer.
If you change the number of strings be sure to update the tuning section below (only useful with non-equal temperament scales).
</dd>
<dt>tuning</dt>
<dd id="tuning">
</dd>
<dd class="help">
Enter the scale step (of the scale defined above) to which each string will be tuned.
For example a standard guitar in the key of E would be tuned 0, 7, 3, 10, 5, 0.
The first string is the string to the far right on the fretboard.
This step is not important for the Equal calculation method.
Entering a tuning for the Scala calculation method will very likely result in partial frets.
</dd>
</dl><!-- worksheet -->
<div id="diagram"><!-- --></div>
<dl id="downloads">
<dt>Link</dt><dd><a href="#" id="bookmark">Link to this design</a></dd>
</dl>
<div id="tables"><!-- --></div>
</div>
<p class="dev">The latest version in development is available on <a href="https://acspike.github.io/FretFind2D/src/fretfind.html">GitHub</a>.</p>
<a href="http://github.com/acspike/FretFind2D"><img style="position: absolute; top: 0; right: 0; border: 0;" src="http://s3.amazonaws.com/github/ribbons/forkme_right_red_aa0000.png" alt="Fork me on GitHub" /></a>
</body>
</html>
| <html xmlns="http://www.w3.org/1999/xhtml">
<head>
<!--
Copyright (C) 2004, 2005, 2010 Aaron C Spike
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
-->
<title>FretFind2D</title>
<meta name="keywords" lang="en" content="FretFind, stringed instrument, luthier, fret, guitar, fret placement calculator" />
<meta name="description" lang="en" content="FretFind2d models fretboards as a system of line segments on a two dimensional plane to design fretboards with multiple scale lengths and microtonal scales." />
<link rel="stylesheet" type="text/css" href="fretfind.css" />
<script type="text/javascript" src="libs/jquery-1.4.2.min.js"><!-- --></script>
<script type="text/javascript" src="libs/jquery.ba-bbq.min.js"><!-- --></script>
<script type="text/javascript" src="libs/raphael-min.js"><!-- --></script>
<script type="text/javascript" src="libs/FileSaver.min.js"><!-- --></script>
<script type="text/javascript" src="libs/base64.js"><!-- --></script>
<script type="text/javascript" src="libs/sprintf.js"><!-- --></script>
<script type="text/javascript" src="libs/jspdf.js"><!-- --></script>
<script type="text/javascript" src="fretfind.js"><!-- --></script>
<script type="text/javascript">
//<![CDATA[
// Collect the numeric value of every per-string length input under #<id>.
var getLengths = function(id) {
    return $('#' + id + ' > input').map(function(_, field) {
        return parseFloat(field.value);
    }).get();
};
// Rebuild the per-string scale-length inputs so they match the current
// string count.  Existing values are preserved when `lengths` is omitted;
// missing entries default to 25 + 0.5 per string.
var setLengths = function(length_id, string_count_id, change_callback, lengths) {
    var count = ff.getInt(string_count_id);
    if (typeof lengths === 'undefined') {
        lengths = getLengths(length_id);
    }
    var rows = [];
    for (var n = 0; n < count; n++) {
        rows.push('string ' + (n + 1) + ': <input type="text" value="' + (lengths[n] || 25 + (n * .5)) + '" /><br />');
    }
    $('#' + length_id).html(rows.join(''));
    $('#' + length_id + ' > input').change(change_callback);
};
// Collect the numeric gauge of every per-string input under #<id>.
var getGauges = function(id) {
    return $('#' + id + ' > input').map(function(_, field) {
        return parseFloat(field.value);
    }).get();
};
// Rebuild the per-string gauge inputs so they match the current string
// count.  Existing values are preserved when `gauges` is omitted; missing
// entries default to 0.0.
var setGauges = function(gauge_id, string_count_id, change_callback, gauges) {
    var count = ff.getInt(string_count_id);
    if (typeof gauges === 'undefined') {
        gauges = getGauges(gauge_id);
    }
    var rows = [];
    for (var n = 0; n < count; n++) {
        rows.push('string ' + (n + 1) + ': <input type="text" value="' + (gauges[n] || 0.0) + '" /><br />');
    }
    $('#' + gauge_id).html(rows.join(''));
    $('#' + gauge_id + ' > input').change(change_callback);
};
// computes string offsets along the nut itself, assuming that width does not include the overhang (it's already accounted for)
// if "spacingMode" is not 'proportional', gauges are treated as all-zero and this
// simply returns evenly spaced offsets along the nut.
// Returns offsets for strings 0 .. strings-2; the last string is placed by the caller.
var computeOffsets = function(strings, gauges, actual_length, perp_width, spacingMode) {
    var offsets = [0];
    const corrected_gauges = spacingMode === 'proportional' ? gauges : new Array(strings).fill(0);
    // Initial value 0 keeps reduce() from throwing a TypeError on an empty list.
    const working_area = perp_width - corrected_gauges.reduce((tot, cur) => tot + cur, 0);
    const perp_gap = working_area / (strings - 1);
    for (var i = 1; i < strings - 1; i++) {
        // Declared locally: the original leaked both of these into global scope.
        const half_adjacent_strings = (corrected_gauges[i - 1] + corrected_gauges[i]) / 2.0;
        const next_space = perp_gap + half_adjacent_strings;
        // Scale the perpendicular gap onto the (possibly slanted) nut/bridge line.
        offsets.push(offsets[i - 1] + next_space * actual_length / perp_width);
    }
    return offsets;
};
// get the user-selected display options for on-screen display and all output options (i.e. SVG, PDF).
var getDisplayOptions = function() {
    return {
        showStrings: $("#showStrings").attr("checked"),
        showFretboardEdges: $("#showFretboardEdges").attr("checked"),
        showMetas: $("#showMetas").attr("checked"),
        showBoundingBox: $("#showBoundingBox").attr("checked"),
        extendFrets: $("#extendFrets").attr("checked")
    };
};
// output a guitar (scale, tuning and strings (with fretboard edges))
// based upon form values
var getGuitar = function() {
//get form values
var lengthMode = ff.getAlt('length');
var spacingMode = ff.getAlt('spacing');
var scaleLength = ff.getFlt('len');
var scaleLengthF = ff.getFlt('lenF');
var scaleLengthL = ff.getFlt('lenL');
var perp = ff.getFlt('pDist');
var nutWidth = ff.getFlt('nutWidth');
var bridgeWidth = ff.getFlt('bridgeWidth');
var strings = ff.getInt('numStrings');
var units = $("input:checked[name='units']").val();
var gauges = getGauges('igauges');
var lengths = getLengths('ilengths');
// in individual mode the outer scale lengths come from the per-string list
if (lengthMode === 'individual') {
scaleLengthF = lengths[0];
scaleLengthL = lengths[lengths.length-1];
perp = ff.getFlt('ipDist');
}
var frets = ff.getInt('numFrets');
var tuning = ff.getTuning('tuning');
// overhang at the four fretboard corners: Nut/Bridge x First/Last string
var oNF;
var oNL;
var oBF;
var oBL;
switch (ff.getAlt('overhang')) {
case 'equal':
oNF = oNL = oBF = oBL = ff.getFlt('oE');
break;
case 'nutbridge':
oNF = oNL = ff.getFlt('oN');
oBF = oBL = ff.getFlt('oB');
break;
case 'firstlast':
oNF = oBF = ff.getFlt('oF');
oBL = oNL = ff.getFlt('oL');
break;
case 'all':
oNF = ff.getFlt('oNF');
oBF = ff.getFlt('oBF');
oNL = ff.getFlt('oNL');
oBL = ff.getFlt('oBL');
break;
}
// either an equal temperament (Nth root of 2) or a Scala-defined scale
var scale;
if (ff.getAlt('scale') === 'et') {
var tones = ff.getFlt('root');
scale = ff.etScale(tones,2);
} else {
var scala = ff.getStr('scl');
scale = ff.scalaScale(scala);
}
//choose an x value for the center line
var nutHalf = nutWidth / 2;
var bridgeHalf = bridgeWidth / 2;
var nutCandidateCenter = (nutHalf) + oNL;
var bridgeCandidateCenter = (bridgeHalf) + oBL;
// center on whichever end (nut or bridge, including its overhang) is wider
var xcenter = bridgeCandidateCenter >= nutCandidateCenter ? bridgeCandidateCenter : nutCandidateCenter;
//find x values for fretboard edges
var fbnxf = xcenter + nutHalf + oNF;
var fbbxf = xcenter + bridgeHalf + oBF;
var fbnxl = xcenter - (nutHalf + oNL);
var fbbxl = xcenter - (bridgeHalf + oBL);
//find x values for first and last strings
var snxf = xcenter + nutHalf;
var sbxf = xcenter + bridgeHalf;
var snxl = xcenter - nutHalf;
var sbxl = xcenter - bridgeHalf;
//find the slope of the strings
var fdeltax = sbxf - snxf;
var ldeltax = sbxl - snxl;
var fdeltay;
var ldeltay;
if (lengthMode === 'single') {
fdeltay = ldeltay = scaleLength;
} else {
// scale length is the hypotenuse; recover each outer string's y extent
fdeltay = Math.sqrt((scaleLengthF * scaleLengthF) - (fdeltax * fdeltax));
ldeltay = Math.sqrt((scaleLengthL * scaleLengthL) - (ldeltax * ldeltax));
}
//temporarily place first and last strings
var first = new ff.Segment(new ff.Point(snxf, 0), new ff.Point(sbxf, fdeltay));
var last = new ff.Segment(new ff.Point(snxl, 0), new ff.Point(sbxl, ldeltay));
var perp_y = 0;
if (lengthMode === 'multiple' || lengthMode === 'individual') {
//translate so that perpendicular distance lines up
var fperp = perp * fdeltay;
var lperp = perp * ldeltay;
if (fdeltay <= ldeltay) {
first.translate(0, (lperp - fperp));
perp_y = lperp;
} else {
last.translate(0, (fperp - lperp));
perp_y = fperp;
}
}
// nut and bridge connect the corresponding ends of the outer strings
var nut = new ff.Segment(first.end1.copy(), last.end1.copy());
var bridge = new ff.Segment(first.end2.copy(), last.end2.copy());
if (lengthMode === 'multiple' || lengthMode === 'individual') {
//overhang measurements are now converted from delta x to along line lengths
oNF = (oNF * nut.length()) / nutWidth;
oNL = (oNL * nut.length()) / nutWidth;
oBF = (oBF * bridge.length()) / bridgeWidth;
oBL = (oBL * bridge.length()) / bridgeWidth;
}
//place fretboard edges;
var fbf = new ff.Segment(nut.pointAt(-oNF), bridge.pointAt(-oBF));
var fbl = new ff.Segment(nut.pointAt(nut.length() + oNL), bridge.pointAt(bridge.length() + oBL));
//normalize values into the first quadrant via translate
if (fbf.end1.y < 0 || fbl.end1.y < 0)
{
var move = fbf.end1.y <= fbl.end1.y ? -fbf.end1.y : -fbl.end1.y;
first.translate(0, move);
last.translate(0, move);
nut.translate(0, move);
bridge.translate(0, move);
fbf.translate(0, move);
fbl.translate(0, move);
perp_y += move;
}
// spread the interior strings along the nut and bridge
var nutOffsets = computeOffsets(strings, gauges, nut.length(), nutWidth, spacingMode);
var bridgeOffsets = computeOffsets(strings, gauges, bridge.length(), bridgeWidth, spacingMode);
var lines = [first];
for (var i=1; i<=(strings-2); i++)
{
var n = nut.pointAt(nutOffsets[i]);
var b = bridge.pointAt(bridgeOffsets[i]);
if (lengthMode === 'individual') {
// re-derive this string's endpoints from its own scale length, then
// shift it so its perpendicular point lines up with the outer strings
var l = lengths[i];
var nx = n.x;
var bx = b.x;
var deltax = Math.abs(nx - bx);
var deltay = Math.sqrt((l * l) - (deltax * deltax));
var n = new ff.Point(nx,0)
var b = new ff.Point(bx,deltay);
var perpy = perp * deltay;
var perpy_diff = perp_y - perpy;
n.translate(0, perpy_diff);
b.translate(0, perpy_diff);
}
lines.push(new ff.Segment(n, b));
}
lines.push(last);
// un-fretted guitar description; ff.fretGuitar() adds the frets later
return {
scale: scale,
tuning: tuning,
strings: lines,
edge1: fbf,
edge2: fbl,
center: xcenter,
fret_count: frets,
units: units
};
};
// Serialize the current worksheet state into a bookmarkable URL with a
// fragment that updateFormFromHash() can restore.
var getLink = function() {
    var params = {};
    // Every input/textarea that has an id contributes its raw value.
    $('#worksheet').find('input[id], textarea[id]').each(function(_, field) {
        params[$(field).attr('id')] = $(field).val();
    });
    // Composite widgets and mode selections are stored under short keys.
    params.t = ff.getTuning('tuning');
    params.il = getLengths('ilengths');
    params.ig = getGauges('igauges');
    params.u = $("input:checked[name='units']").val();
    params.sl = ff.getAlt('length');
    params.ns = ff.getAlt('spacing');
    params.scale = ff.getAlt('scale');
    params.o = ff.getAlt('overhang');
    // Strip any existing fragment before appending the new one.
    var base = window.location.toString().replace(/#.*/, '');
    return base + '#' + $.param(params);
};
// surface for drawing the fretboard (assigned in the ready handler)
var paper;
// suppress redraws until the form has been fully initialized
var processChanges = false;
// Recompute the guitar and refresh the drawing, data tables, and bookmark link.
var onChange = function() {
    if (!processChanges) {
        return;
    }
    var guitar = ff.fretGuitar(getGuitar());
    ff.drawGuitar(paper, guitar, getDisplayOptions());
    $('#tables').html(ff.getTable(guitar));
    $('#bookmark').attr('href', getLink());
};
// Restore the worksheet from the URL fragment produced by getLink().
var updateFormFromHash = function() {
    var params = $.deparam.fragment();
    // Plain text fields are restored by id when present in the fragment.
    $('#worksheet').find('input[id], textarea[id]').each(function(_, field) {
        var id = $(field).attr('id');
        if (params.hasOwnProperty(id)) {
            $(field).val(params[id]);
        }
    });
    // Composite widgets fall back to their defaults when absent.
    setLengths('ilengths', 'numStrings', onChange, params.il || []);
    setGauges('igauges', 'numStrings', onChange, params.ig || []);
    ff.setTuning('tuning', 'numStrings', onChange, params.t || []);
    $("input[name='units']").filter("[value='" + (params.u || 'in') + "']").click();
    // Re-select the saved alternative panes (length/spacing/scale/overhang).
    $('#' + params.sl).click();
    $('#' + params.ns).click();
    $('#' + params.scale).click();
    $('#' + params.o).click();
};
// Wire up the whole page: help toggles, alternative panes, per-string
// widgets, the drawing surface, fragment restore, and download buttons.
$(document).ready(function() {
ff.initHelp('worksheet');
ff.initAlternatives('worksheet', onChange);
// changing the string count rebuilds the per-string widgets
$('#numStrings').change(function(){
ff.setTuning('tuning', 'numStrings', onChange);
setLengths('ilengths','numStrings', onChange);
setGauges('igauges', 'numStrings', onChange);
});
$('#worksheet').find('input, textarea').change(onChange);
ff.setTuning('tuning', 'numStrings', onChange);
setLengths('ilengths','numStrings', onChange);
setGauges('igauges', 'numStrings', onChange);
paper = Raphael('diagram',200,800);
// restore any design encoded in the URL fragment, then enable redraws
updateFormFromHash();
processChanges = true;
onChange();
// download buttons require Blob support; otherwise show an explanation
var isFileSaverSupported = false;
try {
isFileSaverSupported = !!new Blob;
} catch (e) {}
var dl = $('#downloads');
if (isFileSaverSupported){
dl.append('<dt>DXF</dt><dd><button id="download_dxf">Download</button></dd>');
$('#download_dxf').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getDXF(guitar, getDisplayOptions())], {type: "image/vnd.dxf"});
saveAs(blob, "fretboard.dxf");
});
dl.append('<dt>PDF (multi-page)</dt>' +
'<dd>' +
'<label><input type="radio" name="pdfm_pagesize" value="letter" checked="checked"/>letter</label><br />' +
'<label><input type="radio" name="pdfm_pagesize" value="a4" />A4</label><br />' +
'<label><input type="radio" name="pdfm_pagesize" value="legal" />legal</label>' +
'</dd>' +
'<dd><button id="download_pdfm">Download</button></dd>');
$('#download_pdfm').click(function(){
var guitar = getGuitar();
var pagesize = $("input:checked[name='pdfm_pagesize']").val();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getPDFMultipage(guitar, getDisplayOptions(), pagesize)], {type: "application/pdf"});
saveAs(blob, "fretboard.pdf");
});
dl.append('<dt>PDF (single page)</dt><dd><button id="download_pdf">Download</button></dd>');
$('#download_pdf').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getPDF(guitar, getDisplayOptions())], {type: "application/pdf"});
saveAs(blob, "fretboard.pdf");
});
dl.append('<dt>SVG</dt><dd><button id="download_svg">Download</button></dd>');
$('#download_svg').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getSVG(guitar, getDisplayOptions())], {type: "image/svg+xml"});
saveAs(blob, "fretboard.svg");
});
dl.append('<dt>CSV</dt><dd><button id="download_csv">Download</button></dd>');
$('#download_csv').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getCSV(guitar)], {type: "text/csv"});
saveAs(blob, "fretboard.csv");
});
dl.append('<dt>HTML</dt><dd><button id="download_html">Download</button></dd>');
$('#download_html').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getHTML(guitar)], {type: "text/html"});
saveAs(blob, "fretboard.html");
});
dl.append('<dt>TAB</dt><dd><button id="download_tab">Download</button></dd>');
$('#download_tab').click(function(){
var guitar = getGuitar();
guitar = ff.fretGuitar(guitar);
var blob = new Blob([ff.getTAB(guitar)], {type: "text/tab-separated-values"});
saveAs(blob, "fretboard.tab");
});
} else {
dl.append('<dd><b>Buttons missing?<br/>Downloads require a modern browser that supports <a href="https://developer.mozilla.org/en-US/docs/Web/API/Blob/Blob#Browser_compatibility">the Blob constructor API</a>.</b></dd>');
}
});
//]]>
</script>
</head>
<body>
<h1>FretFind2D</h1>
<p>
FretFind2D is a two dimensional fretboard design tool.
FretFind2D doesn't just calculate fret spacing.
It models the entire fretboard, strings and frets,
as a system of line segments on a two dimensional plane.
Because of this approach, it can design fretboards for instruments with
multiple scale lengths and non-parallel frets
as well as fretboards for instruments that play just or meantone scales.
</p>
<div id="fretfind">
<dl id="worksheet">
<dd>units</dd>
<dd>
<label><input type="radio" name="units" value="in" checked="checked"/>inches</label><br />
<label><input type="radio" name="units" value="cm" />centimeters</label><br />
<label><input type="radio" name="units" value="mm" />millimeters</label>
</dd>
<dd>display</dd>
<dd>
<label><input type="checkbox" name="showFretboardEdges" id="showFretboardEdges" checked="checked" />show fretboard edges</label><br />
<label><input type="checkbox" name="showStrings" id="showStrings" checked="checked" />show strings</label><br />
<label><input type="checkbox" name="extendFrets" id="extendFrets" />extend frets to edge</label><br />
<label><input type="checkbox" name="showBoundingBox" id="showBoundingBox" />bounding box</label><br />
<label><input type="checkbox" name="showMetas" id="showMetas" checked="checked" />show metas</label>
</dd>
<dd class="help">
Selecting or deselecting any of these options will only affect the display and output files.
No fret calculations will be affected.
</dd>
<dt>scale length</dt>
<dd>
<dl id="length" class="alternative">
<dt id="single">single</dt>
<dd>
<dl>
<dt>fundamental scale length</dt>
<dd><input type="text" id="len" value="25" /></dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
The fundamental scale length is the length of a line drawn from
the middle of the nut to the middle of the bridge.
For single scale length instruments that line is the perpendicular
bisector of both the nut and the bridge.
I call this length "fundamental" because on a standard instrument
with a narrow nut and a wide bridge the outer strings
actually have a slightly longer scale length.
</dd>
</dl>
</dd>
<dt id="multiple">multiple</dt>
<dd>
<dl>
<dt>first string scale length</dt>
<dd><input type="text" id="lenF" value="25" /></dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
Enter the actual scale length of the first (traditional high E) string.
</dd>
<dt>last string scale length</dt>
<dd><input type="text" id="lenL" value="28" /></dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
Enter the actual scale length of the last (traditional low E) string.
</dd>
<dt>perpendicular fret distance</dt>
<dd><input type="text" id="pDist" value="0.5" /></dd>
<dd class="help">
<p>
The perpendicular fret distance
is the ratio of distances along the first and last
string that fall on a line perpendicular to the midline of the neck.
This is used to control the angle of the nut, frets and bridge.
</p>
<p>
Traditionally this property of non-parallel-ly fretted
fretboards is measured by assigning a "perpendicular fret".
"Perpendicular distance" avoids two problems with the "perpendicular fret" method.
First, it is possible that no fret falls into this perpendicular position.
With "perpendicular distance" we avoid fractional frets.
Second, it is possible and even likely with non-equal temperament fretboards that
as a fret crosses the fretboard it will fall at different ratios along the strings.
With "perpendicular distance" we avoid complex calculations
and have more predictable results.
</p>
<p>
A value of 0 results in a perpendicular nut.
A value of 1 results in a perpendicular bridge.
The default 0.5 results in a perpendicular octave fret.
To calculate an appropriate value for any fret,
simply divide the distance of the fret from the nut by the total length of the string.
In twelve tone equal temperament the values look like this:
</p>
<pre>
Fret P.D. Fret P.D.
1 0.05613 13 0.52806
2 0.10910 14 0.55455
3 0.15910 15 0.57955
4 0.20630 16 0.60315
5 0.25085 17 0.62542
6 0.29289 18 0.64645
7 0.33258 19 0.66629
8 0.37004 20 0.68502
9 0.40540 21 0.70270
10 0.43877 22 0.71938
11 0.47027 23 0.73513
12 0.50000 24 0.75000
</pre>
</dd>
</dl>
</dd>
<dt id="individual">individual</dt>
<dd>
<div class="experimental">Danger: Experimental!!!</div>
<dl>
<dt>string scale lengths:</dt>
<dd id="ilengths">
</dd>
<dd class="help">
The scale length is the playing/speaking length of the
string measured from the nut to the bridge.
It is perhaps more properly twice the distance from the nut to the octave fret.
Enter the actual scale length of each string.
</dd>
<dt>perpendicular fret distance</dt>
<dd><input type="text" id="ipDist" value="0.5" /></dd>
<dd class="help">
<p>
The perpendicular fret distance
is the ratio of distances along the first and last
string that fall on a line perpendicular to the midline of the neck.
This is used to control the angle of the nut, frets and bridge.
</p>
<p>
Traditionally this property of non-parallel-ly fretted
fretboards is measured by assigning a "perpendicular fret".
"Perpendicular distance" avoids two problems with the "perpendicular fret" method.
First, it is possible that no fret falls into this perpendicular position.
With "perpendicular distance" we avoid fractional frets.
Second, it is possible and even likely with non-equal temperament fretboards that
as a fret crosses the fretboard it will fall at different ratios along the strings.
With "perpendicular distance" we avoid complex calculations
and have more predictable results.
</p>
<p>
A value of 0 results in a perpendicular nut.
A value of 1 results in a perpendicular bridge.
The default 0.5 results in a perpendicular octave fret.
To calculate an appropriate value for any fret,
simply divide the distance of the fret from the nut by the total length of the string.
In twelve tone equal temperament the values look like this:
</p>
<pre>
Fret P.D. Fret P.D.
1 0.05613 13 0.52806
2 0.10910 14 0.55455
3 0.15910 15 0.57955
4 0.20630 16 0.60315
5 0.25085 17 0.62542
6 0.29289 18 0.64645
7 0.33258 19 0.66629
8 0.37004 20 0.68502
9 0.40540 21 0.70270
10 0.43877 22 0.71938
11 0.47027 23 0.73513
12 0.50000 24 0.75000
</pre>
</dd>
</dl>
</dd>
</dl>
</dd>
<dt>string width at the nut</dt>
<dd><input type="text" id="nutWidth" value="1.375" /></dd>
<dd class="help">
The string width at the nut is the distance along the nut from the center
of the first string to the center of the last string.
I'm using delta x distance
(distance measured along a line drawn perpendicular to the neck's midline)
because I think that is what
you would feel as the width if you were playing an instrument with multiple scale lengths.
It also makes the calculation easier.
</dd>
<dt>string width at the bridge</dt>
<dd><input type="text" id="bridgeWidth" value="2.125" /></dd>
<dd class="help">
The string width at the bridge is the distance along the bridge from the center
of the first string to the center of the last string.
I'm using delta x distance
(distance measured along a line drawn perpendicular to the neck's midline)
because I think that is what
you would feel as the width if you were playing an instrument with multiple scale lengths.
It also makes the calculation easier.
</dd>
<dt>string spacing</dt>
<dd>
<dl id="spacing" class="alternative">
<dt id="equal">equal</dt>
<dd></dd>
<dt id="proportional">proportional</dt>
<dd>
<dl>
<dt>string gauges:</dt>
<dd id="igauges">
</dd>
</dl>
</dd>
</dl>
</dd>
<dd class="help">
<dl>
<dt>Equal:</dt>
<dd>Space out the strings evenly from center to center without regard for the thickness of the strings.</dd>
<dt>Proportional:</dt>
<dd>The spacing accounts for the diameter of the strings so that the empty space between each pair of strings is the same.</dd>
</dl>
<p>
Note that the outer two strings are still assumed to be centered on their coordinates,
i.e. if you enter a nut width of 2" then the outer edges of your outer two strings will
be wider than that by half of the sum of their gauges.
</p>
<p>
Enter the thickness of each string, with String 1 being the thinnest/highest.
For example, a standard set of electric guitar strings, in inches, would be 0.010, 0.013, 0.017, 0.026, 0.036, 0.046.
If you are using metric, please convert your string gauges to metric as well.
</p>
</dd>
<dt>fretboard overhang</dt>
<dd>
<dl id="overhang" class="alternative">
<dt id="equal">equal</dt>
<dd><input type="text" id="oE" value="0.09375" /></dd>
<dt id="nutbridge">nut &amp; bridge</dt>
<dd>
<table>
<tr><td>nut</td><td>
<input type="text" id="oN" value="0.09375" />
</td></tr>
<tr><td>bridge</td><td>
<input type="text" id="oB" value="0.09375" />
</td></tr>
</table>
</dd>
<dt id="firstlast">first & last</dt>
<dd>
<table>
<tr><td>last</td><td>first</td></tr>
<tr><td>
<input type="text" id="oL" value="0.09375" />
</td><td>
<input type="text" id="oF" value="0.09375" />
</td></tr>
</table>
</dd>
<dt id="all">all</dt>
<dd>
<table>
<tr><td> </td><td>last</td><td>first</td></tr>
<tr><td>nut</td><td>
<input type="text" id="oNL" value="0.09375" />
</td><td>
<input type="text" id="oNF" value="0.09375" />
</td></tr>
<tr><td>bridge</td><td>
<input type="text" id="oBL" value="0.09375" />
</td><td>
<input type="text" id="oBF" value="0.09375" />
</td></tr>
</table>
</dd>
</dl>
</dd>
<dd class="help">
The fretboard overhang is the distance from the center of outer strings to edge of nut or bridge.
For fretboards with multiple scale lengths this is calculated as delta x distance,
distance measured along a line drawn perpendicular to the neck's midline.
There are four input modes for overhang.
<dl>
<dt>Equal:</dt><dd>you enter a single value and the overhang will be constant.</dd>
<dt>Nut &amp; Bridge:</dt><dd>allows you to specify one overhang at the nut and another overhang at the bridge.</dd>
<dt>First & Last:</dt><dd>allows you to specify one overhang for the first string and another for the last string.</dd>
<dt>All:</dt><dd>you specify an overhang for all four locations separately.</dd>
</dl>
(Please note, in FretFind the first string is shown on the far right
where the high E string would be on a typical right-handed guitar.
The last string is on the far left, where the low E would be found.)
</dd>
<dt>calculation method</dt>
<dd>
<dl id="scale" class="alternative">
<dt id="et">equal (root 2)</dt>
<dd><input type="text" id="root" value="12" /></dd>
<dt id="scala">just (scala)</dt>
<dd>
<textarea rows="17" id="scl">
! 12tet.scl
!
12 tone equal temperament
12
!
100.0
200.
300.
400.
500.
600.
700.
800.
900.
1000.
1100.
2/1
</textarea>
</dd>
</dl>
</dd>
<dd class="help">
The calculation method determines how FretFind calculates fret placement.
There are two input modes.
<dl>
<dt>Equal:</dt><dd>uses the X<sup>th</sup> root of two, a standard
method for calculating equal temperaments. You enter the number of tones per octave.</dd>
<dt>Scala:</dt><dd>uses a Scala SCL file which allows you to specify
each scale step exactly in either ratios or cents.
If you are interested in creating your own scale, please read this description of the
<a href="http://www.huygens-fokker.org/scala/scl_format.html">Scala scale file format</a>.
Otherwise try a scale from the Scala scale archive, found at the very bottom of the
<a href="http://www.huygens-fokker.org/scala/downloads.html">Scala download page</a>.
You can learn more about Scala at the
<a href="http://www.huygens-fokker.org/scala/">Scala home page</a>.</dd>
</dl>
</dd>
<dt>number of frets</dt>
<dd><input type="text" id="numFrets" value="24" /></dd>
<dd class="help">
This is the number of frets you would like FretFind to calculate.
The number of frets must be an integer.
</dd>
<dt>number of strings</dt>
<dd><input type="text" id="numStrings" value="6" /></dd>
<dd class="help">
The number of strings must be an integer.
If you change the number of strings be sure to update the tuning section below (only useful with non-equal temperament scales).
</dd>
<dt>tuning</dt>
<dd id="tuning">
</dd>
<dd class="help">
Enter the scale step (of the scale defined above) to which each string will be tuned.
For example a standard guitar in the key of E would be tuned 0, 7, 3, 10, 5, 0.
The first string is the string to the far right on the fretboard.
This step is not important for the Equal calculation method.
Entering a tuning for the Scala calculation method will very likely result in partial frets.
</dd>
</dl><!-- worksheet -->
<div id="diagram"><!-- --></div>
<dl id="downloads">
<dt>Link</dt><dd><a href="#" id="bookmark">Link to this design</a></dd>
</dl>
<div id="tables"><!-- --></div>
</div>
<p class="dev">The latest version in development is available on <a href="https://acspike.github.io/FretFind2D/src/fretfind.html">GitHub</a>.</p>
<a href="http://github.com/acspike/FretFind2D"><img style="position: absolute; top: 0; right: 0; border: 0;" src="http://s3.amazonaws.com/github/ribbons/forkme_right_red_aa0000.png" alt="Fork me on GitHub" /></a>
</body>
</html>
|
auchenberg/css-reloader | 15 | cssreloader.content.js | (function() {
var shortcutSettings;
// Register the keyboard shortcut listener, listen for extension messages,
// and request the user's shortcut settings from the background page.
function initialize() {
document.addEventListener("keydown", onWindowKeyDown, false);
chrome.extension.onRequest.addListener(onExtensionRequest);
chrome.extension.sendRequest({'action' : 'getSettings'}, onGetSettings);
}
// Force every linked stylesheet to re-fetch by rewriting its cache-busting
// query parameter with the current timestamp.
function reload() {
    var links = document.querySelectorAll('link[rel=stylesheet][href]');
    for (var i = 0; i < links.length; i++) {
        var link = links[i];
        // strip any cssReloader parameter left over from a previous reload
        var href = link.href.replace(/[?&]cssReloader=([^&$]*)/, '');
        var separator = href.indexOf('?') >= 0 ? '&' : '?';
        link.href = href + separator + 'cssReloader=' + (new Date().valueOf());
    }
}
// Cache the shortcut configuration delivered by the background page.
function onGetSettings(settings) {
shortcutSettings = settings;
}
// Trigger a stylesheet reload when the configured shortcut is pressed.
// Note: throws if the settings reply has not arrived yet, same as before.
function onWindowKeyDown(e) {
    var matches = e.keyIdentifier == shortcutSettings["keyIdentifier"] &&
        e.shiftKey === shortcutSettings["shiftKeySelected"] &&
        e.altKey === shortcutSettings["altKeySelected"] &&
        e.ctrlKey === shortcutSettings["controlKeySelected"];
    if (matches) {
        reload();
    }
}
// Handle "reload" requests sent from elsewhere in the extension.
function onExtensionRequest(request, sender) {
if (request.action == "reload") {
reload();
}
}
// Expose a small API object and start listening immediately.
// NOTE(review): assigned without var/window — creates an implicit global.
CSSreloader = {
reload : reload,
initialize: initialize
};
CSSreloader.initialize();
})();
| (function() {
var shortcutSettings;
// Stylesheet URLs matching these patterns are skipped by reload().
// Fixed: the constructor is RegExp (capital E) — `new Regexp` threw a
// ReferenceError at injection time. Regex literals are used so the dot
// escapes survive; in the original string literals '\.' collapsed to '.'.
var blacklist = [
    /^https?:\/\/use\.typekit\.net\//,
    /^https?:\/\/fonts\.googleapis\.com\//
];
// Register the keyboard shortcut listener, listen for extension messages,
// and request the user's shortcut settings from the background page.
function initialize() {
document.addEventListener("keydown", onWindowKeyDown, false);
chrome.extension.onRequest.addListener(onExtensionRequest);
chrome.extension.sendRequest({'action' : 'getSettings'}, onGetSettings);
}
// Force every linked, non-blacklisted stylesheet to re-fetch by rewriting
// its cache-busting query parameter with the current timestamp.
function reload() {
    var links = document.querySelectorAll('link[rel=stylesheet][href]');
    for (var i = 0; i < links.length; i++) {
        var link = links[i];
        if (isBlacklisted(link.href)) continue;
        // strip any cssReloader parameter left over from a previous reload
        var href = link.href.replace(/[?&]cssReloader=([^&$]*)/, '');
        var separator = href.indexOf('?') >= 0 ? '&' : '?';
        link.href = href + separator + 'cssReloader=' + (new Date().valueOf());
    }
}
// True when the stylesheet URL matches any blacklist pattern.
// Fixed: the original fell off the end and returned undefined for
// non-matching URLs; return false explicitly (backward compatible —
// callers only test truthiness).
function isBlacklisted(href) {
    for (var i = 0, len = blacklist.length; i < len; i++) {
        if (blacklist[i].test(href)) {
            return true;
        }
    }
    return false;
}
// Cache the shortcut configuration delivered by the background page.
function onGetSettings(settings) {
shortcutSettings = settings;
}
// Trigger a stylesheet reload when the configured shortcut is pressed.
// Note: throws if the settings reply has not arrived yet, same as before.
function onWindowKeyDown(e) {
    var matches = e.keyIdentifier == shortcutSettings["keyIdentifier"] &&
        e.shiftKey === shortcutSettings["shiftKeySelected"] &&
        e.altKey === shortcutSettings["altKeySelected"] &&
        e.ctrlKey === shortcutSettings["controlKeySelected"];
    if (matches) {
        reload();
    }
}
// Handle "reload" requests sent from elsewhere in the extension.
function onExtensionRequest(request, sender) {
if (request.action == "reload") {
reload();
}
}
// Expose a small API object and start listening immediately.
// NOTE(review): assigned without var/window — creates an implicit global.
CSSreloader = {
reload : reload,
initialize: initialize
};
CSSreloader.initialize();
})();
|
yui/yui3-gallery | 31 | src/gallery-calendar-jumpnav/docs/assets/js/yui-prettify.js | YUI().use('node', function(Y) {
// Insert a named anchor before each numbered source line so that
// LINENUM_<n> URL fragments link directly to a line.
var code = Y.all('.prettyprint.linenums');
if (code.size()) {
    code.each(function(block) {
        var lineNumber = 1;
        block.all('ol li').each(function(item) {
            item.prepend('<a name="LINENUM_' + lineNumber + '"></a>');
            lineNumber++;
        });
    });
    // Translate a legacy LINE_ fragment; clearing the hash first forces
    // the browser to re-navigate to the freshly inserted anchor.
    var fragment = location.hash;
    location.hash = '';
    location.hash = fragment.replace('LINE_', 'LINENUM_');
}
});
| YUI().use('node', function(Y) {
// Insert a named anchor before each numbered source line so that
// LINENUM_<n> URL fragments link directly to a line.
var code = Y.all('.prettyprint.linenums');
if (code.size()) {
    code.each(function(block) {
        var lineNumber = 1;
        block.all('ol li').each(function(item) {
            item.prepend('<a name="LINENUM_' + lineNumber + '"></a>');
            lineNumber++;
        });
    });
    // Translate a legacy LINE_ fragment; clearing the hash first forces
    // the browser to re-navigate to the freshly inserted anchor.
    var fragment = location.hash;
    location.hash = '';
    location.hash = fragment.replace('LINE_', 'LINENUM_');
}
});
|
MarkLogic-Attic/recordloader | 8 | src/recordloader.sh | #!/bin/sh
#
# sample bash script for running RecordLoader
#
# Copyright (c)2005-2012 Mark Logic Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# The use of the Apache License does not indicate that this project is
# affiliated with the Apache Software Foundation.
#
# Fallback: resolve the directory containing the given path, used when no
# GNU-style readlink with -f support is available.
function readlink() {
    DIR=$(echo "${1%/*}")
    (cd "$DIR" && echo "$(pwd -P)")
}
# look for GNU readlink first (OS X, BSD, Solaris)
READLINK=`type -P greadlink`
if [ -z "$READLINK" ]; then
    READLINK=`type -P readlink`
fi
# if readlink is not GNU-style, setting BASE will fail
BASE=`$READLINK -f "$0" 2>/dev/null`
if [ -z "$BASE" ]; then
    # try the bash function, which does not need dirname afterwards
    BASE=$(readlink "$0")
else
    BASE=`dirname $BASE`
fi
BASE=`dirname $BASE`
if [ -z "$BASE" ]; then
    echo Error initializing environment from $READLINK
    $READLINK --help
    exit 1
fi
# classpath: RecordLoader plus its supporting jars
CP=$BASE/../lib/recordloader.jar
CP=$CP:$HOME/lib/java/svnkit.jar
CP=$CP:$HOME/lib/java/xcc.jar
CP=$CP:$HOME/lib/java/xpp3.jar
FILES=
VMARGS=-Xincgc
# OS X defaults to MacRoman
VMARGS=$VMARGS" -Dfile.encoding=UTF-8"
# Split arguments: existing paths are input files, everything else is a JVM
# argument. "$@" and the quoted "$a" keep arguments containing spaces intact
# (the original's unquoted $* split them and broke the -e test).
for a in "$@"; do
    if [ -e "$a" ]; then
        FILES="$FILES $a"
    else
        VMARGS="$VMARGS $a"
    fi
done
# prefer the JVM from JAVA_HOME when it is set
if [ -d "$JAVA_HOME" ]; then
    JAVA=$JAVA_HOME/bin/java
else
    JAVA=java
fi
$JAVA -cp $CP $VMARGS com.marklogic.ps.RecordLoader $FILES
# end recordloader.sh
| #!/bin/sh
#
# sample bash script for running RecordLoader
#
# Copyright (c)2005-2012 Mark Logic Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# The use of the Apache License does not indicate that this project is
# affiliated with the Apache Software Foundation.
#
# Fallback: resolve the directory containing the given path, used when no
# GNU-style readlink with -f support is available.
function readlink() {
    DIR=$(echo "${1%/*}")
    (cd "$DIR" && echo "$(pwd -P)")
}
# look for GNU readlink first (OS X, BSD, Solaris)
READLINK=`type -P greadlink`
if [ -z "$READLINK" ]; then
    READLINK=`type -P readlink`
fi
# if readlink is not GNU-style, setting BASE will fail
BASE=`$READLINK -f "$0" 2>/dev/null`
if [ -z "$BASE" ]; then
    # try the bash function, which does not need dirname afterwards
    BASE=$(readlink "$0")
else
    BASE=`dirname $BASE`
fi
BASE=`dirname $BASE`
if [ -z "$BASE" ]; then
    echo Error initializing environment from $READLINK
    $READLINK --help
    exit 1
fi
# classpath: RecordLoader plus its supporting jars
CP=$BASE/../lib/recordloader.jar
CP=$CP:$HOME/lib/java/xcc.jar
CP=$CP:$HOME/lib/java/xpp3.jar
FILES=
VMARGS=-Xincgc
# OS X defaults to MacRoman
VMARGS=$VMARGS" -Dfile.encoding=UTF-8"
# Split arguments: existing paths are input files, everything else is a JVM
# argument. "$@" and the quoted "$a" keep arguments containing spaces intact
# (the original's unquoted $* split them and broke the -e test).
for a in "$@"; do
    if [ -e "$a" ]; then
        FILES="$FILES $a"
    else
        VMARGS="$VMARGS $a"
    fi
done
# prefer the JVM from JAVA_HOME when it is set
if [ -d "$JAVA_HOME" ]; then
    JAVA=$JAVA_HOME/bin/java
else
    JAVA=java
fi
$JAVA -cp $CP $VMARGS com.marklogic.ps.RecordLoader $FILES
# end recordloader.sh
|
fryn/html5slider | 12 | html5slider.js | /*
html5slider - a JS implementation of <input type=range> for Firefox 16 and up
https://github.com/fryn/html5slider
Copyright (c) 2010-2012 Frank Yan, <http://frankyan.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
(function() {
  // test for native support
  var test = document.createElement('input');
  try {
    test.type = 'range';
    // A browser with native range support keeps the assigned type; bail out.
    if (test.type == 'range')
      return;
  } catch (e) {
    return;
  }
  // test for required property support
  test.style.background = 'linear-gradient(red, red)';
  if (!test.style.backgroundImage || !('MozAppearance' in test.style) ||
      !document.mozSetImageElement || !this.MutationObserver)
    return;
  // Shared <hr> element painted as the thumb via -moz-element(); created
  // lazily by transform() on first use.
  var scale;
  var isMac = navigator.platform == 'MacIntel';
  // Thumb geometry matches the native theme (Mac vs. non-Mac).
  var thumb = {
    radius: isMac ? 9 : 6,
    width: isMac ? 22 : 12,
    height: isMac ? 16 : 20
  };
  // CSS gradient that draws the horizontal track line.
  var track = 'linear-gradient(transparent ' + (isMac ?
    '6px, #999 6px, #999 7px, #ccc 8px, #bbb 9px, #bbb 10px, transparent 10px' :
    '9px, #999 9px, #bbb 10px, #fff 11px, transparent 11px') +
    ', transparent)';
  var styles = {
    'min-width': thumb.width + 'px',
    'min-height': thumb.height + 'px',
    'max-height': thumb.height + 'px',
    padding: '0 0 ' + (isMac ? '2px' : '1px'),
    border: 0,
    'border-radius': 0,
    cursor: 'default',
    'text-indent': '-999999px' // -moz-user-select: none; breaks mouse capture
  };
  // Observer config: only these attributes affect rendering.
  var options = {
    attributes: true,
    attributeFilter: ['min', 'max', 'step', 'value']
  };
  var forEach = Array.prototype.forEach;
  // Reusable 'change' event dispatched whenever the value actually changes.
  var onChange = document.createEvent('HTMLEvents');
  onChange.initEvent('change', true, false);
  if (document.readyState == 'loading')
    document.addEventListener('DOMContentLoaded', initialize, true);
  else
    initialize();
function initialize() {
// create initial sliders
forEach.call(document.querySelectorAll('input[type=range]'), transform);
// create sliders on-the-fly
new MutationObserver(function(mutations) {
mutations.forEach(function(mutation) {
if (mutation.addedNodes)
forEach.call(mutation.addedNodes, function(node) {
check(node);
if (node.childElementCount)
forEach.call(node.querySelectorAll('input'), check);
});
});
}).observe(document, { childList: true, subtree: true });
}
function check(input) {
if (input.localName == 'input' && input.type != 'range' &&
input.getAttribute('type') == 'range')
transform(input);
}
function transform(slider) {
var isValueSet, areAttrsSet, isChanged, isClick, prevValue, rawValue, prevX;
var min, max, step, range, value = slider.value;
// lazily create shared slider affordance
if (!scale) {
scale = document.body.appendChild(document.createElement('hr'));
style(scale, {
'-moz-appearance': isMac ? 'scale-horizontal' : 'scalethumb-horizontal',
display: 'block',
visibility: 'visible',
opacity: 1,
position: 'fixed',
top: '-999999px'
});
document.mozSetImageElement('__sliderthumb__', scale);
}
// reimplement value and type properties
var getValue = function() { return '' + value; };
var setValue = function setValue(val) {
value = '' + val;
isValueSet = true;
draw();
delete slider.value;
slider.value = value;
slider.__defineGetter__('value', getValue);
slider.__defineSetter__('value', setValue);
};
slider.__defineGetter__('value', getValue);
slider.__defineSetter__('value', setValue);
slider.__defineGetter__('type', function() { return 'range'; });
// sync properties with attributes
['min', 'max', 'step'].forEach(function(prop) {
if (slider.hasAttribute(prop))
areAttrsSet = true;
slider.__defineGetter__(prop, function() {
return this.hasAttribute(prop) ? this.getAttribute(prop) : '';
});
slider.__defineSetter__(prop, function(val) {
val === null ? this.removeAttribute(prop) : this.setAttribute(prop, val);
});
});
// initialize slider
slider.readOnly = true;
style(slider, styles);
update();
new MutationObserver(function(mutations) {
mutations.forEach(function(mutation) {
if (mutation.attributeName != 'value') {
update();
areAttrsSet = true;
}
// note that value attribute only sets initial value
else if (!isValueSet) {
value = slider.getAttribute('value');
draw();
}
});
}).observe(slider, options);
slider.addEventListener('mousedown', onDragStart, true);
slider.addEventListener('keydown', onKeyDown, true);
slider.addEventListener('focus', onFocus, true);
slider.addEventListener('blur', onBlur, true);
function onDragStart(e) {
isClick = true;
setTimeout(function() { isClick = false; }, 0);
if (e.button || !range)
return;
var width = parseFloat(getComputedStyle(this, 0).width);
var multiplier = (width - thumb.width) / range;
if (!multiplier)
return;
// distance between click and center of thumb
var dev = e.clientX - this.getBoundingClientRect().left - thumb.width / 2 -
(value - min) * multiplier;
// if click was not on thumb, move thumb to click location
if (Math.abs(dev) > thumb.radius) {
isChanged = true;
this.value -= -dev / multiplier;
}
rawValue = value;
prevX = e.clientX;
this.addEventListener('mousemove', onDrag, true);
this.addEventListener('mouseup', onDragEnd, true);
}
function onDrag(e) {
var width = parseFloat(getComputedStyle(this, 0).width);
var multiplier = (width - thumb.width) / range;
if (!multiplier)
return;
rawValue += (e.clientX - prevX) / multiplier;
prevX = e.clientX;
isChanged = true;
this.value = rawValue;
}
function onDragEnd() {
this.removeEventListener('mousemove', onDrag, true);
this.removeEventListener('mouseup', onDragEnd, true);
}
function onKeyDown(e) {
if (e.keyCode > 36 && e.keyCode < 41) { // 37-40: left, up, right, down
onFocus.call(this);
isChanged = true;
this.value = value + (e.keyCode == 38 || e.keyCode == 39 ? step : -step);
}
}
function onFocus() {
if (!isClick)
this.style.boxShadow = !isMac ? '0 0 0 2px #fb0' :
'inset 0 0 20px rgba(0,127,255,.1), 0 0 1px rgba(0,127,255,.4)';
}
function onBlur() {
this.style.boxShadow = '';
}
// determines whether value is valid number in attribute form
function isAttrNum(value) {
return !isNaN(value) && +value == parseFloat(value);
}
// validates min, max, and step attributes and redraws
function update() {
min = isAttrNum(slider.min) ? +slider.min : 0;
max = isAttrNum(slider.max) ? +slider.max : 100;
if (max < min)
max = min > 100 ? min : 100;
step = isAttrNum(slider.step) && slider.step > 0 ? +slider.step : 1;
range = max - min;
draw(true);
}
// recalculates value property
function calc() {
if (!isValueSet && !areAttrsSet)
value = slider.getAttribute('value');
if (!isAttrNum(value))
value = (min + max) / 2;;
// snap to step intervals (WebKit sometimes does not - bug?)
value = Math.round((value - min) / step) * step + min;
if (value < min)
value = min;
else if (value > max)
value = min + ~~(range / step) * step;
}
// renders slider using CSS background ;)
function draw(attrsModified) {
calc();
if (isChanged && value != prevValue)
slider.dispatchEvent(onChange);
isChanged = false;
if (!attrsModified && value == prevValue)
return;
prevValue = value;
var position = range ? (value - min) / range * 100 : 0;
var bg = '-moz-element(#__sliderthumb__) ' + position + '% no-repeat, ';
style(slider, { background: bg + track });
}
}
function style(element, styles) {
for (var prop in styles)
element.style.setProperty(prop, styles[prop], 'important');
}
})();
| /*
html5slider - a JS implementation of <input type=range> for Firefox 16 and up
https://github.com/fryn/html5slider
Copyright (c) 2010-2012 Frank Yan, <http://frankyan.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
(function() {
// test for native support
var test = document.createElement('input');
try {
test.type = 'range';
if (test.type == 'range')
return;
} catch (e) {
return;
}
// test for required property support
test.style.background = 'linear-gradient(red, red)';
if (!test.style.backgroundImage || !('MozAppearance' in test.style) ||
!document['mozSetImageElement'] || !this.MutationObserver)
return;
var scale;
var isMac = navigator.platform == 'MacIntel';
var thumb = {
"radius": isMac ? 9 : 6,
"width": isMac ? 22 : 12,
"height": isMac ? 16 : 20
};
var track = 'linear-gradient(transparent ' + (isMac ?
'6px, #999 6px, #999 7px, #ccc 8px, #bbb 9px, #bbb 10px, transparent 10px' :
'9px, #999 9px, #bbb 10px, #fff 11px, transparent 11px') +
', transparent)';
var styles = {
"min-width": thumb.width + 'px',
"min-height": thumb.height + 'px',
"max-height": thumb.height + 'px',
"padding": '0 0 ' + (isMac ? '2px' : '1px'),
"border": 0,
"border-radius": 0,
"cursor": 'default',
"text-indent": '-999999px' // -moz-user-select: none; breaks mouse capture
};
var options = {
"attributes": true,
"attributeFilter": ['min', 'max', 'step', 'value']
};
var forEach = Array.prototype.forEach;
var onChange = document.createEvent('HTMLEvents');
onChange.initEvent('change', true, false);
if (document.readyState == 'loading')
document.addEventListener('DOMContentLoaded', initialize, true);
else
initialize();
function initialize() {
// create initial sliders
forEach.call(document.querySelectorAll('input[type=range]'), transform);
// create sliders on-the-fly
new MutationObserver(function(mutations) {
mutations.forEach(function(mutation) {
if (mutation.addedNodes)
forEach.call(mutation.addedNodes, function(node) {
check(node);
if (node.childElementCount)
forEach.call(node.querySelectorAll('input'), check);
});
});
}).observe(document, {"childList": true, "subtree": true});
}
function check(input) {
if (input.localName == 'input' && input.type != 'range' &&
input.getAttribute('type') == 'range')
transform(input);
}
function transform(slider) {
var isValueSet, areAttrsSet, isChanged, isClick, prevValue, rawValue, prevX;
var min, max, step, range, value = slider.value;
// lazily create shared slider affordance
if (!scale) {
scale = document.body.appendChild(document.createElement('hr'));
style(scale, {
"-moz-appearance": isMac ? 'scale-horizontal' : 'scalethumb-horizontal',
"display": 'block',
"visibility": 'visible',
"opacity": 1,
"position": 'fixed',
"top": '-999999px'
});
document['mozSetImageElement']('__sliderthumb__', scale);
}
// reimplement value and type properties
var getValue = function() { return '' + value; };
var setValue = function setValue(val) {
value = '' + val;
isValueSet = true;
draw();
delete slider.value;
slider.value = value;
slider.__defineGetter__('value', getValue);
slider.__defineSetter__('value', setValue);
};
slider.__defineGetter__('value', getValue);
slider.__defineSetter__('value', setValue);
slider.__defineGetter__('type', function() { return 'range'; });
// sync properties with attributes
['min', 'max', 'step'].forEach(function(prop) {
if (slider.hasAttribute(prop))
areAttrsSet = true;
slider.__defineGetter__(prop, function() {
return this.hasAttribute(prop) ? this.getAttribute(prop) : '';
});
slider.__defineSetter__(prop, function(val) {
if (val === null) {
this.removeAttribute(prop);
}
else {
this.setAttribute(prop, val);
}
});
});
// initialize slider
slider['readOnly'] = true;
style(slider, styles);
update();
new MutationObserver(function(mutations) {
mutations.forEach(function(mutation) {
if (mutation.attributeName != 'value') {
update();
areAttrsSet = true;
}
// note that value attribute only sets initial value
else if (!isValueSet) {
value = slider.getAttribute('value');
draw();
}
});
}).observe(slider, options);
slider.addEventListener('mousedown', onDragStart, true);
slider.addEventListener('keydown', onKeyDown, true);
slider.addEventListener('focus', onFocus, true);
slider.addEventListener('blur', onBlur, true);
function onDragStart(e) {
isClick = true;
setTimeout(function() { isClick = false; }, 0);
if (e.button || !range)
return;
var width = parseFloat(window['getComputedStyle'](this, 0).width);
var multiplier = (width - thumb.width) / range;
if (!multiplier)
return;
// distance between click and center of thumb
var dev = e.clientX - this.getBoundingClientRect().left - thumb.width / 2 -
(value - min) * multiplier;
// if click was not on thumb, move thumb to click location
if (Math.abs(dev) > thumb.radius) {
isChanged = true;
this.value -= -dev / multiplier;
}
rawValue = value;
prevX = e.clientX;
this.addEventListener('mousemove', onDrag, true);
this.addEventListener('mouseup', onDragEnd, true);
}
function onDrag(e) {
var width = parseFloat(window['getComputedStyle'](this, 0).width);
var multiplier = (width - thumb.width) / range;
if (!multiplier)
return;
rawValue += (e.clientX - prevX) / multiplier;
prevX = e.clientX;
isChanged = true;
this.value = rawValue;
}
function onDragEnd() {
this.removeEventListener('mousemove', onDrag, true);
this.removeEventListener('mouseup', onDragEnd, true);
}
function onKeyDown(e) {
if (e.keyCode > 36 && e.keyCode < 41) { // 37-40: left, up, right, down
onFocus.call(this);
isChanged = true;
this.value = value + (e.keyCode == 38 || e.keyCode == 39 ? step : -step);
}
}
function onFocus() {
if (!isClick)
this.style.boxShadow = !isMac ? '0 0 0 2px #fb0' :
'inset 0 0 20px rgba(0,127,255,.1), 0 0 1px rgba(0,127,255,.4)';
}
function onBlur() {
this.style.boxShadow = '';
}
// determines whether value is valid number in attribute form
function isAttrNum(value) {
return !isNaN(value) && +value == parseFloat(value);
}
// validates min, max, and step attributes and redraws
function update() {
min = isAttrNum(slider.min) ? +slider.min : 0;
max = isAttrNum(slider.max) ? +slider.max : 100;
if (max < min)
max = min > 100 ? min : 100;
step = isAttrNum(slider.step) && slider.step > 0 ? +slider.step : 1;
range = max - min;
draw(true);
}
// recalculates value property
function calc() {
if (!isValueSet && !areAttrsSet)
value = slider.getAttribute('value');
if (!isAttrNum(value))
value = (min + max) / 2;;
// snap to step intervals (WebKit sometimes does not - bug?)
value = Math.round((value - min) / step) * step + min;
if (value < min)
value = min;
else if (value > max)
value = min + ~~(range / step) * step;
}
/**
* @param {boolean=} attrsModified
*/
function draw(attrsModified) {
// renders slider using CSS background ;)
calc();
if (isChanged && value != prevValue)
slider.dispatchEvent(onChange);
isChanged = false;
if (!attrsModified && value == prevValue)
return;
prevValue = value;
var position = range ? (value - min) / range * 100 : 0;
var bg = '-moz-element(#__sliderthumb__) ' + position + '% no-repeat, ';
style(slider, {"background": bg + track});
}
}
function style(element, styles) {
for (var prop in styles)
element.style.setProperty(prop, styles[prop], 'important');
}
})();
|
fredrik-johansson/flint | 37 | arith/cyclotomic_cos_polynomial.c | /*
Copyright (C) 2011 Fredrik Johansson
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/
#include <math.h>
#include "mpfr.h"
#include "arith.h"
#define MAX_32BIT 58
static const int lookup_table[MAX_32BIT][28] =
{
{-1, 1}, {1, 1}, {1, 2}, {0, 1}, {-1, 2, 4}, {-1, 2},
{-1, -4, 4, 8}, {-1, 0, 2}, {1, -6, 0, 8}, {-1, -2, 4},
{1, 6, -12, -32, 16, 32}, {-3, 0, 4}, {-1, 6, 24, -32, -80, 32, 64},
{1, -4, -4, 8}, {1, 8, -16, -8, 16}, {1, 0, -8, 0, 8},
{1, -8, -40, 80, 240, -192, -448, 128, 256}, {-1, -6, 0, 8},
{1, 10, -40, -160, 240, 672, -448, -1024, 256, 512}, {5, 0, -20, 0, 16},
{1, -16, 32, 48, -96, -32, 64}, {-1, 6, 12, -32, -16, 32},
{-1, -12, 60, 280, -560, -1792, 1792, 4608, -2304, -5120, 1024, 2048},
{1, 0, -16, 0, 16}, {-1, 10, 100, -40, -800, 32, 2240, 0, -2560, 0,
1024}, {-1, -6, 24, 32, -80, -32, 64},
{1, 18, 0, -240, 0, 864, 0, -1152, 0, 512}, {-7, 0, 56, 0, -112, 0, 64},
{-1, 14, 112, -448, -2016, 4032, 13440, -15360, -42240, 28160, 67584,
-24576, -53248, 8192, 16384}, {1, -8, -16, 8, 16},
{-1, -16, 112, 672, -2016, -8064, 13440, 42240, -42240, -112640, 67584,
159744, -53248, -114688, 16384, 32768},
{1, 0, -32, 0, 160, 0, -256, 0, 128},
{1, -24, 48, 344, -688, -1088, 2176, 1280, -2560, -512, 1024},
{1, 8, -40, -80, 240, 192, -448, -128, 256},
{1, 16, -160, -368, 1760, 2272, -7232, -5504, 13824, 5632, -12288,
-2048, 4096}, {-3, 0, 36, 0, -96, 0, 64},
{-1, 18, 180, -960, -5280, 14784, 59136, -101376, -329472, 366080,
1025024, -745472, -1863680, 860160, 1966080, -524288, -1114112, 131072,
262144}, {-1, 10, 40, -160, -240, 672, 448, -1024, -256, 512},
{1, 24, -48, -632, 1264, 3296, -6592, -6784, 13568, 6144, -12288, -2048,
4096}, {1, 0, -48, 0, 304, 0, -512, 0, 256},
{1, -20, -220, 1320, 7920, -25344, -109824, 219648, 768768, -1025024,
-3075072, 2795520, 7454720, -4587520, -11141120, 4456448, 10027008,
-2359296, -4980736, 524288, 1048576}, {1, 16, 32, -48, -96, 32, 64},
{1, 22, -220, -1760, 7920, 41184, -109824, -439296, 768768, 2562560,
-3075072, -8945664, 7454720, 19496960, -11141120, -26738688, 10027008,
22413312, -4980736, -10485760, 1048576, 2097152},
{-11, 0, 220, 0, -1232, 0, 2816, 0, -2816, 0, 1024},
{1, -24, -144, 248, 1680, -864, -7168, 1152, 13824, -512, -12288, 0,
4096}, {1, -12, -60, 280, 560, -1792, -1792, 4608, 2304, -5120, -1024,
2048}, {-1, -24, 264, 2288, -11440, -64064, 192192, 823680, -1647360,
-5857280, 8200192, 25346048, -25346048, -70189056, 50135040, 127008768,
-63504384, -149422080, 49807360, 110100480, -22020096, -46137344,
4194304, 8388608}, {1, 0, -64, 0, 320, 0, -512, 0, 256},
{-1, 28, 196, -2968, -3136, 66304, 18816, -658816, -53760, 3587584,
78848, -11741184, -57344, 24084480, 16384, -31195136, 0, 24772608, 0,
-11010048, 0, 2097152}, {-1, -10, 100, 40, -800, -32, 2240, 0, -2560,
0, 1024}, {1, 32, -64, -1504, 3008, 16832, -33664, -76288, 152576,
173568, -347136, -210944, 421888, 131072, -262144, -32768, 65536},
{13, 0, -364, 0, 2912, 0, -9984, 0, 16640, 0, -13312, 0, 4096},
{-1, 26, 364, -2912, -21840, 96096, 512512, -1464320, -6223360,
12446720, 44808192, -65175552, -206389248, 222265344, 635043840,
-508035072, -1333592064, 784465920, 1917583360, -807403520,
-1857028096, 530579456, 1157627904, -201326592, -419430400, 33554432,
67108864}, {-1, 18, 0, -240, 0, 864, 0, -1152, 0, 512},
{1, 24, -432, -1208, 15216, 28064, -185024, -263424, 1149184, 1250304,
-4177920, -3356672, 9375744, 5324800, -13123584, -4947968, 11141120,
2490368, -5242880, -524288, 1048576},
{1, 0, -96, 0, 1376, 0, -6656, 0, 13568, 0, -12288, 0, 4096},
{1, -40, 80, 2120, -4240, -31648, 63296, 194432, -388864, -613376,
1226752, 1087488, -2174976, -1097728, 2195456, 589824, -1179648,
-131072, 262144}, {-1, -14, 112, 448, -2016, -4032, 13440, 15360,
-42240, -28160, 67584, 24576, -53248, -8192, 16384}
};
/* The coefficients in 2^d * \prod_{i=1}^d (x - cos(a_i)) are
easily bounded using the binomial theorem. */
static slong
magnitude_bound(slong d)
{
    slong res;
    fmpz_t t;
    fmpz_init(t);
    /* binomial(d, floor(d/2)) bounds the largest coefficient of the monic
       degree-d product; its bit length is the dominant term. */
    fmpz_bin_uiui(t, d, d / 2);
    res = fmpz_bits(t);
    fmpz_clear(t);
    /* Add d bits for the 2^d scaling factor. */
    return FLINT_ABS(res) + d;
}
/* Shift y by s bits into x: multiply by 2^s when s >= 0, otherwise
   floor-divide by 2^(-s). */
static void
fmpz_mul_or_div_2exp(fmpz_t x, fmpz_t y, slong s)
{
    if (s >= 0)
        fmpz_mul_2exp(x, y, s);
    else
        fmpz_fdiv_q_2exp(x, y, -s);
}
/* Balanced product of linear factors (x+alpha_i) using
fixed-point arithmetic with prec bits */
static void
balanced_product(fmpz * c, fmpz * alpha, slong len, slong prec)
{
    if (len == 1)
    {
        /* c(x) = x + alpha[0]; the leading 1 carries the 2^prec scaling. */
        fmpz_one(c + 1);
        fmpz_mul_2exp(c + 1, c + 1, prec);
        fmpz_set(c, alpha);
    }
    else if (len == 2)
    {
        /* (x + alpha[0])(x + alpha[1]); the constant term would carry
           2^(2*prec), so one factor of 2^prec is divided back out. */
        fmpz_mul(c, alpha, alpha + 1);
        fmpz_fdiv_q_2exp(c, c, prec);
        fmpz_add(c + 1, alpha, alpha + 1);
        fmpz_one(c + 2);
        fmpz_mul_2exp(c + 2, c + 2, prec);
    }
    else
    {
        fmpz *L, *R;
        slong i, m;
        m = len / 2;
        /* One allocation holds both halves: L gets m+1 coefficients,
           R gets (len - m) + 1. */
        L = _fmpz_vec_init(len + 2);
        R = L + m + 1;
        balanced_product(L, alpha, m, prec);
        balanced_product(R, alpha + m, len - m, prec);
        _fmpz_poly_mul(c, R, len - m + 1, L, m + 1);
        /* Remove the extra factor of 2^prec introduced by multiplying two
           scaled polynomials. */
        for (i = 0; i < len + 1; i++)
            fmpz_fdiv_q_2exp(c + i, c + i, prec);
        _fmpz_vec_clear(L, len + 2);
    }
}
/* Write the d+1 coefficients of the minimal polynomial of cos(2*pi/n) into
   coeffs.  Small n come from the lookup table, odd primes from a direct
   formula, and the general case from a fixed-point product over the
   primitive roots computed with MPFR. */
void
_arith_cos_minpoly(fmpz * coeffs, slong d, ulong n)
{
    slong i, j;
    fmpz * alpha;
    fmpz_t half;
    mpfr_t t, u;
    flint_bitcnt_t prec;
    slong exp;
    /* Precomputed coefficients for every n up to MAX_32BIT. */
    if (n <= MAX_32BIT)
    {
        for (i = 0; i <= d; i++)
            fmpz_set_si(coeffs + i, lookup_table[n - 1][i]);
        return;
    }
    /* Direct formula for odd primes > 3 */
    if (n_is_prime(n))
    {
        slong s = (n - 1) / 2;
        /* The two lowest coefficients depend on s mod 4. */
        switch (s % 4)
        {
            case 0:
                fmpz_set_si(coeffs, WORD(1));
                fmpz_set_si(coeffs + 1, -s);
                break;
            case 1:
                fmpz_set_si(coeffs, WORD(1));
                fmpz_set_si(coeffs + 1, s + 1);
                break;
            case 2:
                fmpz_set_si(coeffs, WORD(-1));
                fmpz_set_si(coeffs + 1, s);
                break;
            case 3:
                fmpz_set_si(coeffs, WORD(-1));
                fmpz_set_si(coeffs + 1, -s - 1);
                break;
        }
        /* Remaining coefficients follow from a two-term recurrence. */
        for (i = 2; i <= s; i++)
        {
            slong b = (s - i) % 2;
            fmpz_mul2_uiui(coeffs + i, coeffs + i - 2, s+i-b, s+2-b-i);
            fmpz_divexact2_uiui(coeffs + i, coeffs + i, i, i-1);
            fmpz_neg(coeffs + i, coeffs + i);
        }
        return;
    }
    /* Working precision: coefficient magnitude bound plus guard bits for
       the final rounding. */
    prec = magnitude_bound(d) + 5 + FLINT_BIT_COUNT(d);
    alpha = _fmpz_vec_init(d);
    fmpz_init(half);
    mpfr_init2(t, prec);
    mpfr_init2(u, prec);
    /* half = 2^(prec-1): added before the floor-division to round to
       nearest. */
    fmpz_one(half);
    fmpz_mul_2exp(half, half, prec - 1);
    /* t = pi / n */
    mpfr_const_pi(t, prec);
    mpfr_div_ui(t, t, n, MPFR_RNDN);
    /* alpha[j] = -cos(2*pi*i/n) for each i coprime to n, stored as a
       fixed-point integer scaled by 2^prec. */
    for (i = j = 0; j < d; i++)
    {
        if (n_gcd(n, i) == 1)
        {
            mpfr_mul_ui(u, t, 2 * i, MPFR_RNDN);
            mpfr_cos(u, u, MPFR_RNDN);
            mpfr_neg(u, u, MPFR_RNDN);
            exp = mpfr_get_z_2exp(_fmpz_promote(alpha + j), u);
            _fmpz_demote_val(alpha + j);
            fmpz_mul_or_div_2exp(alpha + j, alpha + j, exp + prec);
            j++;
        }
    }
    balanced_product(coeffs, alpha, d, prec);
    /* Scale and round */
    for (i = 0; i < d + 1; i++)
    {
        slong r = d;
        /* n a power of two: scale by 2^(d-1) instead of 2^d. */
        if ((n & (n - 1)) == 0)
            r--;
        fmpz_mul_2exp(coeffs + i, coeffs + i, r);
        fmpz_add(coeffs + i, coeffs + i, half);
        fmpz_fdiv_q_2exp(coeffs + i, coeffs + i, prec);
    }
    fmpz_clear(half);
    mpfr_clear(t);
    mpfr_clear(u);
    _fmpz_vec_clear(alpha, d);
}
/* Compute the minimal polynomial of cos(2*pi/n); n == 0 yields the unit
   polynomial. */
void
arith_cos_minpoly(fmpz_poly_t poly, ulong n)
{
    slong d;

    if (n == 0)
    {
        fmpz_poly_set_ui(poly, UWORD(1));
        return;
    }

    /* Degree of the minimal polynomial of cos(2*pi/n). */
    d = (n <= 2) ? 1 : n_euler_phi(n) / 2;

    fmpz_poly_fit_length(poly, d + 1);
    _arith_cos_minpoly(poly->coeffs, d, n);
    _fmpz_poly_set_length(poly, d + 1);
}
| /*
Copyright (C) 2011 Fredrik Johansson
This file is part of FLINT.
FLINT is free software: you can redistribute it and/or modify it under
the terms of the GNU Lesser General Public License (LGPL) as published
by the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version. See <https://www.gnu.org/licenses/>.
*/
#include <math.h>
#include "mpfr.h"
#include "arith.h"
#define MAX_32BIT 58
static const int lookup_table[MAX_32BIT][28] =
{
{-1, 1}, {1, 1}, {1, 2}, {0, 1}, {-1, 2, 4}, {-1, 2},
{-1, -4, 4, 8}, {-1, 0, 2}, {1, -6, 0, 8}, {-1, -2, 4},
{1, 6, -12, -32, 16, 32}, {-3, 0, 4}, {-1, 6, 24, -32, -80, 32, 64},
{1, -4, -4, 8}, {1, 8, -16, -8, 16}, {1, 0, -8, 0, 8},
{1, -8, -40, 80, 240, -192, -448, 128, 256}, {-1, -6, 0, 8},
{1, 10, -40, -160, 240, 672, -448, -1024, 256, 512}, {5, 0, -20, 0, 16},
{1, -16, 32, 48, -96, -32, 64}, {-1, 6, 12, -32, -16, 32},
{-1, -12, 60, 280, -560, -1792, 1792, 4608, -2304, -5120, 1024, 2048},
{1, 0, -16, 0, 16}, {-1, 10, 100, -40, -800, 32, 2240, 0, -2560, 0,
1024}, {-1, -6, 24, 32, -80, -32, 64},
{1, 18, 0, -240, 0, 864, 0, -1152, 0, 512}, {-7, 0, 56, 0, -112, 0, 64},
{-1, 14, 112, -448, -2016, 4032, 13440, -15360, -42240, 28160, 67584,
-24576, -53248, 8192, 16384}, {1, -8, -16, 8, 16},
{-1, -16, 112, 672, -2016, -8064, 13440, 42240, -42240, -112640, 67584,
159744, -53248, -114688, 16384, 32768},
{1, 0, -32, 0, 160, 0, -256, 0, 128},
{1, -24, 48, 344, -688, -1088, 2176, 1280, -2560, -512, 1024},
{1, 8, -40, -80, 240, 192, -448, -128, 256},
{1, 16, -160, -368, 1760, 2272, -7232, -5504, 13824, 5632, -12288,
-2048, 4096}, {-3, 0, 36, 0, -96, 0, 64},
{-1, 18, 180, -960, -5280, 14784, 59136, -101376, -329472, 366080,
1025024, -745472, -1863680, 860160, 1966080, -524288, -1114112, 131072,
262144}, {-1, 10, 40, -160, -240, 672, 448, -1024, -256, 512},
{1, 24, -48, -632, 1264, 3296, -6592, -6784, 13568, 6144, -12288, -2048,
4096}, {1, 0, -48, 0, 304, 0, -512, 0, 256},
{1, -20, -220, 1320, 7920, -25344, -109824, 219648, 768768, -1025024,
-3075072, 2795520, 7454720, -4587520, -11141120, 4456448, 10027008,
-2359296, -4980736, 524288, 1048576}, {1, 16, 32, -48, -96, 32, 64},
{1, 22, -220, -1760, 7920, 41184, -109824, -439296, 768768, 2562560,
-3075072, -8945664, 7454720, 19496960, -11141120, -26738688, 10027008,
22413312, -4980736, -10485760, 1048576, 2097152},
{-11, 0, 220, 0, -1232, 0, 2816, 0, -2816, 0, 1024},
{1, -24, -144, 248, 1680, -864, -7168, 1152, 13824, -512, -12288, 0,
4096}, {1, -12, -60, 280, 560, -1792, -1792, 4608, 2304, -5120, -1024,
2048}, {-1, -24, 264, 2288, -11440, -64064, 192192, 823680, -1647360,
-5857280, 8200192, 25346048, -25346048, -70189056, 50135040, 127008768,
-63504384, -149422080, 49807360, 110100480, -22020096, -46137344,
4194304, 8388608}, {1, 0, -64, 0, 320, 0, -512, 0, 256},
{-1, 28, 196, -2968, -3136, 66304, 18816, -658816, -53760, 3587584,
78848, -11741184, -57344, 24084480, 16384, -31195136, 0, 24772608, 0,
-11010048, 0, 2097152}, {-1, -10, 100, 40, -800, -32, 2240, 0, -2560,
0, 1024}, {1, 32, -64, -1504, 3008, 16832, -33664, -76288, 152576,
173568, -347136, -210944, 421888, 131072, -262144, -32768, 65536},
{13, 0, -364, 0, 2912, 0, -9984, 0, 16640, 0, -13312, 0, 4096},
{-1, 26, 364, -2912, -21840, 96096, 512512, -1464320, -6223360,
12446720, 44808192, -65175552, -206389248, 222265344, 635043840,
-508035072, -1333592064, 784465920, 1917583360, -807403520,
-1857028096, 530579456, 1157627904, -201326592, -419430400, 33554432,
67108864}, {-1, 18, 0, -240, 0, 864, 0, -1152, 0, 512},
{1, 24, -432, -1208, 15216, 28064, -185024, -263424, 1149184, 1250304,
-4177920, -3356672, 9375744, 5324800, -13123584, -4947968, 11141120,
2490368, -5242880, -524288, 1048576},
{1, 0, -96, 0, 1376, 0, -6656, 0, 13568, 0, -12288, 0, 4096},
{1, -40, 80, 2120, -4240, -31648, 63296, 194432, -388864, -613376,
1226752, 1087488, -2174976, -1097728, 2195456, 589824, -1179648,
-131072, 262144}, {-1, -14, 112, 448, -2016, -4032, 13440, 15360,
-42240, -28160, 67584, 24576, -53248, -8192, 16384}
};
/* The coefficients in 2^d * \prod_{i=1}^d (x - cos(a_i)) are
easily bounded using the binomial theorem. */
static slong
magnitude_bound(slong d)
{
slong res;
fmpz_t t;
fmpz_init(t);
fmpz_bin_uiui(t, d, d / 2);
res = fmpz_bits(t);
fmpz_clear(t);
return FLINT_ABS(res) + d;
}
static void
fmpz_mul_or_div_2exp(fmpz_t x, fmpz_t y, slong s)
{
if (s >= 0)
fmpz_mul_2exp(x, y, s);
else
fmpz_fdiv_q_2exp(x, y, -s);
}
/* Balanced product of linear factors (x+alpha_i) using
fixed-point arithmetic with prec bits */
static void
balanced_product(fmpz * c, fmpz * alpha, slong len, slong prec)
{
if (len == 1)
{
fmpz_one(c + 1);
fmpz_mul_2exp(c + 1, c + 1, prec);
fmpz_set(c, alpha);
}
else if (len == 2)
{
fmpz_mul(c, alpha, alpha + 1);
fmpz_fdiv_q_2exp(c, c, prec);
fmpz_add(c + 1, alpha, alpha + 1);
fmpz_one(c + 2);
fmpz_mul_2exp(c + 2, c + 2, prec);
}
else
{
fmpz *L, *R;
slong i, m;
m = len / 2;
L = _fmpz_vec_init(len + 2);
R = L + m + 1;
balanced_product(L, alpha, m, prec);
balanced_product(R, alpha + m, len - m, prec);
_fmpz_poly_mul(c, R, len - m + 1, L, m + 1);
for (i = 0; i < len + 1; i++)
fmpz_fdiv_q_2exp(c + i, c + i, prec);
_fmpz_vec_clear(L, len + 2);
}
}
void
_arith_cos_minpoly(fmpz * coeffs, slong d, ulong n)
{
slong i, j;
fmpz * alpha;
fmpz_t half;
mpfr_t t, u;
flint_bitcnt_t prec;
slong exp;
if (n <= MAX_32BIT)
{
for (i = 0; i <= d; i++)
fmpz_set_si(coeffs + i, lookup_table[n - 1][i]);
return;
}
/* Direct formula for odd primes > 3 */
if (n_is_prime(n))
{
slong s = (n - 1) / 2;
switch (s % 4)
{
case 0:
fmpz_set_si(coeffs, WORD(1));
fmpz_set_si(coeffs + 1, -s);
break;
case 1:
fmpz_set_si(coeffs, WORD(1));
fmpz_set_si(coeffs + 1, s + 1);
break;
case 2:
fmpz_set_si(coeffs, WORD(-1));
fmpz_set_si(coeffs + 1, s);
break;
case 3:
fmpz_set_si(coeffs, WORD(-1));
fmpz_set_si(coeffs + 1, -s - 1);
break;
}
for (i = 2; i <= s; i++)
{
slong b = (s - i) % 2;
fmpz_mul2_uiui(coeffs + i, coeffs + i - 2, s+i-b, s+2-b-i);
fmpz_divexact2_uiui(coeffs + i, coeffs + i, i, i-1);
fmpz_neg(coeffs + i, coeffs + i);
}
return;
}
prec = magnitude_bound(d) + 5 + FLINT_BIT_COUNT(d);
alpha = _fmpz_vec_init(d);
fmpz_init(half);
mpfr_init2(t, prec);
mpfr_init2(u, prec);
fmpz_one(half);
fmpz_mul_2exp(half, half, prec - 1);
mpfr_const_pi(t, prec);
mpfr_div_ui(t, t, n, MPFR_RNDN);
for (i = j = 0; j < d; i++)
{
if (n_gcd(n, i) == 1)
{
mpfr_mul_ui(u, t, 2 * i, MPFR_RNDN);
mpfr_cos(u, u, MPFR_RNDN);
mpfr_neg(u, u, MPFR_RNDN);
exp = mpfr_get_z_2exp(_fmpz_promote(alpha + j), u);
_fmpz_demote_val(alpha + j);
fmpz_mul_or_div_2exp(alpha + j, alpha + j, exp + prec);
j++;
}
}
balanced_product(coeffs, alpha, d, prec);
/* Scale and round */
for (i = 0; i < d + 1; i++)
{
slong r = d;
if ((n & (n - 1)) == 0)
r--;
fmpz_mul_2exp(coeffs + i, coeffs + i, r);
fmpz_add(coeffs + i, coeffs + i, half);
fmpz_fdiv_q_2exp(coeffs + i, coeffs + i, prec);
}
fmpz_clear(half);
mpfr_clear(t);
mpfr_clear(u);
_fmpz_vec_clear(alpha, d);
}
void
arith_cos_minpoly(fmpz_poly_t poly, ulong n)
{
if (n == 0)
{
fmpz_poly_set_ui(poly, UWORD(1));
}
else
{
slong d = (n <= 2) ? 1 : n_euler_phi(n) / 2;
fmpz_poly_fit_length(poly, d + 1);
_arith_cos_minpoly(poly->coeffs, d, n);
_fmpz_poly_set_length(poly, d + 1);
}
}
|
python-postgres/fe | 119 | postgresql/versionstring.py | ##
# .versionstring
##
"""
PostgreSQL version string parsing.
>>> postgresql.versionstring.split('8.0.1')
(8, 0, 1, None, None)
"""
def split(vstr: str) -> tuple:
    """
    Split a PostgreSQL version string into a tuple.

    (major, minor, patch, ..., state_class, state_level)

    Any space-separated annotation following the version number (as some
    server builds append, e.g. "9.2.4 (Debian 9.2.4-1)") is ignored.
    """
    # Keep only the first whitespace-delimited token so annotated strings
    # parse the same as the bare version number.
    v = vstr.strip().split(' ')[0].split('.')
    # Get rid of the numbers around the state_class (beta,a,dev,alpha, etc)
    state_class = v[-1].strip('0123456789')
    if state_class:
        # e.g. '1b2' -> last_version '1', state_class 'b', state_level '2'
        last_version, state_level = v[-1].split(state_class)
        if not state_level:
            state_level = None
        else:
            state_level = int(state_level)
        vlist = [int(x or '0') for x in v[:-1]]
        if last_version:
            vlist.append(int(last_version))
        # pad the numeric part out to (major, minor, patch)
        vlist += [None] * (3 - len(vlist))
        vlist += [state_class, state_level]
    else:
        state_level = None
        state_class = None
        vlist = [int(x or '0') for x in v]
        # pad the difference with `None` objects, and +2 for the state_*.
        vlist += [None] * ((3 - len(vlist)) + 2)
    return tuple(vlist)
def unsplit(vtup: tuple) -> str:
    """
    Join a version tuple back into the original version string.
    """
    state_class, state_level = vtup[-2:]
    # Dotted numeric prefix, skipping any padding `None` entries.
    parts = []
    for component in vtup[:-2]:
        if component is not None:
            parts.append(str(component))
    version = '.'.join(parts)
    # Append the pre-release suffix (e.g. 'b2') when one is present.
    if state_class is not None:
        version += state_class + str(state_level)
    return version
def normalize(split_version: tuple) -> tuple:
    """
    Given a tuple produced by `split`, normalize the `None` objects into int(0)
    or 'final' if it's the ``state_class``.
    """
    state_class, state_level = split_version[-2:]
    filled = []
    # Numeric components: missing positions become zero.
    for part in split_version[:-2]:
        filled.append(0 if part is None else part)
    # Absent state information means a final release at level 0.
    filled.append(state_class if state_class else 'final')
    filled.append(state_level if state_level else 0)
    return tuple(filled)
# Release-state suffixes ordered from least ('dev') to most mature
# ('final'); `None` (unrecognized class) sorts last.
# NOTE(review): presumably consumed by version-comparison code elsewhere
# in the package -- verify against callers.
default_state_class_priority = [
    'dev',
    'a',
    'alpha',
    'b',
    'beta',
    'rc',
    'final',
    None,
]
# CLI output format "python" is simply the repr() of the version tuple.
python = repr
def xml(self):
    # Render the version tuple as a small XML document (CLI output format).
    tags = (
        ('major', self[0]),
        ('minor', self[1]),
        ('patch', self[2]),
        ('state', self[-2]),
        ('level', self[-1]),
    )
    body = ''.join(' <%s>%s</%s>\n' % (name, value, name) for name, value in tags)
    return '<version type="one">\n' + body + '</version>'
def sh(self):
    # Render the version tuple as Bourne-shell variable assignments
    # (CLI output format), one per line, without a trailing newline.
    fields = (
        ('PG_VERSION_MAJOR', self[0]),
        ('PG_VERSION_MINOR', self[1]),
        ('PG_VERSION_PATCH', self[2]),
        ('PG_VERSION_STATE', self[-2]),
        ('PG_VERSION_LEVEL', self[-1]),
    )
    return '\n'.join('%s=%s' % (name, value) for name, value in fields)
if __name__ == '__main__':
    # Command-line driver: parse the single version-string argument and
    # print it in the requested output format.
    import sys
    import os
    from optparse import OptionParser
    op = OptionParser()
    op.add_option('-f', '--format',
        type='choice',
        dest='format',
        help='format of output information',
        choices=('sh', 'xml', 'python'),
        default='sh',
    )
    op.add_option('-n', '--normalize',
        action='store_true',
        dest='normalize',
        help='replace missing values with defaults',
        default=False,
    )
    op.set_usage(op.get_usage().strip() + ' "version to parse"')
    co, ca = op.parse_args()
    if len(ca) != 1:
        op.error('requires exactly one argument, the version')
    else:
        v = split(ca[0])
        if co.normalize:
            v = normalize(v)
        # Resolve the formatter (sh / xml / python) defined in this module.
        sys.stdout.write(getattr(sys.modules[__name__], co.format)(v))
        sys.stdout.write(os.linesep)
| ##
# .versionstring
##
"""
PostgreSQL version string parsing.
>>> postgresql.versionstring.split('8.0.1')
(8, 0, 1, None, None)
"""
def split(vstr: str) -> tuple:
    """
    Split a PostgreSQL version string into a tuple.
    (major, minor, patch, ..., state_class, state_level)
    """
    # Keep only the leading token ("9.2beta1 (Debian ...)" -> "9.2beta1"),
    # then break it apart on the dots.
    parts = vstr.strip().split(' ')[0].split('.')
    # Any non-digit residue in the last component names the release state
    # (beta, rc, dev, a, alpha, ...).
    state_class = parts[-1].strip('0123456789')
    if not state_class:
        # Purely numeric version: pad the numbers out to three slots and
        # leave both state fields empty.
        numbers = [int(piece or '0') for piece in parts]
        padding = (3 - len(numbers)) + 2
        return tuple(numbers + [None] * padding)
    # e.g. "1beta2" -> ("1", "2"): digits before and after the state word.
    last_number, level_text = parts[-1].split(state_class)
    state_level = int(level_text) if level_text else None
    numbers = [int(piece or '0') for piece in parts[:-1]]
    if last_number:
        numbers.append(int(last_number))
    numbers += [None] * (3 - len(numbers))
    return tuple(numbers + [state_class, state_level])
def unsplit(vtup: tuple) -> str:
    """
    Join a version tuple produced by `split` back into a version string.

    The tuple layout is (major, minor, patch, ..., state_class, state_level);
    `None` numeric components are omitted from the output.
    """
    # Dotted numeric part: every non-None component before the two state slots.
    svtup = [str(x) for x in vtup[:-2] if x is not None]
    state_class, state_level = vtup[-2:]
    if state_class is None:
        return '.'.join(svtup)
    # A state class without an explicit level (e.g. "9.1dev") has level None;
    # previously this rendered the literal string "None" into the output.
    suffix = state_class + ('' if state_level is None else str(state_level))
    return '.'.join(svtup) + suffix
def normalize(split_version: tuple) -> tuple:
    """
    Replace the `None` placeholders in a `split` tuple with defaults:
    missing numeric components become 0, a missing state class becomes
    'final', and a missing state level becomes 0.
    """
    numbers = split_version[:-2]
    state_class, state_level = split_version[-2:]
    filled = [0 if n is None else n for n in numbers]
    filled.append(state_class if state_class else 'final')
    filled.append(state_level if state_level else 0)
    return tuple(filled)
# Ordering of pre-release state classes from least to most mature.
# `None` (no state class at all) sorts after 'final'.
default_state_class_priority = [
    'dev',
    'a',
    'alpha',
    'b',
    'beta',
    'rc',
    'final',
    None,
]
# The "python" output format is simply the repr() of the version tuple.
python = repr
def xml(self):
    """Render a version tuple as a small XML <version> document string."""
    fields = (
        ('major', self[0]),
        ('minor', self[1]),
        ('patch', self[2]),
        ('state', self[-2]),
        ('level', self[-1]),
    )
    body = ''.join(' <%s>%s</%s>\n' % (tag, value, tag) for tag, value in fields)
    return '<version type="one">\n' + body + '</version>'
def sh(self):
    """Render a version tuple as shell variable assignments (one per line)."""
    names = ('MAJOR', 'MINOR', 'PATCH', 'STATE', 'LEVEL')
    values = (self[0], self[1], self[2], self[-2], self[-1])
    return '\n'.join(
        'PG_VERSION_%s=%s' % (name, value) for name, value in zip(names, values)
    )
if __name__ == '__main__':
    # Command-line entry point: parse one version string argument and print
    # it in the requested output format.
    import sys
    import os
    from optparse import OptionParser
    op = OptionParser()
    op.add_option('-f', '--format',
        type='choice',
        dest='format',
        help='format of output information',
        choices=('sh', 'xml', 'python'),
        default='sh',
    )
    op.add_option('-n', '--normalize',
        action='store_true',
        dest='normalize',
        help='replace missing values with defaults',
        default=False,
    )
    op.set_usage(op.get_usage().strip() + ' "version to parse"')
    co, ca = op.parse_args()
    if len(ca) != 1:
        # optparse's error() prints usage to stderr and exits the process.
        op.error('requires exactly one argument, the version')
    else:
        v = split(ca[0])
        if co.normalize:
            v = normalize(v)
        # Look up the formatter function (sh/xml/python) by name on this module.
        sys.stdout.write(getattr(sys.modules[__name__], co.format)(v))
        sys.stdout.write(os.linesep)
|
graham/genie | 2 | src/genie.js | /*
Copyright [2014] [Graham Abbott <graham.abbott@gmail.com>]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
var genie = ( function() {
var UNIQUE_TIME = "" + new Date().getTime();
var GENIE_VERSION = "0.3";
var genie_context_begin;
var genie_context_end;
var GENIE_CONTEXT_begin = eval("genie_context_begin") || "[";
var GENIE_CONTEXT_end = eval("genie_context_end") || "]";
var GENIE_CONTEXT_lookup = {
"#":"comment",
"%":"condition",
"!":"exec",
"*":"exec-coffee",
"&":"bindable",
"^":"notes",
"~":"compiler",
};
GENIE_CONTEXT_lookup[GENIE_CONTEXT_begin] = "variable";
GENIE_CONTEXT_lookup[GENIE_CONTEXT_end] = "variable";
var genie_environ_count = 0;
// I'm not really proud of this sort of monkey patching, but it's somewhat required here.
var str_trim = function(s) { return s.replace(/^\s+|\s+$/g, "").replace(/^[\n|\r]+|[\n|\r]+$/g, ""); };
var str_trimr = function(s) { return s.replace(/\s+$/g, "").replace(/[\n|\r]+$/g, ""); };
var str_trimr_spaces = function(s) { return s.replace(/[ |\t]+$/g, ""); };
var str_trimr_one = function(s) { return s.replace(/\n[ |\t]*/g, ""); };
var str_triml = function(s) { return s.replace(/^\s+/g, "").replace(/^[\n|\r]+/g, ""); };
var str_triml_spaces = function(s) { return s.replace(/^[ |\t]+/g, ""); };
var str_triml_one = function(s) { return s.replace(/^[ |\t]*\n/g, ""); };
var safe_str = function(s) { return JSON.stringify(s); };
var str_count = function(s, c, accum) {
    // Count occurrences of `c` in `s` (advancing one char past each match),
    // starting from an optional running total `accum`.
    var total = (accum == undefined) ? 0 : accum;
    var remainder = s;
    var at = remainder.indexOf(c);
    while (at != -1) {
        total += 1;
        remainder = remainder.slice(at + 1);
        at = remainder.indexOf(c);
    }
    return total;
};
var str_starts_with = function(key, st) {
    // True when `key` begins with the prefix `st`.
    return key.slice(0, st.length) == st;
}
// Makes the code printouts very pretty ( can't help but keep it )
var pad = function(count) {
    // Build a run of `count` spaces, used to indent the generated code.
    var spaces = "";
    for (var i = 0; i < count; i++) {
        spaces += " ";
    }
    return spaces;
};
var Template = function(sss) {
this.orig_string = sss;
this.string = sss;
this.environment = null;
this.blocks = [];
this.final_func = null;
this.notes = [];
this.cur_template_line = 0;
this.value_only = undefined;
this.exposed_values = [];
};
Template.prototype.find_next_block = function() {
var begin_char;
var end_char;
var cmd_lookup;
var blocks = [];
if (this.environment) {
begin_char = this.environment.begin;
end_char = this.environment.end;
cmd_lookup = this.environment.lookup;
} else {
begin_char = GENIE_CONTEXT_begin;
end_char = GENIE_CONTEXT_end;
cmd_lookup = GENIE_CONTEXT_lookup;
}
if (this.value_only != undefined) {
begin_char = this.value_only;
end_char = this.value_only;
cmd_lookup = {};
cmd_lookup[this.value_only] = "variable";
}
var start = this.string.indexOf(begin_char);
var next_char = start+1;
if (start == -1) {
var s = this.string;
this.string = '';
if (s == '') {
return [];
} else {
//*
this.cur_template_line += str_count(s, '\n');
//*
blocks.push( ['text', s, this.cur_template_line]);
return blocks;
}
}
var before_block = this.string.substring(0, start);
var after_block = this.string.substring(start+1);
this.cur_template_line += str_count(before_block, '\n');
blocks.push( ['text', before_block, this.cur_template_line] );
var start_char = after_block[0];
var type = cmd_lookup[start_char];
var end = null;
if (start_char == begin_char) {
end = after_block.indexOf(end_char + end_char);
} else {
if (start_char in cmd_lookup) {
end = after_block.indexOf(start_char + end_char);
} else {
this.cur_template_line += str_count(begin_char, '\n');
blocks.push( ['text', begin_char, this.cur_template_line] );
this.string = after_block.substring(0);
return blocks;
}
}
end += 1;
var block = after_block.substring(1, end-1);
after_block = after_block.substring(end+1);
// Pre-inner-operator.
if (block[0] == '-') {
block = block.substring(1);
if (blocks[blocks.length-1]) {
blocks[blocks.length-1][1] = str_trimr_spaces(blocks[blocks.length-1][1]);
}
} else if (block[0] == '=' || type == "notes") {
block = block.substring(1);
if (blocks[blocks.length-1]) {
blocks[blocks.length-1][1] = str_trimr(blocks[blocks.length-1][1]);
}
} else if (block[0] == '|') {
block = block.substring(1);
}
//post inner operator.
if (block[block.length-1] == '|') {
block = block.substring(0, block.length-1);
after_block = str_triml_one(after_block);
} else if (block[block.length-1] == '-') {
block = block.substring(0, block.length-1);
after_block = str_triml_spaces(after_block);
} else if (block[block.length-1] == '=') {
block = block.substring(0, block.length-1);
after_block = str_triml(after_block);
}
this.cur_template_line += str_count(block, '\n');
blocks.push( [type, block, this.cur_template_line] );
this.string = after_block;
return blocks;
};
Template.prototype.bailout = function() {
    /* Throw a sentinel exception ({type: "bailout"}) that render() catches
       in order to abort the current template render and return null. */
    throw { type: "bailout", message: "bailout of current template render" };
};
Template.prototype.compile = function() {
var counter_count = 0;
var depth = 0;
var f_code = [];
var in_func = [];
var i = 0;
var blocks = this.find_next_block();
var tempvar_counter = 0;
while(blocks.length > 0) {
for( i = 0; i < blocks.length; i++ ) {
var obj = blocks[i];
var type = obj[0];
var data = obj[1];
var line = obj[2];
if (type == 'text') {
f_code.push( "/* " + line + " */ " + pad(depth) );
f_code.push("write(" + JSON.stringify(data) + ");\n" );
} else if ( type == 'condition') {
data = str_trim(data);
if (data.substring(0,2) == 'if') {
var d = str_trim(data.substring(2));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-1);
}
f_code.push( "\n" + "/* " + line + " */ " + pad(depth) );
f_code.push("if (" + bulk + ")" + " {\n");
depth += 1;
in_func.push('}');
} else if (data.substring(0, 5) == 'while') {
var d = str_trim(data.substring(5));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-2);
}
f_code.push( "\n/* " + line + " */ " + pad(depth) );
f_code.push("while (" + bulk + ")" + " {\n");
depth += 1;
in_func.push('}');
} else if (data.substring(0, 4) == 'ford') {
var d = str_trim(data.substring(4));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-2);
}
var value_name = bulk.substring(0, bulk.indexOf(' in '));
var rest = bulk.substring(bulk.indexOf(' in ') + 4);
var cvar = '_count_' + counter_count;
counter_count += 1;
f_code.push( "\n/* " + line + " */ for( var " + value_name + " in " + rest + " ) {" );
f_code.push( "\n/* " + line + " */ " + pad(depth) );
in_func.push('}');
depth += 1;
} else if (data.substring(0, 3) == 'for') {
var d = str_trim(data.substring(3));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-2);
}
var value_name = bulk.substring(0, bulk.indexOf(' in '));
var rest = bulk.substring(bulk.indexOf(' in ') + 4);
var cvar = '_count_' + counter_count;
counter_count += 1;
f_code.push( "\n/* " + line + " */ for( var " + cvar + " = 0; " + cvar + " < " + rest + ".length; " + cvar + "++ ) {" );
f_code.push( "\n/* " + line + " */ var " + value_name + " = " + rest + "[" + cvar + "]; var index=" + cvar + ";");
f_code.push( "\n/* " + line + " */ var rindex = (" + rest + ".length" + " - index) - 1");
f_code.push( "\n/* " + line + " */ " + pad(depth) );
in_func.push('}');
depth += 1;
} else if (data == 'end') {
depth -= 1;
f_code.push( "/* " + line + " */ " + pad(depth) );
f_code.push(in_func.pop() + ';\n');
} else if (data.substring(0, 7) == 'else if') {
var command = data.substring(0, 7);
var rest = str_trim(data.substring(7));
if (rest[0] == '(') {
bulk = d.substring(1, d.length-1);
}
if (rest[rest.length-1] == ')') {
bulk = d.substring(0, d.length-2);
}
f_code.push( "/* " + line + " */ " + pad(depth-1) );
f_code.push( "} " + command + " ( " + rest + " ) {\n");
} else if (data.substring(0, 4) == 'else') {
var command = data.substring(0, 4);
f_code.push( "/* " + line + " */ " + pad(depth-1) );
f_code.push( "} " + command + " {\n");
}
} else if (type == 'variable') {
f_code.push( pad(depth) );
var vardata = data;
var vartype = undefined;
// :: means of type. (obj :: type)
if (data.indexOf('::') != -1) {
var temp = data.split('::');
vardata = str_trim(temp[0]);
vartype = str_trim(temp[1]);
}
if (data.indexOf(GENIE_CONTEXT_begin) == 0) {
f_code.push( "/* " + line + " */ write( " + vardata.substring(1) + " );\n");
} else {
var tempvar_name = "__tempvar_" + tempvar_counter;
tempvar_counter++;
f_code.push( "/* " + line + " */ var " + tempvar_name + " = " + vardata + ";\n");
f_code.push( "/* " + line + " */ if (typeof(" + tempvar_name + ") == \"function\") { write(" + tempvar_name + "());}\n");
f_code.push( "/* " + line + " */ else { write( (typeof(" + tempvar_name + ") != 'undefined') ? escape_variable(" + tempvar_name + ", '" + vartype + "') : undefined_variable(" + JSON.stringify(vardata) + ") ); } \n");
}
} else if (type == 'bindable') {
var value = this.environment.bindable_dict[str_trim(data)];
if (value === undefined) {
value = '';
}
f_code.push( "/* " + line + " */ write( \"<span class='genie_" + this.environment.id + "_value_update_" + str_trim(data) + "'>\" + " + data + " + \"</span>\" );\n" );
} else if (type == 'exec') {
f_code.push( "/* " + line + " */ " + data);
} else if (type == 'exec-coffee') {
f_code.push( "/* " + line + " */ " + CoffeeScript.compile(data));
} else if (type == 'notes') {
this.notes.push(str_trim(data));
} else if (type == 'compiler') {
// this should have been compiled out, ignore in this case.
// pass
}
}
blocks = this.find_next_block();
}
var preamble = [];
if (this.notes) {
preamble = this.preamble_notes();
}
preamble = preamble.join(' ');
var header = "var write = locals.write; var escape_variable = locals.escape_variable;";
header += "var partial = locals.partial; var bailout = locals.bailout;";
header += "var _env = locals._env; var _template = locals._template;";
this.f_code_render = preamble + header + f_code.join('');
//console.log(this.f_code_render);
this.f_code = null;
};
Template.prototype.preamble_notes = function() {
var newnotes = [];
var preamble = [];
for(var i = 0; i < this.notes.length; i++) {
var obj = str_trim(this.notes[i]);
var result = null;
if (obj.slice(0, 6) == "expose") {
try {
result = JSON.parse(obj.slice(6, obj.length));
} catch (err) {
result = null;
}
if (result) {
if (typeof(result) == "string") {
preamble.push("var " + result + " = v." + result + ";");
} else if (typeof(result) == "object") {
for(var __i = 0; __i < result.length; __i++) {
var result_final = result[__i].replace(' ', '_');
preamble.push("var " + result_final + " = v." + result_final + ";");
}
}
}
} else {
newnotes.push(obj);
}
}
this.notes = newnotes;
return preamble;
};
Template.prototype.pre_render = function(undefined_variable) {
this.compile();
var locals = {};
locals['_env'] = this.environment;
locals['____output'] = [];
locals['partial'] = function(name, d) {
var ptemp = locals['_env'].get_template(name);
if (ptemp == undefined) {
console.log("ERROR: Template " + name + " not found.");
return "TEMPLATE_NOT_FOUND: " + name;
} else {
return locals['_env'].get_template(name).render(d);
}
};
locals['write'] = function(ddd) { locals['____output'].push(ddd); };
locals['_template'] = this;
locals['bailout'] = this.bailout;
locals['escape_variable'] = function(data, type) { return data; };
try {
var compiled_code = new Function('parent', 'v', 'defaults', 'undefined_variable', 'locals', this.f_code_render);
} catch (e) {
this.stack_trace(e);
}
var encased_template = function(tvars, uv) {
locals['____output'] = [];
var template_vars = tvars;
var undef_var = function(name) {
if (!uv) {
console.log("Variable '" + name + "' is not defined, state: " + tvars);
return "** " + name + " not defined **";
} else if (uv.indexOf('%s') == -1) {
return str_trim(uv);
} else {
return str_trim(uv.replace('%s', str_trim(name)));
}
};
var defaults;
if (this.environment) {
defaults = this.environment.default_dict;
} else {
defaults = {};
}
compiled_code(locals['_template'], template_vars, defaults, undef_var, locals);
return locals['____output'].join('');
}
this.final_func = encased_template;
this.f_code_render = null;
};
Template.prototype.render = function(variables, undefined_variable) {
    // Render this template with `variables`, compiling lazily on first use.
    // `undefined_variable` customizes the text substituted for missing
    // variables ("%s" in it is replaced by the variable name).
    // Returns null when the template aborts itself via bailout().
    if (this.final_func == null) {
        this.pre_render(undefined_variable);
    }
    try {
        var result = this.final_func(variables, undefined_variable);
        return result;
    } catch (e) {
        if (e.type == 'bailout') {
            return null;
        } else {
            // Any other error is forwarded to stack_trace for re-raising.
            this.stack_trace(e);
        }
    }
};
Template.prototype.stack_trace = function(e) {
    // Re-raise a compile/render error.
    // The original implementation contained template-line-reporting logic
    // below an unconditional `throw e;`, making it unreachable dead code --
    // and that code dereferenced `this.f_code`, which compile() sets to
    // null, so it could never have run successfully anyway.  The dead code
    // has been removed; observable behavior (re-throwing `e`) is unchanged.
    throw e;
};
Template.prototype.async_render = function(variables, options) {
    // Render with callback-style result delivery.  `options` may supply:
    //   undefined_variable -- formatter for missing variables,
    //   on_success(result, template) -- called with the rendered string,
    //   on_error(err, template) -- called when render throws,
    //   on_bailout(template) -- called when render() returns null (bailout).
    var do_nothing = function() {};
    var undefined_variable = options['undefined_variable'] || function(varname) { return '**' + varname + ' not defined **'; };
    var on_success = options['on_success'] || do_nothing;
    var on_error = options['on_error'] || do_nothing;
    var on_bailout = options['on_bailout'] || do_nothing;
    try {
        var result = this.render(variables, undefined_variable);
        if (result == null && on_bailout) {
            on_bailout(this);
            return;
        } else {
            if (on_success) {
                on_success(result, this);
            }
        }
    } catch (e) {
        on_error(e, this);
    }
};
var Environment = function() {
    // A template namespace: holds registered templates, bindable values,
    // and the delimiter configuration used by the block parser.
    this.id = genie_environ_count + 1;  // unique id, embedded in bindable span class names
    genie_environ_count += 1;
    this.default_data = {};
    this.object_dict = {};     // arbitrary objects stored via set_obj/get_obj
    this.template_dict = {};   // template name -> Template instance
    this.bindable_dict = {};   // key -> last value passed to set_bindable
    this.escape_dict = {};
    this.begin = GENIE_CONTEXT_begin;    // block delimiters used by find_next_block
    this.end = GENIE_CONTEXT_end;
    this.lookup = GENIE_CONTEXT_lookup;  // first-char -> block type table
};
Environment.prototype.escape_variable = function(vardata, vartype) {
return vardata;
};
Environment.prototype.template_list = function() {
    // Return the names of all templates registered in this environment.
    // Bug fix: `l` was previously assigned without `var`, leaking an
    // implicit global (and throwing a ReferenceError in strict mode).
    var names = [];
    for (var key in this.template_dict) {
        names.push(key);
    }
    return names;
};
Environment.prototype.set_bindable = function(key, value) {
    // Store a bindable value, then live-update every DOM span that a
    // previously rendered bindable block emitted for this key (the spans
    // carry the class 'genie_<env id>_value_update_<key>').
    this.bindable_dict[key] = value;
    var targets = document.getElementsByClassName('genie_' + this.id + '_value_update_' + key);
    for( var i = 0; i < targets.length; i++ ) {
        var obj = targets[i];
        obj.innerHTML = value;
    }
};
Environment.prototype.get_template = function(name) {
return this.template_dict[name];
};
Environment.prototype.create_template = function(name, data) {
var t = new Template(data);
t.key = name;
t.environment = this;
this.template_dict[name] = t;
return t;
};
Environment.prototype.render_quick = function(template_text, vars, undef_var) {
if (vars === undefined) {
vars = {};
}
var t = new Template(template_text);
t.key = 'anon';
t.environment = this;
return t.render(vars, undef_var);
};
Environment.prototype.render = function(name, vars, undef_var) {
    // Render the registered template `name` with `vars`.  Returns '' (and
    // logs a message) when no such template is registered.
    if (vars === undefined) {
        vars = {};
    }
    var t = this.template_dict[name];
    if (t === undefined) {
        console.log("Template " + name + " not found.");
        return '';
    } else {
        return t.render(vars, undef_var);
    }
};
Environment.prototype.set_obj = function(name, obj) {
this.object_dict[name] = obj;
};
Environment.prototype.get_obj = function(name) {
return this.object_dict[name];
};
Environment.prototype.load_template = function(url, name, cb) {
var env = this;
$.get(url + "?_ts=" + UNIQUE_TIME,
function(data) {
env.create_template(name, data);
console.log('created template: ' + name + ' (' + data.length + ' bytes)');
if (cb) {
cb.finish();
}
});
};
Environment.prototype.load_templates = function(orig_paths, final_callback) {
    // Fetch each template path in turn (note: consumes `orig_paths` via
    // pop(), so they load in reverse order) and register each under its own
    // path name.  Failed fetches are skipped.  When the list is exhausted,
    // invoke final_callback(env).
    var env = this;
    function load_next(paths, callback) {
        if (paths.length == 0) {
            callback(env);
        } else {
            var template_name = paths.pop();
            $.get(template_name + "?_ts=" + UNIQUE_TIME,
                function(data) {
                    env.create_template(template_name, data);
                    // Bug fix: this previously logged the undefined global
                    // `name` instead of the template actually loaded.
                    console.log('created template: ' + template_name + ' (' + data.length + ' bytes)');
                    load_next(paths, callback);
                }).fail( function() {load_next(paths, callback)} );
        }
    };
    load_next(orig_paths, final_callback);
};
Environment.prototype.auto_load = function(callback) {
// this sucks because it requires jquery, should figure out how to
// make that not a dependency.
var env = this;
var template_names = [];
$('.genie-template').each( function(index, item) {
template_names.push($(item).attr('data-genie-template'));
});
env.load_templates(template_names, function() {
$('.genie-template').each( function(index, item) {
var template_name = $(item).attr('data-genie-template');
var result = env.render(template_name, {});
$(item).html(result);
});
if (callback) {
callback();
}
});
};
Environment.prototype.load_template_dir = function(url, cb) {
// this sucks because it requires jquery, should figure out how to
// make that not a dependency.
var env = this;
$.get(url, function(data) {
data = JSON.parse(data);
var items = [];
for(var name in data) {
var obj = data[name];
var load = function(o) {
return function(t) {
env.load_template(url + o, o.split('.')[0], t);
}
}
items.push( load(obj) );
}
ut.serial(function() { return; }, items, cb);
});
};
var main_environment = new Environment();
var fs = function( s, args, value_only ) {
var t = new Template(s);
t.value_only = value_only;
return t.render(args);
};
var ts = function() {
var d = new Date();
return d.getTime() / 1000.0;
};
var genie_render_dom_element = function(d, o) {
var content = d.value;
var t = new Template(content);
d.value = t.render(o);
}
var loadr = function(url) {
var d = document.createElement('script');
d.src = url;
d.type = 'text/javascript';
document.body.appendChild(d);
};
var monkey_patch = function() {
    // Opt-in convenience: adds String.prototype.render so any string can be
    // rendered directly as an anonymous template.
    String.prototype.render = function(args, undef_var) {
        var t = new Template(this);
        t.key = 'anon';
        return t.render(args, undef_var);
    };
};
var dig_get = function(obj, key, settings) {
    // Fetch a nested value from `obj` by a delimited path, e.g.
    // dig_get(o, "a/b/c") -> o["a"]["b"]["c"].  `settings.di` overrides the
    // default "/" delimiter.
    // Bug fixes: `settings` is now optional (previously omitting it threw a
    // TypeError, unlike dig_set which guarded it), and it is forwarded on
    // recursion so a custom delimiter applies at every level -- the old
    // `this.dig_get(obj, rest)` call dropped it and depended on `this`.
    if (settings == undefined) {
        settings = {};
    }
    var split_key = "/";
    if (settings['di']) {
        split_key = settings['di'];
    }
    if (key.indexOf(split_key) == -1) {
        return obj[key];
    } else {
        var cur = key.split(split_key, 1);
        var rest = key.split(split_key).slice(1).join(split_key);
        return dig_get(obj[cur], rest, settings);
    }
};
var dig_set = function(obj, key, value, settings) {
    // Set a nested value in `obj` by a delimited path, creating intermediate
    // containers as needed, e.g. dig_set(o, "a/b/c", 1).  `settings.di`
    // overrides the "/" delimiter; `settings.def` is a factory for the
    // intermediate containers.  Returns [container, leaf_key].
    // Bug fix: `settings` is now forwarded on recursion, so a custom
    // delimiter/factory applies at every level -- the old
    // `this.dig_set(newb, rest, value)` call dropped it and depended on
    // the `this` binding.
    var split_key = "/";
    var def = function() { return new Object(); };
    if (settings == undefined) {
        settings = {};
    }
    if (settings['di']) {
        split_key = settings['di'];
    }
    if (settings['def']) {
        def = settings['def'];
    }
    // Tolerate a single trailing delimiter.
    if (key[key.length-1] == split_key) {
        key = key.slice(0, key.length-1);
    }
    if (key.indexOf(split_key) == -1) {
        obj[key] = value;
        return [obj, key];
    } else {
        var cur = key.split(split_key, 1);
        var rest = key.split(split_key).slice(1).join(split_key);
        var newb = obj[cur];
        if (newb == undefined) {
            obj[cur] = def();
            newb = obj[cur];
        }
        return dig_set(newb, rest, value, settings);
    }
};
var unpack_packed_hash = function(data) {
};
var render_body_as_template = function(d, undef_var) {
var v = main_environment.render_quick(document.body.innerHTML, d, undef_var);
document.body.innerHTML = v;
return v;
};
var exports;
exports = {
'Template':Template,
'Environment':Environment,
'monkey_patch':monkey_patch,
'main_environment':main_environment,
'fs':fs,
'str_count':str_count,
'version':GENIE_VERSION,
'dig_set':dig_set,
'dig_get':dig_get,
'render_body_as_template':render_body_as_template,
'rbt':render_body_as_template,
'str_starts_with':str_starts_with
};
return exports;
})();
if (typeof genie_module !== 'undefined') {
genie_module.exports.genie = genie;
genie_module.exports.Template = genie.Template;
genie_module.exports.Environment = genie.Environment;
}
| /*
Copyright [2014] [Graham Abbott <graham.abbott@gmail.com>]
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
var genie = ( function() {
var UNIQUE_TIME = "" + new Date().getTime();
var GENIE_VERSION = "0.3";
var genie_context_begin;
var genie_context_end;
var GENIE_CONTEXT_begin = eval("genie_context_begin") || "[";
var GENIE_CONTEXT_end = eval("genie_context_end") || "]";
var GENIE_CONTEXT_lookup = {
"#":"comment",
"%":"condition",
"!":"exec",
"*":"exec-coffee",
"&":"bindable",
"^":"notes",
"~":"compiler",
};
GENIE_CONTEXT_lookup[GENIE_CONTEXT_begin] = "variable";
GENIE_CONTEXT_lookup[GENIE_CONTEXT_end] = "variable";
var genie_environ_count = 0;
// I'm not really proud of this sort of monkey patching, but it's somewhat required here.
var str_trim = function(s) { return s.replace(/^\s+|\s+$/g, "").replace(/^[\n|\r]+|[\n|\r]+$/g, ""); };
var str_trimr = function(s) { return s.replace(/\s+$/g, "").replace(/[\n|\r]+$/g, ""); };
var str_trimr_spaces = function(s) { return s.replace(/[ |\t]+$/g, ""); };
var str_trimr_one = function(s) { return s.replace(/\n[ |\t]*/g, ""); };
var str_triml = function(s) { return s.replace(/^\s+/g, "").replace(/^[\n|\r]+/g, ""); };
var str_triml_spaces = function(s) { return s.replace(/^[ |\t]+/g, ""); };
var str_triml_one = function(s) { return s.replace(/^[ |\t]*\n/g, ""); };
var safe_str = function(s) { return JSON.stringify(s); };
var str_count = function(s, c, accum) {
    // Count occurrences of `c` in `s` (advancing one char past each match),
    // starting from an optional running total `accum`.
    var total = (accum == undefined) ? 0 : accum;
    var remainder = s;
    var at = remainder.indexOf(c);
    while (at != -1) {
        total += 1;
        remainder = remainder.slice(at + 1);
        at = remainder.indexOf(c);
    }
    return total;
};
var str_starts_with = function(key, st) {
    // True when `key` begins with the prefix `st`.
    return key.slice(0, st.length) == st;
}
// Makes the code printouts very pretty ( can't help but keep it )
var pad = function(count) {
    // Build a run of `count` spaces, used to indent the generated code.
    var spaces = "";
    for (var i = 0; i < count; i++) {
        spaces += " ";
    }
    return spaces;
};
var Template = function(sss) {
this.orig_string = sss;
this.string = sss;
this.environment = null;
this.blocks = [];
this.final_func = null;
this.notes = [];
this.cur_template_line = 0;
this.value_only = undefined;
this.exposed_values = [];
};
Template.prototype.find_next_block = function() {
var begin_char;
var end_char;
var cmd_lookup;
var blocks = [];
if (this.environment) {
begin_char = this.environment.begin;
end_char = this.environment.end;
cmd_lookup = this.environment.lookup;
} else {
begin_char = GENIE_CONTEXT_begin;
end_char = GENIE_CONTEXT_end;
cmd_lookup = GENIE_CONTEXT_lookup;
}
if (this.value_only != undefined) {
begin_char = this.value_only;
end_char = this.value_only;
cmd_lookup = {};
cmd_lookup[this.value_only] = "variable";
}
var start = this.string.indexOf(begin_char);
var next_char = start+1;
if (start == -1) {
var s = this.string;
this.string = '';
if (s == '') {
return [];
} else {
//*
this.cur_template_line += str_count(s, '\n');
//*
blocks.push( ['text', s, this.cur_template_line]);
return blocks;
}
}
var before_block = this.string.substring(0, start);
var after_block = this.string.substring(start+1);
this.cur_template_line += str_count(before_block, '\n');
blocks.push( ['text', before_block, this.cur_template_line] );
var start_char = after_block[0];
var type = cmd_lookup[start_char];
var end = null;
if (start_char == begin_char) {
end = after_block.indexOf(end_char + end_char);
} else {
if (start_char in cmd_lookup) {
end = after_block.indexOf(start_char + end_char);
} else {
this.cur_template_line += str_count(begin_char, '\n');
blocks.push( ['text', begin_char, this.cur_template_line] );
this.string = after_block.substring(0);
return blocks;
}
}
end += 1;
var block = after_block.substring(1, end-1);
after_block = after_block.substring(end+1);
// Pre-inner-operator.
if (block[0] == '-') {
block = block.substring(1);
if (blocks[blocks.length-1]) {
blocks[blocks.length-1][1] = str_trimr_spaces(blocks[blocks.length-1][1]);
}
} else if (block[0] == '=' || type == "notes") {
block = block.substring(1);
if (blocks[blocks.length-1]) {
blocks[blocks.length-1][1] = str_trimr(blocks[blocks.length-1][1]);
}
} else if (block[0] == '|') {
block = block.substring(1);
}
//post inner operator.
if (block[block.length-1] == '|') {
block = block.substring(0, block.length-1);
after_block = str_triml_one(after_block);
} else if (block[block.length-1] == '-') {
block = block.substring(0, block.length-1);
after_block = str_triml_spaces(after_block);
} else if (block[block.length-1] == '=') {
block = block.substring(0, block.length-1);
after_block = str_triml(after_block);
}
this.cur_template_line += str_count(block, '\n');
blocks.push( [type, block, this.cur_template_line] );
this.string = after_block;
return blocks;
};
Template.prototype.bailout = function() {
    /* Throw a sentinel exception ({type: "bailout"}) that render() catches
       in order to abort the current template render and return null. */
    throw { type: "bailout", message: "bailout of current template render" };
};
Template.prototype.compile = function() {
var counter_count = 0;
var depth = 0;
var f_code = [];
var in_func = [];
var i = 0;
var blocks = this.find_next_block();
var tempvar_counter = 0;
while(blocks.length > 0) {
for( i = 0; i < blocks.length; i++ ) {
var obj = blocks[i];
var type = obj[0];
var data = obj[1];
var line = obj[2];
if (type == 'text') {
f_code.push( "/* " + line + " */ " + pad(depth) );
f_code.push("write(" + JSON.stringify(data) + ");\n" );
} else if ( type == 'condition') {
data = str_trim(data);
if (data.substring(0,2) == 'if') {
var d = str_trim(data.substring(2));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-1);
}
f_code.push( "\n" + "/* " + line + " */ " + pad(depth) );
f_code.push("if (" + bulk + ")" + " {\n");
depth += 1;
in_func.push('}');
} else if (data.substring(0, 5) == 'while') {
var d = str_trim(data.substring(5));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-2);
}
f_code.push( "\n/* " + line + " */ " + pad(depth) );
f_code.push("while (" + bulk + ")" + " {\n");
depth += 1;
in_func.push('}');
} else if (data.substring(0, 4) == 'ford') {
var d = str_trim(data.substring(4));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-2);
}
var value_name = bulk.substring(0, bulk.indexOf(' in '));
var rest = bulk.substring(bulk.indexOf(' in ') + 4);
var cvar = '_count_' + counter_count;
counter_count += 1;
f_code.push( "\n/* " + line + " */ for( var " + value_name + " in " + rest + " ) {" );
f_code.push( "\n/* " + line + " */ " + pad(depth) );
in_func.push('}');
depth += 1;
} else if (data.substring(0, 3) == 'for') {
var d = str_trim(data.substring(3));
var bulk = d;
if (d[0] == '(') {
bulk = d.substring(1, d.length-2);
}
var value_name = bulk.substring(0, bulk.indexOf(' in '));
var rest = bulk.substring(bulk.indexOf(' in ') + 4);
var cvar = '_count_' + counter_count;
counter_count += 1;
f_code.push( "\n/* " + line + " */ for( var " + cvar + " = 0; " + cvar + " < " + rest + ".length; " + cvar + "++ ) {" );
f_code.push( "\n/* " + line + " */ var " + value_name + " = " + rest + "[" + cvar + "]; var index=" + cvar + ";");
f_code.push( "\n/* " + line + " */ var rindex = (" + rest + ".length" + " - index) - 1");
f_code.push( "\n/* " + line + " */ " + pad(depth) );
in_func.push('}');
depth += 1;
} else if (data == 'end') {
depth -= 1;
f_code.push( "/* " + line + " */ " + pad(depth) );
f_code.push(in_func.pop() + ';\n');
} else if (data.substring(0, 7) == 'else if') {
var command = data.substring(0, 7);
var rest = str_trim(data.substring(7));
if (rest[0] == '(') {
bulk = d.substring(1, d.length-1);
}
if (rest[rest.length-1] == ')') {
bulk = d.substring(0, d.length-2);
}
f_code.push( "/* " + line + " */ " + pad(depth-1) );
f_code.push( "} " + command + " ( " + rest + " ) {\n");
} else if (data.substring(0, 4) == 'else') {
var command = data.substring(0, 4);
f_code.push( "/* " + line + " */ " + pad(depth-1) );
f_code.push( "} " + command + " {\n");
}
} else if (type == 'variable') {
f_code.push( pad(depth) );
var vardata = data;
var vartype = undefined;
// :: means of type. (obj :: type)
if (data.indexOf('::') != -1) {
var temp = data.split('::');
vardata = str_trim(temp[0]);
vartype = str_trim(temp[1]);
}
if (data.indexOf(GENIE_CONTEXT_begin) == 0) {
f_code.push( "/* " + line + " */ write( " + vardata.substring(1) + " );\n");
} else {
var tempvar_name = "__tempvar_" + tempvar_counter;
tempvar_counter++;
f_code.push( "/* " + line + " */ var " + tempvar_name + " = " + vardata + ";\n");
f_code.push( "/* " + line + " */ if (typeof(" + tempvar_name + ") == \"function\") { write(" + tempvar_name + "());}\n");
f_code.push( "/* " + line + " */ else { write( (typeof(" + tempvar_name + ") != 'undefined') ? escape_variable(" + tempvar_name + ", '" + vartype + "') : undefined_variable(" + JSON.stringify(vardata) + ") ); } \n");
}
} else if (type == 'bindable') {
var value = this.environment.bindable_dict[str_trim(data)];
if (value === undefined) {
value = '';
}
f_code.push( "/* " + line + " */ write( \"<span class='genie_" + this.environment.id + "_value_update_" + str_trim(data) + "'>\" + " + data + " + \"</span>\" );\n" );
} else if (type == 'exec') {
f_code.push( "/* " + line + " */ " + data);
} else if (type == 'exec-coffee') {
f_code.push( "/* " + line + " */ " + CoffeeScript.compile(data));
} else if (type == 'notes') {
this.notes.push(str_trim(data));
} else if (type == 'compiler') {
// this should have been compiled out, ignore in this case.
// pass
}
}
blocks = this.find_next_block();
}
var preamble = [];
if (this.notes) {
preamble = this.preamble_notes();
}
preamble = preamble.join(' ');
var header = "var write = locals.write; var escape_variable = locals.escape_variable;";
header += "var partial = locals.partial; var bailout = locals.bailout;";
header += "var _env = locals._env; var _template = locals._template;";
this.f_code_render = preamble + header + f_code.join('');
//console.log(this.f_code_render);
this.f_code = null;
};
// Scan this.notes for "expose <json>" directives and turn each exposed
// name into a preamble statement of the form `var x = v.x;`, so template
// code can reference exposed variables without the `v.` prefix.
// Non-"expose" notes are kept and written back to this.notes.
// Returns: array of preamble statement strings.
Template.prototype.preamble_notes = function() {
    var newnotes = [];
    var preamble = [];
    for(var i = 0; i < this.notes.length; i++) {
        var obj = str_trim(this.notes[i]);
        var result = null;
        if (obj.slice(0, 6) == "expose") {
            try {
                // Payload after "expose" is JSON: a single name string
                // or an array of names.
                result = JSON.parse(obj.slice(6, obj.length));
            } catch (err) {
                result = null; // malformed JSON: directive is dropped silently
            }
            if (result) {
                if (typeof(result) == "string") {
                    preamble.push("var " + result + " = v." + result + ";");
                } else if (typeof(result) == "object") {
                    for(var __i = 0; __i < result.length; __i++) {
                        // NOTE(review): String.replace with a string only
                        // rewrites the FIRST space -- presumably names hold
                        // at most one; confirm.
                        var result_final = result[__i].replace(' ', '_');
                        preamble.push("var " + result_final + " = v." + result_final + ";");
                    }
                }
            }
        } else {
            newnotes.push(obj); // keep non-expose notes for later passes
        }
    }
    this.notes = newnotes;
    return preamble;
};
// Compile this template's generated source (f_code_render) into a real
// JS Function and wrap it in `encased_template`, stored on
// this.final_func for render() to call. The `locals` closure supplies
// the runtime helpers (write/partial/bailout/...) that the generated
// code references.
Template.prototype.pre_render = function(undefined_variable) {
    this.compile();
    var locals = {};
    locals['_env'] = this.environment;
    locals['____output'] = []; // rendered chunks; joined at the end
    // partial(name, d): render another template from the same environment
    // inline; emits a marker string when the template is missing.
    locals['partial'] = function(name, d) {
        var ptemp = locals['_env'].get_template(name);
        if (ptemp == undefined) {
            console.log("ERROR: Template " + name + " not found.");
            return "TEMPLATE_NOT_FOUND: " + name;
        } else {
            return locals['_env'].get_template(name).render(d);
        }
    };
    locals['write'] = function(ddd) { locals['____output'].push(ddd); };
    locals['_template'] = this;
    locals['bailout'] = this.bailout;
    // NOTE(review): escaping here is a pass-through, shadowing
    // Environment.prototype.escape_variable -- presumably intentional;
    // confirm.
    locals['escape_variable'] = function(data, type) { return data; };
    try {
        var compiled_code = new Function('parent', 'v', 'defaults', 'undefined_variable', 'locals', this.f_code_render);
    } catch (e) {
        this.stack_trace(e);
    }
    var encased_template = function(tvars, uv) {
        locals['____output'] = []; // reset per render call
        var template_vars = tvars;
        // Undefined-variable policy: uv may be absent (log + marker text),
        // a plain replacement string, or a '%s' pattern substituted with
        // the variable name.
        var undef_var = function(name) {
            if (!uv) {
                console.log("Variable '" + name + "' is not defined, state: " + tvars);
                return "** " + name + " not defined **";
            } else if (uv.indexOf('%s') == -1) {
                return str_trim(uv);
            } else {
                return str_trim(uv.replace('%s', str_trim(name)));
            }
        };
        var defaults;
        if (this.environment) {
            defaults = this.environment.default_dict;
        } else {
            defaults = {};
        }
        compiled_code(locals['_template'], template_vars, defaults, undef_var, locals);
        return locals['____output'].join('');
    }
    this.final_func = encased_template;
    this.f_code_render = null; // compiled; release the generated source
};
// Render this template against `variables`. Compilation is lazy: the
// first call builds final_func via pre_render(). A thrown 'bailout'
// yields null; any other error is routed through stack_trace().
Template.prototype.render = function(variables, undefined_variable) {
    if (this.final_func == null) {
        this.pre_render(undefined_variable);
    }
    try {
        return this.final_func(variables, undefined_variable);
    } catch (err) {
        if (err.type == 'bailout') {
            return null;
        }
        this.stack_trace(err);
    }
};
// Map an error raised inside compiled-template code back to the original
// template line (via the "/* N */" markers embedded in f_code) and raise
// a readable error.
// NOTE(review): the unconditional `throw e;` on the first line disables
// everything below it -- the whole mapping routine is currently dead
// code, apparently left as a debugging switch.
Template.prototype.stack_trace = function(e) {
    throw e;
    // --- unreachable below this point ---
    var line = null;
    if (e.line) {
        line = this.f_code.join('').split('\n')[e.line-3];
    } else if (e.lineNumber) {
        line = this.f_code.join('').split('\n')[e.lineNumber-3];
    } else {
        throw new Error('Your browser sucks: ' + e.message);
    }
    if (line.slice(0, 2) == '/*') {
        // Recover the template line number from the leading marker
        // comment, then show a 3-line context window around it.
        var os_by_line = this.orig_string.split('\n');
        var line_number = parseInt(str_trim(line.slice(2, line.indexOf('*/'))));
        var error_lines = [];
        if (line_number > 0) {
            error_lines.push(" line " + (line_number) + ": " + os_by_line[line_number-1]);
        }
        error_lines.push(" line " + (line_number+1) + ": " + os_by_line[line_number]);
        if (line_number < os_by_line.length-1) {
            error_lines.push(" line " + (line_number+2) + ": " + os_by_line[line_number+1]);
        }
        var message = "Javascript Error => " + e.message + "\nOn template line => " + (line_number+1) + "\n--------------------\n" + error_lines.join('\n') + "\n--------------------";
        console.log(message);
        throw new Error(message);
    } else {
        throw e;
    }
};
// Callback-style wrapper around render(). `options` may supply:
//   undefined_variable -- formatter for missing variables,
//   on_success(result, template), on_error(err, template),
//   on_bailout(template).
Template.prototype.async_render = function(variables, options) {
    var do_nothing = function() {};
    var undefined_variable = options['undefined_variable'] || function(varname) { return '**' + varname + ' not defined **'; };
    var on_success = options['on_success'] || do_nothing;
    var on_error = options['on_error'] || do_nothing;
    var on_bailout = options['on_bailout'] || do_nothing;
    try {
        var result = this.render(variables, undefined_variable);
        // render() returns null only on a 'bailout' exception
        if (result == null && on_bailout) {
            on_bailout(this);
            return;
        } else {
            if (on_success) {
                on_success(result, this);
            }
        }
    } catch (e) {
        on_error(e, this);
    }
};
// Template environment: owns a namespace of templates plus shared
// bindable/default/escape dictionaries. Each environment gets a unique
// numeric id (used by set_bindable to build DOM class names).
var Environment = function() {
    this.id = genie_environ_count + 1;
    genie_environ_count += 1;
    this.default_data = {};
    this.object_dict = {};    // arbitrary named objects (set_obj / get_obj)
    this.template_dict = {};  // template name -> Template
    this.bindable_dict = {};  // live-updatable values (set_bindable)
    this.escape_dict = {};
    // delimiters copied from the module-level GENIE_CONTEXT_* settings
    this.begin = GENIE_CONTEXT_begin;
    this.end = GENIE_CONTEXT_end;
    this.lookup = GENIE_CONTEXT_lookup;
};
// Hook for escaping variable output; the default implementation is a
// pass-through (override to install type-specific escaping).
Environment.prototype.escape_variable = function(vardata, vartype) {
    return vardata; // no escaping by default
};
// Return the names (keys) of all templates registered in this
// environment.
Environment.prototype.template_list = function() {
    // Fixed: the original assigned to an implicit global `l` (missing
    // `var`), which leaks/shares state across calls and throws in
    // strict mode.
    var names = [];
    for (var key in this.template_dict) {
        names.push(key);
    }
    return names;
};
// Store a bindable value and push it into the DOM: every element with
// class genie_<env id>_value_update_<key> gets its innerHTML replaced.
Environment.prototype.set_bindable = function(key, value) {
    this.bindable_dict[key] = value;
    var targets = document.getElementsByClassName('genie_' + this.id + '_value_update_' + key);
    for( var i = 0; i < targets.length; i++ ) {
        var obj = targets[i];
        obj.innerHTML = value;
    }
};
// Look up a registered template by name (undefined when missing).
Environment.prototype.get_template = function(name) {
    return this.template_dict[name];
};
// Build a Template from `data`, bind it to this environment and
// register it under `name`. Returns the new Template.
Environment.prototype.create_template = function(name, data) {
    var t = new Template(data);
    t.key = name;
    t.environment = this;
    this.template_dict[name] = t;
    return t;
};
// One-shot render of raw template text (the template is not registered).
Environment.prototype.render_quick = function(template_text, vars, undef_var) {
    if (vars === undefined) {
        vars = {};
    }
    var t = new Template(template_text);
    t.key = 'anon';
    t.environment = this;
    return t.render(vars, undef_var);
};
// Render a registered template by name; logs and returns '' when the
// template does not exist.
Environment.prototype.render = function(name, vars, undef_var) {
    if (vars === undefined) {
        vars = {};
    }
    var t = this.template_dict[name];
    if (t === undefined) {
        console.log("Template " + name + " not found.");
        return '';
    } else {
        return t.render(vars, undef_var);
    }
};
// Simple named-object registry.
Environment.prototype.set_obj = function(name, obj) {
    this.object_dict[name] = obj;
};
Environment.prototype.get_obj = function(name) {
    return this.object_dict[name];
};
// Fetch template text from `url` (cache-busted with UNIQUE_TIME),
// register it under `name`, then call cb.finish() if a callback object
// was given. Requires jQuery for the ajax call.
Environment.prototype.load_template = function(url, name, cb) {
    var env = this; // capture for the ajax callback
    $.get(url + "?_ts=" + UNIQUE_TIME,
        function(data) {
            env.create_template(name, data);
            console.log('created template: ' + name + ' (' + data.length + ' bytes)');
            if (cb) {
                cb.finish();
            }
        });
};
// Sequentially fetch and register each template path in `orig_paths`,
// then invoke final_callback(env). Paths are consumed from the END of
// the array (pop) and the input array is mutated; a failed fetch is
// skipped and loading continues with the next path.
Environment.prototype.load_templates = function(orig_paths, final_callback) {
    var env = this;
    function load_next(paths, callback) {
        if (paths.length == 0) {
            callback(env);
        } else {
            var template_name = paths.pop();
            $.get(template_name + "?_ts=" + UNIQUE_TIME,
                function(data) {
                    env.create_template(template_name, data);
                    // Fixed: the log line referenced the undefined
                    // variable `name` instead of `template_name`.
                    console.log('created template: ' + template_name + ' (' + data.length + ' bytes)');
                    load_next(paths, callback);
                }).fail( function() {load_next(paths, callback)} );
        }
    };
    load_next(orig_paths, final_callback);
};
// Scan the DOM for elements with class "genie-template", load the
// template named by each element's data-genie-template attribute,
// render it with an empty context into that element, then run
// `callback`.
Environment.prototype.auto_load = function(callback) {
    // this sucks because it requires jquery, should figure out how to
    // make that not a dependency.
    var env = this;
    var template_names = [];
    $('.genie-template').each( function(index, item) {
        template_names.push($(item).attr('data-genie-template'));
    });
    env.load_templates(template_names, function() {
        $('.genie-template').each( function(index, item) {
            var template_name = $(item).attr('data-genie-template');
            var result = env.render(template_name, {});
            $(item).html(result);
        });
        if (callback) {
            callback();
        }
    });
};
// Load every template listed by a JSON directory index served at `url`;
// each file is registered under its name minus the extension.
// `ut.serial` runs the loads in order, then fires `cb`.
Environment.prototype.load_template_dir = function(url, cb) {
    // this sucks because it requires jquery, should figure out how to
    // make that not a dependency.
    var env = this;
    $.get(url, function(data) {
        data = JSON.parse(data);
        var items = [];
        for(var name in data) {
            var obj = data[name];
            // closure factory so each queued task captures its own
            // file name rather than the shared loop variable
            var load = function(o) {
                return function(t) {
                    env.load_template(url + o, o.split('.')[0], t);
                }
            }
            items.push( load(obj) );
        }
        ut.serial(function() { return; }, items, cb);
    });
};
// Default module-level environment used by the convenience helpers.
var main_environment = new Environment();
// fs(s, args): format-string style one-off render of template text.
var fs = function( s, args, value_only ) {
    var t = new Template(s);
    t.value_only = value_only;
    return t.render(args);
};
// Current time as a unix timestamp in fractional seconds.
var ts = function() {
    var d = new Date();
    return d.getTime() / 1000.0;
};
// Render a DOM element's `value` in place, using it as template text.
var genie_render_dom_element = function(d, o) {
    var content = d.value;
    var t = new Template(content);
    d.value = t.render(o);
}
// Inject a <script> tag for `url` (fire-and-forget script loader).
var loadr = function(url) {
    var d = document.createElement('script');
    d.src = url;
    d.type = 'text/javascript';
    document.body.appendChild(d);
};
// Optional sugar: adds "tpl".render(args) to every string.
var monkey_patch = function() {
    String.prototype.render = function(args, undef_var) {
        var t = new Template(this);
        t.key = 'anon';
        return t.render(args, undef_var);
    };
};
var dig_get = function(obj, key, settings) {
var split_key = "/";
if (settings['di']) {
split_key = settings['di'];
}
if (key.indexOf(split_key) == -1) {
return obj[key];
} else {
var cur = key.split(split_key, 1);
var rest = key.split(split_key).slice(1).join(split_key);
obj = obj[cur];
return this.dig_get(obj, rest);
}
};
var dig_set = function(obj, key, value, settings) {
var split_key = "/";
var def = function() { return new Object(); };
if (settings == undefined) {
settings = {};
}
if (settings['di']) {
split_key = settings['di'];
}
if (settings['def']) {
def = settings['def'];
}
if (key[key.length-1] == split_key) {
key = key.slice(0, key.length-1);
}
if (key.indexOf(split_key) == -1) {
obj[key] = value;
return [obj, key];
} else {
var cur = key.split(split_key, 1);
var rest = key.split(split_key).slice(1).join(split_key);
var newb = obj[cur];
if (newb == undefined) {
obj[cur] = def();
newb = obj[cur];
}
return this.dig_set(newb, rest, value);
}
};
// Stub: never implemented (ignores `data`, returns undefined).
var unpack_packed_hash = function(data) {
};
// Treat the whole document body as a template: render it with `d` and
// write the result back into the body. Returns the rendered markup.
var render_body_as_template = function(d, undef_var) {
    var v = main_environment.render_quick(document.body.innerHTML, d, undef_var);
    document.body.innerHTML = v;
    return v;
};
var exports;
exports = {
'Template':Template,
'Environment':Environment,
'monkey_patch':monkey_patch,
'main_environment':main_environment,
'fs':fs,
'str_count':str_count,
'version':GENIE_VERSION,
'dig_set':dig_set,
'dig_get':dig_get,
'render_body_as_template':render_body_as_template,
'rbt':render_body_as_template,
'str_starts_with':str_starts_with
};
return exports;
})();
if (typeof genie_module !== 'undefined') {
genie_module.exports.genie = genie;
genie_module.exports.Template = genie.Template;
genie_module.exports.Environment = genie.Environment;
}
|
ryanb/render-caching | 1 | spec/render_caching/controller_additions_spec.rb | require File.dirname(__FILE__) + '/../spec_helper'
# stub the Rails module functionality
RAILS_CACHE = ActiveSupport::Cache.lookup_store(:memory_store)
# Minimal stand-in for the Rails module: exposes the in-memory cache
# store defined above so the specs can exercise cache reads/writes
# without a full Rails environment.
module Rails
  def self.cache
    RAILS_CACHE
  end
end
# Sanity check: loading the plugin must add render_with_cache as a
# private method on ActionController::Base.
describe ActionController::Base do
  it "should have render_with_cache private method" do
    ActionController::Base.new.private_methods.should include('render_with_cache')
  end
end
# Behavioural specs for render_with_cache. The module is mixed into the
# example group itself, so the helper runs against the stubbed
# request/response/render methods set up in the before block.
describe RenderCaching::ControllerAdditions do
  include RenderCaching::ControllerAdditions

  before(:each) do
    # fresh cache plus controller-ish stubs for every example
    Rails.cache.clear
    @request = stub
    @response = stub(:body => '')
    stubs(:request).returns(@request)
    stubs(:response).returns(@response)
    stubs(:performed?)
    stubs(:render)
  end

  it "should read from the cache with request uri as key and render that text" do
    @request.stubs(:request_uri).returns('/foo/bar')
    Rails.cache.write('/foo/bar', 'page content')
    expects(:render).with(:text => 'page content')
    render_with_cache
  end

  it "should read from the cache with custom passed key and render that text" do
    Rails.cache.write('my_key', 'page content')
    expects(:render).with(:text => 'page content')
    render_with_cache 'my_key'
  end

  it "should save response.body to cache as key when not specified" do
    @response.stubs(:body).returns('content')
    render_with_cache 'some_key'
    Rails.cache.read('some_key').should == 'content'
  end

  it "should call render when not cached or rendered yet" do
    stubs(:performed?).returns(false)
    expects(:render).with()
    render_with_cache 'some_key'
  end

  it "should not call render if already rendered" do
    stubs(:performed?).returns(true)
    stubs(:render).raises('should not be called')
    lambda { render_with_cache 'some_key' }.should_not raise_error
  end

  it "should not call render :text if cache doesn't exist" do
    stubs(:render).with(:text => @response.body).raises('should not be called')
    lambda { render_with_cache 'some_key' }.should_not raise_error
  end

  it "should yield to block when not cached" do
    pass = false
    render_with_cache('some_key') { pass = true }
    pass.should be_true
  end

  it "should not yield to block when cached" do
    Rails.cache.write('some_key', 'page content')
    render_with_cache('some_key') { violated('block was executed') }
  end

  it "should pass options to cache write call" do
    Rails.cache.expects(:write).with('some_key', @response.body, :expires_in => 5)
    render_with_cache('some_key', :expires_in => 5)
  end
end
| require File.dirname(__FILE__) + '/../spec_helper'
# stub the Rails module functionality
RAILS_CACHE = ActiveSupport::Cache.lookup_store(:memory_store)
module Rails
def self.cache
RAILS_CACHE
end
end
describe ActionController::Base do
it "should have render_with_cache private method" do
ActionController::Base.new.private_methods.should include('render_with_cache')
end
end
describe RenderCaching::ControllerAdditions do
include RenderCaching::ControllerAdditions
before(:each) do
Rails.cache.clear
@request = stub
@response = stub(:body => '')
stubs(:request).returns(@request)
stubs(:response).returns(@response)
stubs(:performed?)
stubs(:render)
end
it "should read from the cache with request uri as key and render that text" do
@request.stubs(:fullpath).returns('/foo/bar')
Rails.cache.write('/foo/bar', 'page content')
expects(:render).with(:text => 'page content')
render_with_cache
end
it "should read from the cache with custom passed key and render that text" do
Rails.cache.write('my_key', 'page content')
expects(:render).with(:text => 'page content')
render_with_cache 'my_key'
end
it "should save response.body to cache as key when not specified" do
@response.stubs(:body).returns('content')
render_with_cache 'some_key'
Rails.cache.read('some_key').should == 'content'
end
it "should call render when not cached or rendered yet" do
stubs(:performed?).returns(false)
expects(:render).with()
render_with_cache 'some_key'
end
it "should not call render if already rendered" do
stubs(:performed?).returns(true)
stubs(:render).raises('should not be called')
lambda { render_with_cache 'some_key' }.should_not raise_error
end
it "should not call render :text if cache doesn't exist" do
stubs(:render).with(:text => @response.body).raises('should not be called')
lambda { render_with_cache 'some_key' }.should_not raise_error
end
it "should yield to block when not cached" do
pass = false
render_with_cache('some_key') { pass = true }
pass.should be_true
end
it "should not yield to block when cached" do
Rails.cache.write('some_key', 'page content')
render_with_cache('some_key') { violated('block was executed') }
end
it "should pass options to cache write call" do
Rails.cache.expects(:write).with('some_key', @response.body, :expires_in => 5)
render_with_cache('some_key', :expires_in => 5)
end
end
|
redoPop/loupe | 10 | jquery.loupe.js | /**
* loupe - an image magnifier for jQuery
* (C) 2010 jdbartlett, MIT license
* http://github.com/jdbartlett/loupe
*/
(function ($) {
    // $(sel).loupe({loupe, width, height}) -- attach a magnifier that
    // follows the mouse over an image, or over a link wrapping one (in
    // which case the link's href is used as the zoomed image source).
    $.fn.loupe = function (arg) {
        var options = $.extend({
            loupe: 'loupe',  // CSS class applied to the loupe <div>
            width: 200,
            height: 150
        }, arg || {});
        return this.length ? this.each(function () {
            var $this = $(this), $big, $loupe,
                // target image: the element itself or its first <img>
                $small = $this.is('img') ? $this : $this.find('img:first'),
                move, hide = function () { $loupe.hide(); },
                time;
            // already initialised: just record the new arg flag
            if ($this.data('loupe') != null) {
                return $this.data('loupe', arg);
            }
            move = function (e) {
                var os = $small.offset(),
                    sW = $small.outerWidth(),
                    sH = $small.outerHeight(),
                    oW = options.width / 2,
                    oH = options.height / 2;
                // hide when disabled or when the pointer has left the
                // image (with a 10px tolerance band)
                if (!$this.data('loupe') ||
                    e.pageX > sW + os.left + 10 || e.pageX < os.left - 10 ||
                    e.pageY > sH + os.top + 10 || e.pageY < os.top - 10) {
                    return hide();
                }
                time = time ? clearTimeout(time) : 0;
                // centre the loupe window on the cursor
                $loupe.show().css({
                    left: e.pageX - oW,
                    top: e.pageY - oH
                });
                // pan the zoomed image proportionally; |0 truncates to int
                $big.css({
                    left: -(((e.pageX - os.left) / sW) * $big.width() - oW)|0,
                    top: -(((e.pageY - os.top) / sH) * $big.height() - oH)|0
                });
            };
            // build the hidden loupe overlay containing the zoom image
            $loupe = $('<div />')
                .addClass(options.loupe)
                .css({
                    width: options.width,
                    height: options.height,
                    position: 'absolute',
                    overflow: 'hidden'
                })
                .append($big = $('<img />').attr('src', $this.attr($this.is('img') ? 'src' : 'href')).css('position', 'absolute'))
                .mousemove(move)
                .hide()
                .appendTo('body');
            $this.data('loupe', true)
                .mouseenter(move)
                .mouseout(function () {
                    // small delay so moving onto the loupe itself does
                    // not flicker it away
                    time = setTimeout(hide, 10);
                });
        }) : this;
    };
}(jQuery));
| /**
* loupe - an image magnifier for jQuery
* (C) 2010 jdbartlett, MIT license
* http://github.com/jdbartlett/loupe
*/
(function ($) {
$.fn.loupe = function (arg) {
var options = $.extend({
loupe: 'loupe',
width: 200,
height: 150
}, arg || {});
return this.length ? this.each(function () {
var $this = $(this), $big, $loupe,
$small = $this.is('img') ? $this : $this.find('img:first'),
tmove, move, hide = function () { $loupe.hide(); },
time;
if ($this.data('loupe') != null) {
return $this.data('loupe', arg);
}
tmove = function(e) {
var eMouse = null;
if(e !== null && typeof(e.originalEvent.targetTouches)!='undefined' &&
e.originalEvent.targetTouches.length>0)
{
var objTouch = e.originalEvent.targetTouches[e.originalEvent.targetTouches.length-1];
if(objTouch!==null)
{
if(typeof(objTouch.pageX)!='undefined' && !isNaN(objTouch.pageX))
{
eMouse =
{
pageX: objTouch.pageX,
pageY: objTouch.pageY
};
}
else if(typeof(objTouch.clientX)!='undefined' && !isNaN(objTouch.clientX))
{
eMouse =
{
pageX: objTouch.clientX,
pageY: objTouch.clientY
};
}
else if(typeof(objTouch.screenX)!='undefined' && !isNaN(objTouch.screenX))
{
eMouse =
{
pageX: objTouch.screenX,
pageY: objTouch.screenY
};
}
}
}
if(eMouse!==null)
{
e.preventDefault();
return move(eMouse);
}
else
return hide();
};
move = function (e) {
var os = $small.offset(),
sW = $small.outerWidth(),
sH = $small.outerHeight(),
oW = options.width / 2,
oH = options.height / 2;
if (!$this.data('loupe') ||
e.pageX > sW + os.left + 10 || e.pageX < os.left - 10 ||
e.pageY > sH + os.top + 10 || e.pageY < os.top - 10) {
return hide();
}
time = time ? clearTimeout(time) : 0;
$loupe.show().css({
left: e.pageX - oW,
top: e.pageY - oH
});
$big.css({
left: -(((e.pageX - os.left) / sW) * $big.width() - oW)|0,
top: -(((e.pageY - os.top) / sH) * $big.height() - oH)|0
});
};
$loupe = $('<div />')
.addClass(options.loupe)
.css({
width: options.width,
height: options.height,
position: 'absolute',
overflow: 'hidden'
})
.append($big = $('<img />').attr('src', $this.attr($this.is('img') ? 'src' : 'href')).css('position', 'absolute'))
.mousemove(move)
.on('touchstart touchmove', tmove)
.hide()
.appendTo('body');
$this.data('loupe', true)
.mouseenter(move)
.on('touchstart touchenter touchmove', tmove)
.on('mouseout touchend touchleave touchcancel', function () {
time = setTimeout(hide, 10);
});
}) : this;
};
}(jQuery));
|
klipstein/django-blog-zinnia | 1 | zinnia/models.py | """Models of Zinnia"""
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.db.models.signals import post_save
from django.template.defaultfilters import striptags
from django.template.defaultfilters import linebreaks
from django.contrib.comments.moderation import moderator
from django.utils.translation import ugettext_lazy as _
from tagging.fields import TagField
from zinnia.settings import USE_BITLY
from zinnia.settings import UPLOAD_TO
from zinnia.managers import entries_published
from zinnia.managers import EntryPublishedManager
from zinnia.managers import DRAFT, HIDDEN, PUBLISHED
from zinnia.moderator import EntryCommentModerator
from zinnia.signals import ping_directories_handler
from zinnia.signals import ping_external_urls_handler
class Category(models.Model):
    """Category object for Entry"""
    # Display name of the category.
    title = models.CharField(_('title'), max_length=255)
    # URL fragment; unique because it is the lookup key in category URLs.
    slug = models.SlugField(help_text=_('used for publication'),
                            unique=True, max_length=255)
    description = models.TextField(_('description'), blank=True)

    def entries_published_set(self):
        """Return only the entries published"""
        return entries_published(self.entry_set)

    def __unicode__(self):
        return self.title

    @models.permalink
    def get_absolute_url(self):
        # Resolved against the 'zinnia_category_detail' URL pattern.
        return ('zinnia_category_detail', (self.slug, ))

    class Meta:
        verbose_name = _('category')
        verbose_name_plural = _('categories')
        ordering = ['title']
class Entry(models.Model):
"""Base design for publishing entry"""
STATUS_CHOICES = ((DRAFT, _('draft')),
(HIDDEN, _('hidden')),
(PUBLISHED, _('published')))
title = models.CharField(_('title'), max_length=255)
image = models.ImageField(_('image'), upload_to=UPLOAD_TO,
blank=True, help_text=_('used for illustration'))
content = models.TextField(_('content'))
excerpt = models.TextField(_('excerpt'), blank=True,
help_text=_('optional element'))
tags = TagField(_('tags'))
categories = models.ManyToManyField(Category, verbose_name=_('categories'))
related = models.ManyToManyField('self', verbose_name=_('related entries'),
blank=True, null=True)
slug = models.SlugField(help_text=_('used for publication'),
unique_for_date='creation_date',
max_length=255)
authors = models.ManyToManyField(User, verbose_name=_('authors'),
blank=True, null=False)
status = models.IntegerField(choices=STATUS_CHOICES, default=DRAFT)
comment_enabled = models.BooleanField(_('comment enabled'), default=True)
pingback_enabled = models.BooleanField(_('linkback enabled'), default=True)
creation_date = models.DateTimeField(_('creation date'), default=datetime.now)
last_update = models.DateTimeField(_('last update'), default=datetime.now)
start_publication = models.DateTimeField(_('start publication'),
help_text=_('date start publish'),
default=datetime.now)
end_publication = models.DateTimeField(_('end publication'),
help_text=_('date end publish'),
default=datetime(2042, 3, 15))
sites = models.ManyToManyField(Site, verbose_name=_('sites publication'))
objects = models.Manager()
published = EntryPublishedManager()
@property
def html_content(self):
"""Return the content correctly formatted"""
if not '</p>' in self.content:
return linebreaks(self.content)
return self.content
@property
def previous_entry(self):
"""Return the previous entry"""
entries = Entry.published.filter(
creation_date__lt=self.creation_date)[:1]
if entries:
return entries[0]
@property
def next_entry(self):
"""Return the next entry"""
entries = Entry.published.filter(
creation_date__gt=self.creation_date).order_by('creation_date')[:1]
if entries:
return entries[0]
@property
def word_count(self):
"""Count the words of an entry"""
return len(striptags(self.html_content).split())
@property
def is_actual(self):
"""Check if an entry is within publication period"""
now = datetime.now()
return now >= self.start_publication and now < self.end_publication
@property
def is_visible(self):
"""Check if an entry is visible on site"""
return self.is_actual and self.status == PUBLISHED
@property
def related_published_set(self):
"""Return only related entries published"""
return entries_published(self.related)
@property
def discussions(self):
"""Return published discussions"""
from django.contrib.comments.models import Comment
return Comment.objects.for_model(self).filter(is_public=True)
@property
def comments(self):
"""Return published comments"""
return self.discussions.filter(flags=None)
@property
def pingbacks(self):
"""Return published pingbacks"""
return self.discussions.filter(flags__flag='pingback')
@property
def trackbacks(self):
"""Return published trackbacks"""
return self.discussions.filter(flags__flag='trackback')
@property
def short_url(self):
"""Return the entry's short url"""
if not USE_BITLY:
return False
from django_bitly.models import Bittle
bittle = Bittle.objects.bitlify(self)
url = bittle and bittle.shortUrl or self.get_absolute_url()
return url
def __unicode__(self):
return '%s: %s' % (self.title, self.get_status_display())
@models.permalink
def get_absolute_url(self):
return ('zinnia_entry_detail', (), {
'year': self.creation_date.strftime('%Y'),
'month': self.creation_date.strftime('%m'),
'day': self.creation_date.strftime('%d'),
'slug': self.slug})
class Meta:
ordering = ['-creation_date']
verbose_name = _('entry')
verbose_name_plural = _('entries')
permissions = (('can_view_all', 'Can view all'),
('can_change_author', 'Can change author'), )
post_save.connect(ping_directories_handler, sender=Entry)
post_save.connect(ping_external_urls_handler, sender=Entry)
moderator.register(Entry, EntryCommentModerator)
| """Models of Zinnia"""
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.db.models import Q
from django.db.models.signals import post_save
from django.template.defaultfilters import striptags
from django.template.defaultfilters import linebreaks
from django.contrib.comments.moderation import moderator
from django.utils.translation import ugettext_lazy as _
from tagging.fields import TagField
from zinnia.settings import USE_BITLY
from zinnia.settings import UPLOAD_TO
from zinnia.managers import entries_published
from zinnia.managers import EntryPublishedManager
from zinnia.managers import DRAFT, HIDDEN, PUBLISHED
from zinnia.moderator import EntryCommentModerator
from zinnia.signals import ping_directories_handler
from zinnia.signals import ping_external_urls_handler
class Category(models.Model):
"""Category object for Entry"""
title = models.CharField(_('title'), max_length=255)
slug = models.SlugField(help_text=_('used for publication'),
unique=True, max_length=255)
description = models.TextField(_('description'), blank=True)
def entries_published_set(self):
"""Return only the entries published"""
return entries_published(self.entry_set)
def __unicode__(self):
return self.title
@models.permalink
def get_absolute_url(self):
return ('zinnia_category_detail', (self.slug, ))
class Meta:
verbose_name = _('category')
verbose_name_plural = _('categories')
ordering = ['title']
class Entry(models.Model):
"""Base design for publishing entry"""
STATUS_CHOICES = ((DRAFT, _('draft')),
(HIDDEN, _('hidden')),
(PUBLISHED, _('published')))
title = models.CharField(_('title'), max_length=255)
image = models.ImageField(_('image'), upload_to=UPLOAD_TO,
blank=True, help_text=_('used for illustration'))
content = models.TextField(_('content'))
excerpt = models.TextField(_('excerpt'), blank=True,
help_text=_('optional element'))
tags = TagField(_('tags'))
categories = models.ManyToManyField(Category, verbose_name=_('categories'))
related = models.ManyToManyField('self', verbose_name=_('related entries'),
blank=True, null=True)
slug = models.SlugField(help_text=_('used for publication'),
unique_for_date='creation_date',
max_length=255)
authors = models.ManyToManyField(User, verbose_name=_('authors'),
blank=True, null=False)
status = models.IntegerField(choices=STATUS_CHOICES, default=DRAFT)
comment_enabled = models.BooleanField(_('comment enabled'), default=True)
pingback_enabled = models.BooleanField(_('linkback enabled'), default=True)
creation_date = models.DateTimeField(_('creation date'), default=datetime.now)
last_update = models.DateTimeField(_('last update'), default=datetime.now)
start_publication = models.DateTimeField(_('start publication'),
help_text=_('date start publish'),
default=datetime.now)
end_publication = models.DateTimeField(_('end publication'),
help_text=_('date end publish'),
default=datetime(2042, 3, 15))
sites = models.ManyToManyField(Site, verbose_name=_('sites publication'))
objects = models.Manager()
published = EntryPublishedManager()
@property
def html_content(self):
"""Return the content correctly formatted"""
if not '</p>' in self.content:
return linebreaks(self.content)
return self.content
@property
def previous_entry(self):
"""Return the previous entry"""
entries = Entry.published.filter(
creation_date__lt=self.creation_date)[:1]
if entries:
return entries[0]
@property
def next_entry(self):
"""Return the next entry"""
entries = Entry.published.filter(
creation_date__gt=self.creation_date).order_by('creation_date')[:1]
if entries:
return entries[0]
@property
def word_count(self):
"""Count the words of an entry"""
return len(striptags(self.html_content).split())
@property
def is_actual(self):
"""Check if an entry is within publication period"""
now = datetime.now()
return now >= self.start_publication and now < self.end_publication
@property
def is_visible(self):
"""Check if an entry is visible on site"""
return self.is_actual and self.status == PUBLISHED
@property
def related_published_set(self):
"""Return only related entries published"""
return entries_published(self.related)
@property
def discussions(self):
"""Return published discussions"""
from django.contrib.comments.models import Comment
return Comment.objects.for_model(self).filter(is_public=True)
@property
def comments(self):
"""Return published comments"""
# moderator approval is used by django.contrib.comments admin view,
# when approving a comment
return self.discussions.filter(Q(flags=None) | Q(flags__flag='moderator approval')
@property
def pingbacks(self):
"""Return published pingbacks"""
return self.discussions.filter(flags__flag='pingback')
@property
def trackbacks(self):
"""Return published trackbacks"""
return self.discussions.filter(flags__flag='trackback')
@property
def short_url(self):
"""Return the entry's short url"""
if not USE_BITLY:
return False
from django_bitly.models import Bittle
bittle = Bittle.objects.bitlify(self)
url = bittle and bittle.shortUrl or self.get_absolute_url()
return url
def __unicode__(self):
return '%s: %s' % (self.title, self.get_status_display())
@models.permalink
def get_absolute_url(self):
return ('zinnia_entry_detail', (), {
'year': self.creation_date.strftime('%Y'),
'month': self.creation_date.strftime('%m'),
'day': self.creation_date.strftime('%d'),
'slug': self.slug})
class Meta:
ordering = ['-creation_date']
verbose_name = _('entry')
verbose_name_plural = _('entries')
permissions = (('can_view_all', 'Can view all'),
('can_change_author', 'Can change author'), )
post_save.connect(ping_directories_handler, sender=Entry)
post_save.connect(ping_external_urls_handler, sender=Entry)
moderator.register(Entry, EntryCommentModerator)
|
ryanbooker/rubyscript2exe | 4 | require2lib.rb | require "ev/ftools"
require "rbconfig"
require "rubyscript2exe"
exit if __FILE__ == $0
module RUBYSCRIPT2EXE
end
module REQUIRE2LIB
JUSTRUBYLIB = ARGV.include?("--require2lib-justrubylib")
JUSTSITELIB = ARGV.include?("--require2lib-justsitelib")
RUBYGEMS = (not JUSTRUBYLIB)
VERBOSE = ARGV.include?("--require2lib-verbose")
QUIET = (ARGV.include?("--require2lib-quiet") and not VERBOSE)
LOADED = []
ARGV.delete_if{|arg| arg =~ /^--require2lib-/}
ORGDIR = Dir.pwd
THISFILE = File.expand_path(__FILE__)
LIBDIR = File.expand_path((ENV["REQUIRE2LIB_LIBDIR"] or "."))
LOADSCRIPT = File.expand_path((ENV["REQUIRE2LIB_LOADSCRIPT"] or "."))
RUBYLIBDIR = Config::CONFIG["rubylibdir"]
SITELIBDIR = Config::CONFIG["sitelibdir"]
at_exit do
Dir.chdir(ORGDIR)
REQUIRE2LIB.gatherlibs
end
def self.gatherlibs
$stderr.puts "Gathering files..." unless QUIET
File.makedirs(LIBDIR)
if RUBYGEMS
begin
Gem.dir
rubygems = true
rescue NameError
rubygems = false
end
else
rubygems = false
end
pureruby = true
if rubygems
require "fileutils" # Hack ???
requireablefiles = []
Dir.mkdir(File.expand_path("rubyscript2exe.gems", LIBDIR))
Dir.mkdir(File.expand_path("rubyscript2exe.gems/gems", LIBDIR))
Dir.mkdir(File.expand_path("rubyscript2exe.gems/specifications", LIBDIR))
Gem.loaded_specs.each do |key, gem|
$stderr.puts "Found gem #{gem.name} (#{gem.version})." if VERBOSE
fromdir = File.join(gem.installation_path, "specifications")
todir = File.expand_path("rubyscript2exe.gems/specifications", LIBDIR)
fromfile = File.join(fromdir, "#{gem.full_name}.gemspec")
tofile = File.join(todir, "#{gem.full_name}.gemspec")
File.copy(fromfile, tofile)
fromdir = gem.full_gem_path
todir = File.expand_path(File.join("rubyscript2exe.gems/gems", gem.full_name), LIBDIR)
Dir.copy(fromdir, todir)
Dir.find(todir).each do |file|
if File.file?(file)
gem.require_paths.each do |lib|
unless lib.empty?
lib = File.expand_path(lib, todir)
lib = lib + "/"
requireablefiles << file[lib.length..-1] if file =~ /^#{lib.gsub('+', '\+')}/
end
end
end
end
end
end
($" + LOADED).each do |req|
catch :found do
$:.each do |lib|
fromfile = File.expand_path(req, lib)
tofile = File.expand_path(req, LIBDIR)
if File.file?(fromfile)
unless fromfile == tofile or fromfile == THISFILE
unless (rubygems and requireablefiles.include?(req)) # ??? requireablefiles might be a little dangerous.
if (not JUSTRUBYLIB and not JUSTSITELIB) or
(JUSTRUBYLIB and fromfile.include?(RUBYLIBDIR)) or
(JUSTSITELIB and fromfile.include?(SITELIBDIR))
$stderr.puts "Found #{fromfile} ." if VERBOSE
File.makedirs(File.dirname(tofile)) unless File.directory?(File.dirname(tofile))
File.copy(fromfile, tofile)
pureruby = false unless req =~ /\.(rbw?|ruby)$/i
else
$stderr.puts "Skipped #{fromfile} ." if VERBOSE
end
end
end
throw :found
end
end
#$stderr.puts "Can't find #{req} ." unless req =~ /^ev\// or QUIET
#$stderr.puts "Can't find #{req} ." unless req =~ /^(\w:)?[\/\\]/ or QUIET
end
end
$stderr.puts "Not all required files are pure Ruby." unless pureruby if VERBOSE
unless LOADSCRIPT == ORGDIR
File.open(LOADSCRIPT, "w") do |f|
f.puts "module RUBYSCRIPT2EXE"
f.puts " REQUIRE2LIB_FROM_APP={}"
RUBYSCRIPT2EXE.class_variables.each do |const|
const = const[2..-1]
f.puts " REQUIRE2LIB_FROM_APP[:#{const}]=#{RUBYSCRIPT2EXE.send(const).inspect}"
end
f.puts " REQUIRE2LIB_FROM_APP[:rubygems]=#{rubygems.inspect}"
f.puts "end"
end
end
end
end
module Kernel
alias :require2lib_load :load
def load(filename, wrap=false)
REQUIRE2LIB::LOADED << filename unless REQUIRE2LIB::LOADED.include?(filename)
require2lib_load(filename, wrap)
end
end
| require "ev/ftools"
require "rbconfig"
require "rubyscript2exe"
exit if __FILE__ == $0
module RUBYSCRIPT2EXE
end
module REQUIRE2LIB
JUSTRUBYLIB = ARGV.include?("--require2lib-justrubylib")
JUSTSITELIB = ARGV.include?("--require2lib-justsitelib")
RUBYGEMS = (not JUSTRUBYLIB)
VERBOSE = ARGV.include?("--require2lib-verbose")
QUIET = (ARGV.include?("--require2lib-quiet") and not VERBOSE)
LOADED = []
ARGV.delete_if{|arg| arg =~ /^--require2lib-/}
ORGDIR = Dir.pwd
THISFILE = File.expand_path(__FILE__)
LIBDIR = File.expand_path((ENV["REQUIRE2LIB_LIBDIR"] or "."))
LOADSCRIPT = File.expand_path((ENV["REQUIRE2LIB_LOADSCRIPT"] or "."))
RUBYLIBDIR = RbConfig::CONFIG["rubylibdir"]
SITELIBDIR = RbConfig::CONFIG["sitelibdir"]
at_exit do
Dir.chdir(ORGDIR)
REQUIRE2LIB.gatherlibs
end
def self.gatherlibs
$stderr.puts "Gathering files..." unless QUIET
FileUtils.makedirs(LIBDIR)
if RUBYGEMS
begin
Gem.dir
rubygems = true
rescue NameError
rubygems = false
end
else
rubygems = false
end
pureruby = true
if rubygems
require "fileutils" # Hack ???
requireablefiles = []
Dir.mkdir(File.expand_path("rubyscript2exe.gems", LIBDIR))
Dir.mkdir(File.expand_path("rubyscript2exe.gems/gems", LIBDIR))
Dir.mkdir(File.expand_path("rubyscript2exe.gems/specifications", LIBDIR))
Gem.loaded_specs.each do |key, gem|
$stderr.puts "Found gem #{gem.name} (#{gem.version})." if VERBOSE
fromdir = File.join(gem.installation_path, "specifications")
todir = File.expand_path("rubyscript2exe.gems/specifications", LIBDIR)
fromfile = File.join(fromdir, "#{gem.full_name}.gemspec")
tofile = File.join(todir, "#{gem.full_name}.gemspec")
FileUtils.copy(fromfile, tofile)
fromdir = gem.full_gem_path
todir = File.expand_path(File.join("rubyscript2exe.gems/gems", gem.full_name), LIBDIR)
Dir.copy(fromdir, todir)
Dir.find(todir).each do |file|
if File.file?(file)
gem.require_paths.each do |lib|
unless lib.empty?
lib = File.expand_path(lib, todir)
lib = lib + "/"
requireablefiles << file[lib.length..-1] if file =~ /^#{lib.gsub('+', '\+')}/
end
end
end
end
end
end
($" + LOADED).each do |req|
catch :found do
$:.each do |lib|
fromfile = File.expand_path(req, lib)
tofile = File.expand_path(req, LIBDIR)
if File.file?(fromfile)
unless fromfile == tofile or fromfile == THISFILE
unless (rubygems and requireablefiles.include?(req)) # ??? requireablefiles might be a little dangerous.
if (not JUSTRUBYLIB and not JUSTSITELIB) or
(JUSTRUBYLIB and fromfile.include?(RUBYLIBDIR)) or
(JUSTSITELIB and fromfile.include?(SITELIBDIR))
$stderr.puts "Found #{fromfile} ." if VERBOSE
FileUtils.makedirs(File.dirname(tofile)) unless File.directory?(File.dirname(tofile))
FileUtils.copy(fromfile, tofile)
pureruby = false unless req =~ /\.(rbw?|ruby)$/i
else
$stderr.puts "Skipped #{fromfile} ." if VERBOSE
end
end
end
throw :found
end
end
#$stderr.puts "Can't find #{req} ." unless req =~ /^ev\// or QUIET
#$stderr.puts "Can't find #{req} ." unless req =~ /^(\w:)?[\/\\]/ or QUIET
end
end
$stderr.puts "Not all required files are pure Ruby." unless pureruby if VERBOSE
unless LOADSCRIPT == ORGDIR
File.open(LOADSCRIPT, "w") do |f|
f.puts "module RUBYSCRIPT2EXE"
f.puts " REQUIRE2LIB_FROM_APP={}"
RUBYSCRIPT2EXE.class_variables.each do |const|
const = const[2..-1]
f.puts " REQUIRE2LIB_FROM_APP[:#{const}]=#{RUBYSCRIPT2EXE.send(const).inspect}"
end
f.puts " REQUIRE2LIB_FROM_APP[:rubygems]=#{rubygems.inspect}"
f.puts "end"
end
end
end
end
module Kernel
alias :require2lib_load :load
def load(filename, wrap=false)
REQUIRE2LIB::LOADED << filename unless REQUIRE2LIB::LOADED.include?(filename)
require2lib_load(filename, wrap)
end
end
|
davidmoreno/onion | 305 | src/onion/poller_libev.c | /**
Onion HTTP server library
Copyright (C) 2010-2018 David Moreno Montero and others
This library is free software; you can redistribute it and/or
modify it under the terms of, at your choice:
a. the Apache License Version 2.0.
b. the GNU General Public License as published by the
Free Software Foundation; either version 2.0 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of both licenses, if not see
<http://www.gnu.org/licenses/> and
<http://www.apache.org/licenses/LICENSE-2.0>.
*/
#include <ev.h>
#include <stdlib.h>
#include <semaphore.h>
#include "poller.h"
#include "log.h"
#include "low.h"
struct onion_poller_t {
struct ev_loop *loop;
sem_t sem;
volatile int stop;
};
struct onion_poller_slot_t {
int fd;
int timeout;
int type;
ev_io ev;
void *data;
int (*f) (void *);
void *shutdown_data;
void (*shutdown) (void *);
onion_poller *poller;
};
/// Create a new slot for the poller
onion_poller_slot *onion_poller_slot_new(int fd, int (*f) (void *), void *data) {
onion_poller_slot *ret = onion_low_calloc(1, sizeof(onion_poller_slot));
ret->fd = fd;
ret->f = f;
ret->data = data;
ret->type = EV_READ | EV_WRITE;
return ret;
}
/// Cleans a poller slot. Do not call if already on the poller (onion_poller_add). Use onion_poller_remove instead.
void onion_poller_slot_free(onion_poller_slot * el) {
if (el->poller)
ev_io_stop(el->poller->loop, &el->ev);
if (el->shutdown)
el->shutdown(el->shutdown_data);
}
/// Sets the shutdown function for this poller slot
void onion_poller_slot_set_shutdown(onion_poller_slot * el,
void (*shutdown) (void *), void *data) {
el->shutdown = shutdown;
el->shutdown_data = data;
}
/// Sets the timeout for this slot. Current implementation takes ms, but then it rounds to seconds.
void onion_poller_slot_set_timeout(onion_poller_slot * el, int timeout_ms) {
el->timeout = timeout_ms;
}
/// Sets the polling type: read/write/other. O_POLL_READ | O_POLL_WRITE | O_POLL_OTHER
void onion_poller_slot_set_type(onion_poller_slot * el, int type) {
el->type = 0;
if (type & O_POLL_READ)
el->type |= EV_READ;
if (type & O_POLL_WRITE)
el->type |= EV_WRITE;
}
/// Create a new poller
onion_poller *onion_poller_new(int aprox_n) {
onion_poller *ret = onion_low_calloc(1, sizeof(onion_poller));
ret->loop = ev_default_loop(0);
sem_init(&ret->sem, 0, 1);
return ret;
}
/// Frees the poller. It first stops it.
void onion_poller_free(onion_poller * p) {
onion_low_free(p);
}
static void event_callback(struct ev_loop *loop, ev_io * w, int revents) {
onion_poller_slot *s = w->data;
int res = s->f(s->data);
if (res < 0) {
onion_poller_slot_free(s);
}
}
/// Adds a slot to the poller
int onion_poller_add(onion_poller * poller, onion_poller_slot * el) {
el->poller = poller;
// normally would use ev_io_init bellow, but gcc on F18+ give a
// "dereferencing type-punned pointer will break strict-aliasing rules" error
// So the macro must be expanded and this is what we get. In the future this may
// give bugs on libevent if this changes.
//ev_io_init(&el->ev, event_callback, el->fd, el->type);
// expands to ev_init + ev_io_set. ev_init expand more or less as bellow
//ev_init(&el->ev, event_callback);
{
ev_watcher *ew = (void *)(&el->ev); // ew must exit to prevent the before mentioned error.
ew->active = 0;
ew->pending = 0;
ew->priority = 0;
el->ev.cb = event_callback;
}
ev_io_set(&el->ev, el->fd, el->type);
el->ev.data = el;
// if (el->timeout>0){
// event_add(el->ev, &tv);
// }
// else{
// event_add(el->ev, NULL);
// }
ev_io_start(poller->loop, &el->ev);
return 1;
}
/// Removes a fd from the poller
int onion_poller_remove(onion_poller * poller, int fd) {
ONION_ERROR("FIXME!! not removing fd %d", fd);
return -1;
}
/// Gets the poller to do some modifications as change shutdown
onion_poller_slot *onion_poller_get(onion_poller * poller, int fd) {
ONION_ERROR("Not implemented! Use epoll poller.");
return NULL;
}
/// Do the polling. If on several threads, this is done in every thread.
void onion_poller_poll(onion_poller * poller) {
ev_default_fork();
ev_loop_fork(poller->loop);
poller->stop = 0;
while (!poller->stop) {
sem_wait(&poller->sem);
ev_run(poller->loop, EVLOOP_ONESHOT);
sem_post(&poller->sem);
}
}
/// Stops the polling. This only marks the flag, and should be cancelled with pthread_cancel.
void onion_poller_stop(onion_poller * poller) {
poller->stop = 1;
ev_break(poller->loop, EVBREAK_ALL);
}
// Not implemented for libev
void onion_poller_set_queue_size_per_thread(onion_poller * poller, size_t count) {
ONION_WARNING
("onion_poller_queue_size_per_thread only used with epoll polling, not libev.");
}
| /**
Onion HTTP server library
Copyright (C) 2010-2018 David Moreno Montero and others
This library is free software; you can redistribute it and/or
modify it under the terms of, at your choice:
a. the Apache License Version 2.0.
b. the GNU General Public License as published by the
Free Software Foundation; either version 2.0 of the License,
or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of both licenses, if not see
<http://www.gnu.org/licenses/> and
<http://www.apache.org/licenses/LICENSE-2.0>.
*/
#include <ev.h>
#include <stdlib.h>
#include <semaphore.h>
#include "poller.h"
#include "log.h"
#include "low.h"
struct onion_poller_t {
struct ev_loop *loop;
sem_t sem;
volatile int stop;
};
struct onion_poller_slot_t {
int fd;
int timeout;
int type;
ev_io ev;
void *data;
int (*f) (void *);
void *shutdown_data;
void (*shutdown) (void *);
onion_poller *poller;
};
/// Create a new slot for the poller
onion_poller_slot *onion_poller_slot_new(int fd, int (*f) (void *), void *data) {
onion_poller_slot *ret = onion_low_calloc(1, sizeof(onion_poller_slot));
ret->fd = fd;
ret->f = f;
ret->data = data;
ret->type = EV_READ | EV_WRITE;
return ret;
}
/// Cleans a poller slot. Do not call if already on the poller (onion_poller_add). Use onion_poller_remove instead.
void onion_poller_slot_free(onion_poller_slot * el) {
if (el->poller)
ev_io_stop(el->poller->loop, &el->ev);
if (el->shutdown)
el->shutdown(el->shutdown_data);
}
/// Sets the shutdown function for this poller slot
void onion_poller_slot_set_shutdown(onion_poller_slot * el,
void (*shutdown) (void *), void *data) {
el->shutdown = shutdown;
el->shutdown_data = data;
}
/// Sets the timeout for this slot. Current implementation takes ms, but then it rounds to seconds.
void onion_poller_slot_set_timeout(onion_poller_slot * el, int timeout_ms) {
el->timeout = timeout_ms;
}
/// Sets the polling type: read/write/other. O_POLL_READ | O_POLL_WRITE | O_POLL_OTHER
void onion_poller_slot_set_type(onion_poller_slot * el, onion_poller_slot_type_e type) {
el->type = 0;
if (type & O_POLL_READ)
el->type |= EV_READ;
if (type & O_POLL_WRITE)
el->type |= EV_WRITE;
}
/// Create a new poller
onion_poller *onion_poller_new(int aprox_n) {
onion_poller *ret = onion_low_calloc(1, sizeof(onion_poller));
ret->loop = ev_default_loop(0);
sem_init(&ret->sem, 0, 1);
return ret;
}
/// Frees the poller. It first stops it.
void onion_poller_free(onion_poller * p) {
onion_low_free(p);
}
static void event_callback(struct ev_loop *loop, ev_io * w, int revents) {
onion_poller_slot *s = w->data;
int res = s->f(s->data);
if (res < 0) {
onion_poller_slot_free(s);
}
}
/// Adds a slot to the poller
int onion_poller_add(onion_poller * poller, onion_poller_slot * el) {
el->poller = poller;
// normally would use ev_io_init bellow, but gcc on F18+ give a
// "dereferencing type-punned pointer will break strict-aliasing rules" error
// So the macro must be expanded and this is what we get. In the future this may
// give bugs on libevent if this changes.
//ev_io_init(&el->ev, event_callback, el->fd, el->type);
// expands to ev_init + ev_io_set. ev_init expand more or less as bellow
//ev_init(&el->ev, event_callback);
{
ev_watcher *ew = (void *)(&el->ev); // ew must exit to prevent the before mentioned error.
ew->active = 0;
ew->pending = 0;
ew->priority = 0;
el->ev.cb = event_callback;
}
ev_io_set(&el->ev, el->fd, el->type);
el->ev.data = el;
// if (el->timeout>0){
// event_add(el->ev, &tv);
// }
// else{
// event_add(el->ev, NULL);
// }
ev_io_start(poller->loop, &el->ev);
return 1;
}
/// Removes a fd from the poller
int onion_poller_remove(onion_poller * poller, int fd) {
ONION_ERROR("FIXME!! not removing fd %d", fd);
return -1;
}
/// Gets the poller to do some modifications as change shutdown
onion_poller_slot *onion_poller_get(onion_poller * poller, int fd) {
ONION_ERROR("Not implemented! Use epoll poller.");
return NULL;
}
/// Do the polling. If on several threads, this is done in every thread.
void onion_poller_poll(onion_poller * poller) {
ev_default_fork();
ev_loop_fork(poller->loop);
poller->stop = 0;
while (!poller->stop) {
sem_wait(&poller->sem);
ev_run(poller->loop, EVLOOP_ONESHOT);
sem_post(&poller->sem);
}
}
/// Stops the polling. This only marks the flag, and should be cancelled with pthread_cancel.
void onion_poller_stop(onion_poller * poller) {
poller->stop = 1;
ev_break(poller->loop, EVBREAK_ALL);
}
// Not implemented for libev
void onion_poller_set_queue_size_per_thread(onion_poller * poller, size_t count) {
ONION_WARNING
("onion_poller_queue_size_per_thread only used with epoll polling, not libev.");
}
|
dasmoth/dalliance | 243 | js/jbjson.js | /* -*- mode: javascript; c-basic-offset: 4; indent-tabs-mode: nil -*- */
//
// Dalliance Genome Explorer
// (c) Thomas Down 2006-2013
//
// jbjson.js -- query JBrowse-style REST data stores
//
if (typeof(require) !== 'undefined') {
var das = require('./das');
var DASStylesheet = das.DASStylesheet;
var DASStyle = das.DASStyle;
var DASFeature = das.DASFeature;
var DASGroup = das.DASGroup;
var utils = require('./utils');
var shallowCopy = utils.shallowCopy;
var spans = require('./spans');
var Range = spans.Range;
var union = spans.union;
var intersection = spans.intersection;
}
function JBrowseStore(base, query) {
this.base = base;
this.query = query;
}
function jbori(strand) {
if (strand > 0)
return '+';
else if (strand < 0)
return '-';
}
JBrowseStore.prototype.features = function(segment, opts, callback) {
opts = opts || {};
url = this.base + '/features/' + segment.name;
var filters = [];
if (this.query) {
filters.push(this.query);
}
if (segment.isBounded) {
filters.push('start=' + segment.start);
filters.push('end=' + segment.end);
}
if (filters.length > 0) {
url = url + '?' + filters.join('&');
}
var req = new XMLHttpRequest();
req.onreadystatechange = function() {
if (req.readyState == 4) {
if (req.status >= 300) {
callback(null, 'Error code ' + req.status);
} else {
var jf = JSON.parse(req.response)['features'];
var features = [];
for (fi = 0; fi < jf.length; ++fi) {
var j = jf[fi];
var f = new DASFeature();
f.segment = segment.name;
f.min = (j['start'] | 0) + 1;
f.max = j['end'] | 0;
if (j.name) {
f.label = j.name;
}
if (j.strand)
f.orientation = jbori(j.strand);
f.type = j.type || 'unknown';
if (j.subfeatures && j.subfeatures.length > 0) {
f.id = j.uniqueID;
var blocks = [];
var cds = [];
var all = [];
for (var si = 0; si < j.subfeatures.length; ++si) {
var sj = j.subfeatures[si];
var sf = shallowCopy(f);
sf.min = sj.start + 1;
sf.max = sj.end;
sf.groups = [f];
all.push(sf);
blocks.push(new Range(sf.min, sf.max));
if (sj.type === 'CDS')
cds.push(sf);
}
if (cds.length > 0) {
spans = union(blocks);
var txGroup = shallowCopy(f);
txGroup.type = 'transcript';
spans.ranges().forEach(function(exon) {
features.push({
segment: segment.name,
min: exon.min(),
max: exon.max(),
orientation: f.orientation,
groups: [txGroup],
type: 'transcript'
});
});
var tlGroup = shallowCopy(f);
cds.forEach(function(cdsExon) {
cdsExon.type = 'translation'
cdsExon.groups = [tlGroup];
features.push(cdsExon);
});
} else {
all.forEach(function(f) {
features.push(f);
});
}
} else {
features.push(f);
}
}
callback(features);
}
}
};
req.open('GET', url, true);
req.responseType = 'text';
req.send();
}
if (typeof(module) !== 'undefined') {
module.exports = {
JBrowseStore: JBrowseStore
};
}
| /* -*- mode: javascript; c-basic-offset: 4; indent-tabs-mode: nil -*- */
//
// Dalliance Genome Explorer
// (c) Thomas Down 2006-2013
//
// jbjson.js -- query JBrowse-style REST data stores
//
if (typeof(require) !== 'undefined') {
var das = require('./das');
var DASStylesheet = das.DASStylesheet;
var DASStyle = das.DASStyle;
var DASFeature = das.DASFeature;
var DASGroup = das.DASGroup;
var utils = require('./utils');
var shallowCopy = utils.shallowCopy;
var spans = require('./spans');
var Range = spans.Range;
var union = spans.union;
var intersection = spans.intersection;
}
function JBrowseStore(base, query) {
this.base = base;
this.query = query;
}
function jbori(strand) {
if (strand > 0)
return '+';
else if (strand < 0)
return '-';
}
JBrowseStore.prototype.features = function(segment, opts, callback) {
opts = opts || {};
var url = this.base + '/features/' + segment.name;
var filters = [];
if (this.query) {
filters.push(this.query);
}
if (segment.isBounded) {
filters.push('start=' + segment.start);
filters.push('end=' + segment.end);
}
if (filters.length > 0) {
url = url + '?' + filters.join('&');
}
var req = new XMLHttpRequest();
req.onreadystatechange = function() {
if (req.readyState == 4) {
if (req.status >= 300) {
callback(null, 'Error code ' + req.status);
} else {
var jf = JSON.parse(req.response)['features'];
var features = [];
for (var fi = 0; fi < jf.length; ++fi) {
var j = jf[fi];
var f = new DASFeature();
f.segment = segment.name;
f.min = (j['start'] | 0) + 1;
f.max = j['end'] | 0;
if (j.name) {
f.label = j.name;
}
if (j.strand)
f.orientation = jbori(j.strand);
f.type = j.type || 'unknown';
if (j.subfeatures && j.subfeatures.length > 0) {
f.id = j.uniqueID;
var blocks = [];
var cds = [];
var all = [];
for (var si = 0; si < j.subfeatures.length; ++si) {
var sj = j.subfeatures[si];
var sf = shallowCopy(f);
sf.min = sj.start + 1;
sf.max = sj.end;
sf.groups = [f];
all.push(sf);
blocks.push(new Range(sf.min, sf.max));
if (sj.type === 'CDS')
cds.push(sf);
}
if (cds.length > 0) {
spans = union(blocks);
var txGroup = shallowCopy(f);
txGroup.type = 'transcript';
spans.ranges().forEach(function(exon) {
features.push({
segment: segment.name,
min: exon.min(),
max: exon.max(),
orientation: f.orientation,
groups: [txGroup],
type: 'transcript'
});
});
var tlGroup = shallowCopy(f);
cds.forEach(function(cdsExon) {
cdsExon.type = 'translation'
cdsExon.groups = [tlGroup];
features.push(cdsExon);
});
} else {
all.forEach(function(f) {
features.push(f);
});
}
} else {
features.push(f);
}
}
callback(features);
}
}
};
req.open('GET', url, true);
req.responseType = 'text';
req.send();
}
if (typeof(module) !== 'undefined') {
module.exports = {
JBrowseStore: JBrowseStore
};
}
|
troelskn/bucket | 9 | bucket.test.php | <?php
require_once 'simpletest/unit_tester.php';
if (realpath($_SERVER['PHP_SELF']) == __FILE__) {
error_reporting(E_ALL | E_STRICT);
require_once 'simpletest/autorun.php';
}
require_once 'lib/bucket.inc.php';
class NoDependencies {}
class ExtendsNoDependencies extends NoDependencies {}
class SingleClassDependency {
public $val;
function __construct(NoDependencies $val) {
$this->val = $val;
}
}
class DefaultValue {
public $val;
function __construct($val = 42) {
$this->val = $val;
}
}
class UnTypedDependency {
public $val;
function __construct($val) {
$this->val = $val;
}
}
interface AnInterface {};
class ConcreteImplementation implements AnInterface {}
class DependsOnInterface {
public $val;
function __construct(AnInterface $val) {
$this->val = $val;
}
}
class TestFactory {
public $invoked = false;
function new_NoDependencies($container) {
$this->invoked = true;
return new NoDependencies();
}
function new_ConcreteImplementation($container) {
$this->invoked = true;
return new NoDependencies();
}
}
class RequireUndefinedClass {
function __construct(ClassThatDoesntExist $autoloaded) {}
}
class TriedToAutoloadException extends Exception {
public $classname;
function __construct($classname) {
$this->classname = $classname;
parent::__construct();
}
}
function test_autoload_fail($classname) {
throw new TriedToAutoloadException($classname);
}
class TestUnderscoreCallFactory {
public $invoked = false;
function __call($name, $args) {
$this->invoked = true;
return new StdClass();
}
}
class TestOfBucketAutoload extends UnitTestCase {
function setUp() {
$this->spl_autoload_functions = spl_autoload_functions();
if ($this->spl_autoload_functions) {
foreach ($this->spl_autoload_functions as $fn) {
spl_autoload_unregister($fn);
}
}
}
function tearDown() {
if (spl_autoload_functions()) {
foreach (spl_autoload_functions() as $fn) {
spl_autoload_unregister($fn);
}
}
if ($this->spl_autoload_functions) {
foreach ($this->spl_autoload_functions as $fn) {
spl_autoload_register($fn);
}
}
}
function test_undefined_class_triggers_autoload() {
spl_autoload_register('test_autoload_fail');
$bucket = new bucket_Container();
$this->expectException('TriedToAutoloadException');
$bucket->create('RequireUndefinedClass');
}
function test_autoload_gets_canonical_classname() {
spl_autoload_register('test_autoload_fail');
$bucket = new bucket_Container();
try {
$bucket->create('RequireUndefinedClass');
$this->fail("Expected TriedToAutoloadException");
} catch (TriedToAutoloadException $ex) {
$this->assertEqual($ex->classname, 'ClassThatDoesntExist');
}
}
}
class TestOfBucketResolving extends UnitTestCase {
function test_can_create_empty_container() {
$bucket = new bucket_Container();
}
function test_can_create_class_with_no_dependencies() {
$bucket = new bucket_Container();
$this->assertIsA($bucket->create('NoDependencies'), 'NoDependencies');
}
function test_can_create_class_with_class_dependency() {
$bucket = new bucket_Container();
$o = $bucket->create('SingleClassDependency');
$this->assertIsA($o, 'SingleClassDependency');
$this->assertIsA($o->val, 'NoDependencies');
}
function test_can_create_class_with_default_value() {
$bucket = new bucket_Container();
$o = $bucket->create('DefaultValue');
$this->assertIsA($o, 'DefaultValue');
$this->assertEqual($o->val, 42);
}
function test_barks_on_untyped_dependency() {
$bucket = new bucket_Container();
try {
$bucket->create('UnTypedDependency');
$this->fail("Expected exception");
} catch (bucket_CreationException $ex) {
$this->pass("Exception caught");
}
}
function test_barks_on_interface_dependency_when_unregistered() {
$bucket = new bucket_Container();
try {
$bucket->create('DependsOnInterface');
$this->fail("Expected exception");
} catch (bucket_CreationException $ex) {
$this->pass("Exception caught");
}
}
function test_can_create_class_with_interface_dependency() {
$bucket = new bucket_Container();
$bucket->registerImplementation('AnInterface', 'ConcreteImplementation');
$o = $bucket->create('DependsOnInterface');
$this->assertIsA($o, 'DependsOnInterface');
$this->assertIsA($o->val, 'ConcreteImplementation');
}
function test_can_set_different_implementation_for_concrete_class() {
$bucket = new bucket_Container();
$bucket->registerImplementation('NoDependencies', 'ExtendsNoDependencies');
$o = $bucket->create('SingleClassDependency');
$this->assertIsA($o, 'SingleClassDependency');
$this->assertIsA($o->val, 'ExtendsNoDependencies');
}
}
class TestOfBucketContainer extends UnitTestCase {
function test_get_creates_new_object() {
$bucket = new bucket_Container();
$this->assertIsA($bucket->get('NoDependencies'), 'NoDependencies');
}
function test_get_returns_same_instance_on_subsequent_calls() {
$bucket = new bucket_Container();
$this->assertSame(
$bucket->get('NoDependencies'),
$bucket->get('NoDependencies'));
}
}
class TestOfBucketFactory extends UnitTestCase {
function test_container_delegates_to_factory_method() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('NoDependencies'), 'NoDependencies');
$this->assertTrue($factory->invoked);
}
function test_container_can_return_different_implementation() {
$bucket = new bucket_Container(new TestFactory());
$this->assertIsA($bucket->get('ConcreteImplementation'), 'NoDependencies');
}
function test_container_delegates_to_factory_callback() {
$factory = new TestFactory();
$factory->new_defaultvalue = create_function('', 'return new StdClass();');
// For PHP 5.3+
// $factory->new_defaultvalue = function($container) {
// return new StdClass();
// }
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('DefaultValue'), 'StdClass');
}
function test_callback_takes_precedence_over_method() {
$factory = new TestFactory();
$factory->new_nodependencies = create_function('', 'return new StdClass();');
// For PHP 5.3+
// $factory->new_nodependencies = function($container) {
// return new StdClass();
// }
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('NoDependencies'), 'StdClass');
}
function test_container_can_take_array_of_callbacks_as_argument() {
$bucket = new bucket_Container(
array(
'DefaultValue' => create_function('', 'return new StdClass();')
)
);
// For PHP 5.3+
// $bucket = new bucket_Container(
// array(
// 'DefaultValue' => function($container) {
// return new StdClass();
// }
// )
// );
$this->assertIsA($bucket->get('DefaultValue'), 'StdClass');
}
function test_underscore_call_is_callable() {
$factory = new TestUnderscoreCallFactory();
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('StdClass'), 'StdClass');
$this->assertTrue($factory->invoked);
}
}
class TestOfBucketScope extends UnitTestCase {
function test_a_child_scope_uses_parent_factory() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$this->assertIsA($scope->get('NoDependencies'), 'NoDependencies');
$this->assertTrue($factory->invoked);
}
function test_get_on_a_child_scope_returns_same_instance_on_subsequent_calls() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$this->assertSame(
$scope->get('NoDependencies'),
$scope->get('NoDependencies'));
}
function test_get_on_a_child_scope_returns_parent_state() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$o = $bucket->get('NoDependencies');
$this->assertSame(
$o,
$scope->get('NoDependencies'));
}
function test_parent_scope_doesnt_see_child_state() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$o = $scope->get('NoDependencies');
$this->assertFalse($o === $bucket->get('NoDependencies'));
}
function test_setting_an_instance_and_getting_it_should_return_same_instance() {
$bucket = new bucket_Container();
$obj = new StdClass();
$bucket->set($obj);
$this->assertSame($bucket->get('StdClass'), $obj);
}
} | <?php
require_once 'vendor/simpletest/simpletest/unit_tester.php';
if (realpath($_SERVER['PHP_SELF']) == __FILE__) {
error_reporting(E_ALL | E_STRICT);
require_once 'vendor/simpletest/simpletest/autorun.php';
}
require_once 'lib/bucket.inc.php';
class NoDependencies {}
class ExtendsNoDependencies extends NoDependencies {}
class SingleClassDependency {
public $val;
function __construct(NoDependencies $val) {
$this->val = $val;
}
}
class DefaultValue {
public $val;
function __construct($val = 42) {
$this->val = $val;
}
}
class UnTypedDependency {
public $val;
function __construct($val) {
$this->val = $val;
}
}
interface AnInterface {};
class ConcreteImplementation implements AnInterface {}
class DependsOnInterface {
public $val;
function __construct(AnInterface $val) {
$this->val = $val;
}
}
class TestFactory {
public $invoked = false;
function new_NoDependencies($container) {
$this->invoked = true;
return new NoDependencies();
}
function new_ConcreteImplementation($container) {
$this->invoked = true;
return new NoDependencies();
}
}
class RequireUndefinedClass {
function __construct(ClassThatDoesntExist $autoloaded) {}
}
class TriedToAutoloadException extends Exception {
public $classname;
function __construct($classname) {
$this->classname = $classname;
parent::__construct();
}
}
function test_autoload_fail($classname) {
throw new TriedToAutoloadException($classname);
}
class TestUnderscoreCallFactory {
public $invoked = false;
function __call($name, $args) {
$this->invoked = true;
return new StdClass();
}
}
class TestOfBucketAutoload extends UnitTestCase {
function setUp() {
$this->spl_autoload_functions = spl_autoload_functions();
if ($this->spl_autoload_functions) {
foreach ($this->spl_autoload_functions as $fn) {
spl_autoload_unregister($fn);
}
}
}
function tearDown() {
if (spl_autoload_functions()) {
foreach (spl_autoload_functions() as $fn) {
spl_autoload_unregister($fn);
}
}
if ($this->spl_autoload_functions) {
foreach ($this->spl_autoload_functions as $fn) {
spl_autoload_register($fn);
}
}
}
function test_undefined_class_triggers_autoload() {
spl_autoload_register('test_autoload_fail');
$bucket = new bucket_Container();
$this->expectException('TriedToAutoloadException');
$bucket->create('RequireUndefinedClass');
}
function test_autoload_gets_canonical_classname() {
spl_autoload_register('test_autoload_fail');
$bucket = new bucket_Container();
try {
$bucket->create('RequireUndefinedClass');
$this->fail("Expected TriedToAutoloadException");
} catch (TriedToAutoloadException $ex) {
$this->assertEqual($ex->classname, 'ClassThatDoesntExist');
}
}
}
class TestOfBucketResolving extends UnitTestCase {
function test_can_create_empty_container() {
$bucket = new bucket_Container();
}
function test_can_create_class_with_no_dependencies() {
$bucket = new bucket_Container();
$this->assertIsA($bucket->create('NoDependencies'), 'NoDependencies');
}
function test_can_create_class_with_class_dependency() {
$bucket = new bucket_Container();
$o = $bucket->create('SingleClassDependency');
$this->assertIsA($o, 'SingleClassDependency');
$this->assertIsA($o->val, 'NoDependencies');
}
function test_can_create_class_with_default_value() {
$bucket = new bucket_Container();
$o = $bucket->create('DefaultValue');
$this->assertIsA($o, 'DefaultValue');
$this->assertEqual($o->val, 42);
}
function test_barks_on_untyped_dependency() {
$bucket = new bucket_Container();
try {
$bucket->create('UnTypedDependency');
$this->fail("Expected exception");
} catch (bucket_CreationException $ex) {
$this->pass("Exception caught");
}
}
function test_barks_on_interface_dependency_when_unregistered() {
$bucket = new bucket_Container();
try {
$bucket->create('DependsOnInterface');
$this->fail("Expected exception");
} catch (bucket_CreationException $ex) {
$this->pass("Exception caught");
}
}
function test_can_create_class_with_interface_dependency() {
$bucket = new bucket_Container();
$bucket->registerImplementation('AnInterface', 'ConcreteImplementation');
$o = $bucket->create('DependsOnInterface');
$this->assertIsA($o, 'DependsOnInterface');
$this->assertIsA($o->val, 'ConcreteImplementation');
}
function test_can_set_different_implementation_for_concrete_class() {
$bucket = new bucket_Container();
$bucket->registerImplementation('NoDependencies', 'ExtendsNoDependencies');
$o = $bucket->create('SingleClassDependency');
$this->assertIsA($o, 'SingleClassDependency');
$this->assertIsA($o->val, 'ExtendsNoDependencies');
}
}
class TestOfBucketContainer extends UnitTestCase {
function test_get_creates_new_object() {
$bucket = new bucket_Container();
$this->assertIsA($bucket->get('NoDependencies'), 'NoDependencies');
}
function test_get_returns_same_instance_on_subsequent_calls() {
$bucket = new bucket_Container();
$this->assertSame(
$bucket->get('NoDependencies'),
$bucket->get('NoDependencies'));
}
}
class TestOfBucketFactory extends UnitTestCase {
function test_container_delegates_to_factory_method() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('NoDependencies'), 'NoDependencies');
$this->assertTrue($factory->invoked);
}
function test_container_can_return_different_implementation() {
$bucket = new bucket_Container(new TestFactory());
$this->assertIsA($bucket->get('ConcreteImplementation'), 'NoDependencies');
}
function test_container_delegates_to_factory_callback() {
$factory = new TestFactory();
$factory->new_defaultvalue = create_function('', 'return new StdClass();');
// For PHP 5.3+
// $factory->new_defaultvalue = function($container) {
// return new StdClass();
// }
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('DefaultValue'), 'StdClass');
}
function test_callback_takes_precedence_over_method() {
$factory = new TestFactory();
$factory->new_nodependencies = create_function('', 'return new StdClass();');
// For PHP 5.3+
// $factory->new_nodependencies = function($container) {
// return new StdClass();
// }
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('NoDependencies'), 'StdClass');
}
function test_container_can_take_array_of_callbacks_as_argument() {
$bucket = new bucket_Container(
array(
'DefaultValue' => create_function('', 'return new StdClass();')
)
);
// For PHP 5.3+
// $bucket = new bucket_Container(
// array(
// 'DefaultValue' => function($container) {
// return new StdClass();
// }
// )
// );
$this->assertIsA($bucket->get('DefaultValue'), 'StdClass');
}
function test_underscore_call_is_callable() {
$factory = new TestUnderscoreCallFactory();
$bucket = new bucket_Container($factory);
$this->assertIsA($bucket->get('StdClass'), 'StdClass');
$this->assertTrue($factory->invoked);
}
}
class TestOfBucketScope extends UnitTestCase {
function test_a_child_scope_uses_parent_factory() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$this->assertIsA($scope->get('NoDependencies'), 'NoDependencies');
$this->assertTrue($factory->invoked);
}
function test_get_on_a_child_scope_returns_same_instance_on_subsequent_calls() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$this->assertSame(
$scope->get('NoDependencies'),
$scope->get('NoDependencies'));
}
function test_get_on_a_child_scope_returns_parent_state() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$o = $bucket->get('NoDependencies');
$this->assertSame(
$o,
$scope->get('NoDependencies'));
}
function test_parent_scope_doesnt_see_child_state() {
$factory = new TestFactory();
$bucket = new bucket_Container($factory);
$scope = $bucket->makeChildContainer();
$o = $scope->get('NoDependencies');
$this->assertFalse($o === $bucket->get('NoDependencies'));
}
function test_setting_an_instance_and_getting_it_should_return_same_instance() {
$bucket = new bucket_Container();
$obj = new StdClass();
$bucket->set($obj);
$this->assertSame($bucket->get('StdClass'), $obj);
}
} |
cloudhead/vargs | 6 | lib/vargs.js | //
// vargs.js
//
// variable argument handling for functions taking a callback
//
// usage:
//
// var Args = new("vargs").Constructor;
//
// function (/* [arg1, arg2, ...][,callback] */) {
// var args = new(Args)(arguments);
//
// args.first; // first argument
// args.last; // last argument before callback
// args.callback; // callback argument, or an empty function
// args.all; // all arguments except callback
// args.length; // number of arguments, not including callback
//
// args.callbackGiven() // returns true or false
// args.at(-1) // last argument, including callback
// args.array // all arguments, including callback
// }
//
exports.Constructor = function Vargs(arguments) {
this.array = Array.prototype.slice.call(arguments);
this.__defineGetter__('length', function () {
if (this.callbackGiven()) {
return this.array.length - 1;
} else {
return this.array.length;
}
});
this.__defineGetter__('all', function () {
if (this.callbackGiven()) {
return this.array.slice(0, -1);
} else {
return this.array;
}
});
this.__defineGetter__('last', function () {
if (typeof(this.at(-1)) === 'function') {
return this.at(-2);
} else {
return this.at(-1);
}
});
this.__defineGetter__('first', function () {
return this.array[0];
});
this.callback = this.callbackGiven() ? this.at(-1)
: function () {};
};
exports.Constructor.prototype = {
callbackGiven: function () {
return typeof(this.at(-1)) === 'function';
},
at: function (n) {
if (n < 0) {
return this.array[this.array.length + n];
} else {
return this.array[n];
}
}
};
exports.Constructor.last = function (args) {
return args[args.length - 1];
};
| //
// vargs.js
//
// variable argument handling for functions taking a callback
//
// usage:
//
// var Args = new("vargs").Constructor;
//
// function (/* [arg1, arg2, ...][,callback] */) {
// var args = new(Args)(arguments);
//
// args.first; // first argument
// args.last; // last argument before callback
// args.callback; // callback argument, or an empty function
// args.all; // all arguments except callback
// args.length; // number of arguments, not including callback
//
// args.callbackGiven() // returns true or false
// args.at(-1) // last argument, including callback
// args.array // all arguments, including callback
// }
//
exports.Constructor = function Vargs() {
this.array = Array.prototype.slice.call(arguments);
this.__defineGetter__('length', function () {
if (this.callbackGiven()) {
return this.array.length - 1;
} else {
return this.array.length;
}
});
this.__defineGetter__('all', function () {
if (this.callbackGiven()) {
return this.array.slice(0, -1);
} else {
return this.array;
}
});
this.__defineGetter__('last', function () {
if (typeof(this.at(-1)) === 'function') {
return this.at(-2);
} else {
return this.at(-1);
}
});
this.__defineGetter__('first', function () {
return this.array[0];
});
this.callback = this.callbackGiven() ? this.at(-1)
: function () {};
};
exports.Constructor.prototype = {
callbackGiven: function () {
return typeof(this.at(-1)) === 'function';
},
at: function (n) {
if (n < 0) {
return this.array[this.array.length + n];
} else {
return this.array[n];
}
}
};
exports.Constructor.last = function (args) {
return args[args.length - 1];
};
|
chmouel/ftp-cloudfs | 35 | ftpcloudfs/monkeypatching.py | import asyncore
from pyftpdlib import ftpserver
from ftpcloudfs.utils import smart_str
from server import RackspaceCloudAuthorizer
from multiprocessing.managers import RemoteError
class MyDTPHandler(ftpserver.DTPHandler):
def send(self, data):
data=smart_str(data)
return super(MyDTPHandler, self).send(data)
def close(self):
if self.file_obj is not None and not self.file_obj.closed:
try:
self.file_obj.close()
except Exception, e:
msg = "Data connection error (%s)" % e
self.cmd_channel.log(msg)
self.cmd_channel.respond("421 " + msg)
finally:
self.file_obj = None
super(MyDTPHandler, self).close()
class MyFTPHandler(ftpserver.FTPHandler):
# don't kick off client in long time transactions
timeout = 0
dtp_handler = MyDTPHandler
authorizer = RackspaceCloudAuthorizer()
@staticmethod
def abstracted_fs(root, cmd_channel):
'''Get an AbstractedFs for the user logged in on the cmd_channel'''
cffs = cmd_channel.authorizer.get_abstracted_fs(cmd_channel.username)
cffs.init_abstracted_fs(root, cmd_channel)
return cffs
def process_command(self, cmd, *args, **kwargs):
'''Flush the FS cache with every new FTP command'''
if self.fs:
if not self.fs.single_cache:
self.fs.flush()
self.fs.connection.real_ip = self.remote_ip
super(MyFTPHandler, self).process_command(cmd, *args, **kwargs)
def ftp_MD5(self, path):
line = self.fs.fs2ftp(path)
try:
md5_checksum = self.run_as_current_user(self.fs.md5, path)
except OSError, err:
why = ftpserver._strerror(err)
self.respond('550 %s.' % why)
else:
msg = md5_checksum.upper()
self.respond('251 "%s" %s' % (line.replace('"', '""'), msg))
def handle(self):
"""Track the ip and check max cons per ip (if needed)"""
if self.server.max_cons_per_ip and self.remote_ip and self.shared_ip_map != None:
try:
self.shared_lock.acquire()
count = self.shared_ip_map.get(self.remote_ip, 0)
self.shared_ip_map[self.remote_ip] = count + 1
self.shared_lock.release()
except RemoteError, e:
self.logerror("Connection tracking failed: %s" % e)
self.logline("Connection track: %s -> %s" % (self.remote_ip, count+1))
if self.shared_ip_map[self.remote_ip] > self.server.max_cons_per_ip:
self.handle_max_cons_per_ip()
return
self.logline("Connected, shared ip map: %s" % self.shared_ip_map)
super(MyFTPHandler, self).handle()
def close(self):
"""Remove the ip from the shared map before calling close"""
if not self._closed and self.server.max_cons_per_ip and self.shared_ip_map != None:
try:
self.shared_lock.acquire()
if self.remote_ip in self.shared_ip_map:
self.shared_ip_map[self.remote_ip] -= 1
if self.shared_ip_map[self.remote_ip] <= 0:
del self.shared_ip_map[self.remote_ip]
self.shared_lock.release()
except RemoteError, e:
self.logerror("Connection tracking cleanup failed: %s" % e)
self.logline("Disconnected, shared ip map: %s" % self.shared_ip_map)
super(MyFTPHandler, self).close()
def log_cmd(self, cmd, arg, respcode, respstr):
"""
We use the same format pyftpdlib is using, but we want to log more commands.
"""
if cmd in ("ABOR", "APPE", "DELE", "RMD", "RNFR", "RNTO", "RETR", "STOR", "MKD"):
line = '"%s" %s' % (' '.join([cmd, str(arg)]).strip(), respcode)
self.log(line)
| import asyncore
from pyftpdlib import ftpserver
from ftpcloudfs.utils import smart_str
from server import RackspaceCloudAuthorizer
from multiprocessing.managers import RemoteError
class MyDTPHandler(ftpserver.DTPHandler):
def send(self, data):
data=smart_str(data)
return super(MyDTPHandler, self).send(data)
def close(self):
if self.file_obj is not None and not self.file_obj.closed:
try:
self.file_obj.close()
except Exception, e:
msg = "Data connection error (%s)" % e
self.cmd_channel.log(msg)
self.cmd_channel.respond("421 " + msg)
finally:
self.file_obj = None
super(MyDTPHandler, self).close()
class MyFTPHandler(ftpserver.FTPHandler):
# don't kick off client in long time transactions
timeout = 0
dtp_handler = MyDTPHandler
authorizer = RackspaceCloudAuthorizer()
max_cons_per_ip = 0
@staticmethod
def abstracted_fs(root, cmd_channel):
'''Get an AbstractedFs for the user logged in on the cmd_channel'''
cffs = cmd_channel.authorizer.get_abstracted_fs(cmd_channel.username)
cffs.init_abstracted_fs(root, cmd_channel)
return cffs
def process_command(self, cmd, *args, **kwargs):
'''Flush the FS cache with every new FTP command'''
if self.fs:
if not self.fs.single_cache:
self.fs.flush()
self.fs.connection.real_ip = self.remote_ip
super(MyFTPHandler, self).process_command(cmd, *args, **kwargs)
def ftp_MD5(self, path):
line = self.fs.fs2ftp(path)
try:
md5_checksum = self.run_as_current_user(self.fs.md5, path)
except OSError, err:
why = ftpserver._strerror(err)
self.respond('550 %s.' % why)
else:
msg = md5_checksum.upper()
self.respond('251 "%s" %s' % (line.replace('"', '""'), msg))
def handle(self):
"""Track the ip and check max cons per ip (if needed)"""
if self.max_cons_per_ip and self.remote_ip and self.shared_ip_map != None:
count = 0
try:
self.shared_lock.acquire()
count = self.shared_ip_map.get(self.remote_ip, 0) + 1
self.shared_ip_map[self.remote_ip] = count
self.logline("Connection track: %s -> %s" % (self.remote_ip, count))
except RemoteError, e:
self.logerror("Connection tracking failed: %s" % e)
finally:
self.shared_lock.release()
if count > self.max_cons_per_ip:
self.handle_max_cons_per_ip()
return
self.logline("Connected, shared ip map: %s" % self.shared_ip_map)
super(MyFTPHandler, self).handle()
def close(self):
"""Remove the ip from the shared map before calling close"""
if not self._closed and self.max_cons_per_ip and self.shared_ip_map != None:
try:
self.shared_lock.acquire()
if self.remote_ip in self.shared_ip_map:
self.shared_ip_map[self.remote_ip] -= 1
if self.shared_ip_map[self.remote_ip] <= 0:
del self.shared_ip_map[self.remote_ip]
except RemoteError, e:
self.logerror("Connection tracking cleanup failed: %s" % e)
finally:
self.shared_lock.release()
self.logline("Disconnected, shared ip map: %s" % self.shared_ip_map)
super(MyFTPHandler, self).close()
def log_cmd(self, cmd, arg, respcode, respstr):
"""
We use the same format pyftpdlib is using, but we want to log more commands.
"""
if cmd in ("ABOR", "APPE", "DELE", "RMD", "RNFR", "RNTO", "RETR", "STOR", "MKD"):
line = '"%s" %s' % (' '.join([cmd, str(arg)]).strip(), respcode)
self.log(line)
|
codekoala/django-articles | 7 | articles/forms.py | import logging
from django import forms
from django.utils.translation import ugettext_lazy as _
from models import Article, Tag
log = logging.getLogger('articles.forms')
def tag(name):
"""Returns a Tag object for the given name"""
slug = Tag.clean_tag(name)
log.debug('Looking for Tag with slug "%s"...' % (slug,))
t, created = Tag.objects.get_or_create(slug=slug, defaults={'name': name})
log.debug('Found Tag %s. Name: %s Slug: %s Created: %s' % (t.pk, t.name, t.slug, created))
if not t.name:
t.name = name
t.save()
return t
class ArticleAdminForm(forms.ModelForm):
tags = forms.CharField(initial='', required=False,
widget=forms.TextInput(attrs={'size': 100}),
help_text=_('Words that describe this article'))
def __init__(self, *args, **kwargs):
"""Sets the list of tags to be a string"""
instance = kwargs.get('instance', None)
if instance:
init = kwargs.get('initial', {})
init['tags'] = ' '.join([t.name for t in instance.tags.all()])
kwargs['initial'] = init
super(ArticleAdminForm, self).__init__(*args, **kwargs)
def clean_tags(self):
"""Turns the string of tags into a list"""
tags = [tag(t.strip()) for t in self.cleaned_data['tags'].split() if len(t.strip())]
log.debug('Tagging Article %s with: %s' % (self.cleaned_data['title'], tags))
self.cleaned_data['tags'] = tags
return self.cleaned_data['tags']
class Meta:
model = Article
class Media:
css = {
'all': ('articles/css/jquery.autocomplete.css',),
}
js = (
'articles/js/jquery-1.4.1.min.js',
'articles/js/jquery.bgiframe.min.js',
'articles/js/jquery.autocomplete.pack.js',
'articles/js/tag_autocomplete.js',
)
| import logging
from django import forms
from django.utils.translation import ugettext_lazy as _
from models import Article, Tag
log = logging.getLogger('articles.forms')
def tag(name):
"""Returns a Tag object for the given name"""
slug = Tag.clean_tag(name)
log.debug('Looking for Tag with slug "%s"...' % (slug,))
t, created = Tag.objects.get_or_create(slug=slug, defaults={'name': name})
log.debug('Found Tag %s. Name: %s Slug: %s Created: %s' % (t.pk, t.name, t.slug, created))
if not t.name:
t.name = name
t.save()
return t
class ArticleAdminForm(forms.ModelForm):
tags = forms.CharField(initial='', required=False,
widget=forms.TextInput(attrs={'size': 100}),
help_text=_('Words that describe this article'))
def __init__(self, *args, **kwargs):
"""Sets the list of tags to be a string"""
instance = kwargs.get('instance', None)
if instance:
init = kwargs.get('initial', {})
init['tags'] = ' '.join([t.name for t in instance.tags.all()])
kwargs['initial'] = init
super(ArticleAdminForm, self).__init__(*args, **kwargs)
def clean_tags(self):
"""Turns the string of tags into a list"""
tags = [tag(t.strip()) for t in self.cleaned_data['tags'].split() if len(t.strip())]
log.debug('Tagging Article %s with: %s' % (self.cleaned_data['title'], tags))
self.cleaned_data['tags'] = tags
return self.cleaned_data['tags']
def save(self, *args, **kwargs):
"""Remove any old tags that may have been set that we no longer need"""
if self.instance.pk:
self.instance.tags.clear()
return super(ArticleAdminForm, self).save(*args, **kwargs)
class Meta:
model = Article
class Media:
css = {
'all': ('articles/css/jquery.autocomplete.css',),
}
js = (
'articles/js/jquery-1.4.1.min.js',
'articles/js/jquery.bgiframe.min.js',
'articles/js/jquery.autocomplete.pack.js',
'articles/js/tag_autocomplete.js',
)
|
shugo/jruby | 1 | core/src/main/java/org/jruby/parser/StaticScope.java | /*
***** BEGIN LICENSE BLOCK *****
* Version: EPL 2.0/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Eclipse Public
* License Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.eclipse.org/legal/epl-v20.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
* Copyright (C) 2006-2007 Thomas E Enebo <enebo@acm.org>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the EPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the EPL, the GPL or the LGPL.
***** END LICENSE BLOCK *****/
package org.jruby.parser;
import java.io.Serializable;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.function.BiConsumer;
import java.util.function.IntFunction;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyModule;
import org.jruby.RubyObject;
import org.jruby.RubySymbol;
import org.jruby.ast.AssignableNode;
import org.jruby.ast.DAsgnNode;
import org.jruby.ast.DVarNode;
import org.jruby.ast.IScopedNode;
import org.jruby.ast.LocalAsgnNode;
import org.jruby.ast.LocalVarNode;
import org.jruby.ast.Node;
import org.jruby.ast.VCallNode;
import org.jruby.ir.IRScope;
import org.jruby.ir.IRScopeType;
import org.jruby.lexer.yacc.ISourcePosition;
import org.jruby.runtime.DynamicScope;
import org.jruby.runtime.Helpers;
import org.jruby.runtime.Signature;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.runtime.scope.DynamicScopeGenerator;
import org.jruby.runtime.scope.ManyVarsDynamicScope;
import org.jruby.util.IdUtil;
/**
* StaticScope represents lexical scoping of variables and module/class constants.
*
* At a very high level every scopes enclosing scope contains variables in the next outer
* lexical layer. The enclosing scopes variables may or may not be reachable depending
* on the scoping rules for variables (governed by BlockStaticScope and LocalStaticScope).
*
* StaticScope also keeps track of current module/class that is in scope. previousCRefScope
* will point to the previous scope of the enclosing module/class (cref).
*
*/
public class StaticScope implements Serializable {
public static final int MAX_SPECIALIZED_SIZE = 50;
private static final long serialVersionUID = 3423852552352498148L;
private static final MethodHandles.Lookup LOOKUP = MethodHandles.publicLookup();
// Next immediate scope. Variable and constant scoping rules make use of this variable
// in different ways.
final protected StaticScope enclosingScope;
// Live reference to module
private transient RubyModule cref = null;
// Next CRef down the lexical structure
private StaticScope previousCRefScope = null;
// Our name holder (offsets are assigned as variables are added) [these are symbol strings. Use
// as key to Symbol table for actual encoded versions].
private String[] variableNames;
private int variableNamesLength;
// A list of booleans indicating which variables are named captures from regexp
private boolean[] namedCaptures;
// Arity of this scope if there is one
private Signature signature;
// File name where this static scope came from or null if a native or artificial scope
private String file;
private DynamicScope dummyScope;
protected IRScopeType scopeType;
private static final String[] NO_NAMES = new String[0];
private Type type;
private boolean isBlockOrEval;
private boolean isArgumentScope; // Is this block and argument scope of a define_method.
private long commandArgumentStack;
private int firstKeywordIndex = -1;
// Method/Closure that this static scope corresponds to. This is used to tell whether this
// scope refers to a method scope or to determined IRScope of the parent of a compiling eval.
private IRScope irScope;
private RubyModule overlayModule;
private volatile MethodHandle constructor;
public enum Type {
LOCAL, BLOCK, EVAL;
public static Type fromOrdinal(int value) {
return value < 0 || value >= values().length ? null : values()[value];
}
}
/**
*
*/
protected StaticScope(Type type, StaticScope enclosingScope, String file) {
this(type, enclosingScope, NO_NAMES);
this.file = file;
}
/**
* Construct a new static scope.
*
* @param type the type of scope
* @param enclosingScope the lexically containing scope.
*/
protected StaticScope(Type type, StaticScope enclosingScope) {
this(type, enclosingScope, NO_NAMES);
}
/**
* Construct a new static scope. The array of strings should all be the
* interned versions, since several other optimizations depend on being
* able to do object equality checks.
*
* @param type the type of scope
* @param enclosingScope the lexically containing scope.
* @param names The list of interned String variable names.
*/
protected StaticScope(Type type, StaticScope enclosingScope, String[] names, int firstKeywordIndex) {
assert names != null : "names is not null";
this.enclosingScope = enclosingScope;
this.variableNames = names;
this.variableNamesLength = names.length;
this.type = type;
if (enclosingScope != null && enclosingScope.irScope != null) {
this.irScope = enclosingScope.irScope;
this.scopeType = irScope.getScopeType();
}
this.isBlockOrEval = (type != Type.LOCAL);
this.isArgumentScope = !isBlockOrEval;
this.firstKeywordIndex = firstKeywordIndex;
}
protected StaticScope(Type type, StaticScope enclosingScope, String[] names) {
this(type, enclosingScope, names, -1);
}
public int getFirstKeywordIndex() {
return firstKeywordIndex;
}
public DynamicScope construct(DynamicScope parent) {
MethodHandle constructor = this.constructor;
if (constructor == null) constructor = acquireConstructor();
try {
return (DynamicScope) constructor.invokeExact(this, parent);
} catch (Throwable e) {
Helpers.throwException(e);
return null; // not reached
}
}
private synchronized MethodHandle acquireConstructor() {
// check again
MethodHandle constructor = this.constructor;
if (constructor != null) return constructor;
int numberOfVariables = getNumberOfVariables();
if (numberOfVariables > MAX_SPECIALIZED_SIZE) {
constructor = ManyVarsDynamicScope.CONSTRUCTOR;
} else {
constructor = DynamicScopeGenerator.generate(numberOfVariables);
}
this.constructor = constructor;
return constructor;
}
public IRScope getIRScope() {
return irScope;
}
public IRScopeType getScopeType() {
return scopeType;
}
public void setScopeType(IRScopeType scopeType) {
this.scopeType = scopeType;
}
public void setIRScope(IRScope irScope) {
this.irScope = irScope;
this.scopeType = irScope.getScopeType();
}
/**
* Add a new variable to this (current) scope unless it is already defined in the
* current scope.
*
* @param name of new variable
* @return index of variable
*/
public int addVariableThisScope(String name) {
int slot = exists(name);
if (slot >= 0) return slot;
// Clear constructor since we are adding a name
constructor = null;
// This is perhaps innefficient timewise? Optimal spacewise
growVariableNames(name);
// Returns slot of variable
return variableNames.length - 1;
}
/**
* Add a new named capture variable to this (current) scope.
*
* @param name name of variable.
* @return index of variable
*/
public int addNamedCaptureVariable(String name) {
int index = addVariableThisScope(name);
growNamedCaptures(index);
return index;
}
/**
* Add a new variable to this (current) scope unless it is already defined in any
* reachable scope.
*
* @param name of new variable
* @return index+depth merged location of scope
*/
public int addVariable(String name) {
int slot = isDefined(name);
if (slot >= 0) return slot;
// Clear constructor since we are adding a name
constructor = null;
// This is perhaps innefficient timewise? Optimal spacewise
growVariableNames(name);
// Returns slot of variable
return variableNames.length - 1;
}
public String[] getVariables() {
return variableNames.clone();
}
public int getNumberOfVariables() {
return variableNamesLength;
}
public void setVariables(String[] names) {
assert names != null : "names is not null";
// Clear constructor since we are changing names
constructor = null;
variableNames = new String[names.length];
variableNamesLength = names.length;
System.arraycopy(names, 0, variableNames, 0, names.length);
}
/**
* Gets a constant back from lexical search from the cref in this scope.
* As it is for defined? we will not forced resolution of autoloads nor
* call const_defined
*/
public IRubyObject getConstantDefined(String internedName) {
IRubyObject result = cref.fetchConstant(internedName);
if (result != null) return result;
return previousCRefScope == null ? null : previousCRefScope.getConstantDefinedNoObject(internedName);
}
public IRubyObject getConstantDefinedNoObject(String internedName) {
if (previousCRefScope == null) return null;
return getConstantDefined(internedName);
}
public IRubyObject getConstant(String internedName) {
IRubyObject result = getConstantInner(internedName);
// If we could not find the constant from cref..then try getting from inheritence hierarchy
return result == null ? cref.getConstantNoConstMissing(internedName) : result;
}
public IRubyObject getConstantInner(String internedName) {
IRubyObject result = cref.fetchConstant(internedName);
if (result != null) {
return result == RubyObject.UNDEF ? cref.resolveUndefConstant(internedName) : result;
}
return previousCRefScope == null ? null : previousCRefScope.getConstantInnerNoObject(internedName);
}
private IRubyObject getConstantInnerNoObject(String internedName) {
if (previousCRefScope == null) return null;
return getConstantInner(internedName);
}
/**
* Next outer most scope in list of scopes. An enclosing scope may have no direct scoping
* relationship to its child. If I am in a localScope and then I enter something which
* creates another localScope the enclosing scope will be the first scope, but there are
* no valid scoping relationships between the two. Methods which walk the enclosing scopes
* are responsible for enforcing appropriate scoping relationships.
*
* @return the parent scope
*/
public StaticScope getEnclosingScope() {
return enclosingScope;
}
/**
* Does the variable exist?
*
* @param name of the variable to find
* @return index of variable or -1 if it does not exist
*/
public int exists(String name) {
return findVariableName(name);
}
private int findVariableName(String name) {
for (int i = 0; i < variableNames.length; i++) {
if (name.equals(variableNames[i])) return i;
}
return -1;
}
/**
* Is this name in the visible to the current scope
*
* @param name to be looked for
* @return a location where the left-most 16 bits of number of scopes down it is and the
* right-most 16 bits represents its index in that scope
*/
public int isDefined(String name) {
return isDefined(name, 0);
}
/**
* Make a DASgn or LocalAsgn node based on scope logic
*
* @param position
* @param name
* @param value
* @return
*
* Note: This is private code made public only for parser.
*/
public AssignableNode assign(ISourcePosition position, RubySymbol name, Node value) {
return assign(position, name, value, this, 0);
}
/**
* Register a keyword argument with this staticScope. It additionally will track
* where the first keyword argument started so we can test and tell whether we have
* a kwarg or an ordinary variable during live execution (See keywordExists).
* @param position
* @param symbolID
* @param value
* @return
*/
public AssignableNode assignKeyword(ISourcePosition position, RubySymbol symbolID, Node value) {
AssignableNode assignment = assign(position, symbolID, value, this, 0);
// register first keyword index encountered
if (firstKeywordIndex == -1) firstKeywordIndex = ((IScopedNode) assignment).getIndex();
return assignment;
}
/**
 * Reports whether the named slot is a keyword-argument slot in this scope.
 * A slot counts as a keyword when it exists and sits at or after the first
 * keyword index recorded by assignKeyword.
 *
 * @param name the variable name to test
 * @return true when the name is a keyword argument of this scope
 */
public boolean keywordExists(String name) {
    if (firstKeywordIndex == -1) return false; // no keywords registered at all
    int slot = exists(name);
    return slot >= 0 && slot >= firstKeywordIndex;
}
/**
 * Get all visible variables that we can see from this scope that have been assigned
 * (e.g. seen so far)
 *
 * @return a list of all names (sans $~ and $_ which are special names)
 */
public String[] getAllNamesInScope() {
    // Convert the collected ArrayList directly to an array; the previous
    // .stream().toArray(String[]::new) round-trip added a Stream for no benefit.
    return collectVariables(ArrayList::new, ArrayList::add).toArray(new String[0]);
}
/**
 * Populate a deduplicated collection of variable names in scope using the given functions.
 *
 * This may include variables that are not strictly Ruby local variable names, so the consumer should validate
 * names as appropriate.
 *
 * @param collectionFactory used to construct the collection
 * @param collectionPopulator used to pass values into the collection
 * @param <T> resulting collection type
 * @return populated collection
 */
public <T> T collectVariables(IntFunction<T> collectionFactory, BiConsumer<T, String> collectionPopulator) {
    StaticScope current = this;
    // Size is only a hint: enclosing block/eval scopes may contribute more names.
    T collection = collectionFactory.apply(current.variableNamesLength);
    HashMap<String, Object> dedup = new HashMap<>();
    // Walk outward through block/eval scopes. An inner name shadows an outer one,
    // so only the first occurrence of each name is passed to the populator.
    while (current.isBlockOrEval) {
        for (String name : current.variableNames) {
            dedup.computeIfAbsent(name, key -> {collectionPopulator.accept(collection, key); return key;});
        }
        current = current.enclosingScope;
    }
    // once more for method scope
    for (String name : current.variableNames) {
        dedup.computeIfAbsent(name, key -> {collectionPopulator.accept(collection, key); return key;});
    }
    return collection;
}
/**
* Convenience wrapper around {@link #collectVariables(IntFunction, BiConsumer)}.
*
* @param runtime current runtime
* @return populated RubyArray
*/
public RubyArray getLocalVariables(Ruby runtime) {
return collectVariables(
runtime::newArray,
(a, s) -> {
if (IdUtil.isLocal(s)) a.append(runtime.newSymbol(s));
});
}
/**
 * Search this scope and, for block/eval scopes, its enclosing scopes for a variable.
 *
 * @param name  variable name to find
 * @param depth number of scopes already walked out
 * @return (depth &lt;&lt; 16) | slot when found; -1 when absent — exists() returns -1,
 *         and (depth &lt;&lt; 16) | -1 is still -1 because -1 is all ones
 */
public int isDefined(String name, int depth) {
    if (isBlockOrEval) {
        int slot = exists(name);
        if (slot >= 0) return (depth << 16) | slot;
        // Not in this scope: keep searching outward, one level deeper.
        return enclosingScope.isDefined(name, depth + 1);
    } else {
        // A method/local scope is the outermost point of the search.
        return (depth << 16) | exists(name);
    }
}
public AssignableNode addAssign(ISourcePosition position, RubySymbol symbolID, Node value) {
int slot = addVariable(symbolID.idString());
// No bit math to store level since we know level is zero for this case
return new DAsgnNode(position, symbolID, slot, value);
}
/**
 * Build an assignment node for a variable, walking outward through block scopes
 * until the variable's owning scope (or the method scope) is found.
 *
 * @param position source position for the new node
 * @param symbolID symbol of the variable being assigned
 * @param value    the value expression
 * @param topScope the scope where the assignment lexically appears
 * @param depth    scopes already walked out from topScope
 * @return a DAsgnNode (block-local) or LocalAsgnNode (method-local)
 */
public AssignableNode assign(ISourcePosition position, RubySymbol symbolID, Node value,
                             StaticScope topScope, int depth) {
    String id = symbolID.idString();
    int slot = exists(id);
    // We can assign if we already have variable of that name here or we are the only
    // scope in the chain (which Local scopes always are).
    if (slot >= 0) {
        // Found here: encode the walk-out depth in the high 16 bits next to the slot.
        return isBlockOrEval ? new DAsgnNode(position, symbolID, ((depth << 16) | slot), value)
                : new LocalAsgnNode(position, symbolID, ((depth << 16) | slot), value);
    } else if (!isBlockOrEval && (topScope == this)) {
        // Method scope assigning a brand-new variable: declare it here at depth 0.
        slot = addVariable(id);
        return new LocalAsgnNode(position, symbolID, slot, value);
    }
    // If we are not a block-scope and we go there, we know that 'topScope' is a block scope
    // because a local scope cannot be within a local scope
    // If topScope was itself it would have created a LocalAsgnNode above.
    return isBlockOrEval ?
            enclosingScope.assign(position, symbolID, value, topScope, depth + 1) :
            topScope.addAssign(position, symbolID, value);
}
// Note: This is private code made public only for parser.
public Node declare(ISourcePosition position, RubySymbol symbolID, int depth) {
int slot = exists(symbolID.idString());
if (slot >= 0) {
return isBlockOrEval ?
new DVarNode(position, ((depth << 16) | slot), symbolID) :
new LocalVarNode(position, ((depth << 16) | slot), symbolID);
}
return isBlockOrEval ? enclosingScope.declare(position, symbolID, depth + 1) : new VCallNode(position, symbolID);
}
/**
* Make a DVar or LocalVar node based on scoping logic
*
* @param position the location that in the source that the new node will come from
* @param symbolID of the variable to be created is named
* @return a DVarNode or LocalVarNode
*
* Note: This is private code made public only for parser.
*/
public Node declare(ISourcePosition position, RubySymbol symbolID) {
return declare(position, symbolID, 0);
}
/**
* Gets the Local Scope relative to the current Scope. For LocalScopes this will be itself.
* Blocks will contain the LocalScope it contains.
*
* @return localScope
*/
public StaticScope getLocalScope() {
return (type != Type.BLOCK) ? this : enclosingScope.getLocalScope();
}
/**
* Get the live CRef module associated with this scope.
*
* @return the live module
*/
public RubyModule getModule() {
return cref;
}
public StaticScope getPreviousCRefScope() {
return previousCRefScope;
}
public void setPreviousCRefScope(StaticScope crefScope) {
this.previousCRefScope = crefScope;
}
/**
 * Set the live module (cref) for this scope and re-link previousCRefScope to the
 * nearest enclosing scope that already carries a cref.
 *
 * @param module the module now lexically in scope here
 */
public void setModule(RubyModule module) {
    this.cref = module;
    // Find the closest enclosing scope with a cref; leave previousCRefScope
    // untouched if none exists.
    for (StaticScope scope = getEnclosingScope(); scope != null; scope = scope.getEnclosingScope()) {
        if (scope.cref != null) {
            previousCRefScope = scope;
            return;
        }
    }
}
/**
 * Update current scoping structure to populate with proper cref scoping values. This should
 * be called at any point when you reference a scope for the first time. For the interpreter
 * this is done in a small number of places (defnNode, defsNode, and getBlock). The compiler
 * does this in the same places.
 *
 * @return the current cref, though this is largely an implementation detail
 */
public RubyModule determineModule() {
    if (cref == null) {
        // Recurse outward until a scope with an established cref is reached,
        // then inherit both the cref and its previous-cref link.
        cref = getEnclosingScope().determineModule();
        assert cref != null : "CRef is always created before determine happens";
        previousCRefScope = getEnclosingScope().previousCRefScope;
    }
    return cref;
}
public boolean isBlockScope() {
return isBlockOrEval;
}
/**
* Argument scopes represent scopes which contain arguments for zsuper. All LocalStaticScopes
* are argument scopes and BlockStaticScopes can be when they are used by define_method.
*/
public boolean isArgumentScope() {
return isArgumentScope;
}
public void makeArgumentScope() {
this.isArgumentScope = true;
}
/**
* For all block or method associated with static scopes this will return the signature for that
* signature-providing scope. module bodies and other non-arity specific code will return null.
*/
public Signature getSignature() {
return signature;
}
/**
* This happens in when first defining ArgsNodes or when reifying a method from AOT.
*/
public void setSignature(Signature signature) {
this.signature = signature;
}
public DynamicScope getDummyScope() {
return dummyScope == null ? dummyScope = DynamicScope.newDynamicScope(this) : dummyScope;
}
public void setCommandArgumentStack(long commandArgumentStack) {
this.commandArgumentStack = commandArgumentStack;
}
public long getCommandArgumentStack() {
return commandArgumentStack;
}
/**
 * Append one variable name, growing the backing array by a single slot.
 * The new name always occupies the last slot.
 *
 * @param name the variable name to append
 */
private void growVariableNames(String name) {
    // Arrays.copyOf replaces the manual allocate+arraycopy; the new name is
    // written into the copy BEFORE it is published to the field, so the array
    // never appears with a null last slot.
    String[] newVariableNames = Arrays.copyOf(variableNames, variableNames.length + 1);
    newVariableNames[newVariableNames.length - 1] = name;
    variableNames = newVariableNames;
    variableNamesLength = newVariableNames.length;
}
/**
 * Mark the variable at the given slot as a regexp named capture, growing the
 * tracking array as needed (never shrinking it).
 *
 * @param index slot of the named-capture variable
 */
private void growNamedCaptures(int index) {
    boolean[] namedCaptures = this.namedCaptures;
    // Arrays.copyOf replaces the manual allocate+arraycopy; semantics unchanged:
    // first capture allocates index+1 slots, later ones grow only if required.
    boolean[] newNamedCaptures = namedCaptures == null ?
            new boolean[index + 1] :
            Arrays.copyOf(namedCaptures, Math.max(index + 1, namedCaptures.length));
    newNamedCaptures[index] = true;
    this.namedCaptures = newNamedCaptures;
}
/**
 * Is the variable at the given slot a regexp named capture?
 *
 * @param index the slot to test
 * @return true when the slot has been marked as a named capture
 */
public boolean isNamedCapture(int index) {
    boolean[] captures = this.namedCaptures;
    if (captures == null) return false;
    return index < captures.length && captures[index];
}
@Override
public String toString() {
// FIXME: Do we need to persist cref as well?
return "StaticScope(" + type + "):" + Arrays.toString(variableNames);
}
public Type getType() {
return type;
}
public String getFile() {
return file;
}
public StaticScope duplicate() {
StaticScope dupe = new StaticScope(type, enclosingScope, variableNames == null ? NO_NAMES : variableNames);
// irScope is not guaranteed to be set onto StaticScope until it is executed for the first time.
// We can call duplicate before its first execution.
if (irScope != null) dupe.setIRScope(irScope);
dupe.setScopeType(scopeType);
dupe.setPreviousCRefScope(previousCRefScope);
dupe.setModule(cref);
dupe.setSignature(signature);
return dupe;
}
public RubyModule getOverlayModuleForRead() {
return overlayModule;
}
/**
 * Lazily create (if needed) and return the overlay module for this scope.
 *
 * NOTE(review): this check-then-act lazy init is neither synchronized nor on a
 * volatile field — presumably only touched from a single thread per scope; confirm.
 *
 * @param context the current thread context (supplies the runtime)
 * @return the overlay module, never null
 */
public RubyModule getOverlayModuleForWrite(ThreadContext context) {
    RubyModule omod = overlayModule;
    if (omod == null) {
        overlayModule = omod = RubyModule.newModule(context.runtime);
    }
    return omod;
}
}
| /*
***** BEGIN LICENSE BLOCK *****
* Version: EPL 2.0/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Eclipse Public
* License Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.eclipse.org/legal/epl-v20.html
*
* Software distributed under the License is distributed on an "AS
* IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
* implied. See the License for the specific language governing
* rights and limitations under the License.
*
* Copyright (C) 2006-2007 Thomas E Enebo <enebo@acm.org>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the EPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the EPL, the GPL or the LGPL.
***** END LICENSE BLOCK *****/
package org.jruby.parser;
import java.io.Serializable;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.function.BiConsumer;
import java.util.function.IntFunction;
import org.jruby.Ruby;
import org.jruby.RubyArray;
import org.jruby.RubyModule;
import org.jruby.RubyObject;
import org.jruby.RubySymbol;
import org.jruby.ast.AssignableNode;
import org.jruby.ast.DAsgnNode;
import org.jruby.ast.DVarNode;
import org.jruby.ast.IScopedNode;
import org.jruby.ast.LocalAsgnNode;
import org.jruby.ast.LocalVarNode;
import org.jruby.ast.Node;
import org.jruby.ast.VCallNode;
import org.jruby.ir.IRScope;
import org.jruby.ir.IRScopeType;
import org.jruby.lexer.yacc.ISourcePosition;
import org.jruby.runtime.DynamicScope;
import org.jruby.runtime.Helpers;
import org.jruby.runtime.Signature;
import org.jruby.runtime.ThreadContext;
import org.jruby.runtime.builtin.IRubyObject;
import org.jruby.runtime.scope.DynamicScopeGenerator;
import org.jruby.runtime.scope.ManyVarsDynamicScope;
import org.jruby.util.IdUtil;
/**
* StaticScope represents lexical scoping of variables and module/class constants.
*
* At a very high level every scopes enclosing scope contains variables in the next outer
* lexical layer. The enclosing scopes variables may or may not be reachable depending
* on the scoping rules for variables (governed by BlockStaticScope and LocalStaticScope).
*
* StaticScope also keeps track of current module/class that is in scope. previousCRefScope
* will point to the previous scope of the enclosing module/class (cref).
*
*/
public class StaticScope implements Serializable {
public static final int MAX_SPECIALIZED_SIZE = 50;
private static final long serialVersionUID = 3423852552352498148L;
private static final MethodHandles.Lookup LOOKUP = MethodHandles.publicLookup();
// Next immediate scope. Variable and constant scoping rules make use of this variable
// in different ways.
final protected StaticScope enclosingScope;
// Live reference to module
private transient RubyModule cref = null;
// Next CRef down the lexical structure
private StaticScope previousCRefScope = null;
// Our name holder (offsets are assigned as variables are added) [these are symbol strings. Use
// as key to Symbol table for actual encoded versions].
private String[] variableNames;
private int variableNamesLength;
// A list of booleans indicating which variables are named captures from regexp
private boolean[] namedCaptures;
// Arity of this scope if there is one
private Signature signature;
// File name where this static scope came from or null if a native or artificial scope
private String file;
private DynamicScope dummyScope;
protected IRScopeType scopeType;
private static final String[] NO_NAMES = new String[0];
private Type type;
private boolean isBlockOrEval;
private boolean isArgumentScope; // Is this block and argument scope of a define_method.
private long commandArgumentStack;
private int firstKeywordIndex = -1;
// Method/Closure that this static scope corresponds to. This is used to tell whether this
// scope refers to a method scope or to determined IRScope of the parent of a compiling eval.
private IRScope irScope;
private RubyModule overlayModule;
private volatile MethodHandle constructor;
private boolean procRefinementsEnabled = false;
/** Kind of lexical scope: method-local, block, or eval. */
public enum Type {
    LOCAL, BLOCK, EVAL;

    /**
     * Map an ordinal back to its Type.
     *
     * @param value the ordinal
     * @return the matching Type, or null when the ordinal is out of range
     */
    public static Type fromOrdinal(int value) {
        Type[] all = values();
        if (value < 0 || value >= all.length) return null;
        return all[value];
    }
}
/**
*
*/
protected StaticScope(Type type, StaticScope enclosingScope, String file) {
this(type, enclosingScope, NO_NAMES);
this.file = file;
}
/**
* Construct a new static scope.
*
* @param type the type of scope
* @param enclosingScope the lexically containing scope.
*/
protected StaticScope(Type type, StaticScope enclosingScope) {
this(type, enclosingScope, NO_NAMES);
}
/**
* Construct a new static scope. The array of strings should all be the
* interned versions, since several other optimizations depend on being
* able to do object equality checks.
*
* @param type the type of scope
* @param enclosingScope the lexically containing scope.
* @param names The list of interned String variable names.
*/
protected StaticScope(Type type, StaticScope enclosingScope, String[] names, int firstKeywordIndex) {
assert names != null : "names is not null";
this.enclosingScope = enclosingScope;
this.variableNames = names;
this.variableNamesLength = names.length;
this.type = type;
if (enclosingScope != null && enclosingScope.irScope != null) {
this.irScope = enclosingScope.irScope;
this.scopeType = irScope.getScopeType();
}
this.isBlockOrEval = (type != Type.LOCAL);
this.isArgumentScope = !isBlockOrEval;
this.firstKeywordIndex = firstKeywordIndex;
}
protected StaticScope(Type type, StaticScope enclosingScope, String[] names) {
this(type, enclosingScope, names, -1);
}
public int getFirstKeywordIndex() {
return firstKeywordIndex;
}
/**
 * Construct a DynamicScope for this static scope, using a cached (possibly
 * generated, specialized) constructor handle.
 *
 * @param parent the parent dynamic scope
 * @return a new DynamicScope
 */
public DynamicScope construct(DynamicScope parent) {
    MethodHandle constructor = this.constructor;
    // Fast path reads the volatile field once; slow path builds the handle.
    if (constructor == null) constructor = acquireConstructor();
    try {
        return (DynamicScope) constructor.invokeExact(this, parent);
    } catch (Throwable e) {
        Helpers.throwException(e);
        return null; // not reached
    }
}
/**
 * Slow path for construct(): build the constructor handle exactly once.
 * Synchronized so only one thread generates it; the field is volatile, so the
 * unsynchronized fast-path read in construct() observes the published handle.
 *
 * @return the constructor MethodHandle, never null
 */
private synchronized MethodHandle acquireConstructor() {
    // check again
    MethodHandle constructor = this.constructor;
    if (constructor != null) return constructor;
    int numberOfVariables = getNumberOfVariables();
    if (numberOfVariables > MAX_SPECIALIZED_SIZE) {
        // Too many variables to specialize: fall back to the generic scope.
        constructor = ManyVarsDynamicScope.CONSTRUCTOR;
    } else {
        constructor = DynamicScopeGenerator.generate(numberOfVariables);
    }
    this.constructor = constructor;
    return constructor;
}
public IRScope getIRScope() {
return irScope;
}
public IRScopeType getScopeType() {
return scopeType;
}
public void setScopeType(IRScopeType scopeType) {
this.scopeType = scopeType;
}
public void setIRScope(IRScope irScope) {
this.irScope = irScope;
this.scopeType = irScope.getScopeType();
}
/**
 * Add a new variable to this (current) scope unless it is already defined in the
 * current scope.
 *
 * @param name of new variable
 * @return index of variable
 */
public int addVariableThisScope(String name) {
    // Only this scope is consulted — contrast with addVariable, which searches
    // all reachable scopes.
    int slot = exists(name);
    if (slot >= 0) return slot;
    // Clear constructor since we are adding a name
    constructor = null;
    // This is perhaps innefficient timewise? Optimal spacewise
    growVariableNames(name);
    // Returns slot of variable
    return variableNames.length - 1;
}
/**
* Add a new named capture variable to this (current) scope.
*
* @param name name of variable.
* @return index of variable
*/
public int addNamedCaptureVariable(String name) {
int index = addVariableThisScope(name);
growNamedCaptures(index);
return index;
}
/**
 * Add a new variable to this (current) scope unless it is already defined in any
 * reachable scope.
 *
 * @param name of new variable
 * @return index+depth merged location of scope
 */
public int addVariable(String name) {
    // isDefined searches outward through block/eval scopes; if found anywhere,
    // the encoded (depth << 16) | slot location is returned as-is.
    int slot = isDefined(name);
    if (slot >= 0) return slot;
    // Clear constructor since we are adding a name
    constructor = null;
    // This is perhaps innefficient timewise? Optimal spacewise
    growVariableNames(name);
    // Returns slot of variable
    return variableNames.length - 1;
}
public String[] getVariables() {
return variableNames.clone();
}
public int getNumberOfVariables() {
return variableNamesLength;
}
/**
 * Replace this scope's variable names with a defensive copy of the given array.
 *
 * @param names the new variable names; must not be null
 */
public void setVariables(String[] names) {
    assert names != null : "names is not null";
    // Clear constructor since we are changing names
    constructor = null;
    // clone() replaces the manual allocate+System.arraycopy with the same effect.
    variableNames = names.clone();
    variableNamesLength = names.length;
}
/**
* Gets a constant back from lexical search from the cref in this scope.
* As it is for defined? we will not forced resolution of autoloads nor
* call const_defined
*/
public IRubyObject getConstantDefined(String internedName) {
IRubyObject result = cref.fetchConstant(internedName);
if (result != null) return result;
return previousCRefScope == null ? null : previousCRefScope.getConstantDefinedNoObject(internedName);
}
public IRubyObject getConstantDefinedNoObject(String internedName) {
if (previousCRefScope == null) return null;
return getConstantDefined(internedName);
}
public IRubyObject getConstant(String internedName) {
IRubyObject result = getConstantInner(internedName);
// If we could not find the constant from cref..then try getting from inheritence hierarchy
return result == null ? cref.getConstantNoConstMissing(internedName) : result;
}
public IRubyObject getConstantInner(String internedName) {
IRubyObject result = cref.fetchConstant(internedName);
if (result != null) {
return result == RubyObject.UNDEF ? cref.resolveUndefConstant(internedName) : result;
}
return previousCRefScope == null ? null : previousCRefScope.getConstantInnerNoObject(internedName);
}
private IRubyObject getConstantInnerNoObject(String internedName) {
if (previousCRefScope == null) return null;
return getConstantInner(internedName);
}
/**
* Next outer most scope in list of scopes. An enclosing scope may have no direct scoping
* relationship to its child. If I am in a localScope and then I enter something which
* creates another localScope the enclosing scope will be the first scope, but there are
* no valid scoping relationships between the two. Methods which walk the enclosing scopes
* are responsible for enforcing appropriate scoping relationships.
*
* @return the parent scope
*/
public StaticScope getEnclosingScope() {
return enclosingScope;
}
/**
* Does the variable exist?
*
* @param name of the variable to find
* @return index of variable or -1 if it does not exist
*/
public int exists(String name) {
return findVariableName(name);
}
private int findVariableName(String name) {
for (int i = 0; i < variableNames.length; i++) {
if (name.equals(variableNames[i])) return i;
}
return -1;
}
/**
* Is this name in the visible to the current scope
*
* @param name to be looked for
* @return a location where the left-most 16 bits of number of scopes down it is and the
* right-most 16 bits represents its index in that scope
*/
public int isDefined(String name) {
return isDefined(name, 0);
}
/**
* Make a DASgn or LocalAsgn node based on scope logic
*
* @param position
* @param name
* @param value
* @return
*
* Note: This is private code made public only for parser.
*/
public AssignableNode assign(ISourcePosition position, RubySymbol name, Node value) {
return assign(position, name, value, this, 0);
}
/**
* Register a keyword argument with this staticScope. It additionally will track
* where the first keyword argument started so we can test and tell whether we have
* a kwarg or an ordinary variable during live execution (See keywordExists).
* @param position
* @param symbolID
* @param value
* @return
*/
public AssignableNode assignKeyword(ISourcePosition position, RubySymbol symbolID, Node value) {
AssignableNode assignment = assign(position, symbolID, value, this, 0);
// register first keyword index encountered
if (firstKeywordIndex == -1) firstKeywordIndex = ((IScopedNode) assignment).getIndex();
return assignment;
}
public boolean keywordExists(String name) {
int slot = exists(name);
return slot >= 0 && firstKeywordIndex != -1 && slot >= firstKeywordIndex;
}
/**
* Get all visible variables that we can see from this scope that have been assigned
* (e.g. seen so far)
*
* @return a list of all names (sans $~ and $_ which are special names)
*/
public String[] getAllNamesInScope() {
return collectVariables(ArrayList::new, ArrayList::add).stream().toArray(String[]::new);
}
/**
* Populate a deduplicated collection of variable names in scope using the given functions.
*
* This may include variables that are not strictly Ruby local variable names, so the consumer should validate
* names as appropriate.
*
* @param collectionFactory used to construct the collection
* @param collectionPopulator used to pass values into the collection
* @param <T> resulting collection type
* @return populated collection
*/
public <T> T collectVariables(IntFunction<T> collectionFactory, BiConsumer<T, String> collectionPopulator) {
StaticScope current = this;
T collection = collectionFactory.apply(current.variableNamesLength);
HashMap<String, Object> dedup = new HashMap<>();
while (current.isBlockOrEval) {
for (String name : current.variableNames) {
dedup.computeIfAbsent(name, key -> {collectionPopulator.accept(collection, key); return key;});
}
current = current.enclosingScope;
}
// once more for method scope
for (String name : current.variableNames) {
dedup.computeIfAbsent(name, key -> {collectionPopulator.accept(collection, key); return key;});
}
return collection;
}
/**
* Convenience wrapper around {@link #collectVariables(IntFunction, BiConsumer)}.
*
* @param runtime current runtime
* @return populated RubyArray
*/
public RubyArray getLocalVariables(Ruby runtime) {
return collectVariables(
runtime::newArray,
(a, s) -> {
if (IdUtil.isLocal(s)) a.append(runtime.newSymbol(s));
});
}
public int isDefined(String name, int depth) {
if (isBlockOrEval) {
int slot = exists(name);
if (slot >= 0) return (depth << 16) | slot;
return enclosingScope.isDefined(name, depth + 1);
} else {
return (depth << 16) | exists(name);
}
}
public AssignableNode addAssign(ISourcePosition position, RubySymbol symbolID, Node value) {
int slot = addVariable(symbolID.idString());
// No bit math to store level since we know level is zero for this case
return new DAsgnNode(position, symbolID, slot, value);
}
public AssignableNode assign(ISourcePosition position, RubySymbol symbolID, Node value,
StaticScope topScope, int depth) {
String id = symbolID.idString();
int slot = exists(id);
// We can assign if we already have variable of that name here or we are the only
// scope in the chain (which Local scopes always are).
if (slot >= 0) {
return isBlockOrEval ? new DAsgnNode(position, symbolID, ((depth << 16) | slot), value)
: new LocalAsgnNode(position, symbolID, ((depth << 16) | slot), value);
} else if (!isBlockOrEval && (topScope == this)) {
slot = addVariable(id);
return new LocalAsgnNode(position, symbolID, slot, value);
}
// If we are not a block-scope and we go there, we know that 'topScope' is a block scope
// because a local scope cannot be within a local scope
// If topScope was itself it would have created a LocalAsgnNode above.
return isBlockOrEval ?
enclosingScope.assign(position, symbolID, value, topScope, depth + 1) :
topScope.addAssign(position, symbolID, value);
}
// Note: This is private code made public only for parser.
public Node declare(ISourcePosition position, RubySymbol symbolID, int depth) {
int slot = exists(symbolID.idString());
if (slot >= 0) {
return isBlockOrEval ?
new DVarNode(position, ((depth << 16) | slot), symbolID) :
new LocalVarNode(position, ((depth << 16) | slot), symbolID);
}
return isBlockOrEval ? enclosingScope.declare(position, symbolID, depth + 1) : new VCallNode(position, symbolID);
}
/**
* Make a DVar or LocalVar node based on scoping logic
*
* @param position the location that in the source that the new node will come from
* @param symbolID of the variable to be created is named
* @return a DVarNode or LocalVarNode
*
* Note: This is private code made public only for parser.
*/
public Node declare(ISourcePosition position, RubySymbol symbolID) {
return declare(position, symbolID, 0);
}
/**
* Gets the Local Scope relative to the current Scope. For LocalScopes this will be itself.
* Blocks will contain the LocalScope it contains.
*
* @return localScope
*/
public StaticScope getLocalScope() {
return (type != Type.BLOCK) ? this : enclosingScope.getLocalScope();
}
/**
* Get the live CRef module associated with this scope.
*
* @return the live module
*/
public RubyModule getModule() {
return cref;
}
public StaticScope getPreviousCRefScope() {
return previousCRefScope;
}
public void setPreviousCRefScope(StaticScope crefScope) {
this.previousCRefScope = crefScope;
}
public void setModule(RubyModule module) {
this.cref = module;
for (StaticScope scope = getEnclosingScope(); scope != null; scope = scope.getEnclosingScope()) {
if (scope.cref != null) {
previousCRefScope = scope;
return;
}
}
}
/**
* Update current scoping structure to populate with proper cref scoping values. This should
* be called at any point when you reference a scope for the first time. For the interpreter
* this is done in a small number of places (defnNode, defsNode, and getBlock). The compiler
* does this in the same places.
*
* @return the current cref, though this is largely an implementation detail
*/
public RubyModule determineModule() {
if (cref == null) {
cref = getEnclosingScope().determineModule();
assert cref != null : "CRef is always created before determine happens";
previousCRefScope = getEnclosingScope().previousCRefScope;
}
return cref;
}
public boolean isBlockScope() {
return isBlockOrEval;
}
/**
* Argument scopes represent scopes which contain arguments for zsuper. All LocalStaticScopes
* are argument scopes and BlockStaticScopes can be when they are used by define_method.
*/
public boolean isArgumentScope() {
return isArgumentScope;
}
public void makeArgumentScope() {
this.isArgumentScope = true;
}
/**
* For all block or method associated with static scopes this will return the signature for that
* signature-providing scope. module bodies and other non-arity specific code will return null.
*/
public Signature getSignature() {
return signature;
}
/**
* This happens in when first defining ArgsNodes or when reifying a method from AOT.
*/
public void setSignature(Signature signature) {
this.signature = signature;
}
public DynamicScope getDummyScope() {
return dummyScope == null ? dummyScope = DynamicScope.newDynamicScope(this) : dummyScope;
}
public void setCommandArgumentStack(long commandArgumentStack) {
this.commandArgumentStack = commandArgumentStack;
}
public long getCommandArgumentStack() {
return commandArgumentStack;
}
private void growVariableNames(String name) {
String[] newVariableNames = new String[variableNames.length + 1];
System.arraycopy(variableNames, 0, newVariableNames, 0, variableNames.length);
variableNames = newVariableNames;
variableNamesLength = newVariableNames.length;
variableNames[variableNames.length - 1] = name;
}
private void growNamedCaptures(int index) {
boolean[] namedCaptures = this.namedCaptures;
boolean[] newNamedCaptures;
if (namedCaptures != null) {
newNamedCaptures = new boolean[Math.max(index + 1, namedCaptures.length)];
System.arraycopy(namedCaptures, 0, newNamedCaptures, 0, namedCaptures.length);
} else {
newNamedCaptures = new boolean[index + 1];
}
newNamedCaptures[index] = true;
this.namedCaptures = newNamedCaptures;
}
public boolean isNamedCapture(int index) {
boolean[] namedCaptures = this.namedCaptures;
return namedCaptures != null && index < namedCaptures.length && namedCaptures[index];
}
@Override
public String toString() {
// FIXME: Do we need to persist cref as well?
return "StaticScope(" + type + "):" + Arrays.toString(variableNames);
}
public Type getType() {
return type;
}
public String getFile() {
return file;
}
public StaticScope duplicate() {
StaticScope dupe = new StaticScope(type, enclosingScope, variableNames == null ? NO_NAMES : variableNames);
// irScope is not guaranteed to be set onto StaticScope until it is executed for the first time.
// We can call duplicate before its first execution.
if (irScope != null) dupe.setIRScope(irScope);
dupe.setScopeType(scopeType);
dupe.setPreviousCRefScope(previousCRefScope);
dupe.setModule(cref);
dupe.setSignature(signature);
return dupe;
}
public RubyModule getOverlayModuleForRead() {
return overlayModule;
}
public RubyModule getOverlayModuleForWrite(ThreadContext context) {
RubyModule omod = overlayModule;
if (omod == null) {
overlayModule = omod = RubyModule.newModule(context.runtime);
}
return omod;
}
public void enableProcRefinements() {
procRefinementsEnabled = true;
}
/**
 * True when proc refinements have been enabled on this scope or any
 * enclosing scope.
 *
 * @return whether refinements apply to procs created under this scope chain
 */
public boolean procRefinementsEnabled() {
    StaticScope scope = this;
    while (scope != null) {
        if (scope.procRefinementsEnabled) return true;
        scope = scope.getEnclosingScope();
    }
    return false;
}
}
|
thruflo/py-soocial | 1 | src/soocial/client.py | import base64
import cookielib
import httplib2
import re
import socket
import urllib2
import vobject
from elementtree import ElementTree
from urllib import quote, urlencode
try:
import json # Python 2.6
except ImportError:
import simplejson as json
from xml2dict import XmlDictParser, XmlListParser
DEFAULT_URI = u'https://www.soocial.com'
class Soocial(object):
"""
Python wrapper for the Soocial developer API.
>>> myemail = 'me@foo.com'
>>> mypassword = '***'
>>> soo = Soocial(myemail, mypassword)
Let's start with an empty account::
>>> len(soo)
0
Now let's add a contact::
>>> id = soo.add({'first_name': 'Buddy', 'last_name': 'Holly'})
The contact id is a string representing an integer::
>>> str(int(id)) == id
True
This can now be used to lookup the contact::
>>> buddy = soo[id]
>>> buddy['family-name']
'Holly'
>>> buddy.keys()
['addresses', 'urls', 'family-name', 'deleted', 'organisations', 'updated-at', 'created-at', 'emails', 'id', 'given-name', 'parents', 'telephones', 'vcard', 'similarity-matrix', 'user-id', 'created-by', 'g-name-for-sorting', 'latest']
You can iterate through all the contacts::
>>> for item in soo:
... item['given-name']
'Buddy'
Edit name information directly::
>>> soo[id] = {'first_name': 'Charles Hardin', 'last_name': 'Holley'}
>>> buddy = soo[id]
>>> buddy['given-name']
'Charles Hardin'
You can also get data in vcard format. Either parsed into a
Python representation using the vobject library::
>>> soo.get_all_vcards()
[<VCARD| [<VERSION{}3.0>, <FN{u'CHARSET': [u'UTF-8']}Charles Hardin Holley>, <N{u'CHARSET': [u'UTF-8']} Charles Hardin Holley >]>]
>>> soo.get_vcard(id)
<VCARD| [<VERSION{}3.0>, <FN{u'CHARSET': [u'UTF-8']}Charles Hardin Holley>, <N{u'CHARSET': [u'UTF-8']} Charles Hardin Holley >]>
Or as raw text::
>>> soo.get_all_vcards(parse=False) #doctest: +ELLIPSIS
['BEGIN:VCARD...END:VCARD']
>>> soo.get_vcard(id, parse=False) #doctest: +ELLIPSIS
'BEGIN:VCARD...END:VCARD'
Contacts contain ``phone_numbers``, ``email_addresses``,
``organisations``, ``urls`` and ``street_addresses``::
>>> soo.get_phones(id)
[]
>>> soo.get_emails(id)
[]
>>> soo.get_urls(id)
[]
>>> soo.get_addresses(id)
[]
>>> soo.get_organisations(id)
[]
Plus there's support to get a small set of data on the existing
user and, presumably, the phone numbers of people the user
is connected with (?)::
>>> user = soo.get_user()
>>> user.keys()
['username', 'name', 'number-of-contacts', 'updated-at', 'created-at', 'allow-newsletters', 'invites-available']
>>> soo.get_connection_phones()
[]
Atm, these are read only, until perhaps I get a little more
info on the API, which atm doesn't work as documented
"""
def __init__(self, email, password, uri=DEFAULT_URI, cache=None, timeout=None):
"""
Initialize the Soocial account.
:param uri: the URI of the server (for example
``https://www.soocial.com/contacts.xml``)
:param cache: either a cache directory path (as a string)
or an object compatible with the ``httplib2.FileCache``
interface. If `None` (the default), no caching is performed.
:param timeout: socket timeout in number of seconds, or
`None` for no timeout
"""
h = httplib2.Http(cache=cache, timeout=timeout)
h.add_credentials(email, password)
h.force_exception_to_status_code = False
self.conn = Connection(h, uri)
self.email = email
self.password = password
def __repr__(self):
return '<%s %r>' % (type(self).__name__, self.conn.uri)
def __contains__(self, id):
"""
Return whether the account contains a contact with the
specified id.
:param id: the contact id
:return: `True` if a database with the name exists, `False` otherwise
Would be nice if the HEAD method was supported...
"""
path = 'contacts/%s.xml' % validate_id(id)
try:
self.conn.get(path)
return True
except ResourceNotFound:
return False
def __iter__(self):
"""
Iterate over contacts list.
"""
resp, data = self.conn.get('contacts.xml')
try:
return iter(data) # ['contacts']['contact'])
except KeyError:
return iter({})
def __len__(self):
"""
Return the number of contacts.
"""
resp, data = self.conn.get('contacts.xml')
try:
return len(data)
except KeyError:
return 0
def __nonzero__(self):
"""
Return whether soocial.com is alive.
Would be nice if the HEAD method was supported...
"""
try:
self.conn.get('contacts.xml')
return True
except:
return False
def __getitem__(self, id):
"""
Return a dict representing the contact with the
specified id.
:param id: the id of the contact
:return: a dict representing the contact
:rtype: dict
:raise ResourceNotFound: if no contact with that id exists
"""
path = 'contacts/%s.xml' % validate_id(id)
resp, data = self.conn.get(path)
return data
def __setitem__(self, id, postdata):
"""
Update the contact with the specified data.
:param id: the id of the contact
:param postdata: the data to update the contact with
:return: a dict representing the contact
:rtype: dict
:raise ResourceNotFound: if no contact with that id exists
"""
path = 'contacts/%s.xml' % validate_id(id)
data = {}
for item in postdata:
data['contact[%s]' % item] = postdata[item]
postdata = urlencode(data, True)
resp, data = self.conn.put(path, content=postdata)
return data
def __delitem__(self, id):
"""
Remove the contact with the specified id.
:param id: the id of the contact
:raise ResourceNotFound: if no contact with that id exists
"""
path = 'contacts/%s.xml' % validate_id(id)
self.conn.delete(path)
def add(self, postdata):
"""
Create a new contact.
:param postdata: the data to create the new contact with
:return: id of the created contact
:rtype: string
"""
path = 'contacts.xml'
data = {}
for item in postdata:
data['contact[%s]' % item] = postdata[item]
postdata = urlencode(data, True)
resp, data = self.conn.post(path, content=postdata)
return data
def get_phones(self, id):
path = 'contacts/%s/telephones.xml' % id
resp, data = self.conn.get(path)
return data
def get_emails(self, id):
path = 'contacts/%s/emails.xml' % id
resp, data = self.conn.get(path)
return data
def get_urls(self, id):
path = 'contacts/%s/urls.xml' % id
resp, data = self.conn.get(path)
return data
def get_addresses(self, id):
path = 'contacts/%s/addresses.xml' % id
resp, data = self.conn.get(path)
return data
def get_organisations(self, id):
path = 'contacts/%s/organisations.xml' % id
resp, data = self.conn.get(path)
return data
def get_user(self):
"""
Special case: requires cookie based authentication.
"""
raw = "%s:%s" % (self.email, self.password)
auth = base64.encodestring(raw).strip()
headers = {'AUTHORIZATION': 'Basic %s' % auth}
opener = urllib2.build_opener(
urllib2.HTTPCookieProcessor(
cookielib.LWPCookieJar()
)
)
url = '%s/user.xml' % DEFAULT_URI
request = urllib2.Request(url, headers=headers)
sock = opener.open(request)
xml = ElementTree.fromstring(sock.read())
return XmlDictParser(xml)
def get_connection_phones(self):
resp, data = self.conn.get('/connections/phones.xml')
return data
def get_all_vcards(self, parse=True):
"""
Get all the contacts as a list of vcards.
The vcards are parsed from plain text into vobject.vCard
form (a python wrapper class) by default.
:param parse: set this to False to return just the raw text
:return: list of vcards
:rtype: list
"""
resp, data = self.conn.get('contacts.vcf')
data = data.replace('END:VCARDBEGIN:VCARD', 'END:VCARD\nBEGIN:VCARD')
data = data.strip()
vcards = []
while True:
i = data.find('END:VCARD')
if i > -1:
i += len('END:VCARD')
text = data[:i]
data = data[i:]
if parse:
vcard = vobject.readOne(text.strip())
vcards.append(vcard)
else:
vcards.append(text.strip())
else: # no more left, we're done
break
return vcards
def get_vcard(self, id, parse=True):
"""
Get contact vcard.
:param id: contact id
:param parse: set this to False to return just the raw text
:return: vcard
:rtype: vobject.vCard or string
"""
path = 'contacts/%s.vcf' % validate_id(id)
resp, data = self.conn.get(path)
if parse:
vcard = vobject.readOne(data)
else:
vcard = data
return vcard
class Connection(object):
def __init__(self, http, uri):
if http is None:
http = httplib2.Http()
http.force_exception_to_status_code = False
self.http = http
self.uri = uri
def get(self, path, headers=None, **params):
return self._request('GET', path, headers=headers, **params)
def post(self, path, content=None, headers=None, **params):
return self._request(
'POST', path, content=content, headers=headers, **params
)
def put(self, path, content=None, headers=None, **params):
return self._request(
'PUT', path, content=content, headers=headers, **params
)
def head(self, path, headers=None, **params):
return self._request('HEAD', path, headers=headers, **params)
def delete(self, path, headers=None, **params):
return self._request('DELETE', path, headers=headers, **params)
def _request(self, method, path, content=None, headers=None, **params):
headers = headers or {}
headers.setdefault('Accept', '*/*')
headers.setdefault('User-Agent', 'py-soocial')
body = None
if content is not None:
body = content
headers.setdefault('Content-Type', 'application/x-www-form-urlencoded')
headers.setdefault('Content-Length', str(len(body)))
def _make_request(retry=1):
try:
url = uri(
self.uri,
path,
**params
)
return self.http.request(
url,
method,
body = body,
headers = headers
)
except socket.error, e:
if retry > 0 and e.args[0] == 54: # reset by peer
return _make_request(retry - 1)
raise
resp, data = _make_request()
code = int(resp.status)
if data:
if code == 200 and resp.get('content-type').startswith('application/xml'):
xml = ElementTree.fromstring(data)
# hack logic to differentiate between the two types of
# response from soocial
# one day it would be nice to have a *proper*
# xml <-> py dict <-> xml convertor
tagname = u''
config = XmlListParser
for item in xml.getchildren():
if not tagname:
tagname = item.tag
else:
if not item.tag == tagname:
config = XmlDictParser
break
data = config(xml)
elif code == 201:
data = resp['location'].split('/')[-1]
if code >= 400:
if type(data) is dict:
error = (data.get('error'), data.get('reason'))
else:
error = data
if code == 404:
raise ResourceNotFound(error)
elif code == 409:
raise ResourceConflict(error)
elif code == 412:
raise PreconditionFailed(error)
else:
raise ServerError((code, error))
return resp, data
class PreconditionFailed(Exception):
"""412"""
class ResourceNotFound(Exception):
"""404"""
class ResourceConflict(Exception):
"""409"""
class ServerError(Exception):
"""Unexpected HTTP error"""
def uri(base, *path, **query):
"""
Assemble a uri based on a base, any number of path segments,
and query string parameters.
>>> uri('http://example.org/', '/_all_dbs')
'http://example.org/_all_dbs'
"""
if base and base.endswith('/'):
base = base[:-1]
retval = [base]
# build the path
path = '/'.join([''] +
[s.strip('/') for s in path
if s is not None])
if path:
retval.append(path)
# build the query string
params = []
for name, value in query.items():
if type(value) in (list, tuple):
params.extend([(name, i) for i in value if i is not None])
elif value is not None:
if value is True:
value = 'true'
elif value is False:
value = 'false'
params.append((name, value))
if params:
retval.extend(['?', unicode_urlencode(params)])
return ''.join(retval)
def unicode_quote(string, safe=''):
if isinstance(string, unicode):
string = string.encode('utf-8')
return quote(string, safe)
def unicode_urlencode(data, doseq=None):
if isinstance(data, dict):
data = data.items()
params = []
for name, value in data:
if isinstance(value, unicode):
value = value.encode('utf-8')
params.append((name, value))
return urlencode(params, doseq)
VALID_ID = re.compile(r'^[0-9]+$')
def validate_id(id):
if not VALID_ID.match(id):
raise ValueError('Invalid contact ID')
return id
if __name__ == '__main__':
import sys
email = sys.args[0]
password = sys.args[1]
soo = Soocial(email, password)
for item in soo:
print item
| # -*- coding: utf-8 -*-
import base64
import cookielib
import httplib2
import re
import socket
import urllib2
import vobject
from elementtree import ElementTree
from urllib import quote, urlencode
try:
import json # Python 2.6
except ImportError:
import simplejson as json
from xml2dict import XmlDictParser, XmlListParser
DEFAULT_URI = u'https://api.soocial.com'
class Soocial(object):
"""
Python wrapper for the Soocial developer API.
>>> myemail = 'me@foo.com'
>>> mypassword = '***'
>>> soo = Soocial(myemail, mypassword)
Let's start with an empty account::
>>> len(soo)
0
Now let's add a contact::
>>> id = soo.add({'first_name': 'Buddy', 'last_name': 'Holly'})
The contact id is a string representing an integer::
>>> str(int(id)) == id
True
This can now be used to lookup the contact::
>>> buddy = soo[id]
>>> buddy['family-name']
'Holly'
>>> buddy.keys()
['addresses', 'urls', 'family-name', 'deleted', 'organisations', 'updated-at', 'created-at', 'emails', 'id', 'given-name', 'parents', 'telephones', 'vcard', 'similarity-matrix', 'user-id', 'created-by', 'g-name-for-sorting', 'latest']
You can iterate through all the contacts::
>>> for item in soo:
... item['given-name']
'Buddy'
Edit name information directly::
>>> soo[id] = {'first_name': 'Charles Hardin', 'last_name': 'Holley'}
>>> buddy = soo[id]
>>> buddy['given-name']
'Charles Hardin'
You can also get data in vcard format. Either parsed into a
Python representation using the vobject library::
>>> soo.get_all_vcards()
[<VCARD| [<VERSION{}3.0>, <FN{u'CHARSET': [u'UTF-8']}Charles Hardin Holley>, <N{u'CHARSET': [u'UTF-8']} Charles Hardin Holley >]>]
>>> soo.get_vcard(id)
<VCARD| [<VERSION{}3.0>, <FN{u'CHARSET': [u'UTF-8']}Charles Hardin Holley>, <N{u'CHARSET': [u'UTF-8']} Charles Hardin Holley >]>
Or as raw text::
>>> soo.get_all_vcards(parse=False) #doctest: +ELLIPSIS
['BEGIN:VCARD...END:VCARD']
>>> soo.get_vcard(id, parse=False) #doctest: +ELLIPSIS
'BEGIN:VCARD...END:VCARD'
Contacts contain ``phone_numbers``, ``email_addresses``,
``organisations``, ``urls`` and ``street_addresses``::
>>> soo.get_phones(id)
[]
>>> soo.get_emails(id)
[]
>>> soo.get_urls(id)
[]
>>> soo.get_addresses(id)
[]
>>> soo.get_organisations(id)
[]
Plus there's support to get a small set of data on the existing
user and, presumably, the phone numbers of people the user
is connected with (?)::
>>> user = soo.get_user()
>>> user.keys()
['username', 'name', 'number-of-contacts', 'updated-at', 'created-at', 'allow-newsletters', 'invites-available']
>>> soo.get_connection_phones()
[]
Atm, these are read only, until perhaps I get a little more
info on the API, which atm doesn't work as documented
"""
def __init__(self, email, password, uri=DEFAULT_URI, cache=None, timeout=None):
"""
Initialize the Soocial account.
:param uri: the URI of the server (for example
``https://www.soocial.com/contacts.xml``)
:param cache: either a cache directory path (as a string)
or an object compatible with the ``httplib2.FileCache``
interface. If `None` (the default), no caching is performed.
:param timeout: socket timeout in number of seconds, or
`None` for no timeout
"""
h = httplib2.Http(cache=cache, timeout=timeout)
h.add_credentials(email, password)
h.force_exception_to_status_code = False
self.conn = Connection(h, uri)
self.email = email
self.password = password
def __repr__(self):
return '<%s %r>' % (type(self).__name__, self.conn.uri)
def __contains__(self, id):
"""
Return whether the account contains a contact with the
specified id.
:param id: the contact id
:return: `True` if a database with the name exists, `False` otherwise
Would be nice if the HEAD method was supported...
"""
path = 'contacts/%s.xml' % validate_id(id)
try:
self.conn.get(path)
return True
except ResourceNotFound:
return False
def __iter__(self):
"""
Iterate over contacts list.
"""
resp, data = self.conn.get('contacts.xml')
try:
return iter(data) # ['contacts']['contact'])
except KeyError:
return iter({})
def __len__(self):
"""
Return the number of contacts.
"""
resp, data = self.conn.get('contacts.xml')
try:
return len(data)
except KeyError:
return 0
def __nonzero__(self):
"""
Return whether soocial.com is alive.
Would be nice if the HEAD method was supported...
"""
try:
self.conn.get('contacts.xml')
return True
except:
return False
def __getitem__(self, id):
"""
Return a dict representing the contact with the
specified id.
:param id: the id of the contact
:return: a dict representing the contact
:rtype: dict
:raise ResourceNotFound: if no contact with that id exists
"""
path = 'contacts/%s.xml' % validate_id(id)
resp, data = self.conn.get(path)
return data
def __setitem__(self, id, postdata):
"""
Update the contact with the specified data.
:param id: the id of the contact
:param postdata: the data to update the contact with
:return: a dict representing the contact
:rtype: dict
:raise ResourceNotFound: if no contact with that id exists
"""
path = 'contacts/%s.xml' % validate_id(id)
data = {}
for item in postdata:
data['contact[%s]' % item] = postdata[item]
postdata = urlencode(data, True)
resp, data = self.conn.put(path, content=postdata)
return data
def __delitem__(self, id):
"""
Remove the contact with the specified id.
:param id: the id of the contact
:raise ResourceNotFound: if no contact with that id exists
"""
path = 'contacts/%s.xml' % validate_id(id)
self.conn.delete(path)
def add(self, postdata):
"""
Create a new contact.
:param postdata: the data to create the new contact with
:return: id of the created contact
:rtype: string
"""
path = 'contacts.xml'
data = {}
for item in postdata:
data['contact[%s]' % item] = postdata[item]
postdata = urlencode(data, True)
resp, data = self.conn.post(path, content=postdata)
return data
def get_phones(self, id):
path = 'contacts/%s/telephones.xml' % id
resp, data = self.conn.get(path)
return data
def get_emails(self, id):
path = 'contacts/%s/emails.xml' % id
resp, data = self.conn.get(path)
return data
def get_urls(self, id):
path = 'contacts/%s/urls.xml' % id
resp, data = self.conn.get(path)
return data
def get_addresses(self, id):
path = 'contacts/%s/addresses.xml' % id
resp, data = self.conn.get(path)
return data
def get_organisations(self, id):
path = 'contacts/%s/organisations.xml' % id
resp, data = self.conn.get(path)
return data
def get_user(self):
"""
Special case: requires cookie based authentication.
"""
raw = "%s:%s" % (self.email, self.password)
auth = base64.encodestring(raw).strip()
headers = {'AUTHORIZATION': 'Basic %s' % auth}
opener = urllib2.build_opener(
urllib2.HTTPCookieProcessor(
cookielib.LWPCookieJar()
)
)
url = '%s/user.xml' % DEFAULT_URI
request = urllib2.Request(url, headers=headers)
sock = opener.open(request)
xml = ElementTree.fromstring(sock.read())
return XmlDictParser(xml)
def get_connection_phones(self):
resp, data = self.conn.get('/connections/phones.xml')
return data
def get_all_vcards(self, parse=True):
"""
Get all the contacts as a list of vcards.
The vcards are parsed from plain text into vobject.vCard
form (a python wrapper class) by default.
:param parse: set this to False to return just the raw text
:return: list of vcards
:rtype: list
"""
resp, data = self.conn.get('contacts.vcf')
data = data.replace('END:VCARDBEGIN:VCARD', 'END:VCARD\nBEGIN:VCARD')
data = data.strip()
vcards = []
while True:
i = data.find('END:VCARD')
if i > -1:
i += len('END:VCARD')
text = data[:i]
data = data[i:]
if parse:
vcard = vobject.readOne(text.strip())
vcards.append(vcard)
else:
vcards.append(text.strip())
else: # no more left, we're done
break
return vcards
def get_vcard(self, id, parse=True):
"""
Get contact vcard.
:param id: contact id
:param parse: set this to False to return just the raw text
:return: vcard
:rtype: vobject.vCard or string
"""
path = 'contacts/%s.vcf' % validate_id(id)
resp, data = self.conn.get(path)
if parse:
vcard = vobject.readOne(data)
else:
vcard = data
return vcard
class Connection(object):
def __init__(self, http, uri):
if http is None:
http = httplib2.Http()
http.force_exception_to_status_code = False
self.http = http
self.uri = uri
def get(self, path, headers=None, **params):
return self._request('GET', path, headers=headers, **params)
def post(self, path, content=None, headers=None, **params):
return self._request(
'POST', path, content=content, headers=headers, **params
)
def put(self, path, content=None, headers=None, **params):
return self._request(
'PUT', path, content=content, headers=headers, **params
)
def head(self, path, headers=None, **params):
return self._request('HEAD', path, headers=headers, **params)
def delete(self, path, headers=None, **params):
return self._request('DELETE', path, headers=headers, **params)
def _request(self, method, path, content=None, headers=None, **params):
headers = headers or {}
headers.setdefault('Accept', '*/*')
headers.setdefault('User-Agent', 'py-soocial')
body = None
if content is not None:
body = content
headers.setdefault('Content-Type', 'application/x-www-form-urlencoded')
headers.setdefault('Content-Length', str(len(body)))
def _make_request(retry=1):
try:
url = uri(
self.uri,
path,
**params
)
return self.http.request(
url,
method,
body = body,
headers = headers
)
except socket.error, e:
if retry > 0 and e.args[0] == 54: # reset by peer
return _make_request(retry - 1)
raise
resp, data = _make_request()
code = int(resp.status)
if data:
if code == 200 and resp.get('content-type').startswith('application/xml'):
xml = ElementTree.fromstring(data)
# hack logic to differentiate between the two types of
# response from soocial
# one day it would be nice to have a *proper*
# xml <-> py dict <-> xml convertor
tagname = u''
config = XmlListParser
for item in xml.getchildren():
if not tagname:
tagname = item.tag
else:
if not item.tag == tagname:
config = XmlDictParser
break
data = config(xml)
elif code == 201:
data = resp['location'].split('/')[-1]
if code >= 400:
if type(data) is dict:
error = (data.get('error'), data.get('reason'))
else:
error = data
if code == 404:
raise ResourceNotFound(error)
elif code == 409:
raise ResourceConflict(error)
elif code == 412:
raise PreconditionFailed(error)
else:
raise ServerError((code, error))
return resp, data
class PreconditionFailed(Exception):
"""412"""
class ResourceNotFound(Exception):
"""404"""
class ResourceConflict(Exception):
"""409"""
class ServerError(Exception):
"""Unexpected HTTP error"""
def uri(base, *path, **query):
"""
Assemble a uri based on a base, any number of path segments,
and query string parameters.
>>> uri('http://example.org/', '/_all_dbs')
'http://example.org/_all_dbs'
"""
if base and base.endswith('/'):
base = base[:-1]
retval = [base]
# build the path
path = '/'.join([''] +
[s.strip('/') for s in path
if s is not None])
if path:
retval.append(path)
# build the query string
params = []
for name, value in query.items():
if type(value) in (list, tuple):
params.extend([(name, i) for i in value if i is not None])
elif value is not None:
if value is True:
value = 'true'
elif value is False:
value = 'false'
params.append((name, value))
if params:
retval.extend(['?', unicode_urlencode(params)])
return ''.join(retval)
def unicode_quote(string, safe=''):
if isinstance(string, unicode):
string = string.encode('utf-8')
return quote(string, safe)
def unicode_urlencode(data, doseq=None):
if isinstance(data, dict):
data = data.items()
params = []
for name, value in data:
if isinstance(value, unicode):
value = value.encode('utf-8')
params.append((name, value))
return urlencode(params, doseq)
VALID_ID = re.compile(r'^[0-9]+$')
def validate_id(id):
if not VALID_ID.match(id):
raise ValueError('Invalid contact ID')
return id
if __name__ == '__main__':
import sys
email = sys.args[0]
password = sys.args[1]
soo = Soocial(email, password)
for item in soo:
print item
|
dovadi/active_merchant_ideal | 2 | lib/active_merchant_ideal/ideal_response.rb | require 'openssl'
require 'base64'
require 'rexml/document'
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
# The base class for all iDEAL response classes.
#
# Note that if the iDEAL system is under load it will _not_ allow more
# then two retries per request.
class IdealResponse < Response
def initialize(response_body, options = {})
@response = REXML::Document.new(response_body).root
@success = !error_occured?
@test = options[:test]
end
# Returns a technical error message.
def error_message
text('//Error/errorMessage') unless success?
end
# Returns a consumer friendly error message.
def consumer_error_message
text('//Error/consumerMessage') unless success?
end
# Returns details on the error if available.
def error_details
text('//Error/errorDetail') unless success?
end
# Returns an error type inflected from the first two characters of the
# error code. See error_code for a full list of errors.
#
# Error code to type mappings:
#
# * +IX+ - <tt>:xml</tt>
# * +SO+ - <tt>:system</tt>
# * +SE+ - <tt>:security</tt>
# * +BR+ - <tt>:value</tt>
# * +AP+ - <tt>:application</tt>
def error_type
unless success?
case error_code[0,2]
when 'IX' then :xml
when 'SO' then :system
when 'SE' then :security
when 'BR' then :value
when 'AP' then :application
end
end
end
# Returns the code of the error that occured.
#
# === Codes
#
# ==== IX: Invalid XML and all related problems
#
# Such as incorrect encoding, invalid version, or otherwise unreadable:
#
# * <tt>IX1000</tt> - Received XML not well-formed.
# * <tt>IX1100</tt> - Received XML not valid.
# * <tt>IX1200</tt> - Encoding type not UTF-8.
# * <tt>IX1300</tt> - XML version number invalid.
# * <tt>IX1400</tt> - Unknown message.
# * <tt>IX1500</tt> - Mandatory main value missing. (Merchant ID ?)
# * <tt>IX1600</tt> - Mandatory value missing.
#
# ==== SO: System maintenance or failure
#
# The errors that are communicated in the event of system maintenance or
# system failure. Also covers the situation where new requests are no
# longer being accepted but requests already submitted will be dealt with
# (until a certain time):
#
# * <tt>SO1000</tt> - Failure in system.
# * <tt>SO1200</tt> - System busy. Try again later.
# * <tt>SO1400</tt> - Unavailable due to maintenance.
#
# ==== SE: Security and authentication errors
#
# Incorrect authentication methods and expired certificates:
#
# * <tt>SE2000</tt> - Authentication error.
# * <tt>SE2100</tt> - Authentication method not supported.
# * <tt>SE2700</tt> - Invalid electronic signature.
#
# ==== BR: Field errors
#
# Extra information on incorrect fields:
#
# * <tt>BR1200</tt> - iDEAL version number invalid.
# * <tt>BR1210</tt> - Value contains non-permitted character.
# * <tt>BR1220</tt> - Value too long.
# * <tt>BR1230</tt> - Value too short.
# * <tt>BR1240</tt> - Value too high.
# * <tt>BR1250</tt> - Value too low.
# * <tt>BR1250</tt> - Unknown entry in list.
# * <tt>BR1270</tt> - Invalid date/time.
# * <tt>BR1280</tt> - Invalid URL.
#
# ==== AP: Application errors
#
# Errors relating to IDs, account numbers, time zones, transactions:
#
# * <tt>AP1000</tt> - Acquirer ID unknown.
# * <tt>AP1100</tt> - Merchant ID unknown.
# * <tt>AP1200</tt> - Issuer ID unknown.
# * <tt>AP1300</tt> - Sub ID unknown.
# * <tt>AP1500</tt> - Merchant ID not active.
# * <tt>AP2600</tt> - Transaction does not exist.
# * <tt>AP2620</tt> - Transaction already submitted.
# * <tt>AP2700</tt> - Bank account number not 11-proof.
# * <tt>AP2900</tt> - Selected currency not supported.
# * <tt>AP2910</tt> - Maximum amount exceeded. (Detailed record states the maximum amount).
# * <tt>AP2915</tt> - Amount too low. (Detailed record states the minimum amount).
# * <tt>AP2920</tt> - Please adjust expiration period. See suggested expiration period.
def error_code
text('//errorCode') unless success?
end
private
def error_occured?
@response.name == 'ErrorRes'
end
def text(path)
@response.get_text(path).to_s
end
end
# An instance of IdealTransactionResponse is returned from
# IdealGateway#setup_purchase which returns the service_url to where the
# user should be redirected to perform the transaction _and_ the
# transaction ID.
class IdealTransactionResponse < IdealResponse
# Returns the URL to the issuer’s page where the consumer should be
# redirected to in order to perform the payment.
def service_url
CGI::unescapeHTML(text('//issuerAuthenticationURL'))
end
# Returns the transaction ID which is needed for requesting the status
# of a transaction. See IdealGateway#capture.
def transaction_id
text('//transactionID')
end
# Returns the <tt>:order_id</tt> for this transaction.
def order_id
text('//purchaseID')
end
end
# An instance of IdealStatusResponse is returned from IdealGateway#capture
# which returns whether or not the transaction that was started with
# IdealGateway#setup_purchase was successful.
#
# It takes care of checking if the message was authentic by verifying the
# the message and its signature against the iDEAL certificate.
#
# If success? returns +false+ because the authenticity wasn't verified
# there will be no error_code, error_message, and error_type. Use verified?
# to check if the authenticity has been verified.
class IdealStatusResponse < IdealResponse
def initialize(response_body, options = {})
super
@success = transaction_successful?
end
# Returns the status message, which is one of: <tt>:success</tt>,
# <tt>:cancelled</tt>, <tt>:expired</tt>, <tt>:open</tt>, or
# <tt>:failure</tt>.
def status
text('//status').downcase.to_sym
end
# Returns whether or not the authenticity of the message could be
# verified.
def verified?
@verified ||= IdealGateway.ideal_certificate.public_key.
verify(OpenSSL::Digest::SHA1.new, signature, message)
end
private
# Checks if no errors occured _and_ if the message was authentic.
def transaction_successful?
!error_occured? && status == :success && verified?
end
# The message that we need to verify the authenticity.
def message
text('//createDateTimeStamp') + text('//transactionID') + text('//status') + text('//consumerAccountNumber')
end
def signature
Base64.decode64(text('//signatureValue'))
end
end
# An instance of IdealDirectoryResponse is returned from
# IdealGateway#issuers which returns the list of issuers available at the
# acquirer.
class IdealDirectoryResponse < IdealResponse
# Returns a list of issuers available at the acquirer.
#
# gateway.issuers.list # => [{ :id => '1006', :name => 'ABN AMRO Bank' }]
def list
@response.get_elements('//Issuer').map do |issuer|
{ :id => issuer.get_text('issuerID').to_s, :name => issuer.get_text('issuerName').to_s }
end
end
end
end
end | require 'openssl'
require 'base64'
require 'rexml/document'
module ActiveMerchant #:nodoc:
module Billing #:nodoc:
# The base class for all iDEAL response classes.
#
# Note that if the iDEAL system is under load it will _not_ allow more
# then two retries per request.
class IdealResponse < Response
def initialize(response_body, options = {})
@response = REXML::Document.new(response_body).root
@success = !error_occured?
@test = options[:test]
end
# Returns a technical error message.
def error_message
text('//Error/errorMessage') unless success?
end
# Returns a consumer friendly error message.
def consumer_error_message
text('//Error/consumerMessage') unless success?
end
# Returns details on the error if available.
def error_details
text('//Error/errorDetail') unless success?
end
def suggested_action
text('//Error/suggestedAction') unless success?
end
# Returns an error type inflected from the first two characters of the
# error code. See error_code for a full list of errors.
#
# Error code to type mappings:
#
# * +IX+ - <tt>:xml</tt>
# * +SO+ - <tt>:system</tt>
# * +SE+ - <tt>:security</tt>
# * +BR+ - <tt>:value</tt>
# * +AP+ - <tt>:application</tt>
def error_type
unless success?
case error_code[0,2]
when 'IX' then :xml
when 'SO' then :system
when 'SE' then :security
when 'BR' then :value
when 'AP' then :application
end
end
end
# Returns the code of the error that occured.
#
# === Codes
#
# ==== IX: Invalid XML and all related problems
#
# Such as incorrect encoding, invalid version, or otherwise unreadable:
#
# * <tt>IX1000</tt> - Received XML not well-formed.
# * <tt>IX1100</tt> - Received XML not valid.
# * <tt>IX1200</tt> - Encoding type not UTF-8.
# * <tt>IX1300</tt> - XML version number invalid.
# * <tt>IX1400</tt> - Unknown message.
# * <tt>IX1500</tt> - Mandatory main value missing. (Merchant ID ?)
# * <tt>IX1600</tt> - Mandatory value missing.
#
# ==== SO: System maintenance or failure
#
# The errors that are communicated in the event of system maintenance or
# system failure. Also covers the situation where new requests are no
# longer being accepted but requests already submitted will be dealt with
# (until a certain time):
#
# * <tt>SO1000</tt> - Failure in system.
# * <tt>SO1200</tt> - System busy. Try again later.
# * <tt>SO1400</tt> - Unavailable due to maintenance.
#
# ==== SE: Security and authentication errors
#
# Incorrect authentication methods and expired certificates:
#
# * <tt>SE2000</tt> - Authentication error.
# * <tt>SE2100</tt> - Authentication method not supported.
# * <tt>SE2700</tt> - Invalid electronic signature.
#
# ==== BR: Field errors
#
# Extra information on incorrect fields:
#
# * <tt>BR1200</tt> - iDEAL version number invalid.
# * <tt>BR1210</tt> - Value contains non-permitted character.
# * <tt>BR1220</tt> - Value too long.
# * <tt>BR1230</tt> - Value too short.
# * <tt>BR1240</tt> - Value too high.
# * <tt>BR1250</tt> - Value too low.
# * <tt>BR1250</tt> - Unknown entry in list.
# * <tt>BR1270</tt> - Invalid date/time.
# * <tt>BR1280</tt> - Invalid URL.
#
# ==== AP: Application errors
#
# Errors relating to IDs, account numbers, time zones, transactions:
#
# * <tt>AP1000</tt> - Acquirer ID unknown.
# * <tt>AP1100</tt> - Merchant ID unknown.
# * <tt>AP1200</tt> - Issuer ID unknown.
# * <tt>AP1300</tt> - Sub ID unknown.
# * <tt>AP1500</tt> - Merchant ID not active.
# * <tt>AP2600</tt> - Transaction does not exist.
# * <tt>AP2620</tt> - Transaction already submitted.
# * <tt>AP2700</tt> - Bank account number not 11-proof.
# * <tt>AP2900</tt> - Selected currency not supported.
# * <tt>AP2910</tt> - Maximum amount exceeded. (Detailed record states the maximum amount).
# * <tt>AP2915</tt> - Amount too low. (Detailed record states the minimum amount).
# * <tt>AP2920</tt> - Please adjust expiration period. See suggested expiration period.
def error_code
text('//errorCode') unless success?
end
private
def error_occured?
@response.name == 'ErrorRes'
end
def text(path)
@response.get_text(path).to_s
end
end
# An instance of IdealTransactionResponse is returned from
# IdealGateway#setup_purchase which returns the service_url to where the
# user should be redirected to perform the transaction _and_ the
# transaction ID.
class IdealTransactionResponse < IdealResponse
# Returns the URL to the issuer’s page where the consumer should be
# redirected to in order to perform the payment.
def service_url
CGI::unescapeHTML(text('//issuerAuthenticationURL'))
end
# Returns the transaction ID which is needed for requesting the status
# of a transaction. See IdealGateway#capture.
def transaction_id
text('//transactionID')
end
# Returns the <tt>:order_id</tt> for this transaction.
def order_id
text('//purchaseID')
end
end
# An instance of IdealStatusResponse is returned from IdealGateway#capture
# which returns whether or not the transaction that was started with
# IdealGateway#setup_purchase was successful.
#
# It takes care of checking if the message was authentic by verifying the
# the message and its signature against the iDEAL certificate.
#
# If success? returns +false+ because the authenticity wasn't verified
# there will be no error_code, error_message, and error_type. Use verified?
# to check if the authenticity has been verified.
class IdealStatusResponse < IdealResponse
def initialize(response_body, options = {})
super
@success = transaction_successful?
end
# Returns the status message, which is one of: <tt>:success</tt>,
# <tt>:cancelled</tt>, <tt>:expired</tt>, <tt>:open</tt>, or
# <tt>:failure</tt>.
def status
status = text('//status')
status.downcase.to_sym unless status.blank?
end
# Returns whether or not the authenticity of the message could be
# verified.
def verified?
@verified ||= IdealGateway.ideal_certificate.public_key.
verify(OpenSSL::Digest::SHA1.new, signature, message)
end
private
# Checks if no errors occured _and_ if the message was authentic.
def transaction_successful?
!error_occured? && status == :success && verified?
end
# The message that we need to verify the authenticity.
def message
text('//createDateTimeStamp') + text('//transactionID') + text('//status') + text('//consumerAccountNumber')
end
def signature
Base64.decode64(text('//signatureValue'))
end
end
# An instance of IdealDirectoryResponse is returned from
# IdealGateway#issuers which returns the list of issuers available at the
# acquirer.
class IdealDirectoryResponse < IdealResponse
# Returns a list of issuers available at the acquirer.
#
# gateway.issuers.list # => [{ :id => '1006', :name => 'ABN AMRO Bank' }]
def list
@response.get_elements('//Issuer').map do |issuer|
{ :id => issuer.get_text('issuerID').to_s, :name => issuer.get_text('issuerName').to_s }
end
end
end
end
end |
newrelic/newrelic-ruby-agent | 2,880 | lib/new_relic/agent/instrumentation/rdkafka/prepend.rb | # This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/newrelic-ruby-agent/blob/main/LICENSE for complete details.
# frozen_string_literal: true
require_relative 'instrumentation'
module NewRelic::Agent::Instrumentation
module RdkafkaProducer
module Prepend
include NewRelic::Agent::Instrumentation::Rdkafka
def produce(**kwargs)
produce_with_new_relic(kwargs) do |headers|
kwargs[:headers] = headers
super
end
end
end
end
module RdkafkaConsumer
module Prepend
include NewRelic::Agent::Instrumentation::Rdkafka
def each
super do |message|
each_with_new_relic(message) do
yield(message)
end
end
end
end
end
module RdkafkaConfig
module Prepend
include NewRelic::Agent::Instrumentation::RdkafkaConfig
if defined?(::Rdkafka) && Gem::Version.new(::Rdkafka::VERSION) >= Gem::Version.new('0.16.0')
def producer(**kwargs)
super.tap do |producer|
set_nr_config(producer)
end
end
def consumer(**kwargs)
super.tap do |consumer|
set_nr_config(consumer)
end
end
else # older versions
def producer
super.tap do |producer|
set_nr_config(producer)
end
end
def consumer
super.tap do |consumer|
set_nr_config(consumer)
end
end
end
end
end
end
| # This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/newrelic-ruby-agent/blob/main/LICENSE for complete details.
# frozen_string_literal: true
require_relative 'instrumentation'
module NewRelic::Agent::Instrumentation
module RdkafkaProducer
module Prepend
include NewRelic::Agent::Instrumentation::Rdkafka
def produce(**kwargs)
produce_with_new_relic(kwargs) do |headers|
kwargs[:headers] = headers
super
end
end
end
end
module RdkafkaConsumer
module Prepend
include NewRelic::Agent::Instrumentation::Rdkafka
def each
super do |message|
each_with_new_relic(message) do
yield(message)
end
end
end
end
end
module RdkafkaConfig
module Prepend
include NewRelic::Agent::Instrumentation::RdkafkaConfig
if (defined?(::Rdkafka) && Gem::Version.new(::Rdkafka::VERSION) >= Gem::Version.new('0.16.0')) ||
(Gem::Version.new(RUBY_VERSION) >= Gem::Version.new('2.7.0'))
def producer(**kwargs)
super.tap do |producer|
set_nr_config(producer)
end
end
def consumer(**kwargs)
super.tap do |consumer|
set_nr_config(consumer)
end
end
else # older versions
def producer
super.tap do |producer|
set_nr_config(producer)
end
end
def consumer
super.tap do |consumer|
set_nr_config(consumer)
end
end
end
end
end
end
|
brettporter/centrepoint | 3 | centrepoint/modules/webapp/src/main/java/com/effectivemaven/centrepoint/web/BuildNumber.java | package com.effectivemaven.centrepoint.web;
/**
* Copyright 2009
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import com.google.inject.Singleton;
/**
* Capture the build number and report to the application.
*/
@Singleton
public class BuildNumber
{
private final String buildMessage;
public BuildNumber()
{
String msg;
try
{
ResourceBundle bundle = ResourceBundle.getBundle( "build" );
msg = bundle.getString( "build.message" );
}
catch ( MissingResourceException e )
{
msg = "Unknown Build";
}
buildMessage = msg;
}
public String getBuildMessage()
{
return buildMessage;
}
}
| package com.effectivemaven.centrepoint.web;
/**
* Copyright 2009
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.util.Date;
import java.util.MissingResourceException;
import java.util.ResourceBundle;
import com.google.inject.Singleton;
/**
* Capture the build number and report to the application.
*/
@Singleton
public class BuildNumber
{
private final String buildMessage;
private Date buildDate = null;
public BuildNumber()
{
String msg;
try
{
ResourceBundle bundle = ResourceBundle.getBundle( "build" );
msg = bundle.getString( "build.message" );
buildDate = new Date( Long.valueOf( bundle.getString( "build.timestamp" ) ) );
}
catch ( MissingResourceException e )
{
msg = "Unknown Build";
}
buildMessage = msg;
}
public String getBuildMessage()
{
return buildMessage;
}
public String getBuildDate()
{
return buildDate.toString();
}
}
|
cvpcs/android_kernel_incrediblec | 1 | arch/arm/mach-msm/board-incrediblec.c | /* linux/arch/arm/mach-msm/board-incrediblec.c
*
* Copyright (C) 2009 Google, Inc.
* Copyright (C) 2009 HTC Corporation.
* Author: Dima Zavin <dima@android.com>
*
* This software is licensed under the terms of the GNU General Public
* License version 2, as published by the Free Software Foundation, and
* may be copied, distributed, and modified under those terms.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*/
#include <linux/delay.h>
#include <linux/gpio.h>
#include <linux/i2c.h>
#include <linux/i2c-msm.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/platform_device.h>
#include <linux/android_pmem.h>
#include <linux/input.h>
#include <linux/akm8973.h>
#include <linux/bma150.h>
#include <linux/capella_cm3602.h>
#include <linux/regulator/machine.h>
#include <asm/mach-types.h>
#include <asm/mach/arch.h>
#include <asm/mach/map.h>
#include <asm/setup.h>
#include <mach/htc_headset_mgr.h>
#include <mach/htc_headset_gpio.h>
#include <mach/htc_headset_microp.h>
#include <mach/board.h>
#include <mach/board_htc.h>
#include <mach/hardware.h>
#include <mach/atmega_microp.h>
#include <mach/camera.h>
#include <mach/msm_iomap.h>
#include <mach/htc_battery.h>
#include <mach/htc_usb.h>
#include <mach/perflock.h>
#include <mach/msm_serial_debugger.h>
#include <mach/system.h>
#include <linux/spi/spi.h>
#include <linux/curcial_oj.h>
#include <mach/msm_panel.h>
#include "board-incrediblec.h"
#include "devices.h"
#include "proc_comm.h"
#include "smd_private.h"
#if 1 /*allenou, bt for bcm, 2009/7/8 */
#include <mach/msm_serial_hs.h>
#endif
#include <mach/tpa6130.h>
#include <mach/msm_flashlight.h>
#include <linux/atmel_qt602240.h>
#include <mach/vreg.h>
/* #include <mach/pmic.h> */
#include <mach/msm_hsusb.h>
#define SMEM_SPINLOCK_I2C 6
#define INCREDIBLEC_MICROP_VER 0x04
#ifdef CONFIG_ARCH_QSD8X50
extern unsigned char *get_bt_bd_ram(void);
#endif
void msm_init_pmic_vibrator(void);
extern void __init incrediblec_audio_init(void);
#ifdef CONFIG_MICROP_COMMON
void __init incrediblec_microp_init(void);
#endif
#define SAMSUNG_PANEL 0
/*Bitwise mask for SONY PANEL ONLY*/
#define SONY_PANEL 0x1 /*Set bit 0 as 1 when it is SONY PANEL*/
#define SONY_PWM_SPI 0x2 /*Set bit 1 as 1 as PWM_SPI mode, otherwise it is PWM_MICROP mode*/
#define SONY_GAMMA 0x4 /*Set bit 2 as 1 when panel contains GAMMA table in its NVM*/
#define SONY_RGB666 0x8 /*Set bit 3 as 1 when panel is 18 bit, otherwise it is 16 bit*/
extern int panel_type;
unsigned int engineerid;
static struct htc_battery_platform_data htc_battery_pdev_data = {
/* .gpio_mbat_in = INCREDIBLEC_GPIO_MBAT_IN,*/
/* .gpio_mchg_en_n = INCREDIBLEC_GPIO_MCHG_EN_N,*/
/* .gpio_iset = INCREDIBLEC_GPIO_ISET,*/
.guage_driver = GUAGE_MODEM,
.m2a_cable_detect = 1,
.charger = SWITCH_CHARGER,
};
static struct platform_device htc_battery_pdev = {
.name = "htc_battery",
.id = -1,
.dev = {
.platform_data = &htc_battery_pdev_data,
},
};
static int capella_cm3602_power(int pwr_device, uint8_t enable);
/*XA, XB*/
static struct microp_function_config microp_functions[] = {
{
.name = "microp_intrrupt",
.category = MICROP_FUNCTION_INTR,
},
{
.name = "reset-int",
.category = MICROP_FUNCTION_RESET_INT,
.int_pin = 1 << 8,
},
{
.name = "oj",
.category = MICROP_FUNCTION_OJ,
.int_pin = 1 << 12,
},
{
.name = "proximity",
.category = MICROP_FUNCTION_P,
.int_pin = 1 << 11,
.mask_r = {0x00, 0x00, 0x10},
.mask_w = {0x00, 0x00, 0x04},
},
};
/*For XC: Change ALS chip from CM3602 to CM3605*/
static struct microp_function_config microp_functions_1[] = {
{
.name = "remote-key",
.category = MICROP_FUNCTION_REMOTEKEY,
.levels = {0, 33, 50, 110, 160, 220},
.channel = 1,
.int_pin = 1 << 5,
},
{
.name = "microp_intrrupt",
.category = MICROP_FUNCTION_INTR,
},
{
.name = "reset-int",
.category = MICROP_FUNCTION_RESET_INT,
.int_pin = 1 << 8,
},
{
.name = "oj",
.category = MICROP_FUNCTION_OJ,
.int_pin = 1 << 12,
},
{
.name = "proximity",
.category = MICROP_FUNCTION_P,
.int_pin = 1 << 11,
.mask_r = {0x00, 0x00, 0x10},
.mask_w = {0x00, 0x00, 0x04},
},
};
static struct microp_function_config microp_lightsensor = {
.name = "light_sensor",
.category = MICROP_FUNCTION_LSENSOR,
.levels = { 0, 11, 16, 22, 75, 209, 362, 488, 560, 0x3FF },
.channel = 3,
.int_pin = 1 << 9,
.golden_adc = 0xD2,
.mask_w = {0x00, 0x00, 0x04},
.ls_power = capella_cm3602_power,
};
static struct lightsensor_platform_data lightsensor_data = {
.config = µp_lightsensor,
.irq = MSM_uP_TO_INT(9),
};
static struct microp_led_config led_config[] = {
{
.name = "amber",
.type = LED_RGB,
},
{
.name = "green",
.type = LED_RGB,
},
};
static struct microp_led_platform_data microp_leds_data = {
.num_leds = ARRAY_SIZE(led_config),
.led_config = led_config,
};
static struct bma150_platform_data incrediblec_g_sensor_pdata = {
.microp_new_cmd = 1,
};
/* Proximity Sensor (Capella_CM3602)*/
static int __capella_cm3602_power(int on)
{
uint8_t data[3], addr;
int ret;
printk(KERN_DEBUG "%s: Turn the capella_cm3602 power %s\n",
__func__, (on) ? "on" : "off");
if (on)
gpio_direction_output(INCREDIBLEC_GPIO_PROXIMITY_EN_N, 1);
data[0] = 0x00;
data[1] = 0x00;
data[2] = 0x04;
addr = on ? MICROP_I2C_WCMD_GPO_LED_STATUS_EN :
MICROP_I2C_WCMD_GPO_LED_STATUS_DIS;
ret = microp_i2c_write(addr, data, 3);
if (ret < 0)
pr_err("%s: %s capella power failed\n",
__func__, (on ? "enable" : "disable"));
if (!on)
gpio_direction_output(INCREDIBLEC_GPIO_PROXIMITY_EN_N, 0);
return ret;
}
static DEFINE_MUTEX(capella_cm3602_lock);
static unsigned int als_power_control;
static int capella_cm3602_power(int pwr_device, uint8_t enable)
{
unsigned int old_status = 0;
int ret = 0, on = 0;
mutex_lock(&capella_cm3602_lock);
old_status = als_power_control;
if (enable)
als_power_control |= pwr_device;
else
als_power_control &= ~pwr_device;
on = als_power_control ? 1 : 0;
if (old_status == 0 && on)
ret = __capella_cm3602_power(1);
else if (!on)
ret = __capella_cm3602_power(0);
mutex_unlock(&capella_cm3602_lock);
return ret;
}
static struct capella_cm3602_platform_data capella_cm3602_pdata = {
.power = capella_cm3602_power,
.p_en = INCREDIBLEC_GPIO_PROXIMITY_EN_N,
.p_out = MSM_uP_TO_INT(11),
};
/* End Proximity Sensor (Capella_CM3602)*/
static struct htc_headset_microp_platform_data htc_headset_microp_data = {
.remote_int = 1 << 5,
.remote_irq = MSM_uP_TO_INT(5),
.remote_enable_pin = NULL,
.adc_channel = 0x01,
.adc_remote = {0, 33, 50, 110, 160, 220},
};
static struct platform_device microp_devices[] = {
{
.name = "lightsensor_microp",
.dev = {
.platform_data = &lightsensor_data,
},
},
{
.name = "leds-microp",
.id = -1,
.dev = {
.platform_data = µp_leds_data,
},
},
{
.name = BMA150_G_SENSOR_NAME,
.dev = {
.platform_data = &incrediblec_g_sensor_pdata,
},
},
{
.name = "incrediblec_proximity",
.id = -1,
.dev = {
.platform_data = &capella_cm3602_pdata,
},
},
{
.name = "HTC_HEADSET_MICROP",
.id = -1,
.dev = {
.platform_data = &htc_headset_microp_data,
},
},
};
static struct microp_i2c_platform_data microp_data = {
.num_functions = ARRAY_SIZE(microp_functions),
.microp_function = microp_functions,
.num_devices = ARRAY_SIZE(microp_devices),
.microp_devices = microp_devices,
.gpio_reset = INCREDIBLEC_GPIO_UP_RESET_N,
.microp_ls_on = LS_PWR_ON | PS_PWR_ON,
.spi_devices = SPI_OJ | SPI_GSENSOR,
};
static struct gpio_led incrediblec_led_list[] = {
{
.name = "button-backlight",
.gpio = INCREDIBLEC_AP_KEY_LED_EN,
.active_low = 0,
},
};
static struct gpio_led_platform_data incrediblec_leds_data = {
.num_leds = ARRAY_SIZE(incrediblec_led_list),
.leds = incrediblec_led_list,
};
static struct platform_device incrediblec_leds = {
.name = "leds-gpio",
.id = -1,
.dev = {
.platform_data = &incrediblec_leds_data,
},
};
static uint32_t usb_phy_3v3_table[] = {
PCOM_GPIO_CFG(INCREDIBLEC_USB_PHY_3V3_ENABLE, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA)
};
static uint32_t usb_ID_PIN_table[] = {
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_USB_ID_PIN, 0, GPIO_INPUT, GPIO_NO_PULL, GPIO_4MA),
};
static int incrediblec_phy_init_seq[] = { 0x1D, 0x0D, 0x1D, 0x10, -1 };
#ifdef CONFIG_USB_ANDROID
static struct msm_hsusb_platform_data msm_hsusb_pdata = {
.phy_init_seq = incrediblec_phy_init_seq,
.phy_reset = msm_hsusb_8x50_phy_reset,
.usb_id_pin_gpio = INCREDIBLEC_GPIO_USB_ID_PIN,
};
static struct usb_mass_storage_platform_data mass_storage_pdata = {
.nluns = 3,
.vendor = "HTC",
.product = "Android Phone",
.release = 0x0100,
.cdrom_lun = 4,
};
static struct platform_device usb_mass_storage_device = {
.name = "usb_mass_storage",
.id = -1,
.dev = {
.platform_data = &mass_storage_pdata,
},
};
static struct android_usb_platform_data android_usb_pdata = {
.vendor_id = 0x0bb4,
.product_id = 0x0c9e,
.version = 0x0100,
.product_name = "Android Phone",
.manufacturer_name = "HTC",
.num_products = ARRAY_SIZE(usb_products),
.products = usb_products,
.num_functions = ARRAY_SIZE(usb_functions_all),
.functions = usb_functions_all,
};
static struct platform_device android_usb_device = {
.name = "android_usb",
.id = -1,
.dev = {
.platform_data = &android_usb_pdata,
},
};
static void inc_add_usb_devices(void)
{
android_usb_pdata.products[0].product_id =
android_usb_pdata.product_id;
android_usb_pdata.serial_number = board_serialno();
msm_hsusb_pdata.serial_number = board_serialno();
msm_device_hsusb.dev.platform_data = &msm_hsusb_pdata;
config_gpio_table(usb_phy_3v3_table, ARRAY_SIZE(usb_phy_3v3_table));
gpio_set_value(INCREDIBLEC_USB_PHY_3V3_ENABLE, 1);
config_gpio_table(usb_ID_PIN_table, ARRAY_SIZE(usb_ID_PIN_table));
platform_device_register(&msm_device_hsusb);
platform_device_register(&usb_mass_storage_device);
platform_device_register(&android_usb_device);
}
#endif
static struct platform_device incrediblec_rfkill = {
.name = "incrediblec_rfkill",
.id = -1,
};
static struct resource qsd_spi_resources[] = {
{
.name = "spi_irq_in",
.start = INT_SPI_INPUT,
.end = INT_SPI_INPUT,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_irq_out",
.start = INT_SPI_OUTPUT,
.end = INT_SPI_OUTPUT,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_irq_err",
.start = INT_SPI_ERROR,
.end = INT_SPI_ERROR,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_base",
.start = 0xA1200000,
.end = 0xA1200000 + SZ_4K - 1,
.flags = IORESOURCE_MEM,
},
{
.name = "spi_clk",
.start = 17,
.end = 1,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_mosi",
.start = 18,
.end = 1,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_miso",
.start = 19,
.end = 1,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_cs0",
.start = 20,
.end = 1,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_pwr",
.start = 21,
.end = 0,
.flags = IORESOURCE_IRQ,
},
{
.name = "spi_irq_cs0",
.start = 22,
.end = 0,
.flags = IORESOURCE_IRQ,
},
};
static struct platform_device qsd_device_spi = {
.name = "spi_qsd",
.id = 0,
.num_resources = ARRAY_SIZE(qsd_spi_resources),
.resource = qsd_spi_resources,
};
static struct resource msm_kgsl_resources[] = {
{
.name = "kgsl_reg_memory",
.start = MSM_GPU_REG_PHYS,
.end = MSM_GPU_REG_PHYS + MSM_GPU_REG_SIZE - 1,
.flags = IORESOURCE_MEM,
},
{
.name = "kgsl_phys_memory",
.start = MSM_GPU_MEM_BASE,
.end = MSM_GPU_MEM_BASE + MSM_GPU_MEM_SIZE - 1,
.flags = IORESOURCE_MEM,
},
{
.start = INT_GRAPHICS,
.end = INT_GRAPHICS,
.flags = IORESOURCE_IRQ,
},
};
#define PWR_RAIL_GRP_CLK 8
static int incrediblec_kgsl_power_rail_mode(int follow_clk)
{
int mode = follow_clk ? 0 : 1;
int rail_id = PWR_RAIL_GRP_CLK;
return msm_proc_comm(PCOM_CLKCTL_RPC_RAIL_CONTROL, &rail_id, &mode);
}
static int incrediblec_kgsl_power(bool on)
{
int cmd;
int rail_id = PWR_RAIL_GRP_CLK;
cmd = on ? PCOM_CLKCTL_RPC_RAIL_ENABLE : PCOM_CLKCTL_RPC_RAIL_DISABLE;
return msm_proc_comm(cmd, &rail_id, NULL);
}
static struct platform_device msm_kgsl_device = {
.name = "kgsl",
.id = -1,
.resource = msm_kgsl_resources,
.num_resources = ARRAY_SIZE(msm_kgsl_resources),
};
static struct android_pmem_platform_data mdp_pmem_pdata = {
.name = "pmem",
.start = MSM_PMEM_MDP_BASE,
.size = MSM_PMEM_MDP_SIZE,
.no_allocator = 0,
.cached = 1,
};
static struct android_pmem_platform_data android_pmem_adsp_pdata = {
.name = "pmem_adsp",
.start = MSM_PMEM_ADSP_BASE,
.size = MSM_PMEM_ADSP_SIZE,
.no_allocator = 0,
.cached = 1,
};
#ifdef CONFIG_720P_CAMERA
static struct android_pmem_platform_data android_pmem_venc_pdata = {
.name = "pmem_venc",
.start = MSM_PMEM_VENC_BASE,
.size = MSM_PMEM_VENC_SIZE,
.no_allocator = 0,
.cached = 1,
};
#else
static struct android_pmem_platform_data android_pmem_camera_pdata = {
.name = "pmem_camera",
.start = MSM_PMEM_CAMERA_BASE,
.size = MSM_PMEM_CAMERA_SIZE,
.no_allocator = 1,
.cached = 1,
};
#endif
static struct platform_device android_pmem_mdp_device = {
.name = "android_pmem",
.id = 0,
.dev = {
.platform_data = &mdp_pmem_pdata
},
};
static struct platform_device android_pmem_adsp_device = {
.name = "android_pmem",
.id = 4,
.dev = {
.platform_data = &android_pmem_adsp_pdata,
},
};
#ifdef CONFIG_720P_CAMERA
static struct platform_device android_pmem_venc_device = {
.name = "android_pmem",
.id = 5,
.dev = {
.platform_data = &android_pmem_venc_pdata,
},
};
#else
static struct platform_device android_pmem_camera_device = {
.name = "android_pmem",
.id = 5,
.dev = {
.platform_data = &android_pmem_camera_pdata,
},
};
#endif
static struct resource ram_console_resources[] = {
{
.start = MSM_RAM_CONSOLE_BASE,
.end = MSM_RAM_CONSOLE_BASE + MSM_RAM_CONSOLE_SIZE - 1,
.flags = IORESOURCE_MEM,
},
};
static struct platform_device ram_console_device = {
.name = "ram_console",
.id = -1,
.num_resources = ARRAY_SIZE(ram_console_resources),
.resource = ram_console_resources,
};
static int incrediblec_atmel_ts_power(int on)
{
printk(KERN_INFO "incrediblec_atmel_ts_power(%d)\n", on);
if (on) {
gpio_set_value(INCREDIBLEC_GPIO_TP_EN, 1);
msleep(2);
gpio_set_value(INCREDIBLEC_GPIO_TP_RST, 1);
} else {
gpio_set_value(INCREDIBLEC_GPIO_TP_EN, 0);
msleep(2);
}
return 0;
}
struct atmel_i2c_platform_data incrediblec_atmel_ts_data[] = {
{
.version = 0x016,
.abs_x_min = 1,
.abs_x_max = 1023,
.abs_y_min = 2,
.abs_y_max = 966,
.abs_pressure_min = 0,
.abs_pressure_max = 255,
.abs_width_min = 0,
.abs_width_max = 20,
.gpio_irq = INCREDIBLEC_GPIO_TP_INT_N,
.power = incrediblec_atmel_ts_power,
.config_T6 = {0, 0, 0, 0, 0, 0},
.config_T7 = {50, 15, 25},
.config_T8 = {10, 0, 20, 10, 0, 0, 5, 15},
.config_T9 = {139, 0, 0, 18, 12, 0, 16, 38, 3, 7, 0, 5, 2, 15, 2, 10, 25, 5, 0, 0, 0, 0, 0, 0, 0, 0, 159, 47, 149, 81, 40},
.config_T15 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T19 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T20 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T22 = {15, 0, 0, 0, 0, 0, 0, 0, 16, 0, 1, 0, 7, 18, 25, 30, 0},
.config_T23 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T24 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T25 = {3, 0, 200, 50, 64, 31, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T27 = {0, 0, 0, 0, 0, 0, 0},
.config_T28 = {0, 0, 2, 4, 8, 60},
.object_crc = {0xDB, 0xBF, 0x60},
.cable_config = {35, 30, 8, 16},
.GCAF_level = {20, 24, 28, 40, 63},
.filter_level = {15, 60, 963, 1008},
},
{
.version = 0x015,
.abs_x_min = 13,
.abs_x_max = 1009,
.abs_y_min = 15,
.abs_y_max = 960,
.abs_pressure_min = 0,
.abs_pressure_max = 255,
.abs_width_min = 0,
.abs_width_max = 20,
.gpio_irq = INCREDIBLEC_GPIO_TP_INT_N,
.power = incrediblec_atmel_ts_power,
.config_T6 = {0, 0, 0, 0, 0, 0},
.config_T7 = {50, 15, 25},
.config_T8 = {12, 0, 20, 20, 0, 0, 20, 0},
.config_T9 = {139, 0, 0, 18, 12, 0, 32, 40, 2, 7, 0, 5, 2, 0, 2, 10, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 159, 47, 149, 81},
.config_T15 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T19 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T20 = {7, 0, 0, 0, 0, 0, 0, 30, 20, 4, 15, 5},
.config_T22 = {7, 0, 0, 25, 0, -25, 255, 4, 50, 0, 1, 10, 15, 20, 25, 30, 4},
.config_T23 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T24 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T25 = {3, 0, 200, 50, 64, 31, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T27 = {0, 0, 0, 0, 0, 0, 0},
.config_T28 = {0, 0, 2, 4, 8, 60},
.object_crc = {0x19, 0x87, 0x7E},
},
{
.version = 0x014,
.abs_x_min = 13,
.abs_x_max = 1009,
.abs_y_min = 15,
.abs_y_max = 960,
.abs_pressure_min = 0,
.abs_pressure_max = 255,
.abs_width_min = 0,
.abs_width_max = 20,
.gpio_irq = INCREDIBLEC_GPIO_TP_INT_N,
.power = incrediblec_atmel_ts_power,
.config_T6 = {0, 0, 0, 0, 0, 0},
.config_T7 = {50, 15, 25},
.config_T8 = {12, 0, 20, 20, 0, 0, 10, 15},
.config_T9 = {3, 0, 0, 18, 12, 0, 48, 45, 2, 7, 0, 0, 0, 0, 2, 10, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 143, 47, 143, 81},
.config_T15 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T19 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T20 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T22 = {5, 0, 0, 25, 0, -25, 255, 4, 50, 0, 1, 10, 15, 20, 25, 30, 4},
.config_T23 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T24 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T25 = {3, 0, 200, 50, 64, 31, 0, 0, 0, 0, 0, 0, 0, 0},
.config_T27 = {0, 0, 0, 0, 0, 0, 0},
.config_T28 = {0, 0, 2, 4, 8, 60},
}
};
static struct regulator_consumer_supply tps65023_dcdc1_supplies[] = {
{
.supply = "acpu_vcore",
},
};
static struct regulator_init_data tps65023_data[5] = {
{
.constraints = {
.name = "dcdc1", /* VREG_MSMC2_1V29 */
.min_uV = 1000000,
.max_uV = 1300000,
.valid_ops_mask = REGULATOR_CHANGE_VOLTAGE,
},
.consumer_supplies = tps65023_dcdc1_supplies,
.num_consumer_supplies = ARRAY_SIZE(tps65023_dcdc1_supplies),
},
/* dummy values for unused regulators to not crash driver: */
{
.constraints = {
.name = "dcdc2", /* VREG_MSMC1_1V26 */
.min_uV = 1260000,
.max_uV = 1260000,
},
},
{
.constraints = {
.name = "dcdc3", /* unused */
.min_uV = 800000,
.max_uV = 3300000,
},
},
{
.constraints = {
.name = "ldo1", /* unused */
.min_uV = 1000000,
.max_uV = 3150000,
},
},
{
.constraints = {
.name = "ldo2", /* V_USBPHY_3V3 */
.min_uV = 3300000,
.max_uV = 3300000,
},
},
};
static void set_h2w_dat(int n)
{
gpio_set_value(INCREDIBLEC_GPIO_H2W_DATA, n);
}
static void set_h2w_clk(int n)
{
gpio_set_value(INCREDIBLEC_GPIO_H2W_CLK, n);
}
static int get_h2w_dat(void)
{
return gpio_get_value(INCREDIBLEC_GPIO_H2W_DATA);
}
static int get_h2w_clk(void)
{
return gpio_get_value(INCREDIBLEC_GPIO_H2W_CLK);
}
static void h2w_dev_power_on(int on)
{
printk(KERN_INFO "Not support H2W power\n");
}
/* default TX,RX to GPI */
static uint32_t uart3_off_gpi_table[] = {
/* RX, H2W DATA */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_DATA, 0,
GPIO_INPUT, GPIO_NO_PULL, GPIO_2MA),
/* TX, H2W CLK */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_CLK, 0,
GPIO_INPUT, GPIO_NO_PULL, GPIO_2MA),
};
/* set TX,RX to GPO */
static uint32_t uart3_off_gpo_table[] = {
/* RX, H2W DATA */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_DATA, 0,
GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
/* TX, H2W CLK */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_CLK, 0,
GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
};
static void set_h2w_dat_dir(int n)
{
#if 0
if (n == 0) /* input */
gpio_direction_input(INCREDIBLEC_GPIO_H2W_DATA);
else
gpio_configure(INCREDIBLEC_GPIO_H2W_DATA, GPIOF_DRIVE_OUTPUT);
#else
if (n == 0) /* input */
msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
uart3_off_gpi_table + 0, 0);
else
msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
uart3_off_gpo_table + 0, 0);
#endif
}
static void set_h2w_clk_dir(int n)
{
#if 0
if (n == 0) /* input */
gpio_direction_input(INCREDIBLEC_GPIO_H2W_CLK);
else
gpio_configure(INCREDIBLEC_GPIO_H2W_CLK, GPIOF_DRIVE_OUTPUT);
#else
if (n == 0) /* input */
msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
uart3_off_gpi_table + 1, 0);
else
msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
uart3_off_gpo_table + 1, 0);
#endif
}
static void incrediblec_config_serial_debug_gpios(void);
static void h2w_configure(int route)
{
printk(KERN_INFO "H2W route = %d \n", route);
switch (route) {
case H2W_UART3:
incrediblec_config_serial_debug_gpios();
printk(KERN_INFO "H2W -> UART3\n");
break;
case H2W_GPIO:
msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
uart3_off_gpi_table + 0, 0);
msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
uart3_off_gpi_table + 1, 0);
printk(KERN_INFO "H2W -> GPIO\n");
break;
}
}
/* Headset manager: no board-specific configuration needed here. */
static struct htc_headset_mgr_platform_data htc_headset_mgr_data = {
};
static struct platform_device htc_headset_mgr = {
.name = "HTC_HEADSET_MGR",
.id = -1,
.dev = {
.platform_data = &htc_headset_mgr_data,
},
};
/* GPIO-based headset detection; only the detect pin is wired on this board. */
static struct htc_headset_gpio_platform_data htc_headset_gpio_data = {
.hpin_gpio = INCREDIBLEC_GPIO_35MM_HEADSET_DET,
.key_enable_gpio = NULL,
.mic_select_gpio = NULL,
};
static struct platform_device htc_headset_gpio = {
.name = "HTC_HEADSET_GPIO",
.id = -1,
.dev = {
.platform_data = &htc_headset_gpio_data,
},
};
/* AKM8973 compass: reset and interrupt GPIOs plus board orientation data. */
static struct akm8973_platform_data compass_platform_data = {
.layouts = INCREDIBLEC_LAYOUTS,
.project_name = INCREDIBLEC_PROJECT_NAME,
.reset = INCREDIBLEC_GPIO_COMPASS_RST_N,
.intr = INCREDIBLEC_GPIO_COMPASS_INT_N,
};
/* TPA6130 headset amplifier; RPC server not used on this board. */
static struct tpa6130_platform_data headset_amp_platform_data = {
.enable_rpc_server = 0,
};
/*
 * Devices on I2C bus 0.  Addresses are given as 8-bit write addresses
 * shifted down to 7-bit where the >> 1 appears.
 */
static struct i2c_board_info i2c_devices[] = {
{
I2C_BOARD_INFO(ATMEL_QT602240_NAME, 0x94 >> 1),
.platform_data = &incrediblec_atmel_ts_data,
.irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_TP_INT_N)
},
{
I2C_BOARD_INFO(MICROP_I2C_NAME, 0xCC >> 1),
.platform_data = &microp_data,
.irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_UP_INT_N)
},
{
I2C_BOARD_INFO("ds2482", 0x30 >> 1),
/*.platform_data = &microp_data,*/
/*.irq = MSM_GPIO_TO_INT(PASSION_GPIO_UP_INT_N)*/
},
{
I2C_BOARD_INFO("smb329", 0x6E >> 1),
},
{
I2C_BOARD_INFO("akm8973", 0x1C),
.platform_data = &compass_platform_data,
.irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_COMPASS_INT_N),
},
#ifdef CONFIG_MSM_CAMERA
#ifdef CONFIG_OV8810
{
I2C_BOARD_INFO("ov8810", 0x6C >> 1),
},
#endif
#endif/*CONIFIG_MSM_CAMERA*/
{
I2C_BOARD_INFO(TPA6130_I2C_NAME, 0xC0 >> 1),
.platform_data = &headset_amp_platform_data,
},
{
I2C_BOARD_INFO("tps65023", 0x48),
.platform_data = tps65023_data,
},
};
#ifdef CONFIG_ARCH_QSD8X50
static char bdaddress[20];
static void bt_export_bd_address(void)
{
unsigned char cTemp[6];
memcpy(cTemp, get_bt_bd_ram(), 6);
sprintf(bdaddress, "%02x:%02x:%02x:%02x:%02x:%02x", cTemp[0], cTemp[1], cTemp[2], cTemp[3], cTemp[4], cTemp[5]);
printk(KERN_INFO "YoYo--BD_ADDRESS=%s\n", bdaddress);
}
module_param_string(bdaddress, bdaddress, sizeof(bdaddress), S_IWUSR | S_IRUGO);
MODULE_PARM_DESC(bdaddress, "BT MAC ADDRESS");
#endif
/*
 * Camera parallel-bus pin states while the camera is off.  The active
 * configuration drives every line as a no-pull output ("CAMERA SUSPEND");
 * the #if 0 block preserves an older pulled-down-input variant.
 */
static uint32_t camera_off_gpio_table[] = {
#if 0 /* CAMERA OFF*/
PCOM_GPIO_CFG(0, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT0 */
PCOM_GPIO_CFG(1, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT1 */
PCOM_GPIO_CFG(2, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT2 */
PCOM_GPIO_CFG(3, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT3 */
PCOM_GPIO_CFG(4, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT4 */
PCOM_GPIO_CFG(5, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT5 */
PCOM_GPIO_CFG(6, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT6 */
PCOM_GPIO_CFG(7, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT7 */
PCOM_GPIO_CFG(8, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT8 */
PCOM_GPIO_CFG(9, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT9 */
PCOM_GPIO_CFG(10, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT10 */
PCOM_GPIO_CFG(11, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT11 */
PCOM_GPIO_CFG(12, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* PCLK */
PCOM_GPIO_CFG(13, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* HSYNC */
PCOM_GPIO_CFG(14, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* VSYNC */
PCOM_GPIO_CFG(15, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* MCLK */
#endif
/* CAMERA SUSPEND*/
PCOM_GPIO_CFG(0, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT0 */
PCOM_GPIO_CFG(1, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT1 */
PCOM_GPIO_CFG(2, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT2 */
PCOM_GPIO_CFG(3, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT3 */
PCOM_GPIO_CFG(4, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT4 */
PCOM_GPIO_CFG(5, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT5 */
PCOM_GPIO_CFG(6, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT6 */
PCOM_GPIO_CFG(7, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT7 */
PCOM_GPIO_CFG(8, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT8 */
PCOM_GPIO_CFG(9, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT9 */
PCOM_GPIO_CFG(10, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT10 */
PCOM_GPIO_CFG(11, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT11 */
PCOM_GPIO_CFG(12, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* PCLK */
PCOM_GPIO_CFG(13, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* HSYNC */
PCOM_GPIO_CFG(14, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* VSYNC */
PCOM_GPIO_CFG(15, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* MCLK */
PCOM_GPIO_CFG(99, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA), /* CAM1_RST */
PCOM_GPIO_CFG(INCREDIBLEC_CAM_PWD,
0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA), /* CAM1_PWD */
};
/*
 * Camera pin states while streaming: data/sync lines become pulled-up
 * inputs in alt-function 1; PCLK and MCLK use 16 mA drive for signal
 * integrity at pixel-clock rates.
 */
static uint32_t camera_on_gpio_table[] = {
/* CAMERA ON */
PCOM_GPIO_CFG(0, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT0 */
PCOM_GPIO_CFG(1, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT1 */
PCOM_GPIO_CFG(2, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT2 */
PCOM_GPIO_CFG(3, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT3 */
PCOM_GPIO_CFG(4, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT4 */
PCOM_GPIO_CFG(5, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT5 */
PCOM_GPIO_CFG(6, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT6 */
PCOM_GPIO_CFG(7, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT7 */
PCOM_GPIO_CFG(8, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT8 */
PCOM_GPIO_CFG(9, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT9 */
PCOM_GPIO_CFG(10, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT10 */
PCOM_GPIO_CFG(11, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT11 */
PCOM_GPIO_CFG(12, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_16MA), /* PCLK */
PCOM_GPIO_CFG(13, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* HSYNC */
PCOM_GPIO_CFG(14, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* VSYNC */
PCOM_GPIO_CFG(15, 1, GPIO_OUTPUT, GPIO_PULL_UP, GPIO_16MA), /* MCLK */
};
/* Apply the "camera on" pin configuration (callback for the camera driver). */
static void config_camera_on_gpios(void)
{
config_gpio_table(camera_on_gpio_table,
ARRAY_SIZE(camera_on_gpio_table));
}
/* Apply the "camera off/suspend" pin configuration. */
static void config_camera_off_gpios(void)
{
config_gpio_table(camera_off_gpio_table,
ARRAY_SIZE(camera_off_gpio_table));
}
/*
 * VFE (video front end) resources for the MSM camera driver: the
 * memory-mapped register window and the VFE interrupt line.
 */
static struct resource msm_camera_resources[] = {
        {
                .start = MSM_VFE_PHYS,
                .end = MSM_VFE_PHYS + MSM_VFE_SIZE - 1,
                .flags = IORESOURCE_MEM,
        },
        {
                .start = INT_VFE,
                /* was a bare positional initializer after .start; C fills
                 * the next member (.end), but make that explicit */
                .end = INT_VFE,
                .flags = IORESOURCE_IRQ,
        },
};
/* Camera driver hooks plus MDC/clock-controller I/O windows. */
static struct msm_camera_device_platform_data msm_camera_device_data = {
.camera_gpio_on = config_camera_on_gpios,
.camera_gpio_off = config_camera_off_gpios,
.ioext.mdcphy = MSM_MDC_PHYS,
.ioext.mdcsz = MSM_MDC_SIZE,
.ioext.appphy = MSM_CLK_CTL_PHYS,
.ioext.appsz = MSM_CLK_CTL_SIZE,
};
/* Thin adapter so the camera flash config can call the AAT1271 driver. */
static int flashlight_control(int mode)
{
return aat1271_flashlight_control(mode);
}
/*
 * Flash behaviour for camera capture; low_temp_limit/low_cap_limit gate
 * flash use by battery temperature/capacity (units per the flash driver —
 * not shown here).
 */
static struct camera_flash_cfg msm_camera_sensor_flash_cfg = {
.camera_flash = flashlight_control,
.num_flash_levels = FLASHLIGHT_NUM,
.low_temp_limit = 10,
.low_cap_limit = 15,
};
/* OmniVision OV8810 8 MP sensor wiring and driver plumbing. */
static struct msm_camera_sensor_info msm_camera_sensor_ov8810_data = {
.sensor_name = "ov8810",
.sensor_reset = INCREDIBLEC_CAM_RST, /* CAM1_RST */
.sensor_pwd = INCREDIBLEC_CAM_PWD, /* CAM1_PWDN, enabled in a9 */
.pdata = &msm_camera_device_data,
.resource = msm_camera_resources,
.num_resources = ARRAY_SIZE(msm_camera_resources),
.waked_up = 0,
.need_suspend = 0,
.flash_cfg = &msm_camera_sensor_flash_cfg,
};
static struct platform_device msm_camera_sensor_ov8810 = {
.name = "msm_camera_ov8810",
.dev = {
.platform_data = &msm_camera_sensor_ov8810_data,
},
};
/* Configure the three flashlight control pins as 2 mA no-pull outputs. */
static void config_incrediblec_flashlight_gpios(void)
{
static uint32_t flashlight_gpio_table[] = {
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_FLASHLIGHT_TORCH, 0,
GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_FLASHLIGHT_FLASH, 0,
GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_FLASHLIGHT_FLASH_ADJ, 0,
GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
};
config_gpio_table(flashlight_gpio_table,
ARRAY_SIZE(flashlight_gpio_table));
}
/* AAT1271 flashlight: pin assignment and a 600 ms maximum flash pulse. */
static struct flashlight_platform_data incrediblec_flashlight_data = {
.gpio_init = config_incrediblec_flashlight_gpios,
.torch = INCREDIBLEC_GPIO_FLASHLIGHT_TORCH,
.flash = INCREDIBLEC_GPIO_FLASHLIGHT_FLASH,
.flash_adj = INCREDIBLEC_GPIO_FLASHLIGHT_FLASH_ADJ,
.flash_duration_ms = 600,
.led_count = 1,
};
static struct platform_device incrediblec_flashlight_device = {
.name = "flashlight",
.dev = {
.platform_data = &incrediblec_flashlight_data,
},
};
/*
 * Assert or release the optical-joystick shutdown state through the
 * microP controller: command 0x91 when enabling shutdown, 0x90 when
 * releasing it.  The 3-byte payload carries 0x80 in its last byte.
 */
static void curcial_oj_shutdown(int enable)
{
        uint8_t cmd[3] = { 0x00, 0x00, 0x80 };

        microp_i2c_write(enable ? 0x91 : 0x90, cmd, 3);
}
/*
 * Power the optical joystick from the "synt" regulator at 2.75 V.
 * Returns 1 on success, 0 when the regulator cannot be obtained.
 * (The stray ';' that followed the closing brace in the original has
 * been removed — it is not valid at file scope in C89.)
 */
static int curcial_oj_poweron(int on)
{
        struct vreg *oj_power = vreg_get(0, "synt");

        if (IS_ERR(oj_power)) {
                printk(KERN_ERR "%s: Error power domain\n", __func__);
                return 0;
        }
        if (on) {
                /* set the level before enabling the supply */
                vreg_set_level(oj_power, 2750);
                vreg_enable(oj_power);
        } else
                vreg_disable(oj_power);
        printk(KERN_INFO "%s: OJ power enable(%d)\n", __func__, on);
        return 1;
}
/*
 * Fold one optical-joystick motion report into the running X/Y sums.
 * data[1] is the raw X delta, data[2] the raw Y delta (two's-complement
 * int8).  Both deltas are sign-inverted before accumulation.
 * The original contained a dead "if (0)" branch that swapped the axes;
 * it was unreachable and has been removed.
 */
static void curcial_oj_adjust_xy(uint8_t *data, int16_t *mSumDeltaX, int16_t *mSumDeltaY)
{
        int8_t deltaX;
        int8_t deltaY;

        /* -128 has no 8-bit negation; clamp raw 0x80 to 0x81 (-127) */
        if (data[2] == 0x80)
                data[2] = 0x81;
        if (data[1] == 0x80)
                data[1] = 0x81;
        deltaX = (int8_t)data[1]; /*X=1*/
        deltaY = (int8_t)data[2]; /*Y=2*/
        *mSumDeltaX += -((int16_t)deltaX);
        *mSumDeltaY += -((int16_t)deltaY);
}
/*
 * Optical joystick tuning for this board: thresholds, shutter/pixel-sum
 * tables, and per-axis step curves.  Interrupt arrives via microP pin 12.
 */
static struct curcial_oj_platform_data incrediblec_oj_data = {
.oj_poweron = curcial_oj_poweron,
.oj_shutdown = curcial_oj_shutdown,
.oj_adjust_xy = curcial_oj_adjust_xy,
.microp_version = INCREDIBLEC_MICROP_VER,
.debugflag = 0,
.mdelay_time = 0,
.normal_th = 8,
.xy_ratio = 15,
.interval = 20,
.swap = true,
.ap_code = false,
.x = 1,
.y = 1,
.share_power = true,
.Xsteps = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
.Ysteps = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
.sht_tbl = {0, 2000, 2250, 2500, 2750, 3000},
.pxsum_tbl = {0, 0, 40, 50, 60, 70},
.degree = 6,
.irq = MSM_uP_TO_INT(12),
};
static struct platform_device incrediblec_oj = {
.name = CURCIAL_OJ_NAME,
.id = -1,
.dev = {
.platform_data = &incrediblec_oj_data,
}
};
/*
 * Power sequence for the Samsung AMOLED panel (tl2796a).
 * On: disable the regulator pulldown via proc_comm, enable the 2.6 V
 * supply, then pulse the reset line (25/10/20 ms as in the original).
 * Off: drop reset, re-enable the pulldown and disable the supply.
 * The vreg handle is fetched lazily and cached in a function-local static.
 * Returns 0 on success, -EINVAL if the "gp1" regulator is unavailable.
 *
 * Fix: the original declared a local "unsigned ... on" in each branch,
 * shadowing the "on" parameter; the locals are renamed to "pulldown"
 * (same values, same call) to remove the shadowing.
 */
static int amoled_power(int on)
{
        static struct vreg *vreg_lcm_2v6;

        if (!vreg_lcm_2v6) {
                vreg_lcm_2v6 = vreg_get(0, "gp1");
                if (IS_ERR(vreg_lcm_2v6))
                        return -EINVAL;
        }
        if (on) {
                unsigned id, pulldown = 1;
                id = PM_VREG_PDOWN_CAM_ID;
                msm_proc_comm(PCOM_VREG_PULLDOWN, &pulldown, &id);
                vreg_enable(vreg_lcm_2v6);
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
                mdelay(25);
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
                mdelay(10);
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
                mdelay(20);
        } else {
                unsigned id, pulldown = 0;
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
                id = PM_VREG_PDOWN_CAM_ID;
                msm_proc_comm(PCOM_VREG_PULLDOWN, &pulldown, &id);
                vreg_disable(vreg_lcm_2v6);
        }
        return 0;
}
/*
 * Power sequence for the Sony WVGA panel (s6d16a0x21), mirroring
 * amoled_power() but with the panel's own reset timing.
 * Returns 0 on success, -EINVAL if the "gp1" regulator is unavailable.
 *
 * Bug fix: the original computed on_off (0 when powering on, 1 when
 * powering off) but then passed &on — the function PARAMETER — to
 * msm_proc_comm(PCOM_VREG_PULLDOWN, ...), leaving on_off unused and
 * inverting the requested pulldown state.  Pass &on_off as intended.
 */
static int sonywvga_power(int on)
{
        unsigned id, on_off;
        static struct vreg *vreg_lcm_2v6;

        if (!vreg_lcm_2v6) {
                vreg_lcm_2v6 = vreg_get(0, "gp1");
                if (IS_ERR(vreg_lcm_2v6))
                        return -EINVAL;
        }
        if (on) {
                on_off = 0;
                id = PM_VREG_PDOWN_CAM_ID;
                msm_proc_comm(PCOM_VREG_PULLDOWN, &on_off, &id);
                vreg_enable(vreg_lcm_2v6);
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
                mdelay(10);
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
                udelay(500);
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
                mdelay(10);
        } else {
                on_off = 1;
                gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
                mdelay(120);
                id = PM_VREG_PDOWN_CAM_ID;
                msm_proc_comm(PCOM_VREG_PULLDOWN, &on_off, &id);
                vreg_disable(vreg_lcm_2v6);
        }
        return 0;
}
/* Shorthand for an LCD pin: 4 mA no-pull output in the given alt function. */
#define LCM_GPIO_CFG(gpio, func) \
PCOM_GPIO_CFG(gpio, func, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA)
/* RGB666 parallel bus + sync lines in alt-function 1 (panel active). */
static uint32_t display_on_gpio_table[] = {
LCM_GPIO_CFG(INCREDIBLEC_LCD_R0, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R1, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R2, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R3, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R4, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R5, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G0, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G1, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G2, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G3, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G4, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G5, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B0, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B1, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B2, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B3, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B4, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B5, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_PCLK, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_VSYNC, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_HSYNC, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_DE, 1),
};
/* Same pins returned to plain GPIO (function 0) when the panel is off. */
static uint32_t display_off_gpio_table[] = {
LCM_GPIO_CFG(INCREDIBLEC_LCD_R0, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R1, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R2, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R3, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R4, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_R5, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G0, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G1, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G2, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G3, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G4, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_G5, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B0, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B1, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B2, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B3, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B4, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_B5, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_PCLK, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_VSYNC, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_HSYNC, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_DE, 0),
};
/* Extra SPI control pins used only by the Sony panel variant. */
static uint32_t sony_display_on_gpio_table[] = {
LCM_GPIO_CFG(INCREDIBLEC_SPI_CLK, 1),
LCM_GPIO_CFG(INCREDIBLEC_SPI_CS, 1),
LCM_GPIO_CFG(INCREDIBLEC_LCD_ID0, 1),
LCM_GPIO_CFG(INCREDIBLEC_SPI_DO, 1),
};
static uint32_t sony_display_off_gpio_table[] = {
LCM_GPIO_CFG(INCREDIBLEC_SPI_CLK, 0),
LCM_GPIO_CFG(INCREDIBLEC_SPI_CS, 0),
LCM_GPIO_CFG(INCREDIBLEC_LCD_ID0, 0),
LCM_GPIO_CFG(INCREDIBLEC_SPI_DO, 0),
};
/*
 * Switch every panel pin between its active alternate function (on) and
 * plain GPIO driven low (off).  The Sony SPI control pins are handled
 * only when a non-Samsung panel is fitted.  Always returns 0.
 */
static int panel_gpio_switch(int on)
{
        int gpio;

        if (!on) {
                config_gpio_table(display_off_gpio_table,
                                  ARRAY_SIZE(display_off_gpio_table));
                /* park the whole RGB bus and sync lines low */
                for (gpio = INCREDIBLEC_LCD_R0; gpio <= INCREDIBLEC_LCD_R5; gpio++)
                        gpio_set_value(gpio, 0);
                for (gpio = INCREDIBLEC_LCD_G0; gpio <= INCREDIBLEC_LCD_G5; gpio++)
                        gpio_set_value(gpio, 0);
                for (gpio = INCREDIBLEC_LCD_B0; gpio <= INCREDIBLEC_LCD_DE; gpio++)
                        gpio_set_value(gpio, 0);
                if (panel_type != SAMSUNG_PANEL)
                        config_gpio_table(sony_display_off_gpio_table,
                                          ARRAY_SIZE(sony_display_off_gpio_table));
                return 0;
        }
        config_gpio_table(display_on_gpio_table,
                          ARRAY_SIZE(display_on_gpio_table));
        if (panel_type != SAMSUNG_PANEL)
                config_gpio_table(sony_display_on_gpio_table,
                                  ARRAY_SIZE(sony_display_on_gpio_table));
        return 0;
}
/* Carved-out framebuffer memory region shared by both panel drivers. */
static struct resource resources_msm_fb[] = {
{
.start = MSM_FB_BASE,
.end = MSM_FB_BASE + MSM_FB_SIZE - 1,
.flags = IORESOURCE_MEM,
},
};
/* Samsung AMOLED panel: framebuffer, power sequence and pin switching. */
static struct panel_platform_data amoled_data = {
.fb_res = &resources_msm_fb[0],
.power = amoled_power,
.gpio_switch = panel_gpio_switch,
};
static struct platform_device amoled_panel = {
.name = "panel-tl2796a",
.id = -1,
.dev = {
.platform_data = &amoled_data
},
};
/* Sony WVGA panel: same framebuffer, panel-specific power sequence. */
static struct panel_platform_data sonywvga_data = {
.fb_res = &resources_msm_fb[0],
.power = sonywvga_power,
.gpio_switch = panel_gpio_switch,
};
static struct platform_device sonywvga_panel = {
.name = "panel-sonywvga-s6d16a0x21",
.id = -1,
.dev = {
.platform_data = &sonywvga_data,
},
};
/*
 * Platform devices registered unconditionally at init.  USB devices are
 * intentionally absent here — they are registered separately by
 * inc_add_usb_devices() so the serial number can be set first.
 */
static struct platform_device *devices[] __initdata = {
&msm_device_uart1,
#ifdef CONFIG_SERIAL_MSM_HS
&msm_device_uart_dm1,
#endif
&htc_battery_pdev,
&htc_headset_mgr,
&htc_headset_gpio,
&ram_console_device,
&incrediblec_rfkill,
&msm_device_smd,
&msm_device_nand,
/*&msm_device_hsusb,*/
/*&usb_mass_storage_device,*/
&android_pmem_mdp_device,
&android_pmem_adsp_device,
#ifdef CONFIG_720P_CAMERA
&android_pmem_venc_device,
#else
&android_pmem_camera_device,
#endif
&msm_camera_sensor_ov8810,
&msm_kgsl_device,
&msm_device_i2c,
&incrediblec_flashlight_device,
&incrediblec_leds,
#if defined(CONFIG_SPI_QSD)
&qsd_device_spi,
#endif
&incrediblec_oj,
};
/* UART3 pins as a live debug console: RX input, TX driven (function 3). */
static uint32_t incrediblec_serial_debug_table[] = {
/* RX */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_RX, 3, GPIO_INPUT, GPIO_NO_PULL,
GPIO_4MA),
/* TX */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_TX, 3, GPIO_OUTPUT, GPIO_NO_PULL,
GPIO_4MA),
};
/* UART3 pins parked: note TX is left as an input in this table. */
static uint32_t incrediblec_uart_gpio_table[] = {
/* RX */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_RX, 3, GPIO_INPUT, GPIO_NO_PULL,
GPIO_4MA),
/* TX */
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_TX, 3, GPIO_INPUT, GPIO_NO_PULL,
GPIO_4MA),
};
/* Apply the debug-console pin configuration (see h2w_configure above). */
static void incrediblec_config_serial_debug_gpios(void)
{
config_gpio_table(incrediblec_serial_debug_table,
ARRAY_SIZE(incrediblec_serial_debug_table));
}
/* Apply the parked UART pin configuration (called from board init). */
static void incrediblec_config_uart_gpios(void)
{
config_gpio_table(incrediblec_uart_gpio_table,
ARRAY_SIZE(incrediblec_uart_gpio_table));
}
/* 100 kHz I2C with 8 mA drive on both bus lines. */
static struct msm_i2c_device_platform_data msm_i2c_pdata = {
.i2c_clock = 100000,
.clock_strength = GPIO_8MA,
.data_strength = GPIO_8MA,
};
/* Wire up the I2C controller's GPIOs and platform data before probe. */
static void __init msm_device_i2c_init(void)
{
msm_i2c_gpio_init();
msm_device_i2c.dev.platform_data = &msm_i2c_pdata;
}
/* ACPU clock scaling: switch timings plus the 245 MHz idle/collapse floor. */
static struct msm_acpu_clock_platform_data incrediblec_clock_data = {
.acpu_switch_time_us = 20,
.max_speed_delta_khz = 256000,
.vdd_switch_time_us = 62,
.power_collapse_khz = 245000,
.wait_for_irq_khz = 245000,
};
/* CPU frequencies (Hz) selectable by HTC's perflock mechanism. */
static unsigned incrediblec_perf_acpu_table[] = {
245000000,
576000000,
998400000,
};
static struct perflock_platform_data incrediblec_perflock_data = {
.perf_acpu_table = incrediblec_perf_acpu_table,
.table_size = ARRAY_SIZE(incrediblec_perf_acpu_table),
};
/* Defined in the board's MMC support file. */
int incrediblec_init_mmc(int sysrev);
#ifdef CONFIG_SERIAL_MSM_HS
/* High-speed BT UART: wake handshaking over the BT host/chip wake pins. */
static struct msm_serial_hs_platform_data msm_uart_dm1_pdata = {
.rx_wakeup_irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_BT_HOST_WAKE), /*Chip to Device*/
.inject_rx_on_wakeup = 0,
.cpu_lock_supported = 0,
/* for bcm */
.bt_wakeup_pin_supported = 1,
.bt_wakeup_pin = INCREDIBLEC_GPIO_BT_CHIP_WAKE,
.host_wakeup_pin = INCREDIBLEC_GPIO_BT_HOST_WAKE,
};
#endif
/*
 * Bring up the shared 2.85 V "synt" supply feeding the optical joystick
 * and the BMA150 G-sensor.  Returns 0 on success, -EIO when the
 * regulator cannot be obtained.
 *
 * Fixes: vreg_get() reports failure through ERR_PTR, which the original
 * "!vreg" test missed (other callers in this file use IS_ERR); and the
 * vreg_set_level() return value was assigned to ret but immediately
 * overwritten, so its failure was never reported.
 */
static int OJ_BMA_power(void)
{
        int ret;
        struct vreg *vreg = vreg_get(0, "synt");

        if (!vreg || IS_ERR(vreg)) {
                printk(KERN_ERR "%s: vreg error\n", __func__);
                return -EIO;
        }
        ret = vreg_set_level(vreg, 2850);
        if (ret < 0)
                printk(KERN_ERR "%s: vreg set level failed\n", __func__);
        ret = vreg_enable(vreg);
        if (ret < 0)
                printk(KERN_ERR "%s: vreg enable failed\n", __func__);
        return 0;
}
/* Expose the board's engineer ID (parsed from ATAG in incrediblec_fixup). */
unsigned int incrediblec_get_engineerid(void)
{
return engineerid;
}
/*
 * sysfs "virtualkeys" map consumed by the Android input layer: one
 * keycode:center_x:center_y:width:height tuple per capacitive button.
 * Later hardware (engineerid > 1 && system_rev > 1) uses shifted centers.
 */
static ssize_t incrediblec_virtual_keys_show(struct kobject *kobj,
struct kobj_attribute *attr, char *buf)
{
if (engineerid > 1 && system_rev > 1) {
/* center: x: home: 45, menu: 152, back: 318, search 422, y: 830 */
return sprintf(buf,
__stringify(EV_KEY) ":" __stringify(KEY_HOME) ":47:830:74:50"
":" __stringify(EV_KEY) ":" __stringify(KEY_MENU) ":155:830:80:50"
":" __stringify(EV_KEY) ":" __stringify(KEY_BACK) ":337:830:90:50"
":" __stringify(EV_KEY) ":" __stringify(KEY_SEARCH) ":434:830:60:50"
"\n");
} else {
/* center: x: home: 50, menu: 184, back: 315, search 435, y: 830*/
return sprintf(buf,
__stringify(EV_KEY) ":" __stringify(KEY_HOME) ":50:830:98:50"
":" __stringify(EV_KEY) ":" __stringify(KEY_MENU) ":184:830:120:50"
":" __stringify(EV_KEY) ":" __stringify(KEY_BACK) ":315:830:100:50"
":" __stringify(EV_KEY) ":" __stringify(KEY_SEARCH) ":435:830:88:50"
"\n");
}
}
/* Read-only attribute named after the touchscreen input device. */
static struct kobj_attribute incrediblec_virtual_keys_attr = {
.attr = {
.name = "virtualkeys.atmel-touchscreen",
.mode = S_IRUGO,
},
.show = &incrediblec_virtual_keys_show,
};
static struct attribute *incrediblec_properties_attrs[] = {
&incrediblec_virtual_keys_attr.attr,
NULL
};
static struct attribute_group incrediblec_properties_attr_group = {
.attrs = incrediblec_properties_attrs,
};
/* Hardware reset hook: dropping PS_HOLD power-cycles the SoC. */
static void incrediblec_reset(void)
{
gpio_set_value(INCREDIBLEC_GPIO_PS_HOLD, 0);
}
/*
 * Register the panel device matching the detected panel_type: the
 * Samsung AMOLED, or the Sony WVGA for every other type (panel_type is
 * a bitmask for Sony variants — see the SONY_* defines).
 */
static int incrediblec_init_panel(void)
{
int ret = 0;
if (panel_type != SAMSUNG_PANEL)
ret = platform_device_register(&sonywvga_panel);
else
ret = platform_device_register(&amoled_panel);
return ret;
}
/*
 * Machine init.  Order matters: memory carve-outs are fixed up from
 * engineerid first, clocks/perflock before any driver touches them,
 * GPIO/I2C/microP setup before platform_add_devices() probes consumers.
 */
static void __init incrediblec_init(void)
{
int ret;
struct kobject *properties_kobj;
/* NOTE(review): printk without a KERN_ level — harmless but unconventional */
printk("incrediblec_init() revision=%d, engineerid=%d\n", system_rev, engineerid);
msm_hw_reset_hook = incrediblec_reset;
/* engineerid 0/0xF are early (XA) boards with their own carve-out map;
 * id >= 3 boards shift pmem/GPU regions by the extra 128 MB bank */
if (0 == engineerid || 0xF == engineerid) {
mdp_pmem_pdata.start = MSM_PMEM_MDP_XA_BASE;
android_pmem_adsp_pdata.start = MSM_PMEM_ADSP_XA_BASE;
msm_kgsl_resources[1].start = MSM_GPU_MEM_XA_BASE;
msm_kgsl_resources[1].end = MSM_GPU_MEM_XA_BASE + MSM_GPU_MEM_SIZE - 1;
} else if (engineerid >= 3) {
mdp_pmem_pdata.start = MSM_PMEM_MDP_BASE + MSM_MEM_128MB_OFFSET;
android_pmem_adsp_pdata.start = MSM_PMEM_ADSP_BASE + MSM_MEM_128MB_OFFSET;
msm_kgsl_resources[1].start = MSM_GPU_MEM_BASE;
msm_kgsl_resources[1].end = msm_kgsl_resources[1].start + MSM_GPU_MEM_SIZE - 1;
}
OJ_BMA_power();
msm_acpu_clock_init(&incrediblec_clock_data);
perflock_init(&incrediblec_perflock_data);
#if defined(CONFIG_MSM_SERIAL_DEBUGGER)
msm_serial_debug_init(MSM_UART1_PHYS, INT_UART1,
&msm_device_uart1.dev, 1, INT_UART1_RX);
#endif
#ifdef CONFIG_ARCH_QSD8X50
bt_export_bd_address();
#endif
/* set the gpu power rail to manual mode so clk en/dis will not
 * turn off gpu power, and hang it on resume */
incrediblec_kgsl_power_rail_mode(0);
incrediblec_kgsl_power(true);
#ifdef CONFIG_SERIAL_MSM_HS
msm_device_uart_dm1.dev.platform_data = &msm_uart_dm1_pdata;
msm_device_uart_dm1.name = "msm_serial_hs_bcm"; /* for bcm */
#endif
incrediblec_config_uart_gpios();
config_gpio_table(camera_off_gpio_table,
ARRAY_SIZE(camera_off_gpio_table));
/*gpio_direction_output(INCREDIBLEC_GPIO_TP_LS_EN, 0);*/
gpio_direction_output(INCREDIBLEC_GPIO_TP_EN, 0);
incrediblec_audio_init();
msm_device_i2c_init();
#ifdef CONFIG_MICROP_COMMON
incrediblec_microp_init();
#endif
#ifdef CONFIG_USB_ANDROID
inc_add_usb_devices();
#endif
/* XC and later boards carry the CM3605 microP function table */
if (system_rev >= 2) {
microp_data.num_functions = ARRAY_SIZE(microp_functions_1);
microp_data.microp_function = microp_functions_1;
}
platform_add_devices(devices, ARRAY_SIZE(devices));
incrediblec_init_panel();
/* touch controller tuning + config CRC for rev > 2 panels */
if (system_rev > 2) {
incrediblec_atmel_ts_data[0].config_T9[7] = 33;
incrediblec_atmel_ts_data[0].object_crc[0] = 0x2E;
incrediblec_atmel_ts_data[0].object_crc[1] = 0x80;
incrediblec_atmel_ts_data[0].object_crc[2] = 0xE0;
}
i2c_register_board_info(0, i2c_devices, ARRAY_SIZE(i2c_devices));
ret = incrediblec_init_mmc(system_rev);
if (ret != 0)
pr_crit("%s: Unable to initialize MMC\n", __func__);
/* export the virtual-key map to userspace via /sys/board_properties */
properties_kobj = kobject_create_and_add("board_properties", NULL);
if (properties_kobj)
ret = sysfs_create_group(properties_kobj,
&incrediblec_properties_attr_group);
if (!properties_kobj || ret)
pr_err("failed to create board_properties\n");
msm_init_pmic_vibrator();
}
/*
 * Early memory fixup, run before paging is up.  Parses the engineer ID
 * ATAG and lays out the EBI1 memory banks: XA boards (id 0/0xF) expose a
 * single smaller bank; ids 1-2 get two banks; later boards get an extra
 * 128 MB folded into bank 1.
 */
static void __init incrediblec_fixup(struct machine_desc *desc, struct tag *tags,
char **cmdline, struct meminfo *mi)
{
engineerid = parse_tag_engineerid(tags);
mi->nr_banks = 1;
mi->bank[0].start = PHYS_OFFSET;
mi->bank[0].node = PHYS_TO_NID(PHYS_OFFSET);
if (0 == engineerid || 0xF == engineerid)
mi->bank[0].size = (MSM_LINUX_XA_SIZE);
else if (engineerid <= 2) { /* 4G3G */
mi->bank[0].size = MSM_EBI1_BANK0_SIZE;
mi->nr_banks++;
mi->bank[1].start = MSM_EBI1_BANK1_BASE;
mi->bank[1].node = PHYS_TO_NID(MSM_EBI1_BANK1_BASE);
mi->bank[1].size = MSM_EBI1_BANK1_SIZE;
} else {
mi->bank[0].size = MSM_EBI1_BANK0_SIZE;
mi->nr_banks++;
mi->bank[1].start = MSM_EBI1_BANK1_BASE;
mi->bank[1].node = PHYS_TO_NID(MSM_EBI1_BANK1_BASE);
mi->bank[1].size = MSM_EBI1_BANK1_SIZE + MSM_MEM_128MB_OFFSET;
}
}
/* Static I/O mappings and clock tree, shared with other MSM boards. */
static void __init incrediblec_map_io(void)
{
msm_map_common_io();
msm_clock_init();
}
extern struct sys_timer msm_timer;
/* Machine descriptor tying the hooks above to machine type INCREDIBLEC. */
MACHINE_START(INCREDIBLEC, "incrediblec")
#ifdef CONFIG_MSM_DEBUG_UART
.phys_io = MSM_DEBUG_UART_PHYS,
.io_pg_offst = ((MSM_DEBUG_UART_BASE) >> 18) & 0xfffc,
#endif
.boot_params = 0x20000100,
.fixup = incrediblec_fixup,
.map_io = incrediblec_map_io,
.init_irq = msm_init_irq,
.init_machine = incrediblec_init,
.timer = &msm_timer,
MACHINE_END
| /* linux/arch/arm/mach-msm/board-incrediblec.c
*
* Copyright (C) 2009 Google, Inc.
* Copyright (C) 2009 HTC Corporation.
* Author: Dima Zavin <dima@android.com>
*
* This software is licensed under the terms of the GNU General Public
* License version 2, as published by the Free Software Foundation, and
* may be copied, distributed, and modified under those terms.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
*/
#include <linux/delay.h>
#include <linux/gpio.h>
#include <linux/i2c.h>
#include <linux/i2c-msm.h>
#include <linux/init.h>
#include <linux/io.h>
#include <linux/kernel.h>
#include <linux/platform_device.h>
#include <linux/android_pmem.h>
#include <linux/input.h>
#include <linux/akm8973.h>
#include <linux/bma150.h>
#include <linux/capella_cm3602.h>
#include <linux/regulator/machine.h>
#include <asm/mach-types.h>
#include <asm/mach/arch.h>
#include <asm/mach/map.h>
#include <asm/setup.h>
#include <mach/htc_headset_mgr.h>
#include <mach/htc_headset_gpio.h>
#include <mach/htc_headset_microp.h>
#include <mach/board.h>
#include <mach/board_htc.h>
#include <mach/hardware.h>
#include <mach/atmega_microp.h>
#include <mach/camera.h>
#include <mach/msm_iomap.h>
#include <mach/htc_battery.h>
#include <mach/htc_usb.h>
#include <mach/perflock.h>
#include <mach/msm_serial_debugger.h>
#include <mach/system.h>
#include <linux/spi/spi.h>
#include <linux/curcial_oj.h>
#include <mach/msm_panel.h>
#include "board-incrediblec.h"
#include "devices.h"
#include "proc_comm.h"
#include "smd_private.h"
#if 1 /*allenou, bt for bcm, 2009/7/8 */
#include <mach/msm_serial_hs.h>
#endif
#include <mach/tpa6130.h>
#include <mach/msm_flashlight.h>
#include <linux/atmel_qt602240.h>
#include <mach/vreg.h>
/* #include <mach/pmic.h> */
#include <mach/msm_hsusb.h>
#define SMEM_SPINLOCK_I2C 6
/* MicroP firmware version this board file was written against. */
#define INCREDIBLEC_MICROP_VER 0x04
#ifdef CONFIG_ARCH_QSD8X50
extern unsigned char *get_bt_bd_ram(void);
#endif
void msm_init_pmic_vibrator(void);
extern void __init incrediblec_audio_init(void);
#ifdef CONFIG_MICROP_COMMON
void __init incrediblec_microp_init(void);
#endif
#define SAMSUNG_PANEL 0
/*Bitwise mask for SONY PANEL ONLY*/
#define SONY_PANEL 0x1 /*Set bit 0 as 1 when it is SONY PANEL*/
#define SONY_PWM_SPI 0x2 /*Set bit 1 as 1 as PWM_SPI mode, otherwise it is PWM_MICROP mode*/
#define SONY_GAMMA 0x4 /*Set bit 2 as 1 when panel contains GAMMA table in its NVM*/
#define SONY_RGB666 0x8 /*Set bit 3 as 1 when panel is 18 bit, otherwise it is 16 bit*/
/* Detected by the bootloader/display code; consumed in init_panel(). */
extern int panel_type;
/* Hardware revision tag parsed from ATAGs in incrediblec_fixup(). */
unsigned int engineerid;
/* Battery: fuel gauging by the modem, charge switching by the SMB329. */
static struct htc_battery_platform_data htc_battery_pdev_data = {
/* .gpio_mbat_in = INCREDIBLEC_GPIO_MBAT_IN,*/
/* .gpio_mchg_en_n = INCREDIBLEC_GPIO_MCHG_EN_N,*/
/* .gpio_iset = INCREDIBLEC_GPIO_ISET,*/
.guage_driver = GUAGE_MODEM,
.m2a_cable_detect = 1,
.charger = SWITCH_CHARGER,
};
static struct platform_device htc_battery_pdev = {
.name = "htc_battery",
.id = -1,
.dev = {
.platform_data = &htc_battery_pdev_data,
},
};
/* Defined after the mutex below; declared here for the lightsensor config. */
static int capella_cm3602_power(int pwr_device, uint8_t enable);
/*XA, XB*/
/* MicroP sub-functions for the earliest board revisions; int_pin values
 * are bitmasks into the microP interrupt status register. */
static struct microp_function_config microp_functions[] = {
{
.name = "microp_intrrupt",
.category = MICROP_FUNCTION_INTR,
},
{
.name = "reset-int",
.category = MICROP_FUNCTION_RESET_INT,
.int_pin = 1 << 8,
},
{
.name = "oj",
.category = MICROP_FUNCTION_OJ,
.int_pin = 1 << 12,
},
{
.name = "proximity",
.category = MICROP_FUNCTION_P,
.int_pin = 1 << 11,
.mask_r = {0x00, 0x00, 0x10},
.mask_w = {0x00, 0x00, 0x04},
},
};
/*For XC: Change ALS chip from CM3602 to CM3605*/
/* Same table plus the headset remote-key ADC function; selected in
 * incrediblec_init() when system_rev >= 2. */
static struct microp_function_config microp_functions_1[] = {
{
.name = "remote-key",
.category = MICROP_FUNCTION_REMOTEKEY,
.levels = {0, 33, 50, 110, 160, 220},
.channel = 1,
.int_pin = 1 << 5,
},
{
.name = "microp_intrrupt",
.category = MICROP_FUNCTION_INTR,
},
{
.name = "reset-int",
.category = MICROP_FUNCTION_RESET_INT,
.int_pin = 1 << 8,
},
{
.name = "oj",
.category = MICROP_FUNCTION_OJ,
.int_pin = 1 << 12,
},
{
.name = "proximity",
.category = MICROP_FUNCTION_P,
.int_pin = 1 << 11,
.mask_r = {0x00, 0x00, 0x10},
.mask_w = {0x00, 0x00, 0x04},
},
};
/* Ambient light sensor on microP ADC channel 3: ADC-to-level thresholds
 * plus the calibration golden value. */
static struct microp_function_config microp_lightsensor = {
.name = "light_sensor",
.category = MICROP_FUNCTION_LSENSOR,
.levels = { 0, 11, 16, 22, 75, 209, 362, 488, 560, 0x3FF },
.channel = 3,
.int_pin = 1 << 9,
.golden_adc = 0xD2,
.mask_w = {0x00, 0x00, 0x04},
.ls_power = capella_cm3602_power,
};
static struct lightsensor_platform_data lightsensor_data = {
.config = &microp_lightsensor,
.irq = MSM_uP_TO_INT(9),
};
/* Notification LEDs driven through the microP RGB controller. */
static struct microp_led_config led_config[] = {
{
.name = "amber",
.type = LED_RGB,
},
{
.name = "green",
.type = LED_RGB,
},
};
static struct microp_led_platform_data microp_leds_data = {
.num_leds = ARRAY_SIZE(led_config),
.led_config = led_config,
};
/* BMA150 accelerometer behind the microP, using the newer command set. */
static struct bma150_platform_data incrediblec_g_sensor_pdata = {
.microp_new_cmd = 1,
};
/* Proximity Sensor (Capella_CM3602)*/
/*
 * Raw power switch for the CM3602: drives the (active-low-named) enable
 * GPIO and issues the matching microP GPO/LED status command.  Ordering
 * differs by direction — GPIO raised before the microP write on enable,
 * lowered after it on disable.  Returns the microp_i2c_write() result.
 */
static int __capella_cm3602_power(int on)
{
uint8_t data[3], addr;
int ret;
printk(KERN_DEBUG "%s: Turn the capella_cm3602 power %s\n",
__func__, (on) ? "on" : "off");
if (on)
gpio_direction_output(INCREDIBLEC_GPIO_PROXIMITY_EN_N, 1);
data[0] = 0x00;
data[1] = 0x00;
data[2] = 0x04;
addr = on ? MICROP_I2C_WCMD_GPO_LED_STATUS_EN :
MICROP_I2C_WCMD_GPO_LED_STATUS_DIS;
ret = microp_i2c_write(addr, data, 3);
if (ret < 0)
pr_err("%s: %s capella power failed\n",
__func__, (on ? "enable" : "disable"));
if (!on)
gpio_direction_output(INCREDIBLEC_GPIO_PROXIMITY_EN_N, 0);
return ret;
}
static DEFINE_MUTEX(capella_cm3602_lock);
/* Bitmask of client devices currently requesting CM3602 power. */
static unsigned int als_power_control;
/*
 * Reference-counted power control shared by the light and proximity
 * users: each client sets/clears its own bit under the mutex, and the
 * chip is physically powered whenever at least one bit remains set.
 */
static int capella_cm3602_power(int pwr_device, uint8_t enable)
{
        unsigned int prev;
        int rc = 0;
        int want_on;

        mutex_lock(&capella_cm3602_lock);
        prev = als_power_control;
        if (enable)
                als_power_control |= pwr_device;
        else
                als_power_control &= ~pwr_device;
        want_on = als_power_control ? 1 : 0;
        if (prev == 0 && want_on)
                rc = __capella_cm3602_power(1);
        else if (!want_on)
                rc = __capella_cm3602_power(0);
        mutex_unlock(&capella_cm3602_lock);
        return rc;
}
/* Proximity sensor wiring; interrupt comes in through microP pin 11. */
static struct capella_cm3602_platform_data capella_cm3602_pdata = {
.power = capella_cm3602_power,
.p_en = INCREDIBLEC_GPIO_PROXIMITY_EN_N,
.p_out = MSM_uP_TO_INT(11),
};
/* End Proximity Sensor (Capella_CM3602)*/
/* Headset remote key via microP ADC channel 1 (same thresholds as the
 * remote-key entry in microp_functions_1). */
static struct htc_headset_microp_platform_data htc_headset_microp_data = {
.remote_int = 1 << 5,
.remote_irq = MSM_uP_TO_INT(5),
.remote_enable_pin = NULL,
.adc_channel = 0x01,
.adc_remote = {0, 33, 50, 110, 160, 220},
};
/* Child platform devices registered by the microP core driver. */
static struct platform_device microp_devices[] = {
{
.name = "lightsensor_microp",
.dev = {
.platform_data = &lightsensor_data,
},
},
{
.name = "leds-microp",
.id = -1,
.dev = {
.platform_data = &microp_leds_data,
},
},
{
.name = BMA150_G_SENSOR_NAME,
.dev = {
.platform_data = &incrediblec_g_sensor_pdata,
},
},
{
.name = "incrediblec_proximity",
.id = -1,
.dev = {
.platform_data = &capella_cm3602_pdata,
},
},
{
.name = "HTC_HEADSET_MICROP",
.id = -1,
.dev = {
.platform_data = &htc_headset_microp_data,
},
},
};
/* Top-level microP config; function table is swapped to microp_functions_1
 * for system_rev >= 2 in incrediblec_init(). */
static struct microp_i2c_platform_data microp_data = {
.num_functions = ARRAY_SIZE(microp_functions),
.microp_function = microp_functions,
.num_devices = ARRAY_SIZE(microp_devices),
.microp_devices = microp_devices,
.gpio_reset = INCREDIBLEC_GPIO_UP_RESET_N,
.microp_ls_on = LS_PWR_ON | PS_PWR_ON,
.spi_devices = SPI_OJ | SPI_GSENSOR,
};
/* Capacitive-button backlight driven directly from an AP GPIO. */
static struct gpio_led incrediblec_led_list[] = {
{
.name = "button-backlight",
.gpio = INCREDIBLEC_AP_KEY_LED_EN,
.active_low = 0,
},
};
static struct gpio_led_platform_data incrediblec_leds_data = {
.num_leds = ARRAY_SIZE(incrediblec_led_list),
.leds = incrediblec_led_list,
};
static struct platform_device incrediblec_leds = {
.name = "leds-gpio",
.id = -1,
.dev = {
.platform_data = &incrediblec_leds_data,
},
};
/* USB PHY 3.3 V enable pin and the cable-ID sense pin. */
static uint32_t usb_phy_3v3_table[] = {
PCOM_GPIO_CFG(INCREDIBLEC_USB_PHY_3V3_ENABLE, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA)
};
static uint32_t usb_ID_PIN_table[] = {
PCOM_GPIO_CFG(INCREDIBLEC_GPIO_USB_ID_PIN, 0, GPIO_INPUT, GPIO_NO_PULL, GPIO_4MA),
};
/* PHY register init pairs (addr/value), -1 terminated. */
static int incrediblec_phy_init_seq[] = { 0x1D, 0x0D, 0x1D, 0x10, -1 };
#ifdef CONFIG_USB_ANDROID
static struct msm_hsusb_platform_data msm_hsusb_pdata = {
.phy_init_seq = incrediblec_phy_init_seq,
.phy_reset = msm_hsusb_8x50_phy_reset,
.usb_id_pin_gpio = INCREDIBLEC_GPIO_USB_ID_PIN,
};
/* NOTE(review): cdrom_lun = 4 with nluns = 3 looks inconsistent — verify
 * against the usb_mass_storage driver's interpretation of these fields. */
static struct usb_mass_storage_platform_data mass_storage_pdata = {
.nluns = 3,
.vendor = "HTC",
.product = "Android Phone",
.release = 0x0100,
.cdrom_lun = 4,
};
static struct platform_device usb_mass_storage_device = {
.name = "usb_mass_storage",
.id = -1,
.dev = {
.platform_data = &mass_storage_pdata,
},
};
/* Android gadget: HTC VID, composite function tables from htc_usb.h. */
static struct android_usb_platform_data android_usb_pdata = {
.vendor_id = 0x0bb4,
.product_id = 0x0c9e,
.version = 0x0100,
.product_name = "Android Phone",
.manufacturer_name = "HTC",
.num_products = ARRAY_SIZE(usb_products),
.products = usb_products,
.num_functions = ARRAY_SIZE(usb_functions_all),
.functions = usb_functions_all,
};
static struct platform_device android_usb_device = {
.name = "android_usb",
.id = -1,
.dev = {
.platform_data = &android_usb_pdata,
},
};
/*
 * Late USB bring-up (called from incrediblec_init): patch in the board
 * serial number, power the PHY, configure the ID pin, then register the
 * three USB platform devices that were left out of devices[].
 */
static void inc_add_usb_devices(void)
{
android_usb_pdata.products[0].product_id =
android_usb_pdata.product_id;
android_usb_pdata.serial_number = board_serialno();
msm_hsusb_pdata.serial_number = board_serialno();
msm_device_hsusb.dev.platform_data = &msm_hsusb_pdata;
config_gpio_table(usb_phy_3v3_table, ARRAY_SIZE(usb_phy_3v3_table));
gpio_set_value(INCREDIBLEC_USB_PHY_3V3_ENABLE, 1);
config_gpio_table(usb_ID_PIN_table, ARRAY_SIZE(usb_ID_PIN_table));
platform_device_register(&msm_device_hsusb);
platform_device_register(&usb_mass_storage_device);
platform_device_register(&android_usb_device);
}
#endif
/* Bluetooth/WiFi rfkill switch device; driver lives elsewhere. */
static struct platform_device incrediblec_rfkill = {
	.name = "incrediblec_rfkill",
	.id = -1,
};

/* QSD SPI controller resources: three IRQs, the register window, then
 * the SPI pad GPIOs passed as IORESOURCE_IRQ entries.
 * NOTE(review): for the GPIO entries .start is the GPIO number and .end
 * appears to encode something else (1/0) — presumably a mux/function
 * code consumed by the spi_qsd driver; confirm before editing. */
static struct resource qsd_spi_resources[] = {
	{
		.name = "spi_irq_in",
		.start = INT_SPI_INPUT,
		.end = INT_SPI_INPUT,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_irq_out",
		.start = INT_SPI_OUTPUT,
		.end = INT_SPI_OUTPUT,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_irq_err",
		.start = INT_SPI_ERROR,
		.end = INT_SPI_ERROR,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_base",
		.start = 0xA1200000,
		.end = 0xA1200000 + SZ_4K - 1,
		.flags = IORESOURCE_MEM,
	},
	{
		.name = "spi_clk",
		.start = 17,
		.end = 1,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_mosi",
		.start = 18,
		.end = 1,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_miso",
		.start = 19,
		.end = 1,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_cs0",
		.start = 20,
		.end = 1,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_pwr",
		.start = 21,
		.end = 0,
		.flags = IORESOURCE_IRQ,
	},
	{
		.name = "spi_irq_cs0",
		.start = 22,
		.end = 0,
		.flags = IORESOURCE_IRQ,
	},
};

static struct platform_device qsd_device_spi = {
	.name = "spi_qsd",
	.id = 0,
	.num_resources = ARRAY_SIZE(qsd_spi_resources),
	.resource = qsd_spi_resources,
};
/* GPU (kgsl) resources: register window, carve-out memory, IRQ.
 * Entry [1] (kgsl_phys_memory) is rebased at runtime in
 * incrediblec_init() depending on engineerid/memory layout. */
static struct resource msm_kgsl_resources[] = {
	{
		.name = "kgsl_reg_memory",
		.start = MSM_GPU_REG_PHYS,
		.end = MSM_GPU_REG_PHYS + MSM_GPU_REG_SIZE - 1,
		.flags = IORESOURCE_MEM,
	},
	{
		.name = "kgsl_phys_memory",
		.start = MSM_GPU_MEM_BASE,
		.end = MSM_GPU_MEM_BASE + MSM_GPU_MEM_SIZE - 1,
		.flags = IORESOURCE_MEM,
	},
	{
		.start = INT_GRAPHICS,
		.end = INT_GRAPHICS,
		.flags = IORESOURCE_IRQ,
	},
};
#define PWR_RAIL_GRP_CLK 8

/*
 * Select how the GPU power rail is managed: follow_clk != 0 puts the
 * rail under clock control (mode 0), otherwise manual (mode 1).
 * Returns the msm_proc_comm() RPC result.
 */
static int incrediblec_kgsl_power_rail_mode(int follow_clk)
{
	int rail_id = PWR_RAIL_GRP_CLK;
	int mode = !follow_clk;

	return msm_proc_comm(PCOM_CLKCTL_RPC_RAIL_CONTROL, &rail_id, &mode);
}
/*
 * Enable or disable the GPU power rail via proc_comm RPC.
 * Returns the msm_proc_comm() result.
 */
static int incrediblec_kgsl_power(bool on)
{
	int rail_id = PWR_RAIL_GRP_CLK;

	if (on)
		return msm_proc_comm(PCOM_CLKCTL_RPC_RAIL_ENABLE,
				     &rail_id, NULL);
	return msm_proc_comm(PCOM_CLKCTL_RPC_RAIL_DISABLE, &rail_id, NULL);
}
static struct platform_device msm_kgsl_device = {
	.name = "kgsl",
	.id = -1,
	.resource = msm_kgsl_resources,
	.num_resources = ARRAY_SIZE(msm_kgsl_resources),
};

/* pmem region for MDP (display); .start is rebased in incrediblec_init()
 * for some memory layouts. */
static struct android_pmem_platform_data mdp_pmem_pdata = {
	.name = "pmem",
	.start = MSM_PMEM_MDP_BASE,
	.size = MSM_PMEM_MDP_SIZE,
	.no_allocator = 0,
	.cached = 1,
};

/* pmem region for the aDSP (media); also rebased at init time. */
static struct android_pmem_platform_data android_pmem_adsp_pdata = {
	.name = "pmem_adsp",
	.start = MSM_PMEM_ADSP_BASE,
	.size = MSM_PMEM_ADSP_SIZE,
	.no_allocator = 0,
	.cached = 1,
};

#ifdef CONFIG_720P_CAMERA
/* With 720p camera support the extra pmem region feeds the video encoder. */
static struct android_pmem_platform_data android_pmem_venc_pdata = {
	.name = "pmem_venc",
	.start = MSM_PMEM_VENC_BASE,
	.size = MSM_PMEM_VENC_SIZE,
	.no_allocator = 0,
	.cached = 1,
};
#else
/* Otherwise it is dedicated to the camera (fixed allocation). */
static struct android_pmem_platform_data android_pmem_camera_pdata = {
	.name = "pmem_camera",
	.start = MSM_PMEM_CAMERA_BASE,
	.size = MSM_PMEM_CAMERA_SIZE,
	.no_allocator = 1,
	.cached = 1,
};
#endif

static struct platform_device android_pmem_mdp_device = {
	.name = "android_pmem",
	.id = 0,
	.dev = {
		.platform_data = &mdp_pmem_pdata
	},
};

static struct platform_device android_pmem_adsp_device = {
	.name = "android_pmem",
	.id = 4,
	.dev = {
		.platform_data = &android_pmem_adsp_pdata,
	},
};

#ifdef CONFIG_720P_CAMERA
static struct platform_device android_pmem_venc_device = {
	.name = "android_pmem",
	.id = 5,
	.dev = {
		.platform_data = &android_pmem_venc_pdata,
	},
};
#else
static struct platform_device android_pmem_camera_device = {
	.name = "android_pmem",
	.id = 5,
	.dev = {
		.platform_data = &android_pmem_camera_pdata,
	},
};
#endif

/* Persistent RAM console (keeps last kernel log across reboot). */
static struct resource ram_console_resources[] = {
	{
		.start = MSM_RAM_CONSOLE_BASE,
		.end = MSM_RAM_CONSOLE_BASE + MSM_RAM_CONSOLE_SIZE - 1,
		.flags = IORESOURCE_MEM,
	},
};

static struct platform_device ram_console_device = {
	.name = "ram_console",
	.id = -1,
	.num_resources = ARRAY_SIZE(ram_console_resources),
	.resource = ram_console_resources,
};
/*
 * Touch-panel power hook for the Atmel mXT driver: raise the enable
 * line then release reset on power-up; drop enable on power-down.
 * Always returns 0.
 */
static int incrediblec_atmel_ts_power(int on)
{
	printk(KERN_INFO "incrediblec_atmel_ts_power(%d)\n", on);

	if (!on) {
		gpio_set_value(INCREDIBLEC_GPIO_TP_EN, 0);
		msleep(2);
		return 0;
	}

	gpio_set_value(INCREDIBLEC_GPIO_TP_EN, 1);
	msleep(2);
	gpio_set_value(INCREDIBLEC_GPIO_TP_RST, 1);
	return 0;
}
/*
 * Atmel QT602240 touch controller configurations, one entry per
 * firmware version (0x016, 0x015, 0x014); the driver picks the entry
 * matching the chip it probes. config_T* arrays are raw register
 * blocks defined by the Atmel object protocol. Entry [0] is further
 * patched for system_rev > 2 in incrediblec_init().
 */
struct atmel_i2c_platform_data incrediblec_atmel_ts_data[] = {
	{
		.version = 0x016,
		.abs_x_min = 1,
		.abs_x_max = 1023,
		.abs_y_min = 2,
		.abs_y_max = 966,
		.abs_pressure_min = 0,
		.abs_pressure_max = 255,
		.abs_width_min = 0,
		.abs_width_max = 20,
		.gpio_irq = INCREDIBLEC_GPIO_TP_INT_N,
		.power = incrediblec_atmel_ts_power,
		.config_T6 = {0, 0, 0, 0, 0, 0},
		.config_T7 = {50, 15, 25},
		.config_T8 = {10, 0, 20, 10, 0, 0, 5, 15},
		.config_T9 = {139, 0, 0, 18, 12, 0, 16, 38, 3, 7, 0, 5, 2, 15, 2, 10, 25, 5, 0, 0, 0, 0, 0, 0, 0, 0, 159, 47, 149, 81, 40},
		.config_T15 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T19 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T20 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T22 = {15, 0, 0, 0, 0, 0, 0, 0, 16, 0, 1, 0, 7, 18, 25, 30, 0},
		.config_T23 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T24 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T25 = {3, 0, 200, 50, 64, 31, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T27 = {0, 0, 0, 0, 0, 0, 0},
		.config_T28 = {0, 0, 2, 4, 8, 60},
		.object_crc = {0xDB, 0xBF, 0x60},
		.cable_config = {35, 30, 8, 16},
		.GCAF_level = {20, 24, 28, 40, 63},
		.filter_level = {15, 60, 963, 1008},
	},
	{
		.version = 0x015,
		.abs_x_min = 13,
		.abs_x_max = 1009,
		.abs_y_min = 15,
		.abs_y_max = 960,
		.abs_pressure_min = 0,
		.abs_pressure_max = 255,
		.abs_width_min = 0,
		.abs_width_max = 20,
		.gpio_irq = INCREDIBLEC_GPIO_TP_INT_N,
		.power = incrediblec_atmel_ts_power,
		.config_T6 = {0, 0, 0, 0, 0, 0},
		.config_T7 = {50, 15, 25},
		.config_T8 = {12, 0, 20, 20, 0, 0, 20, 0},
		.config_T9 = {139, 0, 0, 18, 12, 0, 32, 40, 2, 7, 0, 5, 2, 0, 2, 10, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 159, 47, 149, 81},
		.config_T15 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T19 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T20 = {7, 0, 0, 0, 0, 0, 0, 30, 20, 4, 15, 5},
		.config_T22 = {7, 0, 0, 25, 0, -25, 255, 4, 50, 0, 1, 10, 15, 20, 25, 30, 4},
		.config_T23 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T24 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T25 = {3, 0, 200, 50, 64, 31, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T27 = {0, 0, 0, 0, 0, 0, 0},
		.config_T28 = {0, 0, 2, 4, 8, 60},
		.object_crc = {0x19, 0x87, 0x7E},
	},
	{
		.version = 0x014,
		.abs_x_min = 13,
		.abs_x_max = 1009,
		.abs_y_min = 15,
		.abs_y_max = 960,
		.abs_pressure_min = 0,
		.abs_pressure_max = 255,
		.abs_width_min = 0,
		.abs_width_max = 20,
		.gpio_irq = INCREDIBLEC_GPIO_TP_INT_N,
		.power = incrediblec_atmel_ts_power,
		.config_T6 = {0, 0, 0, 0, 0, 0},
		.config_T7 = {50, 15, 25},
		.config_T8 = {12, 0, 20, 20, 0, 0, 10, 15},
		.config_T9 = {3, 0, 0, 18, 12, 0, 48, 45, 2, 7, 0, 0, 0, 0, 2, 10, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 143, 47, 143, 81},
		.config_T15 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T19 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T20 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T22 = {5, 0, 0, 25, 0, -25, 255, 4, 50, 0, 1, 10, 15, 20, 25, 30, 4},
		.config_T23 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T24 = {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T25 = {3, 0, 200, 50, 64, 31, 0, 0, 0, 0, 0, 0, 0, 0},
		.config_T27 = {0, 0, 0, 0, 0, 0, 0},
		.config_T28 = {0, 0, 2, 4, 8, 60},
	}
};
/* DCDC1 powers the application CPU core. */
static struct regulator_consumer_supply tps65023_dcdc1_supplies[] = {
	{
		.supply = "acpu_vcore",
	},
};

/*
 * TPS65023 PMIC regulator setup. Only dcdc1 is actively managed
 * (CPU core voltage scaling); the remaining four entries are fixed
 * placeholders so the driver does not crash on unused regulators.
 */
static struct regulator_init_data tps65023_data[5] = {
	{
		.constraints = {
			.name = "dcdc1", /* VREG_MSMC2_1V29 */
			.min_uV = 900000,
			.max_uV = 1350000,
			.valid_ops_mask = REGULATOR_CHANGE_VOLTAGE,
		},
		.consumer_supplies = tps65023_dcdc1_supplies,
		.num_consumer_supplies = ARRAY_SIZE(tps65023_dcdc1_supplies),
	},
	/* dummy values for unused regulators to not crash driver: */
	{
		.constraints = {
			.name = "dcdc2", /* VREG_MSMC1_1V26 */
			.min_uV = 1260000,
			.max_uV = 1260000,
		},
	},
	{
		.constraints = {
			.name = "dcdc3", /* unused */
			.min_uV = 800000,
			.max_uV = 3300000,
		},
	},
	{
		.constraints = {
			.name = "ldo1", /* unused */
			.min_uV = 1000000,
			.max_uV = 3150000,
		},
	},
	{
		.constraints = {
			.name = "ldo2", /* V_USBPHY_3V3 */
			.min_uV = 3300000,
			.max_uV = 3300000,
		},
	},
};
/* H2W (headset-to-wire) bit-bang accessors over the UART3 pads. */
static void set_h2w_dat(int n)
{
	gpio_set_value(INCREDIBLEC_GPIO_H2W_DATA, n);
}

static void set_h2w_clk(int n)
{
	gpio_set_value(INCREDIBLEC_GPIO_H2W_CLK, n);
}

static int get_h2w_dat(void)
{
	return gpio_get_value(INCREDIBLEC_GPIO_H2W_DATA);
}

static int get_h2w_clk(void)
{
	return gpio_get_value(INCREDIBLEC_GPIO_H2W_CLK);
}

/* This board has no switchable H2W accessory power rail. */
static void h2w_dev_power_on(int on)
{
	printk(KERN_INFO "Not support H2W power\n");
}

/* default TX,RX to GPI */
static uint32_t uart3_off_gpi_table[] = {
	/* RX, H2W DATA */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_DATA, 0,
		      GPIO_INPUT, GPIO_NO_PULL, GPIO_2MA),
	/* TX, H2W CLK */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_CLK, 0,
		      GPIO_INPUT, GPIO_NO_PULL, GPIO_2MA),
};

/* set TX,RX to GPO */
static uint32_t uart3_off_gpo_table[] = {
	/* RX, H2W DATA */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_DATA, 0,
		      GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
	/* TX, H2W CLK */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_H2W_CLK, 0,
		      GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
};
/* Switch the H2W DATA pad between input (n == 0) and output via
 * proc_comm TLMM config; entry [0] of each table is the DATA pin. */
static void set_h2w_dat_dir(int n)
{
#if 0
	if (n == 0) /* input */
		gpio_direction_input(INCREDIBLEC_GPIO_H2W_DATA);
	else
		gpio_configure(INCREDIBLEC_GPIO_H2W_DATA, GPIOF_DRIVE_OUTPUT);
#else
	if (n == 0) /* input */
		msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
			      uart3_off_gpi_table + 0, 0);
	else
		msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
			      uart3_off_gpo_table + 0, 0);
#endif
}

/* Same for the H2W CLK pad; entry [1] of each table is the CLK pin. */
static void set_h2w_clk_dir(int n)
{
#if 0
	if (n == 0) /* input */
		gpio_direction_input(INCREDIBLEC_GPIO_H2W_CLK);
	else
		gpio_configure(INCREDIBLEC_GPIO_H2W_CLK, GPIOF_DRIVE_OUTPUT);
#else
	if (n == 0) /* input */
		msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
			      uart3_off_gpi_table + 1, 0);
	else
		msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
			      uart3_off_gpo_table + 1, 0);
#endif
}
static void incrediblec_config_serial_debug_gpios(void);

/*
 * Route the shared UART3 pads either to the debug UART (H2W_UART3)
 * or to GPIO-input mode for headset bit-banging (H2W_GPIO).
 */
static void h2w_configure(int route)
{
	printk(KERN_INFO "H2W route = %d \n", route);
	switch (route) {
	case H2W_UART3:
		incrediblec_config_serial_debug_gpios();
		printk(KERN_INFO "H2W -> UART3\n");
		break;
	case H2W_GPIO:
		msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
			      uart3_off_gpi_table + 0, 0);
		msm_proc_comm(PCOM_RPC_GPIO_TLMM_CONFIG_EX,
			      uart3_off_gpi_table + 1, 0);
		printk(KERN_INFO "H2W -> GPIO\n");
		break;
	}
}
/* Headset manager: all defaults, no board-specific hooks. */
static struct htc_headset_mgr_platform_data htc_headset_mgr_data = {
};

static struct platform_device htc_headset_mgr = {
	.name = "HTC_HEADSET_MGR",
	.id = -1,
	.dev = {
		.platform_data = &htc_headset_mgr_data,
	},
};

/* 3.5 mm jack detect pin only.
 * NOTE(review): NULL assigned to what look like integer gpio fields —
 * relies on NULL == 0 conversion warnings being benign; 0 would be the
 * cleaner value if the fields are ints. */
static struct htc_headset_gpio_platform_data htc_headset_gpio_data = {
	.hpin_gpio = INCREDIBLEC_GPIO_35MM_HEADSET_DET,
	.key_enable_gpio = NULL,
	.mic_select_gpio = NULL,
};

static struct platform_device htc_headset_gpio = {
	.name = "HTC_HEADSET_GPIO",
	.id = -1,
	.dev = {
		.platform_data = &htc_headset_gpio_data,
	},
};

/* AKM8973 compass wiring (reset + interrupt lines). */
static struct akm8973_platform_data compass_platform_data = {
	.layouts = INCREDIBLEC_LAYOUTS,
	.project_name = INCREDIBLEC_PROJECT_NAME,
	.reset = INCREDIBLEC_GPIO_COMPASS_RST_N,
	.intr = INCREDIBLEC_GPIO_COMPASS_INT_N,
};

/* TPA6130 headset amplifier, RPC server disabled on this board. */
static struct tpa6130_platform_data headset_amp_platform_data = {
	.enable_rpc_server = 0,
};
static struct i2c_board_info i2c_devices[] = {
{
I2C_BOARD_INFO(ATMEL_QT602240_NAME, 0x94 >> 1),
.platform_data = &incrediblec_atmel_ts_data,
.irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_TP_INT_N)
},
{
I2C_BOARD_INFO(MICROP_I2C_NAME, 0xCC >> 1),
.platform_data = µp_data,
.irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_UP_INT_N)
},
{
I2C_BOARD_INFO("ds2482", 0x30 >> 1),
/*.platform_data = µp_data,*/
/*.irq = MSM_GPIO_TO_INT(PASSION_GPIO_UP_INT_N)*/
},
{
I2C_BOARD_INFO("smb329", 0x6E >> 1),
},
{
I2C_BOARD_INFO("akm8973", 0x1C),
.platform_data = &compass_platform_data,
.irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_COMPASS_INT_N),
},
#ifdef CONFIG_MSM_CAMERA
#ifdef CONFIG_OV8810
{
I2C_BOARD_INFO("ov8810", 0x6C >> 1),
},
#endif
#endif/*CONIFIG_MSM_CAMERA*/
{
I2C_BOARD_INFO(TPA6130_I2C_NAME, 0xC0 >> 1),
.platform_data = &headset_amp_platform_data,
},
{
I2C_BOARD_INFO("tps65023", 0x48),
.platform_data = tps65023_data,
},
};
#ifdef CONFIG_ARCH_QSD8X50
/* Formatted BT MAC, exported read/write as module parameter "bdaddress". */
static char bdaddress[20];

/*
 * Copy the 6-byte Bluetooth device address out of shared RAM and format
 * it as "xx:xx:xx:xx:xx:xx" for user space.
 * Hardened: snprintf instead of sprintf so a future format change can
 * never overrun the 20-byte buffer (current output is 17 chars + NUL).
 */
static void bt_export_bd_address(void)
{
	unsigned char cTemp[6];

	memcpy(cTemp, get_bt_bd_ram(), 6);
	snprintf(bdaddress, sizeof(bdaddress),
		 "%02x:%02x:%02x:%02x:%02x:%02x",
		 cTemp[0], cTemp[1], cTemp[2], cTemp[3], cTemp[4], cTemp[5]);
	printk(KERN_INFO "YoYo--BD_ADDRESS=%s\n", bdaddress);
}
module_param_string(bdaddress, bdaddress, sizeof(bdaddress), S_IWUSR | S_IRUGO);
MODULE_PARM_DESC(bdaddress, "BT MAC ADDRESS");
#endif
/* Camera parallel-bus pad config while the sensor is suspended:
 * everything driven as plain outputs (the fully-off variant is kept
 * under #if 0 for reference). */
static uint32_t camera_off_gpio_table[] = {
#if 0 /* CAMERA OFF*/
	PCOM_GPIO_CFG(0, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT0 */
	PCOM_GPIO_CFG(1, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT1 */
	PCOM_GPIO_CFG(2, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT2 */
	PCOM_GPIO_CFG(3, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT3 */
	PCOM_GPIO_CFG(4, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT4 */
	PCOM_GPIO_CFG(5, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT5 */
	PCOM_GPIO_CFG(6, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT6 */
	PCOM_GPIO_CFG(7, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT7 */
	PCOM_GPIO_CFG(8, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT8 */
	PCOM_GPIO_CFG(9, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT9 */
	PCOM_GPIO_CFG(10, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT10 */
	PCOM_GPIO_CFG(11, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* DAT11 */
	PCOM_GPIO_CFG(12, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* PCLK */
	PCOM_GPIO_CFG(13, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* HSYNC */
	PCOM_GPIO_CFG(14, 0, GPIO_INPUT, GPIO_PULL_DOWN, GPIO_4MA), /* VSYNC */
	PCOM_GPIO_CFG(15, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* MCLK */
#endif
	/* CAMERA SUSPEND*/
	PCOM_GPIO_CFG(0, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT0 */
	PCOM_GPIO_CFG(1, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT1 */
	PCOM_GPIO_CFG(2, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT2 */
	PCOM_GPIO_CFG(3, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT3 */
	PCOM_GPIO_CFG(4, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT4 */
	PCOM_GPIO_CFG(5, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT5 */
	PCOM_GPIO_CFG(6, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT6 */
	PCOM_GPIO_CFG(7, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT7 */
	PCOM_GPIO_CFG(8, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT8 */
	PCOM_GPIO_CFG(9, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT9 */
	PCOM_GPIO_CFG(10, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT10 */
	PCOM_GPIO_CFG(11, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* DAT11 */
	PCOM_GPIO_CFG(12, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* PCLK */
	PCOM_GPIO_CFG(13, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* HSYNC */
	PCOM_GPIO_CFG(14, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* VSYNC */
	PCOM_GPIO_CFG(15, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA), /* MCLK */
	PCOM_GPIO_CFG(99, 0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA), /* CAM1_RST */
	PCOM_GPIO_CFG(INCREDIBLEC_CAM_PWD,
		      0, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA), /* CAM1_PWD */
};

/* Pad config with the camera active: data/sync lines as pulled-up
 * inputs (function 1), fast drive on PCLK/MCLK. */
static uint32_t camera_on_gpio_table[] = {
	/* CAMERA ON */
	PCOM_GPIO_CFG(0, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT0 */
	PCOM_GPIO_CFG(1, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT1 */
	PCOM_GPIO_CFG(2, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT2 */
	PCOM_GPIO_CFG(3, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT3 */
	PCOM_GPIO_CFG(4, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT4 */
	PCOM_GPIO_CFG(5, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT5 */
	PCOM_GPIO_CFG(6, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT6 */
	PCOM_GPIO_CFG(7, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT7 */
	PCOM_GPIO_CFG(8, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT8 */
	PCOM_GPIO_CFG(9, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT9 */
	PCOM_GPIO_CFG(10, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT10 */
	PCOM_GPIO_CFG(11, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* DAT11 */
	PCOM_GPIO_CFG(12, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_16MA), /* PCLK */
	PCOM_GPIO_CFG(13, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* HSYNC */
	PCOM_GPIO_CFG(14, 1, GPIO_INPUT, GPIO_PULL_UP, GPIO_2MA), /* VSYNC */
	PCOM_GPIO_CFG(15, 1, GPIO_OUTPUT, GPIO_PULL_UP, GPIO_16MA), /* MCLK */
};
/* Apply the active-camera pad configuration. */
static void config_camera_on_gpios(void)
{
	config_gpio_table(camera_on_gpio_table,
			  ARRAY_SIZE(camera_on_gpio_table));
}

/* Apply the suspended-camera pad configuration. */
static void config_camera_off_gpios(void)
{
	config_gpio_table(camera_off_gpio_table,
			  ARRAY_SIZE(camera_off_gpio_table));
}
/* VFE (camera front end) register window and interrupt. */
static struct resource msm_camera_resources[] = {
	{
		.start = MSM_VFE_PHYS,
		.end = MSM_VFE_PHYS + MSM_VFE_SIZE - 1,
		.flags = IORESOURCE_MEM,
	},
	{
		.start = INT_VFE,
		/* Was a bare "INT_VFE," after the designated .start —
		 * that filled .end only by accident of member order.
		 * Make the designator explicit. */
		.end = INT_VFE,
		.flags = IORESOURCE_IRQ,
	},
};
/* Camera platform hooks and the MDC / clock-control register windows. */
static struct msm_camera_device_platform_data msm_camera_device_data = {
	.camera_gpio_on = config_camera_on_gpios,
	.camera_gpio_off = config_camera_off_gpios,
	.ioext.mdcphy = MSM_MDC_PHYS,
	.ioext.mdcsz = MSM_MDC_SIZE,
	.ioext.appphy = MSM_CLK_CTL_PHYS,
	.ioext.appsz = MSM_CLK_CTL_SIZE,
};
/* Thin adapter so the camera flash callback matches the AAT1271 API. */
static int flashlight_control(int mode)
{
	return aat1271_flashlight_control(mode);
}

/* Flash behaviour limits (temperature / battery-capacity cutoffs). */
static struct camera_flash_cfg msm_camera_sensor_flash_cfg = {
	.camera_flash = flashlight_control,
	.num_flash_levels = FLASHLIGHT_NUM,
	.low_temp_limit = 10,
	.low_cap_limit = 15,
};

/* OmniVision OV8810 8 MP sensor wiring and resources. */
static struct msm_camera_sensor_info msm_camera_sensor_ov8810_data = {
	.sensor_name = "ov8810",
	.sensor_reset = INCREDIBLEC_CAM_RST, /* CAM1_RST */
	.sensor_pwd = INCREDIBLEC_CAM_PWD, /* CAM1_PWDN, enabled in a9 */
	.pdata = &msm_camera_device_data,
	.resource = msm_camera_resources,
	.num_resources = ARRAY_SIZE(msm_camera_resources),
	.waked_up = 0,
	.need_suspend = 0,
	.flash_cfg = &msm_camera_sensor_flash_cfg,
};

static struct platform_device msm_camera_sensor_ov8810 = {
	.name = "msm_camera_ov8810",
	.dev = {
		.platform_data = &msm_camera_sensor_ov8810_data,
	},
};
/* Mux the three flashlight control pins (torch/flash/flash-adjust)
 * as 2 mA outputs; called by the flashlight driver at init. */
static void config_incrediblec_flashlight_gpios(void)
{
	static uint32_t flashlight_gpio_table[] = {
		PCOM_GPIO_CFG(INCREDIBLEC_GPIO_FLASHLIGHT_TORCH, 0,
			      GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
		PCOM_GPIO_CFG(INCREDIBLEC_GPIO_FLASHLIGHT_FLASH, 0,
			      GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
		PCOM_GPIO_CFG(INCREDIBLEC_GPIO_FLASHLIGHT_FLASH_ADJ, 0,
			      GPIO_OUTPUT, GPIO_NO_PULL, GPIO_2MA),
	};
	config_gpio_table(flashlight_gpio_table,
			  ARRAY_SIZE(flashlight_gpio_table));
}

static struct flashlight_platform_data incrediblec_flashlight_data = {
	.gpio_init = config_incrediblec_flashlight_gpios,
	.torch = INCREDIBLEC_GPIO_FLASHLIGHT_TORCH,
	.flash = INCREDIBLEC_GPIO_FLASHLIGHT_FLASH,
	.flash_adj = INCREDIBLEC_GPIO_FLASHLIGHT_FLASH_ADJ,
	.flash_duration_ms = 600,
	.led_count = 1,
};

static struct platform_device incrediblec_flashlight_device = {
	.name = "flashlight",
	.dev = {
		.platform_data = &incrediblec_flashlight_data,
	},
};
/*
 * Power the optical joystick sensor up or down through the micro-P:
 * command 0x91 enables, 0x90 disables; payload byte 2 is fixed 0x80.
 */
static void curcial_oj_shutdown(int enable)
{
	uint8_t cmd[3] = { 0, 0, 0x80 };

	microp_i2c_write(enable ? 0x91 : 0x90, cmd, 3);
}
/*
 * Switch the optical joystick's "synt" regulator on (2.75 V) or off.
 * Returns 1 on success, 0 if the regulator handle cannot be obtained.
 * Fix: dropped the stray ';' that followed the closing brace (an empty
 * file-scope declaration, invalid ISO C).
 */
static int curcial_oj_poweron(int on)
{
	struct vreg *oj_power = vreg_get(0, "synt");

	if (IS_ERR(oj_power)) {
		printk(KERN_ERR "%s: Error power domain\n", __func__);
		return 0;
	}
	if (on) {
		vreg_set_level(oj_power, 2750);
		vreg_enable(oj_power);
	} else {
		vreg_disable(oj_power);
	}
	printk(KERN_INFO "%s: OJ power enable(%d)\n", __func__, on);
	return 1;
}
/*
 * Accumulate one OJ motion report into the running deltas.
 * data[1] is the raw X delta, data[2] the raw Y delta; the value 0x80
 * (which is -128, not representable with flipped sign) is clamped to
 * 0x81 (-127). Both axes are sign-inverted before accumulating.
 * Fix: removed the dead "if (0)" axis-swap branch and the no-op (1)*
 * factors; the live path is unchanged.
 */
static void curcial_oj_adjust_xy(uint8_t *data, int16_t *mSumDeltaX, int16_t *mSumDeltaY)
{
	int8_t deltaX;
	int8_t deltaY;

	if (data[2] == 0x80)
		data[2] = 0x81;
	if (data[1] == 0x80)
		data[1] = 0x81;
	deltaX = (int8_t)data[1];
	deltaY = (int8_t)data[2];
	*mSumDeltaX += -((int16_t)deltaX);
	*mSumDeltaY += -((int16_t)deltaY);
}
/* Optical joystick tuning: thresholds, step curves and shutter/pixel
 * tables consumed by the Curcial OJ driver. */
static struct curcial_oj_platform_data incrediblec_oj_data = {
	.oj_poweron = curcial_oj_poweron,
	.oj_shutdown = curcial_oj_shutdown,
	.oj_adjust_xy = curcial_oj_adjust_xy,
	.microp_version = INCREDIBLEC_MICROP_VER,
	.debugflag = 0,
	.mdelay_time = 0,
	.normal_th = 8,
	.xy_ratio = 15,
	.interval = 20,
	.swap = true,
	.ap_code = false,
	.x = 1,
	.y = 1,
	.share_power = true,
	.Xsteps = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
		9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
		9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
	.Ysteps = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
		9, 9, 9, 9, 9, 9, 9, 9, 9, 9,
		9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
	.sht_tbl = {0, 2000, 2250, 2500, 2750, 3000},
	.pxsum_tbl = {0, 0, 40, 50, 60, 70},
	.degree = 6,
	.irq = MSM_uP_TO_INT(12),
};

static struct platform_device incrediblec_oj = {
	.name = CURCIAL_OJ_NAME,
	.id = -1,
	.dev = {
		.platform_data = &incrediblec_oj_data,
	}
};
/*
 * Power sequence for the Samsung AMOLED panel on the "gp1" rail.
 * Returns 0 on success, -EINVAL if the regulator is unavailable.
 * Note: each branch declares a local `on` that deliberately shadows
 * the parameter — it is the pulldown enable flag (1 while powering up,
 * 0 while powering down) passed to PCOM_VREG_PULLDOWN.
 */
static int amoled_power(int on)
{
	/* cached regulator handle; looked up once on first call */
	static struct vreg *vreg_lcm_2v6;

	if (!vreg_lcm_2v6) {
		vreg_lcm_2v6 = vreg_get(0, "gp1");
		if (IS_ERR(vreg_lcm_2v6))
			return -EINVAL;
	}
	if (on) {
		unsigned id, on = 1;

		id = PM_VREG_PDOWN_CAM_ID;
		msm_proc_comm(PCOM_VREG_PULLDOWN, &on, &id);
		vreg_enable(vreg_lcm_2v6);
		/* reset pulse: high 25ms, low 10ms, high again */
		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
		mdelay(25);
		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
		mdelay(10);
		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
		mdelay(20);
	} else {
		unsigned id, on = 0;

		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
		id = PM_VREG_PDOWN_CAM_ID;
		msm_proc_comm(PCOM_VREG_PULLDOWN, &on, &id);
		vreg_disable(vreg_lcm_2v6);
	}
	return 0;
}
/*
 * Power sequence for the Sony WVGA panel on the "gp1" rail.
 * Returns 0 on success, -EINVAL if the regulator is unavailable.
 * Fix: `on_off` was computed in both branches but never used — the
 * PCOM_VREG_PULLDOWN calls passed &on (the function parameter) instead.
 * They now pass &on_off as intended (compare amoled_power(), which
 * shadows `on` with the pulldown flag for the same RPC).
 */
static int sonywvga_power(int on)
{
	unsigned id, on_off;
	/* cached regulator handle; looked up once on first call */
	static struct vreg *vreg_lcm_2v6;

	if (!vreg_lcm_2v6) {
		vreg_lcm_2v6 = vreg_get(0, "gp1");
		if (IS_ERR(vreg_lcm_2v6))
			return -EINVAL;
	}
	if (on) {
		on_off = 0;
		id = PM_VREG_PDOWN_CAM_ID;
		msm_proc_comm(PCOM_VREG_PULLDOWN, &on_off, &id);
		vreg_enable(vreg_lcm_2v6);
		/* reset pulse: high 10ms, low 500us, high again */
		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
		mdelay(10);
		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
		udelay(500);
		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 1);
		mdelay(10);
	} else {
		on_off = 1;
		gpio_set_value(INCREDIBLEC_LCD_RST_ID1, 0);
		mdelay(120);
		id = PM_VREG_PDOWN_CAM_ID;
		msm_proc_comm(PCOM_VREG_PULLDOWN, &on_off, &id);
		vreg_disable(vreg_lcm_2v6);
	}
	return 0;
}
/* Shorthand: all LCD pads are 4 mA push-pull outputs; only the mux
 * function (0 = GPIO, 1 = LCD controller) varies. */
#define LCM_GPIO_CFG(gpio, func) \
	PCOM_GPIO_CFG(gpio, func, GPIO_OUTPUT, GPIO_NO_PULL, GPIO_4MA)

/* Parallel RGB666 interface muxed to the LCD controller. */
static uint32_t display_on_gpio_table[] = {
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R0, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R1, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R2, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R3, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R4, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R5, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G0, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G1, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G2, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G3, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G4, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G5, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B0, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B1, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B2, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B3, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B4, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B5, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_PCLK, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_VSYNC, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_HSYNC, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_DE, 1),
};

/* Same pads muxed back to plain GPIO so they can be driven low. */
static uint32_t display_off_gpio_table[] = {
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R0, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R1, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R2, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R3, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R4, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_R5, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G0, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G1, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G2, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G3, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G4, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_G5, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B0, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B1, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B2, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B3, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B4, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_B5, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_PCLK, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_VSYNC, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_HSYNC, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_DE, 0),
};

/* Extra SPI control pins used only by the Sony panel. */
static uint32_t sony_display_on_gpio_table[] = {
	LCM_GPIO_CFG(INCREDIBLEC_SPI_CLK, 1),
	LCM_GPIO_CFG(INCREDIBLEC_SPI_CS, 1),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_ID0, 1),
	LCM_GPIO_CFG(INCREDIBLEC_SPI_DO, 1),
};

static uint32_t sony_display_off_gpio_table[] = {
	LCM_GPIO_CFG(INCREDIBLEC_SPI_CLK, 0),
	LCM_GPIO_CFG(INCREDIBLEC_SPI_CS, 0),
	LCM_GPIO_CFG(INCREDIBLEC_LCD_ID0, 0),
	LCM_GPIO_CFG(INCREDIBLEC_SPI_DO, 0),
};
/*
 * Mux the LCD pads for panel-on or panel-off. On the off path every
 * pad is additionally driven low to avoid leakage. The Sony-specific
 * SPI pads are only touched for non-Samsung panels. Always returns 0.
 */
static int panel_gpio_switch(int on)
{
	int gpio;

	if (on) {
		config_gpio_table(display_on_gpio_table,
				  ARRAY_SIZE(display_on_gpio_table));
		if (panel_type != SAMSUNG_PANEL)
			config_gpio_table(sony_display_on_gpio_table,
					  ARRAY_SIZE(sony_display_on_gpio_table));
		return 0;
	}

	config_gpio_table(display_off_gpio_table,
			  ARRAY_SIZE(display_off_gpio_table));
	/* drive all RGB/control pads low while the panel is off */
	for (gpio = INCREDIBLEC_LCD_R0; gpio <= INCREDIBLEC_LCD_R5; gpio++)
		gpio_set_value(gpio, 0);
	for (gpio = INCREDIBLEC_LCD_G0; gpio <= INCREDIBLEC_LCD_G5; gpio++)
		gpio_set_value(gpio, 0);
	for (gpio = INCREDIBLEC_LCD_B0; gpio <= INCREDIBLEC_LCD_DE; gpio++)
		gpio_set_value(gpio, 0);
	if (panel_type != SAMSUNG_PANEL)
		config_gpio_table(sony_display_off_gpio_table,
				  ARRAY_SIZE(sony_display_off_gpio_table));
	return 0;
}
/* Framebuffer memory region shared by both panel drivers. */
static struct resource resources_msm_fb[] = {
	{
		.start = MSM_FB_BASE,
		.end = MSM_FB_BASE + MSM_FB_SIZE - 1,
		.flags = IORESOURCE_MEM,
	},
};

/* Samsung AMOLED panel device. */
static struct panel_platform_data amoled_data = {
	.fb_res = &resources_msm_fb[0],
	.power = amoled_power,
	.gpio_switch = panel_gpio_switch,
};

static struct platform_device amoled_panel = {
	.name = "panel-tl2796a",
	.id = -1,
	.dev = {
		.platform_data = &amoled_data
	},
};

/* Sony WVGA panel device (alternate SKU). */
static struct panel_platform_data sonywvga_data = {
	.fb_res = &resources_msm_fb[0],
	.power = sonywvga_power,
	.gpio_switch = panel_gpio_switch,
};

static struct platform_device sonywvga_panel = {
	.name = "panel-sonywvga-s6d16a0x21",
	.id = -1,
	.dev = {
		.platform_data = &sonywvga_data,
	},
};
/* Devices registered unconditionally at board init; USB and the panel
 * are registered separately from incrediblec_init(). */
static struct platform_device *devices[] __initdata = {
	&msm_device_uart1,
#ifdef CONFIG_SERIAL_MSM_HS
	&msm_device_uart_dm1,
#endif
	&htc_battery_pdev,
	&htc_headset_mgr,
	&htc_headset_gpio,
	&ram_console_device,
	&incrediblec_rfkill,
	&msm_device_smd,
	&msm_device_nand,
	/*&msm_device_hsusb,*/
	/*&usb_mass_storage_device,*/
	&android_pmem_mdp_device,
	&android_pmem_adsp_device,
#ifdef CONFIG_720P_CAMERA
	&android_pmem_venc_device,
#else
	&android_pmem_camera_device,
#endif
	&msm_camera_sensor_ov8810,
	&msm_kgsl_device,
	&msm_device_i2c,
	&incrediblec_flashlight_device,
	&incrediblec_leds,
#if defined(CONFIG_SPI_QSD)
	&qsd_device_spi,
#endif
	&incrediblec_oj,
};
/* UART3 pads muxed for the serial debug console (function 3). */
static uint32_t incrediblec_serial_debug_table[] = {
	/* RX */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_RX, 3, GPIO_INPUT, GPIO_NO_PULL,
		      GPIO_4MA),
	/* TX */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_TX, 3, GPIO_OUTPUT, GPIO_NO_PULL,
		      GPIO_4MA),
};

/* UART3 pads for normal use.
 * NOTE(review): TX is configured GPIO_INPUT here, unlike the debug
 * table above — looks like a low-power/idle configuration rather than
 * a typo; confirm against the original board intent before changing. */
static uint32_t incrediblec_uart_gpio_table[] = {
	/* RX */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_RX, 3, GPIO_INPUT, GPIO_NO_PULL,
		      GPIO_4MA),
	/* TX */
	PCOM_GPIO_CFG(INCREDIBLEC_GPIO_UART3_TX, 3, GPIO_INPUT, GPIO_NO_PULL,
		      GPIO_4MA),
};

static void incrediblec_config_serial_debug_gpios(void)
{
	config_gpio_table(incrediblec_serial_debug_table,
			  ARRAY_SIZE(incrediblec_serial_debug_table));
}

static void incrediblec_config_uart_gpios(void)
{
	config_gpio_table(incrediblec_uart_gpio_table,
			  ARRAY_SIZE(incrediblec_uart_gpio_table));
}
/* I2C controller: 100 kHz bus, 8 mA pad drive. */
static struct msm_i2c_device_platform_data msm_i2c_pdata = {
	.i2c_clock = 100000,
	.clock_strength = GPIO_8MA,
	.data_strength = GPIO_8MA,
};

/* Mux the I2C pads and attach the platform data above. */
static void __init msm_device_i2c_init(void)
{
	msm_i2c_gpio_init();
	msm_device_i2c.dev.platform_data = &msm_i2c_pdata;
}
/* CPU clock scaling parameters (switch/settle times, idle floors in kHz). */
static struct msm_acpu_clock_platform_data incrediblec_clock_data = {
	.acpu_switch_time_us = 20,
	.max_speed_delta_khz = 256000,
	.vdd_switch_time_us = 62,
	.power_collapse_khz = 245000,
	.wait_for_irq_khz = 245000,
};

/* Frequencies (Hz) the perflock subsystem may pin the CPU to. */
static unsigned incrediblec_perf_acpu_table[] = {
	245000000,
	576000000,
	998400000,
};

static struct perflock_platform_data incrediblec_perflock_data = {
	.perf_acpu_table = incrediblec_perf_acpu_table,
	.table_size = ARRAY_SIZE(incrediblec_perf_acpu_table),
};

/* Defined in the board's MMC file. */
int incrediblec_init_mmc(int sysrev);

#ifdef CONFIG_SERIAL_MSM_HS
/* High-speed UART (Bluetooth) with BCM-style wakeup pins. */
static struct msm_serial_hs_platform_data msm_uart_dm1_pdata = {
	.rx_wakeup_irq = MSM_GPIO_TO_INT(INCREDIBLEC_GPIO_BT_HOST_WAKE), /*Chip to Device*/
	.inject_rx_on_wakeup = 0,
	.cpu_lock_supported = 0,
	/* for bcm */
	.bt_wakeup_pin_supported = 1,
	.bt_wakeup_pin = INCREDIBLEC_GPIO_BT_CHIP_WAKE,
	.host_wakeup_pin = INCREDIBLEC_GPIO_BT_HOST_WAKE,
};
#endif
/*
 * Enable the shared OJ / BMA accelerometer "synt" rail at 2.85 V.
 * Always returns 0 once the regulator handle is valid; -EIO otherwise.
 * Fixes: vreg_get() reports failure via ERR_PTR, so test with IS_ERR()
 * (the bare NULL check never fired — every other caller in this file
 * uses IS_ERR); also stop silently discarding the vreg_set_level()
 * result.
 */
static int OJ_BMA_power(void)
{
	int ret;
	struct vreg *vreg = vreg_get(0, "synt");

	if (IS_ERR(vreg)) {
		printk(KERN_ERR "%s: vreg error\n", __func__);
		return -EIO;
	}
	ret = vreg_set_level(vreg, 2850);
	if (ret < 0)
		printk(KERN_ERR "%s: vreg set level failed\n", __func__);
	ret = vreg_enable(vreg);
	if (ret < 0)
		printk(KERN_ERR "%s: vreg enable failed\n", __func__);
	return 0;
}
/* Expose the engineerid parsed from the boot tags (see incrediblec_fixup). */
unsigned int incrediblec_get_engineerid(void)
{
	return engineerid;
}
/*
 * sysfs "virtualkeys.atmel-touchscreen" show handler: emits the
 * virtual key map (key:x:y:w:h tuples) consumed by Android's input
 * layer. Later hardware (engineerid > 1 && system_rev > 1) uses a
 * slightly shifted layout.
 */
static ssize_t incrediblec_virtual_keys_show(struct kobject *kobj,
					     struct kobj_attribute *attr, char *buf)
{
	if (engineerid > 1 && system_rev > 1) {
		/* center: x: home: 45, menu: 152, back: 318, search 422, y: 830 */
		return sprintf(buf,
			__stringify(EV_KEY) ":" __stringify(KEY_HOME) ":47:830:74:50"
			":" __stringify(EV_KEY) ":" __stringify(KEY_MENU) ":155:830:80:50"
			":" __stringify(EV_KEY) ":" __stringify(KEY_BACK) ":337:830:90:50"
			":" __stringify(EV_KEY) ":" __stringify(KEY_SEARCH) ":434:830:60:50"
			"\n");
	} else {
		/* center: x: home: 50, menu: 184, back: 315, search 435, y: 830*/
		return sprintf(buf,
			__stringify(EV_KEY) ":" __stringify(KEY_HOME) ":50:830:98:50"
			":" __stringify(EV_KEY) ":" __stringify(KEY_MENU) ":184:830:120:50"
			":" __stringify(EV_KEY) ":" __stringify(KEY_BACK) ":315:830:100:50"
			":" __stringify(EV_KEY) ":" __stringify(KEY_SEARCH) ":435:830:88:50"
			"\n");
	}
}

static struct kobj_attribute incrediblec_virtual_keys_attr = {
	.attr = {
		.name = "virtualkeys.atmel-touchscreen",
		.mode = S_IRUGO,
	},
	.show = &incrediblec_virtual_keys_show,
};

static struct attribute *incrediblec_properties_attrs[] = {
	&incrediblec_virtual_keys_attr.attr,
	NULL
};

/* Published under /sys/board_properties in incrediblec_init(). */
static struct attribute_group incrediblec_properties_attr_group = {
	.attrs = incrediblec_properties_attrs,
};
/* Hardware reset hook: dropping PS_HOLD power-cycles the SoC. */
static void incrediblec_reset(void)
{
	gpio_set_value(INCREDIBLEC_GPIO_PS_HOLD, 0);
}
/*
 * Register the panel device matching the detected panel_type:
 * Samsung AMOLED or the Sony WVGA alternative. Returns the
 * platform_device_register() result.
 */
static int incrediblec_init_panel(void)
{
	if (panel_type == SAMSUNG_PANEL)
		return platform_device_register(&amoled_panel);
	return platform_device_register(&sonywvga_panel);
}
/*
 * Main board init. Order matters: memory-layout fixups must precede
 * device registration, clocks precede everything that touches
 * peripherals, and microp_data must be patched before i2c devices
 * register.
 */
static void __init incrediblec_init(void)
{
	int ret;
	struct kobject *properties_kobj;

	printk("incrediblec_init() revision=%d, engineerid=%d\n", system_rev, engineerid);
	msm_hw_reset_hook = incrediblec_reset;
	/* Rebase pmem/GPU carve-outs for the XA (0/0xF) and >=3
	 * engineering memory layouts; ids 1-2 keep the defaults. */
	if (0 == engineerid || 0xF == engineerid) {
		mdp_pmem_pdata.start = MSM_PMEM_MDP_XA_BASE;
		android_pmem_adsp_pdata.start = MSM_PMEM_ADSP_XA_BASE;
		msm_kgsl_resources[1].start = MSM_GPU_MEM_XA_BASE;
		msm_kgsl_resources[1].end = MSM_GPU_MEM_XA_BASE + MSM_GPU_MEM_SIZE - 1;
	} else if (engineerid >= 3) {
		mdp_pmem_pdata.start = MSM_PMEM_MDP_BASE + MSM_MEM_128MB_OFFSET;
		android_pmem_adsp_pdata.start = MSM_PMEM_ADSP_BASE + MSM_MEM_128MB_OFFSET;
		msm_kgsl_resources[1].start = MSM_GPU_MEM_BASE;
		msm_kgsl_resources[1].end = msm_kgsl_resources[1].start + MSM_GPU_MEM_SIZE - 1;
	}
	OJ_BMA_power();
	msm_acpu_clock_init(&incrediblec_clock_data);
	perflock_init(&incrediblec_perflock_data);
#if defined(CONFIG_MSM_SERIAL_DEBUGGER)
	msm_serial_debug_init(MSM_UART1_PHYS, INT_UART1,
			      &msm_device_uart1.dev, 1, INT_UART1_RX);
#endif
#ifdef CONFIG_ARCH_QSD8X50
	bt_export_bd_address();
#endif
	/* set the gpu power rail to manual mode so clk en/dis will not
	 * turn off gpu power, and hang it on resume */
	incrediblec_kgsl_power_rail_mode(0);
	incrediblec_kgsl_power(true);
#ifdef CONFIG_SERIAL_MSM_HS
	msm_device_uart_dm1.dev.platform_data = &msm_uart_dm1_pdata;
	msm_device_uart_dm1.name = "msm_serial_hs_bcm"; /* for bcm */
#endif
	incrediblec_config_uart_gpios();
	config_gpio_table(camera_off_gpio_table,
			  ARRAY_SIZE(camera_off_gpio_table));
	/*gpio_direction_output(INCREDIBLEC_GPIO_TP_LS_EN, 0);*/
	gpio_direction_output(INCREDIBLEC_GPIO_TP_EN, 0);
	incrediblec_audio_init();
	msm_device_i2c_init();
#ifdef CONFIG_MICROP_COMMON
	incrediblec_microp_init();
#endif
#ifdef CONFIG_USB_ANDROID
	inc_add_usb_devices();
#endif
	/* newer boards use the alternate micro-P function table */
	if (system_rev >= 2) {
		microp_data.num_functions = ARRAY_SIZE(microp_functions_1);
		microp_data.microp_function = microp_functions_1;
	}
	platform_add_devices(devices, ARRAY_SIZE(devices));
	incrediblec_init_panel();
	/* later touch panels ship tuned T9 gain and a matching CRC */
	if (system_rev > 2) {
		incrediblec_atmel_ts_data[0].config_T9[7] = 33;
		incrediblec_atmel_ts_data[0].object_crc[0] = 0x2E;
		incrediblec_atmel_ts_data[0].object_crc[1] = 0x80;
		incrediblec_atmel_ts_data[0].object_crc[2] = 0xE0;
	}
	i2c_register_board_info(0, i2c_devices, ARRAY_SIZE(i2c_devices));
	ret = incrediblec_init_mmc(system_rev);
	if (ret != 0)
		pr_crit("%s: Unable to initialize MMC\n", __func__);
	properties_kobj = kobject_create_and_add("board_properties", NULL);
	if (properties_kobj)
		ret = sysfs_create_group(properties_kobj,
					 &incrediblec_properties_attr_group);
	if (!properties_kobj || ret)
		pr_err("failed to create board_properties\n");
	msm_init_pmic_vibrator();
}
static void __init incrediblec_fixup(struct machine_desc *desc, struct tag *tags,
char **cmdline, struct meminfo *mi)
{
engineerid = parse_tag_engineerid(tags);
mi->nr_banks = 1;
mi->bank[0].start = PHYS_OFFSET;
mi->bank[0].node = PHYS_TO_NID(PHYS_OFFSET);
if (0 == engineerid || 0xF == engineerid)
mi->bank[0].size = (MSM_LINUX_XA_SIZE);
else if (engineerid <= 2) { /* 4G3G */
mi->bank[0].size = MSM_EBI1_BANK0_SIZE;
mi->nr_banks++;
mi->bank[1].start = MSM_EBI1_BANK1_BASE;
mi->bank[1].node = PHYS_TO_NID(MSM_EBI1_BANK1_BASE);
mi->bank[1].size = MSM_EBI1_BANK1_SIZE;
} else {
mi->bank[0].size = MSM_EBI1_BANK0_SIZE;
mi->nr_banks++;
mi->bank[1].start = MSM_EBI1_BANK1_BASE;
mi->bank[1].node = PHYS_TO_NID(MSM_EBI1_BANK1_BASE);
mi->bank[1].size = MSM_EBI1_BANK1_SIZE + MSM_MEM_128MB_OFFSET;
}
}
static void __init incrediblec_map_io(void)
{
msm_map_common_io();
msm_clock_init();
}
extern struct sys_timer msm_timer;
MACHINE_START(INCREDIBLEC, "incrediblec")
#ifdef CONFIG_MSM_DEBUG_UART
.phys_io = MSM_DEBUG_UART_PHYS,
.io_pg_offst = ((MSM_DEBUG_UART_BASE) >> 18) & 0xfffc,
#endif
.boot_params = 0x20000100,
.fixup = incrediblec_fixup,
.map_io = incrediblec_map_io,
.init_irq = msm_init_irq,
.init_machine = incrediblec_init,
.timer = &msm_timer,
MACHINE_END
|
tonytw1/wellynews | 71 | src/main/java/nz/co/searchwellington/Main.java | package nz.co.searchwellington;
import com.google.common.collect.Maps;
import nz.co.searchwellington.commentfeeds.detectors.CommentFeedDetector;
import nz.co.searchwellington.commentfeeds.detectors.DateRegexCommentFeedDetector;
import nz.co.searchwellington.commentfeeds.detectors.GenericCommentFeedDetector;
import nz.co.searchwellington.urls.RssUrlBuilder;
import nz.co.searchwellington.urls.AdminUrlBuilder;
import nz.co.searchwellington.model.SiteInformation;
import nz.co.searchwellington.model.UrlWordsGenerator;
import nz.co.searchwellington.urls.UrlBuilder;
import nz.co.searchwellington.views.*;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.spring.VelocityEngineFactoryBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import uk.co.eelpieconsulting.common.caching.MemcachedCache;
import uk.co.eelpieconsulting.spring.views.velocity.VelocityViewResolver;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class, MongoAutoConfiguration.class})
@EnableScheduling
@ComponentScan({"nz.co.searchwellington","uk.co.eelpieconsulting.common"})
@Configuration
public class Main {
private static ApplicationContext ctx;
public static void main(String[] args) {
ctx = SpringApplication.run(Main.class, args);
}
@Bean
public CommentFeedDetector newswireCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://www.newswire.co.nz/\\d{4}/\\d{2}/.*?/feed/$");
}
@Bean
public CommentFeedDetector aucklandTrainsCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://www.aucklandtrains.co.nz/\\d{4}/\\d{2}/\\d{2}/.*?/feed/$");
}
@Bean
public CommentFeedDetector tepapaBlogCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://blog.tepapa.govt.nz/\\d{4}/\\d{2}/\\d{2}/.*?/feed/$");
}
@Bean
public CommentFeedDetector wellingtonistaCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://wellingtonista.com/crss/node/\\d+$");
}
@Bean
public CommentFeedDetector yearMonthCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://.*?/\\d{4}/\\d{2}/.*?$");
}
@Bean
public CommentFeedDetector dateRegexCommentFeedDetector() {
return new DateRegexCommentFeedDetector();
}
@Bean("feedReaderTaskExecutor")
public TaskExecutor feedReaderTaskExecutor() {
ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
threadPoolTaskExecutor.setCorePoolSize(5);
threadPoolTaskExecutor.setMaxPoolSize(10);
threadPoolTaskExecutor.setQueueCapacity(50000);
return threadPoolTaskExecutor;
}
@Bean("linkCheckerTaskExecutor")
public TaskExecutor linkCheckerTaskExecutor() {
ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
threadPoolTaskExecutor.setCorePoolSize(5);
threadPoolTaskExecutor.setMaxPoolSize(10);
threadPoolTaskExecutor.setQueueCapacity(50000);
return threadPoolTaskExecutor;
}
@Bean
public MemcachedCache memcachedCache(@Value("${memcached.urls}") String memcacheUrl) throws IOException {
return new MemcachedCache(memcacheUrl);
}
@Bean
public VelocityViewResolver velocityViewResolver(
AdminUrlBuilder adminUrlBuilder,
ColumnSplitter columnSplitter,
DateFormatter dateFormatter,
RssUrlBuilder rssUrlBuilder,
SiteInformation siteInformation,
UrlBuilder urlBuilder,
UrlWordsGenerator urlWordsGenerator,
MapPinDeduplicator mapPinDeduplicator,
DeduplicateImageService deduplicateImageService) {
final VelocityViewResolver viewResolver = new VelocityViewResolver();
viewResolver.setCache(true);
viewResolver.setSuffix(".vm");
viewResolver.setContentType("text/html;charset=UTF-8");
final Map<String, Object> attributes = Maps.newHashMap();
attributes.put("adminUrlBuilder", adminUrlBuilder);
attributes.put("columnSplitter", columnSplitter);
attributes.put("dateFormatter", dateFormatter);
attributes.put("escape", new EscapeTools());
attributes.put("rssUrlBuilder", rssUrlBuilder);
attributes.put("siteInformation", siteInformation);
attributes.put("urlBuilder", urlBuilder);
attributes.put("urlWordsGenerator", urlWordsGenerator);
attributes.put("mapPinDeduplicator", mapPinDeduplicator);
attributes.put("deduplicateImageService", deduplicateImageService);
viewResolver.setAttributesMap(attributes);
return viewResolver;
}
@Bean("velocityEngine")
public VelocityEngineFactoryBean velocityEngineFactoryBean() {
VelocityEngineFactoryBean velocityEngineFactory= new VelocityEngineFactoryBean();
Properties vp = new Properties();
vp.setProperty(Velocity.INPUT_ENCODING, "UTF-8");
vp.setProperty(Velocity.EVENTHANDLER_REFERENCEINSERTION, "org.apache.velocity.app.event.implement.EscapeHtmlReference");
vp.setProperty("resource.loader", "class");
vp.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
vp.setProperty("resource.loader.class.cache", "true");
// When resource.manager.cache.default_size is set to 0, then the default implementation uses the standard Java ConcurrentHashMap.
vp.setProperty("resource.manager.cache.default_size", "0");
vp.setProperty("velocimacro.library", "spring.vm");
velocityEngineFactory.setVelocityProperties(vp);
return velocityEngineFactory;
}
}
| package nz.co.searchwellington;
import com.google.common.collect.Maps;
import nz.co.searchwellington.commentfeeds.detectors.CommentFeedDetector;
import nz.co.searchwellington.commentfeeds.detectors.DateRegexCommentFeedDetector;
import nz.co.searchwellington.commentfeeds.detectors.GenericCommentFeedDetector;
import nz.co.searchwellington.urls.RssUrlBuilder;
import nz.co.searchwellington.urls.AdminUrlBuilder;
import nz.co.searchwellington.model.SiteInformation;
import nz.co.searchwellington.model.UrlWordsGenerator;
import nz.co.searchwellington.urls.UrlBuilder;
import nz.co.searchwellington.views.*;
import org.apache.velocity.app.Velocity;
import org.apache.velocity.spring.VelocityEngineFactoryBean;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.mongo.MongoAutoConfiguration;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.TaskExecutor;
import org.springframework.scheduling.annotation.EnableScheduling;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import uk.co.eelpieconsulting.common.caching.MemcachedCache;
import uk.co.eelpieconsulting.spring.views.velocity.VelocityViewResolver;
import java.io.IOException;
import java.util.Map;
import java.util.Properties;
@SpringBootApplication(exclude = {DataSourceAutoConfiguration.class, MongoAutoConfiguration.class})
@EnableScheduling
@ComponentScan({"nz.co.searchwellington","uk.co.eelpieconsulting.common"})
@Configuration
public class Main {
private static ApplicationContext ctx;
public static void main(String[] args) {
ctx = SpringApplication.run(Main.class, args);
}
@Bean
public CommentFeedDetector newswireCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://www.newswire.co.nz/\\d{4}/\\d{2}/.*?/feed/$");
}
@Bean
public CommentFeedDetector aucklandTrainsCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://www.aucklandtrains.co.nz/\\d{4}/\\d{2}/\\d{2}/.*?/feed/$");
}
@Bean
public CommentFeedDetector tepapaBlogCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://blog.tepapa.govt.nz/\\d{4}/\\d{2}/\\d{2}/.*?/feed/$");
}
@Bean
public CommentFeedDetector wellingtonistaCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://wellingtonista.com/crss/node/\\d+$");
}
@Bean
public CommentFeedDetector yearMonthCommentFeedDetector() {
return new GenericCommentFeedDetector("^http://.*?/\\d{4}/\\d{2}/.*?$");
}
@Bean
public CommentFeedDetector dateRegexCommentFeedDetector() {
return new DateRegexCommentFeedDetector();
}
@Bean("elasticIndexTaskExecutor")
public TaskExecutor elasticIndexTaskExecutor() {
ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
threadPoolTaskExecutor.setCorePoolSize(1);
threadPoolTaskExecutor.setMaxPoolSize(2);
threadPoolTaskExecutor.setQueueCapacity(50000);
return threadPoolTaskExecutor;
}
@Bean("feedReaderTaskExecutor")
public TaskExecutor feedReaderTaskExecutor() {
ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
threadPoolTaskExecutor.setCorePoolSize(5);
threadPoolTaskExecutor.setMaxPoolSize(10);
threadPoolTaskExecutor.setQueueCapacity(50000);
return threadPoolTaskExecutor;
}
@Bean("linkCheckerTaskExecutor")
public TaskExecutor linkCheckerTaskExecutor() {
ThreadPoolTaskExecutor threadPoolTaskExecutor = new ThreadPoolTaskExecutor();
threadPoolTaskExecutor.setCorePoolSize(5);
threadPoolTaskExecutor.setMaxPoolSize(10);
threadPoolTaskExecutor.setQueueCapacity(50000);
return threadPoolTaskExecutor;
}
@Bean
public MemcachedCache memcachedCache(@Value("${memcached.urls}") String memcacheUrl) throws IOException {
return new MemcachedCache(memcacheUrl);
}
@Bean
public VelocityViewResolver velocityViewResolver(
AdminUrlBuilder adminUrlBuilder,
ColumnSplitter columnSplitter,
DateFormatter dateFormatter,
RssUrlBuilder rssUrlBuilder,
SiteInformation siteInformation,
UrlBuilder urlBuilder,
UrlWordsGenerator urlWordsGenerator,
MapPinDeduplicator mapPinDeduplicator,
DeduplicateImageService deduplicateImageService) {
final VelocityViewResolver viewResolver = new VelocityViewResolver();
viewResolver.setCache(true);
viewResolver.setSuffix(".vm");
viewResolver.setContentType("text/html;charset=UTF-8");
final Map<String, Object> attributes = Maps.newHashMap();
attributes.put("adminUrlBuilder", adminUrlBuilder);
attributes.put("columnSplitter", columnSplitter);
attributes.put("dateFormatter", dateFormatter);
attributes.put("escape", new EscapeTools());
attributes.put("rssUrlBuilder", rssUrlBuilder);
attributes.put("siteInformation", siteInformation);
attributes.put("urlBuilder", urlBuilder);
attributes.put("urlWordsGenerator", urlWordsGenerator);
attributes.put("mapPinDeduplicator", mapPinDeduplicator);
attributes.put("deduplicateImageService", deduplicateImageService);
viewResolver.setAttributesMap(attributes);
return viewResolver;
}
@Bean("velocityEngine")
public VelocityEngineFactoryBean velocityEngineFactoryBean() {
VelocityEngineFactoryBean velocityEngineFactory= new VelocityEngineFactoryBean();
Properties vp = new Properties();
vp.setProperty(Velocity.INPUT_ENCODING, "UTF-8");
vp.setProperty(Velocity.EVENTHANDLER_REFERENCEINSERTION, "org.apache.velocity.app.event.implement.EscapeHtmlReference");
vp.setProperty("resource.loader", "class");
vp.setProperty("class.resource.loader.class", "org.apache.velocity.runtime.resource.loader.ClasspathResourceLoader");
vp.setProperty("resource.loader.class.cache", "true");
// When resource.manager.cache.default_size is set to 0, then the default implementation uses the standard Java ConcurrentHashMap.
vp.setProperty("resource.manager.cache.default_size", "0");
vp.setProperty("velocimacro.library", "spring.vm");
velocityEngineFactory.setVelocityProperties(vp);
return velocityEngineFactory;
}
}
|
wardencommunity/sinatra_warden | 9 | lib/sinatra_warden/sinatra.rb | require 'sinatra/base'
module Sinatra
module Warden
module Helpers
# The main accessor to the warden middleware
def warden
request.env['warden']
end
# Return session info
#
# @param [Symbol] the scope to retrieve session info for
def session_info(scope=nil)
scope ? warden.session(scope) : scope
end
# Check the current session is authenticated to a given scope
def authenticated?(scope=nil)
scope ? warden.authenticated?(scope) : warden.authenticated?
end
alias_method :logged_in?, :authenticated?
# Authenticate a user against defined strategies
def authenticate(*args)
warden.authenticate!(*args)
end
alias_method :login, :authenticate
# Terminate the current session
#
# @param [Symbol] the session scope to terminate
def logout(scopes=nil)
scopes ? warden.logout(scopes) : warden.logout(warden.config.default_scope)
end
# Access the user from the current session
#
# @param [Symbol] the scope for the logged in user
def user(scope=nil)
scope ? warden.user(scope) : warden.user
end
alias_method :current_user, :user
# Store the logged in user in the session
#
# @param [Object] the user you want to store in the session
# @option opts [Symbol] :scope The scope to assign the user
# @example Set John as the current user
# user = User.find_by_name('John')
def user=(new_user, opts={})
warden.set_user(new_user, opts)
end
alias_method :current_user=, :user=
# Require authorization for an action
#
# @param [String] path to redirect to if user is unauthenticated
def authorize!(failure_path=nil)
unless authenticated?
session[:return_to] = request.path if options.auth_use_referrer
redirect(failure_path ? failure_path : options.auth_failure_path)
end
end
end
def self.registered(app)
app.helpers Warden::Helpers
# Enable Sessions
app.set :sessions, true
app.set :auth_failure_path, '/'
app.set :auth_success_path, '/'
# Setting this to true will store last request URL
# into a user's session so that to redirect back to it
# upon successful authentication
app.set :auth_use_referrer, false
app.set :auth_error_message, "Could not log you in."
app.set :auth_success_message, "You have logged in successfully."
app.set :auth_template_renderer, :haml
app.set :auth_login_template, :login
# OAuth Specific Settings
app.set :auth_use_oauth, false
app.post '/unauthenticated/?' do
status 401
warden.custom_failure! if warden.config.failure_app == self.class
env['x-rack.flash'][:error] = options.auth_error_message if defined?(Rack::Flash)
self.send(options.auth_template_renderer, options.auth_login_template)
end
app.get '/login/?' do
if options.auth_use_oauth && !@auth_oauth_request_token.nil?
session[:request_token] = @auth_oauth_request_token.token
session[:request_token_secret] = @auth_oauth_request_token.secret
redirect @auth_oauth_request_token.authorize_url
else
self.send(options.auth_template_renderer, options.auth_login_template)
end
end
app.get '/oauth_callback/?' do
if options.auth_use_oauth
authenticate
env['x-rack.flash'][:success] = options.auth_success_message if defined?(Rack::Flash)
redirect options.auth_success_path
else
redirect options.auth_failure_path
end
end
app.post '/login/?' do
authenticate
env['x-rack.flash'][:success] = options.auth_success_message if defined?(Rack::Flash)
redirect options.auth_use_referrer && session[:return_to] ? session.delete(:return_to) :
options.auth_success_path
end
app.get '/logout/?' do
authorize!
logout
env['x-rack.flash'][:success] = options.auth_success_message if defined?(Rack::Flash)
redirect options.auth_success_path
end
end
end # Warden
register Warden
end # Sinatra
| require 'sinatra/base'
module Sinatra
module Warden
module Helpers
# The main accessor to the warden middleware
def warden
request.env['warden']
end
# Return session info
#
# @param [Symbol] the scope to retrieve session info for
def session_info(scope=nil)
scope ? warden.session(scope) : scope
end
# Check the current session is authenticated to a given scope
def authenticated?(scope=nil)
scope ? warden.authenticated?(scope) : warden.authenticated?
end
alias_method :logged_in?, :authenticated?
# Authenticate a user against defined strategies
def authenticate(*args)
warden.authenticate!(*args)
end
alias_method :login, :authenticate
# Terminate the current session
#
# @param [Symbol] the session scope to terminate
def logout(scopes=nil)
scopes ? warden.logout(scopes) : warden.logout(warden.config.default_scope)
end
# Access the user from the current session
#
# @param [Symbol] the scope for the logged in user
def user(scope=nil)
scope ? warden.user(scope) : warden.user
end
alias_method :current_user, :user
# Store the logged in user in the session
#
# @param [Object] the user you want to store in the session
# @option opts [Symbol] :scope The scope to assign the user
# @example Set John as the current user
# user = User.find_by_name('John')
def user=(new_user, opts={})
warden.set_user(new_user, opts)
end
alias_method :current_user=, :user=
# Require authorization for an action
#
# @param [String] path to redirect to if user is unauthenticated
def authorize!(failure_path=nil)
unless authenticated?
session[:return_to] = request.path if settings.auth_use_referrer
redirect(failure_path ? failure_path : settings.auth_failure_path)
end
end
end
def self.registered(app)
app.helpers Warden::Helpers
# Enable Sessions
app.set :sessions, true
app.set :auth_failure_path, '/'
app.set :auth_success_path, '/'
# Setting this to true will store last request URL
# into a user's session so that to redirect back to it
# upon successful authentication
app.set :auth_use_referrer, false
app.set :auth_error_message, "Could not log you in."
app.set :auth_success_message, "You have logged in successfully."
app.set :auth_template_renderer, :haml
app.set :auth_login_template, :login
# OAuth Specific Settings
app.set :auth_use_oauth, false
app.post '/unauthenticated/?' do
status 401
warden.custom_failure! if warden.config.failure_app == self.class
env['x-rack.flash'][:error] = settings.auth_error_message if defined?(Rack::Flash)
self.send(settings.auth_template_renderer, settings.auth_login_template)
end
app.get '/login/?' do
if settings.auth_use_oauth && !@auth_oauth_request_token.nil?
session[:request_token] = @auth_oauth_request_token.token
session[:request_token_secret] = @auth_oauth_request_token.secret
redirect @auth_oauth_request_token.authorize_url
else
self.send(settings.auth_template_renderer, settings.auth_login_template)
end
end
app.get '/oauth_callback/?' do
if settings.auth_use_oauth
authenticate
env['x-rack.flash'][:success] = settings.auth_success_message if defined?(Rack::Flash)
redirect settings.auth_success_path
else
redirect settings.auth_failure_path
end
end
app.post '/login/?' do
authenticate
env['x-rack.flash'][:success] = settings.auth_success_message if defined?(Rack::Flash)
redirect settings.auth_use_referrer && session[:return_to] ? session.delete(:return_to) :
settings.auth_success_path
end
app.get '/logout/?' do
authorize!
logout
env['x-rack.flash'][:success] = settings.auth_success_message if defined?(Rack::Flash)
redirect settings.auth_success_path
end
end
end # Warden
register Warden
end # Sinatra
|
srgvg/sysadmintools | 1 | zimbrackup/backup_zimbra.sh | #!/bin/bash
#
# Script to backup a Zimbra installation (open source version)
# by installing the Zimbra on a separate LVM Logical Volume,
# taking a snapshot of that partition after stopping Zimbra,
# restarting Zimbra services, then rsyncing the snapshot to a
# separate backup point.
# This script was originally based on a script found on the Zimbra wiki
# http://wiki.zimbra.com/index.php?title=Open_Source_Edition_Backup_Procedure
# and totally rewritten since then.
# Copyright (C) 2007 Serge van Ginderachter <svg@ginsys.be>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Or download it from http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
####################################################################################
# Read config
source backup_zimbra_config
# zm_backup_path=/opt.bak
# zm_lv=opt
# zm_vg=data
# zm_path=
# zm_lv_fs=ext3
# LVCREATE=/sbin/lvcreate
# LVREMOVE=/sbin/lvremove
# zm_snapshot=opt-snapshot
# zm_snapshot_size=1GB
# zm_snapshot_path=/tmp/opt-snapshot
# V=
# debug=
##########################################
# Do not change anything beyond this point
##########################################
pause() {
if [ -n "$debug" ]; then
echo "Press Enter to execute this step..";
read input;
fi
}
say() {
MESSAGE_PREFIX="zimbra backup:"
MESSAGE="$1"
TIMESTAMP=$(date +"%F %T")
echo -e "$TIMESTAMP $MESSAGE_PREFIX $MESSAGE"
logger -t $log_tag -p $log_facility.$log_level "$MESSAGE"
logger -t $log_tag -p $log_facility_mail.$log_level "$MESSAGE"
pause
}
error () {
MESSAGE_PREFIX="zimbra backup:"
MESSAGE="$1"
TIMESTAMP=$(date +"%F %T")
echo -e $TIMESTAMP $MESSAGE >&2
logger -t $log_tag -p $log_facility.$log_level_err "$MESSAGE"
logger -t $log_tag -p $log_facility_mail.$log_level_err "$MESSAGE"
exit
}
# load kernel module to enable LVM snapshots
/sbin/modprobe dm-snapshot || error "Error loading dm-snapshot module"
# Output date
say "backup started"
# Stop the Zimbra services
say "stopping the Zimbra services, this may take some time"
/etc/init.d/zimbra stop || error "error stopping Zimbra"
[ "$(ps -u zimbra -o "pid=")" ] && kill -9 $(ps -u zimbra -o "pid=") #added as a workaround to zimbra bug 18653
# Create a logical volume called ZimbraBackup
say "creating a LV called $zm_snapshot"
$LVCREATE -L $zm_snapshot_size -s -n $zm_snapshot /dev/$zm_vg/$zm_lv || error "error creating snapshot, exiting"
# Start the Zimbra services
say "starting the Zimbra services in the background....."
(/etc/init.d/zimbra start && say "services background startup completed") || error "services background startup FAILED" &
# Create a mountpoint to mount the logical volume to
say "creating mountpoint for the LV"
mkdir -p $zm_snapshot_path || error "error creating snapshot mount point $zm_snapshot_path"
# Mount the logical volume snapshot to the mountpoint
say "mounting the snapshot $zm_snapshot"
mount -t $zm_lv_fs -o nouuid,ro /dev/$zm_vg/$zm_snapshot $zm_snapshot_path
# Create the current backup
say "rsyncing the snapshot to the backup directory $backup_dir"
rsync -aAH$V --delete $zm_snapshot_path/$zm_path $zm_backup_path || say "error during rsync but continuing the backup script"
# Unmount $zm_snapshot from $zm_snapshot_mnt
say "unmounting the snapshot"
umount $zm_snapshot_path || error "error unmounting snapshot"
# Delete the snapshot mount dir
rmdir $zm_snapshot_path
# Remove the snapshot volume
# https://bugs.launchpad.net/ubuntu/+source/linux-source-2.6.15/+bug/71567
say "pausing 1s and syncing before removing the snapshot from LVM"
sleep 1 ; sync
say "removing the snapshot"
$LVREMOVE --force /dev/$zm_vg/$zm_snapshot || say "error removing the snapshot"
# Done!
say "backup ended"
date >$zm_backup_path/lastsync
| #!/bin/bash
#
# Script to backup a Zimbra installation (open source version)
# by installing the Zimbra on a separate LVM Logical Volume,
# taking a snapshot of that partition after stopping Zimbra,
# restarting Zimbra services, then rsyncing the snapshot to a
# separate backup point.
# This script was originally based on a script found on the Zimbra wiki
# http://wiki.zimbra.com/index.php?title=Open_Source_Edition_Backup_Procedure
# and totally rewritten since then.
# Copyright (C) 2007 Serge van Ginderachter <svg@ginsys.be>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# Or download it from http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
####################################################################################
# Read config
source backup_zimbra_config
# zm_backup_path=/opt.bak
# zm_lv=opt
# zm_lv_mount_point=
# zm_vg=data
# zm_path=
# zm_lv_fs=auto
# zm_mount_opts=ro
# LVCREATE=/sbin/lvcreate
# LVREMOVE=/sbin/lvremove
# zm_snapshot=opt-snapshot
# zm_snapshot_size=1G
# zm_snapshot_extents=
# zm_snapshot_path=/tmp/opt-snapshot
# backup_util=rsync
# obnam_tune="--lru-size=1024 --upload-queue-size=512"
# obnam_keep_policy=14d,8w,12m
# V=
# debug=
##########################################
# Do not change anything beyond this point
##########################################
pause() {
if [ -n "$debug" ]; then
echo "Press Enter to execute this step..";
read input;
fi
}
say() {
MESSAGE_PREFIX="zimbra backup:"
MESSAGE="$1"
TIMESTAMP=$(date +"%F %T")
echo -e "$TIMESTAMP $MESSAGE_PREFIX $MESSAGE"
logger -t $log_tag -p $log_facility.$log_level "$MESSAGE"
logger -t $log_tag -p $log_facility_mail.$log_level "$MESSAGE"
pause
}
error () {
MESSAGE_PREFIX="zimbra backup:"
MESSAGE="$1"
TIMESTAMP=$(date +"%F %T")
echo -e $TIMESTAMP $MESSAGE >&2
logger -t $log_tag -p $log_facility.$log_level_err "$MESSAGE"
logger -t $log_tag -p $log_facility_mail.$log_level_err "$MESSAGE"
exit 1
}
# Check for sane lv settings
if [[ $zm_snapshot_size && $zm_snapshot_extents ]]; then
error "cannot specify both byte size ($zm_snapshot_size) and number of extents ($zm_snapshot_extents) for snapshot; please set only one or the other"
fi
# Output date
say "backup started"
# Stop the Zimbra services
say "stopping the Zimbra services, this may take some time"
/etc/init.d/zimbra stop || error "error stopping Zimbra"
[ "$(ps -u zimbra -o "pid=")" ] && kill -9 $(ps -u zimbra -o "pid=") #added as a workaround to zimbra bug 18653
# Unmount volume to ensure clean filesystem for snapshot
if [[ $zm_lv_mount_point ]]; then
say "unmounting $zm_lv_mount_point"
umount $zm_lv_mount_point || error "unable to unmount $zm_lv_mount_point"
fi
# Create a logical volume called ZimbraBackup
say "creating a LV called $zm_snapshot"
if [[ $zm_snapshot_size ]]; then
lv_size="-L $zm_snapshot_size"
else
lv_size="-l $zm_snapshot_extents"
fi
$LVCREATE $lv_size -s -n $zm_snapshot /dev/$zm_vg/$zm_lv || error "error creating snapshot, exiting"
# Remount original volume
if [[ $zm_lv_mount_point ]]; then
say "re-mounting $zm_lv_mount_point"
mount $zm_lv_mount_point || error "unable to re-mount $zm_lv_mount_point"
fi
# Start the Zimbra services
say "starting the Zimbra services in the background....."
(/etc/init.d/zimbra start && say "services background startup completed") || error "services background startup FAILED" &
# zmconfigd in Zimbra 8.6 seems to have a hard time getting going during heavy I/O; let's give it time to start up before the backup begins
sleep 120
# Create a mountpoint to mount the logical volume to
say "creating mountpoint for the LV"
mkdir -p $zm_snapshot_path || error "error creating snapshot mount point $zm_snapshot_path"
# Mount the logical volume snapshot to the mountpoint
say "mounting the snapshot $zm_snapshot"
mount -t $zm_lv_fs -o $zm_mount_opts /dev/$zm_vg/$zm_snapshot $zm_snapshot_path
# Create the current backup using the configured tool
case $backup_util in
rsync)
# Use rsync
say "rsyncing the snapshot to the backup directory $zm_backup_path"
rsync -aAHS$V --delete $zm_snapshot_path/$zm_path $zm_backup_path || say "error during rsync but continuing the backup script"
;;
obnam)
# Use obnam
say "backing up via obnam to the backup directory $zm_backup_path"
[[ $V = "v" ]] && verbose="--verbose"
obnam backup $verbose $obnam_tune --repository $zm_backup_path $zm_snapshot_path/$zm_path || error "error creating obnam backup"
if [[ $obnam_keep_policy ]]; then
say "forgetting old obnam backups according to policy: $obnam_keep_policy"
obnam forget $verbose --repository $zm_backup_path --keep $obnam_keep_policy || error "error forgetting obnam backups"
fi
;;
esac
# Unmount $zm_snapshot from $zm_snapshot_mnt
say "unmounting the snapshot"
umount $zm_snapshot_path || error "error unmounting snapshot"
# Delete the snapshot mount dir
rmdir $zm_snapshot_path
# Remove the snapshot volume
# https://bugs.launchpad.net/ubuntu/+source/linux-source-2.6.15/+bug/71567
say "pausing 1s and syncing before removing the snapshot from LVM"
sleep 1 ; sync
say "removing the snapshot"
$LVREMOVE --force /dev/$zm_vg/$zm_snapshot || say "error removing the snapshot"
# Done!
say "backup ended"
date >$zm_backup_path/lastsync
|
fjordllc/next-holiday | 4 | test/app_test.rb | require File.expand_path(File.join(File.dirname(__FILE__), 'test_helper'))
require 'app'
class AppTest < Test::Unit::TestCase
context "Access pages" do
should "show index" do
get '/'
assert_match 'Next Holiday', last_response.body
end
end
end
| # frozen_string_literal: true
require File.expand_path(File.join(File.dirname(__FILE__), 'test_helper'))
require 'app'
class AppTest < Test::Unit::TestCase
context "Access pages" do
should "show index" do
get '/'
assert_match 'Next Holiday', last_response.body
end
end
end
|
heycarsten/email-veracity | 5 | lib/email_veracity/domain.rb | module EmailVeracity
class Domain
include Validatability
def self.whitelisted?(name)
Config[:whitelist].include?(name.downcase.strip)
end
def self.blacklisted?(name)
Config[:blacklist].include?(name.downcase.strip)
end
def initialize(name = '')
@name = name
end
def to_s
name
end
def name
@name.to_s.downcase.strip
end
def whitelisted?
Domain.whitelisted?(name)
end
def blacklisted?
Domain.blacklisted?(name)
end
def address_servers
@address_servers ||= servers_in(:a)
end
def exchange_servers
@exchange_servers ||= servers_in(:mx)
end
def servers
address_servers + exchange_servers
end
protected
def validate!
return if whitelisted?
add_error(:blacklisted) if blacklisted? &&
Config[:enforce_blacklist]
unless Config[:skip_lookup]
add_error(:no_records) if servers.empty? &&
!Config.enforced_record?(:a) &&
!Config.enforced_record?(:mx)
add_error(:no_address_servers) if address_servers.empty? &&
Config.enforced_record?(:a)
add_error(:no_exchange_servers) if exchange_servers.empty? &&
Config.enforced_record?(:mx)
end
end
def servers_in(record)
return [] if Config[:skip_lookup] || Utils.blank?(name)
Resolver.get_servers_for(name, record)
rescue DomainResourcesTimeoutError
add_error :timed_out
end
end
end
| module EmailVeracity
class Domain
include Validatability
def self.whitelisted?(name)
Config[:whitelist].include?(name.downcase.strip)
end
def self.blacklisted?(name)
Config[:blacklist].include?(name.downcase.strip)
end
def initialize(name = '')
@name = name
end
def to_s
name
end
def name
@name.to_s.downcase.strip
end
def whitelisted?
Domain.whitelisted?(name)
end
def blacklisted?
Domain.blacklisted?(name)
end
def address_servers
@address_servers ||= servers_in(:a)
end
def exchange_servers
@exchange_servers ||= servers_in(:mx)
end
def servers
address_servers + exchange_servers
end
protected
def validate!
return if whitelisted?
add_error(:blacklisted) if blacklisted? &&
Config[:enforce_blacklist]
unless Config[:skip_lookup]
add_error(:no_records) if !Config.enforced_record?(:a) &&
!Config.enforced_record?(:mx) &&
servers.empty?
add_error(:no_address_servers) if Config.enforced_record?(:a) &&
address_servers.empty?
add_error(:no_exchange_servers) if Config.enforced_record?(:mx) &&
exchange_servers.empty?
end
end
def servers_in(record)
return [] if Config[:skip_lookup] || Utils.blank?(name)
Resolver.get_servers_for(name, record)
rescue DomainResourcesTimeoutError
add_error :timed_out
end
end
end
|
X-Plane/XPlane2Blender | 464 | io_xplane2blender/xplane_types/xplane_mesh.py | import array
import time
import re
import bpy
from ..xplane_config import getDebug
from ..xplane_helpers import floatToStr, logger
from ..xplane_constants import *
from .xplane_face import XPlaneFace
# Class: XPlaneMesh
# Creates the OBJ meshes.
class XPlaneMesh():
# Constructor: __init__
def __init__(self):
# list - contains all mesh vertices
self.vertices = []
# array - contains all face indices
self.indices = array.array('i')
self.faces = []
# int - Stores the current global vertex index.
self.globalindex = 0
self.debug = []
# Method: collectXPlaneObjects
# Fills the <vertices> and <indices> from a list of <XPlaneObjects>.
# This method works recursively on the children of each <XPlaneObject>.
#
# Parameters:
# list xplaneObjects - list of <XPlaneObjects>.
def collectXPlaneObjects(self, xplaneObjects):
# start = time.perf_counter()
debug = getDebug()
supports_split_normals = False
def getSortKey(xplaneObject):
return xplaneObject.name
# sort objects by name for consitent vertex and indices table output
# this is usefull for unit tests and version control, as file changes are kept at a minimum
xplaneObjects = sorted(xplaneObjects, key = getSortKey)
for xplaneObject in xplaneObjects:
# skip non-mesh objects and objects that do not have a xplane bone
if xplaneObject.type == 'MESH' and xplaneObject.xplaneBone:
xplaneObject.indices[0] = len(self.indices)
first_vertice_of_this_xplaneObject = len(self.vertices)
# create a copy of the xplaneObject mesh with modifiers applied and triangulated
mesh = xplaneObject.blenderObject.to_mesh(bpy.context.scene, True, "PREVIEW")
# now get the bake matrix
# and bake it to the mesh
xplaneObject.bakeMatrix = xplaneObject.xplaneBone.getBakeMatrixForAttached()
mesh.transform(xplaneObject.bakeMatrix)
if hasattr(mesh, 'calc_normals_split'): # split normals
mesh.calc_normals_split()
supports_split_normals = True
if hasattr(mesh, 'polygons'): # BMesh
mesh.update(calc_tessface = True)
mesh.calc_tessface()
mesh_faces = mesh.tessfaces
else:
mesh_faces = mesh.faces
# with the new mesh get uvFaces list
uvFaces = self.getUVFaces(mesh, xplaneObject.material.uv_name)
faces = []
vertices_dct = {}
d = {'name': xplaneObject.name,'obj_face': 0,'faces': len(mesh_faces),'quads': 0,'vertices': len(mesh.vertices),'uvs': 0}
# convert faces to triangles
if len(mesh_faces) > 0:
tempfaces = []
for i in range(0, len(mesh_faces)):
if uvFaces != None:
f = self.faceToTrianglesWithUV(mesh_faces[i], uvFaces[i])
tempfaces.extend(f)
d['uvs'] += 1
if len(f) > 1:
d['quads'] += 1
else:
f = self.faceToTrianglesWithUV(mesh_faces[i], None)
tempfaces.extend(f)
if len(f) > 1:
d['quads']+=1
d['obj_faces'] = len(tempfaces)
for f in tempfaces:
xplaneFace = XPlaneFace()
l = len(f['indices'])
for i in range(0, l):
# get the original index but reverse order, as this is reversing normals
vindex = f['indices'][2 - i]
# get the vertice from original mesh
v = mesh.vertices[vindex]
co = v.co
ns = f['norms'][2 - i] if supports_split_normals else v.normal
if f['original_face'].use_smooth: # use smoothed vertex normal
vert = (
co[0], co[2], -co[1],
ns[0], ns[2], -ns[1],
f['uv'][i][0], f['uv'][i][1]
)
else: # use flat face normal
vert = (
co[0], co[2], -co[1],
f['original_face'].normal[0], f['original_face'].normal[2], -f['original_face'].normal[1],
f['uv'][i][0], f['uv'][i][1]
)
if bpy.context.scene.xplane.optimize:
#check for duplicates
index = vertices_dct.get(vert, -1)
else:
index = -1
if index == -1:
index = self.globalindex
self.vertices.append(vert)
self.globalindex += 1
if bpy.context.scene.xplane.optimize:
vertices_dct[vert] = index
# store face information in one struct
xplaneFace.vertices[i] = (vert[0], vert[1], vert[2])
xplaneFace.normals[i] = (vert[3], vert[4], vert[5])
xplaneFace.uvs[i] = (vert[6], vert[7])
xplaneFace.indices[i] = index
self.indices.append(index)
faces.append(xplaneFace)
# store the faces in the prim
xplaneObject.faces = faces
xplaneObject.indices[1] = len(self.indices)
self.faces.extend(faces)
d['start_index'] = xplaneObject.indices[0]
d['end_index'] = xplaneObject.indices[1]
self.debug.append(d)
if debug:
try:
self.debug.sort(key=lambda k: k['obj_faces'],reverse=True)
except:
pass
for d in self.debug:
tris_to_quads = 1.0
if not 'obj_faces' in d:
d['obj_faces'] = 0
if d['faces'] > 0:
tris_to_quads = d['obj_faces'] / d['faces']
logger.info('%s: faces %d | xplaneObject-faces %d | tris-to-quads ratio %6.2f | indices %d | vertices %d' % (d['name'],d['faces'],d['obj_faces'],tris_to_quads,d['end_index']-d['start_index'],d['vertices']))
logger.info('POINT COUNTS: faces %d - vertices %d - indices %d' % (len(self.faces),len(self.vertices),len(self.indices)))
# logger.info("End XPlaneMesh .collectXPlaneObjects: " + str(time.perf_counter()-start))
# Method: getUVFaces
# Returns Blender the UV faces of a Blender mesh.
#
# Parameters:
# mesh - Blender mesh
# string uv_name - Name of the uv layer to use. If not given the first layer will be used.
#
# Returns:
# None if no UV faces could be found or the Blender UV Faces.
def getUVFaces(self, mesh, uv_name):
# get the uv_texture
if hasattr(mesh,'polygons'): # BMesh
uv_textures = mesh.tessface_uv_textures
else:
uv_textures = mesh.uv_textures
if (uv_name != None and len(uv_textures) > 0):
uv_layer = None
if uv_name=="":
uv_layer = uv_textures[0]
else:
i = 0
while uv_layer == None and i < len(uv_textures):
if uv_textures[i].name == uv_name:
uv_layer = uv_textures[i]
i += 1
if uv_layer != None:
return uv_layer.data
else:
return None
else:
return None
# Method: getTriangulatedMesh
# Returns a triangulated mesh from a given Blender xplaneObjectect.
#
# Parameters:
# blenderObject - Blender Object
#
# Returns:
# A Blender mesh
#
# Todos:
# - Does not remove temporarily created mesh/xplaneObjectect yet.
def getTriangulatedMesh(self, blenderObject):
me_da = blenderObject.data.copy() #copy data
me_ob = blenderObject.copy() #copy xplaneObjectect
#note two copy two types else it will use the current data or mesh
me_ob.data = me_da
bpy.context.scene.objects.link(me_ob) #link the xplaneObjectect to the scene #current xplaneObjectect location
for i in bpy.context.scene.objects: i.select = False #deselect all xplaneObjectects
me_ob.select = True
bpy.context.scene.objects.active = me_ob #set the mesh xplaneObjectect to current
bpy.ops.object.mode_set(mode = 'EDIT') #Operators
bpy.ops.mesh.select_all(action = 'SELECT')#select all the face/vertex/edge
bpy.ops.mesh.quads_convert_to_tris() #Operators
bpy.context.scene.update()
bpy.ops.object.mode_set(mode = 'OBJECT') # set it in xplaneObjectect
mesh = me_ob.to_mesh(bpy.context.scene, True, "PREVIEW")
bpy.context.scene.objects.unlink(me_ob)
return mesh
# Method: faceToTrianglesWithUV
# Converts a Blender face (3 or 4 sided) into one or two 3-sided faces together with the texture coordinates.
#
# Parameters:
# face - A Blender face.
# uv - UV coordiantes of a Blender UV face.
#
# Returns:
# list - [{'uv':[[u1,v1],[u2,v2],[u3,v3]],'indices':[i1,i2,i3]},..] In length 1 or 2.
def faceToTrianglesWithUV(self,face,uv):
triangles = []
#inverse uv's as we are inversing face indices later
i0 = face.vertices[0]
i1 = face.vertices[1]
i2 = face.vertices[2]
if len(face.vertices) == 4: #quad
i3 = face.vertices[3]
if uv != None:
triangles.append( {"uv":[[uv.uv3[0], uv.uv3[1]], [uv.uv2[0], uv.uv2[1]], [uv.uv1[0], uv.uv1[1]]], "indices":[face.vertices[0], face.vertices[1], face.vertices[2]],'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
triangles.append( {"uv":[[uv.uv1[0], uv.uv1[1]], [uv.uv4[0], uv.uv4[1]], [uv.uv3[0], uv.uv3[1]]], "indices":[face.vertices[2], face.vertices[3], face.vertices[0]],'original_face':face,
"norms":[face.split_normals[2], face.split_normals[3], face.split_normals[0]]})
else:
triangles.append( {"uv":[[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], "indices":[face.vertices[0], face.vertices[1], face.vertices[2]],'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
triangles.append( {"uv":[[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], "indices":[face.vertices[2], face.vertices[3], face.vertices[0]],'original_face':face,
"norms":[face.split_normals[2], face.split_normals[3], face.split_normals[0]]})
else:
if uv != None:
triangles.append( {"uv":[[uv.uv3[0], uv.uv3[1]], [uv.uv2[0], uv.uv2[1]], [uv.uv1[0], uv.uv1[1]]], "indices":face.vertices,'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
else:
triangles.append( {"uv":[[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], "indices":face.vertices,'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
return triangles
# Method: faceValues
# Returns the converted vertices of a face.
#
# Parameters:
# face - A Blender face.
# mesh - A Blender mesh.
# Matrix matrix - The conversion matrix.
#
# Returns:
# list - List of vertices.
def faceValues(self,face, mesh, matrix):
fv = []
for verti in face.vertices_raw:
fv.append(matrix * mesh.vertices[verti].co)
return fv
# Method: writeVertices
# Returns the OBJ vertex table by iterating <vertices>.
#
# Returns:
# string - The OBJ vertex table.
def writeVertices(self):
######################################################################
# WARNING! This is a hot path! So don't change it without profiling! #
######################################################################
o = bytearray()
#print("Begin XPlaneMesh.writeVertices")
#start = time.perf_counter()
debug = getDebug()
vt_array = array.array('f', [round(component,8) for vertice in self.vertices for component in vertice])
#Loop through every line, format it's 8 components, use rstrip, if statement for 10.00000000->10.0
#52-60 seconds
for i,line in enumerate(range(0,len(vt_array),8)):
o += b"VT"
for component in vt_array[line:line+8]:
sb = bytes("\t%.8f" % component,"utf-8").rstrip(b'0')
if sb[-1] == 46:#'.':
o += sb[:-1]
else:
o += sb
if debug:
o += bytes("\t# %d\n" % i,"utf-8")
else:
o += b"\n"
#print("end XPlaneMesh.writeVertices " + str(time.perf_counter()-start))
return o.decode("utf-8")
# Method: writeIndices
# Returns the OBJ indices table by itering <indices>.
#
# Returns:
# string - The OBJ indices table.
def writeIndices(self):
######################################################################
# WARNING! This is a hot path! So don't change it without profiling! #
######################################################################
o=''
#print("Begin XPlaneMesh.writeIndices")
#start = time.perf_counter()
s_idx10 = "IDX10\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\n"
s_idx = "IDX\t%d\n"
partition_point = len(self.indices) - (len(self.indices) % 10)
if len(self.indices) >= 10:
o += ''.join([s_idx10 % (*self.indices[i:i+10],) for i in range(0,partition_point-1,10)])
o += ''.join([s_idx % (self.indices[i]) for i in range(partition_point,len(self.indices))])
#print("End XPlaneMesh.writeIndices: " + str(time.perf_counter()-start))
return o
def write(self):
o = ''
debug = False
verticesOut = self.writeVertices()
o += verticesOut
if len(verticesOut):
o += '\n'
o += self.writeIndices()
return o
| import array
import time
import re
import bpy
from ..xplane_config import getDebug
from ..xplane_helpers import floatToStr, logger
from ..xplane_constants import *
from .xplane_face import XPlaneFace
# Class: XPlaneMesh
# Creates the OBJ meshes.
class XPlaneMesh():
# Constructor: __init__
def __init__(self):
# list - contains all mesh vertices
self.vertices = []
# array - contains all face indices
self.indices = array.array('i')
self.faces = []
# int - Stores the current global vertex index.
self.globalindex = 0
self.debug = []
# Method: collectXPlaneObjects
# Fills the <vertices> and <indices> from a list of <XPlaneObjects>.
# This method works recursively on the children of each <XPlaneObject>.
#
# Parameters:
# list xplaneObjects - list of <XPlaneObjects>.
def collectXPlaneObjects(self, xplaneObjects):
# start = time.perf_counter()
debug = getDebug()
supports_split_normals = False
def getSortKey(xplaneObject):
return xplaneObject.name
# sort objects by name for consitent vertex and indices table output
# this is usefull for unit tests and version control, as file changes are kept at a minimum
xplaneObjects = sorted(xplaneObjects, key = getSortKey)
for xplaneObject in xplaneObjects:
# skip non-mesh objects and objects that do not have a xplane bone
if xplaneObject.type == 'MESH' and xplaneObject.xplaneBone:
xplaneObject.indices[0] = len(self.indices)
first_vertice_of_this_xplaneObject = len(self.vertices)
# create a copy of the xplaneObject mesh with modifiers applied and triangulated
mesh = xplaneObject.blenderObject.to_mesh(bpy.context.scene, True, "PREVIEW")
# now get the bake matrix
# and bake it to the mesh
xplaneObject.bakeMatrix = xplaneObject.xplaneBone.getBakeMatrixForAttached()
mesh.transform(xplaneObject.bakeMatrix)
if hasattr(mesh, 'calc_normals_split'): # split normals
mesh.calc_normals_split()
supports_split_normals = True
if hasattr(mesh, 'polygons'): # BMesh
mesh.update(calc_tessface = True)
mesh.calc_tessface()
mesh_faces = mesh.tessfaces
else:
mesh_faces = mesh.faces
# with the new mesh get uvFaces list
uvFaces = self.getUVFaces(mesh, xplaneObject.material.uv_name)
faces = []
vertices_dct = {}
d = {'name': xplaneObject.name,'obj_face': 0,'faces': len(mesh_faces),'quads': 0,'vertices': len(mesh.vertices),'uvs': 0}
# convert faces to triangles
if len(mesh_faces) > 0:
tempfaces = []
for i in range(0, len(mesh_faces)):
if uvFaces != None:
f = self.faceToTrianglesWithUV(mesh_faces[i], uvFaces[i])
tempfaces.extend(f)
d['uvs'] += 1
if len(f) > 1:
d['quads'] += 1
else:
f = self.faceToTrianglesWithUV(mesh_faces[i], None)
tempfaces.extend(f)
if len(f) > 1:
d['quads']+=1
d['obj_faces'] = len(tempfaces)
for f in tempfaces:
xplaneFace = XPlaneFace()
l = len(f['indices'])
for i in range(0, l):
# get the original index but reverse order, as this is reversing normals
vindex = f['indices'][2 - i]
# get the vertice from original mesh
v = mesh.vertices[vindex]
co = v.co
ns = f['norms'][2 - i] if supports_split_normals else v.normal
if f['original_face'].use_smooth: # use smoothed vertex normal
vert = (
co[0], co[2], -co[1],
ns[0], ns[2], -ns[1],
f['uv'][i][0], f['uv'][i][1]
)
else: # use flat face normal
vert = (
co[0], co[2], -co[1],
f['original_face'].normal[0], f['original_face'].normal[2], -f['original_face'].normal[1],
f['uv'][i][0], f['uv'][i][1]
)
if bpy.context.scene.xplane.optimize:
#check for duplicates
index = vertices_dct.get(vert, -1)
else:
index = -1
if index == -1:
index = self.globalindex
self.vertices.append(vert)
self.globalindex += 1
if bpy.context.scene.xplane.optimize:
vertices_dct[vert] = index
# store face information in one struct
xplaneFace.vertices[i] = (vert[0], vert[1], vert[2])
xplaneFace.normals[i] = (vert[3], vert[4], vert[5])
xplaneFace.uvs[i] = (vert[6], vert[7])
xplaneFace.indices[i] = index
self.indices.append(index)
faces.append(xplaneFace)
# store the faces in the prim
xplaneObject.faces = faces
xplaneObject.indices[1] = len(self.indices)
self.faces.extend(faces)
d['start_index'] = xplaneObject.indices[0]
d['end_index'] = xplaneObject.indices[1]
self.debug.append(d)
if debug:
try:
self.debug.sort(key=lambda k: k['obj_faces'],reverse=True)
except:
pass
for d in self.debug:
tris_to_quads = 1.0
if not 'obj_faces' in d:
d['obj_faces'] = 0
if d['faces'] > 0:
tris_to_quads = d['obj_faces'] / d['faces']
logger.info('%s: faces %d | xplaneObject-faces %d | tris-to-quads ratio %6.2f | indices %d | vertices %d' % (d['name'],d['faces'],d['obj_faces'],tris_to_quads,d['end_index']-d['start_index'],d['vertices']))
logger.info('POINT COUNTS: faces %d - vertices %d - indices %d' % (len(self.faces),len(self.vertices),len(self.indices)))
# logger.info("End XPlaneMesh .collectXPlaneObjects: " + str(time.perf_counter()-start))
# Method: getUVFaces
# Returns Blender the UV faces of a Blender mesh.
#
# Parameters:
# mesh - Blender mesh
# string uv_name - Name of the uv layer to use. If not given the first layer will be used.
#
# Returns:
# None if no UV faces could be found or the Blender UV Faces.
def getUVFaces(self, mesh, uv_name):
# get the uv_texture
if hasattr(mesh,'polygons'): # BMesh
uv_textures = mesh.tessface_uv_textures
else:
uv_textures = mesh.uv_textures
if (uv_name != None and len(uv_textures) > 0):
uv_layer = None
if uv_name=="":
uv_layer = uv_textures[0]
else:
i = 0
while uv_layer == None and i < len(uv_textures):
if uv_textures[i].name == uv_name:
uv_layer = uv_textures[i]
i += 1
if uv_layer != None:
return uv_layer.data
else:
return None
else:
return None
# Method: getTriangulatedMesh
# Returns a triangulated mesh from a given Blender xplaneObjectect.
#
# Parameters:
# blenderObject - Blender Object
#
# Returns:
# A Blender mesh
#
# Todos:
# - Does not remove temporarily created mesh/xplaneObjectect yet.
def getTriangulatedMesh(self, blenderObject):
me_da = blenderObject.data.copy() #copy data
me_ob = blenderObject.copy() #copy xplaneObjectect
#note two copy two types else it will use the current data or mesh
me_ob.data = me_da
bpy.context.scene.objects.link(me_ob) #link the xplaneObjectect to the scene #current xplaneObjectect location
for i in bpy.context.scene.objects: i.select = False #deselect all xplaneObjectects
me_ob.select = True
bpy.context.scene.objects.active = me_ob #set the mesh xplaneObjectect to current
bpy.ops.object.mode_set(mode = 'EDIT') #Operators
bpy.ops.mesh.select_all(action = 'SELECT')#select all the face/vertex/edge
bpy.ops.mesh.quads_convert_to_tris() #Operators
bpy.context.scene.update()
bpy.ops.object.mode_set(mode = 'OBJECT') # set it in xplaneObjectect
mesh = me_ob.to_mesh(bpy.context.scene, True, "PREVIEW")
bpy.context.scene.objects.unlink(me_ob)
return mesh
# Method: faceToTrianglesWithUV
# Converts a Blender face (3 or 4 sided) into one or two 3-sided faces together with the texture coordinates.
#
# Parameters:
# face - A Blender face.
# uv - UV coordiantes of a Blender UV face.
#
# Returns:
# list - [{'uv':[[u1,v1],[u2,v2],[u3,v3]],'indices':[i1,i2,i3]},..] In length 1 or 2.
def faceToTrianglesWithUV(self,face,uv):
triangles = []
#inverse uv's as we are inversing face indices later
i0 = face.vertices[0]
i1 = face.vertices[1]
i2 = face.vertices[2]
if len(face.vertices) == 4: #quad
i3 = face.vertices[3]
if uv != None:
triangles.append( {"uv":[[uv.uv3[0], uv.uv3[1]], [uv.uv2[0], uv.uv2[1]], [uv.uv1[0], uv.uv1[1]]], "indices":[face.vertices[0], face.vertices[1], face.vertices[2]],'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
triangles.append( {"uv":[[uv.uv1[0], uv.uv1[1]], [uv.uv4[0], uv.uv4[1]], [uv.uv3[0], uv.uv3[1]]], "indices":[face.vertices[2], face.vertices[3], face.vertices[0]],'original_face':face,
"norms":[face.split_normals[2], face.split_normals[3], face.split_normals[0]]})
else:
triangles.append( {"uv":[[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], "indices":[face.vertices[0], face.vertices[1], face.vertices[2]],'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
triangles.append( {"uv":[[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], "indices":[face.vertices[2], face.vertices[3], face.vertices[0]],'original_face':face,
"norms":[face.split_normals[2], face.split_normals[3], face.split_normals[0]]})
else:
if uv != None:
triangles.append( {"uv":[[uv.uv3[0], uv.uv3[1]], [uv.uv2[0], uv.uv2[1]], [uv.uv1[0], uv.uv1[1]]], "indices":face.vertices,'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
else:
triangles.append( {"uv":[[0.0, 0.0], [0.0, 0.0], [0.0, 0.0]], "indices":face.vertices,'original_face':face,
"norms":[face.split_normals[0], face.split_normals[1], face.split_normals[2]]})
return triangles
# Method: faceValues
# Returns the converted vertices of a face.
#
# Parameters:
# face - A Blender face.
# mesh - A Blender mesh.
# Matrix matrix - The conversion matrix.
#
# Returns:
# list - List of vertices.
def faceValues(self,face, mesh, matrix):
fv = []
for verti in face.vertices_raw:
fv.append(matrix * mesh.vertices[verti].co)
return fv
# Method: writeVertices
# Returns the OBJ vertex table by iterating <vertices>.
#
# Returns:
# string - The OBJ vertex table.
def writeVertices(self):
######################################################################
# WARNING! This is a hot path! So don't change it without profiling! #
######################################################################
#print("Begin XPlaneMesh.writeVertices")
#start = time.perf_counter()
debug = getDebug()
precision = 10**8
#Loop through every line, format it's 8 components
#52-60 seconds
if debug:
fmt = 'VT\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\t# %d\n'
return ''.join([fmt % (*[(int(component * precision + 0.5) / precision) for component in line],i) for i, line in enumerate(self.vertices)])
else:
fmt = 'VT\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\t%.8g\n'
return ''.join([fmt % (*[(int(component * precision + 0.5) / precision) for component in line],) for line in self.vertices])
#print("end XPlaneMesh.writeVertices " + str(time.perf_counter()-start))
# Method: writeIndices
# Returns the OBJ indices table by itering <indices>.
#
# Returns:
# string - The OBJ indices table.
def writeIndices(self):
######################################################################
# WARNING! This is a hot path! So don't change it without profiling! #
######################################################################
o=''
indices = self.indices
length = len(indices)
# print("Begin XPlaneMesh.writeIndices")
# start = time.perf_counter()
s_idx10 = "IDX10\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d\n"
s_idx = "IDX\t%d\n"
partition_point = length - (length % 10)
if length >= 10:
o += ''.join([s_idx10 % (*indices[i:i+10],) for i in range(0,partition_point-1,10)])
o += ''.join([s_idx % (indices[i]) for i in range(partition_point,length)])
# print("End XPlaneMesh.writeIndices: " + str(time.perf_counter()-start))
return o
def write(self):
o = ''
debug = False
verticesOut = self.writeVertices()
o += verticesOut
if len(verticesOut):
o += '\n'
o += self.writeIndices()
return o
|
macdonst/phonegap-android | 1 | example/index.html | <!DOCTYPE HTML>
<html>
<head>
<meta name="viewport" content="width=320; user-scalable=no" />
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<title>PhoneGap</title>
<link rel="stylesheet" href="master.css" type="text/css" media="screen" title="no title" charset="utf-8">
<script type="text/javascript" charset="utf-8" src="phonegap.0.9.6.1.min.js"></script>
<script type="text/javascript" charset="utf-8" src="main.js"></script>
</head>
<body onload="init();" id="stage" class="theme">
<h1>Welcome to PhoneGap!</h1>
<h2>this file is located at assets/www/index.html</h2>
<div id="info">
<h4>Platform: <span id="platform"> </span>, Version: <span id="version"> </span></h4>
<h4>UUID: <span id="uuid"> </span>, Name: <span id="name"> </span></h4>
<h4>Width: <span id="width"> </span>, Height: <span id="height">
</span>, Color Depth: <span id="colorDepth"></span></h4>
</div>
<dl id="accel-data">
<dt>X:</dt><dd id="x"> </dd>
<dt>Y:</dt><dd id="y"> </dd>
<dt>Z:</dt><dd id="z"> </dd>
</dl>
<a href="#" class="btn large" onclick="toggleAccel();">Toggle Accelerometer</a>
<a href="#" class="btn large" onclick="getLocation();">Get Location</a>
<a href="tel://411" class="btn large">Call 411</a>
<a href="#" class="btn large" onclick="beep();">Beep</a>
<a href="#" class="btn large" onclick="vibrate();">Vibrate</a>
<a href="#" class="btn large" onclick="show_pic();">Get a Picture</a>
<a href="#" class="btn large" onclick="get_contacts();">Get Phone's Contacts</a>
<a href="#" class="btn large" onclick="check_network();">Check Network</a>
<div id="viewport" class="viewport" style="display: none;">
<img style="width:60px;height:60px" id="test_img" src="" />
</div>
</body>
</html>
| <!DOCTYPE HTML>
<html>
<head>
<meta name="viewport" content="width=320; user-scalable=no" />
<meta http-equiv="Content-type" content="text/html; charset=utf-8">
<title>PhoneGap</title>
<link rel="stylesheet" href="master.css" type="text/css" media="screen" title="no title" charset="utf-8">
<script type="text/javascript" charset="utf-8" src="phonegap-0.9.6.1.min.js"></script>
<script type="text/javascript" charset="utf-8" src="main.js"></script>
</head>
<body onload="init();" id="stage" class="theme">
<h1>Welcome to PhoneGap!</h1>
<h2>this file is located at assets/www/index.html</h2>
<div id="info">
<h4>Platform: <span id="platform"> </span>, Version: <span id="version"> </span></h4>
<h4>UUID: <span id="uuid"> </span>, Name: <span id="name"> </span></h4>
<h4>Width: <span id="width"> </span>, Height: <span id="height">
</span>, Color Depth: <span id="colorDepth"></span></h4>
</div>
<dl id="accel-data">
<dt>X:</dt><dd id="x"> </dd>
<dt>Y:</dt><dd id="y"> </dd>
<dt>Z:</dt><dd id="z"> </dd>
</dl>
<a href="#" class="btn large" onclick="toggleAccel();">Toggle Accelerometer</a>
<a href="#" class="btn large" onclick="getLocation();">Get Location</a>
<a href="tel://411" class="btn large">Call 411</a>
<a href="#" class="btn large" onclick="beep();">Beep</a>
<a href="#" class="btn large" onclick="vibrate();">Vibrate</a>
<a href="#" class="btn large" onclick="show_pic();">Get a Picture</a>
<a href="#" class="btn large" onclick="get_contacts();">Get Phone's Contacts</a>
<a href="#" class="btn large" onclick="check_network();">Check Network</a>
<div id="viewport" class="viewport" style="display: none;">
<img style="width:60px;height:60px" id="test_img" src="" />
</div>
</body>
</html>
|
fabiomcosta/mootools-meio-mask | 17 | Source/Meio.Mask.Reverse.js | /*
---
name: Meio.Mask.Reverse
description: A mask used for currency and decimal numbers.
authors:
- Fábio Miranda Costa
requires:
- Meio.Mask
license: MIT-style license
provides: [Meio.Mask.Reverse]
...
*/
Meio.Mask.Reverse = new Class({
Extends: Meio.Mask,
options: {
autoSetSize: false,
autoEmpty: false,
alignText: true,
symbol: '',
precision: 2,
decimal: ',',
thousands: '.',
maxLength: 18
},
initialize: function(options){
this.parent(options);
var thousandsChar = this.options.thousands,
escapedThousandsChars = thousandsChar.escapeRegExp(),
escapedDecimalChar = this.options.decimal.escapeRegExp();
this.maxlength = this.options.maxLength;
this.reThousands = /(\d+)(\d{3})/;
this.reRemoveLeadingZeros = /^0+(.*)$/;
this.reDecimalNumber = /^\d$/;
this.thousandsReplaceStr = '$1' + thousandsChar + '$2';
this.reThousandsReplace = new RegExp(escapedThousandsChars, 'g');
this.reCleanup = new RegExp('[' + escapedThousandsChars + escapedDecimalChar + ']', 'g');
this.reRemoveNonNumbers = new RegExp('[^\\d' + escapedThousandsChars + escapedDecimalChar + ']', 'g');
},
link: function(element){
this.parent(element);
if (this.options.alignText) this.element.setStyle('text-align', 'right');
var elementValue = this.element.get('value');
if (elementValue === '' && !this.options.autoEmpty){
this.element.set('value', this.forceMask(elementValue, false));
}
return this;
},
focus: function(e, o){
var element = this.element,
elValue = element.get('value');
if (this.options.autoEmpty){
if (elValue === '') element.set('value', this.mask(elValue));
} else {
element.set('value', this.getValue(elValue, true));
}
this.parent(e, o);
},
blur: function(e, o){
this.parent(e, o);
var element = this.element,
value = this.getValue(element.get('value'));
if (this.options.autoEmpty && this.mask(value) == this.mask()) value = '';
element.set('value', value);
},
keypress: function(e, o){
if (this.ignore) return true;
e.preventDefault();
var state = this.getCurrentState(e, o), elementValue = state.value;
if (!this.testEvents(elementValue, state._char, e.code, o.isRemoveKey)) return true;
elementValue = this.forceMask(elementValue, true);
this.element.set('value', elementValue).setCaretPosition(elementValue.length);
return this.parent();
},
testEvents: function(elementValue, _char, code, isRemoveKey){
var args = [this.element, code, _char];
if (!isRemoveKey){
var elementValueLength = this.getValue(elementValue, false).length;
if (!(this.reDecimalNumber).test(_char) || (this.maxlength && elementValueLength > this.maxlength)){
this.fireEvent('invalid', args);
return false;
}
this.fireEvent('valid', args);
}
return true;
},
paste: function(e, o){
var element = this.element;
var elValue = element.get('value');
element.set('value', (elValue = this.forceMask(elValue, true))).setCaretPosition(elValue.length);
return true;
},
forceMask: function(str, applySymbol){
str = this.cleanup(str);
var precision = this.options.precision;
var zeros = precision + 1 - str.length;
if (zeros > 0) str = this.zeroize(str, zeros);
if (precision){
var decimalIndex = str.length - precision;
str = str.substring(0, decimalIndex) + this.options.decimal + str.substring(decimalIndex);
}
return this.getValue(this.maskThousands(str), applySymbol);
},
cleanup: function(str){
return this.getValue(str.replace(this.reCleanup, '')).replace(this.reRemoveLeadingZeros, '$1');
},
mask: function(str){
str = this.unmask(str || '0').replace('.', this.options.decimal);
return this.getValue(this.maskThousands(str), false);
},
	// Strip the symbol and return the plain fixed-precision number as a
	// string (e.g. '1234.50').
	unmask: function(str){
		return this.toNumber(this.getValue(str));
	},
	// Convert a (possibly masked) string into a plain decimal string with
	// the configured precision, e.g. '1.234,5' -> '1234.50'.
	toNumber: function(str){
		str = str.replace(this.reRemoveNonNumbers, '');
		// Only normalize separators when the string is not already a valid
		// number literal (i.e. it still contains locale separators).
		if (!isFinite(str)){
			if (this.options.thousands) str = str.replace(this.reThousandsReplace, '');
			var decimalChar = this.options.decimal;
			if (decimalChar) str = str.replace(decimalChar, '.');
		}
		return str.toFloat().toFixed(this.options.precision);
	},
getValue: function(str, applySymbol){
var symbol = this.options.symbol;
return (str.substring(0, symbol.length) === symbol) ?
applySymbol ? str : str.substring(symbol.length) :
applySymbol ? symbol + str : str;
},
maskThousands: function(str){
if (this.options.thousands){
while (this.reThousands.test(str)) str = str.replace(this.reThousands, this.thousandsReplaceStr);
}
return str;
},
zeroize: function(str, zeros){
while (zeros--) str = '0' + str;
return str;
},
shouldFocusNext: function(){
return this.getValue(this.element.get('value'), false).length >= this.options.maxLength;
}
});
// Register the concrete reverse masks (e.g. Meio.Mask.Reverse.Integer)
// built from Meio.Mask.Reverse with these option presets.
Meio.Mask.createMasks('Reverse', {
	'Integer'	: {precision: 0, maxLength: 18},
	'Decimal'	: { },
	'DecimalUs'	: {thousands: ',', decimal: '.'},
	'Reais'		: {symbol: 'R$ ' },
	'Dollar'	: {symbol: 'US$ ', thousands: ',', decimal: '.'}
});
| /*
---
name: Meio.Mask.Reverse
description: A mask used for currency and decimal numbers.
authors:
- Fábio Miranda Costa
requires:
- Meio.Mask
license: MIT-style license
provides: [Meio.Mask.Reverse]
...
*/
// Reverse (right-to-left) input mask for currency and decimal numbers:
// typed digits enter at the rightmost position and push left while the
// decimal separator, thousands grouping and optional currency symbol are
// maintained automatically.
Meio.Mask.Reverse = new Class({
	Extends: Meio.Mask,
	options: {
		autoSetSize: false,	// keep the input's size attribute untouched
		autoEmpty: false,	// if true, clear the field when it only holds the zero mask
		alignText: true,	// right-align the text inside the input
		symbol: '',		// currency symbol prefix, e.g. 'US$ '
		precision: 2,		// number of fraction digits
		decimal: ',',		// decimal separator character
		thousands: '.',		// thousands separator character ('' disables grouping)
		maxLength: 18		// maximum number of digits accepted
	},
	// Pre-compile the regular expressions used for masking from the
	// configured separator characters.
	initialize: function(options){
		this.parent(options);
		var thousandsChar = this.options.thousands,
			escapedThousandsChars = thousandsChar.escapeRegExp(),
			escapedDecimalChar = this.options.decimal.escapeRegExp();
		this.maxlength = this.options.maxLength;
		this.reThousands = /(\d+)(\d{3})/;
		this.reRemoveLeadingZeros = /^0+(.*)$/;
		this.reDecimalNumber = /^\d$/;
		this.thousandsReplaceStr = '$1' + thousandsChar + '$2';
		this.reThousandsReplace = new RegExp(escapedThousandsChars, 'g');
		this.reCleanup = new RegExp('[' + escapedThousandsChars + escapedDecimalChar + ']', 'g');
		this.reRemoveNonNumbers = new RegExp('[^\\d' + escapedThousandsChars + escapedDecimalChar + ']', 'g');
	},
	// Attach the mask to an element: right-align its text and pre-fill
	// the zeroed mask when empty, unless autoEmpty is enabled.
	link: function(element){
		this.parent(element);
		if (this.options.alignText) this.element.setStyle('text-align', 'right');
		var elementValue = this.element.get('value');
		if (elementValue === '' && !this.options.autoEmpty){
			this.element.set('value', this.forceMask(elementValue, false));
		}
		return this;
	},
	// On focus: seed an empty autoEmpty field with the zero mask;
	// otherwise make sure the currency symbol is displayed.
	focus: function(e, o){
		var element = this.element,
			elValue = element.get('value');
		if (this.options.autoEmpty){
			if (elValue === '') element.set('value', this.mask(elValue));
		} else {
			element.set('value', this.getValue(elValue, true));
		}
		this.parent(e, o);
	},
	// On blur: strip the symbol and, with autoEmpty, clear a field that
	// was left at the zero mask.
	blur: function(e, o){
		this.parent(e, o);
		var element = this.element,
			value = this.getValue(element.get('value'));
		if (this.options.autoEmpty && this.mask(value) == this.mask()) value = '';
		element.set('value', value);
	},
	// Handle each keystroke: validate it, re-mask the value and move the
	// caret to the end of the field.
	keypress: function(e, o){
		if (this.ignore) return true;
		e.preventDefault();
		var state = this.getCurrentState(e, o), elementValue = state.value;
		if (!this.testEvents(elementValue, state._char, e.code, o.isRemoveKey)) return true;
		elementValue = this.forceMask(elementValue, true);
		this.element.set('value', elementValue).setCaretPosition(elementValue.length);
		return this.parent();
	},
	// Fire 'invalid' (and reject) for non-digit keys or when maxLength is
	// exceeded; fire 'valid' otherwise. Remove keys always pass.
	testEvents: function(elementValue, _char, code, isRemoveKey){
		var args = [this.element, code, _char];
		if (!isRemoveKey){
			var elementValueLength = this.getValue(elementValue, false).length;
			if (!(this.reDecimalNumber).test(_char) || (this.maxlength && elementValueLength > this.maxlength)){
				this.fireEvent('invalid', args);
				return false;
			}
			this.fireEvent('valid', args);
		}
		return true;
	},
	// Re-mask the whole field content after a paste.
	paste: function(e, o){
		var element = this.element;
		var elValue = element.get('value');
		element.set('value', (elValue = this.forceMask(elValue, true))).setCaretPosition(elValue.length);
		return true;
	},
	// Build the masked string from raw input: strip separators, pad with
	// zeros, insert the decimal separator `precision` digits from the
	// right, group thousands and (optionally) prepend the symbol.
	forceMask: function(str, applySymbol){
		str = this.cleanup(str);
		var precision = this.options.precision;
		var zeros = precision + 1 - str.length;
		if (zeros > 0) str = this.zeroize(str, zeros);
		if (precision){
			var decimalIndex = str.length - precision;
			str = str.substring(0, decimalIndex) + this.options.decimal + str.substring(decimalIndex);
		}
		return this.getValue(this.maskThousands(str), applySymbol);
	},
	// Remove separator characters, the symbol and leading zeros.
	cleanup: function(str){
		return this.getValue(str.replace(this.reCleanup, '')).replace(this.reRemoveLeadingZeros, '$1');
	},
	// Mask an arbitrary numeric string (defaults to '0'), without symbol.
	mask: function(str){
		str = this.unmask(str || '0').replace('.', this.options.decimal);
		return this.getValue(this.maskThousands(str), false);
	},
	// Strip the symbol and return the plain fixed-precision number string.
	unmask: function(str){
		return this.toNumber(this.getValue(str));
	},
	// Convert a possibly masked string into a plain decimal string with
	// the configured precision, e.g. '1.234,5' -> '1234.50'.
	toNumber: function(str){
		str = str.replace(this.reRemoveNonNumbers, '');
		// Only normalize separators when the string is not already a
		// valid number literal (i.e. it still contains locale separators).
		if (!isFinite(str)){
			if (this.options.thousands) str = str.replace(this.reThousandsReplace, '');
			var decimalChar = this.options.decimal;
			if (decimalChar) str = str.replace(decimalChar, '.');
		}
		return str.toFloat().toFixed(this.options.precision);
	},
	// Return str with the symbol prepended (applySymbol true) or stripped
	// (false), whether or not it was present before.
	getValue: function(str, applySymbol){
		var symbol = this.options.symbol;
		return (str.substring(0, symbol.length) === symbol) ?
			applySymbol ? str : str.substring(symbol.length) :
			applySymbol ? symbol + str : str;
	},
	// Insert the thousands separator every three digits of the integer
	// part; the >= 1000 guard keeps the grouping regex away from the
	// fraction digits of small values.
	maskThousands: function(str){
		if (this.options.thousands){
			while (str.toFloat() >= 1000 && this.reThousands.test(str)) {
				str = str.replace(this.reThousands, this.thousandsReplaceStr);
			}
		}
		return str;
	},
	// Left-pad str with `zeros` '0' characters.
	zeroize: function(str, zeros){
		while (zeros--) str = '0' + str;
		return str;
	},
	// True once the field holds maxLength digits (used to auto-advance
	// focus to the next field).
	shouldFocusNext: function(){
		return this.getValue(this.element.get('value'), false).length >= this.options.maxLength;
	}
});
// Register the concrete reverse masks (e.g. Meio.Mask.Reverse.Integer)
// built from Meio.Mask.Reverse with these option presets.
Meio.Mask.createMasks('Reverse', {
	'Integer'	: {precision: 0, maxLength: 18},
	'Decimal'	: { },
	'DecimalUs'	: {thousands: ',', decimal: '.'},
	'Reais'		: {symbol: 'R$ ' },
	'Dollar'	: {symbol: 'US$ ', thousands: ',', decimal: '.'}
});
|